diff --git a/MANIFEST.in b/MANIFEST.in index d41947e7b7..55e2206e3d 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -4,11 +4,12 @@ recursive-include awx/templates *.html recursive-include awx/ui *.html *.js recursive-include awx/ui/static *.css *.ico *.png *.gif *.jpg recursive-include awx/ui/static *.eot *.svg *.ttf *.woff *.otf +recursive-include awx/lib/site-packages * recursive-include config * recursive-include config/deb * recursive-include config/rpm * recursive-exclude awx devonly.py -recursive-exclude awx/settings local_settings.py +recursive-exclude awx/settings local_settings.py* include *.py *.txt *.md include MANIFEST.in include COPYING diff --git a/awx/__init__.py b/awx/__init__.py index 2d8bf6893e..d0d07598b6 100644 --- a/awx/__init__.py +++ b/awx/__init__.py @@ -19,6 +19,10 @@ except ImportError: def manage(): # Update the default settings environment variable based on current mode. os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'awx.settings.%s' % MODE) + # Add local site-packages directory to path. + local_site_packages = os.path.join(os.path.dirname(__file__), 'lib', + 'site-packages') + sys.path.insert(0, local_site_packages) from django.core.management import execute_from_command_line if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'): sys.stdout.write('awx-%s\n' % __version__) diff --git a/awx/lib/site-packages/ANSI.py b/awx/lib/site-packages/ANSI.py new file mode 100644 index 0000000000..537017e90b --- /dev/null +++ b/awx/lib/site-packages/ANSI.py @@ -0,0 +1,334 @@ +"""This implements an ANSI terminal emulator as a subclass of screen. + +$Id: ANSI.py 491 2007-12-16 20:04:57Z noah $ +""" +# references: +# http://www.retards.org/terminals/vt102.html +# http://vt100.net/docs/vt102-ug/contents.html +# http://vt100.net/docs/vt220-rm/ +# http://www.termsys.demon.co.uk/vtansi.htm + +import screen +import FSM +import copy +import string + +def Emit (fsm): + + screen = fsm.memory[0] + screen.write_ch(fsm.input_symbol) + +def StartNumber (fsm): + + fsm.memory.append (fsm.input_symbol) + +def BuildNumber (fsm): + + ns = fsm.memory.pop() + ns = ns + fsm.input_symbol + fsm.memory.append (ns) + +def DoBackOne (fsm): + + screen = fsm.memory[0] + screen.cursor_back () + +def DoBack (fsm): + + count = int(fsm.memory.pop()) + screen = fsm.memory[0] + screen.cursor_back (count) + +def DoDownOne (fsm): + + screen = fsm.memory[0] + screen.cursor_down () + +def DoDown (fsm): + + count = int(fsm.memory.pop()) + screen = fsm.memory[0] + screen.cursor_down (count) + +def DoForwardOne (fsm): + + screen = fsm.memory[0] + screen.cursor_forward () + +def DoForward (fsm): + + count = int(fsm.memory.pop()) + screen = fsm.memory[0] + screen.cursor_forward (count) + +def DoUpReverse (fsm): + + screen = fsm.memory[0] + screen.cursor_up_reverse() + +def DoUpOne (fsm): + + screen = fsm.memory[0] + screen.cursor_up () + +def DoUp (fsm): + + count = int(fsm.memory.pop()) + screen = fsm.memory[0] + screen.cursor_up (count) + +def DoHome (fsm): + + c = int(fsm.memory.pop()) + r = int(fsm.memory.pop()) + screen = fsm.memory[0] + screen.cursor_home (r,c) + +def DoHomeOrigin (fsm): + + c = 1 + r = 1 + screen = fsm.memory[0] + screen.cursor_home (r,c) + +def DoEraseDown (fsm): + + screen = fsm.memory[0] + screen.erase_down() + +def DoErase (fsm): + + arg = int(fsm.memory.pop()) + screen = fsm.memory[0] + if arg == 0: + screen.erase_down() + elif arg == 1: + screen.erase_up() + elif arg == 2: + screen.erase_screen() + +def DoEraseEndOfLine (fsm): + + screen = fsm.memory[0] + 
screen.erase_end_of_line()
+
+def DoEraseLine (fsm):
+
+    # CSI K argument: 0 erases to end of line, 1 to start, 2 the whole line.
+    arg = int(fsm.memory.pop())
+    screen = fsm.memory[0]
+    if arg == 0:
+        screen.erase_end_of_line()
+    elif arg == 1:
+        screen.erase_start_of_line()
+    elif arg == 2:
+        screen.erase_line()
+
+def DoEnableScroll (fsm):
+
+    screen = fsm.memory[0]
+    screen.scroll_screen()
+
+def DoCursorSave (fsm):
+
+    screen = fsm.memory[0]
+    screen.cursor_save_attrs()
+
+def DoCursorRestore (fsm):
+
+    screen = fsm.memory[0]
+    screen.cursor_restore_attrs()
+
+def DoScrollRegion (fsm):
+
+    screen = fsm.memory[0]
+    r2 = int(fsm.memory.pop())
+    r1 = int(fsm.memory.pop())
+    screen.scroll_screen_rows (r1,r2)
+
+def DoMode (fsm):
+
+    screen = fsm.memory[0]
+    mode = fsm.memory.pop() # Should be 4
+    # screen.setReplaceMode ()
+
+def Log (fsm):
+
+    screen = fsm.memory[0]
+    fsm.memory = [screen]
+    fout = open ('log', 'a')
+    fout.write (fsm.input_symbol + ',' + fsm.current_state + '\n')
+    fout.close()
+
+class term (screen.screen):
+    """This is a placeholder.
+    In theory I might want to add other terminal types.
+    """
+    def __init__ (self, r=24, c=80):
+        screen.screen.__init__(self, r,c)
+
+class ANSI (term):
+
+    """This class encapsulates a generic terminal. It filters a stream and
+    maintains the state of a screen object. """
+
+    def __init__ (self, r=24,c=80):
+
+        term.__init__(self,r,c)
+
+        #self.screen = screen (24,80)
+        self.state = FSM.FSM ('INIT',[self])
+        self.state.set_default_transition (Log, 'INIT')
+        self.state.add_transition_any ('INIT', Emit, 'INIT')
+        self.state.add_transition ('\x1b', 'INIT', None, 'ESC')
+        self.state.add_transition_any ('ESC', Log, 'INIT')
+        self.state.add_transition ('(', 'ESC', None, 'G0SCS')
+        self.state.add_transition (')', 'ESC', None, 'G1SCS')
+        self.state.add_transition_list ('AB012', 'G0SCS', None, 'INIT')
+        self.state.add_transition_list ('AB012', 'G1SCS', None, 'INIT')
+        self.state.add_transition ('7', 'ESC', DoCursorSave, 'INIT')
+        self.state.add_transition ('8', 'ESC', DoCursorRestore, 'INIT')
+        self.state.add_transition ('M', 'ESC', DoUpReverse, 'INIT')
+        self.state.add_transition ('>', 'ESC', DoUpReverse, 'INIT')
+        self.state.add_transition ('<', 'ESC', DoUpReverse, 'INIT')
+        self.state.add_transition ('=', 'ESC', None, 'INIT') # Selects application keypad.
+        self.state.add_transition ('#', 'ESC', None, 'GRAPHICS_POUND')
+        self.state.add_transition_any ('GRAPHICS_POUND', None, 'INIT')
+        self.state.add_transition ('[', 'ESC', None, 'ELB')
+        # ELB means Escape Left Bracket. 
That is ^[[ + self.state.add_transition ('H', 'ELB', DoHomeOrigin, 'INIT') + self.state.add_transition ('D', 'ELB', DoBackOne, 'INIT') + self.state.add_transition ('B', 'ELB', DoDownOne, 'INIT') + self.state.add_transition ('C', 'ELB', DoForwardOne, 'INIT') + self.state.add_transition ('A', 'ELB', DoUpOne, 'INIT') + self.state.add_transition ('J', 'ELB', DoEraseDown, 'INIT') + self.state.add_transition ('K', 'ELB', DoEraseEndOfLine, 'INIT') + self.state.add_transition ('r', 'ELB', DoEnableScroll, 'INIT') + self.state.add_transition ('m', 'ELB', None, 'INIT') + self.state.add_transition ('?', 'ELB', None, 'MODECRAP') + self.state.add_transition_list (string.digits, 'ELB', StartNumber, 'NUMBER_1') + self.state.add_transition_list (string.digits, 'NUMBER_1', BuildNumber, 'NUMBER_1') + self.state.add_transition ('D', 'NUMBER_1', DoBack, 'INIT') + self.state.add_transition ('B', 'NUMBER_1', DoDown, 'INIT') + self.state.add_transition ('C', 'NUMBER_1', DoForward, 'INIT') + self.state.add_transition ('A', 'NUMBER_1', DoUp, 'INIT') + self.state.add_transition ('J', 'NUMBER_1', DoErase, 'INIT') + self.state.add_transition ('K', 'NUMBER_1', DoEraseLine, 'INIT') + self.state.add_transition ('l', 'NUMBER_1', DoMode, 'INIT') + ### It gets worse... the 'm' code can have infinite number of + ### number;number;number before it. I've never seen more than two, + ### but the specs say it's allowed. crap! + self.state.add_transition ('m', 'NUMBER_1', None, 'INIT') + ### LED control. Same problem as 'm' code. + self.state.add_transition ('q', 'NUMBER_1', None, 'INIT') + + # \E[?47h appears to be "switch to alternate screen" + # \E[?47l restores alternate screen... I think. + self.state.add_transition_list (string.digits, 'MODECRAP', StartNumber, 'MODECRAP_NUM') + self.state.add_transition_list (string.digits, 'MODECRAP_NUM', BuildNumber, 'MODECRAP_NUM') + self.state.add_transition ('l', 'MODECRAP_NUM', None, 'INIT') + self.state.add_transition ('h', 'MODECRAP_NUM', None, 'INIT') + +#RM Reset Mode Esc [ Ps l none + self.state.add_transition (';', 'NUMBER_1', None, 'SEMICOLON') + self.state.add_transition_any ('SEMICOLON', Log, 'INIT') + self.state.add_transition_list (string.digits, 'SEMICOLON', StartNumber, 'NUMBER_2') + self.state.add_transition_list (string.digits, 'NUMBER_2', BuildNumber, 'NUMBER_2') + self.state.add_transition_any ('NUMBER_2', Log, 'INIT') + self.state.add_transition ('H', 'NUMBER_2', DoHome, 'INIT') + self.state.add_transition ('f', 'NUMBER_2', DoHome, 'INIT') + self.state.add_transition ('r', 'NUMBER_2', DoScrollRegion, 'INIT') + ### It gets worse... the 'm' code can have infinite number of + ### number;number;number before it. I've never seen more than two, + ### but the specs say it's allowed. crap! + self.state.add_transition ('m', 'NUMBER_2', None, 'INIT') + ### LED control. Same problem as 'm' code. + self.state.add_transition ('q', 'NUMBER_2', None, 'INIT') + + def process (self, c): + + self.state.process(c) + + def process_list (self, l): + + self.write(l) + + def write (self, s): + + for c in s: + self.process(c) + + def flush (self): + + pass + + def write_ch (self, ch): + + """This puts a character at the current cursor position. cursor + position if moved forward with wrap-around, but no scrolling is done if + the cursor hits the lower-right corner of the screen. """ + + #\r and \n both produce a call to crlf(). 
+ ch = ch[0] + + if ch == '\r': + # self.crlf() + return + if ch == '\n': + self.crlf() + return + if ch == chr(screen.BS): + self.cursor_back() + self.put_abs(self.cur_r, self.cur_c, ' ') + return + + if ch not in string.printable: + fout = open ('log', 'a') + fout.write ('Nonprint: ' + str(ord(ch)) + '\n') + fout.close() + return + self.put_abs(self.cur_r, self.cur_c, ch) + old_r = self.cur_r + old_c = self.cur_c + self.cursor_forward() + if old_c == self.cur_c: + self.cursor_down() + if old_r != self.cur_r: + self.cursor_home (self.cur_r, 1) + else: + self.scroll_up () + self.cursor_home (self.cur_r, 1) + self.erase_line() + +# def test (self): +# +# import sys +# write_text = 'I\'ve got a ferret sticking up my nose.\n' + \ +# '(He\'s got a ferret sticking up his nose.)\n' + \ +# 'How it got there I can\'t tell\n' + \ +# 'But now it\'s there it hurts like hell\n' + \ +# 'And what is more it radically affects my sense of smell.\n' + \ +# '(His sense of smell.)\n' + \ +# 'I can see a bare-bottomed mandril.\n' + \ +# '(Slyly eyeing his other nostril.)\n' + \ +# 'If it jumps inside there too I really don\'t know what to do\n' + \ +# 'I\'ll be the proud posessor of a kind of nasal zoo.\n' + \ +# '(A nasal zoo.)\n' + \ +# 'I\'ve got a ferret sticking up my nose.\n' + \ +# '(And what is worst of all it constantly explodes.)\n' + \ +# '"Ferrets don\'t explode," you say\n' + \ +# 'But it happened nine times yesterday\n' + \ +# 'And I should know for each time I was standing in the way.\n' + \ +# 'I\'ve got a ferret sticking up my nose.\n' + \ +# '(He\'s got a ferret sticking up his nose.)\n' + \ +# 'How it got there I can\'t tell\n' + \ +# 'But now it\'s there it hurts like hell\n' + \ +# 'And what is more it radically affects my sense of smell.\n' + \ +# '(His sense of smell.)' +# self.fill('.') +# self.cursor_home() +# for c in write_text: +# self.write_ch (c) +# print str(self) +# +#if __name__ == '__main__': +# t = ANSI(6,65) +# t.test() diff --git a/awx/lib/site-packages/FSM.py b/awx/lib/site-packages/FSM.py new file mode 100644 index 0000000000..751eb37e13 --- /dev/null +++ b/awx/lib/site-packages/FSM.py @@ -0,0 +1,331 @@ +#!/usr/bin/env python + +"""This module implements a Finite State Machine (FSM). In addition to state +this FSM also maintains a user defined "memory". So this FSM can be used as a +Push-down Automata (PDA) since a PDA is a FSM + memory. + +The following describes how the FSM works, but you will probably also need to +see the example function to understand how the FSM is used in practice. + +You define an FSM by building tables of transitions. For a given input symbol +the process() method uses these tables to decide what action to call and what +the next state will be. The FSM has a table of transitions that associate: + + (input_symbol, current_state) --> (action, next_state) + +Where "action" is a function you define. The symbols and states can be any +objects. You use the add_transition() and add_transition_list() methods to add +to the transition table. The FSM also has a table of transitions that +associate: + + (current_state) --> (action, next_state) + +You use the add_transition_any() method to add to this transition table. The +FSM also has one default transition that is not associated with any specific +input_symbol or state. You use the set_default_transition() method to set the +default transition. + +When an action function is called it is passed a reference to the FSM. 
The +action function may then access attributes of the FSM such as input_symbol, +current_state, or "memory". The "memory" attribute can be any object that you +want to pass along to the action functions. It is not used by the FSM itself. +For parsing you would typically pass a list to be used as a stack. + +The processing sequence is as follows. The process() method is given an +input_symbol to process. The FSM will search the table of transitions that +associate: + + (input_symbol, current_state) --> (action, next_state) + +If the pair (input_symbol, current_state) is found then process() will call the +associated action function and then set the current state to the next_state. + +If the FSM cannot find a match for (input_symbol, current_state) it will then +search the table of transitions that associate: + + (current_state) --> (action, next_state) + +If the current_state is found then the process() method will call the +associated action function and then set the current state to the next_state. +Notice that this table lacks an input_symbol. It lets you define transitions +for a current_state and ANY input_symbol. Hence, it is called the "any" table. +Remember, it is always checked after first searching the table for a specific +(input_symbol, current_state). + +For the case where the FSM did not match either of the previous two cases the +FSM will try to use the default transition. If the default transition is +defined then the process() method will call the associated action function and +then set the current state to the next_state. This lets you define a default +transition as a catch-all case. You can think of it as an exception handler. +There can be only one default transition. + +Finally, if none of the previous cases are defined for an input_symbol and +current_state then the FSM will raise an exception. This may be desirable, but +you can always prevent this just by defining a default transition. + +Noah Spurrier 20020822 +""" + +class ExceptionFSM(Exception): + + """This is the FSM Exception class.""" + + def __init__(self, value): + self.value = value + + def __str__(self): + return `self.value` + +class FSM: + + """This is a Finite State Machine (FSM). + """ + + def __init__(self, initial_state, memory=None): + + """This creates the FSM. You set the initial state here. The "memory" + attribute is any object that you want to pass along to the action + functions. It is not used by the FSM. For parsing you would typically + pass a list to be used as a stack. """ + + # Map (input_symbol, current_state) --> (action, next_state). + self.state_transitions = {} + # Map (current_state) --> (action, next_state). + self.state_transitions_any = {} + self.default_transition = None + + self.input_symbol = None + self.initial_state = initial_state + self.current_state = self.initial_state + self.next_state = None + self.action = None + self.memory = memory + + def reset (self): + + """This sets the current_state to the initial_state and sets + input_symbol to None. The initial state was set by the constructor + __init__(). """ + + self.current_state = self.initial_state + self.input_symbol = None + + def add_transition (self, input_symbol, state, action=None, next_state=None): + + """This adds a transition that associates: + + (input_symbol, current_state) --> (action, next_state) + + The action may be set to None in which case the process() method will + ignore the action and only set the next_state. The next_state may be + set to None in which case the current state will be unchanged. 
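+
+        A minimal usage sketch (the 'go'/'idle'/'running' symbols and
+        states here are made up for illustration only):
+
+            fsm = FSM ('idle', memory=[])
+            fsm.add_transition ('go', 'idle', None, 'running')
+            fsm.process ('go')    # current_state is now 'running'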
+ + You can also set transitions for a list of symbols by using + add_transition_list(). """ + + if next_state is None: + next_state = state + self.state_transitions[(input_symbol, state)] = (action, next_state) + + def add_transition_list (self, list_input_symbols, state, action=None, next_state=None): + + """This adds the same transition for a list of input symbols. + You can pass a list or a string. Note that it is handy to use + string.digits, string.whitespace, string.letters, etc. to add + transitions that match character classes. + + The action may be set to None in which case the process() method will + ignore the action and only set the next_state. The next_state may be + set to None in which case the current state will be unchanged. """ + + if next_state is None: + next_state = state + for input_symbol in list_input_symbols: + self.add_transition (input_symbol, state, action, next_state) + + def add_transition_any (self, state, action=None, next_state=None): + + """This adds a transition that associates: + + (current_state) --> (action, next_state) + + That is, any input symbol will match the current state. + The process() method checks the "any" state associations after it first + checks for an exact match of (input_symbol, current_state). + + The action may be set to None in which case the process() method will + ignore the action and only set the next_state. The next_state may be + set to None in which case the current state will be unchanged. """ + + if next_state is None: + next_state = state + self.state_transitions_any [state] = (action, next_state) + + def set_default_transition (self, action, next_state): + + """This sets the default transition. This defines an action and + next_state if the FSM cannot find the input symbol and the current + state in the transition list and if the FSM cannot find the + current_state in the transition_any list. This is useful as a final + fall-through state for catching errors and undefined states. + + The default transition can be removed by setting the attribute + default_transition to None. """ + + self.default_transition = (action, next_state) + + def get_transition (self, input_symbol, state): + + """This returns (action, next state) given an input_symbol and state. + This does not modify the FSM state, so calling this method has no side + effects. Normally you do not call this method directly. It is called by + process(). + + The sequence of steps to check for a defined transition goes from the + most specific to the least specific. + + 1. Check state_transitions[] that match exactly the tuple, + (input_symbol, state) + + 2. Check state_transitions_any[] that match (state) + In other words, match a specific state and ANY input_symbol. + + 3. Check if the default_transition is defined. + This catches any input_symbol and any state. + This is a handler for errors, undefined states, or defaults. + + 4. No transition was defined. If we get here then raise an exception. + """ + + if self.state_transitions.has_key((input_symbol, state)): + return self.state_transitions[(input_symbol, state)] + elif self.state_transitions_any.has_key (state): + return self.state_transitions_any[state] + elif self.default_transition is not None: + return self.default_transition + else: + raise ExceptionFSM ('Transition is undefined: (%s, %s).' % + (str(input_symbol), str(state)) ) + + def process (self, input_symbol): + + """This is the main method that you call to process input. This may + cause the FSM to change state and call an action. 
This method calls + get_transition() to find the action and next_state associated with the + input_symbol and current_state. If the action is None then the action + is not called and only the current state is changed. This method + processes one complete input symbol. You can process a list of symbols + (or a string) by calling process_list(). """ + + self.input_symbol = input_symbol + (self.action, self.next_state) = self.get_transition (self.input_symbol, self.current_state) + if self.action is not None: + self.action (self) + self.current_state = self.next_state + self.next_state = None + + def process_list (self, input_symbols): + + """This takes a list and sends each element to process(). The list may + be a string or any iterable object. """ + + for s in input_symbols: + self.process (s) + +############################################################################## +# The following is an example that demonstrates the use of the FSM class to +# process an RPN expression. Run this module from the command line. You will +# get a prompt > for input. Enter an RPN Expression. Numbers may be integers. +# Operators are * / + - Use the = sign to evaluate and print the expression. +# For example: +# +# 167 3 2 2 * * * 1 - = +# +# will print: +# +# 2003 +############################################################################## + +import sys, os, traceback, optparse, time, string + +# +# These define the actions. +# Note that "memory" is a list being used as a stack. +# + +def BeginBuildNumber (fsm): + fsm.memory.append (fsm.input_symbol) + +def BuildNumber (fsm): + s = fsm.memory.pop () + s = s + fsm.input_symbol + fsm.memory.append (s) + +def EndBuildNumber (fsm): + s = fsm.memory.pop () + fsm.memory.append (int(s)) + +def DoOperator (fsm): + ar = fsm.memory.pop() + al = fsm.memory.pop() + if fsm.input_symbol == '+': + fsm.memory.append (al + ar) + elif fsm.input_symbol == '-': + fsm.memory.append (al - ar) + elif fsm.input_symbol == '*': + fsm.memory.append (al * ar) + elif fsm.input_symbol == '/': + fsm.memory.append (al / ar) + +def DoEqual (fsm): + print str(fsm.memory.pop()) + +def Error (fsm): + print 'That does not compute.' + print str(fsm.input_symbol) + +def main(): + + """This is where the example starts and the FSM state transitions are + defined. Note that states are strings (such as 'INIT'). This is not + necessary, but it makes the example easier to read. """ + + f = FSM ('INIT', []) # "memory" will be used as a stack. + f.set_default_transition (Error, 'INIT') + f.add_transition_any ('INIT', None, 'INIT') + f.add_transition ('=', 'INIT', DoEqual, 'INIT') + f.add_transition_list (string.digits, 'INIT', BeginBuildNumber, 'BUILDING_NUMBER') + f.add_transition_list (string.digits, 'BUILDING_NUMBER', BuildNumber, 'BUILDING_NUMBER') + f.add_transition_list (string.whitespace, 'BUILDING_NUMBER', EndBuildNumber, 'INIT') + f.add_transition_list ('+-*/', 'INIT', DoOperator, 'INIT') + + print + print 'Enter an RPN Expression.' + print 'Numbers may be integers. Operators are * / + -' + print 'Use the = sign to evaluate and print the expression.' 
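+    # Tracing the example below: digits accumulate into a number, each
+    # whitespace character pushes the finished number onto the stack,
+    # '*' and '-' pop two operands and push the result, and '=' pops
+    # and prints the final value (2003 for the expression shown).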
+ print 'For example: ' + print ' 167 3 2 2 * * * 1 - =' + inputstr = raw_input ('> ') + f.process_list(inputstr) + +if __name__ == '__main__': + try: + start_time = time.time() + parser = optparse.OptionParser(formatter=optparse.TitledHelpFormatter(), usage=globals()['__doc__'], version='$Id: FSM.py 490 2007-12-07 15:46:24Z noah $') + parser.add_option ('-v', '--verbose', action='store_true', default=False, help='verbose output') + (options, args) = parser.parse_args() + if options.verbose: print time.asctime() + main() + if options.verbose: print time.asctime() + if options.verbose: print 'TOTAL TIME IN MINUTES:', + if options.verbose: print (time.time() - start_time) / 60.0 + sys.exit(0) + except KeyboardInterrupt, e: # Ctrl-C + raise e + except SystemExit, e: # sys.exit() + raise e + except Exception, e: + print 'ERROR, UNEXPECTED EXCEPTION' + print str(e) + traceback.print_exc() + os._exit(1) diff --git a/awx/lib/site-packages/README b/awx/lib/site-packages/README new file mode 100644 index 0000000000..d9d46fd10f --- /dev/null +++ b/awx/lib/site-packages/README @@ -0,0 +1,22 @@ +Local versions of third-party packages required by AWX. Package names and +versions are listed below, along with notes on which files are included. + +amqp-1.0.11 (amqp/*) +anyjson-0.3.3 (anyjson/*) +billiard-2.7.3.28 (billiard/*, funtests/*, excluded _billiard.so) +celery-3.0.19 (celery/*, excluded bin/celery* and bin/camqadm) +django-celery-3.0.17 (djcelery/*, excluded bin/djcelerymon) +django-extensions-1.1.1 (django_extensions/*) +django-jsonfield-0.9.10 (jsonfield/*) +django-taggit-0.10a1 (taggit/*) +djangorestframework-2.3.5 (rest_framework/*) +importlib-1.0.2 (importlib/*, needed for Python 2.6 support) +kombu-2.5.10 (kombu/*) +Markdown-2.3.1 (markdown/*, excluded bin/markdown_py) +ordereddict-1.1 (ordereddict.py, needed for Python 2.6 support) +pexpect-2.4 (pexpect.py, pxssh.py, fdpexpect.py, FSM.py, screen.py, ANSI.py) +python-dateutil-2.1 (dateutil/*) +pytz-2013b (pytz/*) +requests-1.2.3 (requests/*) +six-1.3.0 (six.py) +South-0.8.1 (south/*) diff --git a/awx/lib/site-packages/amqp/__init__.py b/awx/lib/site-packages/amqp/__init__.py new file mode 100644 index 0000000000..82abe7f721 --- /dev/null +++ b/awx/lib/site-packages/amqp/__init__.py @@ -0,0 +1,50 @@ +"""Low-level AMQP client for Python (fork of amqplib)""" +# Copyright (C) 2007-2008 Barry Pederson +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 +from __future__ import absolute_import + +VERSION = (1, 0, 11) +__version__ = '.'.join(map(str, VERSION[0:3])) + ''.join(VERSION[3:]) +__author__ = 'Barry Pederson' +__maintainer__ = 'Ask Solem' +__contact__ = 'pyamqp@celeryproject.org' +__homepage__ = 'http://github.com/celery/py-amqp' +__docformat__ = 'restructuredtext' + +# -eof meta- + +# +# Pull in the public items from the various sub-modules +# +from .basic_message import Message +from .channel import Channel +from .connection import Connection +from .exceptions import ( + AMQPError, + ConnectionError, + ChannelError, + ConsumerCancel, +) + +__all__ = [ + 'Connection', + 'Channel', + 'Message', + 'AMQPError', + 'ConnectionError', + 'ChannelError', + 'ConsumerCancel', +] diff --git a/awx/lib/site-packages/amqp/abstract_channel.py b/awx/lib/site-packages/amqp/abstract_channel.py new file mode 100644 index 0000000000..ea53fde7e2 --- /dev/null +++ b/awx/lib/site-packages/amqp/abstract_channel.py @@ -0,0 +1,94 @@ +"""Code common to Connection and Channel objects.""" +# Copyright (C) 2007-2008 Barry Pederson +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 +from __future__ import absolute_import + +from .exceptions import AMQPError +from .serialization import AMQPWriter + +try: + bytes +except NameError: + # Python 2.5 and lower + bytes = str + +__all__ = ['AbstractChannel'] + + +class AbstractChannel(object): + """Superclass for both the Connection, which is treated + as channel 0, and other user-created Channel objects. + + The subclasses must have a _METHOD_MAP class property, mapping + between AMQP method signatures and Python methods. 
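+
+    For instance, a concrete subclass's map might tie the Channel.open_ok
+    method signature to its handler (a sketch, not the full map):
+
+        _METHOD_MAP = {(20, 11): _open_ok}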
+ + """ + def __init__(self, connection, channel_id): + self.connection = connection + self.channel_id = channel_id + connection.channels[channel_id] = self + self.method_queue = [] # Higher level queue for methods + self.auto_decode = False + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.close() + + def _send_method(self, method_sig, args=bytes(), content=None): + """Send a method for our channel.""" + if isinstance(args, AMQPWriter): + args = args.getvalue() + + self.connection.method_writer.write_method( + self.channel_id, method_sig, args, content, + ) + + def close(self): + """Close this Channel or Connection""" + raise NotImplementedError('Must be overriden in subclass') + + def wait(self, allowed_methods=None): + """Wait for a method that matches our allowed_methods parameter (the + default value of None means match any method), and dispatch to it.""" + method_sig, args, content = self.connection._wait_method( + self.channel_id, allowed_methods) + + return self.dispatch_method(method_sig, args, content) + + def dispatch_method(self, method_sig, args, content): + if content and \ + self.auto_decode and \ + hasattr(content, 'content_encoding'): + try: + content.body = content.body.decode(content.content_encoding) + except Exception: + pass + + try: + amqp_method = self._METHOD_MAP[method_sig] + except KeyError: + raise AMQPError('Unknown AMQP method %r' % (method_sig, )) + + if content is None: + return amqp_method(self, args) + else: + return amqp_method(self, args, content) + + #: Placeholder, the concrete implementations will have to + #: supply their own versions of _METHOD_MAP + _METHOD_MAP = {} diff --git a/awx/lib/site-packages/amqp/basic_message.py b/awx/lib/site-packages/amqp/basic_message.py new file mode 100644 index 0000000000..dc7c7a1237 --- /dev/null +++ b/awx/lib/site-packages/amqp/basic_message.py @@ -0,0 +1,123 @@ +"""Messages for AMQP""" +# Copyright (C) 2007-2008 Barry Pederson +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 +from __future__ import absolute_import + +from .serialization import GenericContent + +__all__ = ['Message'] + + +class Message(GenericContent): + """A Message for use with the Channnel.basic_* methods.""" + + #: Instances of this class have these attributes, which + #: are passed back and forth as message properties between + #: client and server + PROPERTIES = [ + ('content_type', 'shortstr'), + ('content_encoding', 'shortstr'), + ('application_headers', 'table'), + ('delivery_mode', 'octet'), + ('priority', 'octet'), + ('correlation_id', 'shortstr'), + ('reply_to', 'shortstr'), + ('expiration', 'shortstr'), + ('message_id', 'shortstr'), + ('timestamp', 'timestamp'), + ('type', 'shortstr'), + ('user_id', 'shortstr'), + ('app_id', 'shortstr'), + ('cluster_id', 'shortstr') + ] + + def __init__(self, body='', children=None, **properties): + """Expected arg types + + body: string + children: (not supported) + + Keyword properties may include: + + content_type: shortstr + MIME content type + + content_encoding: shortstr + MIME content encoding + + application_headers: table + Message header field table, a dict with string keys, + and string | int | Decimal | datetime | dict values. + + delivery_mode: octet + Non-persistent (1) or persistent (2) + + priority: octet + The message priority, 0 to 9 + + correlation_id: shortstr + The application correlation identifier + + reply_to: shortstr + The destination to reply to + + expiration: shortstr + Message expiration specification + + message_id: shortstr + The application message identifier + + timestamp: datetime.datetime + The message timestamp + + type: shortstr + The message type name + + user_id: shortstr + The creating user id + + app_id: shortstr + The creating application id + + cluster_id: shortstr + Intra-cluster routing identifier + + Unicode bodies are encoded according to the 'content_encoding' + argument. If that's None, it's set to 'UTF-8' automatically. + + example:: + + msg = Message('hello world', + content_type='text/plain', + application_headers={'foo': 7}) + + """ + super(Message, self).__init__(**properties) + self.body = body + + def __eq__(self, other): + """Check if the properties and bodies of this Message and another + Message are the same. + + Received messages may contain a 'delivery_info' attribute, + which isn't compared. + + """ + try: + return (super(Message, self).__eq__(other) and + self.body == other.body) + except AttributeError: + return NotImplemented diff --git a/awx/lib/site-packages/amqp/channel.py b/awx/lib/site-packages/amqp/channel.py new file mode 100644 index 0000000000..fa6cd2dcf7 --- /dev/null +++ b/awx/lib/site-packages/amqp/channel.py @@ -0,0 +1,2506 @@ +"""AMQP Channels""" +# Copyright (C) 2007-2008 Barry Pederson +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 +from __future__ import absolute_import + +import logging + +from collections import defaultdict +from Queue import Queue +from warnings import warn + +from .abstract_channel import AbstractChannel +from .exceptions import ChannelError, ConsumerCancel +from .serialization import AMQPWriter + +__all__ = ['Channel'] + +AMQP_LOGGER = logging.getLogger('amqp') + + +class Channel(AbstractChannel): + """Work with channels + + The channel class provides methods for a client to establish a + virtual connection - a channel - to a server and for both peers to + operate the virtual connection thereafter. + + GRAMMAR:: + + channel = open-channel *use-channel close-channel + open-channel = C:OPEN S:OPEN-OK + use-channel = C:FLOW S:FLOW-OK + / S:FLOW C:FLOW-OK + / functional-class + close-channel = C:CLOSE S:CLOSE-OK + / S:CLOSE C:CLOSE-OK + + """ + + def __init__(self, connection, channel_id=None, auto_decode=True): + """Create a channel bound to a connection and using the specified + numeric channel_id, and open on the server. + + The 'auto_decode' parameter (defaults to True), indicates + whether the library should attempt to decode the body + of Messages to a Unicode string if there's a 'content_encoding' + property for the message. If there's no 'content_encoding' + property, or the decode raises an Exception, the message body + is left as plain bytes. + + """ + if channel_id: + connection._claim_channel_id(channel_id) + else: + channel_id = connection._get_free_channel_id() + + AMQP_LOGGER.debug('using channel_id: %d', channel_id) + + super(Channel, self).__init__(connection, channel_id) + + self.is_open = False + self.active = True # Flow control + self.returned_messages = Queue() + self.callbacks = {} + self.cancel_callbacks = {} + self.auto_decode = auto_decode + self.events = defaultdict(set) + self.no_ack_consumers = set() + + self._x_open() + + def _do_close(self): + """Tear down this object, after we've agreed to close + with the server.""" + AMQP_LOGGER.debug('Closed channel #%d', self.channel_id) + self.is_open = False + channel_id, self.channel_id = self.channel_id, None + connection, self.connection = self.connection, None + if connection: + connection.channels.pop(channel_id, None) + connection._avail_channel_ids.append(channel_id) + self.callbacks.clear() + self.cancel_callbacks.clear() + self.events.clear() + self.no_ack_consumers.clear() + + def _do_revive(self): + self.is_open = False + self._x_open() + + def close(self, reply_code=0, reply_text='', method_sig=(0, 0)): + """Request a channel close + + This method indicates that the sender wants to close the + channel. This may be due to internal conditions (e.g. a forced + shut-down) or due to an error handling a specific method, i.e. + an exception. When a close is due to an exception, the sender + provides the class and method id of the method which caused + the exception. + + RULE: + + After sending this method any received method except + Channel.Close-OK MUST be discarded. + + RULE: + + The peer sending this method MAY use a counter or timeout + to detect failure of the other peer to respond correctly + with Channel.Close-OK.. + + PARAMETERS: + reply_code: short + + The reply code. The AMQ reply codes are defined in AMQ + RFC 011. + + reply_text: shortstr + + The localised reply text. 
This text can be logged as an + aid to resolving issues. + + class_id: short + + failing method class + + When the close is provoked by a method exception, this + is the class of the method. + + method_id: short + + failing method ID + + When the close is provoked by a method exception, this + is the ID of the method. + + """ + try: + if not self.is_open or self.connection is None: + return + + args = AMQPWriter() + args.write_short(reply_code) + args.write_shortstr(reply_text) + args.write_short(method_sig[0]) # class_id + args.write_short(method_sig[1]) # method_id + self._send_method((20, 40), args) + return self.wait(allowed_methods=[ + (20, 40), # Channel.close + (20, 41), # Channel.close_ok + ]) + finally: + self.connection = None + + def _close(self, args): + """Request a channel close + + This method indicates that the sender wants to close the + channel. This may be due to internal conditions (e.g. a forced + shut-down) or due to an error handling a specific method, i.e. + an exception. When a close is due to an exception, the sender + provides the class and method id of the method which caused + the exception. + + RULE: + + After sending this method any received method except + Channel.Close-OK MUST be discarded. + + RULE: + + The peer sending this method MAY use a counter or timeout + to detect failure of the other peer to respond correctly + with Channel.Close-OK.. + + PARAMETERS: + reply_code: short + + The reply code. The AMQ reply codes are defined in AMQ + RFC 011. + + reply_text: shortstr + + The localised reply text. This text can be logged as an + aid to resolving issues. + + class_id: short + + failing method class + + When the close is provoked by a method exception, this + is the class of the method. + + method_id: short + + failing method ID + + When the close is provoked by a method exception, this + is the ID of the method. + + """ + + reply_code = args.read_short() + reply_text = args.read_shortstr() + class_id = args.read_short() + method_id = args.read_short() + + self._send_method((20, 41)) + self._do_revive() + + raise ChannelError(reply_code, reply_text, (class_id, method_id)) + + def _close_ok(self, args): + """Confirm a channel close + + This method confirms a Channel.Close method and tells the + recipient that it is safe to release resources for the channel + and close the socket. + + RULE: + + A peer that detects a socket closure without having + received a Channel.Close-Ok handshake method SHOULD log + the error. + + """ + self._do_close() + + def flow(self, active): + """Enable/disable flow from peer + + This method asks the peer to pause or restart the flow of + content data. This is a simple flow-control mechanism that a + peer can use to avoid oveflowing its queues or otherwise + finding itself receiving more messages than it can process. + Note that this method is not intended for window control. The + peer that receives a request to stop sending content should + finish sending the current content, if any, and then wait + until it receives a Flow restart method. + + RULE: + + When a new channel is opened, it is active. Some + applications assume that channels are inactive until + started. To emulate this behaviour a client MAY open the + channel, then pause it. + + RULE: + + When sending content data in multiple frames, a peer + SHOULD monitor the channel for incoming methods and + respond to a Channel.Flow as rapidly as possible. 
+ + RULE: + + A peer MAY use the Channel.Flow method to throttle + incoming content data for internal reasons, for example, + when exchangeing data over a slower connection. + + RULE: + + The peer that requests a Channel.Flow method MAY + disconnect and/or ban a peer that does not respect the + request. + + PARAMETERS: + active: boolean + + start/stop content frames + + If True, the peer starts sending content frames. If + False, the peer stops sending content frames. + + """ + args = AMQPWriter() + args.write_bit(active) + self._send_method((20, 20), args) + return self.wait(allowed_methods=[ + (20, 21), # Channel.flow_ok + ]) + + def _flow(self, args): + """Enable/disable flow from peer + + This method asks the peer to pause or restart the flow of + content data. This is a simple flow-control mechanism that a + peer can use to avoid oveflowing its queues or otherwise + finding itself receiving more messages than it can process. + Note that this method is not intended for window control. The + peer that receives a request to stop sending content should + finish sending the current content, if any, and then wait + until it receives a Flow restart method. + + RULE: + + When a new channel is opened, it is active. Some + applications assume that channels are inactive until + started. To emulate this behaviour a client MAY open the + channel, then pause it. + + RULE: + + When sending content data in multiple frames, a peer + SHOULD monitor the channel for incoming methods and + respond to a Channel.Flow as rapidly as possible. + + RULE: + + A peer MAY use the Channel.Flow method to throttle + incoming content data for internal reasons, for example, + when exchangeing data over a slower connection. + + RULE: + + The peer that requests a Channel.Flow method MAY + disconnect and/or ban a peer that does not respect the + request. + + PARAMETERS: + active: boolean + + start/stop content frames + + If True, the peer starts sending content frames. If + False, the peer stops sending content frames. + + """ + self.active = args.read_bit() + self._x_flow_ok(self.active) + + def _x_flow_ok(self, active): + """Confirm a flow method + + Confirms to the peer that a flow command was received and + processed. + + PARAMETERS: + active: boolean + + current flow setting + + Confirms the setting of the processed flow method: + True means the peer will start sending or continue + to send content frames; False means it will not. + + """ + args = AMQPWriter() + args.write_bit(active) + self._send_method((20, 21), args) + + def _flow_ok(self, args): + """Confirm a flow method + + Confirms to the peer that a flow command was received and + processed. + + PARAMETERS: + active: boolean + + current flow setting + + Confirms the setting of the processed flow method: + True means the peer will start sending or continue + to send content frames; False means it will not. + + """ + return args.read_bit() + + def _x_open(self): + """Open a channel for use + + This method opens a virtual connection (a channel). + + RULE: + + This method MUST NOT be called when the channel is already + open. + + PARAMETERS: + out_of_band: shortstr (DEPRECATED) + + out-of-band settings + + Configures out-of-band transfers on this channel. The + syntax and meaning of this field will be formally + defined at a later date. 
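+
+        This method is called from __init__(), so application code
+        normally receives an already-open channel (illustrative sketch,
+        assuming an established Connection object named conn):
+
+            ch = conn.channel()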
+ + """ + if self.is_open: + return + + args = AMQPWriter() + args.write_shortstr('') # out_of_band: deprecated + self._send_method((20, 10), args) + return self.wait(allowed_methods=[ + (20, 11), # Channel.open_ok + ]) + + def _open_ok(self, args): + """Signal that the channel is ready + + This method signals to the client that the channel is ready + for use. + + """ + self.is_open = True + AMQP_LOGGER.debug('Channel open') + + ############# + # + # Exchange + # + # + # work with exchanges + # + # Exchanges match and distribute messages across queues. + # Exchanges can be configured in the server or created at runtime. + # + # GRAMMAR:: + # + # exchange = C:DECLARE S:DECLARE-OK + # / C:DELETE S:DELETE-OK + # + # RULE: + # + # The server MUST implement the direct and fanout exchange + # types, and predeclare the corresponding exchanges named + # amq.direct and amq.fanout in each virtual host. The server + # MUST also predeclare a direct exchange to act as the default + # exchange for content Publish methods and for default queue + # bindings. + # + # RULE: + # + # The server SHOULD implement the topic exchange type, and + # predeclare the corresponding exchange named amq.topic in + # each virtual host. + # + # RULE: + # + # The server MAY implement the system exchange type, and + # predeclare the corresponding exchanges named amq.system in + # each virtual host. If the client attempts to bind a queue to + # the system exchange, the server MUST raise a connection + # exception with reply code 507 (not allowed). + # + + def exchange_declare(self, exchange, type, passive=False, durable=False, + auto_delete=True, nowait=False, arguments=None): + """Declare exchange, create if needed + + This method creates an exchange if it does not already exist, + and if the exchange exists, verifies that it is of the correct + and expected class. + + RULE: + + The server SHOULD support a minimum of 16 exchanges per + virtual host and ideally, impose no limit except as + defined by available resources. + + PARAMETERS: + exchange: shortstr + + RULE: + + Exchange names starting with "amq." are reserved + for predeclared and standardised exchanges. If + the client attempts to create an exchange starting + with "amq.", the server MUST raise a channel + exception with reply code 403 (access refused). + + type: shortstr + + exchange type + + Each exchange belongs to one of a set of exchange + types implemented by the server. The exchange types + define the functionality of the exchange - i.e. how + messages are routed through it. It is not valid or + meaningful to attempt to change the type of an + existing exchange. + + RULE: + + If the exchange already exists with a different + type, the server MUST raise a connection exception + with a reply code 507 (not allowed). + + RULE: + + If the server does not support the requested + exchange type it MUST raise a connection exception + with a reply code 503 (command invalid). + + passive: boolean + + do not create exchange + + If set, the server will not create the exchange. The + client can use this to check whether an exchange + exists without modifying the server state. + + RULE: + + If set, and the exchange does not already exist, + the server MUST raise a channel exception with + reply code 404 (not found). + + durable: boolean + + request a durable exchange + + If set when creating a new exchange, the exchange will + be marked as durable. Durable exchanges remain active + when a server restarts. 
Non-durable exchanges + (transient exchanges) are purged if/when a server + restarts. + + RULE: + + The server MUST support both durable and transient + exchanges. + + RULE: + + The server MUST ignore the durable field if the + exchange already exists. + + auto_delete: boolean + + auto-delete when unused + + If set, the exchange is deleted when all queues have + finished using it. + + RULE: + + The server SHOULD allow for a reasonable delay + between the point when it determines that an + exchange is not being used (or no longer used), + and the point when it deletes the exchange. At + the least it must allow a client to create an + exchange and then bind a queue to it, with a small + but non-zero delay between these two actions. + + RULE: + + The server MUST ignore the auto-delete field if + the exchange already exists. + + nowait: boolean + + do not send a reply method + + If set, the server will not respond to the method. The + client should not wait for a reply method. If the + server could not complete the method it will raise a + channel or connection exception. + + arguments: table + + arguments for declaration + + A set of arguments for the declaration. The syntax and + semantics of these arguments depends on the server + implementation. This field is ignored if passive is + True. + + """ + arguments = {} if arguments is None else arguments + args = AMQPWriter() + args.write_short(0) + args.write_shortstr(exchange) + args.write_shortstr(type) + args.write_bit(passive) + args.write_bit(durable) + args.write_bit(auto_delete) + args.write_bit(False) # internal: deprecated + args.write_bit(nowait) + args.write_table(arguments) + self._send_method((40, 10), args) + + if auto_delete: + warn(DeprecationWarning( + 'auto_delete exchanges has been deprecated')) + + if not nowait: + return self.wait(allowed_methods=[ + (40, 11), # Channel.exchange_declare_ok + ]) + + def _exchange_declare_ok(self, args): + """Confirms an exchange declaration + + This method confirms a Declare method and confirms the name of + the exchange, essential for automatically-named exchanges. + + """ + pass + + def exchange_delete(self, exchange, if_unused=False, nowait=False): + """Delete an exchange + + This method deletes an exchange. When an exchange is deleted + all queue bindings on the exchange are cancelled. + + PARAMETERS: + exchange: shortstr + + RULE: + + The exchange MUST exist. Attempting to delete a + non-existing exchange causes a channel exception. + + if_unused: boolean + + delete only if unused + + If set, the server will only delete the exchange if it + has no queue bindings. If the exchange has queue + bindings the server does not delete it but raises a + channel exception instead. + + RULE: + + If set, the server SHOULD delete the exchange but + only if it has no queue bindings. + + RULE: + + If set, the server SHOULD raise a channel + exception if the exchange is in use. + + nowait: boolean + + do not send a reply method + + If set, the server will not respond to the method. The + client should not wait for a reply method. If the + server could not complete the method it will raise a + channel or connection exception. 
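+
+        Usage sketch (assumes an open channel ch; the exchange name is
+        illustrative):
+
+            ch.exchange_delete('build_results', if_unused=True)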
+ + """ + args = AMQPWriter() + args.write_short(0) + args.write_shortstr(exchange) + args.write_bit(if_unused) + args.write_bit(nowait) + self._send_method((40, 20), args) + + if not nowait: + return self.wait(allowed_methods=[ + (40, 21), # Channel.exchange_delete_ok + ]) + + def _exchange_delete_ok(self, args): + """Confirm deletion of an exchange + + This method confirms the deletion of an exchange. + + """ + pass + + def exchange_bind(self, destination, source='', routing_key='', + nowait=False, arguments=None): + """This method binds an exchange to an exchange. + + RULE: + + A server MUST allow and ignore duplicate bindings - that + is, two or more bind methods for a specific exchanges, + with identical arguments - without treating these as an + error. + + RULE: + + A server MUST allow cycles of exchange bindings to be + created including allowing an exchange to be bound to + itself. + + RULE: + + A server MUST not deliver the same message more than once + to a destination exchange, even if the topology of + exchanges and bindings results in multiple (even infinite) + routes to that exchange. + + PARAMETERS: + reserved-1: short + + destination: shortstr + + Specifies the name of the destination exchange to + bind. + + RULE: + + A client MUST NOT be allowed to bind a non- + existent destination exchange. + + RULE: + + The server MUST accept a blank exchange name to + mean the default exchange. + + source: shortstr + + Specifies the name of the source exchange to bind. + + RULE: + + A client MUST NOT be allowed to bind a non- + existent source exchange. + + RULE: + + The server MUST accept a blank exchange name to + mean the default exchange. + + routing-key: shortstr + + Specifies the routing key for the binding. The routing + key is used for routing messages depending on the + exchange configuration. Not all exchanges use a + routing key - refer to the specific exchange + documentation. + + no-wait: bit + + arguments: table + + A set of arguments for the binding. The syntax and + semantics of these arguments depends on the exchange + class. + + """ + arguments = {} if arguments is None else arguments + args = AMQPWriter() + args.write_short(0) + args.write_shortstr(destination) + args.write_shortstr(source) + args.write_shortstr(routing_key) + args.write_bit(nowait) + args.write_table(arguments) + self._send_method((40, 30), args) + + if not nowait: + return self.wait(allowed_methods=[ + (40, 31), # Channel.exchange_bind_ok + ]) + + def exchange_unbind(self, destination, source='', routing_key='', + nowait=False, arguments=None): + """This method unbinds an exchange from an exchange. + + RULE: + + If a unbind fails, the server MUST raise a connection + exception. + + PARAMETERS: + reserved-1: short + + destination: shortstr + + Specifies the name of the destination exchange to + unbind. + + RULE: + + The client MUST NOT attempt to unbind an exchange + that does not exist from an exchange. + + RULE: + + The server MUST accept a blank exchange name to + mean the default exchange. + + source: shortstr + + Specifies the name of the source exchange to unbind. + + RULE: + + The client MUST NOT attempt to unbind an exchange + from an exchange that does not exist. + + RULE: + + The server MUST accept a blank exchange name to + mean the default exchange. + + routing-key: shortstr + + Specifies the routing key of the binding to unbind. + + no-wait: bit + + arguments: table + + Specifies the arguments of the binding to unbind. 
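+
+        Usage sketch (assumes an open channel ch; names are illustrative):
+
+            ch.exchange_unbind('downstream', source='events',
+                               routing_key='task.created')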
+ + """ + arguments = {} if arguments is None else arguments + args = AMQPWriter() + args.write_short(0) + args.write_shortstr(destination) + args.write_shortstr(source) + args.write_shortstr(routing_key) + args.write_bit(nowait) + args.write_table(arguments) + self._send_method((40, 40), args) + + if not nowait: + return self.wait(allowed_methods=[ + (40, 51), # Channel.exchange_unbind_ok + ]) + + def _exchange_bind_ok(self, args): + """Confirm bind successful + + This method confirms that the bind was successful. + + """ + pass + + def _exchange_unbind_ok(self, args): + """Confirm unbind successful + + This method confirms that the unbind was successful. + + """ + pass + + ############# + # + # Queue + # + # + # work with queues + # + # Queues store and forward messages. Queues can be configured in + # the server or created at runtime. Queues must be attached to at + # least one exchange in order to receive messages from publishers. + # + # GRAMMAR:: + # + # queue = C:DECLARE S:DECLARE-OK + # / C:BIND S:BIND-OK + # / C:PURGE S:PURGE-OK + # / C:DELETE S:DELETE-OK + # + # RULE: + # + # A server MUST allow any content class to be sent to any + # queue, in any mix, and queue and delivery these content + # classes independently. Note that all methods that fetch + # content off queues are specific to a given content class. + # + + def queue_bind(self, queue, exchange='', routing_key='', + nowait=False, arguments=None): + """Bind queue to an exchange + + This method binds a queue to an exchange. Until a queue is + bound it will not receive any messages. In a classic + messaging model, store-and-forward queues are bound to a dest + exchange and subscription queues are bound to a dest_wild + exchange. + + RULE: + + A server MUST allow ignore duplicate bindings - that is, + two or more bind methods for a specific queue, with + identical arguments - without treating these as an error. + + RULE: + + If a bind fails, the server MUST raise a connection + exception. + + RULE: + + The server MUST NOT allow a durable queue to bind to a + transient exchange. If the client attempts this the server + MUST raise a channel exception. + + RULE: + + Bindings for durable queues are automatically durable and + the server SHOULD restore such bindings after a server + restart. + + RULE: + + The server SHOULD support at least 4 bindings per queue, + and ideally, impose no limit except as defined by + available resources. + + PARAMETERS: + queue: shortstr + + Specifies the name of the queue to bind. If the queue + name is empty, refers to the current queue for the + channel, which is the last declared queue. + + RULE: + + If the client did not previously declare a queue, + and the queue name in this method is empty, the + server MUST raise a connection exception with + reply code 530 (not allowed). + + RULE: + + If the queue does not exist the server MUST raise + a channel exception with reply code 404 (not + found). + + exchange: shortstr + + The name of the exchange to bind to. + + RULE: + + If the exchange does not exist the server MUST + raise a channel exception with reply code 404 (not + found). + + routing_key: shortstr + + message routing key + + Specifies the routing key for the binding. The + routing key is used for routing messages depending on + the exchange configuration. Not all exchanges use a + routing key - refer to the specific exchange + documentation. 
If the routing key is empty and the + queue name is empty, the routing key will be the + current queue for the channel, which is the last + declared queue. + + nowait: boolean + + do not send a reply method + + If set, the server will not respond to the method. The + client should not wait for a reply method. If the + server could not complete the method it will raise a + channel or connection exception. + + arguments: table + + arguments for binding + + A set of arguments for the binding. The syntax and + semantics of these arguments depends on the exchange + class. + """ + arguments = {} if arguments is None else arguments + args = AMQPWriter() + args.write_short(0) + args.write_shortstr(queue) + args.write_shortstr(exchange) + args.write_shortstr(routing_key) + args.write_bit(nowait) + args.write_table(arguments) + self._send_method((50, 20), args) + + if not nowait: + return self.wait(allowed_methods=[ + (50, 21), # Channel.queue_bind_ok + ]) + + def _queue_bind_ok(self, args): + """Confirm bind successful + + This method confirms that the bind was successful. + + """ + pass + + def queue_unbind(self, queue, exchange, routing_key='', + nowait=False, arguments=None): + """Unbind a queue from an exchange + + This method unbinds a queue from an exchange. + + RULE: + + If a unbind fails, the server MUST raise a connection exception. + + PARAMETERS: + queue: shortstr + + Specifies the name of the queue to unbind. + + RULE: + + The client MUST either specify a queue name or have + previously declared a queue on the same channel + + RULE: + + The client MUST NOT attempt to unbind a queue that + does not exist. + + exchange: shortstr + + The name of the exchange to unbind from. + + RULE: + + The client MUST NOT attempt to unbind a queue from an + exchange that does not exist. + + RULE: + + The server MUST accept a blank exchange name to mean + the default exchange. + + routing_key: shortstr + + routing key of binding + + Specifies the routing key of the binding to unbind. + + arguments: table + + arguments of binding + + Specifies the arguments of the binding to unbind. + + """ + arguments = {} if arguments is None else arguments + args = AMQPWriter() + args.write_short(0) + args.write_shortstr(queue) + args.write_shortstr(exchange) + args.write_shortstr(routing_key) + #args.write_bit(nowait) + args.write_table(arguments) + self._send_method((50, 50), args) + + if not nowait: + return self.wait(allowed_methods=[ + (50, 51), # Channel.queue_unbind_ok + ]) + + def _queue_unbind_ok(self, args): + """Confirm unbind successful + + This method confirms that the unbind was successful. + + """ + pass + + def queue_declare(self, queue='', passive=False, durable=False, + exclusive=False, auto_delete=True, nowait=False, + arguments=None): + """Declare queue, create if needed + + This method creates or checks a queue. When creating a new + queue the client can specify various properties that control + the durability of the queue and its contents, and the level of + sharing for the queue. + + RULE: + + The server MUST create a default binding for a newly- + created queue to the default exchange, which is an + exchange of type 'direct'. + + RULE: + + The server SHOULD support a minimum of 256 queues per + virtual host and ideally, impose no limit except as + defined by available resources. + + PARAMETERS: + queue: shortstr + + RULE: + + The queue name MAY be empty, in which case the + server MUST create a new queue with a unique + generated name and return this to the client in + the Declare-Ok method. 
+ + RULE: + + Queue names starting with "amq." are reserved for + predeclared and standardised server queues. If + the queue name starts with "amq." and the passive + option is False, the server MUST raise a connection + exception with reply code 403 (access refused). + + passive: boolean + + do not create queue + + If set, the server will not create the queue. The + client can use this to check whether a queue exists + without modifying the server state. + + RULE: + + If set, and the queue does not already exist, the + server MUST respond with a reply code 404 (not + found) and raise a channel exception. + + durable: boolean + + request a durable queue + + If set when creating a new queue, the queue will be + marked as durable. Durable queues remain active when + a server restarts. Non-durable queues (transient + queues) are purged if/when a server restarts. Note + that durable queues do not necessarily hold persistent + messages, although it does not make sense to send + persistent messages to a transient queue. + + RULE: + + The server MUST recreate the durable queue after a + restart. + + RULE: + + The server MUST support both durable and transient + queues. + + RULE: + + The server MUST ignore the durable field if the + queue already exists. + + exclusive: boolean + + request an exclusive queue + + Exclusive queues may only be consumed from by the + current connection. Setting the 'exclusive' flag + always implies 'auto-delete'. + + RULE: + + The server MUST support both exclusive (private) + and non-exclusive (shared) queues. + + RULE: + + The server MUST raise a channel exception if + 'exclusive' is specified and the queue already + exists and is owned by a different connection. + + auto_delete: boolean + + auto-delete queue when unused + + If set, the queue is deleted when all consumers have + finished using it. Last consumer can be cancelled + either explicitly or because its channel is closed. If + there was no consumer ever on the queue, it won't be + deleted. + + RULE: + + The server SHOULD allow for a reasonable delay + between the point when it determines that a queue + is not being used (or no longer used), and the + point when it deletes the queue. At the least it + must allow a client to create a queue and then + create a consumer to read from it, with a small + but non-zero delay between these two actions. The + server should equally allow for clients that may + be disconnected prematurely, and wish to re- + consume from the same queue without losing + messages. We would recommend a configurable + timeout, with a suitable default value being one + minute. + + RULE: + + The server MUST ignore the auto-delete field if + the queue already exists. + + nowait: boolean + + do not send a reply method + + If set, the server will not respond to the method. The + client should not wait for a reply method. If the + server could not complete the method it will raise a + channel or connection exception. + + arguments: table + + arguments for declaration + + A set of arguments for the declaration. The syntax and + semantics of these arguments depends on the server + implementation. This field is ignored if passive is + True. 
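+
+        A minimal sketch (queue name and flags are illustrative),
+        assuming an open channel ``ch``::
+
+            name, message_count, consumer_count = ch.queue_declare(
+                'tasks', durable=True, auto_delete=False)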
+
+        Returns a tuple containing 3 items:
+            the name of the queue (essential for automatically-named queues)
+            message count
+            consumer count
+
+        """
+        arguments = {} if arguments is None else arguments
+        args = AMQPWriter()
+        args.write_short(0)
+        args.write_shortstr(queue)
+        args.write_bit(passive)
+        args.write_bit(durable)
+        args.write_bit(exclusive)
+        args.write_bit(auto_delete)
+        args.write_bit(nowait)
+        args.write_table(arguments)
+        self._send_method((50, 10), args)
+
+        if not nowait:
+            return self.wait(allowed_methods=[
+                (50, 11),  # Channel.queue_declare_ok
+            ])
+
+    def _queue_declare_ok(self, args):
+        """Confirms a queue definition
+
+        This method confirms a Declare method and confirms the name of
+        the queue, essential for automatically-named queues.
+
+        PARAMETERS:
+            queue: shortstr
+
+                Reports the name of the queue. If the server generated
+                a queue name, this field contains that name.
+
+            message_count: long
+
+                number of messages in queue
+
+                Reports the number of messages in the queue, which
+                will be zero for newly-created queues.
+
+            consumer_count: long
+
+                number of consumers
+
+                Reports the number of active consumers for the queue.
+                Note that consumers can suspend activity
+                (Channel.Flow) in which case they do not appear in
+                this count.
+
+        """
+        queue = args.read_shortstr()
+        message_count = args.read_long()
+        consumer_count = args.read_long()
+        return queue, message_count, consumer_count
+
+    def queue_delete(self, queue='',
+                     if_unused=False, if_empty=False, nowait=False):
+        """Delete a queue
+
+        This method deletes a queue.  When a queue is deleted any
+        pending messages are sent to a dead-letter queue if this is
+        defined in the server configuration, and all consumers on the
+        queue are cancelled.
+
+        RULE:
+
+            The server SHOULD use a dead-letter queue to hold messages
+            that were pending on a deleted queue, and MAY provide
+            facilities for a system administrator to move these
+            messages back to an active queue.
+
+        PARAMETERS:
+            queue: shortstr
+
+                Specifies the name of the queue to delete.  If the
+                queue name is empty, refers to the current queue for
+                the channel, which is the last declared queue.
+
+                RULE:
+
+                    If the client did not previously declare a queue,
+                    and the queue name in this method is empty, the
+                    server MUST raise a connection exception with
+                    reply code 530 (not allowed).
+
+                RULE:
+
+                    The queue must exist. Attempting to delete a non-
+                    existing queue causes a channel exception.
+
+            if_unused: boolean
+
+                delete only if unused
+
+                If set, the server will only delete the queue if it
+                has no consumers.  If the queue has consumers the
+                server does not delete it but raises a channel
+                exception instead.
+
+                RULE:
+
+                    The server MUST respect the if-unused flag when
+                    deleting a queue.
+
+            if_empty: boolean
+
+                delete only if empty
+
+                If set, the server will only delete the queue if it
+                has no messages.  If the queue is not empty the server
+                raises a channel exception.
+
+            nowait: boolean
+
+                do not send a reply method
+
+                If set, the server will not respond to the method. The
+                client should not wait for a reply method.  If the
+                server could not complete the method it will raise a
+                channel or connection exception.
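+
+        A guarded delete might look like this sketch (assuming an
+        open channel ``ch``; the queue name is illustrative)::
+
+            ch.queue_delete('tasks', if_unused=True, if_empty=True)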
+ + """ + args = AMQPWriter() + args.write_short(0) + args.write_shortstr(queue) + args.write_bit(if_unused) + args.write_bit(if_empty) + args.write_bit(nowait) + self._send_method((50, 40), args) + + if not nowait: + return self.wait(allowed_methods=[ + (50, 41), # Channel.queue_delete_ok + ]) + + def _queue_delete_ok(self, args): + """Confirm deletion of a queue + + This method confirms the deletion of a queue. + + PARAMETERS: + message_count: long + + number of messages purged + + Reports the number of messages purged. + + """ + return args.read_long() + + def queue_purge(self, queue='', nowait=False): + """Purge a queue + + This method removes all messages from a queue. It does not + cancel consumers. Purged messages are deleted without any + formal "undo" mechanism. + + RULE: + + A call to purge MUST result in an empty queue. + + RULE: + + On transacted channels the server MUST not purge messages + that have already been sent to a client but not yet + acknowledged. + + RULE: + + The server MAY implement a purge queue or log that allows + system administrators to recover accidentally-purged + messages. The server SHOULD NOT keep purged messages in + the same storage spaces as the live messages since the + volumes of purged messages may get very large. + + PARAMETERS: + queue: shortstr + + Specifies the name of the queue to purge. If the + queue name is empty, refers to the current queue for + the channel, which is the last declared queue. + + RULE: + + If the client did not previously declare a queue, + and the queue name in this method is empty, the + server MUST raise a connection exception with + reply code 530 (not allowed). + + RULE: + + The queue must exist. Attempting to purge a non- + existing queue causes a channel exception. + + nowait: boolean + + do not send a reply method + + If set, the server will not respond to the method. The + client should not wait for a reply method. If the + server could not complete the method it will raise a + channel or connection exception. + + if nowait is False, returns a message_count + + """ + args = AMQPWriter() + args.write_short(0) + args.write_shortstr(queue) + args.write_bit(nowait) + self._send_method((50, 30), args) + + if not nowait: + return self.wait(allowed_methods=[ + (50, 31), # Channel.queue_purge_ok + ]) + + def _queue_purge_ok(self, args): + """Confirms a queue purge + + This method confirms the purge of a queue. + + PARAMETERS: + message_count: long + + number of messages purged + + Reports the number of messages purged. + + """ + return args.read_long() + + ############# + # + # Basic + # + # + # work with basic content + # + # The Basic class provides methods that support an industry- + # standard messaging model. + # + # GRAMMAR:: + # + # basic = C:QOS S:QOS-OK + # / C:CONSUME S:CONSUME-OK + # / C:CANCEL S:CANCEL-OK + # / C:PUBLISH content + # / S:RETURN content + # / S:DELIVER content + # / C:GET ( S:GET-OK content / S:GET-EMPTY ) + # / C:ACK + # / C:REJECT + # + # RULE: + # + # The server SHOULD respect the persistent property of basic + # messages and SHOULD make a best-effort to hold persistent + # basic messages on a reliable storage mechanism. + # + # RULE: + # + # The server MUST NOT discard a persistent basic message in + # case of a queue overflow. The server MAY use the + # Channel.Flow method to slow or stop a basic message + # publisher when necessary. 
+    #
+    # RULE:
+    #
+    #     The server MAY overflow non-persistent basic messages to
+    #     persistent storage and MAY discard or dead-letter non-
+    #     persistent basic messages on a priority basis if the queue
+    #     size exceeds some configured limit.
+    #
+    # RULE:
+    #
+    #     The server MUST implement at least 2 priority levels for
+    #     basic messages, where priorities 0-4 and 5-9 are treated as
+    #     two distinct levels. The server MAY implement up to 10
+    #     priority levels.
+    #
+    # RULE:
+    #
+    #     The server MUST deliver messages of the same priority in
+    #     order irrespective of their individual persistence.
+    #
+    # RULE:
+    #
+    #     The server MUST support both automatic and explicit
+    #     acknowledgements on Basic content.
+    #
+
+    def basic_ack(self, delivery_tag, multiple=False):
+        """Acknowledge one or more messages
+
+        This method acknowledges one or more messages delivered via
+        the Deliver or Get-Ok methods.  The client can ask to confirm
+        a single message or a set of messages up to and including a
+        specific message.
+
+        PARAMETERS:
+            delivery_tag: longlong
+
+                server-assigned delivery tag
+
+                The server-assigned and channel-specific delivery tag
+
+                RULE:
+
+                    The delivery tag is valid only within the channel
+                    from which the message was received.  I.e. a client
+                    MUST NOT receive a message on one channel and then
+                    acknowledge it on another.
+
+                RULE:
+
+                    The server MUST NOT use a zero value for delivery
+                    tags.  Zero is reserved for client use, meaning "all
+                    messages so far received".
+
+            multiple: boolean
+
+                acknowledge multiple messages
+
+                If set to True, the delivery tag is treated as "up to
+                and including", so that the client can acknowledge
+                multiple messages with a single method.  If set to
+                False, the delivery tag refers to a single message.
+                If the multiple field is True, and the delivery tag
+                is zero, tells the server to acknowledge all
+                outstanding messages.
+
+                RULE:
+
+                    The server MUST validate that a non-zero delivery-
+                    tag refers to a delivered message, and raise a
+                    channel exception if this is not the case.
+
+        """
+        args = AMQPWriter()
+        args.write_longlong(delivery_tag)
+        args.write_bit(multiple)
+        self._send_method((60, 80), args)
+
+    def basic_cancel(self, consumer_tag, nowait=False):
+        """End a queue consumer
+
+        This method cancels a consumer.  This does not affect already
+        delivered messages, but it does mean the server will not send
+        any more messages for that consumer.  The client may receive
+        an arbitrary number of messages in between sending the cancel
+        method and receiving the cancel-ok reply.
+
+        RULE:
+
+            If the queue no longer exists when the client sends a
+            cancel command, or the consumer has been cancelled for
+            other reasons, this command has no effect.
+
+        PARAMETERS:
+            consumer_tag: shortstr
+
+                consumer tag
+
+                Identifier for the consumer, valid within the current
+                connection.
+
+                RULE:
+
+                    The consumer tag is valid only within the channel
+                    from which the consumer was created. I.e. a client
+                    MUST NOT create a consumer in one channel and then
+                    use it in another.
+
+            nowait: boolean
+
+                do not send a reply method
+
+                If set, the server will not respond to the method. The
+                client should not wait for a reply method.  If the
+                server could not complete the method it will raise a
+                channel or connection exception.
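+
+        A typical cancel uses the tag returned by basic_consume, as in
+        this sketch (assuming an open channel ``ch`` and a ``handle``
+        callable; the names are illustrative)::
+
+            tag = ch.basic_consume('tasks', callback=handle)
+            # ... later:
+            ch.basic_cancel(tag)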
+ + """ + if self.connection is not None: + self.no_ack_consumers.discard(consumer_tag) + args = AMQPWriter() + args.write_shortstr(consumer_tag) + args.write_bit(nowait) + self._send_method((60, 30), args) + return self.wait(allowed_methods=[ + (60, 31), # Channel.basic_cancel_ok + ]) + + def _basic_cancel_notify(self, args): + """Consumer cancelled by server. + + Most likely the queue was deleted. + + """ + consumer_tag = args.read_shortstr() + callback = self._on_cancel(consumer_tag) + if callback: + callback(consumer_tag) + else: + raise ConsumerCancel('tag %r' % (consumer_tag, )) + + def _basic_cancel_ok(self, args): + """Confirm a cancelled consumer + + This method confirms that the cancellation was completed. + + PARAMETERS: + consumer_tag: shortstr + + consumer tag + + Identifier for the consumer, valid within the current + connection. + + RULE: + + The consumer tag is valid only within the channel + from which the consumer was created. I.e. a client + MUST NOT create a consumer in one channel and then + use it in another. + + """ + consumer_tag = args.read_shortstr() + self._on_cancel(consumer_tag) + + def _on_cancel(self, consumer_tag): + self.callbacks.pop(consumer_tag, None) + return self.cancel_callbacks.pop(consumer_tag, None) + + def basic_consume(self, queue='', consumer_tag='', no_local=False, + no_ack=False, exclusive=False, nowait=False, + callback=None, arguments=None, on_cancel=None): + """Start a queue consumer + + This method asks the server to start a "consumer", which is a + transient request for messages from a specific queue. + Consumers last as long as the channel they were created on, or + until the client cancels them. + + RULE: + + The server SHOULD support at least 16 consumers per queue, + unless the queue was declared as private, and ideally, + impose no limit except as defined by available resources. + + PARAMETERS: + queue: shortstr + + Specifies the name of the queue to consume from. If + the queue name is null, refers to the current queue + for the channel, which is the last declared queue. + + RULE: + + If the client did not previously declare a queue, + and the queue name in this method is empty, the + server MUST raise a connection exception with + reply code 530 (not allowed). + + consumer_tag: shortstr + + Specifies the identifier for the consumer. The + consumer tag is local to a connection, so two clients + can use the same consumer tags. If this field is empty + the server will generate a unique tag. + + RULE: + + The tag MUST NOT refer to an existing consumer. If + the client attempts to create two consumers with + the same non-empty tag the server MUST raise a + connection exception with reply code 530 (not + allowed). + + no_local: boolean + + do not deliver own messages + + If the no-local field is set the server will not send + messages to the client that published them. + + no_ack: boolean + + no acknowledgement needed + + If this field is set the server does not expect + acknowledgments for messages. That is, when a message + is delivered to the client the server automatically and + silently acknowledges it on behalf of the client. This + functionality increases performance but at the cost of + reliability. Messages can get lost if a client dies + before it can deliver them to the application. + + exclusive: boolean + + request exclusive access + + Request exclusive consumer access, meaning only this + consumer can access the queue. 
+ + RULE: + + If the server cannot grant exclusive access to the + queue when asked, - because there are other + consumers active - it MUST raise a channel + exception with return code 403 (access refused). + + nowait: boolean + + do not send a reply method + + If set, the server will not respond to the method. The + client should not wait for a reply method. If the + server could not complete the method it will raise a + channel or connection exception. + + callback: Python callable + + function/method called with each delivered message + + For each message delivered by the broker, the + callable will be called with a Message object + as the single argument. If no callable is specified, + messages are quietly discarded, no_ack should probably + be set to True in that case. + + """ + args = AMQPWriter() + args.write_short(0) + args.write_shortstr(queue) + args.write_shortstr(consumer_tag) + args.write_bit(no_local) + args.write_bit(no_ack) + args.write_bit(exclusive) + args.write_bit(nowait) + args.write_table(arguments or {}) + self._send_method((60, 20), args) + + if not nowait: + consumer_tag = self.wait(allowed_methods=[ + (60, 21), # Channel.basic_consume_ok + ]) + + self.callbacks[consumer_tag] = callback + + if on_cancel: + self.cancel_callbacks[consumer_tag] = on_cancel + if no_ack: + self.no_ack_consumers.add(consumer_tag) + + return consumer_tag + + def _basic_consume_ok(self, args): + """Confirm a new consumer + + The server provides the client with a consumer tag, which is + used by the client for methods called on the consumer at a + later stage. + + PARAMETERS: + consumer_tag: shortstr + + Holds the consumer tag specified by the client or + provided by the server. + + """ + return args.read_shortstr() + + def _basic_deliver(self, args, msg): + """Notify the client of a consumer message + + This method delivers a message to the client, via a consumer. + In the asynchronous message delivery model, the client starts + a consumer using the Consume method, then the server responds + with Deliver methods as and when messages arrive for that + consumer. + + RULE: + + The server SHOULD track the number of times a message has + been delivered to clients and when a message is + redelivered a certain number of times - e.g. 5 times - + without being acknowledged, the server SHOULD consider the + message to be unprocessable (possibly causing client + applications to abort), and move the message to a dead + letter queue. + + PARAMETERS: + consumer_tag: shortstr + + consumer tag + + Identifier for the consumer, valid within the current + connection. + + RULE: + + The consumer tag is valid only within the channel + from which the consumer was created. I.e. a client + MUST NOT create a consumer in one channel and then + use it in another. + + delivery_tag: longlong + + server-assigned delivery tag + + The server-assigned and channel-specific delivery tag + + RULE: + + The delivery tag is valid only within the channel + from which the message was received. I.e. a client + MUST NOT receive a message on one channel and then + acknowledge it on another. + + RULE: + + The server MUST NOT use a zero value for delivery + tags. Zero is reserved for client use, meaning "all + messages so far received". + + redelivered: boolean + + message is being redelivered + + This indicates that the message has been previously + delivered to this or another client. + + exchange: shortstr + + Specifies the name of the exchange that the message + was originally published to. 
+ + routing_key: shortstr + + Message routing key + + Specifies the routing key name specified when the + message was published. + + """ + consumer_tag = args.read_shortstr() + delivery_tag = args.read_longlong() + redelivered = args.read_bit() + exchange = args.read_shortstr() + routing_key = args.read_shortstr() + + msg.delivery_info = { + 'consumer_tag': consumer_tag, + 'delivery_tag': delivery_tag, + 'redelivered': redelivered, + 'exchange': exchange, + 'routing_key': routing_key, + } + + fun = self.callbacks.get(consumer_tag, None) + if fun is not None: + fun(msg) + + def basic_get(self, queue='', no_ack=False): + """Direct access to a queue + + This method provides a direct access to the messages in a + queue using a synchronous dialogue that is designed for + specific types of application where synchronous functionality + is more important than performance. + + PARAMETERS: + queue: shortstr + + Specifies the name of the queue to consume from. If + the queue name is null, refers to the current queue + for the channel, which is the last declared queue. + + RULE: + + If the client did not previously declare a queue, + and the queue name in this method is empty, the + server MUST raise a connection exception with + reply code 530 (not allowed). + + no_ack: boolean + + no acknowledgement needed + + If this field is set the server does not expect + acknowledgments for messages. That is, when a message + is delivered to the client the server automatically and + silently acknowledges it on behalf of the client. This + functionality increases performance but at the cost of + reliability. Messages can get lost if a client dies + before it can deliver them to the application. + + Non-blocking, returns a message object, or None. + + """ + args = AMQPWriter() + args.write_short(0) + args.write_shortstr(queue) + args.write_bit(no_ack) + self._send_method((60, 70), args) + return self.wait(allowed_methods=[ + (60, 71), # Channel.basic_get_ok + (60, 72), # Channel.basic_get_empty + ]) + + def _basic_get_empty(self, args): + """Indicate no messages available + + This method tells the client that the queue has no messages + available for the client. + + PARAMETERS: + cluster_id: shortstr + + Cluster id + + For use by cluster applications, should not be used by + client applications. + + """ + cluster_id = args.read_shortstr() # noqa + + def _basic_get_ok(self, args, msg): + """Provide client with a message + + This method delivers a message to the client following a get + method. A message delivered by 'get-ok' must be acknowledged + unless the no-ack option was set in the get method. + + PARAMETERS: + delivery_tag: longlong + + server-assigned delivery tag + + The server-assigned and channel-specific delivery tag + + RULE: + + The delivery tag is valid only within the channel + from which the message was received. I.e. a client + MUST NOT receive a message on one channel and then + acknowledge it on another. + + RULE: + + The server MUST NOT use a zero value for delivery + tags. Zero is reserved for client use, meaning "all + messages so far received". + + redelivered: boolean + + message is being redelivered + + This indicates that the message has been previously + delivered to this or another client. + + exchange: shortstr + + Specifies the name of the exchange that the message + was originally published to. If empty, the message + was published to the default exchange. + + routing_key: shortstr + + Message routing key + + Specifies the routing key name specified when the + message was published. 
+ + message_count: long + + number of messages pending + + This field reports the number of messages pending on + the queue, excluding the message being delivered. + Note that this figure is indicative, not reliable, and + can change arbitrarily as messages are added to the + queue and removed by other clients. + + """ + delivery_tag = args.read_longlong() + redelivered = args.read_bit() + exchange = args.read_shortstr() + routing_key = args.read_shortstr() + message_count = args.read_long() + + msg.delivery_info = { + 'delivery_tag': delivery_tag, + 'redelivered': redelivered, + 'exchange': exchange, + 'routing_key': routing_key, + 'message_count': message_count + } + return msg + + def basic_publish(self, msg, exchange='', routing_key='', + mandatory=False, immediate=False): + """Publish a message + + This method publishes a message to a specific exchange. The + message will be routed to queues as defined by the exchange + configuration and distributed to any active consumers when the + transaction, if any, is committed. + + PARAMETERS: + exchange: shortstr + + Specifies the name of the exchange to publish to. The + exchange name can be empty, meaning the default + exchange. If the exchange name is specified, and that + exchange does not exist, the server will raise a + channel exception. + + RULE: + + The server MUST accept a blank exchange name to + mean the default exchange. + + RULE: + + The exchange MAY refuse basic content in which + case it MUST raise a channel exception with reply + code 540 (not implemented). + + routing_key: shortstr + + Message routing key + + Specifies the routing key for the message. The + routing key is used for routing messages depending on + the exchange configuration. + + mandatory: boolean + + indicate mandatory routing + + This flag tells the server how to react if the message + cannot be routed to a queue. If this flag is True, the + server will return an unroutable message with a Return + method. If this flag is False, the server silently + drops the message. + + RULE: + + The server SHOULD implement the mandatory flag. + + immediate: boolean + + request immediate delivery + + This flag tells the server how to react if the message + cannot be routed to a queue consumer immediately. If + this flag is set, the server will return an + undeliverable message with a Return method. If this + flag is zero, the server will queue the message, but + with no guarantee that it will ever be consumed. + + RULE: + + The server SHOULD implement the immediate flag. + + """ + args = AMQPWriter() + args.write_short(0) + args.write_shortstr(exchange) + args.write_shortstr(routing_key) + args.write_bit(mandatory) + args.write_bit(immediate) + + self._send_method((60, 40), args, msg) + + def basic_qos(self, prefetch_size, prefetch_count, a_global): + """Specify quality of service + + This method requests a specific quality of service. The QoS + can be specified for the current channel or for all channels + on the connection. The particular properties and semantics of + a qos method always depend on the content class semantics. + Though the qos method could in principle apply to both peers, + it is currently meaningful only for the server. + + PARAMETERS: + prefetch_size: long + + prefetch window in octets + + The client can request that messages be sent in + advance so that when the client finishes processing a + message, the following message is already held + locally, rather than needing to be sent down the + channel. Prefetching gives a performance improvement. 
+ This field specifies the prefetch window size in + octets. The server will send a message in advance if + it is equal to or smaller in size than the available + prefetch size (and also falls into other prefetch + limits). May be set to zero, meaning "no specific + limit", although other prefetch limits may still + apply. The prefetch-size is ignored if the no-ack + option is set. + + RULE: + + The server MUST ignore this setting when the + client is not processing any messages - i.e. the + prefetch size does not limit the transfer of + single messages to a client, only the sending in + advance of more messages while the client still + has one or more unacknowledged messages. + + prefetch_count: short + + prefetch window in messages + + Specifies a prefetch window in terms of whole + messages. This field may be used in combination with + the prefetch-size field; a message will only be sent + in advance if both prefetch windows (and those at the + channel and connection level) allow it. The prefetch- + count is ignored if the no-ack option is set. + + RULE: + + The server MAY send less data in advance than + allowed by the client's specified prefetch windows + but it MUST NOT send more. + + a_global: boolean + + apply to entire connection + + By default the QoS settings apply to the current + channel only. If this field is set, they are applied + to the entire connection. + + """ + args = AMQPWriter() + args.write_long(prefetch_size) + args.write_short(prefetch_count) + args.write_bit(a_global) + self._send_method((60, 10), args) + return self.wait(allowed_methods=[ + (60, 11), # Channel.basic_qos_ok + ]) + + def _basic_qos_ok(self, args): + """Confirm the requested qos + + This method tells the client that the requested QoS levels + could be handled by the server. The requested QoS applies to + all active consumers until a new QoS is defined. + + """ + pass + + def basic_recover(self, requeue=False): + """Redeliver unacknowledged messages + + This method asks the broker to redeliver all unacknowledged + messages on a specified channel. Zero or more messages may be + redelivered. This method is only allowed on non-transacted + channels. + + RULE: + + The server MUST set the redelivered flag on all messages + that are resent. + + RULE: + + The server MUST raise a channel exception if this is + called on a transacted channel. + + PARAMETERS: + requeue: boolean + + requeue the message + + If this field is False, the message will be redelivered + to the original recipient. If this field is True, the + server will attempt to requeue the message, + potentially then delivering it to an alternative + subscriber. + + """ + args = AMQPWriter() + args.write_bit(requeue) + self._send_method((60, 110), args) + + def basic_recover_async(self, requeue=False): + args = AMQPWriter() + args.write_bit(requeue) + self._send_method((60, 100), args) + + def _basic_recover_ok(self, args): + """In 0-9-1 the deprecated recover solicits a response.""" + pass + + def basic_reject(self, delivery_tag, requeue): + """Reject an incoming message + + This method allows a client to reject a message. It can be + used to interrupt and cancel large incoming messages, or + return untreatable messages to their original queue. + + RULE: + + The server SHOULD be capable of accepting and process the + Reject method while sending message content with a Deliver + or Get-Ok method. I.e. the server should read and process + incoming methods while sending output frames. 
To cancel a
+        partially-sent content, the server sends a content body
+        frame of size 1 (i.e. with no data except the frame-end
+        octet).
+
+        RULE:
+
+            The server SHOULD interpret this method as meaning that
+            the client is unable to process the message at this time.
+
+        RULE:
+
+            A client MUST NOT use this method as a means of selecting
+            messages to process.  A rejected message MAY be discarded
+            or dead-lettered, not necessarily passed to another
+            client.
+
+        PARAMETERS:
+            delivery_tag: longlong
+
+                server-assigned delivery tag
+
+                The server-assigned and channel-specific delivery tag
+
+                RULE:
+
+                    The delivery tag is valid only within the channel
+                    from which the message was received.  I.e. a client
+                    MUST NOT receive a message on one channel and then
+                    acknowledge it on another.
+
+                RULE:
+
+                    The server MUST NOT use a zero value for delivery
+                    tags.  Zero is reserved for client use, meaning "all
+                    messages so far received".
+
+            requeue: boolean
+
+                requeue the message
+
+                If this field is False, the message will be discarded.
+                If this field is True, the server will attempt to
+                requeue the message.
+
+                RULE:
+
+                    The server MUST NOT deliver the message to the
+                    same client within the context of the current
+                    channel.  The recommended strategy is to attempt
+                    to deliver the message to an alternative consumer,
+                    and if that is not possible, to move the message
+                    to a dead-letter queue.  The server MAY use more
+                    sophisticated tracking to hold the message on the
+                    queue and redeliver it to the same client at a
+                    later stage.
+
+        """
+        args = AMQPWriter()
+        args.write_longlong(delivery_tag)
+        args.write_bit(requeue)
+        self._send_method((60, 90), args)
+
+    def _basic_return(self, args, msg):
+        """Return a failed message
+
+        This method returns an undeliverable message that was
+        published with the "immediate" flag set, or an unroutable
+        message published with the "mandatory" flag set.  The reply
+        code and text provide information about the reason that the
+        message was undeliverable.
+
+        PARAMETERS:
+            reply_code: short
+
+                The reply code.  The AMQ reply codes are defined in AMQ
+                RFC 011.
+
+            reply_text: shortstr
+
+                The localised reply text.  This text can be logged as an
+                aid to resolving issues.
+
+            exchange: shortstr
+
+                Specifies the name of the exchange that the message
+                was originally published to.
+
+            routing_key: shortstr
+
+                Message routing key
+
+                Specifies the routing key name specified when the
+                message was published.
+
+        """
+        reply_code = args.read_short()
+        reply_text = args.read_shortstr()
+        exchange = args.read_shortstr()
+        routing_key = args.read_shortstr()
+
+        self.returned_messages.put(
+            (reply_code, reply_text, exchange, routing_key, msg)
+        )
+
+    #############
+    #
+    #  Tx
+    #
+    #
+    #  work with standard transactions
+    #
+    #  Standard transactions provide so-called "1.5 phase commit".  We
+    #  can ensure that work is never lost, but there is a chance of
+    #  confirmations being lost, so that messages may be resent.
+    #  Applications that use standard transactions must be able to
+    #  detect and ignore duplicate messages.
+    #
+    # GRAMMAR::
+    #
+    #     tx = C:SELECT S:SELECT-OK
+    #        / C:COMMIT S:COMMIT-OK
+    #        / C:ROLLBACK S:ROLLBACK-OK
+    #
+    # RULE:
+    #
+    #     A client using standard transactions SHOULD be able to
+    #     track all messages received within a reasonable period, and
+    #     thus detect and reject duplicates of the same message. It
+    #     SHOULD NOT pass these to the application layer.
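+    #
+    # A typical transacted publish, sketched assuming an open
+    # channel ``ch`` and a prepared ``msg`` (names are illustrative)::
+    #
+    #     ch.tx_select()
+    #     ch.basic_publish(msg, exchange='', routing_key='tasks')
+    #     ch.tx_commit()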
+    #
+
+    def tx_commit(self):
+        """Commit the current transaction
+
+        This method commits all messages published and acknowledged in
+        the current transaction.  A new transaction starts immediately
+        after a commit.
+
+        """
+        self._send_method((90, 20))
+        return self.wait(allowed_methods=[
+            (90, 21),  # Channel.tx_commit_ok
+        ])
+
+    def _tx_commit_ok(self, args):
+        """Confirm a successful commit
+
+        This method confirms to the client that the commit succeeded.
+        Note that if a commit fails, the server raises a channel
+        exception.
+
+        """
+        pass
+
+    def tx_rollback(self):
+        """Abandon the current transaction
+
+        This method abandons all messages published and acknowledged
+        in the current transaction.  A new transaction starts
+        immediately after a rollback.
+
+        """
+        self._send_method((90, 30))
+        return self.wait(allowed_methods=[
+            (90, 31),  # Channel.tx_rollback_ok
+        ])
+
+    def _tx_rollback_ok(self, args):
+        """Confirm a successful rollback
+
+        This method confirms to the client that the rollback
+        succeeded.  Note that if a rollback fails, the server raises a
+        channel exception.
+
+        """
+        pass
+
+    def tx_select(self):
+        """Select standard transaction mode
+
+        This method sets the channel to use standard transactions.
+        The client must use this method at least once on a channel
+        before using the Commit or Rollback methods.
+
+        """
+        self._send_method((90, 10))
+        return self.wait(allowed_methods=[
+            (90, 11),  # Channel.tx_select_ok
+        ])
+
+    def _tx_select_ok(self, args):
+        """Confirm transaction mode
+
+        This method confirms to the client that the channel was
+        successfully set to use standard transactions.
+
+        """
+        pass
+
+    def confirm_select(self, nowait=False):
+        """Enable publisher confirms for this channel (a RabbitMQ
+        extension).
+
+        Cannot be used if the channel is in transactional mode.
+
+        :param nowait:
+            If set, the server will not respond to the method.
+            The client should not wait for a reply method.  If the
+            server could not complete the method it will raise a channel
+            or connection exception.
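+
+        A rough sketch (assuming an open channel ``ch``, a prepared
+        ``msg``, and that ``events['basic_ack']`` is a mutable set of
+        callbacks; the names are illustrative)::
+
+            def on_ack(delivery_tag, multiple):
+                print 'confirmed', delivery_tag, multiple
+
+            ch.confirm_select()
+            ch.events['basic_ack'].add(on_ack)
+            ch.basic_publish(msg, routing_key='tasks')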
+ + """ + args = AMQPWriter() + args.write_bit(nowait) + + self._send_method((85, 10), args) + if not nowait: + self.wait(allowed_methods=[ + (85, 11), # Confirm.select_ok + ]) + + def _confirm_select_ok(self, args): + """With this method the broker confirms to the client that + the channel is now using publisher confirms.""" + pass + + def _basic_ack_recv(self, args): + delivery_tag = args.read_longlong() + multiple = args.read_bit() + for callback in self.events['basic_ack']: + callback(delivery_tag, multiple) + + _METHOD_MAP = { + (20, 11): _open_ok, + (20, 20): _flow, + (20, 21): _flow_ok, + (20, 40): _close, + (20, 41): _close_ok, + (40, 11): _exchange_declare_ok, + (40, 21): _exchange_delete_ok, + (40, 31): _exchange_bind_ok, + (40, 51): _exchange_unbind_ok, + (50, 11): _queue_declare_ok, + (50, 21): _queue_bind_ok, + (50, 31): _queue_purge_ok, + (50, 41): _queue_delete_ok, + (50, 51): _queue_unbind_ok, + (60, 11): _basic_qos_ok, + (60, 21): _basic_consume_ok, + (60, 30): _basic_cancel_notify, + (60, 31): _basic_cancel_ok, + (60, 50): _basic_return, + (60, 60): _basic_deliver, + (60, 71): _basic_get_ok, + (60, 72): _basic_get_empty, + (60, 80): _basic_ack_recv, + (60, 111): _basic_recover_ok, + (85, 11): _confirm_select_ok, + (90, 11): _tx_select_ok, + (90, 21): _tx_commit_ok, + (90, 31): _tx_rollback_ok, + } + + _IMMEDIATE_METHODS = [ + (60, 50), # basic_return + ] diff --git a/awx/lib/site-packages/amqp/connection.py b/awx/lib/site-packages/amqp/connection.py new file mode 100644 index 0000000000..6768513d68 --- /dev/null +++ b/awx/lib/site-packages/amqp/connection.py @@ -0,0 +1,926 @@ +"""AMQP Connections""" +# Copyright (C) 2007-2008 Barry Pederson +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 +from __future__ import absolute_import + +import logging +import socket + +from array import array +try: + from ssl import SSLError +except ImportError: + class SSLError(Exception): # noqa + pass + +from . import __version__ +from .abstract_channel import AbstractChannel +from .channel import Channel +from .exceptions import ChannelError, ConnectionError +from .method_framing import MethodReader, MethodWriter +from .serialization import AMQPWriter +from .transport import create_transport + +HAS_MSG_PEEK = hasattr(socket, 'MSG_PEEK') + +START_DEBUG_FMT = """ +Start from server, version: %d.%d, properties: %s, mechanisms: %s, locales: %s +""".strip() + +__all__ = ['Connection'] + +# +# Client property info that gets sent to the server on connection startup +# +LIBRARY_PROPERTIES = { + 'product': 'py-amqp', + 'product_version': __version__, + 'capabilities': {}, +} + +AMQP_LOGGER = logging.getLogger('amqp') + + +class Connection(AbstractChannel): + """The connection class provides methods for a client to establish a + network connection to a server, and for both peers to operate the + connection thereafter. 
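+
+    A rough usage sketch (host and credentials are illustrative)::
+
+        conn = Connection(host='localhost:5672', userid='guest',
+                          password='guest', virtual_host='/')
+        ch = conn.channel()
+        # ... declare, publish, and consume on the channel ...
+        conn.close()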
+ + GRAMMAR:: + + connection = open-connection *use-connection close-connection + open-connection = C:protocol-header + S:START C:START-OK + *challenge + S:TUNE C:TUNE-OK + C:OPEN S:OPEN-OK + challenge = S:SECURE C:SECURE-OK + use-connection = *channel + close-connection = C:CLOSE S:CLOSE-OK + / S:CLOSE C:CLOSE-OK + + """ + Channel = Channel + + prev_sent = None + prev_recv = None + missed_heartbeats = 0 + + def __init__(self, host='localhost', userid='guest', password='guest', + login_method='AMQPLAIN', login_response=None, + virtual_host='/', locale='en_US', client_properties=None, + ssl=False, connect_timeout=None, channel_max=None, + frame_max=None, heartbeat=0, **kwargs): + """Create a connection to the specified host, which should be + a 'host[:port]', such as 'localhost', or '1.2.3.4:5672' + (defaults to 'localhost', if a port is not specified then + 5672 is used) + + If login_response is not specified, one is built up for you from + userid and password if they are present. + + The 'ssl' parameter may be simply True/False, or for Python >= 2.6 + a dictionary of options to pass to ssl.wrap_socket() such as + requiring certain certificates. + + """ + channel_max = channel_max or 65535 + frame_max = frame_max or 131072 + if (login_response is None) \ + and (userid is not None) \ + and (password is not None): + login_response = AMQPWriter() + login_response.write_table({'LOGIN': userid, 'PASSWORD': password}) + login_response = login_response.getvalue()[4:] # Skip the length + # at the beginning + + d = dict(LIBRARY_PROPERTIES, **client_properties or {}) + self._method_override = {(60, 50): self._dispatch_basic_return} + + self.channels = {} + # The connection object itself is treated as channel 0 + super(Connection, self).__init__(self, 0) + + self.transport = None + + # Properties set in the Tune method + self.channel_max = channel_max + self.frame_max = frame_max + self.heartbeat = heartbeat + + self._avail_channel_ids = array('H', range(self.channel_max, 0, -1)) + + # Properties set in the Start method + self.version_major = 0 + self.version_minor = 0 + self.server_properties = {} + self.mechanisms = [] + self.locales = [] + + # Let the transport.py module setup the actual + # socket connection to the broker. 
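+        # (create_transport presumably picks an SSL-wrapped or plain
+        # TCP transport depending on the ``ssl`` argument.)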
+ # + self.transport = create_transport(host, connect_timeout, ssl) + + self.method_reader = MethodReader(self.transport) + self.method_writer = MethodWriter(self.transport, self.frame_max) + + self.wait(allowed_methods=[ + (10, 10), # start + ]) + + self._x_start_ok(d, login_method, login_response, locale) + + self._wait_tune_ok = True + while self._wait_tune_ok: + self.wait(allowed_methods=[ + (10, 20), # secure + (10, 30), # tune + ]) + + return self._x_open(virtual_host) + + def _do_close(self): + try: + self.transport.close() + + temp_list = [x for x in self.channels.values() if x is not self] + for ch in temp_list: + ch._do_close() + except socket.error: + pass # connection already closed on the other end + finally: + self.transport = self.connection = self.channels = None + + def _get_free_channel_id(self): + try: + return self._avail_channel_ids.pop() + except IndexError: + raise ConnectionError( + 'No free channel ids, current=%d, channel_max=%d' % ( + len(self.channels), self.channel_max), (20, 10)) + + def _claim_channel_id(self, channel_id): + try: + return self._avail_channel_ids.remove(channel_id) + except ValueError: + raise ConnectionError( + 'Channel %r already open' % (channel_id, )) + + def _wait_method(self, channel_id, allowed_methods): + """Wait for a method from the server destined for + a particular channel.""" + # + # Check the channel's deferred methods + # + method_queue = self.channels[channel_id].method_queue + + for queued_method in method_queue: + method_sig = queued_method[0] + if (allowed_methods is None) \ + or (method_sig in allowed_methods) \ + or (method_sig == (20, 40)): + method_queue.remove(queued_method) + return queued_method + + # + # Nothing queued, need to wait for a method from the peer + # + while 1: + channel, method_sig, args, content = \ + self.method_reader.read_method() + + if channel == channel_id and ( + allowed_methods is None or + method_sig in allowed_methods or + method_sig == (20, 40)): + return method_sig, args, content + + # + # Certain methods like basic_return should be dispatched + # immediately rather than being queued, even if they're not + # one of the 'allowed_methods' we're looking for. + # + if channel and method_sig in self.Channel._IMMEDIATE_METHODS: + self.channels[channel].dispatch_method( + method_sig, args, content, + ) + continue + + # + # Not the channel and/or method we were looking for. Queue + # this method for later + # + self.channels[channel].method_queue.append( + (method_sig, args, content) + ) + + # + # If we just queued up a method for channel 0 (the Connection + # itself) it's probably a close method in reaction to some + # error, so deal with it right away. 
+ # + if not channel: + self.wait() + + def channel(self, channel_id=None): + """Fetch a Channel object identified by the numeric channel_id, or + create that object if it doesn't already exist.""" + try: + return self.channels[channel_id] + except KeyError: + return self.Channel(self, channel_id) + + def is_alive(self): + if HAS_MSG_PEEK: + sock = self.sock + prev = sock.gettimeout() + sock.settimeout(0.0001) + try: + sock.recv(1, socket.MSG_PEEK) + except socket.timeout: + pass + except socket.error: + return False + finally: + sock.settimeout(prev) + return True + + def drain_events(self, timeout=None): + """Wait for an event on a channel.""" + chanmap = self.channels + chanid, method_sig, args, content = self._wait_multiple( + chanmap, None, timeout=timeout, + ) + + channel = chanmap[chanid] + + if (content and + channel.auto_decode and + hasattr(content, 'content_encoding')): + try: + content.body = content.body.decode(content.content_encoding) + except Exception: + pass + + amqp_method = (self._method_override.get(method_sig) or + channel._METHOD_MAP.get(method_sig, None)) + + if amqp_method is None: + raise Exception('Unknown AMQP method %r' % (method_sig, )) + + if content is None: + return amqp_method(channel, args) + else: + return amqp_method(channel, args, content) + + def read_timeout(self, timeout=None): + if timeout is None: + return self.method_reader.read_method() + sock = self.sock + prev = sock.gettimeout() + if prev != timeout: + sock.settimeout(timeout) + try: + try: + return self.method_reader.read_method() + except SSLError, exc: + # http://bugs.python.org/issue10272 + if 'timed out' in str(exc): + raise socket.timeout() + # Non-blocking SSL sockets can throw SSLError + if 'The operation did not complete' in str(exc): + raise socket.timeout() + raise + finally: + if prev != timeout: + sock.settimeout(prev) + + def _wait_multiple(self, channels, allowed_methods, timeout=None): + for channel_id, channel in channels.iteritems(): + method_queue = channel.method_queue + for queued_method in method_queue: + method_sig = queued_method[0] + if (allowed_methods is None or + method_sig in allowed_methods or + method_sig == (20, 40)): + method_queue.remove(queued_method) + method_sig, args, content = queued_method + return channel_id, method_sig, args, content + + # Nothing queued, need to wait for a method from the peer + read_timeout = self.read_timeout + wait = self.wait + while 1: + channel, method_sig, args, content = read_timeout(timeout) + + if channel in channels and ( + allowed_methods is None or + method_sig in allowed_methods or + method_sig == (20, 40)): + return channel, method_sig, args, content + + # Not the channel and/or method we were looking for. Queue + # this method for later + channels[channel].method_queue.append((method_sig, args, content)) + + # + # If we just queued up a method for channel 0 (the Connection + # itself) it's probably a close method in reaction to some + # error, so deal with it right away. 
+ # + if channel == 0: + wait() + + def _dispatch_basic_return(self, channel, args, msg): + reply_code = args.read_short() + reply_text = args.read_shortstr() + exchange = args.read_shortstr() + routing_key = args.read_shortstr() + + exc = ChannelError('basic.return', reply_code, reply_text, (50, 60)) + handlers = channel.events.get('basic_return') + if not handlers: + raise exc + for callback in handlers: + callback(exc, exchange, routing_key, msg) + + def close(self, reply_code=0, reply_text='', method_sig=(0, 0)): + """Request a connection close + + This method indicates that the sender wants to close the + connection. This may be due to internal conditions (e.g. a + forced shut-down) or due to an error handling a specific + method, i.e. an exception. When a close is due to an + exception, the sender provides the class and method id of the + method which caused the exception. + + RULE: + + After sending this method any received method except the + Close-OK method MUST be discarded. + + RULE: + + The peer sending this method MAY use a counter or timeout + to detect failure of the other peer to respond correctly + with the Close-OK method. + + RULE: + + When a server receives the Close method from a client it + MUST delete all server-side resources associated with the + client's context. A client CANNOT reconnect to a context + after sending or receiving a Close method. + + PARAMETERS: + reply_code: short + + The reply code. The AMQ reply codes are defined in AMQ + RFC 011. + + reply_text: shortstr + + The localised reply text. This text can be logged as an + aid to resolving issues. + + class_id: short + + failing method class + + When the close is provoked by a method exception, this + is the class of the method. + + method_id: short + + failing method ID + + When the close is provoked by a method exception, this + is the ID of the method. + + """ + if self.transport is None: + # already closed + return + + args = AMQPWriter() + args.write_short(reply_code) + args.write_shortstr(reply_text) + args.write_short(method_sig[0]) # class_id + args.write_short(method_sig[1]) # method_id + self._send_method((10, 50), args) + return self.wait(allowed_methods=[ + (10, 50), # Connection.close + (10, 51), # Connection.close_ok + ]) + + def _close(self, args): + """Request a connection close + + This method indicates that the sender wants to close the + connection. This may be due to internal conditions (e.g. a + forced shut-down) or due to an error handling a specific + method, i.e. an exception. When a close is due to an + exception, the sender provides the class and method id of the + method which caused the exception. + + RULE: + + After sending this method any received method except the + Close-OK method MUST be discarded. + + RULE: + + The peer sending this method MAY use a counter or timeout + to detect failure of the other peer to respond correctly + with the Close-OK method. + + RULE: + + When a server receives the Close method from a client it + MUST delete all server-side resources associated with the + client's context. A client CANNOT reconnect to a context + after sending or receiving a Close method. + + PARAMETERS: + reply_code: short + + The reply code. The AMQ reply codes are defined in AMQ + RFC 011. + + reply_text: shortstr + + The localised reply text. This text can be logged as an + aid to resolving issues. + + class_id: short + + failing method class + + When the close is provoked by a method exception, this + is the class of the method. 
+ + method_id: short + + failing method ID + + When the close is provoked by a method exception, this + is the ID of the method. + + """ + reply_code = args.read_short() + reply_text = args.read_shortstr() + class_id = args.read_short() + method_id = args.read_short() + + self._x_close_ok() + + raise ConnectionError(reply_code, reply_text, (class_id, method_id)) + + def _x_close_ok(self): + """Confirm a connection close + + This method confirms a Connection.Close method and tells the + recipient that it is safe to release resources for the + connection and close the socket. + + RULE: + + A peer that detects a socket closure without having + received a Close-Ok handshake method SHOULD log the error. + + """ + self._send_method((10, 51)) + self._do_close() + + def _close_ok(self, args): + """Confirm a connection close + + This method confirms a Connection.Close method and tells the + recipient that it is safe to release resources for the + connection and close the socket. + + RULE: + + A peer that detects a socket closure without having + received a Close-Ok handshake method SHOULD log the error. + + """ + self._do_close() + + def _x_open(self, virtual_host, capabilities=''): + """Open connection to virtual host + + This method opens a connection to a virtual host, which is a + collection of resources, and acts to separate multiple + application domains within a server. + + RULE: + + The client MUST open the context before doing any work on + the connection. + + PARAMETERS: + virtual_host: shortstr + + virtual host name + + The name of the virtual host to work with. + + RULE: + + If the server supports multiple virtual hosts, it + MUST enforce a full separation of exchanges, + queues, and all associated entities per virtual + host. An application, connected to a specific + virtual host, MUST NOT be able to access resources + of another virtual host. + + RULE: + + The server SHOULD verify that the client has + permission to access the specified virtual host. + + RULE: + + The server MAY configure arbitrary limits per + virtual host, such as the number of each type of + entity that may be used, per connection and/or in + total. + + capabilities: shortstr + + required capabilities + + The client may specify a number of capability names, + delimited by spaces. The server can use this string + to how to process the client's connection request. + + """ + args = AMQPWriter() + args.write_shortstr(virtual_host) + args.write_shortstr(capabilities) + args.write_bit(False) + self._send_method((10, 40), args) + return self.wait(allowed_methods=[ + (10, 41), # Connection.open_ok + ]) + + def _open_ok(self, args): + """Signal that the connection is ready + + This method signals to the client that the connection is ready + for use. + + PARAMETERS: + known_hosts: shortstr (deprecated) + + """ + AMQP_LOGGER.debug('Open OK!') + + def _secure(self, args): + """Security mechanism challenge + + The SASL protocol works by exchanging challenges and responses + until both peers have received sufficient information to + authenticate each other. This method challenges the client to + provide more information. + + PARAMETERS: + challenge: longstr + + security challenge data + + Challenge information, a block of opaque binary data + passed to the security mechanism. + + """ + challenge = args.read_longstr() # noqa + + def _x_secure_ok(self, response): + """Security mechanism response + + This method attempts to authenticate, passing a block of SASL + data for the security mechanism at the server side. 
+
+        PARAMETERS:
+            response: longstr
+
+                security response data
+
+                A block of opaque data passed to the security
+                mechanism. The contents of this data are defined by
+                the SASL security mechanism.
+
+        """
+        args = AMQPWriter()
+        args.write_longstr(response)
+        self._send_method((10, 21), args)
+
+    def _start(self, args):
+        """Start connection negotiation
+
+        This method starts the connection negotiation process by
+        telling the client the protocol version that the server
+        proposes, along with a list of security mechanisms which the
+        client can use for authentication.
+
+        RULE:
+
+            If the client cannot handle the protocol version suggested
+            by the server it MUST close the socket connection.
+
+        RULE:
+
+            The server MUST provide a protocol version that is lower
+            than or equal to that requested by the client in the
+            protocol header. If the server cannot support the
+            specified protocol it MUST NOT send this method, but MUST
+            close the socket connection.
+
+        PARAMETERS:
+            version_major: octet
+
+                protocol major version
+
+                The protocol major version that the server agrees to
+                use, which cannot be higher than the client's major
+                version.
+
+            version_minor: octet
+
+                protocol minor version
+
+                The protocol minor version that the server agrees to
+                use, which cannot be higher than the client's minor
+                version.
+
+            server_properties: table
+
+                server properties
+
+            mechanisms: longstr
+
+                available security mechanisms
+
+                A list of the security mechanisms that the server
+                supports, delimited by spaces. Currently ASL supports
+                these mechanisms: PLAIN.
+
+            locales: longstr
+
+                available message locales
+
+                A list of the message locales that the server
+                supports, delimited by spaces. The locale defines the
+                language in which the server will send reply texts.
+
+                RULE:
+
+                    All servers MUST support at least the en_US
+                    locale.
+
+        """
+        self.version_major = args.read_octet()
+        self.version_minor = args.read_octet()
+        self.server_properties = args.read_table()
+        self.mechanisms = args.read_longstr().split(' ')
+        self.locales = args.read_longstr().split(' ')
+
+        AMQP_LOGGER.debug(
+            START_DEBUG_FMT,
+            self.version_major, self.version_minor,
+            self.server_properties, self.mechanisms, self.locales,
+        )
+
+    def _x_start_ok(self, client_properties, mechanism, response, locale):
+        """Select security mechanism and locale
+
+        This method selects a SASL security mechanism. ASL uses SASL
+        (RFC2222) to negotiate authentication and encryption.
+
+        PARAMETERS:
+            client_properties: table
+
+                client properties
+
+            mechanism: shortstr
+
+                selected security mechanism
+
+                A single security mechanism selected by the client,
+                which must be one of those specified by the server.
+
+                RULE:
+
+                    The client SHOULD authenticate using the highest-
+                    level security profile it can handle from the list
+                    provided by the server.
+
+                RULE:
+
+                    The mechanism field MUST contain one of the
+                    security mechanisms proposed by the server in the
+                    Start method. If it doesn't, the server MUST close
+                    the socket.
+
+            response: longstr
+
+                security response data
+
+                A block of opaque data passed to the security
+                mechanism. The contents of this data are defined by
+                the SASL security mechanism. For the PLAIN security
+                mechanism this is defined as a field table holding two
+                fields, LOGIN and PASSWORD.
+
+            locale: shortstr
+
+                selected message locale
+
+                A single message locale selected by the client, which
+                must be one of those specified by the server.
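Per the docstring above, the PLAIN response travels as a field table holding LOGIN and PASSWORD. A hedged sketch of how a caller might build it with this module's AMQPWriter (the `login_response` name, the credentials, and the final call are illustrative, not part of the patch):

    login_response = AMQPWriter()
    login_response.write_table({'LOGIN': 'guest', 'PASSWORD': 'guest'})
    response = login_response.getvalue()[4:]  # strip the 32-bit table length
    # conn._x_start_ok({}, 'PLAIN', response, 'en_US')
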
+
+        """
+        if self.server_capabilities.get('consumer_cancel_notify'):
+            if 'capabilities' not in client_properties:
+                client_properties['capabilities'] = {}
+            client_properties['capabilities']['consumer_cancel_notify'] = True
+        args = AMQPWriter()
+        args.write_table(client_properties)
+        args.write_shortstr(mechanism)
+        args.write_longstr(response)
+        args.write_shortstr(locale)
+        self._send_method((10, 11), args)
+
+    def _tune(self, args):
+        """Propose connection tuning parameters
+
+        This method proposes a set of connection configuration values
+        to the client. The client can accept and/or adjust these.
+
+        PARAMETERS:
+            channel_max: short
+
+                proposed maximum channels
+
+                The maximum total number of channels that the server
+                allows per connection. Zero means that the server does
+                not impose a fixed limit, but the number of allowed
+                channels may be limited by available server resources.
+
+            frame_max: long
+
+                proposed maximum frame size
+
+                The largest frame size that the server proposes for
+                the connection. The client can negotiate a lower
+                value. Zero means that the server does not impose any
+                specific limit but may reject very large frames if it
+                cannot allocate resources for them.
+
+                RULE:
+
+                    Until the frame-max has been negotiated, both
+                    peers MUST accept frames of up to 4096 octets
+                    large. The minimum non-zero value for the frame-
+                    max field is 4096.
+
+            heartbeat: short
+
+                desired heartbeat delay
+
+                The delay, in seconds, of the connection heartbeat
+                that the server wants. Zero means the server does not
+                want a heartbeat.
+
+        """
+        self.channel_max = args.read_short() or self.channel_max
+        self.frame_max = args.read_long() or self.frame_max
+        self.method_writer.frame_max = self.frame_max
+        heartbeat = args.read_short()  # noqa
+
+        self._x_tune_ok(self.channel_max, self.frame_max, self.heartbeat)
+
+    def send_heartbeat(self):
+        self.transport.write_frame(8, 0, bytes())
+
+    def heartbeat_tick(self, rate=2):
+        """Verify that heartbeats are sent and received.
+
+        :keyword rate: Rate is how often the tick is called
+           compared to the actual heartbeat value. E.g. if
+           the heartbeat is set to 3 seconds, and the tick
+           is called every 3 / 2 seconds, then the rate is 2.
+
+        """
+        sent_now = self.method_writer.bytes_sent
+        recv_now = self.method_reader.bytes_recv
+
+        if self.prev_sent is not None and self.prev_sent == sent_now:
+            self.send_heartbeat()
+
+        if self.prev_recv is not None and self.prev_recv == recv_now:
+            self.missed_heartbeats += 1
+        else:
+            self.missed_heartbeats = 0
+
+        self.prev_sent, self.prev_recv = sent_now, recv_now
+
+        if self.missed_heartbeats >= rate:
+            raise ConnectionError('Too many heartbeats missed')
+
+    def _x_tune_ok(self, channel_max, frame_max, heartbeat):
+        """Negotiate connection tuning parameters
+
+        This method sends the client's connection tuning parameters to
+        the server. Certain fields are negotiated, others provide
+        capability information.
+
+        PARAMETERS:
+            channel_max: short
+
+                negotiated maximum channels
+
+                The maximum total number of channels that the client
+                will use per connection. May not be higher than the
+                value specified by the server.
+
+                RULE:
+
+                    The server MAY ignore the channel-max value or MAY
+                    use it for tuning its resource allocation.
+
+            frame_max: long
+
+                negotiated maximum frame size
+
+                The largest frame size that the client and server will
+                use for the connection. Zero means that the client
+                does not impose any specific limit but may reject very
+                large frames if it cannot allocate resources for them.
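A hedged usage sketch for heartbeat_tick() above (the `conn` name and the timer loop are illustrative, not part of the patch): the caller drives the tick on its own schedule, `rate` times per negotiated heartbeat interval, and a ConnectionError signals a dead peer:

    import time

    while True:
        time.sleep(conn.heartbeat / 2.0)  # rate=2 -> tick twice per interval
        conn.heartbeat_tick(rate=2)       # raises ConnectionError after
                                          # `rate` consecutive silent ticks
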
+ Note that the frame-max limit applies principally to + content frames, where large contents can be broken + into frames of arbitrary size. + + RULE: + + Until the frame-max has been negotiated, both + peers must accept frames of up to 4096 octets + large. The minimum non-zero value for the frame- + max field is 4096. + + heartbeat: short + + desired heartbeat delay + + The delay, in seconds, of the connection heartbeat + that the client wants. Zero means the client does not + want a heartbeat. + + """ + args = AMQPWriter() + args.write_short(channel_max) + args.write_long(frame_max) + args.write_short(heartbeat or 0) + self._send_method((10, 31), args) + self._wait_tune_ok = False + + @property + def sock(self): + return self.transport.sock + + @property + def server_capabilities(self): + return self.server_properties.get('capabilities') or {} + + _METHOD_MAP = { + (10, 10): _start, + (10, 20): _secure, + (10, 30): _tune, + (10, 41): _open_ok, + (10, 50): _close, + (10, 51): _close_ok, + } + + _IMMEDIATE_METHODS = [] + connection_errors = ( + ConnectionError, + socket.error, + IOError, + OSError, + ) + channel_errors = (ChannelError, ) diff --git a/awx/lib/site-packages/amqp/exceptions.py b/awx/lib/site-packages/amqp/exceptions.py new file mode 100644 index 0000000000..7f786a9e5c --- /dev/null +++ b/awx/lib/site-packages/amqp/exceptions.py @@ -0,0 +1,125 @@ +"""Exceptions used by amqp""" +# Copyright (C) 2007-2008 Barry Pederson +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 +from __future__ import absolute_import + +from struct import pack, unpack + +__all__ = ['AMQPError', 'ConnectionError', 'ChannelError'] + + +class AMQPError(Exception): + + def __init__(self, msg, reply_code=None, reply_text=None, + method_sig=None, method_name=None): + self.message = msg + self.amqp_reply_code = reply_code + self.amqp_reply_text = reply_text + self.amqp_method_sig = method_sig + self.method_name = method_name or '' + if method_sig and not self.method_name: + self.method_name = METHOD_NAME_MAP.get(method_sig, '') + Exception.__init__(self, msg, reply_code, + reply_text, method_sig, self.method_name) + + def __str__(self): + if self.amqp_reply_code: + return '%s: (%s, %s, %s)' % ( + self.message, self.amqp_reply_code, self.amqp_reply_text, + self.amqp_method_sig) + return self.message + + +class ConnectionError(AMQPError): + pass + + +class ChannelError(AMQPError): + pass + + +class ConsumerCancel(ChannelError): + pass + + +METHOD_NAME_MAP = { + (10, 10): 'Connection.start', + (10, 11): 'Connection.start_ok', + (10, 20): 'Connection.secure', + (10, 21): 'Connection.secure_ok', + (10, 30): 'Connection.tune', + (10, 31): 'Connection.tune_ok', + (10, 40): 'Connection.open', + (10, 41): 'Connection.open_ok', + (10, 50): 'Connection.close', + (10, 51): 'Connection.close_ok', + (20, 10): 'Channel.open', + (20, 11): 'Channel.open_ok', + (20, 20): 'Channel.flow', + (20, 21): 'Channel.flow_ok', + (20, 40): 'Channel.close', + (20, 41): 'Channel.close_ok', + (30, 10): 'Access.request', + (30, 11): 'Access.request_ok', + (40, 10): 'Exchange.declare', + (40, 11): 'Exchange.declare_ok', + (40, 20): 'Exchange.delete', + (40, 21): 'Exchange.delete_ok', + (40, 30): 'Exchange.bind', + (40, 31): 'Exchange.bind_ok', + (40, 40): 'Exchange.unbind', + (40, 41): 'Exchange.unbind_ok', + (50, 10): 'Queue.declare', + (50, 11): 'Queue.declare_ok', + (50, 20): 'Queue.bind', + (50, 21): 'Queue.bind_ok', + (50, 30): 'Queue.purge', + (50, 31): 'Queue.purge_ok', + (50, 40): 'Queue.delete', + (50, 41): 'Queue.delete_ok', + (50, 50): 'Queue.unbind', + (50, 51): 'Queue.unbind_ok', + (60, 10): 'Basic.qos', + (60, 11): 'Basic.qos_ok', + (60, 20): 'Basic.consume', + (60, 21): 'Basic.consume_ok', + (60, 30): 'Basic.cancel', + (60, 31): 'Basic.cancel_ok', + (60, 40): 'Basic.publish', + (60, 50): 'Basic.return', + (60, 60): 'Basic.deliver', + (60, 70): 'Basic.get', + (60, 71): 'Basic.get_ok', + (60, 72): 'Basic.get_empty', + (60, 80): 'Basic.ack', + (60, 90): 'Basic.reject', + (60, 100): 'Basic.recover_async', + (60, 110): 'Basic.recover', + (60, 111): 'Basic.recover_ok', + (60, 120): 'Basic.nack', + (90, 10): 'Tx.select', + (90, 11): 'Tx.select_ok', + (90, 20): 'Tx.commit', + (90, 21): 'Tx.commit_ok', + (90, 30): 'Tx.rollback', + (90, 31): 'Tx.rollback_ok', + (85, 10): 'Confirm.select', + (85, 11): 'Confirm.select_ok', +} + + +for _method_id, _method_name in list(METHOD_NAME_MAP.items()): + METHOD_NAME_MAP[unpack('>I', pack('>HH', *_method_id))[0]] = _method_name diff --git a/awx/lib/site-packages/amqp/method_framing.py b/awx/lib/site-packages/amqp/method_framing.py new file mode 100644 index 0000000000..0225ddd35f --- /dev/null +++ b/awx/lib/site-packages/amqp/method_framing.py @@ -0,0 +1,229 @@ +"""Convert between frames and higher-level AMQP methods""" +# Copyright (C) 2007-2008 Barry Pederson +# 
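A worked note on the loop that closes exceptions.py just above: every (class_id, method_id) key is also registered under its packed 32-bit big-endian form, so either representation resolves to the same method name. A small sketch (values computed by hand, not taken from the patch):

    from struct import pack, unpack

    key = unpack('>I', pack('>HH', 60, 40))[0]  # 60 << 16 | 40 == 3932200
    assert METHOD_NAME_MAP[key] == METHOD_NAME_MAP[(60, 40)] == 'Basic.publish'
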
+# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 +from __future__ import absolute_import + +from collections import defaultdict +from struct import pack, unpack +from Queue import Queue + +try: + bytes +except NameError: + # Python 2.5 and lower + bytes = str + +from .basic_message import Message +from .exceptions import AMQPError +from .serialization import AMQPReader + +__all__ = ['MethodReader'] + +# +# MethodReader needs to know which methods are supposed +# to be followed by content headers and bodies. +# +_CONTENT_METHODS = [ + (60, 50), # Basic.return + (60, 60), # Basic.deliver + (60, 71), # Basic.get_ok +] + + +class _PartialMessage(object): + """Helper class to build up a multi-frame method.""" + + def __init__(self, method_sig, args): + self.method_sig = method_sig + self.args = args + self.msg = Message() + self.body_parts = [] + self.body_received = 0 + self.body_size = None + self.complete = False + + def add_header(self, payload): + class_id, weight, self.body_size = unpack('>HHQ', payload[:12]) + self.msg._load_properties(payload[12:]) + self.complete = (self.body_size == 0) + + def add_payload(self, payload): + self.body_parts.append(payload) + self.body_received += len(payload) + + if self.body_received == self.body_size: + self.msg.body = bytes().join(self.body_parts) + self.complete = True + + +class MethodReader(object): + """Helper class to receive frames from the broker, combine them if + necessary with content-headers and content-bodies into complete methods. + + Normally a method is represented as a tuple containing + (channel, method_sig, args, content). + + In the case of a framing error, an :exc:`ConnectionError` is placed + in the queue. + + In the case of unexpected frames, a tuple made up of + ``(channel, ChannelError)`` is placed in the queue. + + """ + + def __init__(self, source): + self.source = source + self.queue = Queue() + self.running = False + self.partial_messages = {} + self.heartbeats = 0 + # For each channel, which type is expected next + self.expected_types = defaultdict(lambda: 1) + # not an actual byte count, just incremented whenever we receive + self.bytes_recv = 0 + + def _next_method(self): + """Read the next method from the source, once one complete method has + been assembled it is placed in the internal queue.""" + empty = self.queue.empty + read_frame = self.source.read_frame + while empty(): + try: + frame_type, channel, payload = read_frame() + except Exception, e: + # + # Connection was closed? Framing Error? 
+ # + self.queue.put(e) + break + + self.bytes_recv += 1 + + if frame_type not in (self.expected_types[channel], 8): + self.queue.put(( + channel, + AMQPError( + 'Received frame type %s while expecting type: %s' % ( + frame_type, self.expected_types[channel]) + ), + )) + elif frame_type == 1: + self._process_method_frame(channel, payload) + elif frame_type == 2: + self._process_content_header(channel, payload) + elif frame_type == 3: + self._process_content_body(channel, payload) + elif frame_type == 8: + self._process_heartbeat(channel, payload) + + def _process_heartbeat(self, channel, payload): + self.heartbeats += 1 + + def _process_method_frame(self, channel, payload): + """Process Method frames""" + method_sig = unpack('>HH', payload[:4]) + args = AMQPReader(payload[4:]) + + if method_sig in _CONTENT_METHODS: + # + # Save what we've got so far and wait for the content-header + # + self.partial_messages[channel] = _PartialMessage(method_sig, args) + self.expected_types[channel] = 2 + else: + self.queue.put((channel, method_sig, args, None)) + + def _process_content_header(self, channel, payload): + """Process Content Header frames""" + partial = self.partial_messages[channel] + partial.add_header(payload) + + if partial.complete: + # + # a bodyless message, we're done + # + self.queue.put((channel, partial.method_sig, + partial.args, partial.msg)) + self.partial_messages.pop(channel, None) + self.expected_types[channel] = 1 + else: + # + # wait for the content-body + # + self.expected_types[channel] = 3 + + def _process_content_body(self, channel, payload): + """Process Content Body frames""" + partial = self.partial_messages[channel] + partial.add_payload(payload) + if partial.complete: + # + # Stick the message in the queue and go back to + # waiting for method frames + # + self.queue.put((channel, partial.method_sig, + partial.args, partial.msg)) + self.partial_messages.pop(channel, None) + self.expected_types[channel] = 1 + + def read_method(self): + """Read a method from the peer.""" + self._next_method() + m = self.queue.get() + if isinstance(m, Exception): + raise m + if isinstance(m, tuple) and isinstance(m[1], AMQPError): + raise m[1] + return m + + +class MethodWriter(object): + """Convert AMQP methods into AMQP frames and send them out + to the peer.""" + + def __init__(self, dest, frame_max): + self.dest = dest + self.frame_max = frame_max + self.bytes_sent = 0 + + def write_method(self, channel, method_sig, args, content=None): + write_frame = self.dest.write_frame + payload = pack('>HH', method_sig[0], method_sig[1]) + args + + if content: + # do this early, so we can raise an exception if there's a + # problem with the content properties, before sending the + # first frame + body = content.body + if isinstance(body, unicode): + coding = content.properties.get('content_encoding', None) + if coding is None: + coding = content.properties['content_encoding'] = 'UTF-8' + + body = body.encode(coding) + properties = content._serialize_properties() + + write_frame(1, channel, payload) + + if content: + payload = pack('>HHQ', method_sig[0], 0, len(body)) + properties + + write_frame(2, channel, payload) + + chunk_size = self.frame_max - 8 + for i in xrange(0, len(body), chunk_size): + write_frame(3, channel, body[i:i + chunk_size]) + self.bytes_sent += 1 diff --git a/awx/lib/site-packages/amqp/serialization.py b/awx/lib/site-packages/amqp/serialization.py new file mode 100644 index 0000000000..bcb3b90b47 --- /dev/null +++ b/awx/lib/site-packages/amqp/serialization.py @@ -0,0 
+1,465 @@ +""" +Convert between bytestreams and higher-level AMQP types. + +2007-11-05 Barry Pederson + +""" +# Copyright (C) 2007 Barry Pederson +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 +from __future__ import absolute_import + +import sys + +from datetime import datetime +from decimal import Decimal +from struct import pack, unpack +from time import mktime + +IS_PY3K = sys.version_info[0] >= 3 + +if IS_PY3K: + def byte(n): + return bytes([n]) +else: + byte = chr + +try: + from io import BytesIO +except ImportError: # Py2.5 + try: + from cStringIO import StringIO as BytesIO # noqa + except ImportError: + from StringIO import StringIO as BytesIO # noqa + +try: + bytes +except NameError: + # Python 2.5 and lower + bytes = str + + +class AMQPReader(object): + """Read higher-level AMQP types from a bytestream.""" + def __init__(self, source): + """Source should be either a file-like object with a read() method, or + a plain (non-unicode) string.""" + if isinstance(source, bytes): + self.input = BytesIO(source) + elif hasattr(source, 'read'): + self.input = source + else: + raise ValueError( + 'AMQPReader needs a file-like object or plain string') + + self.bitcount = self.bits = 0 + + def close(self): + self.input.close() + + def read(self, n): + """Read n bytes.""" + self.bitcount = self.bits = 0 + return self.input.read(n) + + def read_bit(self): + """Read a single boolean value.""" + if not self.bitcount: + self.bits = ord(self.input.read(1)) + self.bitcount = 8 + result = (self.bits & 1) == 1 + self.bits >>= 1 + self.bitcount -= 1 + return result + + def read_octet(self): + """Read one byte, return as an integer""" + self.bitcount = self.bits = 0 + return unpack('B', self.input.read(1))[0] + + def read_short(self): + """Read an unsigned 16-bit integer""" + self.bitcount = self.bits = 0 + return unpack('>H', self.input.read(2))[0] + + def read_long(self): + """Read an unsigned 32-bit integer""" + self.bitcount = self.bits = 0 + return unpack('>I', self.input.read(4))[0] + + def read_longlong(self): + """Read an unsigned 64-bit integer""" + self.bitcount = self.bits = 0 + return unpack('>Q', self.input.read(8))[0] + + def read_float(self): + """Read float value.""" + self.bitcount = self.bits = 0 + return unpack('>d', self.input.read(8))[0] + + def read_shortstr(self): + """Read a short string that's stored in up to 255 bytes. + + The encoding isn't specified in the AMQP spec, so + assume it's utf-8 + + """ + self.bitcount = self.bits = 0 + slen = unpack('B', self.input.read(1))[0] + return self.input.read(slen).decode('utf-8') + + def read_longstr(self): + """Read a string that's up to 2**32 bytes. 
+
+        The encoding isn't specified in the AMQP spec, so
+        assume it's utf-8
+
+        """
+        self.bitcount = self.bits = 0
+        slen = unpack('>I', self.input.read(4))[0]
+        return self.input.read(slen).decode('utf-8')
+
+    def read_table(self):
+        """Read an AMQP table, and return as a Python dictionary."""
+        self.bitcount = self.bits = 0
+        tlen = unpack('>I', self.input.read(4))[0]
+        table_data = AMQPReader(self.input.read(tlen))
+        result = {}
+        while table_data.input.tell() < tlen:
+            name = table_data.read_shortstr()
+            val = table_data.read_item()
+            result[name] = val
+        return result
+
+    def read_item(self):
+        ftype = ord(self.input.read(1))
+        if ftype == 83:  # 'S'
+            val = self.read_longstr()
+        elif ftype == 73:  # 'I'
+            val = unpack('>i', self.input.read(4))[0]
+        elif ftype == 68:  # 'D'
+            d = self.read_octet()
+            n = unpack('>i', self.input.read(4))[0]
+            val = Decimal(n) / Decimal(10 ** d)
+        elif ftype == 84:  # 'T'
+            val = self.read_timestamp()
+        elif ftype == 70:  # 'F'
+            val = self.read_table()  # recurse
+        elif ftype == 65:  # 'A'
+            val = self.read_array()
+        elif ftype == 116:  # 't'
+            val = self.read_bit()
+        elif ftype == 100:  # 'd'
+            val = self.read_float()
+        else:
+            raise ValueError(
+                'Unknown value in table: %r (%r)' % (
+                    ftype, type(ftype)))
+        return val
+
+    def read_array(self):
+        array_length = unpack('>I', self.input.read(4))[0]
+        array_data = AMQPReader(self.input.read(array_length))
+        result = []
+        while array_data.input.tell() < array_length:
+            val = array_data.read_item()
+            result.append(val)
+        return result
+
+    def read_timestamp(self):
+        """Read an AMQP timestamp, which is a 64-bit integer representing
+        seconds since the Unix epoch in 1-second resolution.
+
+        Return as a Python datetime.datetime object,
+        expressed as localtime.
+
+        """
+        return datetime.fromtimestamp(self.read_longlong())
+
+
+class AMQPWriter(object):
+    """Convert higher-level AMQP types to bytestreams."""
+
+    def __init__(self, dest=None):
+        """dest may be a file-type object (with a write() method). If None
+        then a BytesIO is created, and the contents can be accessed with
+        this class's getvalue() method."""
+        self.out = BytesIO() if dest is None else dest
+        self.bits = []
+        self.bitcount = 0
+
+    def _flushbits(self):
+        if self.bits:
+            out = self.out
+            for b in self.bits:
+                out.write(pack('B', b))
+            self.bits = []
+            self.bitcount = 0
+
+    def close(self):
+        """Pass through if possible to any file-like destinations."""
+        try:
+            self.out.close()
+        except AttributeError:
+            pass
+
+    def flush(self):
+        """Pass through if possible to any file-like destinations."""
+        try:
+            self.out.flush()
+        except AttributeError:
+            pass
+
+    def getvalue(self):
+        """Get what's been encoded so far if we're working with a BytesIO."""
+        self._flushbits()
+        return self.out.getvalue()
+
+    def write(self, s):
+        """Write a plain Python string with no special encoding in Python 2.x,
+        or bytes in Python 3.x"""
+        self._flushbits()
+        self.out.write(s)
+
+    def write_bit(self, b):
+        """Write a boolean value."""
+        b = 1 if b else 0
+        shift = self.bitcount % 8
+        if shift == 0:
+            self.bits.append(0)
+        self.bits[-1] |= (b << shift)
+        self.bitcount += 1
+
+    def write_octet(self, n):
+        """Write an integer as an unsigned 8-bit value."""
+        if n < 0 or n > 255:
+            raise ValueError('Octet %r out of range 0..255' % (n, ))
+        self._flushbits()
+        self.out.write(pack('B', n))
+
+    def write_short(self, n):
+        """Write an integer as an unsigned 16-bit value."""
+        if n < 0 or n > 65535:
+            raise ValueError('Short %r out of range 0..65535' % (n, ))
+        self._flushbits()
+        self.out.write(pack('>H', int(n)))
+
+    def write_long(self, n):
+        """Write an integer as an unsigned 32-bit value."""
+        if n < 0 or n >= 4294967296:
+            raise ValueError('Long %r out of range 0..2**32-1' % (n, ))
+        self._flushbits()
+        self.out.write(pack('>I', n))
+
+    def write_longlong(self, n):
+        """Write an integer as an unsigned 64-bit value."""
+        if n < 0 or n >= 18446744073709551616:
+            raise ValueError('Longlong %r out of range 0..2**64-1' % (n, ))
+        self._flushbits()
+        self.out.write(pack('>Q', n))
+
+    def write_shortstr(self, s):
+        """Write a string up to 255 bytes long (after any encoding).
+
+        If passed a unicode string, encode with UTF-8.
+
+        """
+        self._flushbits()
+        if isinstance(s, unicode):
+            s = s.encode('utf-8')
+        if len(s) > 255:
+            raise ValueError('String too long (%r)' % (len(s), ))
+        self.write_octet(len(s))
+        self.out.write(s)
+
+    def write_longstr(self, s):
+        """Write a string up to 2**32 bytes long after encoding.
+
+        If passed a unicode string, encode as UTF-8.
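A quick round-trip sketch of the AMQPWriter/AMQPReader pair defined in this file (illustrative, not part of the patch; note that table keys and short strings decode back as unicode under Python 2):

    w = AMQPWriter()
    w.write_table({'name': u'hello', 'count': 1})
    r = AMQPReader(w.getvalue())
    assert r.read_table() == {u'name': u'hello', u'count': 1}
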
+
+        """
+        self._flushbits()
+        if isinstance(s, unicode):
+            s = s.encode('utf-8')
+        self.write_long(len(s))
+        self.out.write(s)
+
+    def write_table(self, d):
+        """Write out a Python dictionary made up of string keys, and values
+        that are strings, signed integers, Decimal, datetime.datetime, or
+        sub-dictionaries following the same constraints."""
+        self._flushbits()
+        table_data = AMQPWriter()
+        for k, v in d.iteritems():
+            table_data.write_shortstr(k)
+            table_data.write_item(v)
+        table_data = table_data.getvalue()
+        self.write_long(len(table_data))
+        self.out.write(table_data)
+
+    def write_item(self, v):
+        if isinstance(v, basestring):
+            if isinstance(v, unicode):
+                v = v.encode('utf-8')
+            self.write(byte(83))  # 'S'
+            self.write_longstr(v)
+        elif isinstance(v, bool):
+            self.write(pack('>cB', byte(116), int(v)))  # 't'
+        elif isinstance(v, float):
+            self.write(pack('>cd', byte(100), v))  # 'd'
+        elif isinstance(v, (int, long)):
+            self.write(pack('>ci', byte(73), v))  # 'I'
+        elif isinstance(v, Decimal):
+            self.write(byte(68))  # 'D'
+            sign, digits, exponent = v.as_tuple()
+            v = 0
+            for d in digits:
+                v = (v * 10) + d
+            if sign:
+                v = -v
+            self.write_octet(-exponent)
+            self.write(pack('>i', v))
+        elif isinstance(v, datetime):
+            self.write(byte(84))  # 'T'
+            self.write_timestamp(v)
+            ## FIXME: timezone ?
+        elif isinstance(v, dict):
+            self.write(byte(70))  # 'F'
+            self.write_table(v)
+        elif isinstance(v, (list, tuple)):
+            self.write(byte(65))  # 'A'
+            self.write_array(v)
+        else:
+            raise ValueError(
+                'Table type %r not handled by amqp: %r' % (
+                    type(v), v))
+
+    def write_array(self, a):
+        array_data = AMQPWriter()
+        for v in a:
+            array_data.write_item(v)
+        array_data = array_data.getvalue()
+        self.write_long(len(array_data))
+        self.out.write(array_data)
+
+    def write_timestamp(self, v):
+        """Write out a Python datetime.datetime object as a 64-bit integer
+        representing seconds since the Unix epoch."""
+        self.out.write(pack('>q', long(mktime(v.timetuple()))))
+
+
+class GenericContent(object):
+    """Abstract base class for AMQP content.
+
+    Subclasses should override the PROPERTIES attribute.
+
+    """
+    PROPERTIES = [('dummy', 'shortstr')]
+
+    def __init__(self, **props):
+        """Save the properties appropriate to this AMQP content type
+        in a 'properties' dictionary."""
+        d = {}
+        for propname, _ in self.PROPERTIES:
+            if propname in props:
+                d[propname] = props[propname]
+        # FIXME: should we ignore unknown properties?
+ + self.properties = d + + def __eq__(self, other): + """Check if this object has the same properties as another + content object.""" + try: + return self.properties == other.properties + except AttributeError: + return NotImplemented + + def __getattr__(self, name): + """Look for additional properties in the 'properties' + dictionary, and if present - the 'delivery_info' + dictionary.""" + if name == '__setstate__': + # Allows pickling/unpickling to work + raise AttributeError('__setstate__') + + if name in self.properties: + return self.properties[name] + + if 'delivery_info' in self.__dict__ \ + and name in self.delivery_info: + return self.delivery_info[name] + + raise AttributeError(name) + + def _load_properties(self, raw_bytes): + """Given the raw bytes containing the property-flags and property-list + from a content-frame-header, parse and insert into a dictionary + stored in this object as an attribute named 'properties'.""" + r = AMQPReader(raw_bytes) + + # + # Read 16-bit shorts until we get one with a low bit set to zero + # + flags = [] + while 1: + flag_bits = r.read_short() + flags.append(flag_bits) + if flag_bits & 1 == 0: + break + + shift = 0 + d = {} + for key, proptype in self.PROPERTIES: + if shift == 0: + if not flags: + break + flag_bits, flags = flags[0], flags[1:] + shift = 15 + if flag_bits & (1 << shift): + d[key] = getattr(r, 'read_' + proptype)() + shift -= 1 + + self.properties = d + + def _serialize_properties(self): + """serialize the 'properties' attribute (a dictionary) into + the raw bytes making up a set of property flags and a + property list, suitable for putting into a content frame header.""" + shift = 15 + flag_bits = 0 + flags = [] + raw_bytes = AMQPWriter() + for key, proptype in self.PROPERTIES: + val = self.properties.get(key, None) + if val is not None: + if shift == 0: + flags.append(flag_bits) + flag_bits = 0 + shift = 15 + + flag_bits |= (1 << shift) + if proptype != 'bit': + getattr(raw_bytes, 'write_' + proptype)(val) + + shift -= 1 + + flags.append(flag_bits) + result = AMQPWriter() + for flag_bits in flags: + result.write_short(flag_bits) + result.write(raw_bytes.getvalue()) + + return result.getvalue() diff --git a/awx/lib/site-packages/amqp/transport.py b/awx/lib/site-packages/amqp/transport.py new file mode 100644 index 0000000000..8092c5fec3 --- /dev/null +++ b/awx/lib/site-packages/amqp/transport.py @@ -0,0 +1,252 @@ +""" +Read/Write AMQP frames over network transports. + +2009-01-14 Barry Pederson + +""" +# Copyright (C) 2009 Barry Pederson +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
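A sketch of the property-flag wire scheme implemented by _load_properties()/_serialize_properties() above (the Demo subclass is hypothetical, not part of the patch): each 16-bit flag short covers up to 15 properties, high bit first, and a set bit means that property's value appears in the property list that follows:

    class Demo(GenericContent):
        PROPERTIES = [('content_type', 'shortstr'),
                      ('delivery_mode', 'octet')]

    d = Demo(content_type='text/plain')
    raw = d._serialize_properties()   # flags 0x8000, then 'text/plain'
    d2 = Demo()
    d2._load_properties(raw)
    assert d2.properties == {'content_type': u'text/plain'}
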
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 +from __future__ import absolute_import + +import errno +import re +import socket + +# Jython does not have this attribute +try: + from socket import SOL_TCP +except ImportError: # pragma: no cover + from socket import IPPROTO_TCP as SOL_TCP # noqa + +# +# See if Python 2.6+ SSL support is available +# +try: + import ssl + HAVE_PY26_SSL = True +except: + HAVE_PY26_SSL = False + +try: + bytes +except: + # Python 2.5 and lower + bytes = str + +from struct import pack, unpack + +from .exceptions import AMQPError + +AMQP_PORT = 5672 + +# Yes, Advanced Message Queuing Protocol Protocol is redundant +AMQP_PROTOCOL_HEADER = 'AMQP\x01\x01\x00\x09'.encode('latin_1') + +# Match things like: [fe80::1]:5432, from RFC 2732 +IPV6_LITERAL = re.compile(r'\[([\.0-9a-f:]+)\](?::(\d+))?') + + +class _AbstractTransport(object): + """Common superclass for TCP and SSL transports""" + + def __init__(self, host, connect_timeout): + msg = 'socket.getaddrinfo() for %s returned an empty list' % host + port = AMQP_PORT + + m = IPV6_LITERAL.match(host) + if m: + host = m.group(1) + if m.group(2): + port = int(m.group(2)) + else: + if ':' in host: + host, port = host.rsplit(':', 1) + port = int(port) + + self.sock = None + last_err = None + for res in socket.getaddrinfo(host, port, 0, + socket.SOCK_STREAM, SOL_TCP): + af, socktype, proto, canonname, sa = res + try: + self.sock = socket.socket(af, socktype, proto) + self.sock.settimeout(connect_timeout) + self.sock.connect(sa) + except socket.error, msg: + self.sock.close() + self.sock = None + last_err = msg + continue + break + + if not self.sock: + # Didn't connect, return the most recent error message + raise socket.error(last_err) + + self.sock.settimeout(None) + self.sock.setsockopt(SOL_TCP, socket.TCP_NODELAY, 1) + self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) + + self._setup_transport() + + self._write(AMQP_PROTOCOL_HEADER) + + def __del__(self): + try: + self.close() + except socket.error: + pass + finally: + self.sock = None + + def _read(self, n, initial=False): + """Read exactly n bytes from the peer""" + raise NotImplementedError('Must be overriden in subclass') + + def _setup_transport(self): + """Do any additional initialization of the class (used + by the subclasses).""" + pass + + def _shutdown_transport(self): + """Do any preliminary work in shutting down the connection.""" + pass + + def _write(self, s): + """Completely write a string to the peer.""" + raise NotImplementedError('Must be overriden in subclass') + + def close(self): + if self.sock is not None: + self._shutdown_transport() + # Call shutdown first to make sure that pending messages + # reach the AMQP broker if the program exits after + # calling this method. 
+ self.sock.shutdown(socket.SHUT_RDWR) + self.sock.close() + self.sock = None + + def read_frame(self): + """Read an AMQP frame.""" + frame_type, channel, size = unpack('>BHI', self._read(7, True)) + payload = self._read(size) + ch = ord(self._read(1)) + if ch == 206: # '\xce' + return frame_type, channel, payload + else: + raise AMQPError( + 'Framing Error, received 0x%02x while expecting 0xce' % ch) + + def write_frame(self, frame_type, channel, payload): + """Write out an AMQP frame.""" + size = len(payload) + self._write( + pack('>BHI%dsB' % size, frame_type, channel, size, payload, 0xce), + ) + + +class SSLTransport(_AbstractTransport): + """Transport that works over SSL""" + + def __init__(self, host, connect_timeout, ssl): + if isinstance(ssl, dict): + self.sslopts = ssl + self.sslobj = None + super(SSLTransport, self).__init__(host, connect_timeout) + + def _setup_transport(self): + """Wrap the socket in an SSL object, either the + new Python 2.6 version, or the older Python 2.5 and + lower version.""" + if HAVE_PY26_SSL: + if hasattr(self, 'sslopts'): + self.sslobj = ssl.wrap_socket(self.sock, **self.sslopts) + else: + self.sslobj = ssl.wrap_socket(self.sock) + self.sslobj.do_handshake() + else: + self.sslobj = socket.ssl(self.sock) + + def _shutdown_transport(self): + """Unwrap a Python 2.6 SSL socket, so we can call shutdown()""" + if HAVE_PY26_SSL and (self.sslobj is not None): + self.sock = self.sslobj.unwrap() + self.sslobj = None + + def _read(self, n, initial=False): + """It seems that SSL Objects read() method may not supply as much + as you're asking for, at least with extremely large messages. + somewhere > 16K - found this in the test_channel.py test_large + unittest.""" + result = '' + + while len(result) < n: + try: + s = self.sslobj.read(n - len(result)) + except socket.error, exc: + if not initial and exc.errno in (errno.EAGAIN, errno.EINTR): + continue + raise + if not s: + raise IOError('Socket closed') + result += s + + return result + + def _write(self, s): + """Write a string out to the SSL socket fully.""" + while s: + n = self.sslobj.write(s) + if not n: + raise IOError('Socket closed') + s = s[n:] + + +class TCPTransport(_AbstractTransport): + """Transport that deals directly with TCP socket.""" + + def _setup_transport(self): + """Setup to _write() directly to the socket, and + do our own buffered reads.""" + self._write = self.sock.sendall + self._read_buffer = bytes() + + def _read(self, n, initial=False): + """Read exactly n bytes from the socket""" + while len(self._read_buffer) < n: + try: + s = self.sock.recv(65536) + except socket.error, exc: + if not initial and exc.errno in (errno.EAGAIN, errno.EINTR): + continue + raise + if not s: + raise IOError('Socket closed') + self._read_buffer += s + + result = self._read_buffer[:n] + self._read_buffer = self._read_buffer[n:] + + return result + + +def create_transport(host, connect_timeout, ssl=False): + """Given a few parameters from the Connection constructor, + select and create a subclass of _AbstractTransport.""" + if ssl: + return SSLTransport(host, connect_timeout, ssl) + else: + return TCPTransport(host, connect_timeout) diff --git a/awx/lib/site-packages/anyjson/__init__.py b/awx/lib/site-packages/anyjson/__init__.py new file mode 100644 index 0000000000..1f671be163 --- /dev/null +++ b/awx/lib/site-packages/anyjson/__init__.py @@ -0,0 +1,142 @@ +"""Wraps the best available JSON implementation available in a common +interface""" + +import sys + +VERSION = (0, 3, 3) +__version__ = ".".join(map(str, 
VERSION[0:3])) + "".join(VERSION[3:])
+__author__ = "Rune Halvorsen"
+__contact__ = "runefh@gmail.com"
+__homepage__ = "http://bitbucket.org/runeh/anyjson/"
+__docformat__ = "restructuredtext"
+
+# -eof meta-
+
+#: The json implementation object. This is probably not useful to you,
+#: except to get the name of the implementation in use. The name is
+#: available through ``implementation.name``.
+implementation = None
+
+# json.loads does not support buffer() objects,
+# so we load() and StringIO instead, and it won't copy.
+if sys.version_info[0] == 3:
+    from io import StringIO
+else:
+    try:
+        from cStringIO import StringIO  # noqa
+    except ImportError:
+        from StringIO import StringIO  # noqa
+
+#: List of known json modules, and the names of their loads/dumps
+#: methods, as well as the exceptions they throw. Exception can be either
+#: an exception class or a string.
+_modules = [("yajl", "dumps", TypeError, "loads", ValueError, "load"),
+            ("jsonlib2", "write", "WriteError", "read", "ReadError", None),
+            ("jsonlib", "write", "WriteError", "read", "ReadError", None),
+            ("simplejson", "dumps", TypeError, "loads", ValueError, "load"),
+            ("json", "dumps", TypeError, "loads", ValueError, "load"),
+            ("django.utils.simplejson", "dumps", TypeError, "loads", ValueError, "load"),
+            ("cjson", "encode", "EncodeError", "decode", "DecodeError", None)
+            ]
+
+_fields = ("modname", "encoder", "encerror",
+           "decoder", "decerror", "filedecoder")
+
+
+class _JsonImplementation(object):
+    """Encapsulates a JSON implementation"""
+
+    def __init__(self, modspec):
+        modinfo = dict(zip(_fields, modspec))
+
+        if modinfo["modname"] == "cjson":
+            import warnings
+            warnings.warn("cjson is deprecated! See http://pypi.python.org/pypi/python-cjson/1.0.5", DeprecationWarning)
+
+        # No try block. We want ImportError to end up at caller
+        module = self._attempt_load(modinfo["modname"])
+
+        self.implementation = modinfo["modname"]
+        self._encode = getattr(module, modinfo["encoder"])
+        self._decode = getattr(module, modinfo["decoder"])
+        fdec = modinfo["filedecoder"]
+        self._filedecode = fdec and getattr(module, fdec)
+        self._encode_error = modinfo["encerror"]
+        self._decode_error = modinfo["decerror"]
+
+        if isinstance(modinfo["encerror"], basestring):
+            self._encode_error = getattr(module, modinfo["encerror"])
+        if isinstance(modinfo["decerror"], basestring):
+            self._decode_error = getattr(module, modinfo["decerror"])
+
+        self.name = modinfo["modname"]
+
+    def __repr__(self):
+        return "<_JsonImplementation instance using %s>" % self.name
+
+    def _attempt_load(self, modname):
+        """Attempt to load module name modname, returning it on success,
+        throwing ImportError if module couldn't be imported"""
+        __import__(modname)
+        return sys.modules[modname]
+
+    def dumps(self, data):
+        """Serialize the datastructure to json. Returns a string. Raises
+        TypeError if the object could not be serialized."""
+        try:
+            return self._encode(data)
+        except self._encode_error, exc:
+            raise TypeError, TypeError(*exc.args), sys.exc_info()[2]
+    serialize = dumps
+
+    def loads(self, s):
+        """Deserialize the string to python data types. Raises
+        ValueError if the string could not be parsed."""
+        # uses StringIO to support buffer objects.
+        try:
+            if self._filedecode and not isinstance(s, basestring):
+                return self._filedecode(StringIO(s))
+            return self._decode(s)
+        except self._decode_error, exc:
+            raise ValueError, ValueError(*exc.args), sys.exc_info()[2]
+    deserialize = loads
+
+
+def force_implementation(modname):
+    """Forces anyjson to use a specific json module if it's available"""
+    global implementation
+    for name, spec in [(e[0], e) for e in _modules]:
+        if name == modname:
+            implementation = _JsonImplementation(spec)
+            return
+    raise ImportError("No module named: %s" % modname)
+
+
+if __name__ == "__main__":
+    # If run as a script, we do nothing but print an error message.
+    # We do NOT try to load a compatible module because that may throw an
+    # exception, which renders the package uninstallable with easy_install
+    # (It tries to execfile the script when installing, to make sure it works)
+    print "Running anyjson as a standalone script is not supported"
+    sys.exit(1)
+else:
+    for modspec in _modules:
+        try:
+            implementation = _JsonImplementation(modspec)
+            break
+        except ImportError:
+            pass
+    else:
+        raise ImportError("No supported JSON module found")
+
+
+    def loads(value):
+        """Deserialize a JSON-encoded string to a Python object."""
+        return implementation.loads(value)
+    deserialize = loads  # compat
+
+
+    def dumps(value):
+        """Serialize a Python object to JSON."""
+        return implementation.dumps(value)
+    serialize = dumps
diff --git a/awx/lib/site-packages/billiard/__init__.py b/awx/lib/site-packages/billiard/__init__.py
new file mode 100644
index 0000000000..5b22a4fa1a
--- /dev/null
+++ b/awx/lib/site-packages/billiard/__init__.py
@@ -0,0 +1,323 @@
+"""Python multiprocessing fork with improvements and bugfixes"""
+#
+# Package analogous to 'threading.py' but using processes
+#
+# multiprocessing/__init__.py
+#
+# This package is intended to duplicate the functionality (and much of
+# the API) of threading.py but uses processes instead of threads.  A
+# subpackage 'multiprocessing.dummy' has the same API but is a simple
+# wrapper for 'threading'.
+#
+# Try calling `multiprocessing.doc.main()` to read the html
+# documentation in a webbrowser.
+#
+#
+# Copyright (c) 2006-2008, R Oudkerk
+# Licensed to PSF under a Contributor Agreement.
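A usage sketch for the anyjson facade vendored just above (the printed results assume the stdlib json backend happens to be the one selected at import time):

    import anyjson

    s = anyjson.dumps({'spam': 1})     # -> '{"spam": 1}'
    obj = anyjson.loads(s)             # -> {u'spam': 1} under Python 2
    print anyjson.implementation.name  # e.g. 'json'
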
+# + +from __future__ import absolute_import +from __future__ import with_statement + +VERSION = (2, 7, 3, 28) +__version__ = ".".join(map(str, VERSION[0:4])) + "".join(VERSION[4:]) +__author__ = 'R Oudkerk / Python Software Foundation' +__author_email__ = 'python-dev@python.org' +__maintainer__ = 'Ask Solem', +__contact__ = "ask@celeryproject.org" +__homepage__ = "http://github.com/celery/billiard" +__docformat__ = "restructuredtext" + +# -eof meta- + +__all__ = [ + 'Process', 'current_process', 'active_children', 'freeze_support', + 'Manager', 'Pipe', 'cpu_count', 'log_to_stderr', 'get_logger', + 'allow_connection_pickling', 'BufferTooShort', 'TimeoutError', + 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Condition', + 'Event', 'Queue', 'JoinableQueue', 'Pool', 'Value', 'Array', + 'RawValue', 'RawArray', 'SUBDEBUG', 'SUBWARNING', 'set_executable', + 'forking_enable', 'forking_is_enabled' +] + +# +# Imports +# + +import os +import sys +import warnings + +from .exceptions import ( # noqa + ProcessError, + BufferTooShort, + TimeoutError, + AuthenticationError, + TimeLimitExceeded, + SoftTimeLimitExceeded, + WorkerLostError, +) +from .process import Process, current_process, active_children +from .util import SUBDEBUG, SUBWARNING + + +def ensure_multiprocessing(): + from ._ext import ensure_multiprocessing + return ensure_multiprocessing() + + +W_NO_EXECV = """\ +force_execv is not supported as the billiard C extension \ +is not installed\ +""" + +# +# Definitions not depending on native semaphores +# + + +def Manager(): + ''' + Returns a manager associated with a running server process + + The managers methods such as `Lock()`, `Condition()` and `Queue()` + can be used to create shared objects. + ''' + from .managers import SyncManager + m = SyncManager() + m.start() + return m + + +def Pipe(duplex=True): + ''' + Returns two connection object connected by a pipe + ''' + if sys.version_info[0] == 3: + from multiprocessing.connection import Pipe + else: + from billiard._connection import Pipe + return Pipe(duplex) + + +def cpu_count(): + ''' + Returns the number of CPUs in the system + ''' + if sys.platform == 'win32': + try: + num = int(os.environ['NUMBER_OF_PROCESSORS']) + except (ValueError, KeyError): + num = 0 + elif 'bsd' in sys.platform or sys.platform == 'darwin': + comm = '/sbin/sysctl -n hw.ncpu' + if sys.platform == 'darwin': + comm = '/usr' + comm + try: + with os.popen(comm) as p: + num = int(p.read()) + except ValueError: + num = 0 + else: + try: + num = os.sysconf('SC_NPROCESSORS_ONLN') + except (ValueError, OSError, AttributeError): + num = 0 + + if num >= 1: + return num + else: + raise NotImplementedError('cannot determine number of cpus') + + +def freeze_support(): + ''' + Check whether this is a fake forked process in a frozen executable. + If so then run code specified by commandline and exit. + ''' + if sys.platform == 'win32' and getattr(sys, 'frozen', False): + from .forking import freeze_support + freeze_support() + + +def get_logger(): + ''' + Return package logger -- if it does not already exist then it is created + ''' + from .util import get_logger + return get_logger() + + +def log_to_stderr(level=None): + ''' + Turn on logging and add a handler which prints to stderr + ''' + from .util import log_to_stderr + return log_to_stderr(level) + + +def allow_connection_pickling(): + ''' + Install support for sending connections and sockets between processes + ''' + from . 
import reduction # noqa + +# +# Definitions depending on native semaphores +# + + +def Lock(): + ''' + Returns a non-recursive lock object + ''' + from .synchronize import Lock + return Lock() + + +def RLock(): + ''' + Returns a recursive lock object + ''' + from .synchronize import RLock + return RLock() + + +def Condition(lock=None): + ''' + Returns a condition object + ''' + from .synchronize import Condition + return Condition(lock) + + +def Semaphore(value=1): + ''' + Returns a semaphore object + ''' + from .synchronize import Semaphore + return Semaphore(value) + + +def BoundedSemaphore(value=1): + ''' + Returns a bounded semaphore object + ''' + from .synchronize import BoundedSemaphore + return BoundedSemaphore(value) + + +def Event(): + ''' + Returns an event object + ''' + from .synchronize import Event + return Event() + + +def Queue(maxsize=0): + ''' + Returns a queue object + ''' + from .queues import Queue + return Queue(maxsize) + + +def JoinableQueue(maxsize=0): + ''' + Returns a queue object + ''' + from .queues import JoinableQueue + return JoinableQueue(maxsize) + + +def Pool(processes=None, initializer=None, initargs=(), maxtasksperchild=None): + ''' + Returns a process pool object + ''' + from .pool import Pool + return Pool(processes, initializer, initargs, maxtasksperchild) + + +def RawValue(typecode_or_type, *args): + ''' + Returns a shared object + ''' + from .sharedctypes import RawValue + return RawValue(typecode_or_type, *args) + + +def RawArray(typecode_or_type, size_or_initializer): + ''' + Returns a shared array + ''' + from .sharedctypes import RawArray + return RawArray(typecode_or_type, size_or_initializer) + + +def Value(typecode_or_type, *args, **kwds): + ''' + Returns a synchronized shared object + ''' + from .sharedctypes import Value + return Value(typecode_or_type, *args, **kwds) + + +def Array(typecode_or_type, size_or_initializer, **kwds): + ''' + Returns a synchronized shared array + ''' + from .sharedctypes import Array + return Array(typecode_or_type, size_or_initializer, **kwds) + +# +# +# + + +def set_executable(executable): + ''' + Sets the path to a python.exe or pythonw.exe binary used to run + child processes on Windows instead of sys.executable. + Useful for people embedding Python. + ''' + from .forking import set_executable + set_executable(executable) + + +def forking_is_enabled(): + ''' + Returns a boolean value indicating whether billiard is + currently set to create child processes by forking the current + python process rather than by starting a new instances of python. + + On Windows this always returns `False`. On Unix it returns `True` by + default. + ''' + from . import forking + return forking._forking_is_enabled + + +def forking_enable(value): + ''' + Enable/disable creation of child process by forking the current process. + + `value` should be a boolean value. If `value` is true then + forking is enabled. If `value` is false then forking is disabled. + On systems with `os.fork()` forking is enabled by default, and on + other systems it is always disabled. + ''' + if not value: + from ._ext import supports_exec + if supports_exec: + from . 
import forking + if value and not hasattr(os, 'fork'): + raise ValueError('os.fork() not found') + forking._forking_is_enabled = bool(value) + if not value: + os.environ["MULTIPROCESSING_FORKING_DISABLE"] = "1" + else: + warnings.warn(RuntimeWarning(W_NO_EXECV)) +if os.environ.get("MULTIPROCESSING_FORKING_DISABLE"): + forking_enable(False) diff --git a/awx/lib/site-packages/billiard/_connection.py b/awx/lib/site-packages/billiard/_connection.py new file mode 100644 index 0000000000..6d8ffde839 --- /dev/null +++ b/awx/lib/site-packages/billiard/_connection.py @@ -0,0 +1,473 @@ +# +# A higher level module for using sockets (or Windows named pipes) +# +# multiprocessing/connection.py +# +# Copyright (c) 2006-2008, R Oudkerk +# Licensed to PSF under a Contributor Agreement. +# + +from __future__ import absolute_import +from __future__ import with_statement + +__all__ = ['Client', 'Listener', 'Pipe'] + +import os +import sys +import socket +import errno +import time +import tempfile +import itertools + +from . import AuthenticationError +from ._ext import _billiard, win32 +from .compat import get_errno +from .util import get_temp_dir, Finalize, sub_debug, debug +from .forking import duplicate, close +from .compat import bytes + +try: + WindowsError = WindowsError # noqa +except NameError: + WindowsError = None # noqa + + +# global set later +xmlrpclib = None + + +# +# +# + +BUFSIZE = 8192 +# A very generous timeout when it comes to local connections... +CONNECTION_TIMEOUT = 20. + +_mmap_counter = itertools.count() + +default_family = 'AF_INET' +families = ['AF_INET'] + +if hasattr(socket, 'AF_UNIX'): + default_family = 'AF_UNIX' + families += ['AF_UNIX'] + +if sys.platform == 'win32': + default_family = 'AF_PIPE' + families += ['AF_PIPE'] + + +def _init_timeout(timeout=CONNECTION_TIMEOUT): + return time.time() + timeout + + +def _check_timeout(t): + return time.time() > t + +# +# +# + + +def arbitrary_address(family): + ''' + Return an arbitrary free address for the given family + ''' + if family == 'AF_INET': + return ('localhost', 0) + elif family == 'AF_UNIX': + return tempfile.mktemp(prefix='listener-', dir=get_temp_dir()) + elif family == 'AF_PIPE': + return tempfile.mktemp(prefix=r'\\.\pipe\pyc-%d-%d-' % + (os.getpid(), _mmap_counter.next())) + else: + raise ValueError('unrecognized family') + + +def address_type(address): + ''' + Return the types of the address + + This can be 'AF_INET', 'AF_UNIX', or 'AF_PIPE' + ''' + if type(address) == tuple: + return 'AF_INET' + elif type(address) is str and address.startswith('\\\\'): + return 'AF_PIPE' + elif type(address) is str: + return 'AF_UNIX' + else: + raise ValueError('address type of %r unrecognized' % address) + +# +# Public functions +# + + +class Listener(object): + ''' + Returns a listener object. + + This is a wrapper for a bound socket which is 'listening' for + connections, or for a Windows named pipe. + ''' + def __init__(self, address=None, family=None, backlog=1, authkey=None): + family = (family or + (address and address_type(address)) or + default_family) + address = address or arbitrary_address(family) + + if family == 'AF_PIPE': + self._listener = PipeListener(address, backlog) + else: + self._listener = SocketListener(address, family, backlog) + + if authkey is not None and not isinstance(authkey, bytes): + raise TypeError('authkey should be a byte string') + + self._authkey = authkey + + def accept(self): + ''' + Accept a connection on the bound socket or named pipe of `self`. + + Returns a `Connection` object. 
+ ''' + if self._listener is None: + raise IOError('listener is closed') + c = self._listener.accept() + if self._authkey: + deliver_challenge(c, self._authkey) + answer_challenge(c, self._authkey) + return c + + def close(self): + ''' + Close the bound socket or named pipe of `self`. + ''' + if self._listener is not None: + self._listener.close() + self._listener = None + + address = property(lambda self: self._listener._address) + last_accepted = property(lambda self: self._listener._last_accepted) + + def __enter__(self): + return self + + def __exit__(self, *exc_args): + self.close() + + +def Client(address, family=None, authkey=None): + ''' + Returns a connection to the address of a `Listener` + ''' + family = family or address_type(address) + if family == 'AF_PIPE': + c = PipeClient(address) + else: + c = SocketClient(address) + + if authkey is not None and not isinstance(authkey, bytes): + raise TypeError('authkey should be a byte string') + + if authkey is not None: + answer_challenge(c, authkey) + deliver_challenge(c, authkey) + + return c + + +if sys.platform != 'win32': + + def Pipe(duplex=True): + ''' + Returns pair of connection objects at either end of a pipe + ''' + if duplex: + s1, s2 = socket.socketpair() + c1 = _billiard.Connection(os.dup(s1.fileno())) + c2 = _billiard.Connection(os.dup(s2.fileno())) + s1.close() + s2.close() + else: + fd1, fd2 = os.pipe() + c1 = _billiard.Connection(fd1, writable=False) + c2 = _billiard.Connection(fd2, readable=False) + + return c1, c2 + +else: + + def Pipe(duplex=True): # noqa + ''' + Returns pair of connection objects at either end of a pipe + ''' + address = arbitrary_address('AF_PIPE') + if duplex: + openmode = win32.PIPE_ACCESS_DUPLEX + access = win32.GENERIC_READ | win32.GENERIC_WRITE + obsize, ibsize = BUFSIZE, BUFSIZE + else: + openmode = win32.PIPE_ACCESS_INBOUND + access = win32.GENERIC_WRITE + obsize, ibsize = 0, BUFSIZE + + h1 = win32.CreateNamedPipe( + address, openmode, + win32.PIPE_TYPE_MESSAGE | win32.PIPE_READMODE_MESSAGE | + win32.PIPE_WAIT, + 1, obsize, ibsize, win32.NMPWAIT_WAIT_FOREVER, win32.NULL + ) + h2 = win32.CreateFile( + address, access, 0, win32.NULL, win32.OPEN_EXISTING, 0, win32.NULL + ) + win32.SetNamedPipeHandleState( + h2, win32.PIPE_READMODE_MESSAGE, None, None + ) + + try: + win32.ConnectNamedPipe(h1, win32.NULL) + except WindowsError, e: + if e.args[0] != win32.ERROR_PIPE_CONNECTED: + raise + + c1 = _billiard.PipeConnection(h1, writable=duplex) + c2 = _billiard.PipeConnection(h2, readable=duplex) + + return c1, c2 + +# +# Definitions for connections based on sockets +# + + +class SocketListener(object): + ''' + Representation of a socket which is bound to an address and listening + ''' + def __init__(self, address, family, backlog=1): + self._socket = socket.socket(getattr(socket, family)) + try: + # SO_REUSEADDR has different semantics on Windows (Issue #2550). 
+ if os.name == 'posix': + self._socket.setsockopt(socket.SOL_SOCKET, + socket.SO_REUSEADDR, 1) + self._socket.bind(address) + self._socket.listen(backlog) + self._address = self._socket.getsockname() + except OSError: + self._socket.close() + raise + self._family = family + self._last_accepted = None + + if family == 'AF_UNIX': + self._unlink = Finalize( + self, os.unlink, args=(address,), exitpriority=0 + ) + else: + self._unlink = None + + def accept(self): + s, self._last_accepted = self._socket.accept() + fd = duplicate(s.fileno()) + conn = _billiard.Connection(fd) + s.close() + return conn + + def close(self): + self._socket.close() + if self._unlink is not None: + self._unlink() + + +def SocketClient(address): + ''' + Return a connection object connected to the socket given by `address` + ''' + family = address_type(address) + s = socket.socket(getattr(socket, family)) + t = _init_timeout() + + while 1: + try: + s.connect(address) + except socket.error, exc: + if get_errno(exc) != errno.ECONNREFUSED or _check_timeout(t): + debug('failed to connect to address %s', address) + raise + time.sleep(0.01) + else: + break + else: + raise + + fd = duplicate(s.fileno()) + conn = _billiard.Connection(fd) + s.close() + return conn + +# +# Definitions for connections based on named pipes +# + +if sys.platform == 'win32': + + class PipeListener(object): + ''' + Representation of a named pipe + ''' + def __init__(self, address, backlog=None): + self._address = address + handle = win32.CreateNamedPipe( + address, win32.PIPE_ACCESS_DUPLEX, + win32.PIPE_TYPE_MESSAGE | win32.PIPE_READMODE_MESSAGE | + win32.PIPE_WAIT, + win32.PIPE_UNLIMITED_INSTANCES, BUFSIZE, BUFSIZE, + win32.NMPWAIT_WAIT_FOREVER, win32.NULL + ) + self._handle_queue = [handle] + self._last_accepted = None + + sub_debug('listener created with address=%r', self._address) + + self.close = Finalize( + self, PipeListener._finalize_pipe_listener, + args=(self._handle_queue, self._address), exitpriority=0 + ) + + def accept(self): + newhandle = win32.CreateNamedPipe( + self._address, win32.PIPE_ACCESS_DUPLEX, + win32.PIPE_TYPE_MESSAGE | win32.PIPE_READMODE_MESSAGE | + win32.PIPE_WAIT, + win32.PIPE_UNLIMITED_INSTANCES, BUFSIZE, BUFSIZE, + win32.NMPWAIT_WAIT_FOREVER, win32.NULL + ) + self._handle_queue.append(newhandle) + handle = self._handle_queue.pop(0) + try: + win32.ConnectNamedPipe(handle, win32.NULL) + except WindowsError, e: + if e.args[0] != win32.ERROR_PIPE_CONNECTED: + raise + return _billiard.PipeConnection(handle) + + @staticmethod + def _finalize_pipe_listener(queue, address): + sub_debug('closing listener with address=%r', address) + for handle in queue: + close(handle) + + def PipeClient(address): + ''' + Return a connection object connected to the pipe given by `address` + ''' + t = _init_timeout() + while 1: + try: + win32.WaitNamedPipe(address, 1000) + h = win32.CreateFile( + address, win32.GENERIC_READ | win32.GENERIC_WRITE, + 0, win32.NULL, win32.OPEN_EXISTING, 0, win32.NULL, + ) + except WindowsError, e: + if e.args[0] not in ( + win32.ERROR_SEM_TIMEOUT, + win32.ERROR_PIPE_BUSY) or _check_timeout(t): + raise + else: + break + else: + raise + + win32.SetNamedPipeHandleState( + h, win32.PIPE_READMODE_MESSAGE, None, None + ) + return _billiard.PipeConnection(h) + +# +# Authentication stuff +# + +MESSAGE_LENGTH = 20 + +CHALLENGE = bytes('#CHALLENGE#', 'ascii') +WELCOME = bytes('#WELCOME#', 'ascii') +FAILURE = bytes('#FAILURE#', 'ascii') + + +def deliver_challenge(connection, authkey): + import hmac + assert 
isinstance(authkey, bytes) + message = os.urandom(MESSAGE_LENGTH) + connection.send_bytes(CHALLENGE + message) + digest = hmac.new(authkey, message).digest() + response = connection.recv_bytes(256) # reject large message + if response == digest: + connection.send_bytes(WELCOME) + else: + connection.send_bytes(FAILURE) + raise AuthenticationError('digest received was wrong') + + +def answer_challenge(connection, authkey): + import hmac + assert isinstance(authkey, bytes) + message = connection.recv_bytes(256) # reject large message + assert message[:len(CHALLENGE)] == CHALLENGE, 'message = %r' % message + message = message[len(CHALLENGE):] + digest = hmac.new(authkey, message).digest() + connection.send_bytes(digest) + response = connection.recv_bytes(256) # reject large message + if response != WELCOME: + raise AuthenticationError('digest sent was rejected') + +# +# Support for using xmlrpclib for serialization +# + + +class ConnectionWrapper(object): + def __init__(self, conn, dumps, loads): + self._conn = conn + self._dumps = dumps + self._loads = loads + for attr in ('fileno', 'close', 'poll', 'recv_bytes', 'send_bytes'): + obj = getattr(conn, attr) + setattr(self, attr, obj) + + def send(self, obj): + s = self._dumps(obj) + self._conn.send_bytes(s) + + def recv(self): + s = self._conn.recv_bytes() + return self._loads(s) + + +def _xml_dumps(obj): + return xmlrpclib.dumps((obj,), None, None, None, 1).encode('utf8') + + +def _xml_loads(s): + (obj,), method = xmlrpclib.loads(s.decode('utf8')) + return obj + + +class XmlListener(Listener): + def accept(self): + global xmlrpclib + import xmlrpclib # noqa + obj = Listener.accept(self) + return ConnectionWrapper(obj, _xml_dumps, _xml_loads) + + +def XmlClient(*args, **kwds): + global xmlrpclib + import xmlrpclib # noqa + return ConnectionWrapper(Client(*args, **kwds), _xml_dumps, _xml_loads) diff --git a/awx/lib/site-packages/billiard/_ext.py b/awx/lib/site-packages/billiard/_ext.py new file mode 100644 index 0000000000..a02a156669 --- /dev/null +++ b/awx/lib/site-packages/billiard/_ext.py @@ -0,0 +1,39 @@ +from __future__ import absolute_import + +import sys + +supports_exec = True + +if sys.platform.startswith("java"): + _billiard = None +else: + try: + import _billiard # noqa + except ImportError: + import _multiprocessing as _billiard # noqa + supports_exec = False + try: + Connection = _billiard.Connection + except AttributeError: # Py3 + from multiprocessing.connection import Connection # noqa + + PipeConnection = getattr(_billiard, "PipeConnection", None) + win32 = getattr(_billiard, "win32", None) + + +def ensure_multiprocessing(): + if _billiard is None: + raise NotImplementedError("multiprocessing not supported") + + +def ensure_SemLock(): + try: + from _billiard import SemLock # noqa + except ImportError: + try: + from _multiprocessing import SemLock # noqa + except ImportError: + raise ImportError("""\ +This platform lacks a functioning sem_open implementation, therefore, +the required synchronization primitives needed will not function, +see issue 3770.""") diff --git a/awx/lib/site-packages/billiard/_win.py b/awx/lib/site-packages/billiard/_win.py new file mode 100644 index 0000000000..fd6db55532 --- /dev/null +++ b/awx/lib/site-packages/billiard/_win.py @@ -0,0 +1,116 @@ +# -*- coding: utf-8 -*- +""" + billiard._win + ~~~~~~~~~~~~~ + + Windows utilities to terminate process groups. + +""" +from __future__ import absolute_import + +import os + +# psutil is painfully slow in win32. 
So to avoid adding big +# dependencies like pywin32, a ctypes-based solution is preferred + +# Code based on the winappdbg project http://winappdbg.sourceforge.net/ +# (BSD License) +from ctypes import ( + byref, sizeof, windll, + Structure, WinError, POINTER, + c_size_t, c_char, c_void_p, +) +from ctypes.wintypes import DWORD, LONG + +ERROR_NO_MORE_FILES = 18 +INVALID_HANDLE_VALUE = c_void_p(-1).value + + +class PROCESSENTRY32(Structure): + _fields_ = [ + ('dwSize', DWORD), + ('cntUsage', DWORD), + ('th32ProcessID', DWORD), + ('th32DefaultHeapID', c_size_t), + ('th32ModuleID', DWORD), + ('cntThreads', DWORD), + ('th32ParentProcessID', DWORD), + ('pcPriClassBase', LONG), + ('dwFlags', DWORD), + ('szExeFile', c_char * 260), + ] +LPPROCESSENTRY32 = POINTER(PROCESSENTRY32) + + +def CreateToolhelp32Snapshot(dwFlags=2, th32ProcessID=0): + hSnapshot = windll.kernel32.CreateToolhelp32Snapshot(dwFlags, + th32ProcessID) + if hSnapshot == INVALID_HANDLE_VALUE: + raise WinError() + return hSnapshot + + +def Process32First(hSnapshot, pe=None): + return _Process32n(windll.kernel32.Process32First, hSnapshot, pe) + + +def Process32Next(hSnapshot, pe=None): + return _Process32n(windll.kernel32.Process32Next, hSnapshot, pe) + + +def _Process32n(fun, hSnapshot, pe=None): + if pe is None: + pe = PROCESSENTRY32() + pe.dwSize = sizeof(PROCESSENTRY32) + success = fun(hSnapshot, byref(pe)) + if not success: + if windll.kernel32.GetLastError() == ERROR_NO_MORE_FILES: + return + raise WinError() + return pe + + +def get_all_processes_pids(): + """Return a dictionary with all process pids as keys and their + parent pids as values. Ignore processes with no parents. + """ + h = CreateToolhelp32Snapshot() + parents = {} + pe = Process32First(h) + while pe: + if pe.th32ParentProcessID: + parents[pe.th32ProcessID] = pe.th32ParentProcessID + pe = Process32Next(h, pe) + + return parents + + +def get_processtree_pids(pid, include_parent=True): + """Return a list with all the pids of a process tree""" + parents = get_all_processes_pids() + all_pids = parents.keys() + pids = set([pid]) + while 1: + pids_new = pids.copy() + + for _pid in all_pids: + if parents[_pid] in pids: + pids_new.add(_pid) + + if pids_new == pids: + break + + pids = pids_new.copy() + + if not include_parent: + pids.remove(pid) + + return list(pids) + + +def kill_processtree(pid, signum): + """Kill a process and all its descendants""" + family_pids = get_processtree_pids(pid) + + for _pid in family_pids: + os.kill(_pid, signum) diff --git a/awx/lib/site-packages/billiard/common.py b/awx/lib/site-packages/billiard/common.py new file mode 100644 index 0000000000..04c37ea67d --- /dev/null +++ b/awx/lib/site-packages/billiard/common.py @@ -0,0 +1,73 @@ +from __future__ import absolute_import + +import signal +import sys + +from time import time + +from .exceptions import RestartFreqExceeded + +TERMSIGS = ( + 'SIGHUP', + 'SIGQUIT', + 'SIGILL', + 'SIGTRAP', + 'SIGABRT', + 'SIGEMT', + 'SIGFPE', + 'SIGBUS', + 'SIGSEGV', + 'SIGSYS', + 'SIGPIPE', + 'SIGALRM', + 'SIGTERM', + 'SIGXCPU', + 'SIGXFSZ', + 'SIGVTALRM', + 'SIGPROF', + 'SIGUSR1', + 'SIGUSR2', +) + + +def _shutdown_cleanup(signum, frame): + sys.exit(-(256 - signum)) + + +def reset_signals(handler=_shutdown_cleanup): + for sig in TERMSIGS: + try: + signum = getattr(signal, sig) + current = signal.getsignal(signum) + if current is not None and current != signal.SIG_IGN: + signal.signal(signum, handler) + except (OSError, AttributeError, ValueError, RuntimeError): + pass + + +class restart_state(object):
RestartFreqExceeded = RestartFreqExceeded + + def __init__(self, maxR, maxT): + self.maxR, self.maxT = maxR, maxT + self.R, self.T = 0, None + + def step(self, now=None): + now = time() if now is None else now + R = self.R + if self.T and now - self.T >= self.maxT: + # maxT passed, reset counter and time passed. + self.T, self.R = now, 0 + elif self.maxR and self.R >= self.maxR: + # verify that R has a value as the result handler + # resets this when a job is accepted. If a job is accepted + # the startup probably went fine (startup restart burst + # protection) + if self.R: # pragma: no cover + self.R = 0 # reset in case someone catches the error + raise self.RestartFreqExceeded("%r in %rs" % (R, self.maxT)) + # first run sets T + if self.T is None: + self.T = now + self.R += 1 diff --git a/awx/lib/site-packages/billiard/compat.py b/awx/lib/site-packages/billiard/compat.py new file mode 100644 index 0000000000..ac31f3b84b --- /dev/null +++ b/awx/lib/site-packages/billiard/compat.py @@ -0,0 +1,48 @@ +from __future__ import absolute_import + +import errno +import os +import sys +import __builtin__ + +if sys.version_info[0] == 3: + bytes = bytes +else: + try: + _bytes = __builtin__.bytes + except AttributeError: + _bytes = str + + class bytes(_bytes): # noqa + + def __new__(cls, *args): + if len(args) > 1: + return _bytes(args[0]).encode(*args[1:]) + return _bytes(*args) + +try: + closerange = os.closerange +except AttributeError: + + def closerange(fd_low, fd_high): # noqa + for fd in reversed(xrange(fd_low, fd_high)): + try: + os.close(fd) + except OSError, exc: + if exc.errno != errno.EBADF: + raise + + +def get_errno(exc): + """:exc:`socket.error` and :exc:`IOError` first got + the ``.errno`` attribute in Py2.7""" + try: + return exc.errno + except AttributeError: + try: + # e.args = (errno, reason) + if isinstance(exc.args, tuple) and len(exc.args) == 2: + return exc.args[0] + except AttributeError: + pass + return 0 diff --git a/awx/lib/site-packages/billiard/connection.py b/awx/lib/site-packages/billiard/connection.py new file mode 100644 index 0000000000..58e9a2ffa3 --- /dev/null +++ b/awx/lib/site-packages/billiard/connection.py @@ -0,0 +1,11 @@ +from __future__ import absolute_import + + +import sys + +if sys.version_info[0] == 3: + from multiprocessing import connection +else: + from billiard import _connection as connection # noqa + +sys.modules[__name__] = connection diff --git a/awx/lib/site-packages/billiard/dummy/__init__.py b/awx/lib/site-packages/billiard/dummy/__init__.py new file mode 100644 index 0000000000..96b996fdda --- /dev/null +++ b/awx/lib/site-packages/billiard/dummy/__init__.py @@ -0,0 +1,167 @@ +# +# Support for the API of the multiprocessing package using threads +# +# multiprocessing/dummy/__init__.py +# +# Copyright (c) 2006-2008, R Oudkerk +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# 3. Neither the name of author nor the names of any contributors may be +# used to endorse or promote products derived from this software +# without specific prior written permission.
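The restart_state class above is a small rate limiter: step() counts (re)starts and raises RestartFreqExceeded once more than maxR of them land inside a maxT-second window. A minimal sketch of the intended use, assuming billiard is importable as vendored here:

    from billiard.common import restart_state
    from billiard.exceptions import RestartFreqExceeded

    state = restart_state(maxR=5, maxT=10)   # allow 5 restarts per 10s window
    try:
        for _ in range(6):
            state.step()                     # each (re)start bumps the counter
    except RestartFreqExceeded as exc:
        print('restarting too fast: %s' % (exc, ))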
+# +# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS +# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +# SUCH DAMAGE. +# +from __future__ import absolute_import + +__all__ = [ + 'Process', 'current_process', 'active_children', 'freeze_support', + 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Condition', + 'Event', 'Queue', 'Manager', 'Pipe', 'Pool', 'JoinableQueue' +] + +# +# Imports +# + +import threading +import sys +import weakref +import array + +from threading import Lock, RLock, Semaphore, BoundedSemaphore +from threading import Event +from Queue import Queue + +if sys.version_info[0] == 3: + from multiprocessing.connection import Pipe +else: + from billiard._connection import Pipe + + +class DummyProcess(threading.Thread): + + def __init__(self, group=None, target=None, name=None, args=(), kwargs={}): + threading.Thread.__init__(self, group, target, name, args, kwargs) + self._pid = None + self._children = weakref.WeakKeyDictionary() + self._start_called = False + self._parent = current_process() + + def start(self): + assert self._parent is current_process() + self._start_called = True + self._parent._children[self] = None + threading.Thread.start(self) + + @property + def exitcode(self): + if self._start_called and not self.is_alive(): + return 0 + else: + return None + + +try: + _Condition = threading._Condition +except AttributeError: # Py3 + _Condition = threading.Condition # noqa + + +class Condition(_Condition): + if sys.version_info[0] == 3: + notify_all = _Condition.notifyAll + else: + notify_all = _Condition.notifyAll.im_func + + +Process = DummyProcess +current_process = threading.currentThread +current_process()._children = weakref.WeakKeyDictionary() + + +def active_children(): + children = current_process()._children + for p in list(children): + if not p.is_alive(): + children.pop(p, None) + return list(children) + + +def freeze_support(): + pass + + +class Namespace(object): + + def __init__(self, **kwds): + self.__dict__.update(kwds) + + def __repr__(self): + items = self.__dict__.items() + temp = [] + for name, value in items: + if not name.startswith('_'): + temp.append('%s=%r' % (name, value)) + temp.sort() + return 'Namespace(%s)' % str.join(', ', temp) + + +dict = dict +list = list + + +def Array(typecode, sequence, lock=True): + return array.array(typecode, sequence) + + +class Value(object): + + def __init__(self, typecode, value, lock=True): + self._typecode = typecode + self._value = value + + def _get(self): + return self._value + + def _set(self, value): + self._value = value + value = property(_get, _set) + + def __repr__(self): + return '<%r(%r, %r)>' % (type(self).__name__, + self._typecode, self._value) + + +def Manager(): + return sys.modules[__name__] + + +def shutdown(): + pass + + +def Pool(processes=None, initializer=None, initargs=()): + from billiard.pool import ThreadPool + return 
ThreadPool(processes, initializer, initargs) + +JoinableQueue = Queue diff --git a/awx/lib/site-packages/billiard/dummy/connection.py b/awx/lib/site-packages/billiard/dummy/connection.py new file mode 100644 index 0000000000..62f8ae510f --- /dev/null +++ b/awx/lib/site-packages/billiard/dummy/connection.py @@ -0,0 +1,94 @@ +# +# Analogue of `multiprocessing.connection` which uses queues instead of sockets +# +# multiprocessing/dummy/connection.py +# +# Copyright (c) 2006-2008, R Oudkerk +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# 3. Neither the name of author nor the names of any contributors may be +# used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS +# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +# SUCH DAMAGE. 
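The billiard.dummy package above replicates the Process API on top of plain threads, which is handy for exercising pool-style code without forking. A small usage sketch, assuming billiard is on sys.path:

    from billiard.dummy import Process, Queue

    q = Queue()
    p = Process(target=lambda: q.put('done'))   # really a thread
    p.start()
    p.join()
    print(q.get())                              # -> 'done'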
+# +from __future__ import absolute_import + +__all__ = ['Client', 'Listener', 'Pipe'] + +from Queue import Queue + + +families = [None] + + +class Listener(object): + + def __init__(self, address=None, family=None, backlog=1): + self._backlog_queue = Queue(backlog) + + def accept(self): + return Connection(*self._backlog_queue.get()) + + def close(self): + self._backlog_queue = None + + address = property(lambda self: self._backlog_queue) + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.close() + + +def Client(address): + _in, _out = Queue(), Queue() + address.put((_out, _in)) + return Connection(_in, _out) + + +def Pipe(duplex=True): + a, b = Queue(), Queue() + return Connection(a, b), Connection(b, a) + + +class Connection(object): + + def __init__(self, _in, _out): + self._out = _out + self._in = _in + self.send = self.send_bytes = _out.put + self.recv = self.recv_bytes = _in.get + + def poll(self, timeout=0.0): + if self._in.qsize() > 0: + return True + if timeout <= 0.0: + return False + self._in.not_empty.acquire() + self._in.not_empty.wait(timeout) + self._in.not_empty.release() + return self._in.qsize() > 0 + + def close(self): + pass diff --git a/awx/lib/site-packages/billiard/einfo.py b/awx/lib/site-packages/billiard/einfo.py new file mode 100644 index 0000000000..3b6848cca5 --- /dev/null +++ b/awx/lib/site-packages/billiard/einfo.py @@ -0,0 +1,112 @@ +from __future__ import absolute_import + +import sys +import traceback + + +class _Code(object): + + def __init__(self, code): + self.co_filename = code.co_filename + self.co_name = code.co_name + + +class _Frame(object): + Code = _Code + + def __init__(self, frame): + self.f_globals = { + "__file__": frame.f_globals.get("__file__", "__main__"), + "__name__": frame.f_globals.get("__name__"), + "__loader__": None, + } + self.f_locals = fl = {} + try: + fl["__traceback_hide__"] = frame.f_locals["__traceback_hide__"] + except KeyError: + pass + self.f_code = self.Code(frame.f_code) + self.f_lineno = frame.f_lineno + + +class _Object(object): + + def __init__(self, **kw): + [setattr(self, k, v) for k, v in kw.iteritems()] + + +class _Truncated(object): + + def __init__(self): + self.tb_lineno = -1 + self.tb_frame = _Object( + f_globals={"__file__": "", + "__name__": "", + "__loader__": None}, + f_fileno=None, + f_code=_Object(co_filename="...", + co_name="[rest of traceback truncated]"), + ) + self.tb_next = None + + +class Traceback(object): + Frame = _Frame + + tb_frame = tb_lineno = tb_next = None + max_frames = sys.getrecursionlimit() // 8 + + def __init__(self, tb, max_frames=None, depth=0): + limit = self.max_frames = max_frames or self.max_frames + self.tb_frame = self.Frame(tb.tb_frame) + self.tb_lineno = tb.tb_lineno + if tb.tb_next is not None: + if depth <= limit: + self.tb_next = Traceback(tb.tb_next, limit, depth + 1) + else: + self.tb_next = _Truncated() + + +class ExceptionInfo(object): + """Exception wrapping an exception and its traceback. + + :param exc_info: The exception info tuple as returned by + :func:`sys.exc_info`. + + """ + + #: Exception type. + type = None + + #: Exception instance. + exception = None + + #: Pickleable traceback instance for use with :mod:`traceback` + tb = None + + #: String representation of the traceback. + traceback = None + + #: Set to true if this is an internal error. 
+ internal = False + + def __init__(self, exc_info=None, internal=False): + self.type, self.exception, tb = exc_info or sys.exc_info() + try: + self.tb = Traceback(tb) + self.traceback = ''.join( + traceback.format_exception(self.type, self.exception, tb), + ) + self.internal = internal + finally: + del(tb) + + def __str__(self): + return self.traceback + + def __repr__(self): + return "<ExceptionInfo: %r>" % (self.exception, ) + + @property + def exc_info(self): + return self.type, self.exception, self.tb diff --git a/awx/lib/site-packages/billiard/exceptions.py b/awx/lib/site-packages/billiard/exceptions.py new file mode 100644 index 0000000000..df560833e5 --- /dev/null +++ b/awx/lib/site-packages/billiard/exceptions.py @@ -0,0 +1,54 @@ +from __future__ import absolute_import + +try: + from multiprocessing import ( + ProcessError, + BufferTooShort, + TimeoutError, + AuthenticationError, + ) +except ImportError: + class ProcessError(Exception): # noqa + pass + + class BufferTooShort(Exception): # noqa + pass + + class TimeoutError(Exception): # noqa + pass + + class AuthenticationError(Exception): # noqa + pass + + +class TimeLimitExceeded(Exception): + """The time limit has been exceeded and the job has been terminated.""" + + def __str__(self): + return "TimeLimitExceeded%s" % (self.args, ) + + +class SoftTimeLimitExceeded(Exception): + """The soft time limit has been exceeded. This exception is raised + to give the task a chance to clean up.""" + + def __str__(self): + return "SoftTimeLimitExceeded%s" % (self.args, ) + + +class WorkerLostError(Exception): + """The worker processing a job has exited prematurely.""" + + +class Terminated(Exception): + """The worker processing a job has been terminated by user request.""" + + +class RestartFreqExceeded(Exception): + """Restarts too fast.""" + + +class CoroStop(Exception): + """Coroutine exit, as opposed to StopIteration which may + mean it should be restarted.""" + pass diff --git a/awx/lib/site-packages/billiard/forking.py b/awx/lib/site-packages/billiard/forking.py new file mode 100644 index 0000000000..1f1d311dd5 --- /dev/null +++ b/awx/lib/site-packages/billiard/forking.py @@ -0,0 +1,667 @@ +# +# Module for starting a process object using os.fork() or CreateProcess() +# +# multiprocessing/forking.py +# +# Copyright (c) 2006-2008, R Oudkerk +# Licensed to PSF under a Contributor Agreement. +# + +from __future__ import absolute_import + +import os +import sys +import signal +import warnings + +from ._ext import Connection, PipeConnection, win32 +from pickle import load, HIGHEST_PROTOCOL +from billiard import util, process + +__all__ = ['Popen', 'assert_spawning', 'exit', + 'duplicate', 'close', 'ForkingPickler'] + +try: + WindowsError = WindowsError # noqa +except NameError: + class WindowsError(Exception): # noqa + pass + +W_OLD_DJANGO_LAYOUT = """\ +Will add directory %r to path! This is necessary to accommodate \ +pre-Django 1.4 layouts using setup_environ. +You can skip this warning by adding a DJANGO_SETTINGS_MODULE=settings \ +environment variable. +""" + +# +# Choose whether to do a fork or spawn (fork+exec) on Unix. +# This affects how some shared resources should be created.
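The ExceptionInfo class above exists so that a traceback can cross a pipe: it snapshots sys.exc_info() into plain, picklable objects. A short sketch of typical use, assuming billiard is importable:

    import sys
    from billiard.einfo import ExceptionInfo

    try:
        1 / 0
    except Exception:
        einfo = ExceptionInfo(sys.exc_info())
        print(einfo.type.__name__)   # 'ZeroDivisionError'
        print(str(einfo))            # the formatted traceback text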
+# + +_forking_is_enabled = sys.platform != 'win32' + +# +# Check that the current thread is spawning a child process +# + + +def assert_spawning(self): + if not Popen.thread_is_spawning(): + raise RuntimeError( + '%s objects should only be shared between processes' + ' through inheritance' % type(self).__name__ + ) + +# +# Try making some callable types picklable +# +from pickle import Pickler + +if sys.version_info[0] == 3: + from copyreg import dispatch_table + + class ForkingPickler(Pickler): + _extra_reducers = {} + + def __init__(self, *args, **kwargs): + Pickler.__init__(self, *args, **kwargs) + self.dispatch_table = dispatch_table.copy() + self.dispatch_table.update(self._extra_reducers) + + @classmethod + def register(cls, type, reduce): + cls._extra_reducers[type] = reduce + + def _reduce_method(m): + if m.__self__ is None: + return getattr, (m.__class__, m.__func__.__name__) + else: + return getattr, (m.__self__, m.__func__.__name__) + + class _C: + def f(self): + pass + ForkingPickler.register(type(_C().f), _reduce_method) + +else: + + class ForkingPickler(Pickler): # noqa + dispatch = Pickler.dispatch.copy() + + @classmethod + def register(cls, type, reduce): + def dispatcher(self, obj): + rv = reduce(obj) + self.save_reduce(obj=obj, *rv) + cls.dispatch[type] = dispatcher + + def _reduce_method(m): # noqa + if m.im_self is None: + return getattr, (m.im_class, m.im_func.func_name) + else: + return getattr, (m.im_self, m.im_func.func_name) + ForkingPickler.register(type(ForkingPickler.save), _reduce_method) + + +def _reduce_method_descriptor(m): + return getattr, (m.__objclass__, m.__name__) +ForkingPickler.register(type(list.append), _reduce_method_descriptor) +ForkingPickler.register(type(int.__add__), _reduce_method_descriptor) + +try: + from functools import partial +except ImportError: + pass +else: + + def _reduce_partial(p): + return _rebuild_partial, (p.func, p.args, p.keywords or {}) + + def _rebuild_partial(func, args, keywords): + return partial(func, *args, **keywords) + ForkingPickler.register(partial, _reduce_partial) + + +def dump(obj, file, protocol=None): + ForkingPickler(file, protocol).dump(obj) + +# +# Make (Pipe)Connection picklable +# + + +def reduce_connection(conn): + # XXX check not necessary since only registered with ForkingPickler + if not Popen.thread_is_spawning(): + raise RuntimeError( + 'By default %s objects can only be shared between processes\n' + 'using inheritance' % type(conn).__name__ + ) + return type(conn), (Popen.duplicate_for_child(conn.fileno()), + conn.readable, conn.writable) + +ForkingPickler.register(Connection, reduce_connection) +if PipeConnection: + ForkingPickler.register(PipeConnection, reduce_connection) + + +# +# Unix +# + +if sys.platform != 'win32': + import thread + import select + + WINEXE = False + WINSERVICE = False + + exit = os._exit + duplicate = os.dup + close = os.close + _select = util._eintr_retry(select.select) + + # + # We define a Popen class similar to the one from subprocess, but + # whose constructor takes a process object as its argument. 
+ # + + class Popen(object): + + _tls = thread._local() + + def __init__(self, process_obj): + _Django_old_layout_hack__save() + sys.stdout.flush() + sys.stderr.flush() + self.returncode = None + r, w = os.pipe() + self.sentinel = r + + if _forking_is_enabled: + self.pid = os.fork() + if self.pid == 0: + os.close(r) + if 'random' in sys.modules: + import random + random.seed() + code = process_obj._bootstrap() + os._exit(code) + else: + from_parent_fd, to_child_fd = os.pipe() + cmd = get_command_line() + [str(from_parent_fd)] + + self.pid = os.fork() + if self.pid == 0: + os.close(r) + os.close(to_child_fd) + os.execv(sys.executable, cmd) + + # send information to child + prep_data = get_preparation_data(process_obj._name) + os.close(from_parent_fd) + to_child = os.fdopen(to_child_fd, 'wb') + Popen._tls.process_handle = self.pid + try: + dump(prep_data, to_child, HIGHEST_PROTOCOL) + dump(process_obj, to_child, HIGHEST_PROTOCOL) + finally: + del(Popen._tls.process_handle) + to_child.close() + + # `w` will be closed when the child exits, at which point `r` + # will become ready for reading (using e.g. select()). + os.close(w) + util.Finalize(self, os.close, (r,)) + + def poll(self, flag=os.WNOHANG): + if self.returncode is None: + try: + pid, sts = os.waitpid(self.pid, flag) + except os.error: + # Child process not yet created. See #1731717 + # e.errno == errno.ECHILD == 10 + return None + if pid == self.pid: + if os.WIFSIGNALED(sts): + self.returncode = -os.WTERMSIG(sts) + else: + assert os.WIFEXITED(sts) + self.returncode = os.WEXITSTATUS(sts) + return self.returncode + + def wait(self, timeout=None): + if self.returncode is None: + if timeout is not None: + r = _select([self.sentinel], [], [], timeout)[0] + if not r: + return None + # This shouldn't block if select() returned successfully. + return self.poll(os.WNOHANG if timeout == 0.0 else 0) + return self.returncode + + def terminate(self): + if self.returncode is None: + try: + os.kill(self.pid, signal.SIGTERM) + except OSError: + if self.wait(timeout=0.1) is None: + raise + + @staticmethod + def thread_is_spawning(): + if _forking_is_enabled: + return False + else: + return getattr(Popen._tls, 'process_handle', None) is not None + + @staticmethod + def duplicate_for_child(handle): + return handle + +# +# Windows +# + +else: + import thread + import msvcrt + import _subprocess + + # + # + # + + TERMINATE = 0x10000 + WINEXE = (sys.platform == 'win32' and getattr(sys, 'frozen', False)) + WINSERVICE = sys.executable.lower().endswith("pythonservice.exe") + + exit = win32.ExitProcess + close = win32.CloseHandle + + # + # + # + + def duplicate(handle, target_process=None, inheritable=False): + if target_process is None: + target_process = _subprocess.GetCurrentProcess() + return _subprocess.DuplicateHandle( + _subprocess.GetCurrentProcess(), handle, target_process, + 0, inheritable, _subprocess.DUPLICATE_SAME_ACCESS + ).Detach() + + # + # We define a Popen class similar to the one from subprocess, but + # whose constructor takes a process object as its argument. 
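The sentinel pipe used by the Unix Popen above relies on a classic trick: the read end of a pipe becomes readable (at EOF) once every copy of the write end is closed, which happens when the child exits. A standalone, Unix-only illustration of the pattern:

    import os
    import select

    r, w = os.pipe()
    pid = os.fork()
    if pid == 0:                        # child
        os.close(r)
        os._exit(0)                     # exiting closes its copy of w
    os.close(w)                         # parent drops its own copy
    select.select([r], [], [], 5.0)     # returns once the child is gone
    os.waitpid(pid, 0)
    os.close(r)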
+ # + + class Popen(object): + ''' + Start a subprocess to run the code of a process object + ''' + _tls = thread._local() + + def __init__(self, process_obj): + _Django_old_layout_hack__save() + # create pipe for communication with child + rfd, wfd = os.pipe() + + # get handle for read end of the pipe and make it inheritable + rhandle = duplicate(msvcrt.get_osfhandle(rfd), inheritable=True) + os.close(rfd) + + # start process + cmd = get_command_line() + [rhandle] + cmd = ' '.join('"%s"' % x for x in cmd) + hp, ht, pid, tid = _subprocess.CreateProcess( + _python_exe, cmd, None, None, 1, 0, None, None, None + ) + ht.Close() + close(rhandle) + + # set attributes of self + self.pid = pid + self.returncode = None + self._handle = hp + self.sentinel = int(hp) + + # send information to child + prep_data = get_preparation_data(process_obj._name) + to_child = os.fdopen(wfd, 'wb') + Popen._tls.process_handle = int(hp) + try: + dump(prep_data, to_child, HIGHEST_PROTOCOL) + dump(process_obj, to_child, HIGHEST_PROTOCOL) + finally: + del Popen._tls.process_handle + to_child.close() + + @staticmethod + def thread_is_spawning(): + return getattr(Popen._tls, 'process_handle', None) is not None + + @staticmethod + def duplicate_for_child(handle): + return duplicate(handle, Popen._tls.process_handle) + + def wait(self, timeout=None): + if self.returncode is None: + if timeout is None: + msecs = _subprocess.INFINITE + else: + msecs = max(0, int(timeout * 1000 + 0.5)) + + res = _subprocess.WaitForSingleObject(int(self._handle), msecs) + if res == _subprocess.WAIT_OBJECT_0: + code = _subprocess.GetExitCodeProcess(self._handle) + if code == TERMINATE: + code = -signal.SIGTERM + self.returncode = code + + return self.returncode + + def poll(self): + return self.wait(timeout=0) + + def terminate(self): + if self.returncode is None: + try: + _subprocess.TerminateProcess(int(self._handle), TERMINATE) + except WindowsError: + if self.wait(timeout=0.1) is None: + raise + + # + # + # + +if WINSERVICE: + _python_exe = os.path.join(sys.exec_prefix, 'python.exe') +else: + _python_exe = sys.executable + + +def set_executable(exe): + global _python_exe + _python_exe = exe + + +def is_forking(argv): + ''' + Return whether commandline indicates we are forking + ''' + if len(argv) >= 2 and argv[1] == '--billiard-fork': + assert len(argv) == 3 + os.environ["FORKED_BY_MULTIPROCESSING"] = "1" + return True + else: + return False + + +def freeze_support(): + ''' + Run code for process object if this is not the main process + ''' + if is_forking(sys.argv): + main() + sys.exit() + + +def get_command_line(): + ''' + Returns prefix of command line used for spawning a child process + ''' + if process.current_process()._identity == () and is_forking(sys.argv): + raise RuntimeError(''' + Attempt to start a new process before the current process + has finished its bootstrapping phase. + + This probably means that you have forgotten to use the proper + idiom in the main module: + + if __name__ == '__main__': + freeze_support() + ...
+ + The "freeze_support()" line can be omitted if the program + is not going to be frozen to produce a Windows executable.''' + + if getattr(sys, 'frozen', False): + return [sys.executable, '--billiard-fork'] + else: + prog = 'from billiard.forking import main; main()' + return [_python_exe, '-c', prog, '--billiard-fork'] + + +def _Django_old_layout_hack__save(): + if 'DJANGO_PROJECT_DIR' not in os.environ: + try: + settings_name = os.environ['DJANGO_SETTINGS_MODULE'] + except KeyError: + return # not using Django. + + conf_settings = sys.modules.get('django.conf.settings') + configured = conf_settings and conf_settings.configured + try: + project_name, _ = settings_name.split('.', 1) + except ValueError: + return # not modified by setup_environ + + project = __import__(project_name) + try: + project_dir = os.path.normpath(_module_parent_dir(project)) + except AttributeError: + return # dynamically generated module (no __file__) + if configured: + warnings.warn(UserWarning( + W_OLD_DJANGO_LAYOUT % os.path.realpath(project_dir) + )) + os.environ['DJANGO_PROJECT_DIR'] = project_dir + + +def _Django_old_layout_hack__load(): + try: + sys.path.append(os.environ['DJANGO_PROJECT_DIR']) + except KeyError: + pass + + +def _module_parent_dir(mod): + dir, filename = os.path.split(_module_dir(mod)) + if dir == os.curdir or not dir: + dir = os.getcwd() + return dir + + +def _module_dir(mod): + if '__init__.py' in mod.__file__: + return os.path.dirname(mod.__file__) + return mod.__file__ + + +def main(): + ''' + Run code specified by data received over pipe + ''' + global _forking_is_enabled + _Django_old_layout_hack__load() + + assert is_forking(sys.argv) + _forking_is_enabled = False + + handle = int(sys.argv[-1]) + if sys.platform == 'win32': + fd = msvcrt.open_osfhandle(handle, os.O_RDONLY) + else: + fd = handle + from_parent = os.fdopen(fd, 'rb') + + process.current_process()._inheriting = True + preparation_data = load(from_parent) + prepare(preparation_data) + # Huge hack to make logging before Process.run work.
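+ # The environment variables read below (_MP_FORK_LOGLEVEL_, + # _MP_FORK_LOGFILE_ and _MP_FORK_LOGFORMAT_) are expected to carry the + # parent process's logging configuration; a rudimentary logger is + # rebuilt from them before the Process object itself is unpickled.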
+ try: + os.environ["MP_MAIN_FILE"] = sys.modules["__main__"].__file__ + except KeyError: + pass + loglevel = os.environ.get("_MP_FORK_LOGLEVEL_") + logfile = os.environ.get("_MP_FORK_LOGFILE_") or None + format = os.environ.get("_MP_FORK_LOGFORMAT_") + if loglevel: + from billiard import util + import logging + logger = util.get_logger() + logger.setLevel(int(loglevel)) + if not logger.handlers: + logger._rudimentary_setup = True + logfile = logfile or sys.__stderr__ + if hasattr(logfile, "write"): + handler = logging.StreamHandler(logfile) + else: + handler = logging.FileHandler(logfile) + formatter = logging.Formatter( + format or util.DEFAULT_LOGGING_FORMAT, + ) + handler.setFormatter(formatter) + logger.addHandler(handler) + + self = load(from_parent) + process.current_process()._inheriting = False + + from_parent.close() + + exitcode = self._bootstrap() + exit(exitcode) + + +def get_preparation_data(name): + ''' + Return info about parent needed by child to unpickle process object + ''' + from billiard.util import _logger, _log_to_stderr + + d = dict( + name=name, + sys_path=sys.path, + sys_argv=sys.argv, + log_to_stderr=_log_to_stderr, + orig_dir=process.ORIGINAL_DIR, + authkey=process.current_process().authkey, + ) + + if _logger is not None: + d['log_level'] = _logger.getEffectiveLevel() + + if not WINEXE and not WINSERVICE: + main_path = getattr(sys.modules['__main__'], '__file__', None) + if not main_path and sys.argv[0] not in ('', '-c'): + main_path = sys.argv[0] + if main_path is not None: + if (not os.path.isabs(main_path) and + process.ORIGINAL_DIR is not None): + main_path = os.path.join(process.ORIGINAL_DIR, main_path) + d['main_path'] = os.path.normpath(main_path) + + return d + + # + # Make (Pipe)Connection picklable + # + + def reduce_connection(conn): + if not Popen.thread_is_spawning(): + raise RuntimeError( + 'By default %s objects can only be shared between processes\n' + 'using inheritance' % type(conn).__name__ + ) + return type(conn), (Popen.duplicate_for_child(conn.fileno()), + conn.readable, conn.writable) + + ForkingPickler.register(Connection, reduce_connection) + ForkingPickler.register(PipeConnection, reduce_connection) + +# +# Prepare current process +# + +old_main_modules = [] + + +def prepare(data): + ''' + Try to get current process ready to unpickle process object + ''' + old_main_modules.append(sys.modules['__main__']) + + if 'name' in data: + process.current_process().name = data['name'] + + if 'authkey' in data: + process.current_process()._authkey = data['authkey'] + + if 'log_to_stderr' in data and data['log_to_stderr']: + util.log_to_stderr() + + if 'log_level' in data: + util.get_logger().setLevel(data['log_level']) + + if 'sys_path' in data: + sys.path = data['sys_path'] + + if 'sys_argv' in data: + sys.argv = data['sys_argv'] + + if 'dir' in data: + os.chdir(data['dir']) + + if 'orig_dir' in data: + process.ORIGINAL_DIR = data['orig_dir'] + + if 'main_path' in data: + main_path = data['main_path'] + main_name = os.path.splitext(os.path.basename(main_path))[0] + if main_name == '__init__': + main_name = os.path.basename(os.path.dirname(main_path)) + + if main_name == '__main__': + main_module = sys.modules['__main__'] + main_module.__file__ = main_path + elif main_name != 'ipython': + # Main modules not actually called __main__.py may + # contain additional code that should still be executed + import imp + + if main_path is None: + dirs = None + elif os.path.basename(main_path).startswith('__init__.py'): + dirs = 
[os.path.dirname(os.path.dirname(main_path))] + else: + dirs = [os.path.dirname(main_path)] + + assert main_name not in sys.modules, main_name + file, path_name, etc = imp.find_module(main_name, dirs) + try: + # We would like to do "imp.load_module('__main__', ...)" + # here. However, that would cause 'if __name__ == + # "__main__"' clauses to be executed. + main_module = imp.load_module( + '__parents_main__', file, path_name, etc + ) + finally: + if file: + file.close() + + sys.modules['__main__'] = main_module + main_module.__name__ = '__main__' + + # Try to make the potentially picklable objects in + # sys.modules['__main__'] realize they are in the main + # module -- somewhat ugly. + for obj in main_module.__dict__.values(): + try: + if obj.__module__ == '__parents_main__': + obj.__module__ = '__main__' + except Exception: + pass diff --git a/awx/lib/site-packages/billiard/heap.py b/awx/lib/site-packages/billiard/heap.py new file mode 100644 index 0000000000..bbc0bfc809 --- /dev/null +++ b/awx/lib/site-packages/billiard/heap.py @@ -0,0 +1,254 @@ +# +# Module which supports allocation of memory from an mmap +# +# multiprocessing/heap.py +# +# Copyright (c) 2006-2008, R Oudkerk +# Licensed to PSF under a Contributor Agreement. +# +from __future__ import absolute_import + +import bisect +import mmap +import os +import sys +import threading +import itertools + +from ._ext import _billiard, win32 +from .util import Finalize, info, get_temp_dir +from .forking import assert_spawning, ForkingPickler + +__all__ = ['BufferWrapper'] + +try: + maxsize = sys.maxsize +except AttributeError: + maxsize = sys.maxint + +# +# Inheritable class which wraps an mmap, and from which blocks can be allocated +# + +if sys.platform == 'win32': + + class Arena(object): + + _counter = itertools.count() + + def __init__(self, size): + self.size = size + self.name = 'pym-%d-%d' % (os.getpid(), Arena._counter.next()) + self.buffer = mmap.mmap(-1, self.size, tagname=self.name) + assert win32.GetLastError() == 0, 'tagname already in use' + self._state = (self.size, self.name) + + def __getstate__(self): + assert_spawning(self) + return self._state + + def __setstate__(self, state): + self.size, self.name = self._state = state + self.buffer = mmap.mmap(-1, self.size, tagname=self.name) + assert win32.GetLastError() == win32.ERROR_ALREADY_EXISTS + +else: + + class Arena(object): + + _counter = itertools.count() + + def __init__(self, size, fileno=-1): + from .forking import _forking_is_enabled + self.size = size + self.fileno = fileno + if fileno == -1 and not _forking_is_enabled: + name = os.path.join( + get_temp_dir(), + 'pym-%d-%d' % (os.getpid(), self._counter.next())) + self.fileno = os.open( + name, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0600) + os.unlink(name) + os.ftruncate(self.fileno, size) + self.buffer = mmap.mmap(self.fileno, self.size) + + def reduce_arena(a): + if a.fileno == -1: + raise ValueError('Arena is unpicklable because ' + 'forking was enabled when it was created') + return Arena, (a.size, a.fileno) + + ForkingPickler.register(Arena, reduce_arena) + +# +# Class allowing allocation of chunks of memory from arenas +# + + +class Heap(object): + + _alignment = 8 + + def __init__(self, size=mmap.PAGESIZE): + self._lastpid = os.getpid() + self._lock = threading.Lock() + self._size = size + self._lengths = [] + self._len_to_seq = {} + self._start_to_block = {} + self._stop_to_block = {} + self._allocated_blocks = set() + self._arenas = [] + # list of pending blocks to free - see free() comment below
self._pending_free_blocks = [] + + @staticmethod + def _roundup(n, alignment): + # alignment must be a power of 2 + mask = alignment - 1 + return (n + mask) & ~mask + + def _malloc(self, size): + # returns a large enough block -- it might be much larger + i = bisect.bisect_left(self._lengths, size) + if i == len(self._lengths): + length = self._roundup(max(self._size, size), mmap.PAGESIZE) + self._size *= 2 + info('allocating a new mmap of length %d', length) + arena = Arena(length) + self._arenas.append(arena) + return (arena, 0, length) + else: + length = self._lengths[i] + seq = self._len_to_seq[length] + block = seq.pop() + if not seq: + del self._len_to_seq[length], self._lengths[i] + + (arena, start, stop) = block + del self._start_to_block[(arena, start)] + del self._stop_to_block[(arena, stop)] + return block + + def _free(self, block): + # free location and try to merge with neighbours + (arena, start, stop) = block + + try: + prev_block = self._stop_to_block[(arena, start)] + except KeyError: + pass + else: + start, _ = self._absorb(prev_block) + + try: + next_block = self._start_to_block[(arena, stop)] + except KeyError: + pass + else: + _, stop = self._absorb(next_block) + + block = (arena, start, stop) + length = stop - start + + try: + self._len_to_seq[length].append(block) + except KeyError: + self._len_to_seq[length] = [block] + bisect.insort(self._lengths, length) + + self._start_to_block[(arena, start)] = block + self._stop_to_block[(arena, stop)] = block + + def _absorb(self, block): + # deregister this block so it can be merged with a neighbour + (arena, start, stop) = block + del self._start_to_block[(arena, start)] + del self._stop_to_block[(arena, stop)] + + length = stop - start + seq = self._len_to_seq[length] + seq.remove(block) + if not seq: + del self._len_to_seq[length] + self._lengths.remove(length) + + return start, stop + + def _free_pending_blocks(self): + # Free all the blocks in the pending list - called with the lock held + while 1: + try: + block = self._pending_free_blocks.pop() + except IndexError: + break + self._allocated_blocks.remove(block) + self._free(block) + + def free(self, block): + # free a block returned by malloc() + # Since free() can be called asynchronously by the GC, it could happen + # that it's called while self._lock is held: in that case, + # self._lock.acquire() would deadlock (issue #12352). To avoid that, a + # trylock is used instead, and if the lock can't be acquired + # immediately, the block is added to a list of blocks to be freed + # synchronously sometimes later from malloc() or free(), by calling + # _free_pending_blocks() (appending and retrieving from a list is not + # strictly thread-safe but under cPython it's atomic thanks + # to the GIL). 
+ assert os.getpid() == self._lastpid + if not self._lock.acquire(False): + # can't acquire the lock right now, add the block to the list of + # pending blocks to free + self._pending_free_blocks.append(block) + else: + # we hold the lock + try: + self._free_pending_blocks() + self._allocated_blocks.remove(block) + self._free(block) + finally: + self._lock.release() + + def malloc(self, size): + # return a block of right size (possibly rounded up) + assert 0 <= size < maxsize + if os.getpid() != self._lastpid: + self.__init__() # reinitialize after fork + self._lock.acquire() + self._free_pending_blocks() + try: + size = self._roundup(max(size, 1), self._alignment) + (arena, start, stop) = self._malloc(size) + new_stop = start + size + if new_stop < stop: + self._free((arena, new_stop, stop)) + block = (arena, start, new_stop) + self._allocated_blocks.add(block) + return block + finally: + self._lock.release() + +# +# Class representing a chunk of an mmap -- can be inherited +# + + +class BufferWrapper(object): + + _heap = Heap() + + def __init__(self, size): + assert 0 <= size < maxsize + block = BufferWrapper._heap.malloc(size) + self._state = (block, size) + Finalize(self, BufferWrapper._heap.free, args=(block,)) + + def get_address(self): + (arena, start, stop), size = self._state + address, length = _billiard.address_of_buffer(arena.buffer) + assert size <= length + return address + start + + def get_size(self): + return self._state[1] diff --git a/awx/lib/site-packages/billiard/managers.py b/awx/lib/site-packages/billiard/managers.py new file mode 100644 index 0000000000..a116723c5e --- /dev/null +++ b/awx/lib/site-packages/billiard/managers.py @@ -0,0 +1,1170 @@ +# +# Module providing the `SyncManager` class for dealing +# with shared objects +# +# multiprocessing/managers.py +# +# Copyright (c) 2006-2008, R Oudkerk +# Licensed to PSF under a Contributor Agreement. +# +from __future__ import absolute_import +from __future__ import with_statement + +__all__ = ['BaseManager', 'SyncManager', 'BaseProxy', 'Token'] + +# +# Imports +# + +import sys +import threading +import array +import Queue + +from traceback import format_exc +from time import time as _time + +from .
import Process, current_process, active_children, Pool, util, connection +from .process import AuthenticationString +from .forking import exit, Popen, ForkingPickler +from .util import Finalize, error, info + +# +# Register some things for pickling +# + + +def reduce_array(a): + return array.array, (a.typecode, a.tostring()) +ForkingPickler.register(array.array, reduce_array) + +view_types = [type(getattr({}, name)()) + for name in ('items', 'keys', 'values')] +if view_types[0] is not list: # only needed in Py3.0 + + def rebuild_as_list(obj): + return list, (list(obj), ) + for view_type in view_types: + ForkingPickler.register(view_type, rebuild_as_list) + try: + import copyreg + except ImportError: + pass + else: + copyreg.pickle(view_type, rebuild_as_list) + +# +# Type for identifying shared objects +# + + +class Token(object): + ''' + Type to uniquely identify a shared object + ''' + __slots__ = ('typeid', 'address', 'id') + + def __init__(self, typeid, address, id): + (self.typeid, self.address, self.id) = (typeid, address, id) + + def __getstate__(self): + return (self.typeid, self.address, self.id) + + def __setstate__(self, state): + (self.typeid, self.address, self.id) = state + + def __repr__(self): + return 'Token(typeid=%r, address=%r, id=%r)' % \ + (self.typeid, self.address, self.id) + +# +# Function for communication with a manager's server process +# + + +def dispatch(c, id, methodname, args=(), kwds={}): + ''' + Send a message to manager using connection `c` and return response + ''' + c.send((id, methodname, args, kwds)) + kind, result = c.recv() + if kind == '#RETURN': + return result + raise convert_to_error(kind, result) + + +def convert_to_error(kind, result): + if kind == '#ERROR': + return result + elif kind == '#TRACEBACK': + assert type(result) is str + return RemoteError(result) + elif kind == '#UNSERIALIZABLE': + assert type(result) is str + return RemoteError('Unserializable message: %s\n' % result) + else: + return ValueError('Unrecognized message type') + + +class RemoteError(Exception): + + def __str__(self): + return ('\n' + '-' * 75 + '\n' + str(self.args[0]) + '-' * 75) + +# +# Functions for finding the method names of an object +# + + +def all_methods(obj): + ''' + Return a list of names of methods of `obj` + ''' + temp = [] + for name in dir(obj): + func = getattr(obj, name) + if callable(func): + temp.append(name) + return temp + + +def public_methods(obj): + ''' + Return a list of names of methods of `obj` which do not start with '_' + ''' + return [name for name in all_methods(obj) if name[0] != '_'] + +# +# Server which is run in a process controlled by a manager +# + + +class Server(object): + ''' + Server class which runs in a process controlled by a manager object + ''' + public = ['shutdown', 'create', 'accept_connection', 'get_methods', + 'debug_info', 'number_of_objects', 'dummy', 'incref', 'decref'] + + def __init__(self, registry, address, authkey, serializer): + assert isinstance(authkey, bytes) + self.registry = registry + self.authkey = AuthenticationString(authkey) + Listener, Client = listener_client[serializer] + + # do authentication later + self.listener = Listener(address=address, backlog=16) + self.address = self.listener.address + + self.id_to_obj = {'0': (None, ())} + self.id_to_refcount = {} + self.mutex = threading.RLock() + self.stop = 0 + + def serve_forever(self): + ''' + Run the server forever + ''' + current_process()._manager_server = self + try: + try: + while 1: + try: + c = self.listener.accept() + except (OSError,
IOError): + continue + t = threading.Thread(target=self.handle_request, args=(c,)) + t.daemon = True + t.start() + except (KeyboardInterrupt, SystemExit): + pass + finally: + self.stop = 999 + self.listener.close() + + def handle_request(self, c): + ''' + Handle a new connection + ''' + funcname = result = request = None + try: + connection.deliver_challenge(c, self.authkey) + connection.answer_challenge(c, self.authkey) + request = c.recv() + ignore, funcname, args, kwds = request + assert funcname in self.public, '%r unrecognized' % funcname + func = getattr(self, funcname) + except Exception: + msg = ('#TRACEBACK', format_exc()) + else: + try: + result = func(c, *args, **kwds) + except Exception: + msg = ('#TRACEBACK', format_exc()) + else: + msg = ('#RETURN', result) + try: + c.send(msg) + except Exception, e: + try: + c.send(('#TRACEBACK', format_exc())) + except Exception: + pass + info('Failure to send message: %r', msg) + info(' ... request was %r', request) + info(' ... exception was %r', e) + + c.close() + + def serve_client(self, conn): + ''' + Handle requests from the proxies in a particular process/thread + ''' + util.debug('starting server thread to service %r', + threading.currentThread().name) + + recv = conn.recv + send = conn.send + id_to_obj = self.id_to_obj + + while not self.stop: + + try: + methodname = obj = None + request = recv() + ident, methodname, args, kwds = request + obj, exposed, gettypeid = id_to_obj[ident] + + if methodname not in exposed: + raise AttributeError( + 'method %r of %r object is not in exposed=%r' % ( + methodname, type(obj), exposed) + ) + + function = getattr(obj, methodname) + + try: + res = function(*args, **kwds) + except Exception, e: + msg = ('#ERROR', e) + else: + typeid = gettypeid and gettypeid.get(methodname, None) + if typeid: + rident, rexposed = self.create(conn, typeid, res) + token = Token(typeid, self.address, rident) + msg = ('#PROXY', (rexposed, token)) + else: + msg = ('#RETURN', res) + + except AttributeError: + if methodname is None: + msg = ('#TRACEBACK', format_exc()) + else: + try: + fallback_func = self.fallback_mapping[methodname] + result = fallback_func( + self, conn, ident, obj, *args, **kwds + ) + msg = ('#RETURN', result) + except Exception: + msg = ('#TRACEBACK', format_exc()) + + except EOFError: + util.debug('got EOF -- exiting thread serving %r', + threading.currentThread().name) + sys.exit(0) + + except Exception: + msg = ('#TRACEBACK', format_exc()) + + try: + try: + send(msg) + except Exception, e: + send(('#UNSERIALIZABLE', repr(msg))) + except Exception, e: + info('exception in thread serving %r', + threading.currentThread().name) + info(' ... message was %r', msg) + info(' ... 
exception was %r', e) + conn.close() + sys.exit(1) + + def fallback_getvalue(self, conn, ident, obj): + return obj + + def fallback_str(self, conn, ident, obj): + return str(obj) + + def fallback_repr(self, conn, ident, obj): + return repr(obj) + + fallback_mapping = { + '__str__': fallback_str, + '__repr__': fallback_repr, + '#GETVALUE': fallback_getvalue, + } + + def dummy(self, c): + pass + + def debug_info(self, c): + ''' + Return some info --- useful to spot problems with refcounting + ''' + with self.mutex: + result = [] + keys = self.id_to_obj.keys() + keys.sort() + for ident in keys: + if ident != '0': + result.append(' %s: refcount=%s\n %s' % + (ident, self.id_to_refcount[ident], + str(self.id_to_obj[ident][0])[:75])) + return '\n'.join(result) + + def number_of_objects(self, c): + ''' + Number of shared objects + ''' + return len(self.id_to_obj) - 1 # don't count ident='0' + + def shutdown(self, c): + ''' + Shutdown this process + ''' + try: + try: + util.debug('manager received shutdown message') + c.send(('#RETURN', None)) + + if sys.stdout != sys.__stdout__: + util.debug('resetting stdout, stderr') + sys.stdout = sys.__stdout__ + sys.stderr = sys.__stderr__ + + util._run_finalizers(0) + + for p in active_children(): + util.debug('terminating a child process of manager') + p.terminate() + + for p in active_children(): + util.debug('terminating a child process of manager') + p.join() + + util._run_finalizers() + info('manager exiting with exitcode 0') + except: + if not error("Error while manager shutdown", exc_info=True): + import traceback + traceback.print_exc() + finally: + exit(0) + + def create(self, c, typeid, *args, **kwds): + ''' + Create a new shared object and return its id + ''' + with self.mutex: + callable, exposed, method_to_typeid, proxytype = \ + self.registry[typeid] + + if callable is None: + assert len(args) == 1 and not kwds + obj = args[0] + else: + obj = callable(*args, **kwds) + + if exposed is None: + exposed = public_methods(obj) + if method_to_typeid is not None: + assert type(method_to_typeid) is dict + exposed = list(exposed) + list(method_to_typeid) + + ident = '%x' % id(obj) # convert to string because xmlrpclib + # only has 32 bit signed integers + util.debug('%r callable returned object with id %r', typeid, ident) + + self.id_to_obj[ident] = (obj, set(exposed), method_to_typeid) + if ident not in self.id_to_refcount: + self.id_to_refcount[ident] = 0 + # increment the reference count immediately, to avoid + # this object being garbage collected before a Proxy + # object for it can be created. The caller of create() + # is responsible for doing a decref once the Proxy object + # has been created. 
+ self.incref(c, ident) + return ident, tuple(exposed) + + def get_methods(self, c, token): + ''' + Return the methods of the shared object indicated by token + ''' + return tuple(self.id_to_obj[token.id][1]) + + def accept_connection(self, c, name): + ''' + Spawn a new thread to serve this connection + ''' + threading.currentThread().name = name + c.send(('#RETURN', None)) + self.serve_client(c) + + def incref(self, c, ident): + with self.mutex: + self.id_to_refcount[ident] += 1 + + def decref(self, c, ident): + with self.mutex: + assert self.id_to_refcount[ident] >= 1 + self.id_to_refcount[ident] -= 1 + if self.id_to_refcount[ident] == 0: + del self.id_to_obj[ident], self.id_to_refcount[ident] + util.debug('disposing of obj with id %r', ident) + +# +# Class to represent state of a manager +# + + +class State(object): + __slots__ = ['value'] + INITIAL = 0 + STARTED = 1 + SHUTDOWN = 2 + +# +# Mapping from serializer name to Listener and Client types +# + +listener_client = { + 'pickle': (connection.Listener, connection.Client), + 'xmlrpclib': (connection.XmlListener, connection.XmlClient), +} + +# +# Definition of BaseManager +# + + +class BaseManager(object): + ''' + Base class for managers + ''' + _registry = {} + _Server = Server + + def __init__(self, address=None, authkey=None, serializer='pickle'): + if authkey is None: + authkey = current_process().authkey + self._address = address # XXX not final address if eg ('', 0) + self._authkey = AuthenticationString(authkey) + self._state = State() + self._state.value = State.INITIAL + self._serializer = serializer + self._Listener, self._Client = listener_client[serializer] + + def __reduce__(self): + return (type(self).from_address, + (self._address, self._authkey, self._serializer)) + + def get_server(self): + ''' + Return server object with serve_forever() method and address attribute + ''' + assert self._state.value == State.INITIAL + return Server(self._registry, self._address, + self._authkey, self._serializer) + + def connect(self): + ''' + Connect manager object to the server process + ''' + Listener, Client = listener_client[self._serializer] + conn = Client(self._address, authkey=self._authkey) + dispatch(conn, None, 'dummy') + self._state.value = State.STARTED + + def start(self, initializer=None, initargs=()): + ''' + Spawn a server process for this manager object + ''' + assert self._state.value == State.INITIAL + + if initializer is not None and not callable(initializer): + raise TypeError('initializer must be a callable') + + # pipe over which we will retrieve address of server + reader, writer = connection.Pipe(duplex=False) + + # spawn process which runs a server + self._process = Process( + target=type(self)._run_server, + args=(self._registry, self._address, self._authkey, + self._serializer, writer, initializer, initargs), + ) + ident = ':'.join(str(i) for i in self._process._identity) + self._process.name = type(self).__name__ + '-' + ident + self._process.start() + + # get address of server + writer.close() + self._address = reader.recv() + reader.close() + + # register a finalizer + self._state.value = State.STARTED + self.shutdown = Finalize( + self, type(self)._finalize_manager, + args=(self._process, self._address, self._authkey, + self._state, self._Client), + exitpriority=0 + ) + + @classmethod + def _run_server(cls, registry, address, authkey, serializer, writer, + initializer=None, initargs=()): + ''' + Create a server, report its address and run it + ''' + if initializer is not None: + initializer(*initargs) + 
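+        # The parent process (see start() above) is blocked on reader.recv()
+        # at this point; the server's address is reported back over `writer`
+        # below.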
+ # create server + server = cls._Server(registry, address, authkey, serializer) + + # inform parent process of the server's address + writer.send(server.address) + writer.close() + + # run the manager + info('manager serving at %r', server.address) + server.serve_forever() + + def _create(self, typeid, *args, **kwds): + ''' + Create a new shared object; return the token and exposed tuple + ''' + assert self._state.value == State.STARTED, 'server not yet started' + conn = self._Client(self._address, authkey=self._authkey) + try: + id, exposed = dispatch(conn, None, 'create', + (typeid,) + args, kwds) + finally: + conn.close() + return Token(typeid, self._address, id), exposed + + def join(self, timeout=None): + ''' + Join the manager process (if it has been spawned) + ''' + self._process.join(timeout) + + def _debug_info(self): + ''' + Return some info about the servers shared objects and connections + ''' + conn = self._Client(self._address, authkey=self._authkey) + try: + return dispatch(conn, None, 'debug_info') + finally: + conn.close() + + def _number_of_objects(self): + ''' + Return the number of shared objects + ''' + conn = self._Client(self._address, authkey=self._authkey) + try: + return dispatch(conn, None, 'number_of_objects') + finally: + conn.close() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.shutdown() + + @staticmethod + def _finalize_manager(process, address, authkey, state, _Client): + ''' + Shutdown the manager process; will be registered as a finalizer + ''' + if process.is_alive(): + info('sending shutdown message to manager') + try: + conn = _Client(address, authkey=authkey) + try: + dispatch(conn, None, 'shutdown') + finally: + conn.close() + except Exception: + pass + + process.join(timeout=0.2) + if process.is_alive(): + info('manager still alive') + if hasattr(process, 'terminate'): + info('trying to `terminate()` manager process') + process.terminate() + process.join(timeout=0.1) + if process.is_alive(): + info('manager still alive after terminate') + + state.value = State.SHUTDOWN + try: + del BaseProxy._address_to_local[address] + except KeyError: + pass + + address = property(lambda self: self._address) + + @classmethod + def register(cls, typeid, callable=None, proxytype=None, exposed=None, + method_to_typeid=None, create_method=True): + ''' + Register a typeid with the manager type + ''' + if '_registry' not in cls.__dict__: + cls._registry = cls._registry.copy() + + if proxytype is None: + proxytype = AutoProxy + + exposed = exposed or getattr(proxytype, '_exposed_', None) + + method_to_typeid = ( + method_to_typeid or + getattr(proxytype, '_method_to_typeid_', None) + ) + + if method_to_typeid: + for key, value in method_to_typeid.items(): + assert type(key) is str, '%r is not a string' % key + assert type(value) is str, '%r is not a string' % value + + cls._registry[typeid] = ( + callable, exposed, method_to_typeid, proxytype + ) + + if create_method: + def temp(self, *args, **kwds): + util.debug('requesting creation of a shared %r object', typeid) + token, exp = self._create(typeid, *args, **kwds) + proxy = proxytype( + token, self._serializer, manager=self, + authkey=self._authkey, exposed=exp + ) + conn = self._Client(token.address, authkey=self._authkey) + dispatch(conn, None, 'decref', (token.id,)) + return proxy + temp.__name__ = typeid + setattr(cls, typeid, temp) + +# +# Subclass of set which get cleared after a fork +# + + +class ProcessLocalSet(set): + + def __init__(self): + 
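+        # The set is emptied in the child after a fork, so a child process
+        # starts with no record of proxy references it did not create itself.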
util.register_after_fork(self, lambda obj: obj.clear()) + + def __reduce__(self): + return type(self), () + +# +# Definition of BaseProxy +# + + +class BaseProxy(object): + ''' + A base for proxies of shared objects + ''' + _address_to_local = {} + _mutex = util.ForkAwareThreadLock() + + def __init__(self, token, serializer, manager=None, + authkey=None, exposed=None, incref=True): + BaseProxy._mutex.acquire() + try: + tls_idset = BaseProxy._address_to_local.get(token.address, None) + if tls_idset is None: + tls_idset = util.ForkAwareLocal(), ProcessLocalSet() + BaseProxy._address_to_local[token.address] = tls_idset + finally: + BaseProxy._mutex.release() + + # self._tls is used to record the connection used by this + # thread to communicate with the manager at token.address + self._tls = tls_idset[0] + + # self._idset is used to record the identities of all shared + # objects for which the current process owns references and + # which are in the manager at token.address + self._idset = tls_idset[1] + + self._token = token + self._id = self._token.id + self._manager = manager + self._serializer = serializer + self._Client = listener_client[serializer][1] + + if authkey is not None: + self._authkey = AuthenticationString(authkey) + elif self._manager is not None: + self._authkey = self._manager._authkey + else: + self._authkey = current_process().authkey + + if incref: + self._incref() + + util.register_after_fork(self, BaseProxy._after_fork) + + def _connect(self): + util.debug('making connection to manager') + name = current_process().name + if threading.currentThread().name != 'MainThread': + name += '|' + threading.currentThread().name + conn = self._Client(self._token.address, authkey=self._authkey) + dispatch(conn, None, 'accept_connection', (name,)) + self._tls.connection = conn + + def _callmethod(self, methodname, args=(), kwds={}): + ''' + Try to call a method of the referrent and return a copy of the result + ''' + try: + conn = self._tls.connection + except AttributeError: + util.debug('thread %r does not own a connection', + threading.currentThread().name) + self._connect() + conn = self._tls.connection + + conn.send((self._id, methodname, args, kwds)) + kind, result = conn.recv() + + if kind == '#RETURN': + return result + elif kind == '#PROXY': + exposed, token = result + proxytype = self._manager._registry[token.typeid][-1] + proxy = proxytype( + token, self._serializer, manager=self._manager, + authkey=self._authkey, exposed=exposed + ) + conn = self._Client(token.address, authkey=self._authkey) + dispatch(conn, None, 'decref', (token.id,)) + return proxy + raise convert_to_error(kind, result) + + def _getvalue(self): + ''' + Get a copy of the value of the referent + ''' + return self._callmethod('#GETVALUE') + + def _incref(self): + conn = self._Client(self._token.address, authkey=self._authkey) + dispatch(conn, None, 'incref', (self._id,)) + util.debug('INCREF %r', self._token.id) + + self._idset.add(self._id) + + state = self._manager and self._manager._state + + self._close = Finalize( + self, BaseProxy._decref, + args=(self._token, self._authkey, state, + self._tls, self._idset, self._Client), + exitpriority=10 + ) + + @staticmethod + def _decref(token, authkey, state, tls, idset, _Client): + idset.discard(token.id) + + # check whether manager is still alive + if state is None or state.value == State.STARTED: + # tell manager this process no longer cares about referent + try: + util.debug('DECREF %r', token.id) + conn = _Client(token.address, authkey=authkey) + 
dispatch(conn, None, 'decref', (token.id,)) + except Exception, e: + util.debug('... decref failed %s', e) + + else: + util.debug('DECREF %r -- manager already shutdown', token.id) + + # check whether we can close this thread's connection because + # the process owns no more references to objects for this manager + if not idset and hasattr(tls, 'connection'): + util.debug('thread %r has no more proxies so closing conn', + threading.currentThread().name) + tls.connection.close() + del tls.connection + + def _after_fork(self): + self._manager = None + try: + self._incref() + except Exception, e: + # the proxy may just be for a manager which has shutdown + info('incref failed: %s', e) + + def __reduce__(self): + kwds = {} + if Popen.thread_is_spawning(): + kwds['authkey'] = self._authkey + + if getattr(self, '_isauto', False): + kwds['exposed'] = self._exposed_ + return (RebuildProxy, + (AutoProxy, self._token, self._serializer, kwds)) + else: + return (RebuildProxy, + (type(self), self._token, self._serializer, kwds)) + + def __deepcopy__(self, memo): + return self._getvalue() + + def __repr__(self): + return '<%s object, typeid %r at %s>' % \ + (type(self).__name__, self._token.typeid, '0x%x' % id(self)) + + def __str__(self): + ''' + Return representation of the referent (or a fall-back if that fails) + ''' + try: + return self._callmethod('__repr__') + except Exception: + return repr(self)[:-1] + "; '__str__()' failed>" + +# +# Function used for unpickling +# + + +def RebuildProxy(func, token, serializer, kwds): + ''' + Function used for unpickling proxy objects. + + If possible the shared object is returned, or otherwise a proxy for it. + ''' + server = getattr(current_process(), '_manager_server', None) + + if server and server.address == token.address: + return server.id_to_obj[token.id][0] + else: + incref = ( + kwds.pop('incref', True) and + not getattr(current_process(), '_inheriting', False) + ) + return func(token, serializer, incref=incref, **kwds) + +# +# Functions to create proxies and proxy types +# + + +def MakeProxyType(name, exposed, _cache={}): + ''' + Return an proxy type whose methods are given by `exposed` + ''' + exposed = tuple(exposed) + try: + return _cache[(name, exposed)] + except KeyError: + pass + + dic = {} + + for meth in exposed: + exec('''def %s(self, *args, **kwds): + return self._callmethod(%r, args, kwds)''' % (meth, meth), dic) + + ProxyType = type(name, (BaseProxy,), dic) + ProxyType._exposed_ = exposed + _cache[(name, exposed)] = ProxyType + return ProxyType + + +def AutoProxy(token, serializer, manager=None, authkey=None, + exposed=None, incref=True): + ''' + Return an auto-proxy for `token` + ''' + _Client = listener_client[serializer][1] + + if exposed is None: + conn = _Client(token.address, authkey=authkey) + try: + exposed = dispatch(conn, None, 'get_methods', (token,)) + finally: + conn.close() + + if authkey is None and manager is not None: + authkey = manager._authkey + if authkey is None: + authkey = current_process().authkey + + ProxyType = MakeProxyType('AutoProxy[%s]' % token.typeid, exposed) + proxy = ProxyType(token, serializer, manager=manager, authkey=authkey, + incref=incref) + proxy._isauto = True + return proxy + +# +# Types/callables which we will register with SyncManager +# + + +class Namespace(object): + + def __init__(self, **kwds): + self.__dict__.update(kwds) + + def __repr__(self): + items = self.__dict__.items() + temp = [] + for name, value in items: + if not name.startswith('_'): + temp.append('%s=%r' % (name, value)) + 
        temp.sort()
+        return 'Namespace(%s)' % str.join(', ', temp)
+
+
+class Value(object):
+
+    def __init__(self, typecode, value, lock=True):
+        self._typecode = typecode
+        self._value = value
+
+    def get(self):
+        return self._value
+
+    def set(self, value):
+        self._value = value
+
+    def __repr__(self):
+        return '%s(%r, %r)' % (type(self).__name__,
+                               self._typecode, self._value)
+    value = property(get, set)
+
+
+def Array(typecode, sequence, lock=True):
+    return array.array(typecode, sequence)
+
+#
+# Proxy types used by SyncManager
+#
+
+
+class IteratorProxy(BaseProxy):
+    if sys.version_info[0] == 3:
+        _exposed_ = ('__next__', 'send', 'throw', 'close')
+    else:
+        _exposed_ = ('__next__', 'next', 'send', 'throw', 'close')
+
+    def next(self, *args):
+        return self._callmethod('next', args)
+
+    def __iter__(self):
+        return self
+
+    def __next__(self, *args):
+        return self._callmethod('__next__', args)
+
+    def send(self, *args):
+        return self._callmethod('send', args)
+
+    def throw(self, *args):
+        return self._callmethod('throw', args)
+
+    def close(self, *args):
+        return self._callmethod('close', args)
+
+
+class AcquirerProxy(BaseProxy):
+    _exposed_ = ('acquire', 'release')
+
+    def acquire(self, blocking=True):
+        return self._callmethod('acquire', (blocking,))
+
+    def release(self):
+        return self._callmethod('release')
+
+    def __enter__(self):
+        return self._callmethod('acquire')
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        return self._callmethod('release')
+
+
+class ConditionProxy(AcquirerProxy):
+    _exposed_ = ('acquire', 'release', 'wait', 'notify', 'notify_all')
+
+    def wait(self, timeout=None):
+        return self._callmethod('wait', (timeout,))
+
+    def notify(self):
+        return self._callmethod('notify')
+
+    def notify_all(self):
+        return self._callmethod('notify_all')
+
+    def wait_for(self, predicate, timeout=None):
+        result = predicate()
+        if result:
+            return result
+        if timeout is not None:
+            endtime = _time() + timeout
+        else:
+            endtime = None
+            waittime = None
+        while not result:
+            if endtime is not None:
+                waittime = endtime - _time()
+                if waittime <= 0:
+                    break
+            self.wait(waittime)
+            result = predicate()
+        return result
+
+
+class EventProxy(BaseProxy):
+    _exposed_ = ('is_set', 'set', 'clear', 'wait')
+
+    def is_set(self):
+        return self._callmethod('is_set')
+
+    def set(self):
+        return self._callmethod('set')
+
+    def clear(self):
+        return self._callmethod('clear')
+
+    def wait(self, timeout=None):
+        return self._callmethod('wait', (timeout,))
+
+
+class NamespaceProxy(BaseProxy):
+    _exposed_ = ('__getattribute__', '__setattr__', '__delattr__')
+
+    def __getattr__(self, key):
+        if key[0] == '_':
+            return object.__getattribute__(self, key)
+        callmethod = object.__getattribute__(self, '_callmethod')
+        return callmethod('__getattribute__', (key,))
+
+    def __setattr__(self, key, value):
+        if key[0] == '_':
+            return object.__setattr__(self, key, value)
+        callmethod = object.__getattribute__(self, '_callmethod')
+        return callmethod('__setattr__', (key, value))
+
+    def __delattr__(self, key):
+        if key[0] == '_':
+            return object.__delattr__(self, key)
+        callmethod = object.__getattribute__(self, '_callmethod')
+        return callmethod('__delattr__', (key,))
+
+
+class ValueProxy(BaseProxy):
+    _exposed_ = ('get', 'set')
+
+    def get(self):
+        return self._callmethod('get')
+
+    def set(self, value):
+        return self._callmethod('set', (value,))
+    value = property(get, set)
+
+
+BaseListProxy = MakeProxyType('BaseListProxy', (
+    '__add__', '__contains__', '__delitem__', '__delslice__',
+    '__getitem__',
'__getslice__', '__len__', '__mul__', + '__reversed__', '__rmul__', '__setitem__', '__setslice__', + 'append', 'count', 'extend', 'index', 'insert', 'pop', 'remove', + 'reverse', 'sort', '__imul__', +)) # XXX __getslice__ and __setslice__ unneeded in Py3.0 + + +class ListProxy(BaseListProxy): + + def __iadd__(self, value): + self._callmethod('extend', (value,)) + return self + + def __imul__(self, value): + self._callmethod('__imul__', (value,)) + return self + + +DictProxy = MakeProxyType('DictProxy', ( + '__contains__', '__delitem__', '__getitem__', '__len__', + '__setitem__', 'clear', 'copy', 'get', 'has_key', 'items', + 'keys', 'pop', 'popitem', 'setdefault', 'update', 'values', +)) + + +ArrayProxy = MakeProxyType('ArrayProxy', ( + '__len__', '__getitem__', '__setitem__', '__getslice__', '__setslice__', +)) # XXX __getslice__ and __setslice__ unneeded in Py3.0 + + +PoolProxy = MakeProxyType('PoolProxy', ( + 'apply', 'apply_async', 'close', 'imap', 'imap_unordered', 'join', + 'map', 'map_async', 'starmap', 'starmap_async', 'terminate', +)) +PoolProxy._method_to_typeid_ = { + 'apply_async': 'AsyncResult', + 'map_async': 'AsyncResult', + 'starmap_async': 'AsyncResult', + 'imap': 'Iterator', + 'imap_unordered': 'Iterator', +} + +# +# Definition of SyncManager +# + + +class SyncManager(BaseManager): + ''' + Subclass of `BaseManager` which supports a number of shared object types. + + The types registered are those intended for the synchronization + of threads, plus `dict`, `list` and `Namespace`. + + The `billiard.Manager()` function creates started instances of + this class. + ''' + +SyncManager.register('Queue', Queue.Queue) +SyncManager.register('JoinableQueue', Queue.Queue) +SyncManager.register('Event', threading.Event, EventProxy) +SyncManager.register('Lock', threading.Lock, AcquirerProxy) +SyncManager.register('RLock', threading.RLock, AcquirerProxy) +SyncManager.register('Semaphore', threading.Semaphore, AcquirerProxy) +SyncManager.register('BoundedSemaphore', threading.BoundedSemaphore, + AcquirerProxy) +SyncManager.register('Condition', threading.Condition, ConditionProxy) +SyncManager.register('Pool', Pool, PoolProxy) +SyncManager.register('list', list, ListProxy) +SyncManager.register('dict', dict, DictProxy) +SyncManager.register('Value', Value, ValueProxy) +SyncManager.register('Array', Array, ArrayProxy) +SyncManager.register('Namespace', Namespace, NamespaceProxy) + +# types returned by methods of PoolProxy +SyncManager.register('Iterator', proxytype=IteratorProxy, create_method=False) +SyncManager.register('AsyncResult', create_method=False) diff --git a/awx/lib/site-packages/billiard/pool.py b/awx/lib/site-packages/billiard/pool.py new file mode 100644 index 0000000000..71c637656a --- /dev/null +++ b/awx/lib/site-packages/billiard/pool.py @@ -0,0 +1,1670 @@ +# -*- coding: utf-8 -*- +# +# Module providing the `Pool` class for managing a process pool +# +# multiprocessing/pool.py +# +# Copyright (c) 2006-2008, R Oudkerk +# Licensed to PSF under a Contributor Agreement. +# +from __future__ import absolute_import +from __future__ import with_statement + +# +# Imports +# + +import collections +import errno +import itertools +import logging +import os +import platform +import signal +import sys +import threading +import time +import Queue +import warnings + +from . import Event, Process, cpu_count +from . 
import util +from .common import reset_signals, restart_state +from .compat import get_errno +from .einfo import ExceptionInfo +from .exceptions import ( + CoroStop, + RestartFreqExceeded, + SoftTimeLimitExceeded, + Terminated, + TimeLimitExceeded, + TimeoutError, + WorkerLostError, +) +from .util import Finalize, debug + +if platform.system() == 'Windows': # pragma: no cover + # On Windows os.kill calls TerminateProcess which cannot be + # handled by # any process, so this is needed to terminate the task + # *and its children* (if any). + from ._win import kill_processtree as _kill # noqa +else: + from os import kill as _kill # noqa + + +try: + next = next +except NameError: + def next(it, *args): # noqa + try: + return it.next() + except StopIteration: + if not args: + raise + return args[0] + +PY3 = sys.version_info[0] == 3 + +try: + TIMEOUT_MAX = threading.TIMEOUT_MAX +except AttributeError: # pragma: no cover + TIMEOUT_MAX = 1e10 # noqa + + +if PY3: + _Semaphore = threading.Semaphore +else: + _Semaphore = threading._Semaphore # noqa + +# +# Constants representing the state of a pool +# + +RUN = 0 +CLOSE = 1 +TERMINATE = 2 + +# +# Constants representing the state of a job +# + +ACK = 0 +READY = 1 + +# +# Exit code constants +# +EX_OK = 0 +EX_FAILURE = 1 +EX_RECYCLE = 0x9B + + +# Signal used for soft time limits. +SIG_SOFT_TIMEOUT = getattr(signal, "SIGUSR1", None) + +# +# Miscellaneous +# + +LOST_WORKER_TIMEOUT = 10.0 +EX_OK = getattr(os, "EX_OK", 0) + +job_counter = itertools.count() + + +def mapstar(args): + return map(*args) + + +def starmapstar(args): + return list(itertools.starmap(args[0], args[1])) + + +def error(msg, *args, **kwargs): + if util._logger: + util._logger.error(msg, *args, **kwargs) + + +def stop_if_not_current(thread, timeout=None): + if thread is not threading.currentThread(): + thread.stop(timeout) + + +class LaxBoundedSemaphore(_Semaphore): + """Semaphore that checks that # release is <= # acquires, + but ignores if # releases >= value.""" + + def __init__(self, value=1, verbose=None): + if PY3: + _Semaphore.__init__(self, value) + else: + _Semaphore.__init__(self, value, verbose) + self._initial_value = value + + def grow(self): + if PY3: + cond = self._cond + else: + cond = self._Semaphore__cond + with cond: + self._initial_value += 1 + self._Semaphore__value += 1 + cond.notify() + + def shrink(self): + self._initial_value -= 1 + self.acquire() + + if PY3: + + def release(self): + cond = self._cond + with cond: + if self._value < self._initial_value: + self._value += 1 + cond.notify_all() + if __debug__: + self._note( + "%s.release: success, value=%s", self, self._value, + ) + else: + if __debug__: + self._note( + "%s.release: success, value=%s (unchanged)" % ( + self, self._value)) + + def clear(self): + while self._value < self._initial_value: + _Semaphore.release(self) + else: + + def release(self): # noqa + cond = self._Semaphore__cond + with cond: + if self._Semaphore__value < self._initial_value: + self._Semaphore__value += 1 + cond.notifyAll() + if __debug__: + self._note("%s.release: success, value=%s", + self, self._Semaphore__value) + else: + if __debug__: + self._note( + "%s.release: success, value=%s (unchanged)" % ( + self, self._Semaphore__value)) + + def clear(self): # noqa + while self._Semaphore__value < self._initial_value: + _Semaphore.release(self) + +# +# Exceptions +# + + +class MaybeEncodingError(Exception): + """Wraps possible unpickleable errors, so they can be + safely sent through the socket.""" + + def __init__(self, exc, value): + 
        self.exc = repr(exc)
+        self.value = repr(value)
+        super(MaybeEncodingError, self).__init__(self.exc, self.value)
+
+    def __repr__(self):
+        return "<MaybeEncodingError: %s>" % str(self)
+
+    def __str__(self):
+        return "Error sending result: '%r'. Reason: '%r'." % (
+            self.value, self.exc)
+
+
+class WorkersJoined(Exception):
+    """All workers have terminated."""
+
+
+def soft_timeout_sighandler(signum, frame):
+    raise SoftTimeLimitExceeded()
+
+#
+# Code run by worker processes
+#
+
+
+def worker(inqueue, outqueue, initializer=None, initargs=(),
+           maxtasks=None, sentinel=None):
+    # Re-init logging system.
+    # Workaround for http://bugs.python.org/issue6721#msg140215
+    # Python logging module uses RLock() objects which are broken after
+    # fork. This can result in a deadlock (Issue #496).
+    logger_names = logging.Logger.manager.loggerDict.keys()
+    logger_names.append(None)  # for root logger
+    for name in logger_names:
+        for handler in logging.getLogger(name).handlers:
+            handler.createLock()
+    logging._lock = threading.RLock()
+
+    pid = os.getpid()
+    assert maxtasks is None or (type(maxtasks) == int and maxtasks > 0)
+    put = outqueue.put
+    get = inqueue.get
+
+    if hasattr(inqueue, '_reader'):
+
+        def poll(timeout):
+            if inqueue._reader.poll(timeout):
+                return True, get()
+            return False, None
+    else:
+
+        def poll(timeout):  # noqa
+            try:
+                return True, get(timeout=timeout)
+            except Queue.Empty:
+                return False, None
+
+    if hasattr(inqueue, '_writer'):
+        inqueue._writer.close()
+        outqueue._reader.close()
+
+    if initializer is not None:
+        initializer(*initargs)
+
+    # Make sure all exiting signals call finally: blocks.
+    # this is important for the semaphore to be released.
+    reset_signals()
+
+    # install signal handler for soft timeouts.
+    if SIG_SOFT_TIMEOUT is not None:
+        signal.signal(SIG_SOFT_TIMEOUT, soft_timeout_sighandler)
+
+    try:
+        signal.signal(signal.SIGINT, signal.SIG_IGN)
+    except (AttributeError):
+        pass
+
+    exitcode = None
+    completed = 0
+    while maxtasks is None or (maxtasks and completed < maxtasks):
+        if sentinel is not None and sentinel.is_set():
+            debug('worker got sentinel -- exiting')
+            exitcode = EX_OK
+            break
+
+        try:
+            ready, task = poll(1.0)
+            if not ready:
+                continue
+        except (EOFError, IOError), exc:
+            if get_errno(exc) == errno.EINTR:
+                continue  # interrupted, maybe by gdb
+            debug('worker got EOFError or IOError -- exiting')
+            exitcode = EX_FAILURE
+            break
+
+        if task is None:
+            debug('worker got sentinel -- exiting')
+            exitcode = EX_OK
+            break
+
+        job, i, func, args, kwds = task
+        put((ACK, (job, i, time.time(), pid)))
+        try:
+            result = (True, func(*args, **kwds))
+        except Exception:
+            result = (False, ExceptionInfo())
+        try:
+            put((READY, (job, i, result)))
+        except Exception, exc:
+            _, _, tb = sys.exc_info()
+            try:
+                wrapped = MaybeEncodingError(exc, result[1])
+                einfo = ExceptionInfo((MaybeEncodingError, wrapped, tb))
+                put((READY, (job, i, (False, einfo))))
+            finally:
+                del(tb)
+
+        completed += 1
+    debug('worker exiting after %d tasks', completed)
+    if exitcode is None and maxtasks:
+        exitcode = EX_RECYCLE if completed == maxtasks else EX_FAILURE
+    sys.exit(exitcode or EX_OK)
+
+#
+# Class representing a process pool
+#
+
+
+class PoolThread(threading.Thread):
+
+    def __init__(self, *args, **kwargs):
+        threading.Thread.__init__(self)
+        self._state = RUN
+        self._was_started = False
+        self.daemon = True
+
+    def run(self):
+        try:
+            return self.body()
+        except RestartFreqExceeded, exc:
+            error("Thread %r crashed: %r", type(self).__name__, exc,
+                  exc_info=True)
+            _kill(os.getpid(), signal.SIGTERM)
+            sys.exit()
+        except Exception, exc:
+            error("Thread %r crashed: %r", type(self).__name__, exc,
+                  exc_info=True)
+            os._exit(1)
+
+    def start(self, *args, **kwargs):
+        self._was_started = True
+        super(PoolThread, self).start(*args, **kwargs)
+
+    def on_stop_not_started(self):
+        pass
+
+    def stop(self, timeout=None):
+        if self._was_started:
+            self.join(timeout)
+            return
+        self.on_stop_not_started()
+
+    def terminate(self):
+        self._state = TERMINATE
+
+    def close(self):
+        self._state = CLOSE
+
+
+class Supervisor(PoolThread):
+
+    def __init__(self, pool):
+        self.pool = pool
+        super(Supervisor, self).__init__()
+
+    def body(self):
+        debug('worker handler starting')
+
+        time.sleep(0.8)
+
+        pool = self.pool
+
+        try:
+            # do a burst at startup to verify that we can start
+            # our pool processes, and in that time we lower
+            # the max restart frequency.
+            prev_state = pool.restart_state
+            pool.restart_state = restart_state(10 * pool._processes, 1)
+            for _ in xrange(10):
+                if self._state == RUN and pool._state == RUN:
+                    pool._maintain_pool()
+                    time.sleep(0.1)
+
+            # Keep maintaining workers until the cache gets drained, unless
+            # the pool is terminated.
+            pool.restart_state = prev_state
+            while self._state == RUN and pool._state == RUN:
+                pool._maintain_pool()
+                time.sleep(0.8)
+        except RestartFreqExceeded:
+            pool.close()
+            pool.join()
+            raise
+        debug('worker handler exiting')
+
+
+class TaskHandler(PoolThread):
+
+    def __init__(self, taskqueue, put, outqueue, pool):
+        self.taskqueue = taskqueue
+        self.put = put
+        self.outqueue = outqueue
+        self.pool = pool
+        super(TaskHandler, self).__init__()
+
+    def body(self):
+        taskqueue = self.taskqueue
+        put = self.put
+
+        for taskseq, set_length in iter(taskqueue.get, None):
+            try:
+                i = -1
+                for i, task in enumerate(taskseq):
+                    if self._state:
+                        debug('task handler found thread._state != RUN')
+                        break
+                    try:
+                        put(task)
+                    except IOError:
+                        debug('could not put task on queue')
+                        break
+                else:
+                    if set_length:
+                        debug('doing set_length()')
+                        set_length(i + 1)
+                    continue
+                break
+            except Exception, exc:
+                print("Task Handler ERROR: %r" % (exc, ))
+                break
+        else:
+            debug('task handler got sentinel')
+
+        self.tell_others()
+
+    def tell_others(self):
+        outqueue = self.outqueue
+        put = self.put
+        pool = self.pool
+
+        try:
+            # tell result handler to finish when cache is empty
+            debug('task handler sending sentinel to result handler')
+            outqueue.put(None)
+
+            # tell workers there is no more work
+            debug('task handler sending sentinel to workers')
+            for p in pool:
+                put(None)
+        except IOError:
+            debug('task handler got IOError when sending sentinels')
+
+        debug('task handler exiting')
+
+    def on_stop_not_started(self):
+        self.tell_others()
+
+
+class TimeoutHandler(PoolThread):
+
+    def __init__(self, processes, cache, t_soft, t_hard):
+        self.processes = processes
+        self.cache = cache
+        self.t_soft = t_soft
+        self.t_hard = t_hard
+        self._it = None
+        super(TimeoutHandler, self).__init__()
+
+    def _process_by_pid(self, pid):
+        for index, process in enumerate(self.processes):
+            if process.pid == pid:
+                return process, index
+        return None, None
+
+    def on_soft_timeout(self, job):
+        debug('soft time limit exceeded for %r', job)
+        process, _index = self._process_by_pid(job._worker_pid)
+        if not process:
+            return
+
+        # Run timeout callback
+        if job._timeout_callback is not None:
+            job._timeout_callback(soft=True, timeout=job._soft_timeout)
+
+        try:
+            _kill(job._worker_pid, SIG_SOFT_TIMEOUT)
+        except OSError, exc:
+            if get_errno(exc) != errno.ESRCH:
+                raise
+
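+    # A soft limit signals the worker (SIG_SOFT_TIMEOUT raises
+    # SoftTimeLimitExceeded inside the task, see above); a hard limit
+    # terminates the worker, as on_hard_timeout() below shows.
+    # Hypothetical usage sketch (`work` and `on_timeout` are illustrative):
+    #
+    #   res = pool.apply_async(work, soft_timeout=3.0, timeout=10.0,
+    #                          timeout_callback=on_timeout)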
def on_hard_timeout(self, job): + if job.ready(): + return + debug('hard time limit exceeded for %r', job) + # Remove from cache and set return value to an exception + try: + raise TimeLimitExceeded(job._timeout) + except TimeLimitExceeded: + job._set(job._job, (False, ExceptionInfo())) + else: # pragma: no cover + pass + + # Remove from _pool + process, _index = self._process_by_pid(job._worker_pid) + + # Run timeout callback + if job._timeout_callback is not None: + job._timeout_callback(soft=False, timeout=job._timeout) + if process: + self._trywaitkill(process) + + def _trywaitkill(self, worker): + debug('timeout: sending TERM to %s', worker._name) + try: + worker.terminate() + except OSError: + pass + else: + if worker._popen.wait(timeout=0.1): + return + debug('timeout: TERM timed-out, now sending KILL to %s', worker._name) + try: + _kill(worker.pid, signal.SIGKILL) + except OSError: + pass + + def handle_timeouts(self): + cache = self.cache + t_hard, t_soft = self.t_hard, self.t_soft + dirty = set() + on_soft_timeout = self.on_soft_timeout + on_hard_timeout = self.on_hard_timeout + + def _timed_out(start, timeout): + if not start or not timeout: + return False + if time.time() >= start + timeout: + return True + + # Inner-loop + while self._state == RUN: + + # Remove dirty items not in cache anymore + if dirty: + dirty = set(k for k in dirty if k in cache) + + for i, job in cache.items(): + ack_time = job._time_accepted + soft_timeout = job._soft_timeout + if soft_timeout is None: + soft_timeout = t_soft + hard_timeout = job._timeout + if hard_timeout is None: + hard_timeout = t_hard + if _timed_out(ack_time, hard_timeout): + on_hard_timeout(job) + elif i not in dirty and _timed_out(ack_time, soft_timeout): + on_soft_timeout(job) + dirty.add(i) + yield + + def body(self): + while self._state == RUN: + try: + for _ in self.handle_timeouts(): + time.sleep(1.0) # don't spin + except CoroStop: + break + debug('timeout handler exiting') + + def handle_event(self, *args): + if self._it is None: + self._it = self.handle_timeouts() + try: + self._it.next() + except StopIteration: + self._it = None + + +class ResultHandler(PoolThread): + + def __init__(self, outqueue, get, cache, poll, + join_exited_workers, putlock, restart_state, check_timeouts): + self.outqueue = outqueue + self.get = get + self.cache = cache + self.poll = poll + self.join_exited_workers = join_exited_workers + self.putlock = putlock + self.restart_state = restart_state + self._it = None + self._shutdown_complete = False + self.check_timeouts = check_timeouts + super(ResultHandler, self).__init__() + + def on_stop_not_started(self): + # used when pool started without result handler thread. + self.finish_at_shutdown(handle_timeouts=True) + + def _process_result(self, timeout=1.0): + cache = self.cache + poll = self.poll + putlock = self.putlock + restart_state = self.restart_state + + def on_ack(job, i, time_accepted, pid): + try: + cache[job]._ack(i, time_accepted, pid) + except (KeyError, AttributeError): + # Object gone or doesn't support _ack (e.g. IMAPIterator). 
+ pass + + def on_ready(job, i, obj): + restart_state.R = 0 + try: + item = cache[job] + except KeyError: + return + if not item.ready(): + if putlock is not None: + putlock.release() + try: + item._set(i, obj) + except KeyError: + pass + + state_handlers = {ACK: on_ack, READY: on_ready} + + def on_state_change(task): + state, args = task + try: + state_handlers[state](*args) + except KeyError: + debug("Unknown job state: %s (args=%s)", state, args) + + while 1: + try: + ready, task = poll(timeout) + except (IOError, EOFError), exc: + debug('result handler got %r -- exiting', exc) + raise CoroStop() + + if self._state: + assert self._state == TERMINATE + debug('result handler found thread._state=TERMINATE') + raise CoroStop() + + if ready: + if task is None: + debug('result handler got sentinel') + raise CoroStop() + on_state_change(task) + if timeout != 0: # blocking + break + else: + break + + yield + + def handle_event(self, *args): + if self._state == RUN: + if self._it is None: + self._it = self._process_result(0) # non-blocking + try: + self._it.next() + except (StopIteration, CoroStop): + self._it = None + + def body(self): + debug('result handler starting') + try: + while self._state == RUN: + try: + for _ in self._process_result(1.0): # blocking + pass + except CoroStop: + break + finally: + self.finish_at_shutdown() + + def finish_at_shutdown(self, handle_timeouts=False): + self._shutdown_complete = True + get = self.get + outqueue = self.outqueue + cache = self.cache + poll = self.poll + join_exited_workers = self.join_exited_workers + putlock = self.putlock + restart_state = self.restart_state + check_timeouts = self.check_timeouts + + def on_ack(job, i, time_accepted, pid): + try: + cache[job]._ack(i, time_accepted, pid) + except (KeyError, AttributeError): + # Object gone or doesn't support _ack (e.g. IMAPIterator). + pass + + def on_ready(job, i, obj): + restart_state.R = 0 + try: + item = cache[job] + except KeyError: + return + if not item.ready(): + if putlock is not None: + putlock.release() + try: + item._set(i, obj) + except KeyError: + pass + + state_handlers = {ACK: on_ack, READY: on_ready} + + def on_state_change(task): + state, args = task + try: + state_handlers[state](*args) + except KeyError: + debug("Unknown job state: %s (args=%s)", state, args) + + time_terminate = None + while cache and self._state != TERMINATE: + if check_timeouts is not None: + check_timeouts() + try: + ready, task = poll(1.0) + except (IOError, EOFError), exc: + debug('result handler got %r -- exiting', exc) + return + + if ready: + if task is None: + debug('result handler ignoring extra sentinel') + continue + + on_state_change(task) + try: + join_exited_workers(shutdown=True) + except WorkersJoined: + now = time.time() + if not time_terminate: + time_terminate = now + else: + if now - time_terminate > 5.0: + debug('result handler exiting: timed out') + break + debug('result handler: all workers terminated, ' + 'timeout in %ss', + abs(min(now - time_terminate - 5.0, 0))) + + if hasattr(outqueue, '_reader'): + debug('ensuring that outqueue is not full') + # If we don't make room available in outqueue then + # attempts to add the sentinel (None) to outqueue may + # block. There is guaranteed to be no more than 2 sentinels. 
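+            # (One sentinel can come from TaskHandler.tell_others() and one
+            # from _terminate_pool(); each puts None on the outqueue.)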
+ try: + for i in range(10): + if not outqueue._reader.poll(): + break + get() + except (IOError, EOFError): + pass + + debug('result handler exiting: len(cache)=%s, thread._state=%s', + len(cache), self._state) + + +class Pool(object): + ''' + Class which supports an async version of applying functions to arguments. + ''' + Process = Process + Supervisor = Supervisor + TaskHandler = TaskHandler + TimeoutHandler = TimeoutHandler + ResultHandler = ResultHandler + SoftTimeLimitExceeded = SoftTimeLimitExceeded + + def __init__(self, processes=None, initializer=None, initargs=(), + maxtasksperchild=None, timeout=None, soft_timeout=None, + lost_worker_timeout=LOST_WORKER_TIMEOUT, + max_restarts=None, max_restart_freq=1, + on_process_up=None, + on_process_down=None, + on_timeout_set=None, + on_timeout_cancel=None, + threads=True, + semaphore=None, + putlocks=False, + allow_restart=False): + self._setup_queues() + self._taskqueue = Queue.Queue() + self._cache = {} + self._state = RUN + self.timeout = timeout + self.soft_timeout = soft_timeout + self._maxtasksperchild = maxtasksperchild + self._initializer = initializer + self._initargs = initargs + self.lost_worker_timeout = lost_worker_timeout or LOST_WORKER_TIMEOUT + self.on_process_up = on_process_up + self.on_process_down = on_process_down + self.on_timeout_set = on_timeout_set + self.on_timeout_cancel = on_timeout_cancel + self.threads = threads + self.readers = {} + self.allow_restart = allow_restart + # Contains processes that we have terminated, + # and that the supervisor should not raise an error for. + self.signalled = set() + + if soft_timeout and SIG_SOFT_TIMEOUT is None: + warnings.warn(UserWarning( + "Soft timeouts are not supported: " + "on this platform: It does not have the SIGUSR1 signal.", + )) + soft_timeout = None + + if processes is None: + try: + processes = cpu_count() + except NotImplementedError: + processes = 1 + self._processes = processes + self.max_restarts = max_restarts or round(processes * 100) + self.restart_state = restart_state(max_restarts, max_restart_freq or 1) + + if initializer is not None and not callable(initializer): + raise TypeError('initializer must be a callable') + + self._pool = [] + self._poolctrl = {} + self.putlocks = putlocks + self._putlock = semaphore or LaxBoundedSemaphore(self._processes) + for i in range(processes): + self._create_worker_process(i) + + self._worker_handler = self.Supervisor(self) + if threads: + self._worker_handler.start() + else: + self.readers.update( + dict((w._popen.sentinel, self.maintain_pool) + for w in self._pool)) + + self._task_handler = self.TaskHandler(self._taskqueue, + self._quick_put, + self._outqueue, + self._pool) + if threads: + self._task_handler.start() + + # Thread killing timedout jobs. + self._timeout_handler = self.TimeoutHandler( + self._pool, self._cache, + self.soft_timeout, self.timeout, + ) + self._timeout_handler_mutex = threading.Lock() + self._timeout_handler_started = False + if self.timeout is not None or self.soft_timeout is not None: + self._start_timeout_handler() + + # If running without threads, we need to check for timeouts + # while waiting for unfinished work at shutdown. + check_timeouts = None + if not threads: + check_timeouts = self._timeout_handler.handle_event + + # Thread processing results in the outqueue. 
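+        # When threads=False this handler thread is never started; its
+        # handle_event() is registered in self.readers instead (see the
+        # else branch below) so an external event loop can drive it.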
+ self._result_handler = self.ResultHandler( + self._outqueue, self._quick_get, self._cache, + self._poll_result, self._join_exited_workers, + self._putlock, self.restart_state, check_timeouts, + ) + + if threads: + self._result_handler.start() + else: + self.readers[self._outqueue._reader] = \ + self._result_handler.handle_event + + self._terminate = Finalize( + self, self._terminate_pool, + args=(self._taskqueue, self._inqueue, self._outqueue, + self._pool, self._worker_handler, self._task_handler, + self._result_handler, self._cache, + self._timeout_handler), + exitpriority=15, + ) + + def _create_worker_process(self, i): + sentinel = Event() if self.allow_restart else None + w = self.Process( + target=worker, + args=( + self._inqueue, self._outqueue, + self._initializer, self._initargs, + self._maxtasksperchild, + sentinel + ), + ) + self._pool.append(w) + w.name = w.name.replace('Process', 'PoolWorker') + w.daemon = True + w.index = i + w.start() + if self.on_process_up: + self.on_process_up(w) + self._poolctrl[w.pid] = sentinel + return w + + def _join_exited_workers(self, shutdown=False): + """Cleanup after any worker processes which have exited due to + reaching their specified lifetime. Returns True if any workers were + cleaned up. + """ + now = None + # The worker may have published a result before being terminated, + # but we have no way to accurately tell if it did. So we wait for + # _lost_worker_timeout seconds before we mark the job with + # WorkerLostError. + for job in [job for job in self._cache.values() + if not job.ready() and job._worker_lost]: + now = now or time.time() + lost_time, lost_ret = job._worker_lost + if now - lost_time > job._lost_worker_timeout: + try: + raise WorkerLostError( + "Worker exited prematurely (exitcode: %r)." % ( + lost_ret, )) + except WorkerLostError: + exc_info = ExceptionInfo() + job._set(None, (False, exc_info)) + else: # pragma: no cover + pass + + if shutdown and not len(self._pool): + raise WorkersJoined() + + cleaned, exitcodes = {}, {} + for i in reversed(range(len(self._pool))): + worker = self._pool[i] + if worker.exitcode is not None: + # worker exited + debug('Supervisor: cleaning up worker %d', i) + worker.join() + debug('Supervisor: worked %d joined', i) + cleaned[worker.pid] = worker + exitcodes[worker.pid] = worker.exitcode + if worker.exitcode not in (EX_OK, EX_RECYCLE): + error('Process %r pid:%r exited with exitcode %r' % ( + worker.name, worker.pid, worker.exitcode)) + del self._pool[i] + del self._poolctrl[worker.pid] + if cleaned: + for job in self._cache.values(): + for worker_pid in job.worker_pids(): + if worker_pid in cleaned and not job.ready(): + if worker_pid in self.signalled: + try: + raise Terminated(-exitcodes[worker_pid]) + except Terminated: + job._set(None, (False, ExceptionInfo())) + else: + job._worker_lost = (time.time(), + exitcodes[worker_pid]) + break + for worker in cleaned.itervalues(): + if self.on_process_down: + self.on_process_down(worker) + return exitcodes.values() + return [] + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + return self.terminate() + + def shrink(self, n=1): + for i, worker in enumerate(self._iterinactive()): + self._processes -= 1 + if self._putlock: + self._putlock.shrink() + worker.terminate() + if i == n - 1: + return + raise ValueError("Can't shrink pool. 
All processes busy!") + + def grow(self, n=1): + for i in xrange(n): + #assert len(self._pool) == self._processes + self._processes += 1 + if self._putlock: + self._putlock.grow() + + def _iterinactive(self): + for worker in self._pool: + if not self._worker_active(worker): + yield worker + raise StopIteration() + + def _worker_active(self, worker): + for job in self._cache.values(): + if worker.pid in job.worker_pids(): + return True + return False + + def _repopulate_pool(self, exitcodes): + """Bring the number of pool processes up to the specified number, + for use after reaping workers which have exited. + """ + for i in range(self._processes - len(self._pool)): + if self._state != RUN: + return + try: + if exitcodes and exitcodes[i] not in (EX_OK, EX_RECYCLE): + self.restart_state.step() + except IndexError: + self.restart_state.step() + self._create_worker_process(self._avail_index()) + debug('added worker') + + def _avail_index(self): + assert len(self._pool) < self._processes + indices = set(p.index for p in self._pool) + return next(i for i in range(self._processes) if i not in indices) + + def did_start_ok(self): + return not self._join_exited_workers() + + def _maintain_pool(self): + """"Clean up any exited workers and start replacements for them. + """ + joined = self._join_exited_workers() + self._repopulate_pool(joined) + for i in range(len(joined)): + if self._putlock is not None: + self._putlock.release() + + def maintain_pool(self, *args, **kwargs): + if self._worker_handler._state == RUN and self._state == RUN: + try: + self._maintain_pool() + except RestartFreqExceeded: + self.close() + self.join() + raise + + def _setup_queues(self): + from billiard.queues import SimpleQueue + self._inqueue = SimpleQueue() + self._outqueue = SimpleQueue() + self._quick_put = self._inqueue._writer.send + self._quick_get = self._outqueue._reader.recv + + def _poll_result(timeout): + if self._outqueue._reader.poll(timeout): + return True, self._quick_get() + return False, None + self._poll_result = _poll_result + + def _start_timeout_handler(self): + # ensure more than one thread does not start the timeout handler + # thread at once. + if self.threads: + with self._timeout_handler_mutex: + if not self._timeout_handler_started: + self._timeout_handler_started = True + self._timeout_handler.start() + + def apply(self, func, args=(), kwds={}): + ''' + Equivalent of `func(*args, **kwargs)`. + ''' + if self._state == RUN: + return self.apply_async(func, args, kwds).get() + + def starmap(self, func, iterable, chunksize=None): + ''' + Like `map()` method but the elements of the `iterable` are expected to + be iterables as well and will be unpacked as arguments. Hence + `func` and (a, b) becomes func(a, b). + ''' + if self._state == RUN: + return self._map_async(func, iterable, + starmapstar, chunksize).get() + + def starmap_async(self, func, iterable, chunksize=None, + callback=None, error_callback=None): + ''' + Asynchronous version of `starmap()` method. + ''' + if self._state == RUN: + return self._map_async(func, iterable, starmapstar, chunksize, + callback, error_callback) + + def map(self, func, iterable, chunksize=None): + ''' + Apply `func` to each element in `iterable`, collecting the results + in a list that is returned. + ''' + if self._state == RUN: + return self.map_async(func, iterable, chunksize).get() + + def imap(self, func, iterable, chunksize=1, lost_worker_timeout=None): + ''' + Equivalent of `map()` -- can be MUCH slower than `Pool.map()`. 
+ ''' + if self._state != RUN: + return + lost_worker_timeout = lost_worker_timeout or self.lost_worker_timeout + if chunksize == 1: + result = IMapIterator(self._cache, + lost_worker_timeout=lost_worker_timeout) + self._taskqueue.put(( + ((result._job, i, func, (x,), {}) + for i, x in enumerate(iterable)), + result._set_length, + )) + return result + else: + assert chunksize > 1 + task_batches = Pool._get_tasks(func, iterable, chunksize) + result = IMapIterator(self._cache, + lost_worker_timeout=lost_worker_timeout) + self._taskqueue.put(( + ((result._job, i, mapstar, (x,), {}) + for i, x in enumerate(task_batches)), + result._set_length, + )) + return (item for chunk in result for item in chunk) + + def imap_unordered(self, func, iterable, chunksize=1, + lost_worker_timeout=None): + ''' + Like `imap()` method but ordering of results is arbitrary. + ''' + if self._state != RUN: + return + lost_worker_timeout = lost_worker_timeout or self.lost_worker_timeout + if chunksize == 1: + result = IMapUnorderedIterator( + self._cache, lost_worker_timeout=lost_worker_timeout, + ) + self._taskqueue.put(( + ((result._job, i, func, (x,), {}) + for i, x in enumerate(iterable)), + result._set_length, + )) + return result + else: + assert chunksize > 1 + task_batches = Pool._get_tasks(func, iterable, chunksize) + result = IMapUnorderedIterator( + self._cache, lost_worker_timeout=lost_worker_timeout, + ) + self._taskqueue.put(( + ((result._job, i, mapstar, (x,), {}) + for i, x in enumerate(task_batches)), + result._set_length, + )) + return (item for chunk in result for item in chunk) + + def apply_async(self, func, args=(), kwds={}, + callback=None, error_callback=None, accept_callback=None, + timeout_callback=None, waitforslot=None, + soft_timeout=None, timeout=None, lost_worker_timeout=None, + callbacks_propagate=()): + ''' + Asynchronous equivalent of `apply()` method. + + Callback is called when the functions return value is ready. + The accept callback is called when the job is accepted to be executed. + + Simplified the flow is like this: + + >>> if accept_callback: + ... accept_callback() + >>> retval = func(*args, **kwds) + >>> if callback: + ... callback(retval) + + ''' + if self._state != RUN: + return + soft_timeout = soft_timeout or self.soft_timeout + timeout = timeout or self.timeout + lost_worker_timeout = lost_worker_timeout or self.lost_worker_timeout + if soft_timeout and SIG_SOFT_TIMEOUT is None: + warnings.warn(UserWarning( + "Soft timeouts are not supported: " + "on this platform: It does not have the SIGUSR1 signal.", + )) + soft_timeout = None + if waitforslot is None: + waitforslot = self.putlocks + if waitforslot and self._putlock is not None and self._state == RUN: + self._putlock.acquire() + if self._state == RUN: + result = ApplyResult( + self._cache, callback, accept_callback, timeout_callback, + error_callback, soft_timeout, timeout, lost_worker_timeout, + on_timeout_set=self.on_timeout_set, + on_timeout_cancel=self.on_timeout_cancel, + callbacks_propagate=callbacks_propagate, + ) + if timeout or soft_timeout: + # start the timeout handler thread when required. 
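+                # (Started lazily so pools that never use time limits do
+                # not pay for an extra thread.)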
+ self._start_timeout_handler() + if self.threads: + self._taskqueue.put(([(result._job, None, + func, args, kwds)], None)) + else: + self._quick_put((result._job, None, func, args, kwds)) + return result + + def terminate_job(self, pid, sig=None): + self.signalled.add(pid) + _kill(pid, sig or signal.SIGTERM) + + def map_async(self, func, iterable, chunksize=None, + callback=None, error_callback=None): + ''' + Asynchronous equivalent of `map()` method. + ''' + return self._map_async( + func, iterable, mapstar, chunksize, callback, error_callback, + ) + + def _map_async(self, func, iterable, mapper, chunksize=None, + callback=None, error_callback=None): + ''' + Helper function to implement map, starmap and their async counterparts. + ''' + if self._state != RUN: + return + if not hasattr(iterable, '__len__'): + iterable = list(iterable) + + if chunksize is None: + chunksize, extra = divmod(len(iterable), len(self._pool) * 4) + if extra: + chunksize += 1 + if len(iterable) == 0: + chunksize = 0 + + task_batches = Pool._get_tasks(func, iterable, chunksize) + result = MapResult(self._cache, chunksize, len(iterable), callback, + error_callback=error_callback) + self._taskqueue.put((((result._job, i, mapper, (x,), {}) + for i, x in enumerate(task_batches)), None)) + return result + + @staticmethod + def _get_tasks(func, it, size): + it = iter(it) + while 1: + x = tuple(itertools.islice(it, size)) + if not x: + return + yield (func, x) + + def __reduce__(self): + raise NotImplementedError( + 'pool objects cannot be passed between processes or pickled', + ) + + def close(self): + debug('closing pool') + if self._state == RUN: + self._state = CLOSE + if self._putlock: + self._putlock.clear() + self._worker_handler.close() + self._taskqueue.put(None) + stop_if_not_current(self._worker_handler) + + def terminate(self): + debug('terminating pool') + self._state = TERMINATE + self._worker_handler.terminate() + self._terminate() + + def join(self): + assert self._state in (CLOSE, TERMINATE) + debug('joining worker handler') + stop_if_not_current(self._worker_handler) + debug('joining task handler') + stop_if_not_current(self._task_handler) + debug('joining result handler') + stop_if_not_current(self._result_handler) + debug('result handler joined') + for i, p in enumerate(self._pool): + debug('joining worker %s/%s (%r)', i, len(self._pool), p) + p.join() + + def restart(self): + for e in self._poolctrl.itervalues(): + e.set() + + @staticmethod + def _help_stuff_finish(inqueue, task_handler, size): + # task_handler may be blocked trying to put items on inqueue + debug('removing tasks from inqueue until task handler finished') + inqueue._rlock.acquire() + while task_handler.is_alive() and inqueue._reader.poll(): + inqueue._reader.recv() + time.sleep(0) + + @classmethod + def _terminate_pool(cls, taskqueue, inqueue, outqueue, pool, + worker_handler, task_handler, + result_handler, cache, timeout_handler): + + # this is guaranteed to only be called once + debug('finalizing pool') + + worker_handler.terminate() + + task_handler.terminate() + taskqueue.put(None) # sentinel + + debug('helping task handler/workers to finish') + cls._help_stuff_finish(inqueue, task_handler, len(pool)) + + result_handler.terminate() + outqueue.put(None) # sentinel + + if timeout_handler is not None: + timeout_handler.terminate() + + # Terminate workers which haven't already finished + if pool and hasattr(pool[0], 'terminate'): + debug('terminating workers') + for p in pool: + if p.exitcode is None: + p.terminate() + + 
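+        # With the queues unblocked and stray workers terminated above, the
+        # handler threads can now be joined, followed by the workers.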
debug('joining task handler') + task_handler.stop() + + debug('joining result handler') + result_handler.stop() + + if timeout_handler is not None: + debug('joining timeout handler') + timeout_handler.stop(TIMEOUT_MAX) + + if pool and hasattr(pool[0], 'terminate'): + debug('joining pool workers') + for p in pool: + if p.is_alive(): + # worker has not yet exited + debug('cleaning up worker %d', p.pid) + p.join() + debug('pool workers joined') +DynamicPool = Pool + +# +# Class whose instances are returned by `Pool.apply_async()` +# + + +class ApplyResult(object): + _worker_lost = None + + def __init__(self, cache, callback, accept_callback=None, + timeout_callback=None, error_callback=None, soft_timeout=None, + timeout=None, lost_worker_timeout=LOST_WORKER_TIMEOUT, + on_timeout_set=None, on_timeout_cancel=None, + callbacks_propagate=()): + self._mutex = threading.Lock() + self._event = threading.Event() + self._job = job_counter.next() + self._cache = cache + self._callback = callback + self._accept_callback = accept_callback + self._error_callback = error_callback + self._timeout_callback = timeout_callback + self._timeout = timeout + self._soft_timeout = soft_timeout + self._lost_worker_timeout = lost_worker_timeout + self._on_timeout_set = on_timeout_set + self._on_timeout_cancel = on_timeout_cancel + self._callbacks_propagate = callbacks_propagate or () + + self._accepted = False + self._worker_pid = None + self._time_accepted = None + cache[self._job] = self + + def ready(self): + return self._event.isSet() + + def accepted(self): + return self._accepted + + def successful(self): + assert self.ready() + return self._success + + def worker_pids(self): + return filter(None, [self._worker_pid]) + + def wait(self, timeout=None): + self._event.wait(timeout) + + def get(self, timeout=None): + self.wait(timeout) + if not self.ready(): + raise TimeoutError + if self._success: + return self._value + else: + raise self._value.exception + + def safe_apply_callback(self, fun, *args): + if fun: + try: + fun(*args) + except self._callbacks_propagate: + raise + except Exception, exc: + error("Pool callback raised exception: %r", exc, + exc_info=True) + + def _set(self, i, obj): + with self._mutex: + if self._on_timeout_cancel: + self._on_timeout_cancel(self) + self._success, self._value = obj + self._event.set() + if self._accepted: + self._cache.pop(self._job, None) + + # apply callbacks last + if self._callback and self._success: + self.safe_apply_callback( + self._callback, self._value) + if (self._value is not None and + self._error_callback and not self._success): + self.safe_apply_callback( + self._error_callback, self._value) + + def _ack(self, i, time_accepted, pid): + with self._mutex: + self._accepted = True + self._time_accepted = time_accepted + self._worker_pid = pid + if self.ready(): + self._cache.pop(self._job, None) + if self._on_timeout_set: + self._on_timeout_set(self, self._soft_timeout, self._timeout) + if self._accept_callback: + self.safe_apply_callback( + self._accept_callback, pid, time_accepted) + +# +# Class whose instances are returned by `Pool.map_async()` +# + + +class MapResult(ApplyResult): + + def __init__(self, cache, chunksize, length, callback, error_callback): + ApplyResult.__init__( + self, cache, callback, error_callback=error_callback, + ) + self._success = True + self._length = length + self._value = [None] * length + self._accepted = [False] * length + self._worker_pid = [None] * length + self._time_accepted = [None] * length + self._chunksize = chunksize + if 
chunksize <= 0: + self._number_left = 0 + self._event.set() + del cache[self._job] + else: + self._number_left = length // chunksize + bool(length % chunksize) + + def _set(self, i, success_result): + success, result = success_result + if success: + self._value[i * self._chunksize:(i + 1) * self._chunksize] = result + self._number_left -= 1 + if self._number_left == 0: + if self._callback: + self._callback(self._value) + if self._accepted: + self._cache.pop(self._job, None) + self._event.set() + else: + self._success = False + self._value = result + if self._error_callback: + self._error_callback(self._value) + if self._accepted: + self._cache.pop(self._job, None) + self._event.set() + + def _ack(self, i, time_accepted, pid): + start = i * self._chunksize + stop = (i + 1) * self._chunksize + for j in range(start, stop): + self._accepted[j] = True + self._worker_pid[j] = pid + self._time_accepted[j] = time_accepted + if self.ready(): + self._cache.pop(self._job, None) + + def accepted(self): + return all(self._accepted) + + def worker_pids(self): + return filter(None, self._worker_pid) + +# +# Class whose instances are returned by `Pool.imap()` +# + + +class IMapIterator(object): + _worker_lost = None + + def __init__(self, cache, lost_worker_timeout=LOST_WORKER_TIMEOUT): + self._cond = threading.Condition(threading.Lock()) + self._job = job_counter.next() + self._cache = cache + self._items = collections.deque() + self._index = 0 + self._length = None + self._ready = False + self._unsorted = {} + self._worker_pids = [] + self._lost_worker_timeout = lost_worker_timeout + cache[self._job] = self + + def __iter__(self): + return self + + def next(self, timeout=None): + with self._cond: + try: + item = self._items.popleft() + except IndexError: + if self._index == self._length: + self._ready = True + raise StopIteration + self._cond.wait(timeout) + try: + item = self._items.popleft() + except IndexError: + if self._index == self._length: + self._ready = True + raise StopIteration + raise TimeoutError + + success, value = item + if success: + return value + raise Exception(value) + + __next__ = next # XXX + + def _set(self, i, obj): + with self._cond: + if self._index == i: + self._items.append(obj) + self._index += 1 + while self._index in self._unsorted: + obj = self._unsorted.pop(self._index) + self._items.append(obj) + self._index += 1 + self._cond.notify() + else: + self._unsorted[i] = obj + + if self._index == self._length: + self._ready = True + del self._cache[self._job] + + def _set_length(self, length): + with self._cond: + self._length = length + if self._index == self._length: + self._ready = True + self._cond.notify() + del self._cache[self._job] + + def _ack(self, i, time_accepted, pid): + self._worker_pids.append(pid) + + def ready(self): + return self._ready + + def worker_pids(self): + return self._worker_pids + +# +# Class whose instances are returned by `Pool.imap_unordered()` +# + + +class IMapUnorderedIterator(IMapIterator): + + def _set(self, i, obj): + with self._cond: + self._items.append(obj) + self._index += 1 + self._cond.notify() + if self._index == self._length: + self._ready = True + del self._cache[self._job] + +# +# +# + + +class ThreadPool(Pool): + + from billiard.dummy import Process as DummyProcess + Process = DummyProcess + + def __init__(self, processes=None, initializer=None, initargs=()): + Pool.__init__(self, processes, initializer, initargs) + + def _setup_queues(self): + self._inqueue = Queue.Queue() + self._outqueue = Queue.Queue() + self._quick_put = 
self._inqueue.put + self._quick_get = self._outqueue.get + + def _poll_result(timeout): + try: + return True, self._quick_get(timeout=timeout) + except Queue.Empty: + return False, None + self._poll_result = _poll_result + + @staticmethod + def _help_stuff_finish(inqueue, task_handler, size): + # put sentinels at head of inqueue to make workers finish + with inqueue.not_empty: + inqueue.queue.clear() + inqueue.queue.extend([None] * size) + inqueue.not_empty.notify_all() diff --git a/awx/lib/site-packages/billiard/process.py b/awx/lib/site-packages/billiard/process.py new file mode 100644 index 0000000000..79c850bd4b --- /dev/null +++ b/awx/lib/site-packages/billiard/process.py @@ -0,0 +1,330 @@ +# +# Module providing the `Process` class which emulates `threading.Thread` +# +# multiprocessing/process.py +# +# Copyright (c) 2006-2008, R Oudkerk +# Licensed to PSF under a Contributor Agreement. +# +from __future__ import absolute_import + +__all__ = ['Process', 'current_process', 'active_children'] + +# +# Imports +# + +import os +import sys +import signal +import itertools +import binascii + +from .compat import bytes +try: + from _weakrefset import WeakSet +except ImportError: + WeakSet = None # noqa + +try: + ORIGINAL_DIR = os.path.abspath(os.getcwd()) +except OSError: + ORIGINAL_DIR = None + +# +# Public functions +# + + +def current_process(): + ''' + Return process object representing the current process + ''' + return _current_process + + +def _cleanup(): + # check for processes which have finished + for p in list(_current_process._children): + if p._popen.poll() is not None: + _current_process._children.discard(p) + + +def active_children(_cleanup=_cleanup): + ''' + Return list of process objects corresponding to live child processes + ''' + try: + _cleanup() + except TypeError: + # called after gc collect so _cleanup does not exist anymore + return [] + return list(_current_process._children) + + +class Process(object): + ''' + Process objects represent activity that is run in a separate process + + The class is analagous to `threading.Thread` + ''' + _Popen = None + + def __init__(self, group=None, target=None, name=None, + args=(), kwargs={}, daemon=None, **_kw): + assert group is None, 'group argument must be None for now' + count = _current_process._counter.next() + self._identity = _current_process._identity + (count,) + self._authkey = _current_process._authkey + if daemon is not None: + self._daemonic = daemon + else: + self._daemonic = _current_process._daemonic + self._tempdir = _current_process._tempdir + self._semprefix = _current_process._semprefix + self._unlinkfd = _current_process._unlinkfd + self._parent_pid = os.getpid() + self._popen = None + self._target = target + self._args = tuple(args) + self._kwargs = dict(kwargs) + self._name = ( + name or type(self).__name__ + '-' + + ':'.join(str(i) for i in self._identity) + ) + if _dangling is not None: + _dangling.add(self) + + def run(self): + ''' + Method to be run in sub-process; can be overridden in sub-class + ''' + if self._target: + self._target(*self._args, **self._kwargs) + + def start(self): + ''' + Start child process + ''' + assert self._popen is None, 'cannot start a process twice' + assert self._parent_pid == os.getpid(), \ + 'can only start a process object created by current process' + assert not _current_process._daemonic, \ + 'daemonic processes are not allowed to have children' + _cleanup() + if self._Popen is not None: + Popen = self._Popen + else: + from .forking import Popen + self._popen = 
Popen(self)
+        self._sentinel = self._popen.sentinel
+        _current_process._children.add(self)
+
+    def terminate(self):
+        '''
+        Terminate process; sends SIGTERM signal or uses TerminateProcess()
+        '''
+        self._popen.terminate()
+
+    def join(self, timeout=None):
+        '''
+        Wait until child process terminates
+        '''
+        assert self._parent_pid == os.getpid(), 'can only join a child process'
+        assert self._popen is not None, 'can only join a started process'
+        res = self._popen.wait(timeout)
+        if res is not None:
+            _current_process._children.discard(self)
+
+    def is_alive(self):
+        '''
+        Return whether process is alive
+        '''
+        if self is _current_process:
+            return True
+        assert self._parent_pid == os.getpid(), 'can only test a child process'
+        if self._popen is None:
+            return False
+        self._popen.poll()
+        return self._popen.returncode is None
+
+    def _get_name(self):
+        return self._name
+
+    def _set_name(self, value):
+        assert isinstance(value, basestring), 'name must be a string'
+        self._name = value
+    name = property(_get_name, _set_name)
+
+    def _get_daemon(self):
+        return self._daemonic
+
+    def _set_daemon(self, daemonic):
+        assert self._popen is None, 'process has already started'
+        self._daemonic = daemonic
+    daemon = property(_get_daemon, _set_daemon)
+
+    def _get_authkey(self):
+        return self._authkey
+
+    def _set_authkey(self, authkey):
+        self._authkey = AuthenticationString(authkey)
+    authkey = property(_get_authkey, _set_authkey)
+
+    @property
+    def exitcode(self):
+        '''
+        Return exit code of process or `None` if it has yet to stop
+        '''
+        if self._popen is None:
+            return self._popen
+        return self._popen.poll()
+
+    @property
+    def ident(self):
+        '''
+        Return identifier (PID) of process or `None` if it has yet to start
+        '''
+        if self is _current_process:
+            return os.getpid()
+        else:
+            return self._popen and self._popen.pid
+
+    pid = ident
+
+    @property
+    def sentinel(self):
+        '''
+        Return a file descriptor (Unix) or handle (Windows) suitable for
+        waiting for process termination.
+        '''
+        try:
+            return self._sentinel
+        except AttributeError:
+            raise ValueError("process not started")
+
+    def __repr__(self):
+        if self is _current_process:
+            status = 'started'
+        elif self._parent_pid != os.getpid():
+            status = 'unknown'
+        elif self._popen is None:
+            status = 'initial'
+        else:
+            if self._popen.poll() is not None:
+                status = self.exitcode
+            else:
+                status = 'started'
+
+        if type(status) is int:
+            if status == 0:
+                status = 'stopped'
+            else:
+                status = 'stopped[%s]' % _exitcode_to_name.get(status, status)
+
+        return '<%s(%s, %s%s)>' % (type(self).__name__, self._name,
+                                   status, self._daemonic and ' daemon' or '')
+
+    ##
+
+    def _bootstrap(self):
+        from . 
import util + global _current_process + + try: + self._children = set() + self._counter = itertools.count(1) + if sys.stdin is not None: + try: + sys.stdin.close() + sys.stdin = open(os.devnull) + except (OSError, ValueError): + pass + old_process = _current_process + _current_process = self + try: + util._finalizer_registry.clear() + util._run_after_forkers() + finally: + # delay finalization of the old process object until after + # _run_after_forkers() is executed + del old_process + util.info('child process %s calling self.run()', self.pid) + try: + self.run() + exitcode = 0 + finally: + util._exit_function() + except SystemExit, e: + if not e.args: + exitcode = 1 + elif isinstance(e.args[0], int): + exitcode = e.args[0] + else: + sys.stderr.write(str(e.args[0]) + '\n') + sys.stderr.flush() + exitcode = 0 if isinstance(e.args[0], str) else 1 + except: + exitcode = 1 + if not util.error('Process %s', self.name, exc_info=True): + import traceback + sys.stderr.write('Process %s:\n' % self.name) + traceback.print_exc() + finally: + util.info('process %s exiting with exitcode %d', + self.pid, exitcode) + sys.stdout.flush() + sys.stderr.flush() + return exitcode + +# +# We subclass bytes to avoid accidental transmission of auth keys over network +# + + +class AuthenticationString(bytes): + + def __reduce__(self): + from .forking import Popen + + if not Popen.thread_is_spawning(): + raise TypeError( + 'Pickling an AuthenticationString object is ' + 'disallowed for security reasons') + return AuthenticationString, (bytes(self),) + +# +# Create object representing the main process +# + + +class _MainProcess(Process): + + def __init__(self): + self._identity = () + self._daemonic = False + self._name = 'MainProcess' + self._parent_pid = None + self._popen = None + self._counter = itertools.count(1) + self._children = set() + self._authkey = AuthenticationString(os.urandom(32)) + self._tempdir = None + self._semprefix = 'mp-' + binascii.hexlify( + os.urandom(4)).decode('ascii') + self._unlinkfd = None + +_current_process = _MainProcess() +del _MainProcess + +# +# Give names to some return codes +# + +_exitcode_to_name = {} + +for name, signum in signal.__dict__.items(): + if name[:3] == 'SIG' and '_' not in name: + _exitcode_to_name[-signum] = name + +_dangling = WeakSet() if WeakSet is not None else None diff --git a/awx/lib/site-packages/billiard/queues.py b/awx/lib/site-packages/billiard/queues.py new file mode 100644 index 0000000000..a44c2b47a7 --- /dev/null +++ b/awx/lib/site-packages/billiard/queues.py @@ -0,0 +1,354 @@ +# +# Module implementing queues +# +# multiprocessing/queues.py +# +# Copyright (c) 2006-2008, R Oudkerk +# Licensed to PSF under a Contributor Agreement. +# +from __future__ import absolute_import +from __future__ import with_statement + +__all__ = ['Queue', 'SimpleQueue', 'JoinableQueue'] + +import sys +import os +import threading +import collections +import time +import weakref +import errno + +from Queue import Empty, Full + +from . 
import Pipe +from ._ext import _billiard +from .compat import get_errno +from .synchronize import Lock, BoundedSemaphore, Semaphore, Condition +from .util import debug, error, info, Finalize, register_after_fork +from .forking import assert_spawning + + +class Queue(object): + ''' + Queue type using a pipe, buffer and thread + ''' + def __init__(self, maxsize=0): + if maxsize <= 0: + maxsize = _billiard.SemLock.SEM_VALUE_MAX + self._maxsize = maxsize + self._reader, self._writer = Pipe(duplex=False) + self._rlock = Lock() + self._opid = os.getpid() + if sys.platform == 'win32': + self._wlock = None + else: + self._wlock = Lock() + self._sem = BoundedSemaphore(maxsize) + # For use by concurrent.futures + self._ignore_epipe = False + + self._after_fork() + + if sys.platform != 'win32': + register_after_fork(self, Queue._after_fork) + + def __getstate__(self): + assert_spawning(self) + return (self._ignore_epipe, self._maxsize, self._reader, self._writer, + self._rlock, self._wlock, self._sem, self._opid) + + def __setstate__(self, state): + (self._ignore_epipe, self._maxsize, self._reader, self._writer, + self._rlock, self._wlock, self._sem, self._opid) = state + self._after_fork() + + def _after_fork(self): + debug('Queue._after_fork()') + self._notempty = threading.Condition(threading.Lock()) + self._buffer = collections.deque() + self._thread = None + self._jointhread = None + self._joincancelled = False + self._closed = False + self._close = None + self._send = self._writer.send + self._recv = self._reader.recv + self._poll = self._reader.poll + + def put(self, obj, block=True, timeout=None): + assert not self._closed + if not self._sem.acquire(block, timeout): + raise Full + + with self._notempty: + if self._thread is None: + self._start_thread() + self._buffer.append(obj) + self._notempty.notify() + + def get(self, block=True, timeout=None): + if block and timeout is None: + with self._rlock: + res = self._recv() + self._sem.release() + return res + + else: + if block: + deadline = time.time() + timeout + if not self._rlock.acquire(block, timeout): + raise Empty + try: + if block: + timeout = deadline - time.time() + if timeout < 0 or not self._poll(timeout): + raise Empty + elif not self._poll(): + raise Empty + res = self._recv() + self._sem.release() + return res + finally: + self._rlock.release() + + def qsize(self): + # Raises NotImplementedError on Mac OSX because + # of broken sem_getvalue() + return self._maxsize - self._sem._semlock._get_value() + + def empty(self): + return not self._poll() + + def full(self): + return self._sem._semlock._is_zero() + + def get_nowait(self): + return self.get(False) + + def put_nowait(self, obj): + return self.put(obj, False) + + def close(self): + self._closed = True + self._reader.close() + if self._close: + self._close() + + def join_thread(self): + debug('Queue.join_thread()') + assert self._closed + if self._jointhread: + self._jointhread() + + def cancel_join_thread(self): + debug('Queue.cancel_join_thread()') + self._joincancelled = True + try: + self._jointhread.cancel() + except AttributeError: + pass + + def _start_thread(self): + debug('Queue._start_thread()') + + # Start thread which transfers data from buffer to pipe + self._buffer.clear() + self._thread = threading.Thread( + target=Queue._feed, + args=(self._buffer, self._notempty, self._send, + self._wlock, self._writer.close, self._ignore_epipe), + name='QueueFeederThread' + ) + self._thread.daemon = True + + debug('doing self._thread.start()') + self._thread.start() + 
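
A minimal usage sketch for the Queue type implemented above, assuming the vendored package imports as `billiard` and re-exports Process and Queue at the top level the way multiprocessing does:

    from billiard import Process, Queue

    def worker(q):
        # put() only appends to the in-memory buffer; the feeder thread
        # started in _start_thread() is what writes the object to the pipe.
        q.put('result from child')

    if __name__ == '__main__':
        q = Queue()
        p = Process(target=worker, args=(q,))
        p.start()
        print(q.get())  # blocks until the child's feeder thread flushes
        p.join()
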
debug('... done self._thread.start()') + + # On process exit we will wait for data to be flushed to pipe. + # + # However, if this process created the queue then all + # processes which use the queue will be descendants of this + # process. Therefore waiting for the queue to be flushed + # is pointless once all the child processes have been joined. + created_by_this_process = (self._opid == os.getpid()) + if not self._joincancelled and not created_by_this_process: + self._jointhread = Finalize( + self._thread, Queue._finalize_join, + [weakref.ref(self._thread)], + exitpriority=-5 + ) + + # Send sentinel to the thread queue object when garbage collected + self._close = Finalize( + self, Queue._finalize_close, + [self._buffer, self._notempty], + exitpriority=10 + ) + + @staticmethod + def _finalize_join(twr): + debug('joining queue thread') + thread = twr() + if thread is not None: + thread.join() + debug('... queue thread joined') + else: + debug('... queue thread already dead') + + @staticmethod + def _finalize_close(buffer, notempty): + debug('telling queue thread to quit') + with notempty: + buffer.append(_sentinel) + notempty.notify() + + @staticmethod + def _feed(buffer, notempty, send, writelock, close, ignore_epipe): + debug('starting thread to feed data to pipe') + from .util import is_exiting + + ncond = notempty + nwait = notempty.wait + bpopleft = buffer.popleft + sentinel = _sentinel + if sys.platform != 'win32': + wlock = writelock + else: + wlock = None + + try: + while 1: + with ncond: + if not buffer: + nwait() + try: + while 1: + obj = bpopleft() + if obj is sentinel: + debug('feeder thread got sentinel -- exiting') + close() + return + + if wlock is None: + send(obj) + else: + with wlock: + send(obj) + except IndexError: + pass + except Exception, exc: + if ignore_epipe and get_errno(exc) == errno.EPIPE: + return + # Since this runs in a daemon thread the resources it uses + # may be become unusable while the process is cleaning up. + # We ignore errors which happen after the process has + # started to cleanup. + try: + if is_exiting(): + info('error in queue thread: %r', exc, exc_info=True) + else: + if not error('error in queue thread: %r', exc, + exc_info=True): + import traceback + traceback.print_exc() + except Exception: + pass + +_sentinel = object() + + +class JoinableQueue(Queue): + ''' + A queue type which also supports join() and task_done() methods + + Note that if you do not call task_done() for each finished task then + eventually the counter's semaphore may overflow causing Bad Things + to happen. 
+ ''' + + def __init__(self, maxsize=0): + Queue.__init__(self, maxsize) + self._unfinished_tasks = Semaphore(0) + self._cond = Condition() + + def __getstate__(self): + return Queue.__getstate__(self) + (self._cond, self._unfinished_tasks) + + def __setstate__(self, state): + Queue.__setstate__(self, state[:-2]) + self._cond, self._unfinished_tasks = state[-2:] + + def put(self, obj, block=True, timeout=None): + assert not self._closed + if not self._sem.acquire(block, timeout): + raise Full + + with self._notempty: + with self._cond: + if self._thread is None: + self._start_thread() + self._buffer.append(obj) + self._unfinished_tasks.release() + self._notempty.notify() + + def task_done(self): + with self._cond: + if not self._unfinished_tasks.acquire(False): + raise ValueError('task_done() called too many times') + if self._unfinished_tasks._semlock._is_zero(): + self._cond.notify_all() + + def join(self): + with self._cond: + if not self._unfinished_tasks._semlock._is_zero(): + self._cond.wait() + + +class SimpleQueue(object): + ''' + Simplified Queue type -- really just a locked pipe + ''' + + def __init__(self): + self._reader, self._writer = Pipe(duplex=False) + self._rlock = Lock() + self._poll = self._reader.poll + if sys.platform == 'win32': + self._wlock = None + else: + self._wlock = Lock() + self._make_methods() + + def empty(self): + return not self._poll() + + def __getstate__(self): + assert_spawning(self) + return (self._reader, self._writer, self._rlock, self._wlock) + + def __setstate__(self, state): + (self._reader, self._writer, self._rlock, self._wlock) = state + self._make_methods() + + def _make_methods(self): + recv = self._reader.recv + rlock = self._rlock + + def get(): + with rlock: + return recv() + self.get = get + + if self._wlock is None: + # writes to a message oriented win32 pipe are atomic + self.put = self._writer.send + else: + send = self._writer.send + wlock = self._wlock + + def put(obj): + with wlock: + return send(obj) + self.put = put diff --git a/awx/lib/site-packages/billiard/reduction.py b/awx/lib/site-packages/billiard/reduction.py new file mode 100644 index 0000000000..c04a90db0d --- /dev/null +++ b/awx/lib/site-packages/billiard/reduction.py @@ -0,0 +1,200 @@ +# +# Module to allow connection and socket objects to be transferred +# between processes +# +# multiprocessing/reduction.py +# +# Copyright (c) 2006-2008, R Oudkerk +# Licensed to PSF under a Contributor Agreement. +# + +from __future__ import absolute_import + +__all__ = [] + +import os +import sys +import socket +import threading + +if sys.version_info[0] == 3: + from multiprocessing.connection import Client, Listener +else: + from billiard._connection import Client, Listener # noqa + +from . import current_process +from ._ext import _billiard, win32 +from .forking import Popen, duplicate, close, ForkingPickler +from .util import register_after_fork, debug, sub_debug + +if not(sys.platform == 'win32' or hasattr(_billiard, 'recvfd')): + raise ImportError('pickling of connections not supported') + + +# globals set later +_listener = None +_lock = None +_cache = set() + +# +# Platform specific definitions +# + +if sys.platform == 'win32': + # XXX Should this subprocess import be here? 
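
The platform-specific send_handle()/recv_handle() pair defined just below is what lets one process hand an open file descriptor to another. A minimal sketch of the intended usage on a Unix host, assuming billiard exposes Pipe and Process at the top level:

    import os
    from billiard import Pipe, Process
    from billiard.reduction import recv_handle, send_handle

    def child(conn):
        fd = recv_handle(conn)  # the descriptor arrives as a duplicate
        os.write(fd, b'written by the child process\n')
        os.close(fd)

    if __name__ == '__main__':
        parent_conn, child_conn = Pipe()
        p = Process(target=child, args=(child_conn,))
        p.start()
        with open('handle-demo.txt', 'wb') as f:
            # keep the file open until its descriptor has been sent
            send_handle(parent_conn, f.fileno(), p.pid)
        p.join()
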
+ import _subprocess # noqa + + def send_handle(conn, handle, destination_pid): + process_handle = win32.OpenProcess( + win32.PROCESS_ALL_ACCESS, False, destination_pid + ) + try: + new_handle = duplicate(handle, process_handle) + conn.send(new_handle) + finally: + close(process_handle) + + def recv_handle(conn): + return conn.recv() + +else: + def send_handle(conn, handle, destination_pid): # noqa + _billiard.sendfd(conn.fileno(), handle) + + def recv_handle(conn): # noqa + return _billiard.recvfd(conn.fileno()) + +# +# Support for a per-process server thread which caches pickled handles +# + + +def _reset(obj): + global _lock, _listener, _cache + for h in _cache: + close(h) + _cache.clear() + _lock = threading.Lock() + _listener = None + +_reset(None) +register_after_fork(_reset, _reset) + + +def _get_listener(): + global _listener + + if _listener is None: + _lock.acquire() + try: + if _listener is None: + debug('starting listener and thread for sending handles') + _listener = Listener(authkey=current_process().authkey) + t = threading.Thread(target=_serve) + t.daemon = True + t.start() + finally: + _lock.release() + + return _listener + + +def _serve(): + from .util import is_exiting, sub_warning + + while 1: + try: + conn = _listener.accept() + handle_wanted, destination_pid = conn.recv() + _cache.remove(handle_wanted) + send_handle(conn, handle_wanted, destination_pid) + close(handle_wanted) + conn.close() + except: + if not is_exiting(): + sub_warning('thread for sharing handles raised exception', + exc_info=True) + +# +# Functions to be used for pickling/unpickling objects with handles +# + + +def reduce_handle(handle): + if Popen.thread_is_spawning(): + return (None, Popen.duplicate_for_child(handle), True) + dup_handle = duplicate(handle) + _cache.add(dup_handle) + sub_debug('reducing handle %d', handle) + return (_get_listener().address, dup_handle, False) + + +def rebuild_handle(pickled_data): + address, handle, inherited = pickled_data + if inherited: + return handle + sub_debug('rebuilding handle %d', handle) + conn = Client(address, authkey=current_process().authkey) + conn.send((handle, os.getpid())) + new_handle = recv_handle(conn) + conn.close() + return new_handle + +# +# Register `_billiard.Connection` with `ForkingPickler` +# + + +def reduce_connection(conn): + rh = reduce_handle(conn.fileno()) + return rebuild_connection, (rh, conn.readable, conn.writable) + + +def rebuild_connection(reduced_handle, readable, writable): + handle = rebuild_handle(reduced_handle) + return _billiard.Connection( + handle, readable=readable, writable=writable + ) + +ForkingPickler.register(_billiard.Connection, reduce_connection) + +# +# Register `socket.socket` with `ForkingPickler` +# + + +def fromfd(fd, family, type_, proto=0): + s = socket.fromfd(fd, family, type_, proto) + if s.__class__ is not socket.socket: + s = socket.socket(_sock=s) + return s + + +def reduce_socket(s): + reduced_handle = reduce_handle(s.fileno()) + return rebuild_socket, (reduced_handle, s.family, s.type, s.proto) + + +def rebuild_socket(reduced_handle, family, type_, proto): + fd = rebuild_handle(reduced_handle) + _sock = fromfd(fd, family, type_, proto) + close(fd) + return _sock +ForkingPickler.register(socket.socket, reduce_socket) + +# +# Register `_billiard.PipeConnection` with `ForkingPickler` +# + +if sys.platform == 'win32': + + def reduce_pipe_connection(conn): + rh = reduce_handle(conn.fileno()) + return rebuild_pipe_connection, (rh, conn.readable, conn.writable) + + def 
rebuild_pipe_connection(reduced_handle, readable, writable): + handle = rebuild_handle(reduced_handle) + return _billiard.PipeConnection( + handle, readable=readable, writable=writable + ) + ForkingPickler.register(_billiard.PipeConnection, reduce_pipe_connection) diff --git a/awx/lib/site-packages/billiard/sharedctypes.py b/awx/lib/site-packages/billiard/sharedctypes.py new file mode 100644 index 0000000000..fc676d0ebb --- /dev/null +++ b/awx/lib/site-packages/billiard/sharedctypes.py @@ -0,0 +1,244 @@ +# +# Module which supports allocation of ctypes objects from shared memory +# +# multiprocessing/sharedctypes.py +# +# Copyright (c) 2006-2008, R Oudkerk +# Licensed to PSF under a Contributor Agreement. +# +from __future__ import absolute_import + +import ctypes +import weakref + +from . import heap, RLock +from .forking import assert_spawning, ForkingPickler + +__all__ = ['RawValue', 'RawArray', 'Value', 'Array', 'copy', 'synchronized'] + +typecode_to_type = { + 'c': ctypes.c_char, 'u': ctypes.c_wchar, + 'b': ctypes.c_byte, 'B': ctypes.c_ubyte, + 'h': ctypes.c_short, 'H': ctypes.c_ushort, + 'i': ctypes.c_int, 'I': ctypes.c_uint, + 'l': ctypes.c_long, 'L': ctypes.c_ulong, + 'f': ctypes.c_float, 'd': ctypes.c_double +} + + +def _new_value(type_): + size = ctypes.sizeof(type_) + wrapper = heap.BufferWrapper(size) + return rebuild_ctype(type_, wrapper, None) + + +def RawValue(typecode_or_type, *args): + ''' + Returns a ctypes object allocated from shared memory + ''' + type_ = typecode_to_type.get(typecode_or_type, typecode_or_type) + obj = _new_value(type_) + ctypes.memset(ctypes.addressof(obj), 0, ctypes.sizeof(obj)) + obj.__init__(*args) + return obj + + +def RawArray(typecode_or_type, size_or_initializer): + ''' + Returns a ctypes array allocated from shared memory + ''' + type_ = typecode_to_type.get(typecode_or_type, typecode_or_type) + if isinstance(size_or_initializer, (int, long)): + type_ = type_ * size_or_initializer + obj = _new_value(type_) + ctypes.memset(ctypes.addressof(obj), 0, ctypes.sizeof(obj)) + return obj + else: + type_ = type_ * len(size_or_initializer) + result = _new_value(type_) + result.__init__(*size_or_initializer) + return result + + +def Value(typecode_or_type, *args, **kwds): + ''' + Return a synchronization wrapper for a Value + ''' + lock = kwds.pop('lock', None) + if kwds: + raise ValueError('unrecognized keyword argument(s): %s' % kwds.keys()) + obj = RawValue(typecode_or_type, *args) + if lock is False: + return obj + if lock in (True, None): + lock = RLock() + if not hasattr(lock, 'acquire'): + raise AttributeError("'%r' has no method 'acquire'" % lock) + return synchronized(obj, lock) + + +def Array(typecode_or_type, size_or_initializer, **kwds): + ''' + Return a synchronization wrapper for a RawArray + ''' + lock = kwds.pop('lock', None) + if kwds: + raise ValueError('unrecognized keyword argument(s): %s' % kwds.keys()) + obj = RawArray(typecode_or_type, size_or_initializer) + if lock is False: + return obj + if lock in (True, None): + lock = RLock() + if not hasattr(lock, 'acquire'): + raise AttributeError("'%r' has no method 'acquire'" % lock) + return synchronized(obj, lock) + + +def copy(obj): + new_obj = _new_value(type(obj)) + ctypes.pointer(new_obj)[0] = obj + return new_obj + + +def synchronized(obj, lock=None): + assert not isinstance(obj, SynchronizedBase), 'object already synchronized' + + if isinstance(obj, ctypes._SimpleCData): + return Synchronized(obj, lock) + elif isinstance(obj, ctypes.Array): + if obj._type_ is ctypes.c_char: + 
return SynchronizedString(obj, lock) + return SynchronizedArray(obj, lock) + else: + cls = type(obj) + try: + scls = class_cache[cls] + except KeyError: + names = [field[0] for field in cls._fields_] + d = dict((name, make_property(name)) for name in names) + classname = 'Synchronized' + cls.__name__ + scls = class_cache[cls] = type(classname, (SynchronizedBase,), d) + return scls(obj, lock) + +# +# Functions for pickling/unpickling +# + + +def reduce_ctype(obj): + assert_spawning(obj) + if isinstance(obj, ctypes.Array): + return rebuild_ctype, (obj._type_, obj._wrapper, obj._length_) + else: + return rebuild_ctype, (type(obj), obj._wrapper, None) + + +def rebuild_ctype(type_, wrapper, length): + if length is not None: + type_ = type_ * length + ForkingPickler.register(type_, reduce_ctype) + obj = type_.from_address(wrapper.get_address()) + obj._wrapper = wrapper + return obj + +# +# Function to create properties +# + + +def make_property(name): + try: + return prop_cache[name] + except KeyError: + d = {} + exec(template % ((name, ) * 7), d) + prop_cache[name] = d[name] + return d[name] + +template = ''' +def get%s(self): + self.acquire() + try: + return self._obj.%s + finally: + self.release() +def set%s(self, value): + self.acquire() + try: + self._obj.%s = value + finally: + self.release() +%s = property(get%s, set%s) +''' + +prop_cache = {} +class_cache = weakref.WeakKeyDictionary() + +# +# Synchronized wrappers +# + + +class SynchronizedBase(object): + + def __init__(self, obj, lock=None): + self._obj = obj + self._lock = lock or RLock() + self.acquire = self._lock.acquire + self.release = self._lock.release + + def __reduce__(self): + assert_spawning(self) + return synchronized, (self._obj, self._lock) + + def get_obj(self): + return self._obj + + def get_lock(self): + return self._lock + + def __repr__(self): + return '<%s wrapper for %s>' % (type(self).__name__, self._obj) + + +class Synchronized(SynchronizedBase): + value = make_property('value') + + +class SynchronizedArray(SynchronizedBase): + + def __len__(self): + return len(self._obj) + + def __getitem__(self, i): + self.acquire() + try: + return self._obj[i] + finally: + self.release() + + def __setitem__(self, i, value): + self.acquire() + try: + self._obj[i] = value + finally: + self.release() + + def __getslice__(self, start, stop): + self.acquire() + try: + return self._obj[start:stop] + finally: + self.release() + + def __setslice__(self, start, stop, values): + self.acquire() + try: + self._obj[start:stop] = values + finally: + self.release() + + +class SynchronizedString(SynchronizedArray): + value = make_property('value') + raw = make_property('raw') diff --git a/awx/lib/site-packages/billiard/synchronize.py b/awx/lib/site-packages/billiard/synchronize.py new file mode 100644 index 0000000000..c487699888 --- /dev/null +++ b/awx/lib/site-packages/billiard/synchronize.py @@ -0,0 +1,446 @@ +# +# Module implementing synchronization primitives +# +# multiprocessing/synchronize.py +# +# Copyright (c) 2006-2008, R Oudkerk +# Licensed to PSF under a Contributor Agreement. 
+#
+from __future__ import absolute_import
+
+__all__ = [
+    'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Condition', 'Event',
+]
+
+import itertools
+import os
+import signal
+import sys
+import threading
+
+
+from time import time as _time
+
+from ._ext import _billiard, ensure_SemLock
+from .process import current_process
+from .util import Finalize, register_after_fork, debug
+from .forking import assert_spawning, Popen
+from .compat import bytes, closerange
+
+# Try to import the mp.synchronize module cleanly, if it fails
+# raise ImportError for platforms lacking a working sem_open implementation.
+# See issue 3770
+ensure_SemLock()
+
+#
+# Constants
+#
+
+RECURSIVE_MUTEX, SEMAPHORE = range(2)
+SEM_VALUE_MAX = _billiard.SemLock.SEM_VALUE_MAX
+
+try:
+    sem_unlink = _billiard.SemLock.sem_unlink
+except AttributeError:
+    sem_unlink = None
+
+#
+# Base class for semaphores and mutexes; wraps `_billiard.SemLock`
+#
+
+
+def _semname(sl):
+    try:
+        return sl.name
+    except AttributeError:
+        pass
+
+
+class SemLock(object):
+    _counter = itertools.count()
+
+    def __init__(self, kind, value, maxvalue):
+        from .forking import _forking_is_enabled
+        unlink_immediately = _forking_is_enabled or sys.platform == 'win32'
+        if sem_unlink:
+            sl = self._semlock = _billiard.SemLock(
+                kind, value, maxvalue, self._make_name(), unlink_immediately)
+        else:
+            sl = self._semlock = _billiard.SemLock(kind, value, maxvalue)
+
+        debug('created semlock with handle %s', sl.handle)
+        self._make_methods()
+
+        if sem_unlink:
+
+            if sys.platform != 'win32':
+                def _after_fork(obj):
+                    obj._semlock._after_fork()
+                register_after_fork(self, _after_fork)
+
+            if _semname(self._semlock) is not None:
+                # We only get here if we are on Unix with forking
+                # disabled.  When the object is garbage collected or the
+                # process shuts down we unlink the semaphore name
+                Finalize(self, sem_unlink, (self._semlock.name,),
+                         exitpriority=0)
+                # In case of abnormal termination unlink semaphore name
+                _cleanup_semaphore_if_leaked(self._semlock.name)
+
+    def _make_methods(self):
+        self.acquire = self._semlock.acquire
+        self.release = self._semlock.release
+
+    def __enter__(self):
+        return self._semlock.__enter__()
+
+    def __exit__(self, *args):
+        return self._semlock.__exit__(*args)
+
+    def __getstate__(self):
+        assert_spawning(self)
+        sl = self._semlock
+        state = (Popen.duplicate_for_child(sl.handle), sl.kind, sl.maxvalue)
+        try:
+            state += (sl.name, )
+        except AttributeError:
+            pass
+        return state
+
+    def __setstate__(self, state):
+        self._semlock = _billiard.SemLock._rebuild(*state)
+        debug('recreated blocker with handle %r', state[0])
+        self._make_methods()
+
+    @staticmethod
+    def _make_name():
+        return '/%s-%s-%s' % (current_process()._semprefix,
+                              os.getpid(), SemLock._counter.next())
+
+
+class Semaphore(SemLock):
+
+    def __init__(self, value=1):
+        SemLock.__init__(self, SEMAPHORE, value, SEM_VALUE_MAX)
+
+    def get_value(self):
+        return self._semlock._get_value()
+
+    def __repr__(self):
+        try:
+            value = self._semlock._get_value()
+        except Exception:
+            value = 'unknown'
+        return '<Semaphore(value=%s)>' % value
+
+
+class BoundedSemaphore(Semaphore):
+
+    def __init__(self, value=1):
+        SemLock.__init__(self, SEMAPHORE, value, value)
+
+    def __repr__(self):
+        try:
+            value = self._semlock._get_value()
+        except Exception:
+            value = 'unknown'
+        return '<BoundedSemaphore(value=%s, maxvalue=%s)>' % \
+            (value, self._semlock.maxvalue)
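
A minimal usage sketch for the semaphore types above, assuming the package re-exports them at the top level as multiprocessing does; acquire()/release() and the context-manager protocol all delegate to the underlying SemLock:

    import time
    from billiard import BoundedSemaphore, Process

    def worker(sem, i):
        with sem:  # __enter__/__exit__ come straight from the SemLock
            print('worker %d holds one of two slots' % i)
            time.sleep(0.1)

    if __name__ == '__main__':
        sem = BoundedSemaphore(2)  # value == maxvalue == 2
        procs = [Process(target=worker, args=(sem, i)) for i in range(4)]
        for p in procs:
            p.start()
        for p in procs:
            p.join()
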
+
+
+class Lock(SemLock):
+    '''
+    Non-recursive lock.
+    '''
+
+    def __init__(self):
+        SemLock.__init__(self, SEMAPHORE, 1, 1)
+
+    def __repr__(self):
+        try:
+            if self._semlock._is_mine():
+                name = current_process().name
+                if threading.currentThread().name != 'MainThread':
+                    name += '|' + threading.currentThread().name
+            elif self._semlock._get_value() == 1:
+                name = 'None'
+            elif self._semlock._count() > 0:
+                name = 'SomeOtherThread'
+            else:
+                name = 'SomeOtherProcess'
+        except Exception:
+            name = 'unknown'
+        return '<Lock(owner=%s)>' % name
+
+
+class RLock(SemLock):
+    '''
+    Recursive lock
+    '''
+
+    def __init__(self):
+        SemLock.__init__(self, RECURSIVE_MUTEX, 1, 1)
+
+    def __repr__(self):
+        try:
+            if self._semlock._is_mine():
+                name = current_process().name
+                if threading.currentThread().name != 'MainThread':
+                    name += '|' + threading.currentThread().name
+                count = self._semlock._count()
+            elif self._semlock._get_value() == 1:
+                name, count = 'None', 0
+            elif self._semlock._count() > 0:
+                name, count = 'SomeOtherThread', 'nonzero'
+            else:
+                name, count = 'SomeOtherProcess', 'nonzero'
+        except Exception:
+            name, count = 'unknown', 'unknown'
+        return '<RLock(%s, %s)>' % (name, count)
+
+
+class Condition(object):
+    '''
+    Condition variable
+    '''
+
+    def __init__(self, lock=None):
+        self._lock = lock or RLock()
+        self._sleeping_count = Semaphore(0)
+        self._woken_count = Semaphore(0)
+        self._wait_semaphore = Semaphore(0)
+        self._make_methods()
+
+    def __getstate__(self):
+        assert_spawning(self)
+        return (self._lock, self._sleeping_count,
+                self._woken_count, self._wait_semaphore)
+
+    def __setstate__(self, state):
+        (self._lock, self._sleeping_count,
+         self._woken_count, self._wait_semaphore) = state
+        self._make_methods()
+
+    def __enter__(self):
+        return self._lock.__enter__()
+
+    def __exit__(self, *args):
+        return self._lock.__exit__(*args)
+
+    def _make_methods(self):
+        self.acquire = self._lock.acquire
+        self.release = self._lock.release
+
+    def __repr__(self):
+        try:
+            num_waiters = (self._sleeping_count._semlock._get_value() -
+                           self._woken_count._semlock._get_value())
+        except Exception:
+            num_waiters = 'unknown'
+        return '<Condition(%s, %s)>' % (self._lock, num_waiters)
+
+    def wait(self, timeout=None):
+        assert self._lock._semlock._is_mine(), \
+            'must acquire() condition before using wait()'
+
+        # indicate that this thread is going to sleep
+        self._sleeping_count.release()
+
+        # release lock
+        count = self._lock._semlock._count()
+        for i in xrange(count):
+            self._lock.release()
+
+        try:
+            # wait for notification or timeout
+            ret = self._wait_semaphore.acquire(True, timeout)
+        finally:
+            # indicate that this thread has woken
+            self._woken_count.release()
+
+            # reacquire lock
+            for i in xrange(count):
+                self._lock.acquire()
+        return ret
+
+    def notify(self):
+        assert self._lock._semlock._is_mine(), 'lock is not owned'
+        assert not self._wait_semaphore.acquire(False)
+
+        # to take account of timeouts since last notify() we subtract
+        # woken_count from sleeping_count and rezero woken_count
+        while self._woken_count.acquire(False):
+            res = self._sleeping_count.acquire(False)
+            assert res
+
+        if self._sleeping_count.acquire(False):  # try grabbing a sleeper
+            self._wait_semaphore.release()       # wake up one sleeper
+            self._woken_count.acquire()          # wait for sleeper to wake
+
+            # rezero _wait_semaphore in case a timeout just happened
+            self._wait_semaphore.acquire(False)
+
+    def notify_all(self):
+        assert self._lock._semlock._is_mine(), 'lock is not owned'
+        assert not self._wait_semaphore.acquire(False)
+
+        # to take account of timeouts since last notify*() we subtract
+        # woken_count from 
sleeping_count and rezero woken_count + while self._woken_count.acquire(False): + res = self._sleeping_count.acquire(False) + assert res + + sleepers = 0 + while self._sleeping_count.acquire(False): + self._wait_semaphore.release() # wake up one sleeper + sleepers += 1 + + if sleepers: + for i in xrange(sleepers): + self._woken_count.acquire() # wait for a sleeper to wake + + # rezero wait_semaphore in case some timeouts just happened + while self._wait_semaphore.acquire(False): + pass + + def wait_for(self, predicate, timeout=None): + result = predicate() + if result: + return result + if timeout is not None: + endtime = _time() + timeout + else: + endtime = None + waittime = None + while not result: + if endtime is not None: + waittime = endtime - _time() + if waittime <= 0: + break + self.wait(waittime) + result = predicate() + return result + + +class Event(object): + + def __init__(self): + self._cond = Condition(Lock()) + self._flag = Semaphore(0) + + def is_set(self): + self._cond.acquire() + try: + if self._flag.acquire(False): + self._flag.release() + return True + return False + finally: + self._cond.release() + + def set(self): + self._cond.acquire() + try: + self._flag.acquire(False) + self._flag.release() + self._cond.notify_all() + finally: + self._cond.release() + + def clear(self): + self._cond.acquire() + try: + self._flag.acquire(False) + finally: + self._cond.release() + + def wait(self, timeout=None): + self._cond.acquire() + try: + if self._flag.acquire(False): + self._flag.release() + else: + self._cond.wait(timeout) + + if self._flag.acquire(False): + self._flag.release() + return True + return False + finally: + self._cond.release() + + +if sys.platform != 'win32': + # + # Protection against unlinked semaphores if the program ends abnormally + # and forking has been disabled. + # + + def _cleanup_semaphore_if_leaked(name): + name = name.encode('ascii') + bytes('\0', 'ascii') + if len(name) > 512: + # posix guarantees that writes to a pipe of less than PIPE_BUF + # bytes are atomic, and that PIPE_BUF >= 512 + raise ValueError('name too long') + fd = _get_unlinkfd() + bits = os.write(fd, name) + assert bits == len(name) + + def _get_unlinkfd(): + cp = current_process() + if cp._unlinkfd is None: + r, w = os.pipe() + pid = os.fork() + if pid == 0: + try: + from setproctitle import setproctitle + setproctitle("[sem_cleanup for %r]" % cp.pid) + except: + pass + + # Fork a process which will survive until all other processes + # which have a copy of the write end of the pipe have exited. + # The forked process just collects names of semaphores until + # EOF is indicated. Then it tries unlinking all the names it + # has collected. 
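
The Event type above is composed purely of a Condition plus a Semaphore used as a flag. A minimal usage sketch, assuming billiard exposes Event and Process at the top level:

    import time
    from billiard import Event, Process

    def waiter(ev):
        ev.wait()  # sleeps on the internal Condition until set() is called
        print('flag observed in child')

    if __name__ == '__main__':
        ev = Event()
        p = Process(target=waiter, args=(ev,))
        p.start()
        time.sleep(0.1)
        ev.set()  # releases the flag and notify_all()s any waiters
        p.join()
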
+ _collect_names_then_unlink(r) + os._exit(0) + os.close(r) + cp._unlinkfd = w + return cp._unlinkfd + + def _collect_names_then_unlink(r): + # protect the process from ^C and "killall python" etc + signal.signal(signal.SIGINT, signal.SIG_IGN) + signal.signal(signal.SIGTERM, signal.SIG_IGN) + + # close all fds except r + try: + MAXFD = os.sysconf("SC_OPEN_MAX") + except: + MAXFD = 256 + closerange(0, r) + closerange(r + 1, MAXFD) + + # collect data written to pipe + data = [] + while 1: + try: + s = os.read(r, 512) + except: + # XXX IO lock might be held at fork, so don't try + # printing unexpected exception - see issue 6721 + pass + else: + if not s: + break + data.append(s) + + # attempt to unlink each collected name + for name in bytes('', 'ascii').join(data).split(bytes('\0', 'ascii')): + try: + sem_unlink(name.decode('ascii')) + except: + # XXX IO lock might be held at fork, so don't try + # printing unexpected exception - see issue 6721 + pass diff --git a/awx/lib/site-packages/billiard/tests/__init__.py b/awx/lib/site-packages/billiard/tests/__init__.py new file mode 100644 index 0000000000..1ed389a005 --- /dev/null +++ b/awx/lib/site-packages/billiard/tests/__init__.py @@ -0,0 +1,18 @@ +from __future__ import absolute_import + +import atexit + + +def teardown(): + # Workaround for multiprocessing bug where logging + # is attempted after global already collected at shutdown. + cancelled = set() + try: + import multiprocessing.util + cancelled.add(multiprocessing.util._exit_function) + except (AttributeError, ImportError): + pass + + atexit._exithandlers[:] = [ + e for e in atexit._exithandlers if e[0] not in cancelled + ] diff --git a/awx/lib/site-packages/billiard/tests/compat.py b/awx/lib/site-packages/billiard/tests/compat.py new file mode 100644 index 0000000000..30eb853b0d --- /dev/null +++ b/awx/lib/site-packages/billiard/tests/compat.py @@ -0,0 +1,85 @@ +from __future__ import absolute_import + +import sys + + +class WarningMessage(object): + + """Holds the result of a single showwarning() call.""" + + _WARNING_DETAILS = ('message', 'category', 'filename', 'lineno', 'file', + 'line') + + def __init__(self, message, category, filename, lineno, file=None, + line=None): + local_values = locals() + for attr in self._WARNING_DETAILS: + setattr(self, attr, local_values[attr]) + + self._category_name = category and category.__name__ or None + + def __str__(self): + return ('{message : %r, category : %r, filename : %r, lineno : %s, ' + 'line : %r}' % (self.message, self._category_name, + self.filename, self.lineno, self.line)) + + +class catch_warnings(object): + + """A context manager that copies and restores the warnings filter upon + exiting the context. + + The 'record' argument specifies whether warnings should be captured by a + custom implementation of warnings.showwarning() and be appended to a list + returned by the context manager. Otherwise None is returned by the context + manager. The objects appended to the list are arguments whose attributes + mirror the arguments to showwarning(). + + The 'module' argument is to specify an alternative module to the module + named 'warnings' and imported under that name. This argument is only + useful when testing the warnings module itself. + + """ + + def __init__(self, record=False, module=None): + """Specify whether to record warnings and if an alternative module + should be used other than sys.modules['warnings']. + + For compatibility with Python 3.0, please consider all arguments to be + keyword-only. 
+ + """ + self._record = record + self._module = module is None and sys.modules['warnings'] or module + self._entered = False + + def __repr__(self): + args = [] + if self._record: + args.append('record=True') + if self._module is not sys.modules['warnings']: + args.append('module=%r' % self._module) + name = type(self).__name__ + return '%s(%s)' % (name, ', '.join(args)) + + def __enter__(self): + if self._entered: + raise RuntimeError('Cannot enter %r twice' % self) + self._entered = True + self._filters = self._module.filters + self._module.filters = self._filters[:] + self._showwarning = self._module.showwarning + if self._record: + log = [] + + def showwarning(*args, **kwargs): + log.append(WarningMessage(*args, **kwargs)) + + self._module.showwarning = showwarning + return log + + def __exit__(self, *exc_info): + if not self._entered: + raise RuntimeError('Cannot exit %r without entering first' % self) + self._module.filters = self._filters + self._module.showwarning = self._showwarning diff --git a/awx/lib/site-packages/billiard/tests/test_common.py b/awx/lib/site-packages/billiard/tests/test_common.py new file mode 100644 index 0000000000..e141dd93d4 --- /dev/null +++ b/awx/lib/site-packages/billiard/tests/test_common.py @@ -0,0 +1,98 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import os +import signal + +from contextlib import contextmanager +from mock import call, patch, Mock +from time import time + +from billiard.common import ( + _shutdown_cleanup, + reset_signals, + restart_state, +) + +from .utils import Case + + +def signo(name): + return getattr(signal, name) + + +@contextmanager +def termsigs(*sigs): + from billiard import common + prev, common.TERMSIGS = common.TERMSIGS, sigs + try: + yield + finally: + common.TERMSIGS = prev + + +class test_reset_signals(Case): + + def test_shutdown_handler(self): + with patch('sys.exit') as exit: + _shutdown_cleanup(15, Mock()) + self.assertTrue(exit.called) + self.assertEqual(os.WTERMSIG(exit.call_args[0][0]), 15) + + def test_does_not_reset_ignored_signal(self, sigs=['SIGTERM']): + with self.assert_context(sigs, signal.SIG_IGN) as (_, SET): + self.assertFalse(SET.called) + + def test_does_not_reset_if_current_is_None(self, sigs=['SIGTERM']): + with self.assert_context(sigs, None) as (_, SET): + self.assertFalse(SET.called) + + def test_resets_for_SIG_DFL(self, sigs=['SIGTERM', 'SIGINT', 'SIGUSR1']): + with self.assert_context(sigs, signal.SIG_DFL) as (_, SET): + SET.assert_has_calls([ + call(signo(sig), _shutdown_cleanup) for sig in sigs + ]) + + def test_resets_for_obj(self, sigs=['SIGTERM', 'SIGINT', 'SIGUSR1']): + with self.assert_context(sigs, object()) as (_, SET): + SET.assert_has_calls([ + call(signo(sig), _shutdown_cleanup) for sig in sigs + ]) + + def test_handles_errors(self, sigs=['SIGTERM']): + for exc in (OSError(), AttributeError(), + ValueError(), RuntimeError()): + with self.assert_context(sigs, signal.SIG_DFL, exc) as (_, SET): + self.assertTrue(SET.called) + + @contextmanager + def assert_context(self, sigs, get_returns=None, set_effect=None): + with termsigs(*sigs): + with patch('signal.getsignal') as GET: + with patch('signal.signal') as SET: + GET.return_value = get_returns + SET.side_effect = set_effect + reset_signals() + GET.assert_has_calls([ + call(signo(sig)) for sig in sigs + ]) + yield GET, SET + + +class test_restart_state(Case): + + def test_raises(self): + s = restart_state(100, 1) # max 100 restarts in 1 second. 
+ s.R = 99 + s.step() + with self.assertRaises(s.RestartFreqExceeded): + s.step() + + def test_time_passed_resets_counter(self): + s = restart_state(100, 10) + s.R, s.T = 100, time() + with self.assertRaises(s.RestartFreqExceeded): + s.step() + s.R, s.T = 100, time() + s.step(time() + 20) + self.assertEqual(s.R, 1) diff --git a/awx/lib/site-packages/billiard/tests/test_package.py b/awx/lib/site-packages/billiard/tests/test_package.py new file mode 100644 index 0000000000..79347187d6 --- /dev/null +++ b/awx/lib/site-packages/billiard/tests/test_package.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +import billiard + +from .utils import Case + + +class test_billiard(Case): + + def test_has_version(self): + self.assertTrue(billiard.__version__) + self.assertIsInstance(billiard.__version__, str) diff --git a/awx/lib/site-packages/billiard/tests/utils.py b/awx/lib/site-packages/billiard/tests/utils.py new file mode 100644 index 0000000000..25c6d23294 --- /dev/null +++ b/awx/lib/site-packages/billiard/tests/utils.py @@ -0,0 +1,144 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import re +import sys +import warnings + +try: + import unittest # noqa + unittest.skip + from unittest.util import safe_repr, unorderable_list_difference +except AttributeError: + import unittest2 as unittest # noqa + from unittest2.util import safe_repr, unorderable_list_difference # noqa + +from .compat import catch_warnings + +# -- adds assertWarns from recent unittest2, not in Python 2.7. + + +class _AssertRaisesBaseContext(object): + + def __init__(self, expected, test_case, callable_obj=None, + expected_regex=None): + self.expected = expected + self.failureException = test_case.failureException + self.obj_name = None + if isinstance(expected_regex, basestring): + expected_regex = re.compile(expected_regex) + self.expected_regex = expected_regex + + +class _AssertWarnsContext(_AssertRaisesBaseContext): + """A context manager used to implement TestCase.assertWarns* methods.""" + + def __enter__(self): + # The __warningregistry__'s need to be in a pristine state for tests + # to work properly. 
+ warnings.resetwarnings() + for v in sys.modules.values(): + if getattr(v, '__warningregistry__', None): + v.__warningregistry__ = {} + self.warnings_manager = catch_warnings(record=True) + self.warnings = self.warnings_manager.__enter__() + warnings.simplefilter('always', self.expected) + return self + + def __exit__(self, exc_type, exc_value, tb): + self.warnings_manager.__exit__(exc_type, exc_value, tb) + if exc_type is not None: + # let unexpected exceptions pass through + return + try: + exc_name = self.expected.__name__ + except AttributeError: + exc_name = str(self.expected) + first_matching = None + for m in self.warnings: + w = m.message + if not isinstance(w, self.expected): + continue + if first_matching is None: + first_matching = w + if (self.expected_regex is not None and + not self.expected_regex.search(str(w))): + continue + # store warning for later retrieval + self.warning = w + self.filename = m.filename + self.lineno = m.lineno + return + # Now we simply try to choose a helpful failure message + if first_matching is not None: + raise self.failureException( + '%r does not match %r' % ( + self.expected_regex.pattern, str(first_matching))) + if self.obj_name: + raise self.failureException( + '%s not triggered by %s' % (exc_name, self.obj_name)) + else: + raise self.failureException('%s not triggered' % exc_name) + + +class Case(unittest.TestCase): + + def assertWarns(self, expected_warning): + return _AssertWarnsContext(expected_warning, self, None) + + def assertWarnsRegex(self, expected_warning, expected_regex): + return _AssertWarnsContext(expected_warning, self, + None, expected_regex) + + def assertDictContainsSubset(self, expected, actual, msg=None): + missing, mismatched = [], [] + + for key, value in expected.iteritems(): + if key not in actual: + missing.append(key) + elif value != actual[key]: + mismatched.append('%s, expected: %s, actual: %s' % ( + safe_repr(key), safe_repr(value), + safe_repr(actual[key]))) + + if not (missing or mismatched): + return + + standard_msg = '' + if missing: + standard_msg = 'Missing: %s' % ','.join(map(safe_repr, missing)) + + if mismatched: + if standard_msg: + standard_msg += '; ' + standard_msg += 'Mismatched values: %s' % ( + ','.join(mismatched)) + + self.fail(self._formatMessage(msg, standard_msg)) + + def assertItemsEqual(self, expected_seq, actual_seq, msg=None): + missing = unexpected = None + try: + expected = sorted(expected_seq) + actual = sorted(actual_seq) + except TypeError: + # Unsortable items (example: set(), complex(), ...) + expected = list(expected_seq) + actual = list(actual_seq) + missing, unexpected = unorderable_list_difference( + expected, actual) + else: + return self.assertSequenceEqual(expected, actual, msg=msg) + + errors = [] + if missing: + errors.append( + 'Expected, but missing:\n %s' % (safe_repr(missing), ), + ) + if unexpected: + errors.append( + 'Unexpected, but present:\n %s' % (safe_repr(unexpected), ), + ) + if errors: + standardMsg = '\n'.join(errors) + self.fail(self._formatMessage(msg, standardMsg)) diff --git a/awx/lib/site-packages/billiard/util.py b/awx/lib/site-packages/billiard/util.py new file mode 100644 index 0000000000..f669512049 --- /dev/null +++ b/awx/lib/site-packages/billiard/util.py @@ -0,0 +1,339 @@ +# +# Module providing various facilities to other parts of the package +# +# billiard/util.py +# +# Copyright (c) 2006-2008, R Oudkerk --- see COPYING.txt +# Licensed to PSF under a Contributor Agreement. 
+# +from __future__ import absolute_import + +import errno +import functools +import itertools +import weakref +import atexit +import shutil +import tempfile +import threading # we want threading to install its + # cleanup function before multiprocessing does + +from .compat import get_errno +from .process import current_process, active_children + +__all__ = [ + 'sub_debug', 'debug', 'info', 'sub_warning', 'get_logger', + 'log_to_stderr', 'get_temp_dir', 'register_after_fork', + 'is_exiting', 'Finalize', 'ForkAwareThreadLock', 'ForkAwareLocal', + 'SUBDEBUG', 'SUBWARNING', +] + +# +# Logging +# + +NOTSET = 0 +SUBDEBUG = 5 +DEBUG = 10 +INFO = 20 +SUBWARNING = 25 +ERROR = 40 + +LOGGER_NAME = 'multiprocessing' +DEFAULT_LOGGING_FORMAT = '[%(levelname)s/%(processName)s] %(message)s' + +_logger = None +_log_to_stderr = False + +#: Support for reinitialization of objects when bootstrapping a child process +_afterfork_registry = weakref.WeakValueDictionary() +_afterfork_counter = itertools.count() + +#: Finalization using weakrefs +_finalizer_registry = {} +_finalizer_counter = itertools.count() + +#: set to true if the process is shutting down. +_exiting = False + + +def sub_debug(msg, *args, **kwargs): + if _logger: + _logger.log(SUBDEBUG, msg, *args, **kwargs) + + +def debug(msg, *args, **kwargs): + if _logger: + _logger.log(DEBUG, msg, *args, **kwargs) + return True + return False + + +def info(msg, *args, **kwargs): + if _logger: + _logger.log(INFO, msg, *args, **kwargs) + return True + return False + + +def sub_warning(msg, *args, **kwargs): + if _logger: + _logger.log(SUBWARNING, msg, *args, **kwargs) + return True + return False + + +def error(msg, *args, **kwargs): + if _logger: + _logger.log(ERROR, msg, *args, **kwargs) + return True + return False + + +def get_logger(): + ''' + Returns logger used by multiprocessing + ''' + global _logger + import logging + + logging._acquireLock() + try: + if not _logger: + + _logger = logging.getLogger(LOGGER_NAME) + _logger.propagate = 0 + logging.addLevelName(SUBDEBUG, 'SUBDEBUG') + logging.addLevelName(SUBWARNING, 'SUBWARNING') + + # XXX multiprocessing should cleanup before logging + if hasattr(atexit, 'unregister'): + atexit.unregister(_exit_function) + atexit.register(_exit_function) + else: + atexit._exithandlers.remove((_exit_function, (), {})) + atexit._exithandlers.append((_exit_function, (), {})) + finally: + logging._releaseLock() + + return _logger + + +def log_to_stderr(level=None): + ''' + Turn on logging and add a handler which prints to stderr + ''' + global _log_to_stderr + import logging + + logger = get_logger() + formatter = logging.Formatter(DEFAULT_LOGGING_FORMAT) + handler = logging.StreamHandler() + handler.setFormatter(formatter) + logger.addHandler(handler) + + if level: + logger.setLevel(level) + _log_to_stderr = True + return _logger + + +def get_temp_dir(): + ''' + Function returning a temp directory which will be removed on exit + ''' + # get name of a temp directory which will be automatically cleaned up + if current_process()._tempdir is None: + tempdir = tempfile.mkdtemp(prefix='pymp-') + info('created temp directory %s', tempdir) + Finalize(None, shutil.rmtree, args=[tempdir], exitpriority=-100) + current_process()._tempdir = tempdir + return current_process()._tempdir + + +def _run_after_forkers(): + items = list(_afterfork_registry.items()) + items.sort() + for (index, ident, func), obj in items: + try: + func(obj) + except Exception, e: + info('after forker raised exception %s', e) + + +def register_after_fork(obj, 
func):
+    _afterfork_registry[(_afterfork_counter.next(), id(obj), func)] = obj
+
+
+class Finalize(object):
+    '''
+    Class which supports object finalization using weakrefs
+    '''
+    def __init__(self, obj, callback, args=(), kwargs=None, exitpriority=None):
+        assert exitpriority is None or type(exitpriority) is int
+
+        if obj is not None:
+            self._weakref = weakref.ref(obj, self)
+        else:
+            assert exitpriority is not None
+
+        self._callback = callback
+        self._args = args
+        self._kwargs = kwargs or {}
+        self._key = (exitpriority, _finalizer_counter.next())
+
+        _finalizer_registry[self._key] = self
+
+    def __call__(self, wr=None,
+                 # Need to bind these locally because the globals
+                 # could've been cleared at shutdown
+                 _finalizer_registry=_finalizer_registry,
+                 sub_debug=sub_debug):
+        '''
+        Run the callback unless it has already been called or cancelled
+        '''
+        try:
+            del _finalizer_registry[self._key]
+        except KeyError:
+            sub_debug('finalizer no longer registered')
+        else:
+            sub_debug(
+                'finalizer calling %s with args %s and kwargs %s',
+                self._callback, self._args, self._kwargs,
+            )
+            res = self._callback(*self._args, **self._kwargs)
+            self._weakref = self._callback = self._args = \
+                self._kwargs = self._key = None
+            return res
+
+    def cancel(self):
+        '''
+        Cancel finalization of the object
+        '''
+        try:
+            del _finalizer_registry[self._key]
+        except KeyError:
+            pass
+        else:
+            self._weakref = self._callback = self._args = \
+                self._kwargs = self._key = None
+
+    def still_active(self):
+        '''
+        Return whether this finalizer is still waiting to invoke callback
+        '''
+        return self._key in _finalizer_registry
+
+    def __repr__(self):
+        try:
+            obj = self._weakref()
+        except (AttributeError, TypeError):
+            obj = None
+
+        if obj is None:
+            return '<Finalize object, dead>'
+
+        x = '<Finalize object, callback=%s' % getattr(
+            self._callback, '__name__', self._callback)
+        if self._args:
+            x += ', args=' + str(self._args)
+        if self._kwargs:
+            x += ', kwargs=' + str(self._kwargs)
+        if self._key[0] is not None:
+            x += ', exitpriority=' + str(self._key[0])
+        return x + '>'
+
+
+def _run_finalizers(minpriority=None):
+    '''
+    Run all finalizers whose exit priority is not None and at least minpriority
+
+    Finalizers with highest priority are called first; finalizers with
+    the same priority will be called in reverse order of creation.
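+
+    Example (illustrative): given finalizers registered with exit
+    priorities 10, 0 and None, the priority-10 finalizer runs first,
+    then priority 0; a finalizer whose exitpriority is None never runs
+    here and is only invoked through its weakref callback.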
+ ''' + if minpriority is None: + f = lambda p: p[0][0] is not None + else: + f = lambda p: p[0][0] is not None and p[0][0] >= minpriority + + items = [x for x in _finalizer_registry.items() if f(x)] + items.sort(reverse=True) + + for key, finalizer in items: + sub_debug('calling %s', finalizer) + try: + finalizer() + except Exception: + if not error("Error calling finalizer %r", finalizer, + exc_info=True): + import traceback + traceback.print_exc() + + if minpriority is None: + _finalizer_registry.clear() + + +def is_exiting(): + ''' + Returns true if the process is shutting down + ''' + return _exiting or _exiting is None + + +def _exit_function(): + ''' + Clean up on exit + ''' + + global _exiting + + info('process shutting down') + debug('running all "atexit" finalizers with priority >= 0') + _run_finalizers(0) + + for p in active_children(): + if p._daemonic: + info('calling terminate() for daemon %s', p.name) + p._popen.terminate() + + for p in active_children(): + info('calling join() for process %s', p.name) + p.join() + + debug('running the remaining "atexit" finalizers') + _run_finalizers() +atexit.register(_exit_function) + + +class ForkAwareThreadLock(object): + + def __init__(self): + self._lock = threading.Lock() + self.acquire = self._lock.acquire + self.release = self._lock.release + register_after_fork(self, ForkAwareThreadLock.__init__) + + +class ForkAwareLocal(threading.local): + + def __init__(self): + register_after_fork(self, lambda obj: obj.__dict__.clear()) + + def __reduce__(self): + return type(self), () + + +def _eintr_retry(func): + ''' + Automatic retry after EINTR. + ''' + + @functools.wraps(func) + def wrapped(*args, **kwargs): + while 1: + try: + return func(*args, **kwargs) + except OSError, exc: + if get_errno(exc) != errno.EINTR: + raise + return wrapped diff --git a/awx/lib/site-packages/celery/__compat__.py b/awx/lib/site-packages/celery/__compat__.py new file mode 100644 index 0000000000..e09772346c --- /dev/null +++ b/awx/lib/site-packages/celery/__compat__.py @@ -0,0 +1,203 @@ +# -*- coding: utf-8 -*- +""" + celery.__compat__ + ~~~~~~~~~~~~~~~~~ + + This module contains utilities to dynamically + recreate modules, either for lazy loading or + to create old modules at runtime instead of + having them litter the source tree. + +""" +from __future__ import absolute_import + +import operator +import sys + +from functools import reduce +from importlib import import_module +from types import ModuleType + +from .local import Proxy + +MODULE_DEPRECATED = """ +The module %s is deprecated and will be removed in a future version. +""" + +DEFAULT_ATTRS = set(['__file__', '__path__', '__doc__', '__all__']) + +# im_func is no longer available in Py3. +# instead the unbound method itself can be used. +if sys.version_info[0] == 3: # pragma: no cover + def fun_of_method(method): + return method +else: + def fun_of_method(method): # noqa + return method.im_func + + +def getappattr(path): + """Gets attribute from the current_app recursively, + e.g. 
``getappattr('amqp.get_task_consumer')``."""
+    from celery import current_app
+    return current_app._rgetattr(path)
+
+
+def _compat_task_decorator(*args, **kwargs):
+    from celery import current_app
+    kwargs.setdefault('accept_magic_kwargs', True)
+    return current_app.task(*args, **kwargs)
+
+
+def _compat_periodic_task_decorator(*args, **kwargs):
+    from celery.task import periodic_task
+    kwargs.setdefault('accept_magic_kwargs', True)
+    return periodic_task(*args, **kwargs)
+
+
+COMPAT_MODULES = {
+    'celery': {
+        'execute': {
+            'send_task': 'send_task',
+        },
+        'decorators': {
+            'task': _compat_task_decorator,
+            'periodic_task': _compat_periodic_task_decorator,
+        },
+        'log': {
+            'get_default_logger': 'log.get_default_logger',
+            'setup_logger': 'log.setup_logger',
+            'setup_logging_subsystem': 'log.setup_logging_subsystem',
+            'redirect_stdouts_to_logger': 'log.redirect_stdouts_to_logger',
+        },
+        'messaging': {
+            'TaskPublisher': 'amqp.TaskPublisher',
+            'TaskConsumer': 'amqp.TaskConsumer',
+            'establish_connection': 'connection',
+            'with_connection': 'with_default_connection',
+            'get_consumer_set': 'amqp.TaskConsumer',
+        },
+        'registry': {
+            'tasks': 'tasks',
+        },
+    },
+    'celery.task': {
+        'control': {
+            'broadcast': 'control.broadcast',
+            'rate_limit': 'control.rate_limit',
+            'time_limit': 'control.time_limit',
+            'ping': 'control.ping',
+            'revoke': 'control.revoke',
+            'discard_all': 'control.purge',
+            'inspect': 'control.inspect',
+        },
+        'schedules': 'celery.schedules',
+        'chords': 'celery.canvas',
+    }
+}
+
+
+class class_property(object):
+
+    def __init__(self, fget=None, fset=None):
+        assert fget and isinstance(fget, classmethod)
+        assert isinstance(fset, classmethod) if fset else True
+        self.__get = fget
+        self.__set = fset
+
+        info = fget.__get__(object)  # just need the info attrs.
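+        # Copy introspection metadata from the wrapped classmethod so
+        # the property exposes the original method's name and docs.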
+ self.__doc__ = info.__doc__ + self.__name__ = info.__name__ + self.__module__ = info.__module__ + + def __get__(self, obj, type=None): + if obj and type is None: + type = obj.__class__ + return self.__get.__get__(obj, type)() + + def __set__(self, obj, value): + if obj is None: + return self + return self.__set.__get__(obj)(value) + + +def reclassmethod(method): + return classmethod(fun_of_method(method)) + + +class MagicModule(ModuleType): + _compat_modules = () + _all_by_module = {} + _direct = {} + _object_origins = {} + + def __getattr__(self, name): + if name in self._object_origins: + module = __import__(self._object_origins[name], None, None, [name]) + for item in self._all_by_module[module.__name__]: + setattr(self, item, getattr(module, item)) + return getattr(module, name) + elif name in self._direct: + module = __import__(self._direct[name], None, None, [name]) + setattr(self, name, module) + return module + return ModuleType.__getattribute__(self, name) + + def __dir__(self): + return list(set(self.__all__) | DEFAULT_ATTRS) + + +def create_module(name, attrs, cls_attrs=None, pkg=None, + base=MagicModule, prepare_attr=None): + fqdn = '.'.join([pkg.__name__, name]) if pkg else name + cls_attrs = {} if cls_attrs is None else cls_attrs + + attrs = dict((attr_name, prepare_attr(attr) if prepare_attr else attr) + for attr_name, attr in attrs.iteritems()) + module = sys.modules[fqdn] = type(name, (base, ), cls_attrs)(fqdn) + module.__dict__.update(attrs) + return module + + +def recreate_module(name, compat_modules=(), by_module={}, direct={}, + base=MagicModule, **attrs): + old_module = sys.modules[name] + origins = get_origins(by_module) + compat_modules = COMPAT_MODULES.get(name, ()) + + cattrs = dict( + _compat_modules=compat_modules, + _all_by_module=by_module, _direct=direct, + _object_origins=origins, + __all__=tuple(set(reduce( + operator.add, + [tuple(v) for v in [compat_modules, origins, direct, attrs]], + ))), + ) + new_module = create_module(name, attrs, cls_attrs=cattrs, base=base) + new_module.__dict__.update(dict((mod, get_compat_module(new_module, mod)) + for mod in compat_modules)) + return old_module, new_module + + +def get_compat_module(pkg, name): + + def prepare(attr): + if isinstance(attr, basestring): + return Proxy(getappattr, (attr, )) + return attr + + attrs = COMPAT_MODULES[pkg.__name__][name] + if isinstance(attrs, basestring): + fqdn = '.'.join([pkg.__name__, name]) + module = sys.modules[fqdn] = import_module(attrs) + return module + attrs['__all__'] = list(attrs) + return create_module(name, dict(attrs), pkg=pkg, prepare_attr=prepare) + + +def get_origins(defs): + origins = {} + for module, items in defs.iteritems(): + origins.update(dict((item, module) for item in items)) + return origins diff --git a/awx/lib/site-packages/celery/__init__.py b/awx/lib/site-packages/celery/__init__.py new file mode 100644 index 0000000000..0aa88032cf --- /dev/null +++ b/awx/lib/site-packages/celery/__init__.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +"""Distributed Task Queue""" +# :copyright: (c) 2009 - 2012 Ask Solem and individual contributors, +# All rights reserved. +# :copyright: (c) 2012 VMware, Inc., All rights reserved. +# :license: BSD (3 Clause), see LICENSE for more details. 
+ +from __future__ import absolute_import + +SERIES = 'Chiastic Slide' +VERSION = (3, 0, 19) +__version__ = '.'.join(str(p) for p in VERSION[0:3]) + ''.join(VERSION[3:]) +__author__ = 'Ask Solem' +__contact__ = 'ask@celeryproject.org' +__homepage__ = 'http://celeryproject.org' +__docformat__ = 'restructuredtext' +__all__ = [ + 'Celery', 'bugreport', 'shared_task', 'Task', + 'current_app', 'current_task', + 'chain', 'chord', 'chunks', 'group', 'subtask', + 'xmap', 'xstarmap', 'uuid', 'VERSION', '__version__', +] +VERSION_BANNER = '%s (%s)' % (__version__, SERIES) + +# -eof meta- + +STATICA_HACK = True +globals()['kcah_acitats'[::-1].upper()] = False +if STATICA_HACK: + # This is never executed, but tricks static analyzers (PyDev, PyCharm, + # pylint, etc.) into knowing the types of these symbols, and what + # they contain. + from celery.app.base import Celery + from celery.app.utils import bugreport + from celery.app.task import Task + from celery._state import current_app, current_task + from celery.canvas import ( + chain, chord, chunks, group, subtask, xmap, xstarmap, + ) + from celery.utils import uuid + +# Lazy loading +from .__compat__ import recreate_module + +old_module, new_module = recreate_module( # pragma: no cover + __name__, + by_module={ + 'celery.app': ['Celery', 'bugreport', 'shared_task'], + 'celery.app.task': ['Task'], + 'celery._state': ['current_app', 'current_task'], + 'celery.canvas': ['chain', 'chord', 'chunks', 'group', + 'subtask', 'xmap', 'xstarmap'], + 'celery.utils': ['uuid'], + }, + direct={'task': 'celery.task'}, + __package__='celery', __file__=__file__, + __path__=__path__, __doc__=__doc__, __version__=__version__, + __author__=__author__, __contact__=__contact__, + __homepage__=__homepage__, __docformat__=__docformat__, + VERSION=VERSION, SERIES=SERIES, VERSION_BANNER=VERSION_BANNER, +) diff --git a/awx/lib/site-packages/celery/__main__.py b/awx/lib/site-packages/celery/__main__.py new file mode 100644 index 0000000000..9f0576593d --- /dev/null +++ b/awx/lib/site-packages/celery/__main__.py @@ -0,0 +1,36 @@ +from __future__ import absolute_import + +import sys + + +def maybe_patch_concurrency(): + from celery.platforms import maybe_patch_concurrency + maybe_patch_concurrency(sys.argv, ['-P'], ['--pool']) + + +def main(): + maybe_patch_concurrency() + from celery.bin.celery import main + main() + + +def _compat_worker(): + maybe_patch_concurrency() + from celery.bin.celeryd import main + main() + + +def _compat_multi(): + maybe_patch_concurrency() + from celery.bin.celeryd_multi import main + main() + + +def _compat_beat(): + maybe_patch_concurrency() + from celery.bin.celerybeat import main + main() + + +if __name__ == '__main__': + main() diff --git a/awx/lib/site-packages/celery/_state.py b/awx/lib/site-packages/celery/_state.py new file mode 100644 index 0000000000..137e44e933 --- /dev/null +++ b/awx/lib/site-packages/celery/_state.py @@ -0,0 +1,106 @@ +# -*- coding: utf-8 -*- +""" + celery._state + ~~~~~~~~~~~~~~~ + + This is an internal module containing thread state + like the ``current_app``, and ``current_task``. + + This module shouldn't be used directly. + +""" +from __future__ import absolute_import + +import os +import sys +import threading +import weakref + +from celery.local import Proxy +from celery.utils.threads import LocalStack + +#: Global default app used when no current app. +default_app = None + +#: List of all app instances (weakrefs), must not be used directly. 
+_apps = set() + + +class _TLS(threading.local): + #: Apps with the :attr:`~celery.app.base.BaseApp.set_as_current` attribute + #: sets this, so it will always contain the last instantiated app, + #: and is the default app returned by :func:`app_or_default`. + current_app = None +_tls = _TLS() + +_task_stack = LocalStack() + + +def set_default_app(app): + global default_app + default_app = app + + +def _get_current_app(): + if default_app is None: + #: creates the global fallback app instance. + from celery.app import Celery + set_default_app(Celery( + 'default', + loader=os.environ.get('CELERY_LOADER') or 'default', + set_as_current=False, accept_magic_kwargs=True, + )) + return _tls.current_app or default_app + +C_STRICT_APP = os.environ.get('C_STRICT_APP') +if os.environ.get('C_STRICT_APP'): + def get_current_app(): + import traceback + sys.stderr.write('USES CURRENT_APP\n') + traceback.print_stack(file=sys.stderr) + return _get_current_app() +else: + get_current_app = _get_current_app + + +def get_current_task(): + """Currently executing task.""" + return _task_stack.top + + +def get_current_worker_task(): + """Currently executing task, that was applied by the worker. + + This is used to differentiate between the actual task + executed by the worker and any task that was called within + a task (using ``task.__call__`` or ``task.apply``) + + """ + for task in reversed(_task_stack.stack): + if not task.request.called_directly: + return task + + +#: Proxy to current app. +current_app = Proxy(get_current_app) + +#: Proxy to current task. +current_task = Proxy(get_current_task) + + +def _register_app(app): + _apps.add(weakref.ref(app)) + + +def _get_active_apps(): + dirty = [] + try: + for appref in _apps: + app = appref() + if app is None: + dirty.append(appref) + else: + yield app + finally: + while dirty: + _apps.discard(dirty.pop()) diff --git a/awx/lib/site-packages/celery/app/__init__.py b/awx/lib/site-packages/celery/app/__init__.py new file mode 100644 index 0000000000..becee39d43 --- /dev/null +++ b/awx/lib/site-packages/celery/app/__init__.py @@ -0,0 +1,136 @@ +# -*- coding: utf-8 -*- +""" + celery.app + ~~~~~~~~~~ + + Celery Application. + +""" +from __future__ import absolute_import +from __future__ import with_statement + +import os + +from celery.local import Proxy +from celery import _state +from celery._state import ( # noqa + set_default_app, + get_current_app as current_app, + get_current_task as current_task, + _get_active_apps, +) +from celery.utils import gen_task_name + +from .builtins import shared_task as _shared_task +from .base import Celery, AppPickler # noqa + +#: Proxy always returning the app set as default. +default_app = Proxy(lambda: _state.default_app) + +#: Function returning the app provided or the default app if none. +#: +#: The environment variable :envvar:`CELERY_TRACE_APP` is used to +#: trace app leaks. When enabled an exception is raised if there +#: is no active app. +app_or_default = None + +#: The 'default' loader is the default loader used by old applications. +#: This is deprecated and should no longer be used as it's set too early +#: to be affected by --loader argument. 
+default_loader = os.environ.get('CELERY_LOADER') or 'default'  # XXX
+
+
+def bugreport():
+    return current_app().bugreport()
+
+
+def _app_or_default(app=None):
+    if app is None:
+        return _state.get_current_app()
+    return app
+
+
+def _app_or_default_trace(app=None):  # pragma: no cover
+    from traceback import print_stack
+    from billiard import current_process
+    if app is None:
+        if getattr(_state._tls, 'current_app', None):
+            print('-- RETURNING TO CURRENT APP --')  # noqa+
+            print_stack()
+            return _state._tls.current_app
+        if current_process()._name == 'MainProcess':
+            raise Exception('DEFAULT APP')
+        print('-- RETURNING TO DEFAULT APP --')  # noqa+
+        print_stack()
+        return _state.default_app
+    return app
+
+
+def enable_trace():
+    global app_or_default
+    app_or_default = _app_or_default_trace
+
+
+def disable_trace():
+    global app_or_default
+    app_or_default = _app_or_default
+
+if os.environ.get('CELERY_TRACE_APP'):  # pragma: no cover
+    enable_trace()
+else:
+    disable_trace()
+
+App = Celery  # XXX Compat
+
+
+def shared_task(*args, **kwargs):
+    """Task decorator that creates shared tasks,
+    and returns a proxy that always returns the task from the current app's
+    task registry.
+
+    This can be used by library authors to create tasks that will work
+    for any app environment.
+
+    Example:
+
+        >>> from celery import Celery, shared_task
+        >>> @shared_task
+        ... def add(x, y):
+        ...     return x + y
+
+        >>> app1 = Celery(broker='amqp://')
+        >>> add.app is app1
+        True
+
+        >>> app2 = Celery(broker='redis://')
+        >>> add.app is app2
+        True
+
+    """
+
+    def create_shared_task(**options):
+
+        def __inner(fun):
+            name = options.get('name')
+            # Set as shared task so that unfinalized apps,
+            # and future apps will load the task.
+            _shared_task(lambda app: app._task_from_fun(fun, **options))
+
+            # Force all finalized apps to take this task as well.
+            for app in _get_active_apps():
+                if app.finalized:
+                    with app._finalize_mutex:
+                        app._task_from_fun(fun, **options)
+
+            # Returns a proxy that always gets the task from the current
+            # app's task registry.
+            def task_by_cons():
+                app = current_app()
+                return app.tasks[
+                    name or gen_task_name(app, fun.__name__, fun.__module__)
+                ]
+            return Proxy(task_by_cons)
+        return __inner
+
+    if len(args) == 1 and callable(args[0]):
+        return create_shared_task(**kwargs)(args[0])
+    return create_shared_task(*args, **kwargs)
diff --git a/awx/lib/site-packages/celery/app/abstract.py b/awx/lib/site-packages/celery/app/abstract.py
new file mode 100644
index 0000000000..96ba1e7142
--- /dev/null
+++ b/awx/lib/site-packages/celery/app/abstract.py
@@ -0,0 +1,63 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.app.abstract
+    ~~~~~~~~~~~~~~~~~~~
+
+    Abstract class that takes default attribute values
+    from the configuration.
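+
+    Example (a sketch; the attribute and key names are hypothetical)::
+
+        class Worker(configurated):
+            concurrency = from_config()      # looked up by attribute name
+            pool = from_config('pool_cls')   # looked up by explicit key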
+ +""" +from __future__ import absolute_import + + +class from_config(object): + + def __init__(self, key=None): + self.key = key + + def get_key(self, attr): + return attr if self.key is None else self.key + + +class _configurated(type): + + def __new__(cls, name, bases, attrs): + attrs['__confopts__'] = dict((attr, spec.get_key(attr)) + for attr, spec in attrs.iteritems() + if isinstance(spec, from_config)) + inherit_from = attrs.get('inherit_confopts', ()) + for subcls in bases: + try: + attrs['__confopts__'].update(subcls.__confopts__) + except AttributeError: + pass + for subcls in inherit_from: + attrs['__confopts__'].update(subcls.__confopts__) + attrs = dict((k, v if not isinstance(v, from_config) else None) + for k, v in attrs.iteritems()) + return super(_configurated, cls).__new__(cls, name, bases, attrs) + + +class configurated(object): + __metaclass__ = _configurated + + def setup_defaults(self, kwargs, namespace='celery'): + confopts = self.__confopts__ + app, find = self.app, self.app.conf.find_value_for_key + + for attr, keyname in confopts.iteritems(): + try: + value = kwargs[attr] + except KeyError: + value = find(keyname, namespace) + else: + if value is None: + value = find(keyname, namespace) + setattr(self, attr, value) + + for attr_name, attr_value in kwargs.iteritems(): + if attr_name not in confopts and attr_value is not None: + setattr(self, attr_name, attr_value) + + def confopts_as_dict(self): + return dict((key, getattr(self, key)) for key in self.__confopts__) diff --git a/awx/lib/site-packages/celery/app/amqp.py b/awx/lib/site-packages/celery/app/amqp.py new file mode 100644 index 0000000000..09787bcf63 --- /dev/null +++ b/awx/lib/site-packages/celery/app/amqp.py @@ -0,0 +1,425 @@ +# -*- coding: utf-8 -*- +""" + celery.app.amqp + ~~~~~~~~~~~~~~~ + + Sending and receiving messages using Kombu. + +""" +from __future__ import absolute_import + +from datetime import timedelta +from weakref import WeakValueDictionary + +from kombu import Connection, Consumer, Exchange, Producer, Queue +from kombu.common import entry_to_queue +from kombu.pools import ProducerPool +from kombu.utils import cached_property, uuid +from kombu.utils.encoding import safe_repr + +from celery import signals +from celery.utils.text import indent as textindent + +from . import app_or_default +from . import routes as _routes + +#: Human readable queue declaration. +QUEUE_FORMAT = """ +.> %(name)s exchange:%(exchange)s(%(exchange_type)s) binding:%(routing_key)s +""" + + +class Queues(dict): + """Queue name⇒ declaration mapping. + + :param queues: Initial list/tuple or dict of queues. + :keyword create_missing: By default any unknown queues will be + added automatically, but if disabled + the occurrence of unknown queues + in `wanted` will raise :exc:`KeyError`. + :keyword ha_policy: Default HA policy for queues with none set. + + + """ + #: If set, this is a subset of queues to consume from. + #: The rest of the queues are then used for routing only. 
+ _consume_from = None + + def __init__(self, queues=None, default_exchange=None, + create_missing=True, ha_policy=None): + dict.__init__(self) + self.aliases = WeakValueDictionary() + self.default_exchange = default_exchange + self.create_missing = create_missing + self.ha_policy = ha_policy + if isinstance(queues, (tuple, list)): + queues = dict((q.name, q) for q in queues) + for name, q in (queues or {}).iteritems(): + self.add(q) if isinstance(q, Queue) else self.add_compat(name, **q) + + def __getitem__(self, name): + try: + return self.aliases[name] + except KeyError: + return dict.__getitem__(self, name) + + def __setitem__(self, name, queue): + if self.default_exchange and (not queue.exchange or + not queue.exchange.name): + queue.exchange = self.default_exchange + dict.__setitem__(self, name, queue) + if queue.alias: + self.aliases[queue.alias] = queue + + def __missing__(self, name): + if self.create_missing: + return self.add(self.new_missing(name)) + raise KeyError(name) + + def add(self, queue, **kwargs): + """Add new queue. + + :param queue: Name of the queue. + :keyword exchange: Name of the exchange. + :keyword routing_key: Binding key. + :keyword exchange_type: Type of exchange. + :keyword \*\*options: Additional declaration options. + + """ + if not isinstance(queue, Queue): + return self.add_compat(queue, **kwargs) + if self.ha_policy: + if queue.queue_arguments is None: + queue.queue_arguments = {} + self._set_ha_policy(queue.queue_arguments) + self[queue.name] = queue + return queue + + def add_compat(self, name, **options): + # docs used to use binding_key as routing key + options.setdefault('routing_key', options.get('binding_key')) + if options['routing_key'] is None: + options['routing_key'] = name + if self.ha_policy is not None: + self._set_ha_policy(options.setdefault('queue_arguments', {})) + q = self[name] = entry_to_queue(name, **options) + return q + + def _set_ha_policy(self, args): + policy = self.ha_policy + if isinstance(policy, (list, tuple)): + return args.update({'x-ha-policy': 'nodes', + 'x-ha-policy-params': list(policy)}) + args['x-ha-policy'] = policy + + def format(self, indent=0, indent_first=True): + """Format routing table into string for log dumps.""" + active = self.consume_from + if not active: + return '' + info = [ + QUEUE_FORMAT.strip() % { + 'name': (name + ':').ljust(12), + 'exchange': q.exchange.name, + 'exchange_type': q.exchange.type, + 'routing_key': q.routing_key} + for name, q in sorted(active.iteritems())] + if indent_first: + return textindent('\n'.join(info), indent) + return info[0] + '\n' + textindent('\n'.join(info[1:]), indent) + + def select_add(self, queue, **kwargs): + """Add new task queue that will be consumed from even when + a subset has been selected using the :option:`-Q` option.""" + q = self.add(queue, **kwargs) + if self._consume_from is not None: + self._consume_from[q.name] = q + return q + + def select_subset(self, wanted): + """Sets :attr:`consume_from` by selecting a subset of the + currently defined queues. + + :param wanted: List of wanted queue names. 
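+
+        Example (queue names are illustrative)::
+
+            app.amqp.queues.select_subset(['images', 'video'])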
+ """ + if wanted: + self._consume_from = dict((name, self[name]) for name in wanted) + + def select_remove(self, queue): + if self._consume_from is None: + self.select_subset(k for k in self if k != queue) + else: + self._consume_from.pop(queue, None) + + def new_missing(self, name): + return Queue(name, Exchange(name), name) + + @property + def consume_from(self): + if self._consume_from is not None: + return self._consume_from + return self + + +class TaskProducer(Producer): + app = None + auto_declare = False + retry = False + retry_policy = None + utc = True + event_dispatcher = None + send_sent_event = False + + def __init__(self, channel=None, exchange=None, *args, **kwargs): + self.retry = kwargs.pop('retry', self.retry) + self.retry_policy = kwargs.pop('retry_policy', + self.retry_policy or {}) + self.send_sent_event = kwargs.pop('send_sent_event', + self.send_sent_event) + exchange = exchange or self.exchange + self.queues = self.app.amqp.queues # shortcut + self.default_queue = self.app.amqp.default_queue + super(TaskProducer, self).__init__(channel, exchange, *args, **kwargs) + + def publish_task(self, task_name, task_args=None, task_kwargs=None, + countdown=None, eta=None, task_id=None, group_id=None, + taskset_id=None, # compat alias to group_id + expires=None, exchange=None, exchange_type=None, + event_dispatcher=None, retry=None, retry_policy=None, + queue=None, now=None, retries=0, chord=None, + callbacks=None, errbacks=None, routing_key=None, + serializer=None, delivery_mode=None, compression=None, + declare=None, **kwargs): + """Send task message.""" + + qname = queue + if queue is None and exchange is None: + queue = self.default_queue + if queue is not None: + if isinstance(queue, basestring): + qname, queue = queue, self.queues[queue] + else: + qname = queue.name + exchange = exchange or queue.exchange.name + routing_key = routing_key or queue.routing_key + declare = declare or ([queue] if queue else []) + + # merge default and custom policy + retry = self.retry if retry is None else retry + _rp = (dict(self.retry_policy, **retry_policy) if retry_policy + else self.retry_policy) + task_id = task_id or uuid() + task_args = task_args or [] + task_kwargs = task_kwargs or {} + if not isinstance(task_args, (list, tuple)): + raise ValueError('task args must be a list or tuple') + if not isinstance(task_kwargs, dict): + raise ValueError('task kwargs must be a dictionary') + if countdown: # Convert countdown to ETA. 
+ now = now or self.app.now() + eta = now + timedelta(seconds=countdown) + if isinstance(expires, (int, float)): + now = now or self.app.now() + expires = now + timedelta(seconds=expires) + eta = eta and eta.isoformat() + expires = expires and expires.isoformat() + + body = { + 'task': task_name, + 'id': task_id, + 'args': task_args, + 'kwargs': task_kwargs, + 'retries': retries or 0, + 'eta': eta, + 'expires': expires, + 'utc': self.utc, + 'callbacks': callbacks, + 'errbacks': errbacks, + 'taskset': group_id or taskset_id, + 'chord': chord, + } + + self.publish( + body, + exchange=exchange, routing_key=routing_key, + serializer=serializer or self.serializer, + compression=compression or self.compression, + retry=retry, retry_policy=_rp, + delivery_mode=delivery_mode, declare=declare, + **kwargs + ) + + signals.task_sent.send(sender=task_name, **body) + if self.send_sent_event: + evd = event_dispatcher or self.event_dispatcher + exname = exchange or self.exchange + if isinstance(exname, Exchange): + exname = exname.name + evd.publish( + 'task-sent', + { + 'uuid': task_id, + 'name': task_name, + 'args': safe_repr(task_args), + 'kwargs': safe_repr(task_kwargs), + 'retries': retries, + 'eta': eta, + 'expires': expires, + 'queue': qname, + 'exchange': exname, + 'routing_key': routing_key, + }, + self, retry=retry, retry_policy=retry_policy, + ) + return task_id + delay_task = publish_task # XXX Compat + + @cached_property + def event_dispatcher(self): + # We call Dispatcher.publish with a custom producer + # so don't need the dispatcher to be "enabled". + return self.app.events.Dispatcher(enabled=False) + + +class TaskPublisher(TaskProducer): + """Deprecated version of :class:`TaskProducer`.""" + + def __init__(self, channel=None, exchange=None, *args, **kwargs): + self.app = app_or_default(kwargs.pop('app', self.app)) + self.retry = kwargs.pop('retry', self.retry) + self.retry_policy = kwargs.pop('retry_policy', + self.retry_policy or {}) + exchange = exchange or self.exchange + if not isinstance(exchange, Exchange): + exchange = Exchange(exchange, + kwargs.pop('exchange_type', 'direct')) + self.queues = self.app.amqp.queues # shortcut + super(TaskPublisher, self).__init__(channel, exchange, *args, **kwargs) + + +class TaskConsumer(Consumer): + app = None + + def __init__(self, channel, queues=None, app=None, accept=None, **kw): + self.app = app or self.app + if accept is None: + accept = self.app.conf.CELERY_ACCEPT_CONTENT + super(TaskConsumer, self).__init__( + channel, + queues or self.app.amqp.queues.consume_from.values(), + accept=accept, + **kw + ) + + +class AMQP(object): + Connection = Connection + Consumer = Consumer + + #: compat alias to Connection + BrokerConnection = Connection + + producer_cls = TaskProducer + consumer_cls = TaskConsumer + + #: Cached and prepared routing table. + _rtable = None + + #: Underlying producer pool instance automatically + #: set by the :attr:`producer_pool`. 
+    _producer_pool = None
+
+    def __init__(self, app):
+        self.app = app
+
+    def flush_routes(self):
+        self._rtable = _routes.prepare(self.app.conf.CELERY_ROUTES)
+
+    def Queues(self, queues, create_missing=None, ha_policy=None):
+        """Create new :class:`Queues` instance, using queue defaults
+        from the current configuration."""
+        conf = self.app.conf
+        if create_missing is None:
+            create_missing = conf.CELERY_CREATE_MISSING_QUEUES
+        if ha_policy is None:
+            ha_policy = conf.CELERY_QUEUE_HA_POLICY
+        if not queues and conf.CELERY_DEFAULT_QUEUE:
+            queues = (Queue(conf.CELERY_DEFAULT_QUEUE,
+                            exchange=self.default_exchange,
+                            routing_key=conf.CELERY_DEFAULT_ROUTING_KEY), )
+        return Queues(queues, self.default_exchange, create_missing, ha_policy)
+
+    def Router(self, queues=None, create_missing=None):
+        """Returns the current task router."""
+        return _routes.Router(self.routes, queues or self.queues,
+                              self.app.either('CELERY_CREATE_MISSING_QUEUES',
+                                              create_missing), app=self.app)
+
+    @cached_property
+    def TaskConsumer(self):
+        """Return consumer configured to consume from the queues
+        we are configured for (``app.amqp.queues.consume_from``)."""
+        return self.app.subclass_with_self(self.consumer_cls,
+                                           reverse='amqp.TaskConsumer')
+    get_task_consumer = TaskConsumer  # XXX compat
+
+    @cached_property
+    def TaskProducer(self):
+        """Returns publisher used to send tasks.
+
+        You should use `app.send_task` instead.
+
+        """
+        conf = self.app.conf
+        return self.app.subclass_with_self(
+            self.producer_cls,
+            reverse='amqp.TaskProducer',
+            exchange=self.default_exchange,
+            routing_key=conf.CELERY_DEFAULT_ROUTING_KEY,
+            serializer=conf.CELERY_TASK_SERIALIZER,
+            compression=conf.CELERY_MESSAGE_COMPRESSION,
+            retry=conf.CELERY_TASK_PUBLISH_RETRY,
+            retry_policy=conf.CELERY_TASK_PUBLISH_RETRY_POLICY,
+            send_sent_event=conf.CELERY_SEND_TASK_SENT_EVENT,
+            utc=conf.CELERY_ENABLE_UTC,
+        )
+    TaskPublisher = TaskProducer  # compat
+
+    @cached_property
+    def default_queue(self):
+        return self.queues[self.app.conf.CELERY_DEFAULT_QUEUE]
+
+    @cached_property
+    def queues(self):
+        """Queue name⇒ declaration mapping."""
+        return self.Queues(self.app.conf.CELERY_QUEUES)
+
+    @queues.setter  # noqa
+    def queues(self, queues):
+        return self.Queues(queues)
+
+    @property
+    def routes(self):
+        if self._rtable is None:
+            self.flush_routes()
+        return self._rtable
+
+    @cached_property
+    def router(self):
+        return self.Router()
+
+    @property
+    def producer_pool(self):
+        if self._producer_pool is None:
+            self._producer_pool = ProducerPool(
+                self.app.pool,
+                limit=self.app.pool.limit,
+                Producer=self.TaskProducer,
+            )
+        return self._producer_pool
+    publisher_pool = producer_pool  # compat alias
+
+    @cached_property
+    def default_exchange(self):
+        return Exchange(self.app.conf.CELERY_DEFAULT_EXCHANGE,
+                        self.app.conf.CELERY_DEFAULT_EXCHANGE_TYPE)
diff --git a/awx/lib/site-packages/celery/app/annotations.py b/awx/lib/site-packages/celery/app/annotations.py
new file mode 100644
index 0000000000..3e29cb6a12
--- /dev/null
+++ b/awx/lib/site-packages/celery/app/annotations.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.app.annotations
+    ~~~~~~~~~~~~~~~~~~~~~~
+
+    Annotations is a nice term for monkey patching
+    task classes in the configuration.
+
+    This prepares and performs the annotations in the
+    :setting:`CELERY_ANNOTATIONS` setting.
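+
+    Example (illustrative)::
+
+        CELERY_ANNOTATIONS = {'tasks.add': {'rate_limit': '10/s'}}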
+ +""" +from __future__ import absolute_import + +from celery.utils.functional import firstmethod, mpromise +from celery.utils.imports import instantiate + +_first_match = firstmethod('annotate') +_first_match_any = firstmethod('annotate_any') + + +def resolve_all(anno, task): + return (r for r in (_first_match(anno, task), _first_match_any(anno)) if r) + + +class MapAnnotation(dict): + + def annotate_any(self): + try: + return dict(self['*']) + except KeyError: + pass + + def annotate(self, task): + try: + return dict(self[task.name]) + except KeyError: + pass + + +def prepare(annotations): + """Expands the :setting:`CELERY_ANNOTATIONS` setting.""" + + def expand_annotation(annotation): + if isinstance(annotation, dict): + return MapAnnotation(annotation) + elif isinstance(annotation, basestring): + return mpromise(instantiate, annotation) + return annotation + + if annotations is None: + return () + elif not isinstance(annotations, (list, tuple)): + annotations = (annotations, ) + return [expand_annotation(anno) for anno in annotations] diff --git a/awx/lib/site-packages/celery/app/base.py b/awx/lib/site-packages/celery/app/base.py new file mode 100644 index 0000000000..c4804aae0c --- /dev/null +++ b/awx/lib/site-packages/celery/app/base.py @@ -0,0 +1,516 @@ +# -*- coding: utf-8 -*- +""" + celery.app.base + ~~~~~~~~~~~~~~~ + + Actual App instance implementation. + +""" +from __future__ import absolute_import +from __future__ import with_statement + +import os +import threading +import warnings + +from collections import deque +from contextlib import contextmanager +from copy import deepcopy +from functools import wraps + +from billiard.util import register_after_fork +from kombu.clocks import LamportClock +from kombu.utils import cached_property + +from celery import platforms +from celery.exceptions import AlwaysEagerIgnored, ImproperlyConfigured +from celery.loaders import get_loader_cls +from celery.local import PromiseProxy, maybe_evaluate +from celery._state import _task_stack, _tls, get_current_app, _register_app +from celery.utils.functional import first +from celery.utils.imports import instantiate, symbol_by_name + +from .annotations import prepare as prepare_annotations +from .builtins import shared_task, load_shared_tasks +from .defaults import DEFAULTS, find_deprecated_settings +from .registry import TaskRegistry +from .utils import AppPickler, Settings, bugreport, _unpickle_app + +_EXECV = os.environ.get('FORKED_BY_MULTIPROCESSING') + + +def _unpickle_appattr(reverse_name, args): + """Given an attribute name and a list of args, gets + the attribute from the current app and calls it.""" + return get_current_app()._rgetattr(reverse_name)(*args) + + +class Celery(object): + Pickler = AppPickler + + SYSTEM = platforms.SYSTEM + IS_OSX, IS_WINDOWS = platforms.IS_OSX, platforms.IS_WINDOWS + + amqp_cls = 'celery.app.amqp:AMQP' + backend_cls = None + events_cls = 'celery.events:Events' + loader_cls = 'celery.loaders.app:AppLoader' + log_cls = 'celery.app.log:Logging' + control_cls = 'celery.app.control:Control' + registry_cls = TaskRegistry + _pool = None + + def __init__(self, main=None, loader=None, backend=None, + amqp=None, events=None, log=None, control=None, + set_as_current=True, accept_magic_kwargs=False, + tasks=None, broker=None, include=None, changes=None, + config_source=None, + **kwargs): + self.clock = LamportClock() + self.main = main + self.amqp_cls = amqp or self.amqp_cls + self.backend_cls = backend or self.backend_cls + self.events_cls = events or self.events_cls + 
self.loader_cls = loader or self.loader_cls
+        self.log_cls = log or self.log_cls
+        self.control_cls = control or self.control_cls
+        self.set_as_current = set_as_current
+        self.registry_cls = symbol_by_name(self.registry_cls)
+        self.accept_magic_kwargs = accept_magic_kwargs
+        self._config_source = config_source
+
+        self.configured = False
+        self._pending_defaults = deque()
+
+        self.finalized = False
+        self._finalize_mutex = threading.Lock()
+        self._pending = deque()
+        self._tasks = tasks
+        if not isinstance(self._tasks, TaskRegistry):
+            self._tasks = TaskRegistry(self._tasks or {})
+
+        # these options are moved to the config to
+        # simplify pickling of the app object.
+        self._preconf = changes or {}
+        if broker:
+            self._preconf['BROKER_URL'] = broker
+        if include:
+            self._preconf['CELERY_IMPORTS'] = include
+
+        if self.set_as_current:
+            self.set_current()
+
+        # See Issue #1126
+        # this is used when pickling the app object so that configuration
+        # is reread without having to pickle the contents
+        # (which is often unpickleable anyway)
+        if self._config_source:
+            self.config_from_object(self._config_source)
+
+        self.on_init()
+        _register_app(self)
+
+    def set_current(self):
+        _tls.current_app = self
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *exc_info):
+        self.close()
+
+    def close(self):
+        self._maybe_close_pool()
+
+    def on_init(self):
+        """Optional callback called at init."""
+        pass
+
+    def start(self, argv=None):
+        return instantiate(
+            'celery.bin.celery:CeleryCommand',
+            app=self).execute_from_commandline(argv)
+
+    def worker_main(self, argv=None):
+        return instantiate(
+            'celery.bin.celeryd:WorkerCommand',
+            app=self).execute_from_commandline(argv)
+
+    def task(self, *args, **opts):
+        """Creates new task class from any callable."""
+        if _EXECV and not opts.get('_force_evaluate'):
+            # When using execv the task in the original module will point to a
+            # different app, so doing things like 'add.request' will point to
+            # a different task instance. This makes sure it will always use
+            # the task instance from the current app.
+            # Really need a better solution for this :(
+            from . import shared_task as proxies_to_curapp
+            opts['_force_evaluate'] = True  # XXX Py2.5
+            return proxies_to_curapp(*args, **opts)
+
+        def inner_create_task_cls(shared=True, filter=None, **opts):
+            _filt = filter  # stupid 2to3
+
+            def _create_task_cls(fun):
+                if shared:
+                    cons = lambda app: app._task_from_fun(fun, **opts)
+                    cons.__name__ = fun.__name__
+                    shared_task(cons)
+                if self.accept_magic_kwargs:  # compat mode
+                    task = self._task_from_fun(fun, **opts)
+                    if filter:
+                        task = filter(task)
+                    return task
+
+                # return a proxy object that is only evaluated when first used
+                promise = PromiseProxy(self._task_from_fun, (fun, ), opts)
+                self._pending.append(promise)
+                if _filt:
+                    return _filt(promise)
+                return promise
+
+            return _create_task_cls
+
+        if len(args) == 1 and callable(args[0]):
+            return inner_create_task_cls(**opts)(*args)
+        if args:
+            raise TypeError(
+                'task() takes no arguments (%s given)' % (len(args, )))
+        return inner_create_task_cls(**opts)
+
+    def _task_from_fun(self, fun, **options):
+        base = options.pop('base', None) or self.Task
+
+        T = type(fun.__name__, (base, ), dict({
+            'app': self,
+            'accept_magic_kwargs': False,
+            'run': staticmethod(fun),
+            '__doc__': fun.__doc__,
+            '__module__': fun.__module__}, **options))()
+        task = self._tasks[T.name]  # return global instance.
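+        # (creating the subclass above registers an instance in
+        #  self._tasks, so the lookup hands back one shared task object)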
+ task.bind(self) + return task + + def finalize(self): + with self._finalize_mutex: + if not self.finalized: + self.finalized = True + load_shared_tasks(self) + + pending = self._pending + while pending: + maybe_evaluate(pending.popleft()) + + for task in self._tasks.itervalues(): + task.bind(self) + + def add_defaults(self, fun): + if not callable(fun): + d, fun = fun, lambda: d + if self.configured: + return self.conf.add_defaults(fun()) + self._pending_defaults.append(fun) + + def config_from_object(self, obj, silent=False): + del(self.conf) + self._config_source = obj + return self.loader.config_from_object(obj, silent=silent) + + def config_from_envvar(self, variable_name, silent=False): + module_name = os.environ.get(variable_name) + if not module_name: + if silent: + return False + raise ImproperlyConfigured(self.error_envvar_not_set % module_name) + return self.config_from_object(module_name, silent=silent) + + def config_from_cmdline(self, argv, namespace='celery'): + self.conf.update(self.loader.cmdline_config_parser(argv, namespace)) + + def send_task(self, name, args=None, kwargs=None, countdown=None, + eta=None, task_id=None, producer=None, connection=None, + result_cls=None, expires=None, queues=None, publisher=None, + **options): + producer = producer or publisher # XXX compat + if self.conf.CELERY_ALWAYS_EAGER: # pragma: no cover + warnings.warn(AlwaysEagerIgnored( + 'CELERY_ALWAYS_EAGER has no effect on send_task')) + + result_cls = result_cls or self.AsyncResult + router = self.amqp.Router(queues) + options.setdefault('compression', + self.conf.CELERY_MESSAGE_COMPRESSION) + options = router.route(options, name, args, kwargs) + with self.producer_or_acquire(producer) as producer: + return result_cls(producer.publish_task( + name, args, kwargs, + task_id=task_id, + countdown=countdown, eta=eta, + expires=expires, **options + )) + + def connection(self, hostname=None, userid=None, + password=None, virtual_host=None, port=None, ssl=None, + insist=None, connect_timeout=None, transport=None, + transport_options=None, heartbeat=None, **kwargs): + conf = self.conf + return self.amqp.Connection( + hostname or conf.BROKER_HOST, + userid or conf.BROKER_USER, + password or conf.BROKER_PASSWORD, + virtual_host or conf.BROKER_VHOST, + port or conf.BROKER_PORT, + transport=transport or conf.BROKER_TRANSPORT, + insist=self.either('BROKER_INSIST', insist), + ssl=self.either('BROKER_USE_SSL', ssl), + connect_timeout=self.either( + 'BROKER_CONNECTION_TIMEOUT', connect_timeout), + heartbeat=heartbeat, + transport_options=dict(conf.BROKER_TRANSPORT_OPTIONS, + **transport_options or {})) + broker_connection = connection + + @contextmanager + def connection_or_acquire(self, connection=None, pool=True, + *args, **kwargs): + if connection: + yield connection + else: + if pool: + with self.pool.acquire(block=True) as connection: + yield connection + else: + with self.connection() as connection: + yield connection + default_connection = connection_or_acquire # XXX compat + + @contextmanager + def producer_or_acquire(self, producer=None): + if producer: + yield producer + else: + with self.amqp.producer_pool.acquire(block=True) as producer: + yield producer + default_producer = producer_or_acquire # XXX compat + + def with_default_connection(self, fun): + """With any function accepting a `connection` + keyword argument, establishes a default connection if one is + not already passed to it. + + Any automatically established connection will be closed after + the function returns. 
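+
+        Example (the function name is hypothetical)::
+
+            @app.with_default_connection
+            def publish(message, connection=None):
+                ...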
+ + **Deprecated** + + Use ``with app.connection_or_acquire(connection)`` instead. + + """ + @wraps(fun) + def _inner(*args, **kwargs): + connection = kwargs.pop('connection', None) + with self.connection_or_acquire(connection) as c: + return fun(*args, **dict(kwargs, connection=c)) + return _inner + + def prepare_config(self, c): + """Prepare configuration before it is merged with the defaults.""" + return find_deprecated_settings(c) + + def now(self): + return self.loader.now(utc=self.conf.CELERY_ENABLE_UTC) + + def mail_admins(self, subject, body, fail_silently=False): + if self.conf.ADMINS: + to = [admin_email for _, admin_email in self.conf.ADMINS] + return self.loader.mail_admins( + subject, body, fail_silently, to=to, + sender=self.conf.SERVER_EMAIL, + host=self.conf.EMAIL_HOST, + port=self.conf.EMAIL_PORT, + user=self.conf.EMAIL_HOST_USER, + password=self.conf.EMAIL_HOST_PASSWORD, + timeout=self.conf.EMAIL_TIMEOUT, + use_ssl=self.conf.EMAIL_USE_SSL, + use_tls=self.conf.EMAIL_USE_TLS, + ) + + def select_queues(self, queues=None): + return self.amqp.queues.select_subset(queues) + + def either(self, default_key, *values): + """Fallback to the value of a configuration key if none of the + `*values` are true.""" + return first(None, values) or self.conf.get(default_key) + + def bugreport(self): + return bugreport(self) + + def _get_backend(self): + from celery.backends import get_backend_by_url + backend, url = get_backend_by_url( + self.backend_cls or self.conf.CELERY_RESULT_BACKEND, + self.loader) + return backend(app=self, url=url) + + def _get_config(self): + self.configured = True + s = Settings({}, [self.prepare_config(self.loader.conf), + deepcopy(DEFAULTS)]) + + # load lazy config dict initializers. + pending = self._pending_defaults + while pending: + s.add_defaults(pending.popleft()()) + if self._preconf: + for key, value in self._preconf.iteritems(): + setattr(s, key, value) + return s + + def _after_fork(self, obj_): + self._maybe_close_pool() + + def _maybe_close_pool(self): + if self._pool: + self._pool.force_close_all() + self._pool = None + amqp = self.amqp + if amqp._producer_pool: + amqp._producer_pool.force_close_all() + amqp._producer_pool = None + + def create_task_cls(self): + """Creates a base task class using default configuration + taken from this app.""" + return self.subclass_with_self('celery.app.task:Task', name='Task', + attribute='_app', abstract=True) + + def subclass_with_self(self, Class, name=None, attribute='app', + reverse=None, **kw): + """Subclass an app-compatible class by setting its app attribute + to be this app instance. + + App-compatible means that the class has a class attribute that + provides the default app it should use, e.g. + ``class Foo: app = None``. + + :param Class: The app-compatible class to subclass. + :keyword name: Custom name for the target class. + :keyword attribute: Name of the attribute holding the app, + default is 'app'. 
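+
+        Example (mirrors how this app builds its own classes)::
+
+            AsyncResult = app.subclass_with_self('celery.result:AsyncResult')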
+ + """ + Class = symbol_by_name(Class) + reverse = reverse if reverse else Class.__name__ + + def __reduce__(self): + return _unpickle_appattr, (reverse, self.__reduce_args__()) + + attrs = dict({attribute: self}, __module__=Class.__module__, + __doc__=Class.__doc__, __reduce__=__reduce__, **kw) + + return type(name or Class.__name__, (Class, ), attrs) + + def _rgetattr(self, path): + return reduce(getattr, [self] + path.split('.')) + + def __repr__(self): + return '<%s %s:0x%x>' % (self.__class__.__name__, + self.main or '__main__', id(self), ) + + def __reduce__(self): + # Reduce only pickles the configuration changes, + # so the default configuration doesn't have to be passed + # between processes. + return ( + _unpickle_app, + (self.__class__, self.Pickler) + self.__reduce_args__(), + ) + + def __reduce_args__(self): + return (self.main, self.conf.changes, self.loader_cls, + self.backend_cls, self.amqp_cls, self.events_cls, + self.log_cls, self.control_cls, self.accept_magic_kwargs, + self._config_source) + + @cached_property + def Worker(self): + return self.subclass_with_self('celery.apps.worker:Worker') + + @cached_property + def WorkController(self, **kwargs): + return self.subclass_with_self('celery.worker:WorkController') + + @cached_property + def Beat(self, **kwargs): + return self.subclass_with_self('celery.apps.beat:Beat') + + @cached_property + def TaskSet(self): + return self.subclass_with_self('celery.task.sets:TaskSet') + + @cached_property + def Task(self): + return self.create_task_cls() + + @cached_property + def annotations(self): + return prepare_annotations(self.conf.CELERY_ANNOTATIONS) + + @cached_property + def AsyncResult(self): + return self.subclass_with_self('celery.result:AsyncResult') + + @cached_property + def GroupResult(self): + return self.subclass_with_self('celery.result:GroupResult') + + @cached_property + def TaskSetResult(self): # XXX compat + return self.subclass_with_self('celery.result:TaskSetResult') + + @property + def pool(self): + if self._pool is None: + register_after_fork(self, self._after_fork) + limit = self.conf.BROKER_POOL_LIMIT + self._pool = self.connection().Pool(limit=limit) + return self._pool + + @property + def current_task(self): + return _task_stack.top + + @cached_property + def amqp(self): + return instantiate(self.amqp_cls, app=self) + + @cached_property + def backend(self): + return self._get_backend() + + @cached_property + def conf(self): + return self._get_config() + + @cached_property + def control(self): + return instantiate(self.control_cls, app=self) + + @cached_property + def events(self): + return instantiate(self.events_cls, app=self) + + @cached_property + def loader(self): + return get_loader_cls(self.loader_cls)(app=self) + + @cached_property + def log(self): + return instantiate(self.log_cls, app=self) + + @cached_property + def tasks(self): + self.finalize() + return self._tasks +App = Celery # compat diff --git a/awx/lib/site-packages/celery/app/builtins.py b/awx/lib/site-packages/celery/app/builtins.py new file mode 100644 index 0000000000..0c3582f5ed --- /dev/null +++ b/awx/lib/site-packages/celery/app/builtins.py @@ -0,0 +1,374 @@ +# -*- coding: utf-8 -*- +""" + celery.app.builtins + ~~~~~~~~~~~~~~~~~~~ + + Built-in tasks that are always available in all + app instances. E.g. chord, group and xmap. 
+
+"""
+from __future__ import absolute_import
+from __future__ import with_statement
+
+from collections import deque
+
+from celery._state import get_current_worker_task
+from celery.utils import uuid
+
+#: global list of functions defining tasks that should be
+#: added to all apps.
+_shared_tasks = []
+
+
+def shared_task(constructor):
+    """Decorator that specifies that the decorated function is a function
+    that generates a built-in task.
+
+    The function will then be called for every new app instance created
+    (lazily, so more exactly when the task registry for that app is needed).
+    """
+    _shared_tasks.append(constructor)
+    return constructor
+
+
+def load_shared_tasks(app):
+    """Loads the built-in tasks for an app instance."""
+    for constructor in _shared_tasks:
+        constructor(app)
+
+
+@shared_task
+def add_backend_cleanup_task(app):
+    """The backend cleanup task can be used to clean up the default result
+    backend.
+
+    This task is also added to the periodic task schedule so that it is
+    run every day at midnight, but :program:`celerybeat` must be running
+    for this to be effective.
+
+    Note that not all backends do anything for this; what needs to be
+    done at cleanup is up to each backend, and some backends
+    may even clean up in realtime so that a periodic cleanup is not necessary.
+
+    """
+
+    @app.task(name='celery.backend_cleanup', _force_evaluate=True)
+    def backend_cleanup():
+        app.backend.cleanup()
+    return backend_cleanup
+
+
+@shared_task
+def add_unlock_chord_task(app):
+    """The unlock chord task is used by result backends that don't
+    have native chord support.
+
+    It creates a task chain polling the header for completion.
+
+    """
+    from celery.canvas import subtask
+    from celery.exceptions import ChordError
+    from celery.result import from_serializable
+
+    default_propagate = app.conf.CELERY_CHORD_PROPAGATES
+
+    @app.task(name='celery.chord_unlock', max_retries=None,
+              default_retry_delay=1, ignore_result=True, _force_evaluate=True)
+    def unlock_chord(group_id, callback, interval=None, propagate=None,
+                     max_retries=None, result=None,
+                     Result=app.AsyncResult, GroupResult=app.GroupResult,
+                     from_serializable=from_serializable):
+        # if propagate is disabled exceptions raised by chord tasks
+        # will be sent as part of the result list to the chord callback.
+        # Since 3.1 propagate will be enabled by default, and instead
+        # the chord callback changes state to FAILURE with the
+        # exception set to ChordError.
+        propagate = default_propagate if propagate is None else propagate
+
+        # check if the task group is ready, and if so apply the callback.
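+        # (join_native, chosen below when the result backend supports
+        #  it, fetches all results in one operation; join() polls each
+        #  result individually.)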
+ deps = GroupResult( + group_id, + [from_serializable(r, app=app) for r in result], + ) + j = deps.join_native if deps.supports_native_join else deps.join + + if deps.ready(): + callback = subtask(callback) + try: + ret = j(propagate=propagate) + except Exception, exc: + try: + culprit = deps._failed_join_report().next() + reason = 'Dependency %s raised %r' % (culprit.id, exc) + except StopIteration: + reason = repr(exc) + app._tasks[callback.task].backend.fail_from_current_stack( + callback.id, exc=ChordError(reason), + ) + else: + try: + callback.delay(ret) + except Exception, exc: + app._tasks[callback.task].backend.fail_from_current_stack( + callback.id, + exc=ChordError('Callback error: %r' % (exc, )), + ) + else: + return unlock_chord.retry(countdown=interval, + max_retries=max_retries) + return unlock_chord + + +@shared_task +def add_map_task(app): + from celery.canvas import subtask + + @app.task(name='celery.map', _force_evaluate=True) + def xmap(task, it): + task = subtask(task).type + return [task(value) for value in it] + return xmap + + +@shared_task +def add_starmap_task(app): + from celery.canvas import subtask + + @app.task(name='celery.starmap', _force_evaluate=True) + def xstarmap(task, it): + task = subtask(task).type + return [task(*args) for args in it] + return xstarmap + + +@shared_task +def add_chunk_task(app): + from celery.canvas import chunks as _chunks + + @app.task(name='celery.chunks', _force_evaluate=True) + def chunks(task, it, n): + return _chunks.apply_chunks(task, it, n) + return chunks + + +@shared_task +def add_group_task(app): + _app = app + from celery.canvas import maybe_subtask, subtask + from celery.result import from_serializable + + class Group(app.Task): + app = _app + name = 'celery.group' + accept_magic_kwargs = False + + def run(self, tasks, result, group_id, partial_args): + app = self.app + result = from_serializable(result, app) + # any partial args are added to all tasks in the group + taskit = (subtask(task).clone(partial_args) + for i, task in enumerate(tasks)) + if self.request.is_eager or app.conf.CELERY_ALWAYS_EAGER: + return app.GroupResult( + result.id, + [stask.apply(group_id=group_id) for stask in taskit], + ) + with app.producer_or_acquire() as pub: + [stask.apply_async(group_id=group_id, publisher=pub, + add_to_parent=False) for stask in taskit] + parent = get_current_worker_task() + if parent: + parent.request.children.append(result) + return result + + def prepare(self, options, tasks, args, **kwargs): + AsyncResult = self.AsyncResult + options['group_id'] = group_id = ( + options.setdefault('task_id', uuid())) + + def prepare_member(task): + task = maybe_subtask(task) + opts = task.options + opts['group_id'] = group_id + try: + tid = opts['task_id'] + except KeyError: + tid = opts['task_id'] = uuid() + return task, AsyncResult(tid) + + try: + tasks, results = zip(*[prepare_member(task) for task in tasks]) + except ValueError: # tasks empty + tasks, results = [], [] + return (tasks, self.app.GroupResult(group_id, results), + group_id, args) + + def apply_async(self, partial_args=(), kwargs={}, **options): + if self.app.conf.CELERY_ALWAYS_EAGER: + return self.apply(partial_args, kwargs, **options) + tasks, result, gid, args = self.prepare( + options, args=partial_args, **kwargs + ) + super(Group, self).apply_async(( + list(tasks), result.serializable(), gid, args), **options + ) + return result + + def apply(self, args=(), kwargs={}, **options): + return super(Group, self).apply( + self.prepare(options, args=args, **kwargs), 
+ **options).get() + return Group + + +@shared_task +def add_chain_task(app): + from celery.canvas import Signature, chord, group, maybe_subtask + _app = app + + class Chain(app.Task): + app = _app + name = 'celery.chain' + accept_magic_kwargs = False + + def prepare_steps(self, args, tasks): + steps = deque(tasks) + next_step = prev_task = prev_res = None + tasks, results = [], [] + i = 0 + while steps: + # First task get partial args from chain. + task = maybe_subtask(steps.popleft()) + task = task.clone() if i else task.clone(args) + res = task._freeze() + i += 1 + + if isinstance(task, group): + # automatically upgrade group(..) | s to chord(group, s) + try: + next_step = steps.popleft() + # for chords we freeze by pretending it's a normal + # task instead of a group. + res = Signature._freeze(task) + task = chord(task, body=next_step, task_id=res.task_id) + except IndexError: + pass + if prev_task: + # link previous task to this task. + prev_task.link(task) + # set the results parent attribute. + res.parent = prev_res + + results.append(res) + tasks.append(task) + prev_task, prev_res = task, res + + return tasks, results + + def apply_async(self, args=(), kwargs={}, group_id=None, chord=None, + task_id=None, **options): + if self.app.conf.CELERY_ALWAYS_EAGER: + return self.apply(args, kwargs, **options) + options.pop('publisher', None) + tasks, results = self.prepare_steps(args, kwargs['tasks']) + result = results[-1] + if group_id: + tasks[-1].set(group_id=group_id) + if chord: + tasks[-1].set(chord=chord) + if task_id: + tasks[-1].set(task_id=task_id) + result = tasks[-1].type.AsyncResult(task_id) + tasks[0].apply_async() + return result + + def apply(self, args=(), kwargs={}, subtask=maybe_subtask, **options): + last, fargs = None, args # fargs passed to first task only + for task in kwargs['tasks']: + res = subtask(task).clone(fargs).apply(last and (last.get(), )) + res.parent, last, fargs = last, res, None + return last + return Chain + + +@shared_task +def add_chord_task(app): + """Every chord is executed in a dedicated task, so that the chord + can be used as a subtask, and this generates the task + responsible for that.""" + from celery import group + from celery.canvas import maybe_subtask + _app = app + default_propagate = app.conf.CELERY_CHORD_PROPAGATES + + class Chord(app.Task): + app = _app + name = 'celery.chord' + accept_magic_kwargs = False + ignore_result = False + + def run(self, header, body, partial_args=(), interval=1, countdown=1, + max_retries=None, propagate=None, eager=False, **kwargs): + propagate = default_propagate if propagate is None else propagate + group_id = uuid() + AsyncResult = self.app.AsyncResult + prepare_member = self._prepare_member + + # - convert back to group if serialized + tasks = header.tasks if isinstance(header, group) else header + header = group([maybe_subtask(s).clone() for s in tasks]) + # - eager applies the group inline + if eager: + return header.apply(args=partial_args, task_id=group_id) + + results = [AsyncResult(prepare_member(task, body, group_id)) + for task in header.tasks] + + # - fallback implementations schedules the chord_unlock task here + app.backend.on_chord_apply(group_id, body, + interval=interval, + countdown=countdown, + max_retries=max_retries, + propagate=propagate, + result=results) + # - call the header group, returning the GroupResult. + # XXX Python 2.5 doesn't allow kwargs after star-args. 
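+        # A caller-side sketch of the flow implemented here (``add`` and
+        # ``tsum`` are hypothetical registered tasks)::
+        #
+        #     >>> from celery import chord
+        #     >>> res = chord(add.s(i, i) for i in xrange(10))(tsum.s())
+        #     >>> res.get()   # -> 90, once all header tasks have finished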
+ return header(*partial_args, **{'task_id': group_id}) + + def _prepare_member(self, task, body, group_id): + opts = task.options + # d.setdefault would work but generating uuid's are expensive + try: + task_id = opts['task_id'] + except KeyError: + task_id = opts['task_id'] = uuid() + opts.update(chord=body, group_id=group_id) + return task_id + + def apply_async(self, args=(), kwargs={}, task_id=None, **options): + if self.app.conf.CELERY_ALWAYS_EAGER: + return self.apply(args, kwargs, **options) + group_id = options.pop('group_id', None) + chord = options.pop('chord', None) + header = kwargs.pop('header') + body = kwargs.pop('body') + header, body = (list(maybe_subtask(header)), + maybe_subtask(body)) + if group_id: + body.set(group_id=group_id) + if chord: + body.set(chord=chord) + callback_id = body.options.setdefault('task_id', task_id or uuid()) + parent = super(Chord, self).apply_async((header, body, args), + kwargs, **options) + body_result = self.AsyncResult(callback_id) + body_result.parent = parent + return body_result + + def apply(self, args=(), kwargs={}, propagate=True, **options): + body = kwargs['body'] + res = super(Chord, self).apply(args, dict(kwargs, eager=True), + **options) + return maybe_subtask(body).apply( + args=(res.get(propagate=propagate).get(), )) + return Chord diff --git a/awx/lib/site-packages/celery/app/control.py b/awx/lib/site-packages/celery/app/control.py new file mode 100644 index 0000000000..a3e8b23efa --- /dev/null +++ b/awx/lib/site-packages/celery/app/control.py @@ -0,0 +1,270 @@ +# -*- coding: utf-8 -*- +""" + celery.app.control + ~~~~~~~~~~~~~~~~~~~ + + Client for worker remote control commands. + Server implementation is in :mod:`celery.worker.control`. + +""" +from __future__ import absolute_import +from __future__ import with_statement + +from kombu.pidbox import Mailbox +from kombu.utils import cached_property + +from . 
import app_or_default + + +def flatten_reply(reply): + nodes = {} + for item in reply: + nodes.update(item) + return nodes + + +class Inspect(object): + app = None + + def __init__(self, destination=None, timeout=1, callback=None, + connection=None, app=None, limit=None): + self.app = app or self.app + self.destination = destination + self.timeout = timeout + self.callback = callback + self.connection = connection + self.limit = limit + + def _prepare(self, reply): + if not reply: + return + by_node = flatten_reply(reply) + if self.destination and \ + not isinstance(self.destination, (list, tuple)): + return by_node.get(self.destination) + return by_node + + def _request(self, command, **kwargs): + return self._prepare(self.app.control.broadcast( + command, + arguments=kwargs, + destination=self.destination, + callback=self.callback, + connection=self.connection, + limit=self.limit, + timeout=self.timeout, reply=True, + )) + + def report(self): + return self._request('report') + + def active(self, safe=False): + return self._request('dump_active', safe=safe) + + def scheduled(self, safe=False): + return self._request('dump_schedule', safe=safe) + + def reserved(self, safe=False): + return self._request('dump_reserved', safe=safe) + + def stats(self): + return self._request('stats') + + def revoked(self): + return self._request('dump_revoked') + + def registered(self, *taskinfoitems): + return self._request('dump_tasks', taskinfoitems=taskinfoitems) + registered_tasks = registered + + def ping(self): + return self._request('ping') + + def active_queues(self): + return self._request('active_queues') + + def conf(self): + return self._request('dump_conf') + + +class Control(object): + Mailbox = Mailbox + + def __init__(self, app=None): + self.app = app_or_default(app) + self.mailbox = self.Mailbox('celery', type='fanout', + accept=self.app.conf.CELERY_ACCEPT_CONTENT) + + @cached_property + def inspect(self): + return self.app.subclass_with_self(Inspect, reverse='control.inspect') + + def purge(self, connection=None): + """Discard all waiting tasks. + + This will ignore all tasks waiting for execution, and they will + be deleted from the messaging server. + + :returns: the number of tasks discarded. + + """ + with self.app.connection_or_acquire(connection) as conn: + return self.app.amqp.TaskConsumer(conn).purge() + discard_all = purge + + def revoke(self, task_id, destination=None, terminate=False, + signal='SIGTERM', **kwargs): + """Tell all (or specific) workers to revoke a task by id. + + If a task is revoked, the workers will ignore the task and + not execute it after all. + + :param task_id: Id of the task to revoke. + :keyword terminate: Also terminate the process currently working + on the task (if any). + :keyword signal: Name of signal to send to process if terminate. + Default is TERM. + + See :meth:`broadcast` for supported keyword arguments. + + """ + return self.broadcast('revoke', destination=destination, + arguments={'task_id': task_id, + 'terminate': terminate, + 'signal': signal}, **kwargs) + + def ping(self, destination=None, timeout=1, **kwargs): + """Ping all (or specific) workers. + + Returns answer from alive workers. + + See :meth:`broadcast` for supported keyword arguments. + + """ + return self.broadcast('ping', reply=True, destination=destination, + timeout=timeout, **kwargs) + + def rate_limit(self, task_name, rate_limit, destination=None, **kwargs): + """Tell all (or specific) workers to set a new rate limit + for task by type. 
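+
+        Example (assuming ``app`` is the current app and ``tasks.add``
+        a registered task)::
+
+            >>> app.control.rate_limit('tasks.add', '10/m')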
+
+        :param task_name: Name of task to change rate limit for.
+        :param rate_limit: The rate limit as tasks per second, or a rate limit
+            string (`'100/m'`, etc.;
+            see :attr:`celery.task.base.Task.rate_limit` for
+            more information).
+
+        See :meth:`broadcast` for supported keyword arguments.
+
+        """
+        return self.broadcast('rate_limit', destination=destination,
+                              arguments={'task_name': task_name,
+                                         'rate_limit': rate_limit},
+                              **kwargs)
+
+    def add_consumer(self, queue, exchange=None, exchange_type='direct',
+                     routing_key=None, options=None, **kwargs):
+        """Tell all (or specific) workers to start consuming from a new queue.
+
+        Only the queue name is required: if only the queue is specified
+        then the exchange/routing key will be set to the same name
+        (like automatic queues do).
+
+        .. note::
+
+            This command does not respect the default queue/exchange
+            options in the configuration.
+
+        :param queue: Name of queue to start consuming from.
+        :keyword exchange: Optional name of exchange.
+        :keyword exchange_type: Type of exchange (defaults to 'direct').
+        :keyword routing_key: Optional routing key.
+        :keyword options: Additional options as supported
+            by :meth:`kombu.entity.Queue.from_dict`.
+
+        See :meth:`broadcast` for supported keyword arguments.
+
+        """
+        return self.broadcast(
+            'add_consumer',
+            arguments=dict({'queue': queue, 'exchange': exchange,
+                            'exchange_type': exchange_type,
+                            'routing_key': routing_key}, **options or {}),
+            **kwargs
+        )
+
+    def cancel_consumer(self, queue, **kwargs):
+        """Tell all (or specific) workers to stop consuming from ``queue``.
+
+        Supports the same keyword arguments as :meth:`broadcast`.
+
+        """
+        return self.broadcast(
+            'cancel_consumer', arguments={'queue': queue}, **kwargs
+        )
+
+    def time_limit(self, task_name, soft=None, hard=None, **kwargs):
+        """Tell all (or specific) workers to set time limits for
+        a task by type.
+
+        :param task_name: Name of task to change time limits for.
+        :keyword soft: New soft time limit (in seconds).
+        :keyword hard: New hard time limit (in seconds).
+
+        Any additional keyword arguments are passed on to :meth:`broadcast`.
+
+        """
+        return self.broadcast(
+            'time_limit',
+            arguments={'task_name': task_name,
+                       'hard': hard, 'soft': soft}, **kwargs)
+
+    def enable_events(self, destination=None, **kwargs):
+        """Tell all (or specific) workers to enable events."""
+        return self.broadcast('enable_events', {}, destination, **kwargs)
+
+    def disable_events(self, destination=None, **kwargs):
+        """Tell all (or specific) workers to disable events."""
+        return self.broadcast('disable_events', {}, destination, **kwargs)
+
+    def pool_grow(self, n=1, destination=None, **kwargs):
+        """Tell all (or specific) workers to grow the pool by ``n``.
+
+        Supports the same arguments as :meth:`broadcast`.
+
+        """
+        return self.broadcast('pool_grow', {'n': n}, destination, **kwargs)
+
+    def pool_shrink(self, n=1, destination=None, **kwargs):
+        """Tell all (or specific) workers to shrink the pool by ``n``.
+
+        Supports the same arguments as :meth:`broadcast`.
+
+        """
+        return self.broadcast('pool_shrink', {'n': n}, destination, **kwargs)
+
+    def broadcast(self, command, arguments=None, destination=None,
+                  connection=None, reply=False, timeout=1, limit=None,
+                  callback=None, channel=None, **extra_kwargs):
+        """Broadcast a control command to the celery workers.
+
+        :param command: Name of command to send.
+        :param arguments: Keyword arguments for the command.
+        :keyword destination: If set, a list of the hosts to send the
+            command to, when empty broadcast to all workers.
+        :keyword connection: Custom broker connection to use, if not set,
+            a connection will be established automatically.
+        :keyword reply: Wait for and return the reply.
+        :keyword timeout: Timeout in seconds to wait for the reply.
+        :keyword limit: Limit number of replies.
+        :keyword callback: Callback called immediately for each reply
+            received.
+
+        """
+        with self.app.connection_or_acquire(connection) as conn:
+            arguments = dict(arguments or {}, **extra_kwargs)
+            return self.mailbox(conn)._broadcast(
+                command, arguments, destination, reply, timeout,
+                limit, callback, channel=channel,
+            )
diff --git a/awx/lib/site-packages/celery/app/defaults.py b/awx/lib/site-packages/celery/app/defaults.py
new file mode 100644
index 0000000000..0cb1037256
--- /dev/null
+++ b/awx/lib/site-packages/celery/app/defaults.py
@@ -0,0 +1,267 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.app.defaults
+    ~~~~~~~~~~~~~~~~~~~
+
+    Configuration introspection and defaults.
+
+"""
+from __future__ import absolute_import
+
+import sys
+
+from collections import deque
+from datetime import timedelta
+
+from celery.utils import strtobool
+from celery.utils.functional import memoize
+
+is_jython = sys.platform.startswith('java')
+is_pypy = hasattr(sys, 'pypy_version_info')
+
+DEFAULT_POOL = 'processes'
+if is_jython:
+    DEFAULT_POOL = 'threads'
+elif is_pypy:
+    if sys.pypy_version_info[0:3] < (1, 5, 0):
+        DEFAULT_POOL = 'solo'
+    else:
+        DEFAULT_POOL = 'processes'
+
+
+DEFAULT_PROCESS_LOG_FMT = """
+    [%(asctime)s: %(levelname)s/%(processName)s] %(message)s
+""".strip()
+DEFAULT_LOG_FMT = '[%(asctime)s: %(levelname)s] %(message)s'
+DEFAULT_TASK_LOG_FMT = """[%(asctime)s: %(levelname)s/%(processName)s] \
+%(task_name)s[%(task_id)s]: %(message)s"""
+
+_BROKER_OLD = {'deprecate_by': '2.5', 'remove_by': '4.0', 'alt': 'BROKER_URL'}
+_REDIS_OLD = {'deprecate_by': '2.5', 'remove_by': '4.0',
+              'alt': 'URL form of CELERY_RESULT_BACKEND'}
+
+
+class Option(object):
+    alt = None
+    deprecate_by = None
+    remove_by = None
+    typemap = dict(string=str, int=int, float=float, any=lambda v: v,
+                   bool=strtobool, dict=dict, tuple=tuple)
+
+    def __init__(self, default=None, *args, **kwargs):
+        self.default = default
+        self.type = kwargs.get('type') or 'string'
+        for attr, value in kwargs.iteritems():
+            setattr(self, attr, value)
+
+    def to_python(self, value):
+        return self.typemap[self.type](value)
+
+    def __repr__(self):
+        return '<Option: type->%s default->%r>' % (self.type, self.default)
+
+
+NAMESPACES = {
+    'BROKER': {
+        'URL': Option(None, type='string'),
+        'CONNECTION_TIMEOUT': Option(4, type='float'),
+        'CONNECTION_RETRY': Option(True, type='bool'),
+        'CONNECTION_MAX_RETRIES': Option(100, type='int'),
+        'HEARTBEAT': Option(None, type='int'),
+        'HEARTBEAT_CHECKRATE': Option(3.0, type='int'),
+        'POOL_LIMIT': Option(10, type='int'),
+        'INSIST': Option(False, type='bool',
+                         deprecate_by='2.4', remove_by='4.0'),
+        'USE_SSL': Option(False, type='bool'),
+        'TRANSPORT': Option(type='string'),
+        'TRANSPORT_OPTIONS': Option({}, type='dict'),
+        'HOST': Option(type='string', **_BROKER_OLD),
+        'PORT': Option(type='int', **_BROKER_OLD),
+        'USER': Option(type='string', **_BROKER_OLD),
+        'PASSWORD': Option(type='string', **_BROKER_OLD),
+        'VHOST': Option(type='string', **_BROKER_OLD),
+    },
+    'CASSANDRA': {
+        'COLUMN_FAMILY': Option(type='string'),
+        'DETAILED_MODE': Option(False, type='bool'),
+        'KEYSPACE': Option(type='string'),
+        'READ_CONSISTENCY': 
Option(type='string'), + 'SERVERS': Option(type='list'), + 'WRITE_CONSISTENCY': Option(type='string'), + }, + 'CELERY': { + 'ACCEPT_CONTENT': Option(None, type='any'), + 'ACKS_LATE': Option(False, type='bool'), + 'ALWAYS_EAGER': Option(False, type='bool'), + 'AMQP_TASK_RESULT_EXPIRES': Option( + type='float', deprecate_by='2.5', remove_by='4.0', + alt='CELERY_TASK_RESULT_EXPIRES' + ), + 'AMQP_TASK_RESULT_CONNECTION_MAX': Option( + 1, type='int', remove_by='2.5', alt='BROKER_POOL_LIMIT', + ), + 'ANNOTATIONS': Option(type='any'), + 'BROADCAST_QUEUE': Option('celeryctl'), + 'BROADCAST_EXCHANGE': Option('celeryctl'), + 'BROADCAST_EXCHANGE_TYPE': Option('fanout'), + 'CACHE_BACKEND': Option(), + 'CACHE_BACKEND_OPTIONS': Option({}, type='dict'), + # chord propagate will be True from v3.1 + 'CHORD_PROPAGATES': Option(False, type='bool'), + 'CREATE_MISSING_QUEUES': Option(True, type='bool'), + 'DEFAULT_RATE_LIMIT': Option(type='string'), + 'DISABLE_RATE_LIMITS': Option(False, type='bool'), + 'DEFAULT_ROUTING_KEY': Option('celery'), + 'DEFAULT_QUEUE': Option('celery'), + 'DEFAULT_EXCHANGE': Option('celery'), + 'DEFAULT_EXCHANGE_TYPE': Option('direct'), + 'DEFAULT_DELIVERY_MODE': Option(2, type='string'), + 'EAGER_PROPAGATES_EXCEPTIONS': Option(False, type='bool'), + 'ENABLE_UTC': Option(True, type='bool'), + 'EVENT_SERIALIZER': Option('json'), + 'IMPORTS': Option((), type='tuple'), + 'INCLUDE': Option((), type='tuple'), + 'IGNORE_RESULT': Option(False, type='bool'), + 'MAX_CACHED_RESULTS': Option(5000, type='int'), + 'MESSAGE_COMPRESSION': Option(type='string'), + 'MONGODB_BACKEND_SETTINGS': Option(type='dict'), + 'REDIS_HOST': Option(type='string', **_REDIS_OLD), + 'REDIS_PORT': Option(type='int', **_REDIS_OLD), + 'REDIS_DB': Option(type='int', **_REDIS_OLD), + 'REDIS_PASSWORD': Option(type='string', **_REDIS_OLD), + 'REDIS_MAX_CONNECTIONS': Option(type='int'), + 'RESULT_BACKEND': Option(type='string'), + 'RESULT_DB_SHORT_LIVED_SESSIONS': Option(False, type='bool'), + 'RESULT_DBURI': Option(), + 'RESULT_ENGINE_OPTIONS': Option(type='dict'), + 'RESULT_EXCHANGE': Option('celeryresults'), + 'RESULT_EXCHANGE_TYPE': Option('direct'), + 'RESULT_SERIALIZER': Option('pickle'), + 'RESULT_PERSISTENT': Option(False, type='bool'), + 'ROUTES': Option(type='any'), + 'SEND_EVENTS': Option(False, type='bool'), + 'SEND_TASK_ERROR_EMAILS': Option(False, type='bool'), + 'SEND_TASK_SENT_EVENT': Option(False, type='bool'), + 'STORE_ERRORS_EVEN_IF_IGNORED': Option(False, type='bool'), + 'TASK_ERROR_WHITELIST': Option( + (), type='tuple', deprecate_by='2.5', remove_by='4.0', + ), + 'TASK_PUBLISH_RETRY': Option(True, type='bool'), + 'TASK_PUBLISH_RETRY_POLICY': Option({ + 'max_retries': 3, + 'interval_start': 0, + 'interval_max': 1, + 'interval_step': 0.2}, type='dict'), + 'TASK_RESULT_EXPIRES': Option(timedelta(days=1), type='float'), + 'TASK_SERIALIZER': Option('pickle'), + 'TIMEZONE': Option(type='string'), + 'TRACK_STARTED': Option(False, type='bool'), + 'REDIRECT_STDOUTS': Option(True, type='bool'), + 'REDIRECT_STDOUTS_LEVEL': Option('WARNING'), + 'QUEUES': Option(type='dict'), + 'QUEUE_HA_POLICY': Option(None, type='string'), + 'SECURITY_KEY': Option(type='string'), + 'SECURITY_CERTIFICATE': Option(type='string'), + 'SECURITY_CERT_STORE': Option(type='string'), + 'WORKER_DIRECT': Option(False, type='bool'), + }, + 'CELERYD': { + 'AUTOSCALER': Option('celery.worker.autoscale.Autoscaler'), + 'AUTORELOADER': Option('celery.worker.autoreload.Autoreloader'), + 'BOOT_STEPS': Option((), type='tuple'), + 'CONCURRENCY': 
Option(0, type='int'), + 'TIMER': Option(type='string'), + 'TIMER_PRECISION': Option(1.0, type='float'), + 'FORCE_EXECV': Option(False, type='bool'), + 'HIJACK_ROOT_LOGGER': Option(True, type='bool'), + 'CONSUMER': Option(type='string'), + 'LOG_FORMAT': Option(DEFAULT_PROCESS_LOG_FMT), + 'LOG_COLOR': Option(type='bool'), + 'LOG_LEVEL': Option('WARN', deprecate_by='2.4', remove_by='4.0', + alt='--loglevel argument'), + 'LOG_FILE': Option(deprecate_by='2.4', remove_by='4.0', + alt='--logfile argument'), + 'MEDIATOR': Option('celery.worker.mediator.Mediator'), + 'MAX_TASKS_PER_CHILD': Option(type='int'), + 'POOL': Option(DEFAULT_POOL), + 'POOL_PUTLOCKS': Option(True, type='bool'), + 'POOL_RESTARTS': Option(False, type='bool'), + 'PREFETCH_MULTIPLIER': Option(4, type='int'), + 'STATE_DB': Option(), + 'TASK_LOG_FORMAT': Option(DEFAULT_TASK_LOG_FMT), + 'TASK_SOFT_TIME_LIMIT': Option(type='float'), + 'TASK_TIME_LIMIT': Option(type='float'), + 'WORKER_LOST_WAIT': Option(10.0, type='float') + }, + 'CELERYBEAT': { + 'SCHEDULE': Option({}, type='dict'), + 'SCHEDULER': Option('celery.beat.PersistentScheduler'), + 'SCHEDULE_FILENAME': Option('celerybeat-schedule'), + 'MAX_LOOP_INTERVAL': Option(0, type='float'), + 'LOG_LEVEL': Option('INFO', deprecate_by='2.4', remove_by='4.0', + alt='--loglevel argument'), + 'LOG_FILE': Option(deprecate_by='2.4', remove_by='4.0', + alt='--logfile argument'), + }, + 'CELERYMON': { + 'LOG_LEVEL': Option('INFO', deprecate_by='2.4', remove_by='4.0', + alt='--loglevel argument'), + 'LOG_FILE': Option(deprecate_by='2.4', remove_by='4.0', + alt='--logfile argument'), + 'LOG_FORMAT': Option(DEFAULT_LOG_FMT), + }, + 'EMAIL': { + 'HOST': Option('localhost'), + 'PORT': Option(25, type='int'), + 'HOST_USER': Option(), + 'HOST_PASSWORD': Option(), + 'TIMEOUT': Option(2, type='float'), + 'USE_SSL': Option(False, type='bool'), + 'USE_TLS': Option(False, type='bool'), + }, + 'SERVER_EMAIL': Option('celery@localhost'), + 'ADMINS': Option((), type='tuple'), +} + + +def flatten(d, ns=''): + stack = deque([(ns, d)]) + while stack: + name, space = stack.popleft() + for key, value in space.iteritems(): + if isinstance(value, dict): + stack.append((name + key + '_', value)) + else: + yield name + key, value +DEFAULTS = dict((key, value.default) for key, value in flatten(NAMESPACES)) + + +def find_deprecated_settings(source): + from celery.utils import warn_deprecated + for name, opt in flatten(NAMESPACES): + if (opt.deprecate_by or opt.remove_by) and getattr(source, name, None): + warn_deprecated(description='The %r setting' % (name, ), + deprecation=opt.deprecate_by, + removal=opt.remove_by, + alternative='Use %s instead' % (opt.alt, )) + return source + + +@memoize(maxsize=None) +def find(name, namespace='celery'): + # - Try specified namespace first. + namespace = namespace.upper() + try: + return namespace, name.upper(), NAMESPACES[namespace][name.upper()] + except KeyError: + # - Try all the other namespaces. + for ns, keys in NAMESPACES.iteritems(): + if ns.upper() == name.upper(): + return None, ns, keys + elif isinstance(keys, dict): + try: + return ns, name.upper(), keys[name.upper()] + except KeyError: + pass + # - See if name is a qualname last. 
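+    # A sketch of the three possible lookup outcomes (default settings
+    # assumed)::
+    #
+    #     >>> find('disable_rate_limits')
+    #     ('CELERY', 'DISABLE_RATE_LIMITS', <Option: type->bool ...>)
+    #     >>> find('celeryd')       # a whole namespace
+    #     (None, 'CELERYD', {...})
+    #     >>> find('broker_url')    # a flattened qualname
+    #     (None, 'BROKER_URL', None)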
+ return None, name.upper(), DEFAULTS[name.upper()] diff --git a/awx/lib/site-packages/celery/app/log.py b/awx/lib/site-packages/celery/app/log.py new file mode 100644 index 0000000000..a776548836 --- /dev/null +++ b/awx/lib/site-packages/celery/app/log.py @@ -0,0 +1,231 @@ +# -*- coding: utf-8 -*- +""" + celery.app.log + ~~~~~~~~~~~~~~ + + The Celery instances logging section: ``Celery.log``. + + Sets up logging for the worker and other programs, + redirects stdouts, colors log output, patches logging + related compatibility fixes, and so on. + +""" +from __future__ import absolute_import + +import logging +import os +import sys + +from kombu.log import NullHandler + +from celery import signals +from celery._state import get_current_task +from celery.utils import isatty +from celery.utils.compat import WatchedFileHandler +from celery.utils.log import ( + get_logger, mlevel, + ColorFormatter, ensure_process_aware_logger, + LoggingProxy, get_multiprocessing_logger, + reset_multiprocessing_logger, +) +from celery.utils.term import colored + +is_py3k = sys.version_info[0] == 3 + +MP_LOG = os.environ.get('MP_LOG', False) + + +class TaskFormatter(ColorFormatter): + + def format(self, record): + task = get_current_task() + if task and task.request: + record.__dict__.update(task_id=task.request.id, + task_name=task.name) + else: + record.__dict__.setdefault('task_name', '???') + record.__dict__.setdefault('task_id', '???') + return ColorFormatter.format(self, record) + + +class Logging(object): + #: The logging subsystem is only configured once per process. + #: setup_logging_subsystem sets this flag, and subsequent calls + #: will do nothing. + _setup = False + + def __init__(self, app): + self.app = app + self.loglevel = mlevel(self.app.conf.CELERYD_LOG_LEVEL) + self.format = self.app.conf.CELERYD_LOG_FORMAT + self.task_format = self.app.conf.CELERYD_TASK_LOG_FORMAT + self.colorize = self.app.conf.CELERYD_LOG_COLOR + + def setup(self, loglevel=None, logfile=None, redirect_stdouts=False, + redirect_level='WARNING', colorize=None): + handled = self.setup_logging_subsystem( + loglevel, logfile, colorize=colorize, + ) + if not handled: + logger = get_logger('celery.redirected') + if redirect_stdouts: + self.redirect_stdouts_to_logger(logger, + loglevel=redirect_level) + os.environ.update( + CELERY_LOG_LEVEL=str(loglevel) if loglevel else '', + CELERY_LOG_FILE=str(logfile) if logfile else '', + CELERY_LOG_REDIRECT='1' if redirect_stdouts else '', + CELERY_LOG_REDIRECT_LEVEL=str(redirect_level), + ) + + def setup_logging_subsystem(self, loglevel=None, logfile=None, + format=None, colorize=None, **kwargs): + if Logging._setup: + return + Logging._setup = True + loglevel = mlevel(loglevel or self.loglevel) + format = format or self.format + colorize = self.supports_color(colorize, logfile) + reset_multiprocessing_logger() + if not is_py3k: + ensure_process_aware_logger() + receivers = signals.setup_logging.send( + sender=None, loglevel=loglevel, logfile=logfile, + format=format, colorize=colorize, + ) + + if not receivers: + root = logging.getLogger() + + if self.app.conf.CELERYD_HIJACK_ROOT_LOGGER: + root.handlers = [] + + # Configure root logger + self._configure_logger( + root, logfile, loglevel, format, colorize, **kwargs + ) + + # Configure the multiprocessing logger + self._configure_logger( + get_multiprocessing_logger(), + logfile, loglevel if MP_LOG else logging.ERROR, + format, colorize, **kwargs + ) + + signals.after_setup_logger.send( + sender=None, logger=root, + loglevel=loglevel, 
logfile=logfile, + format=format, colorize=colorize, + ) + + # then setup the root task logger. + self.setup_task_loggers(loglevel, logfile, colorize=colorize) + + # This is a hack for multiprocessing's fork+exec, so that + # logging before Process.run works. + logfile_name = logfile if isinstance(logfile, basestring) else '' + os.environ.update( + _MP_FORK_LOGLEVEL_=str(loglevel), + _MP_FORK_LOGFILE_=logfile_name, + _MP_FORK_LOGFORMAT_=format, + ) + return receivers + + def _configure_logger(self, logger, logfile, loglevel, + format, colorize, **kwargs): + if logger is not None: + self.setup_handlers(logger, logfile, format, + colorize, **kwargs) + if loglevel: + logger.setLevel(loglevel) + + def setup_task_loggers(self, loglevel=None, logfile=None, format=None, + colorize=None, propagate=False, **kwargs): + """Setup the task logger. + + If `logfile` is not specified, then `sys.stderr` is used. + + Returns logger object. + + """ + loglevel = mlevel(loglevel or self.loglevel) + format = format or self.task_format + colorize = self.supports_color(colorize, logfile) + + logger = self.setup_handlers( + get_logger('celery.task'), + logfile, format, colorize, + formatter=TaskFormatter, **kwargs + ) + logger.setLevel(loglevel) + logger.propagate = int(propagate) # this is an int for some reason. + # better to not question why. + signals.after_setup_task_logger.send( + sender=None, logger=logger, + loglevel=loglevel, logfile=logfile, + format=format, colorize=colorize, + ) + return logger + + def redirect_stdouts_to_logger(self, logger, loglevel=None, + stdout=True, stderr=True): + """Redirect :class:`sys.stdout` and :class:`sys.stderr` to a + logging instance. + + :param logger: The :class:`logging.Logger` instance to redirect to. + :param loglevel: The loglevel redirected messages will be logged as. + + """ + proxy = LoggingProxy(logger, loglevel) + if stdout: + sys.stdout = proxy + if stderr: + sys.stderr = proxy + return proxy + + def supports_color(self, colorize=None, logfile=None): + colorize = self.colorize if colorize is None else colorize + if self.app.IS_WINDOWS: + # Windows does not support ANSI color codes. + return False + if colorize or colorize is None: + # Only use color if there is no active log file + # and stderr is an actual terminal. 
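+            # e.g. a worker started with ``--logfile=worker.log`` never
+            # colorizes, while an interactive session on a tty does.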
+ return logfile is None and isatty(sys.stderr) + return colorize + + def colored(self, logfile=None, enabled=None): + return colored(enabled=self.supports_color(enabled, logfile)) + + def setup_handlers(self, logger, logfile, format, colorize, + formatter=ColorFormatter, **kwargs): + if self._is_configured(logger): + return logger + handler = self._detect_handler(logfile) + handler.setFormatter(formatter(format, use_color=colorize)) + logger.addHandler(handler) + return logger + + def _detect_handler(self, logfile=None): + """Create log handler with either a filename, an open stream + or :const:`None` (stderr).""" + logfile = sys.__stderr__ if logfile is None else logfile + if hasattr(logfile, 'write'): + return logging.StreamHandler(logfile) + return WatchedFileHandler(logfile) + + def _has_handler(self, logger): + return (logger.handlers and + not isinstance(logger.handlers[0], NullHandler)) + + def _is_configured(self, logger): + return self._has_handler(logger) and not getattr( + logger, '_rudimentary_setup', False) + + def setup_logger(self, name='celery', *args, **kwargs): + """Deprecated: No longer used.""" + self.setup_logging_subsystem(*args, **kwargs) + return logging.root + + def get_default_logger(self, name='celery', **kwargs): + return get_logger(name) diff --git a/awx/lib/site-packages/celery/app/registry.py b/awx/lib/site-packages/celery/app/registry.py new file mode 100644 index 0000000000..2acbe4f2c3 --- /dev/null +++ b/awx/lib/site-packages/celery/app/registry.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +""" + celery.app.registry + ~~~~~~~~~~~~~~~~~~~ + + Registry of available tasks. + +""" +from __future__ import absolute_import + +import inspect + +from celery.exceptions import NotRegistered + + +class TaskRegistry(dict): + NotRegistered = NotRegistered + + def __missing__(self, key): + raise self.NotRegistered(key) + + def register(self, task): + """Register a task in the task registry. + + The task will be automatically instantiated if not already an + instance. + + """ + self[task.name] = inspect.isclass(task) and task() or task + + def unregister(self, name): + """Unregister task by name. + + :param name: name of the task to unregister, or a + :class:`celery.task.base.Task` with a valid `name` attribute. + + :raises celery.exceptions.NotRegistered: if the task has not + been registered. + + """ + try: + self.pop(getattr(name, 'name', name)) + except KeyError: + raise self.NotRegistered(name) + + # -- these methods are irrelevant now and will be removed in 4.0 + def regular(self): + return self.filter_types('regular') + + def periodic(self): + return self.filter_types('periodic') + + def filter_types(self, type): + return dict((name, task) for name, task in self.iteritems() + if getattr(task, 'type', 'regular') == type) + + +def _unpickle_task(name): + from celery import current_app + return current_app.tasks[name] diff --git a/awx/lib/site-packages/celery/app/routes.py b/awx/lib/site-packages/celery/app/routes.py new file mode 100644 index 0000000000..015b148792 --- /dev/null +++ b/awx/lib/site-packages/celery/app/routes.py @@ -0,0 +1,94 @@ +# -*- coding: utf-8 -*- +""" + celery.routes + ~~~~~~~~~~~~~ + + Contains utilities for working with task routers, + (:setting:`CELERY_ROUTES`). 
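+
+    The routers defined below consume settings of this form (an
+    illustrative example)::
+
+        CELERY_ROUTES = {'feed.tasks.import_feed': {'queue': 'feeds'}}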
+ +""" +from __future__ import absolute_import + +from celery.exceptions import QueueNotFound +from celery.utils import lpmerge +from celery.utils.functional import firstmethod, mpromise +from celery.utils.imports import instantiate + +_first_route = firstmethod('route_for_task') + + +class MapRoute(object): + """Creates a router out of a :class:`dict`.""" + + def __init__(self, map): + self.map = map + + def route_for_task(self, task, *args, **kwargs): + route = self.map.get(task) + if route: + return dict(route) + + +class Router(object): + + def __init__(self, routes=None, queues=None, + create_missing=False, app=None): + self.app = app + self.queues = {} if queues is None else queues + self.routes = [] if routes is None else routes + self.create_missing = create_missing + + def route(self, options, task, args=(), kwargs={}): + options = self.expand_destination(options) # expands 'queue' + if self.routes: + route = self.lookup_route(task, args, kwargs) + if route: # expands 'queue' in route. + return lpmerge(self.expand_destination(route), options) + if 'queue' not in options: + options = lpmerge(self.expand_destination( + self.app.conf.CELERY_DEFAULT_QUEUE), options) + return options + + def expand_destination(self, route): + # Route can be a queue name: convenient for direct exchanges. + if isinstance(route, basestring): + queue, route = route, {} + else: + # can use defaults from configured queue, but override specific + # things (like the routing_key): great for topic exchanges. + queue = route.pop('queue', None) + + if queue: + try: + Q = self.queues[queue] # noqa + except KeyError: + if not self.create_missing: + raise QueueNotFound( + 'Queue %r is not defined in CELERY_QUEUES' % queue) + for key in 'exchange', 'routing_key': + if route.get(key) is None: + route[key] = queue + Q = self.app.amqp.queues.add(queue, **route) + # needs to be declared by publisher + route['queue'] = Q + return route + + def lookup_route(self, task, args=None, kwargs=None): + return _first_route(self.routes, task, args, kwargs) + + +def prepare(routes): + """Expands the :setting:`CELERY_ROUTES` setting.""" + + def expand_route(route): + if isinstance(route, dict): + return MapRoute(route) + if isinstance(route, basestring): + return mpromise(instantiate, route) + return route + + if routes is None: + return () + if not isinstance(routes, (list, tuple)): + routes = (routes, ) + return [expand_route(route) for route in routes] diff --git a/awx/lib/site-packages/celery/app/task.py b/awx/lib/site-packages/celery/app/task.py new file mode 100644 index 0000000000..d3c0fe3522 --- /dev/null +++ b/awx/lib/site-packages/celery/app/task.py @@ -0,0 +1,795 @@ +# -*- coding: utf-8 -*- +""" + celery.app.task + ~~~~~~~~~~~~~~~ + + Task Implementation: Task request context, and the base task class. 
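+
+    The :class:`Task` class defined here backs the task decorator,
+    e.g. (a minimal sketch)::
+
+        @app.task
+        def add(x, y):
+            return x + y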
+
+"""
+from __future__ import absolute_import
+from __future__ import with_statement
+
+from celery import current_app
+from celery import states
+from celery.__compat__ import class_property
+from celery._state import get_current_worker_task, _task_stack
+from celery.canvas import subtask
+from celery.datastructures import ExceptionInfo
+from celery.exceptions import MaxRetriesExceededError, RetryTaskError
+from celery.result import EagerResult
+from celery.utils import gen_task_name, fun_takes_kwargs, uuid, maybe_reraise
+from celery.utils.functional import mattrgetter, maybe_list
+from celery.utils.imports import instantiate
+from celery.utils.mail import ErrorMail
+
+from .annotations import resolve_all as resolve_all_annotations
+from .registry import _unpickle_task
+
+#: extracts attributes related to publishing a message from an object.
+extract_exec_options = mattrgetter(
+    'queue', 'routing_key', 'exchange',
+    'immediate', 'mandatory', 'priority', 'expires',
+    'serializer', 'delivery_mode', 'compression',
+)
+
+
+class Context(object):
+    # Default context
+    logfile = None
+    loglevel = None
+    hostname = None
+    id = None
+    args = None
+    kwargs = None
+    retries = 0
+    eta = None
+    expires = None
+    is_eager = False
+    delivery_info = None
+    taskset = None   # compat alias to group
+    group = None
+    chord = None
+    utc = None
+    called_directly = True
+    callbacks = None
+    errbacks = None
+    timeouts = None
+    _children = None   # see property
+    _protected = 0
+
+    def __init__(self, *args, **kwargs):
+        self.update(*args, **kwargs)
+
+    def update(self, *args, **kwargs):
+        self.__dict__.update(*args, **kwargs)
+
+    def clear(self):
+        self.__dict__.clear()
+
+    def get(self, key, default=None):
+        try:
+            return getattr(self, key)
+        except AttributeError:
+            return default
+
+    def __repr__(self):
+        return '<Context: %r>' % (vars(self, ))
+
+    @property
+    def children(self):
+        # children must be an empty list for every thread
+        if self._children is None:
+            self._children = []
+        return self._children
+
+
+class TaskType(type):
+    """Meta class for tasks.
+
+    Automatically registers the task in the task registry, except
+    if the `abstract` attribute is set.
+
+    If no `name` attribute is provided, then a name is automatically
+    generated from the name of the module it was defined in, and the
+    class name.
+
+    """
+
+    def __new__(cls, name, bases, attrs):
+        new = super(TaskType, cls).__new__
+        task_module = attrs.get('__module__') or '__main__'
+
+        # - Abstract class: abstract attribute should not be inherited.
+        if attrs.pop('abstract', None) or not attrs.get('autoregister', True):
+            return new(cls, name, bases, attrs)
+
+        # The 'app' attribute is now a property, with the real app located
+        # in the '_app' attribute.  Previously this was a regular attribute,
+        # so we should support classes defining it.
+        _app1, _app2 = attrs.pop('_app', None), attrs.pop('app', None)
+        app = attrs['_app'] = _app1 or _app2 or current_app
+
+        # - Automatically generate missing/empty name.
+        task_name = attrs.get('name')
+        if not task_name:
+            attrs['name'] = task_name = gen_task_name(app, name, task_module)
+
+        # - Create and register class.
+        # Because of the way import happens (recursively)
+        # we may or may not be the first time the task tries to register
+        # with the framework.  There should only be one class for each task
+        # name, so we always return the registered version.
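+        # (e.g. importing the same module both as 'proj.tasks' and as
+        # 'tasks' must not create two distinct task classes.)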
+        tasks = app._tasks
+        if task_name not in tasks:
+            tasks.register(new(cls, name, bases, attrs))
+        instance = tasks[task_name]
+        instance.bind(app)
+        return instance.__class__

+    def __repr__(cls):
+        if cls._app:
+            return '<class %s of %s>' % (cls.__name__, cls._app, )
+        if cls.__v2_compat__:
+            return '<unbound %s (v2 compatible)>' % (cls.__name__, )
+        return '<unbound %s>' % (cls.__name__, )
+
+
+class Task(object):
+    """Task base class.
+
+    When called tasks apply the :meth:`run` method.  This method must
+    be defined by all tasks (that is unless the :meth:`__call__` method
+    is overridden).
+
+    """
+    __metaclass__ = TaskType
+    __trace__ = None
+    __v2_compat__ = False  # set by old base in celery.task.base
+
+    ErrorMail = ErrorMail
+    MaxRetriesExceededError = MaxRetriesExceededError
+
+    #: Execution strategy used, or the qualified name of one.
+    Strategy = 'celery.worker.strategy:default'
+
+    #: This is the instance bound to if the task is a method of a class.
+    __self__ = None
+
+    #: The application instance associated with this task class.
+    _app = None
+
+    #: Name of the task.
+    name = None
+
+    #: If :const:`True` the task is an abstract base class.
+    abstract = True
+
+    #: If disabled the worker will not forward magic keyword arguments.
+    #: Deprecated and scheduled for removal in v4.0.
+    accept_magic_kwargs = False
+
+    #: Maximum number of retries before giving up.  If set to :const:`None`,
+    #: it will **never** stop retrying.
+    max_retries = 3
+
+    #: Default time in seconds before a retry of the task should be
+    #: executed.  3 minutes by default.
+    default_retry_delay = 3 * 60
+
+    #: Rate limit for this task type.  Examples: :const:`None` (no rate
+    #: limit), `'100/s'` (hundred tasks a second), `'100/m'` (hundred tasks
+    #: a minute), `'100/h'` (hundred tasks an hour)
+    rate_limit = None
+
+    #: If enabled the worker will not store task state and return values
+    #: for this task.  Defaults to the :setting:`CELERY_IGNORE_RESULT`
+    #: setting.
+    ignore_result = None
+
+    #: When enabled errors will be stored even if the task is otherwise
+    #: configured to ignore results.
+    store_errors_even_if_ignored = None
+
+    #: If enabled an email will be sent to :setting:`ADMINS` whenever a task
+    #: of this type fails.
+    send_error_emails = None
+
+    #: The name of a serializer that is registered with
+    #: :mod:`kombu.serialization.registry`.  Default is `'pickle'`.
+    serializer = None
+
+    #: Hard time limit.
+    #: Defaults to the :setting:`CELERY_TASK_TIME_LIMIT` setting.
+    time_limit = None
+
+    #: Soft time limit.
+    #: Defaults to the :setting:`CELERY_TASK_SOFT_TIME_LIMIT` setting.
+    soft_time_limit = None
+
+    #: The result store backend used for this task.
+    backend = None
+
+    #: If disabled this task won't be registered automatically.
+    autoregister = True
+
+    #: If enabled the task will report its status as 'started' when the task
+    #: is executed by a worker.  Disabled by default as the normal behaviour
+    #: is to not report that level of granularity.  Tasks are either pending,
+    #: finished, or waiting to be retried.
+    #:
+    #: Having a 'started' status can be useful for when there are long
+    #: running tasks and there is a need to report which task is currently
+    #: running.
+    #:
+    #: The application default can be overridden using the
+    #: :setting:`CELERY_TRACK_STARTED` setting.
+    track_started = None
+
+    #: When enabled messages for this task will be acknowledged **after**
+    #: the task has been executed, and not *just before* which is the
+    #: default behavior.
+    #:
+    #: Please note that this means the task may be executed twice if the
+    #: worker crashes mid execution (which may be acceptable for some
+    #: applications).
+    #:
+    #: The application default can be overridden with the
+    #: :setting:`CELERY_ACKS_LATE` setting.
+    acks_late = None
+
+    #: Default task expiry time.
+    expires = None
+
+    #: Some may expect a request to exist even if the task has not been
+    #: called.  This should probably be deprecated.
+    _default_request = None
+
+    __bound__ = False
+
+    from_config = (
+        ('send_error_emails', 'CELERY_SEND_TASK_ERROR_EMAILS'),
+        ('serializer', 'CELERY_TASK_SERIALIZER'),
+        ('rate_limit', 'CELERY_DEFAULT_RATE_LIMIT'),
+        ('track_started', 'CELERY_TRACK_STARTED'),
+        ('acks_late', 'CELERY_ACKS_LATE'),
+        ('ignore_result', 'CELERY_IGNORE_RESULT'),
+        ('store_errors_even_if_ignored',
+         'CELERY_STORE_ERRORS_EVEN_IF_IGNORED'),
+    )
+
+    # - Tasks are lazily bound, so that configuration is not set
+    # - until the task is actually used
+
+    @classmethod
+    def bind(self, app):
+        was_bound, self.__bound__ = self.__bound__, True
+        self._app = app
+        conf = app.conf
+
+        for attr_name, config_name in self.from_config:
+            if getattr(self, attr_name, None) is None:
+                setattr(self, attr_name, conf[config_name])
+        if self.accept_magic_kwargs is None:
+            self.accept_magic_kwargs = app.accept_magic_kwargs
+        if self.backend is None:
+            self.backend = app.backend
+
+        # decorate with annotations from config.
+        if not was_bound:
+            self.annotate()
+
+        from celery.utils.threads import LocalStack
+        self.request_stack = LocalStack()
+
+        # PeriodicTask uses this to add itself to the PeriodicTask schedule.
+        self.on_bound(app)
+
+        return app
+
+    @classmethod
+    def on_bound(self, app):
+        """This method can be defined to do additional actions when the
+        task class is bound to an app."""
+        pass
+
+    @classmethod
+    def _get_app(self):
+        if not self.__bound__ or self._app is None:
+            # The app property's __set__ method is not called
+            # if Task.app is set (on the class), so must bind on use.
+            self.bind(current_app)
+        return self._app
+    app = class_property(_get_app, bind)
+
+    @classmethod
+    def annotate(self):
+        for d in resolve_all_annotations(self.app.annotations, self):
+            for key, value in d.iteritems():
+                if key.startswith('@'):
+                    self.add_around(key[1:], value)
+                else:
+                    setattr(self, key, value)
+
+    @classmethod
+    def add_around(self, attr, around):
+        orig = getattr(self, attr)
+        if getattr(orig, '__wrapped__', None):
+            orig = orig.__wrapped__
+        meth = around(orig)
+        meth.__wrapped__ = orig
+        setattr(self, attr, meth)
+
+    def __call__(self, *args, **kwargs):
+        _task_stack.push(self)
+        self.push_request()
+        try:
+            # add self if this is a bound task
+            if self.__self__ is not None:
+                return self.run(self.__self__, *args, **kwargs)
+            return self.run(*args, **kwargs)
+        finally:
+            self.pop_request()
+            _task_stack.pop()
+
+    # - tasks are pickled into the name of the task only, and the receiver
+    # - simply grabs it from the local registry.
+    def __reduce__(self):
+        return (_unpickle_task, (self.name, ), None)
+
+    def run(self, *args, **kwargs):
+        """The body of the task executed by workers."""
+        raise NotImplementedError('Tasks must define the run method.')
+
+    def start_strategy(self, app, consumer):
+        return instantiate(self.Strategy, self, app, consumer)
+
+    def delay(self, *args, **kwargs):
+        """Star argument version of :meth:`apply_async`.
+
+        Does not support the extra options enabled by :meth:`apply_async`.
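+
+        A usage sketch (``add`` is an assumed registered task)::
+
+            >>> add.delay(2, 2)    # same as add.apply_async((2, 2))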
+
+        :param \*args: positional arguments passed on to the task.
+        :param \*\*kwargs: keyword arguments passed on to the task.
+
+        :returns :class:`celery.result.AsyncResult`:
+
+        """
+        return self.apply_async(args, kwargs)
+
+    def apply_async(self, args=None, kwargs=None,
+                    task_id=None, producer=None, connection=None, router=None,
+                    link=None, link_error=None, publisher=None,
+                    add_to_parent=True, **options):
+        """Apply tasks asynchronously by sending a message.
+
+        :keyword args: The positional arguments to pass on to the
+                       task (a :class:`list` or :class:`tuple`).
+
+        :keyword kwargs: The keyword arguments to pass on to the
+                         task (a :class:`dict`).
+
+        :keyword countdown: Number of seconds into the future that the
+                            task should execute.  Defaults to immediate
+                            execution (do not confuse with the
+                            `immediate` flag, as they are unrelated).
+
+        :keyword eta: A :class:`~datetime.datetime` object describing
+                      the absolute time and date of when the task should
+                      be executed.  May not be specified if `countdown`
+                      is also supplied.  (Do not confuse this with the
+                      `immediate` flag, as they are unrelated).
+
+        :keyword expires: Either a :class:`int`, describing the number of
+                          seconds, or a :class:`~datetime.datetime` object
+                          that describes the absolute time and date of when
+                          the task should expire.  The task will not be
+                          executed after the expiration time.
+
+        :keyword connection: Re-use existing broker connection instead
+                             of establishing a new one.
+
+        :keyword retry: If enabled sending of the task message will be retried
+                        in the event of connection loss or failure.  Default
+                        is taken from the :setting:`CELERY_TASK_PUBLISH_RETRY`
+                        setting.  Note you need to handle the
+                        producer/connection manually for this to work.
+
+        :keyword retry_policy: Override the retry policy used.  See the
+                               :setting:`CELERY_TASK_PUBLISH_RETRY_POLICY`
+                               setting.
+
+        :keyword routing_key: Custom routing key used to route the task to a
+                              worker server.  If in combination with a
+                              ``queue`` argument only used to specify custom
+                              routing keys to topic exchanges.
+
+        :keyword queue: The queue to route the task to.  This must be a key
+                        present in :setting:`CELERY_QUEUES`, or
+                        :setting:`CELERY_CREATE_MISSING_QUEUES` must be
+                        enabled.  See :ref:`guide-routing` for more
+                        information.
+
+        :keyword exchange: Named custom exchange to send the task to.
+                           Usually not used in combination with the ``queue``
+                           argument.
+
+        :keyword priority: The task priority, a number between 0 and 9.
+                           Defaults to the :attr:`priority` attribute.
+
+        :keyword serializer: A string identifying the default
+                             serialization method to use.  Can be `pickle`,
+                             `json`, `yaml`, `msgpack` or any custom
+                             serialization method that has been registered
+                             with :mod:`kombu.serialization.registry`.
+                             Defaults to the :attr:`serializer` attribute.
+
+        :keyword compression: A string identifying the compression method
+                              to use.  Can be one of ``zlib``, ``bzip2``,
+                              or any custom compression methods registered
+                              with :func:`kombu.compression.register`.
+                              Defaults to the
+                              :setting:`CELERY_MESSAGE_COMPRESSION` setting.
+        :keyword link: A single, or a list of subtasks to apply if the
+                       task exits successfully.
+        :keyword link_error: A single, or a list of subtasks to apply
+                             if an error occurs while executing the task.
+
+        :keyword producer: :class:`~@amqp.TaskProducer` instance to use.
+        :keyword add_to_parent: If set to True (default) and the task
+            is applied while executing another task, then the result
+            will be appended to the parent task's ``request.children``
+            attribute.
+ :keyword publisher: Deprecated alias to ``producer``. + + Also supports all keyword arguments supported by + :meth:`kombu.messaging.Producer.publish`. + + .. note:: + If the :setting:`CELERY_ALWAYS_EAGER` setting is set, it will + be replaced by a local :func:`apply` call instead. + + """ + producer = producer or publisher + app = self._get_app() + router = router or self.app.amqp.router + conf = app.conf + + # add 'self' if this is a bound method. + if self.__self__ is not None: + args = (self.__self__, ) + tuple(args) + + if conf.CELERY_ALWAYS_EAGER: + return self.apply(args, kwargs, task_id=task_id, **options) + options = dict(extract_exec_options(self), **options) + options = router.route(options, self.name, args, kwargs) + + if connection: + producer = app.amqp.TaskProducer(connection) + with app.producer_or_acquire(producer) as P: + task_id = P.publish_task(self.name, args, kwargs, + task_id=task_id, + callbacks=maybe_list(link), + errbacks=maybe_list(link_error), + **options) + result = self.AsyncResult(task_id) + if add_to_parent: + parent = get_current_worker_task() + if parent: + parent.request.children.append(result) + return result + + def subtask_from_request(self, request=None, args=None, kwargs=None, + **extra_options): + + request = self.request if request is None else request + args = request.args if args is None else args + kwargs = request.kwargs if kwargs is None else kwargs + delivery_info = request.delivery_info or {} + options = { + 'task_id': request.id, + 'link': request.callbacks, + 'link_error': request.errbacks, + 'exchange': delivery_info.get('exchange'), + 'routing_key': delivery_info.get('routing_key') + } + return self.subtask(args, kwargs, options, type=self, **extra_options) + + def retry(self, args=None, kwargs=None, exc=None, throw=True, + eta=None, countdown=None, max_retries=None, **options): + """Retry the task. + + :param args: Positional arguments to retry with. + :param kwargs: Keyword arguments to retry with. + :keyword exc: Custom exception to report when the max restart + limit has been exceeded (default: + :exc:`~celery.exceptions.MaxRetriesExceededError`). + + If this argument is set and retry is called while + an exception was raised (``sys.exc_info()`` is set) + it will attempt to reraise the current exception. + + If no exception was raised it will raise the ``exc`` + argument provided. + :keyword countdown: Time in seconds to delay the retry for. + :keyword eta: Explicit time and date to run the retry at + (must be a :class:`~datetime.datetime` instance). + :keyword max_retries: If set, overrides the default retry limit. + :keyword \*\*options: Any extra options to pass on to + meth:`apply_async`. + :keyword throw: If this is :const:`False`, do not raise the + :exc:`~celery.exceptions.RetryTaskError` exception, + that tells the worker to mark the task as being + retried. Note that this means the task will be + marked as failed if the task raises an exception, + or successful if it returns. + + :raises celery.exceptions.RetryTaskError: To tell the worker that + the task has been re-sent for retry. This always happens, + unless the `throw` keyword argument has been explicitly set + to :const:`False`, and is considered normal operation. + + **Example** + + .. code-block:: python + + >>> @task() + >>> def tweet(auth, message): + ... twitter = Twitter(oauth=auth) + ... try: + ... twitter.post_status_update(message) + ... except twitter.FailWhale, exc: + ... # Retry in 5 minutes. + ... 
raise tweet.retry(countdown=60 * 5, exc=exc) + + Although the task will never return above as `retry` raises an + exception to notify the worker, we use `return` in front of the retry + to convey that the rest of the block will not be executed. + + """ + request = self.request + retries = request.retries + 1 + max_retries = self.max_retries if max_retries is None else max_retries + + # Not in worker or emulated by (apply/always_eager), + # so just raise the original exception. + if request.called_directly: + maybe_reraise() # raise orig stack if PyErr_Occurred + raise exc or RetryTaskError('Task can be retried', None) + + if not eta and countdown is None: + countdown = self.default_retry_delay + + S = self.subtask_from_request( + request, args, kwargs, + countdown=countdown, eta=eta, retries=retries, + **options + ) + + if max_retries is not None and retries > max_retries: + if exc: + maybe_reraise() + raise self.MaxRetriesExceededError( + """Can't retry %s[%s] args:%s kwargs:%s""" % ( + self.name, request.id, S.args, S.kwargs)) + + # If task was executed eagerly using apply(), + # then the retry must also be executed eagerly. + S.apply().get() if request.is_eager else S.apply_async() + ret = RetryTaskError(exc=exc, when=eta or countdown) + if throw: + raise ret + return ret + + def apply(self, args=None, kwargs=None, **options): + """Execute this task locally, by blocking until the task returns. + + :param args: positional arguments passed on to the task. + :param kwargs: keyword arguments passed on to the task. + :keyword throw: Re-raise task exceptions. Defaults to + the :setting:`CELERY_EAGER_PROPAGATES_EXCEPTIONS` + setting. + + :rtype :class:`celery.result.EagerResult`: + + """ + # trace imports Task, so need to import inline. + from celery.task.trace import eager_trace_task + + app = self._get_app() + args = args or () + # add 'self' if this is a bound method. + if self.__self__ is not None: + args = (self.__self__, ) + tuple(args) + kwargs = kwargs or {} + task_id = options.get('task_id') or uuid() + retries = options.get('retries', 0) + throw = app.either('CELERY_EAGER_PROPAGATES_EXCEPTIONS', + options.pop('throw', None)) + + # Make sure we get the task instance, not class. + task = app._tasks[self.name] + + request = {'id': task_id, + 'retries': retries, + 'is_eager': True, + 'logfile': options.get('logfile'), + 'loglevel': options.get('loglevel', 0), + 'delivery_info': {'is_eager': True}} + if self.accept_magic_kwargs: + default_kwargs = {'task_name': task.name, + 'task_id': task_id, + 'task_retries': retries, + 'task_is_eager': True, + 'logfile': options.get('logfile'), + 'loglevel': options.get('loglevel', 0), + 'delivery_info': {'is_eager': True}} + supported_keys = fun_takes_kwargs(task.run, default_kwargs) + extend_with = dict((key, val) + for key, val in default_kwargs.items() + if key in supported_keys) + kwargs.update(extend_with) + + tb = None + retval, info = eager_trace_task(task, task_id, args, kwargs, + request=request, propagate=throw) + if isinstance(retval, ExceptionInfo): + retval, tb = retval.exception, retval.traceback + state = states.SUCCESS if info is None else info.state + return EagerResult(task_id, retval, state, traceback=tb) + + def AsyncResult(self, task_id, **kwargs): + """Get AsyncResult instance for this kind of task. + + :param task_id: Task id to get result for. 
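+
+        A usage sketch (``add`` is an assumed registered task)::
+
+            >>> result = add.AsyncResult(task_id)
+            >>> result.state    # e.g. 'PENDING' or 'SUCCESS'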
+ + """ + return self._get_app().AsyncResult(task_id, backend=self.backend, + task_name=self.name, **kwargs) + + def subtask(self, args=None, *starargs, **starkwargs): + """Returns :class:`~celery.subtask` object for + this task, wrapping arguments and execution options + for a single task invocation.""" + return subtask(self, args, *starargs, **starkwargs) + + def s(self, *args, **kwargs): + """``.s(*a, **k) -> .subtask(a, k)``""" + return self.subtask(args, kwargs) + + def si(self, *args, **kwargs): + """``.si(*a, **k) -> .subtask(a, k, immutable=True)``""" + return self.subtask(args, kwargs, immutable=True) + + def chunks(self, it, n): + """Creates a :class:`~celery.canvas.chunks` task for this task.""" + from celery import chunks + return chunks(self.s(), it, n) + + def map(self, it): + """Creates a :class:`~celery.canvas.xmap` task from ``it``.""" + from celery import xmap + return xmap(self.s(), it) + + def starmap(self, it): + """Creates a :class:`~celery.canvas.xstarmap` task from ``it``.""" + from celery import xstarmap + return xstarmap(self.s(), it) + + def update_state(self, task_id=None, state=None, meta=None): + """Update task state. + + :keyword task_id: Id of the task to update, defaults to the + id of the current task + :keyword state: New state (:class:`str`). + :keyword meta: State metadata (:class:`dict`). + + + + """ + if task_id is None: + task_id = self.request.id + self.backend.store_result(task_id, meta, state) + + def on_success(self, retval, task_id, args, kwargs): + """Success handler. + + Run by the worker if the task executes successfully. + + :param retval: The return value of the task. + :param task_id: Unique id of the executed task. + :param args: Original arguments for the executed task. + :param kwargs: Original keyword arguments for the executed task. + + The return value of this handler is ignored. + + """ + pass + + def on_retry(self, exc, task_id, args, kwargs, einfo): + """Retry handler. + + This is run by the worker when the task is to be retried. + + :param exc: The exception sent to :meth:`retry`. + :param task_id: Unique id of the retried task. + :param args: Original arguments for the retried task. + :param kwargs: Original keyword arguments for the retried task. + + :keyword einfo: :class:`~celery.datastructures.ExceptionInfo` + instance, containing the traceback. + + The return value of this handler is ignored. + + """ + pass + + def on_failure(self, exc, task_id, args, kwargs, einfo): + """Error handler. + + This is run by the worker when the task fails. + + :param exc: The exception raised by the task. + :param task_id: Unique id of the failed task. + :param args: Original arguments for the task that failed. + :param kwargs: Original keyword arguments for the task + that failed. + + :keyword einfo: :class:`~celery.datastructures.ExceptionInfo` + instance, containing the traceback. + + The return value of this handler is ignored. + + """ + pass + + def after_return(self, status, retval, task_id, args, kwargs, einfo): + """Handler called after the task returns. + + :param status: Current task state. + :param retval: Task return value/exception. + :param task_id: Unique id of the task. + :param args: Original arguments for the task that failed. + :param kwargs: Original keyword arguments for the task + that failed. + + :keyword einfo: :class:`~celery.datastructures.ExceptionInfo` + instance, containing the traceback (if any). + + The return value of this handler is ignored. 
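+
+        A sketch of overriding it in a base class (names illustrative)::
+
+            class DebugTask(Task):
+                abstract = True
+
+                def after_return(self, status, retval, task_id,
+                                 args, kwargs, einfo):
+                    print('%s[%s] -> %s' % (self.name, task_id, status))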
+ + """ + pass + + def send_error_email(self, context, exc, **kwargs): + if self.send_error_emails and \ + not getattr(self, 'disable_error_emails', None): + self.ErrorMail(self, **kwargs).send(context, exc) + + def push_request(self, *args, **kwargs): + self.request_stack.push(Context(*args, **kwargs)) + + def pop_request(self): + self.request_stack.pop() + + def __repr__(self): + """`repr(task)`""" + if self.__self__: + return '' % (self.name, self.__self__) + return '<@task: %s>' % (self.name, ) + + def _get_request(self): + """Get current request object.""" + req = self.request_stack.top + if req is None: + # task was not called, but some may still expect a request + # to be there, perhaps that should be deprecated. + if self._default_request is None: + self._default_request = Context() + return self._default_request + return req + request = property(_get_request) + + @property + def __name__(self): + return self.__class__.__name__ +BaseTask = Task # compat alias diff --git a/awx/lib/site-packages/celery/app/utils.py b/awx/lib/site-packages/celery/app/utils.py new file mode 100644 index 0000000000..2e857ac57e --- /dev/null +++ b/awx/lib/site-packages/celery/app/utils.py @@ -0,0 +1,178 @@ +# -*- coding: utf-8 -*- +""" + celery.app.utils + ~~~~~~~~~~~~~~~~ + + App utilities: Compat settings, bugreport tool, pickling apps. + +""" +from __future__ import absolute_import + +import os +import platform as _platform +import re + +from celery import platforms +from celery.datastructures import ConfigurationView +from celery.utils.text import pretty +from celery.utils.imports import qualname + +from .defaults import find + +#: Format used to generate bugreport information. +BUGREPORT_INFO = """ +software -> celery:%(celery_v)s kombu:%(kombu_v)s py:%(py_v)s + billiard:%(billiard_v)s %(driver_v)s +platform -> system:%(system)s arch:%(arch)s imp:%(py_i)s +loader -> %(loader)s +settings -> transport:%(transport)s results:%(results)s + +%(human_settings)s +""" + +HIDDEN_SETTINGS = re.compile( + 'API|TOKEN|KEY|SECRET|PASS|PROFANITIES_LIST|SIGNATURE|DATABASE', + re.IGNORECASE, +) + + +class Settings(ConfigurationView): + """Celery settings object.""" + + @property + def CELERY_RESULT_BACKEND(self): + return self.first('CELERY_RESULT_BACKEND', 'CELERY_BACKEND') + + @property + def BROKER_TRANSPORT(self): + return self.first('BROKER_TRANSPORT', + 'BROKER_BACKEND', 'CARROT_BACKEND') + + @property + def BROKER_BACKEND(self): + """Deprecated compat alias to :attr:`BROKER_TRANSPORT`.""" + return self.BROKER_TRANSPORT + + @property + def BROKER_HOST(self): + return (os.environ.get('CELERY_BROKER_URL') or + self.first('BROKER_URL', 'BROKER_HOST')) + + @property + def CELERY_TIMEZONE(self): + # this way we also support django's time zone. + return self.first('CELERY_TIMEZONE', 'TIME_ZONE') + + def without_defaults(self): + """Returns the current configuration, but without defaults.""" + # the last stash is the default settings, so just skip that + return Settings({}, self._order[:-1]) + + def find_option(self, name, namespace='celery'): + """Search for option by name. + + Will return ``(namespace, option_name, Option)`` tuple, e.g.:: + + >>> celery.conf.find_option('disable_rate_limits') + ('CELERY', 'DISABLE_RATE_LIMITS', + bool default->False>)) + + :param name: Name of option, cannot be partial. + :keyword namespace: Preferred namespace (``CELERY`` by default). 
+ + """ + return find(name, namespace) + + def find_value_for_key(self, name, namespace='celery'): + """Shortcut to ``get_by_parts(*find_option(name)[:-1])``""" + return self.get_by_parts(*self.find_option(name, namespace)[:-1]) + + def get_by_parts(self, *parts): + """Returns the current value for setting specified as a path. + + Example:: + + >>> celery.conf.get_by_parts('CELERY', 'DISABLE_RATE_LIMITS') + False + + """ + return self['_'.join(part for part in parts if part)] + + def humanize(self): + """Returns a human readable string showing changes to the + configuration.""" + return '\n'.join( + '%s: %s' % (key, pretty(value, width=50)) + for key, value in filter_hidden_settings(dict( + (k, v) for k, v in self.without_defaults().iteritems() + if k.isupper() and not k.startswith('_'))).iteritems()) + + +class AppPickler(object): + """Default application pickler/unpickler.""" + + def __call__(self, cls, *args): + kwargs = self.build_kwargs(*args) + app = self.construct(cls, **kwargs) + self.prepare(app, **kwargs) + return app + + def prepare(self, app, **kwargs): + app.conf.update(kwargs['changes']) + + def build_kwargs(self, *args): + return self.build_standard_kwargs(*args) + + def build_standard_kwargs(self, main, changes, loader, backend, amqp, + events, log, control, accept_magic_kwargs, + config_source=None): + return dict(main=main, loader=loader, backend=backend, amqp=amqp, + changes=changes, events=events, log=log, control=control, + set_as_current=False, + accept_magic_kwargs=accept_magic_kwargs, + config_source=config_source) + + def construct(self, cls, **kwargs): + return cls(**kwargs) + + +def _unpickle_app(cls, pickler, *args): + return pickler()(cls, *args) + + +def filter_hidden_settings(conf): + + def maybe_censor(key, value): + return '********' if HIDDEN_SETTINGS.search(key) else value + + return dict((k, maybe_censor(k, v)) for k, v in conf.iteritems()) + + +def bugreport(app): + """Returns a string containing information useful in bug reports.""" + import billiard + import celery + import kombu + + try: + conn = app.connection() + driver_v = '%s:%s' % (conn.transport.driver_name, + conn.transport.driver_version()) + transport = conn.transport_cls + except Exception: + transport = driver_v = '' + + return BUGREPORT_INFO % { + 'system': _platform.system(), + 'arch': ', '.join(p for p in _platform.architecture() if p), + 'py_i': platforms.pyimplementation(), + 'celery_v': celery.VERSION_BANNER, + 'kombu_v': kombu.__version__, + 'billiard_v': billiard.__version__, + 'py_v': _platform.python_version(), + 'driver_v': driver_v, + 'transport': transport, + 'results': app.conf.CELERY_RESULT_BACKEND or 'disabled', + 'human_settings': app.conf.humanize(), + 'loader': qualname(app.loader.__class__), + } diff --git a/awx/lib/site-packages/celery/apps/__init__.py b/awx/lib/site-packages/celery/apps/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/celery/apps/beat.py b/awx/lib/site-packages/celery/apps/beat.py new file mode 100644 index 0000000000..8e1ac22f41 --- /dev/null +++ b/awx/lib/site-packages/celery/apps/beat.py @@ -0,0 +1,140 @@ +# -*- coding: utf-8 -*- +""" + celery.apps.beat + ~~~~~~~~~~~~~~~~ + + This module is the 'program-version' of :mod:`celery.beat`. + + It does everything necessary to run that module + as an actual application, like installing signal handlers + and so on. 
+ +""" +from __future__ import absolute_import + +import socket +import sys + +from celery import VERSION_BANNER, platforms, beat +from celery.app import app_or_default +from celery.app.abstract import configurated, from_config +from celery.utils.imports import qualname +from celery.utils.log import LOG_LEVELS, get_logger +from celery.utils.timeutils import humanize_seconds + +STARTUP_INFO_FMT = """ +Configuration -> + . broker -> %(conninfo)s + . loader -> %(loader)s + . scheduler -> %(scheduler)s +%(scheduler_info)s + . logfile -> %(logfile)s@%(loglevel)s + . maxinterval -> %(hmax_interval)s (%(max_interval)ss) +""".strip() + +logger = get_logger('celery.beat') + + +class Beat(configurated): + Service = beat.Service + + app = None + loglevel = from_config('log_level') + logfile = from_config('log_file') + schedule = from_config('schedule_filename') + scheduler_cls = from_config('scheduler') + redirect_stdouts = from_config() + redirect_stdouts_level = from_config() + + def __init__(self, max_interval=None, app=None, + socket_timeout=30, pidfile=None, no_color=None, **kwargs): + """Starts the celerybeat task scheduler.""" + self.app = app = app_or_default(app or self.app) + self.setup_defaults(kwargs, namespace='celerybeat') + + self.max_interval = max_interval + self.socket_timeout = socket_timeout + self.no_color = no_color + self.colored = app.log.colored( + self.logfile, + enabled=not no_color if no_color is not None else no_color, + ) + self.pidfile = pidfile + + if not isinstance(self.loglevel, int): + self.loglevel = LOG_LEVELS[self.loglevel.upper()] + + def run(self): + print(str(self.colored.cyan( + 'celerybeat v%s is starting.' % VERSION_BANNER))) + self.init_loader() + self.set_process_title() + self.start_scheduler() + + def setup_logging(self, colorize=None): + if colorize is None and self.no_color is not None: + colorize = not self.no_color + self.app.log.setup(self.loglevel, self.logfile, + self.redirect_stdouts, self.redirect_stdouts_level, + colorize=colorize) + + def start_scheduler(self): + c = self.colored + if self.pidfile: + platforms.create_pidlock(self.pidfile) + beat = self.Service(app=self.app, + max_interval=self.max_interval, + scheduler_cls=self.scheduler_cls, + schedule_filename=self.schedule) + + print(str(c.blue('__ ', c.magenta('-'), + c.blue(' ... __ '), c.magenta('-'), + c.blue(' _\n'), + c.reset(self.startup_info(beat))))) + self.setup_logging() + if self.socket_timeout: + logger.debug('Setting default socket timeout to %r', + self.socket_timeout) + socket.setdefaulttimeout(self.socket_timeout) + try: + self.install_sync_handler(beat) + beat.start() + except Exception, exc: + logger.critical('celerybeat raised exception %s: %r', + exc.__class__, exc, + exc_info=True) + + def init_loader(self): + # Run the worker init handler. + # (Usually imports task modules and such.) 
+ self.app.loader.init_worker() + self.app.finalize() + + def startup_info(self, beat): + scheduler = beat.get_scheduler(lazy=True) + return STARTUP_INFO_FMT % { + 'conninfo': self.app.connection().as_uri(), + 'logfile': self.logfile or '[stderr]', + 'loglevel': LOG_LEVELS[self.loglevel], + 'loader': qualname(self.app.loader), + 'scheduler': qualname(scheduler), + 'scheduler_info': scheduler.info, + 'hmax_interval': humanize_seconds(beat.max_interval), + 'max_interval': beat.max_interval, + } + + def set_process_title(self): + arg_start = 'manage' in sys.argv[0] and 2 or 1 + platforms.set_process_title( + 'celerybeat', info=' '.join(sys.argv[arg_start:]), + ) + + def install_sync_handler(self, beat): + """Install a `SIGTERM` + `SIGINT` handler that saves + the celerybeat schedule.""" + + def _sync(signum, frame): + beat.sync() + raise SystemExit() + + platforms.signals.update(SIGTERM=_sync, SIGINT=_sync) diff --git a/awx/lib/site-packages/celery/apps/worker.py b/awx/lib/site-packages/celery/apps/worker.py new file mode 100644 index 0000000000..c3e6d6da42 --- /dev/null +++ b/awx/lib/site-packages/celery/apps/worker.py @@ -0,0 +1,419 @@ +# -*- coding: utf-8 -*- +""" + celery.apps.worker + ~~~~~~~~~~~~~~~~~~ + + This module is the 'program-version' of :mod:`celery.worker`. + + It does everything necessary to run that module + as an actual application, like installing signal handlers, + platform tweaks, and so on. + +""" +from __future__ import absolute_import + +import logging +import os +import platform as _platform +import socket +import sys +import warnings + +from functools import partial + +from billiard import cpu_count, current_process +from kombu.utils.encoding import safe_str + +from celery import VERSION_BANNER, platforms, signals +from celery.app import app_or_default +from celery.app.abstract import configurated, from_config +from celery.exceptions import ImproperlyConfigured, SystemTerminate +from celery.loaders.app import AppLoader +from celery.task import trace +from celery.utils import cry, isatty, worker_direct +from celery.utils.imports import qualname +from celery.utils.log import get_logger, mlevel, set_in_sighandler +from celery.utils.text import pluralize +from celery.worker import WorkController + +try: + from greenlet import GreenletExit + IGNORE_ERRORS = (GreenletExit, ) +except ImportError: # pragma: no cover + IGNORE_ERRORS = () + +logger = get_logger(__name__) +is_jython = sys.platform.startswith('java') +is_pypy = hasattr(sys, 'pypy_version_info') + + +def active_thread_count(): + from threading import enumerate + # must use .getName on Python 2.5 + return sum(1 for t in enumerate() + if not t.getName().startswith('Dummy-')) + + +def safe_say(msg): + sys.__stderr__.write('\n%s\n' % msg) + +ARTLINES = [ + ' --------------', + '---- **** -----', + '--- * *** * --', + '-- * - **** ---', + '- ** ----------', + '- ** ----------', + '- ** ----------', + '- ** ----------', + '- *** --- * ---', + '-- ******* ----', + '--- ***** -----', + ' --------------', +] + +BANNER = """\ +celery@%(hostname)s v%(version)s + +%(platform)s + +[config] +.> broker: %(conninfo)s +.> app: %(app)s +.> concurrency: %(concurrency)s +.> events: %(events)s + +[queues] +%(queues)s +""" + +EXTRA_INFO_FMT = """ +[Tasks] +%(tasks)s +""" + +UNKNOWN_QUEUE = """\ +Trying to select queue subset of %r, but queue %s is not +defined in the CELERY_QUEUES setting. + +If you want to automatically declare unknown queues you can +enable the CELERY_CREATE_MISSING_QUEUES setting. 
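For reference, a sketch of the two settings that message refers to
(values are illustrative only)::

    # declare the queue subset up front ...
    CELERY_QUEUES = {
        'default': {'exchange': 'default', 'routing_key': 'default'},
    }
    # ... or let the worker declare unknown -Q names on the fly:
    CELERY_CREATE_MISSING_QUEUES = True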
+""" + + +class Worker(configurated): + WorkController = WorkController + + app = None + inherit_confopts = (WorkController, ) + loglevel = from_config('log_level') + redirect_stdouts = from_config() + redirect_stdouts_level = from_config() + + def __init__(self, hostname=None, purge=False, beat=False, + queues=None, include=None, app=None, pidfile=None, + autoscale=None, autoreload=False, no_execv=False, + no_color=None, **kwargs): + self.app = app = app_or_default(app or self.app) + self.hostname = hostname or socket.gethostname() + + # this signal can be used to set up configuration for + # workers by name. + signals.celeryd_init.send(sender=self.hostname, instance=self, + conf=self.app.conf) + + self.setup_defaults(kwargs, namespace='celeryd') + if not self.concurrency: + try: + self.concurrency = cpu_count() + except NotImplementedError: + self.concurrency = 2 + self.purge = purge + self.beat = beat + self.use_queues = [] if queues is None else queues + self.queues = None + self.include = include + self.pidfile = pidfile + self.autoscale = None + self.autoreload = autoreload + self.no_color = no_color + self.no_execv = no_execv + if autoscale: + max_c, _, min_c = autoscale.partition(',') + self.autoscale = [int(max_c), min_c and int(min_c) or 0] + self._isatty = isatty(sys.stdout) + + self.colored = app.log.colored( + self.logfile, + enabled=not no_color if no_color is not None else no_color + ) + + if isinstance(self.use_queues, basestring): + self.use_queues = self.use_queues.split(',') + if self.include: + if isinstance(self.include, basestring): + self.include = self.include.split(',') + app.conf.CELERY_INCLUDE = ( + tuple(app.conf.CELERY_INCLUDE) + tuple(self.include)) + self.loglevel = mlevel(self.loglevel) + + def run(self): + self.init_queues() + self.app.loader.init_worker() + + # this signal can be used to e.g. change queues after + # the -Q option has been applied. + signals.celeryd_after_setup.send(sender=self.hostname, instance=self, + conf=self.app.conf) + + if getattr(os, 'getuid', None) and os.getuid() == 0: + warnings.warn(RuntimeWarning( + 'Running celeryd with superuser privileges is discouraged!')) + + if self.purge: + self.purge_messages() + + # Dump configuration to screen so we have some basic information + # for when users sends bug reports. + print(str(self.colored.cyan(' \n', self.startup_info())) + + str(self.colored.reset(self.extra_info() or ''))) + self.set_process_status('-active-') + + self.setup_logging() + + # apply task execution optimizations + trace.setup_worker_optimizations(self.app) + + try: + self.run_worker() + except IGNORE_ERRORS: + pass + + def on_consumer_ready(self, consumer): + signals.worker_ready.send(sender=consumer) + print('celery@%s ready.' 
% safe_str(self.hostname)) + + def init_queues(self): + try: + self.app.select_queues(self.use_queues) + except KeyError, exc: + raise ImproperlyConfigured(UNKNOWN_QUEUE % (self.use_queues, exc)) + if self.app.conf.CELERY_WORKER_DIRECT: + self.app.amqp.queues.select_add(worker_direct(self.hostname)) + + def setup_logging(self, colorize=None): + if colorize is None and self.no_color is not None: + colorize = not self.no_color + self.app.log.setup(self.loglevel, self.logfile, + self.redirect_stdouts, self.redirect_stdouts_level, + colorize=colorize) + + def purge_messages(self): + count = self.app.control.purge() + print('purge: Erased %d %s from the queue.\n' % ( + count, pluralize(count, 'message'))) + + def tasklist(self, include_builtins=True, sep='\n', int_='celery.'): + return sep.join( + ' . %s' % task for task in sorted(self.app.tasks) + if (not task.startswith(int_) if not include_builtins else task) + ) + + def extra_info(self): + if self.loglevel <= logging.INFO: + include_builtins = self.loglevel <= logging.DEBUG + tasklist = self.tasklist(include_builtins=include_builtins) + return EXTRA_INFO_FMT % {'tasks': tasklist} + + def startup_info(self): + app = self.app + concurrency = unicode(self.concurrency) + appr = '%s:0x%x' % (app.main or '__main__', id(app)) + if not isinstance(app.loader, AppLoader): + loader = qualname(app.loader) + if loader.startswith('celery.loaders'): + loader = loader[14:] + appr += ' (%s)' % loader + if self.autoscale: + max, min = self.autoscale + concurrency = '{min=%s, max=%s}' % (min, max) + pool = self.pool_cls + if not isinstance(pool, basestring): + pool = pool.__module__ + concurrency += ' (%s)' % pool.split('.')[-1] + events = 'ON' + if not self.send_events: + events = 'OFF (enable -E to monitor this worker)' + + banner = (BANNER % { + 'app': appr, + 'hostname': self.hostname, + 'version': VERSION_BANNER, + 'conninfo': self.app.connection().as_uri(), + 'concurrency': concurrency, + 'platform': _platform.platform(), + 'events': events, + 'queues': app.amqp.queues.format(indent=0, indent_first=False), + }).splitlines() + + # integrate the ASCII art. + for i, x in enumerate(banner): + try: + banner[i] = ' '.join([ARTLINES[i], banner[i]]) + except IndexError: + banner[i] = ' ' * 16 + banner[i] + return '\n'.join(banner) + '\n' + + def run_worker(self): + worker = self.WorkController( + app=self.app, + hostname=self.hostname, + ready_callback=self.on_consumer_ready, beat=self.beat, + autoscale=self.autoscale, autoreload=self.autoreload, + no_execv=self.no_execv, + pidfile=self.pidfile, + **self.confopts_as_dict() + ) + self.install_platform_tweaks(worker) + signals.worker_init.send(sender=worker) + worker.start() + + def install_platform_tweaks(self, worker): + """Install platform specific tweaks and workarounds.""" + if self.app.IS_OSX: + self.osx_proxy_detection_workaround() + + # Install signal handler so SIGHUP restarts the worker. + if not self._isatty: + # only install HUP handler if detached from terminal, + # so closing the terminal window doesn't restart celeryd + # into the background. + if self.app.IS_OSX: + # OS X can't exec from a process using threads. 
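A hedged sketch of driving this class programmatically (the option names
mirror what ``Worker.__init__`` above parses; ``app`` is assumed to be a
configured application)::

    from celery.apps.worker import Worker

    w = Worker(app=app, loglevel='INFO',
               autoscale='10,3',              # parsed to [max=10, min=3]
               queues='default,images')       # comma string is split
    w.run()   # banner, logging setup, then WorkController.start()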
+ # See http://github.com/celery/celery/issues#issue/152 + install_HUP_not_supported_handler(worker) + else: + install_worker_restart_handler(worker) + install_worker_term_handler(worker) + install_worker_term_hard_handler(worker) + install_worker_int_handler(worker) + install_cry_handler() + install_rdb_handler() + + def osx_proxy_detection_workaround(self): + """See http://github.com/celery/celery/issues#issue/161""" + os.environ.setdefault('celery_dummy_proxy', 'set_by_celeryd') + + def set_process_status(self, info): + return platforms.set_mp_process_title( + 'celeryd', + info='%s (%s)' % (info, platforms.strargv(sys.argv)), + hostname=self.hostname, + ) + + +def _shutdown_handler(worker, sig='TERM', how='Warm', + exc=SystemExit, callback=None): + + def _handle_request(*args): + set_in_sighandler(True) + try: + from celery.worker import state + if current_process()._name == 'MainProcess': + if callback: + callback(worker) + safe_say('celeryd: %s shutdown (MainProcess)' % how) + if active_thread_count() > 1: + setattr(state, {'Warm': 'should_stop', + 'Cold': 'should_terminate'}[how], True) + else: + raise exc() + finally: + set_in_sighandler(False) + _handle_request.__name__ = 'worker_' + how + platforms.signals[sig] = _handle_request +install_worker_term_handler = partial( + _shutdown_handler, sig='SIGTERM', how='Warm', exc=SystemExit, +) +if not is_jython: + install_worker_term_hard_handler = partial( + _shutdown_handler, sig='SIGQUIT', how='Cold', exc=SystemTerminate, + ) +else: + install_worker_term_handler = \ + install_worker_term_hard_handler = lambda *a, **kw: None + + +def on_SIGINT(worker): + safe_say('celeryd: Hitting Ctrl+C again will terminate all running tasks!') + install_worker_term_hard_handler(worker, sig='SIGINT') +if not is_jython: + install_worker_int_handler = partial( + _shutdown_handler, sig='SIGINT', callback=on_SIGINT + ) +else: + install_worker_int_handler = lambda *a, **kw: None + + +def _clone_current_worker(): + if os.fork() == 0: + platforms.close_open_fds([ + sys.__stdin__, sys.__stdout__, sys.__stderr__, + ]) + os.execv(sys.executable, [sys.executable] + sys.argv) + + +def install_worker_restart_handler(worker, sig='SIGHUP'): + + def restart_worker_sig_handler(*args): + """Signal handler restarting the current python program.""" + set_in_sighandler(True) + safe_say('Restarting celeryd (%s)' % (' '.join(sys.argv), )) + import atexit + atexit.register(_clone_current_worker) + from celery.worker import state + state.should_stop = True + platforms.signals[sig] = restart_worker_sig_handler + + +def install_cry_handler(): + # Jython/PyPy does not have sys._current_frames + if is_jython or is_pypy: # pragma: no cover + return + + def cry_handler(*args): + """Signal handler logging the stacktrace of all active threads.""" + set_in_sighandler(True) + try: + safe_say(cry()) + finally: + set_in_sighandler(False) + platforms.signals['SIGUSR1'] = cry_handler + + +def install_rdb_handler(envvar='CELERY_RDBSIG', + sig='SIGUSR2'): # pragma: no cover + + def rdb_handler(*args): + """Signal handler setting a rdb breakpoint at the current frame.""" + set_in_sighandler(True) + try: + _, frame = args + from celery.contrib import rdb + rdb.set_trace(frame) + finally: + set_in_sighandler(False) + if os.environ.get(envvar): + platforms.signals[sig] = rdb_handler + + +def install_HUP_not_supported_handler(worker, sig='SIGHUP'): + + def warn_on_HUP_handler(*args): + set_in_sighandler(True) + try: + safe_say('%(sig)s not supported: Restarting with %(sig)s is ' + 'unstable on this 
platform!' % {'sig': sig}) + finally: + set_in_sighandler(False) + platforms.signals[sig] = warn_on_HUP_handler diff --git a/awx/lib/site-packages/celery/backends/__init__.py b/awx/lib/site-packages/celery/backends/__init__.py new file mode 100644 index 0000000000..493827379b --- /dev/null +++ b/awx/lib/site-packages/celery/backends/__init__.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +""" + celery.backends + ~~~~~~~~~~~~~~~ + + Backend abstract factory (...did I just say that?) and alias definitions. + +""" +from __future__ import absolute_import + +import sys + +from kombu.utils.url import _parse_url + +from celery.local import Proxy +from celery._state import current_app +from celery.utils.imports import symbol_by_name +from celery.utils.functional import memoize + +UNKNOWN_BACKEND = """\ +Unknown result backend: %r. Did you spell that correctly? (%r)\ +""" + +BACKEND_ALIASES = { + 'amqp': 'celery.backends.amqp:AMQPBackend', + 'cache': 'celery.backends.cache:CacheBackend', + 'redis': 'celery.backends.redis:RedisBackend', + 'mongodb': 'celery.backends.mongodb:MongoBackend', + 'database': 'celery.backends.database:DatabaseBackend', + 'cassandra': 'celery.backends.cassandra:CassandraBackend', + 'disabled': 'celery.backends.base:DisabledBackend', +} + +#: deprecated alias to ``current_app.backend``. +default_backend = Proxy(lambda: current_app.backend) + + +@memoize(100) +def get_backend_cls(backend=None, loader=None): + """Get backend class by name/alias""" + backend = backend or 'disabled' + loader = loader or current_app.loader + aliases = dict(BACKEND_ALIASES, **loader.override_backends) + try: + return symbol_by_name(backend, aliases) + except ValueError, exc: + raise ValueError, ValueError(UNKNOWN_BACKEND % ( + backend, exc)), sys.exc_info()[2] + + +def get_backend_by_url(backend=None, loader=None): + url = None + if backend and '://' in backend: + url = backend + backend, _, _, _, _, _, _ = _parse_url(url) + return get_backend_cls(backend, loader), url diff --git a/awx/lib/site-packages/celery/backends/amqp.py b/awx/lib/site-packages/celery/backends/amqp.py new file mode 100644 index 0000000000..7a6154f0c2 --- /dev/null +++ b/awx/lib/site-packages/celery/backends/amqp.py @@ -0,0 +1,268 @@ +# -*- coding: utf-8 -*- +""" + celery.backends.amqp + ~~~~~~~~~~~~~~~~~~~~ + + The AMQP result backend. + + This backend publishes results as messages. + +""" +from __future__ import absolute_import +from __future__ import with_statement + +import socket +import threading +import time + +from kombu import Exchange, Queue, Producer, Consumer + +from celery import states +from celery.exceptions import TimeoutError +from celery.utils.log import get_logger + +from .base import BaseDictBackend + +logger = get_logger(__name__) + + +class BacklogLimitExceeded(Exception): + """Too much state history to fast-forward.""" + + +def repair_uuid(s): + # Historically the dashes in UUIDS are removed from AMQ entity names, + # but there is no known reason to. Hopefully we'll be able to fix + # this in v4.0. 
+ return '%s-%s-%s-%s-%s' % (s[:8], s[8:12], s[12:16], s[16:20], s[20:]) + + +class AMQPBackend(BaseDictBackend): + """Publishes results by sending messages.""" + Exchange = Exchange + Queue = Queue + Consumer = Consumer + Producer = Producer + + BacklogLimitExceeded = BacklogLimitExceeded + + supports_autoexpire = True + supports_native_join = True + + retry_policy = { + 'max_retries': 20, + 'interval_start': 0, + 'interval_step': 1, + 'interval_max': 1, + } + + def __init__(self, connection=None, exchange=None, exchange_type=None, + persistent=None, serializer=None, auto_delete=True, + **kwargs): + super(AMQPBackend, self).__init__(**kwargs) + conf = self.app.conf + self._connection = connection + self.queue_arguments = {} + self.persistent = (conf.CELERY_RESULT_PERSISTENT if persistent is None + else persistent) + delivery_mode = persistent and 'persistent' or 'transient' + exchange = exchange or conf.CELERY_RESULT_EXCHANGE + exchange_type = exchange_type or conf.CELERY_RESULT_EXCHANGE_TYPE + self.exchange = self.Exchange(name=exchange, + type=exchange_type, + delivery_mode=delivery_mode, + durable=self.persistent, + auto_delete=False) + self.serializer = serializer or conf.CELERY_RESULT_SERIALIZER + self.auto_delete = auto_delete + + # AMQP_TASK_RESULT_EXPIRES setting is deprecated and will be + # removed in version 4.0. + dexpires = conf.CELERY_AMQP_TASK_RESULT_EXPIRES + + self.expires = None + if 'expires' in kwargs: + if kwargs['expires'] is not None: + self.expires = self.prepare_expires(kwargs['expires']) + else: + self.expires = self.prepare_expires(dexpires) + + if self.expires: + self.queue_arguments['x-expires'] = int(self.expires * 1000) + self.mutex = threading.Lock() + + def _create_binding(self, task_id): + name = task_id.replace('-', '') + return self.Queue(name=name, + exchange=self.exchange, + routing_key=name, + durable=self.persistent, + auto_delete=self.auto_delete, + queue_arguments=self.queue_arguments) + + def revive(self, channel): + pass + + def _republish(self, channel, task_id, body, content_type, + content_encoding): + return Producer(channel).publish( + body, + exchange=self.exchange, + routing_key=task_id.replace('-', ''), + serializer=self.serializer, + content_type=content_type, + content_encoding=content_encoding, + retry=True, retry_policy=self.retry_policy, + declare=[self._create_binding(task_id)], + ) + + def _store_result(self, task_id, result, status, traceback=None): + """Send task return value and status.""" + with self.mutex: + with self.app.amqp.producer_pool.acquire(block=True) as pub: + pub.publish({'task_id': task_id, 'status': status, + 'result': self.encode_result(result, status), + 'traceback': traceback, + 'children': self.current_task_children()}, + exchange=self.exchange, + routing_key=task_id.replace('-', ''), + serializer=self.serializer, + retry=True, retry_policy=self.retry_policy, + declare=[self._create_binding(task_id)]) + return result + + def wait_for(self, task_id, timeout=None, cache=True, propagate=True, + **kwargs): + cached_meta = self._cache.get(task_id) + if cache and cached_meta and \ + cached_meta['status'] in states.READY_STATES: + meta = cached_meta + else: + try: + meta = self.consume(task_id, timeout=timeout) + except socket.timeout: + raise TimeoutError('The operation timed out.') + + state = meta['status'] + if state == states.SUCCESS: + return meta['result'] + elif state in states.PROPAGATE_STATES: + if propagate: + raise self.exception_to_python(meta['result']) + return meta['result'] + else: + return 
self.wait_for(task_id, timeout, cache) + + def get_task_meta(self, task_id, backlog_limit=1000): + # Polling and using basic_get + with self.app.pool.acquire_channel(block=True) as (_, channel): + binding = self._create_binding(task_id)(channel) + binding.declare() + prev = latest = acc = None + for i in xrange(backlog_limit): # spool ffwd + prev, latest, acc = latest, acc, binding.get(no_ack=False) + if not acc: # no more messages + break + if prev: + # backends are not expected to keep history, + # so we delete everything except the most recent state. + prev.ack() + else: + raise self.BacklogLimitExceeded(task_id) + + if latest: + payload = self._cache[task_id] = latest.payload + latest.requeue() + return payload + else: + # no new state, use previous + try: + return self._cache[task_id] + except KeyError: + # result probably pending. + return {'status': states.PENDING, 'result': None} + poll = get_task_meta # XXX compat + + def drain_events(self, connection, consumer, timeout=None, now=time.time): + wait = connection.drain_events + results = {} + + def callback(meta, message): + if meta['status'] in states.READY_STATES: + uuid = repair_uuid(message.delivery_info['routing_key']) + results[uuid] = meta + + consumer.callbacks[:] = [callback] + time_start = now() + + while 1: + # Total time spent may exceed a single call to wait() + if timeout and now() - time_start >= timeout: + raise socket.timeout() + wait(timeout=timeout) + if results: # got event on the wanted channel. + break + self._cache.update(results) + return results + + def consume(self, task_id, timeout=None): + with self.app.pool.acquire_channel(block=True) as (conn, channel): + binding = self._create_binding(task_id) + with self.Consumer(channel, binding, no_ack=True) as consumer: + return self.drain_events(conn, consumer, timeout).values()[0] + + def get_many(self, task_ids, timeout=None, **kwargs): + with self.app.pool.acquire_channel(block=True) as (conn, channel): + ids = set(task_ids) + cached_ids = set() + for task_id in ids: + try: + cached = self._cache[task_id] + except KeyError: + pass + else: + if cached['status'] in states.READY_STATES: + yield task_id, cached + cached_ids.add(task_id) + ids ^= cached_ids + + bindings = [self._create_binding(task_id) for task_id in task_ids] + with self.Consumer(channel, bindings, no_ack=True) as consumer: + while ids: + r = self.drain_events(conn, consumer, timeout) + ids ^= set(r) + for ready_id, ready_meta in r.iteritems(): + yield ready_id, ready_meta + + def reload_task_result(self, task_id): + raise NotImplementedError( + 'reload_task_result is not supported by this backend.') + + def reload_group_result(self, task_id): + """Reload group result, even if it has been previously fetched.""" + raise NotImplementedError( + 'reload_group_result is not supported by this backend.') + + def save_group(self, group_id, result): + raise NotImplementedError( + 'save_group is not supported by this backend.') + + def restore_group(self, group_id, cache=True): + raise NotImplementedError( + 'restore_group is not supported by this backend.') + + def delete_group(self, group_id): + raise NotImplementedError( + 'delete_group is not supported by this backend.') + + def __reduce__(self, args=(), kwargs={}): + kwargs.update( + connection=self._connection, + exchange=self.exchange.name, + exchange_type=self.exchange.type, + persistent=self.persistent, + serializer=self.serializer, + auto_delete=self.auto_delete, + expires=self.expires, + ) + return super(AMQPBackend, self).__reduce__(args, kwargs) 
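The backend above gives every task its own transient reply queue: the
queue name and routing key are the task id with the dashes stripped, and
``repair_uuid()`` reverses that mapping when results are consumed. A small
sketch (the uuid is just an example)::

    tid = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'
    name = tid.replace('-', '')     # queue/routing key used on the wire
    assert repair_uuid(name) == tid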
diff --git a/awx/lib/site-packages/celery/backends/base.py b/awx/lib/site-packages/celery/backends/base.py new file mode 100644 index 0000000000..888bfe4be1 --- /dev/null +++ b/awx/lib/site-packages/celery/backends/base.py @@ -0,0 +1,542 @@ +# -*- coding: utf-8 -*- +""" + celery.backends.base + ~~~~~~~~~~~~~~~~~~~~ + + Result backend base classes. + + - :class:`BaseBackend` defines the interface. + + - :class:`BaseDictBackend` assumes the fields are stored in a dict. + + - :class:`KeyValueStoreBackend` is a common base class + using K/V semantics like _get and _put. + +""" +from __future__ import absolute_import + +import time +import sys + +from datetime import timedelta + +from billiard.einfo import ExceptionInfo +from kombu import serialization +from kombu.utils.encoding import bytes_to_str, ensure_bytes, from_utf8 + +from celery import states +from celery.app import current_task +from celery.datastructures import LRUCache +from celery.exceptions import ChordError, TaskRevokedError, TimeoutError +from celery.result import from_serializable, GroupResult +from celery.utils import timeutils +from celery.utils.serialization import ( + get_pickled_exception, + get_pickleable_exception, + create_exception_cls, +) + +EXCEPTION_ABLE_CODECS = frozenset(['pickle', 'yaml']) +is_py3k = sys.version_info >= (3, 0) + + +def unpickle_backend(cls, args, kwargs): + """Returns an unpickled backend.""" + return cls(*args, **kwargs) + + +class BaseBackend(object): + """Base backend class.""" + READY_STATES = states.READY_STATES + UNREADY_STATES = states.UNREADY_STATES + EXCEPTION_STATES = states.EXCEPTION_STATES + + TimeoutError = TimeoutError + + #: Time to sleep between polling each individual item + #: in `ResultSet.iterate`. as opposed to the `interval` + #: argument which is for each pass. + subpolling_interval = None + + #: If true the backend must implement :meth:`get_many`. + supports_native_join = False + + #: If true the backend must automatically expire results. + #: The daily backend_cleanup periodic task will not be triggered + #: in this case. 
+    supports_autoexpire = False
+
+    def __init__(self, *args, **kwargs):
+        from celery.app import app_or_default
+        self.app = app_or_default(kwargs.get('app'))
+        self.serializer = kwargs.get('serializer',
+                                     self.app.conf.CELERY_RESULT_SERIALIZER)
+        (self.content_type,
+         self.content_encoding,
+         self.encoder) = serialization.registry._encoders[self.serializer]
+
+    def encode(self, data):
+        _, _, payload = serialization.encode(data, serializer=self.serializer)
+        return payload
+
+    def decode(self, payload):
+        payload = is_py3k and payload or str(payload)
+        return serialization.decode(payload,
+                                    content_type=self.content_type,
+                                    content_encoding=self.content_encoding)
+
+    def prepare_expires(self, value, type=None):
+        if value is None:
+            value = self.app.conf.CELERY_TASK_RESULT_EXPIRES
+        if isinstance(value, timedelta):
+            value = timeutils.timedelta_seconds(value)
+        if value is not None and type:
+            return type(value)
+        return value
+
+    def encode_result(self, result, status):
+        if status in self.EXCEPTION_STATES and isinstance(result, Exception):
+            return self.prepare_exception(result)
+        else:
+            return self.prepare_value(result)
+
+    def store_result(self, task_id, result, status, traceback=None):
+        """Store the result and status of a task."""
+        raise NotImplementedError(
+            'store_result is not supported by this backend.')
+
+    def mark_as_started(self, task_id, **meta):
+        """Mark a task as started."""
+        return self.store_result(task_id, meta, status=states.STARTED)
+
+    def mark_as_done(self, task_id, result):
+        """Mark task as successfully executed."""
+        return self.store_result(task_id, result, status=states.SUCCESS)
+
+    def mark_as_failure(self, task_id, exc, traceback=None):
+        """Mark task as executed with failure. Stores the exception."""
+        return self.store_result(task_id, exc, status=states.FAILURE,
+                                 traceback=traceback)
+
+    def fail_from_current_stack(self, task_id, exc=None):
+        type_, real_exc, tb = sys.exc_info()
+        try:
+            exc = real_exc if exc is None else exc
+            ei = ExceptionInfo((type_, exc, tb))
+            self.mark_as_failure(task_id, exc, ei.traceback)
+            return ei
+        finally:
+            del(tb)
+
+    def mark_as_retry(self, task_id, exc, traceback=None):
+        """Mark task as being retried. Stores the current
+        exception (if any)."""
+        return self.store_result(task_id, exc, status=states.RETRY,
+                                 traceback=traceback)
+
+    def mark_as_revoked(self, task_id, reason=''):
+        return self.store_result(task_id, TaskRevokedError(reason),
+                                 status=states.REVOKED, traceback=None)
+
+    def prepare_exception(self, exc):
+        """Prepare exception for serialization."""
+        if self.serializer in EXCEPTION_ABLE_CODECS:
+            return get_pickleable_exception(exc)
+        return {'exc_type': type(exc).__name__, 'exc_message': str(exc)}
+
+    def exception_to_python(self, exc):
+        """Convert serialized exception to Python exception."""
+        if self.serializer in EXCEPTION_ABLE_CODECS:
+            return get_pickled_exception(exc)
+        return create_exception_cls(from_utf8(exc['exc_type']),
+                                    sys.modules[__name__])(exc['exc_message'])
+
+    def prepare_value(self, result):
+        """Prepare value for storage."""
+        if isinstance(result, GroupResult):
+            return result.serializable()
+        return result
+
+    def forget(self, task_id):
+        raise NotImplementedError('%s does not implement forget.' % (
+            self.__class__))
+
+    def wait_for(self, task_id, timeout=None, propagate=True, interval=0.5):
+        """Wait for task and return its result.
+
+        If the task raises an exception, this exception
+        will be re-raised by :func:`wait_for`.
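A sketch of how the exception round-trip above behaves with a non-pickle
result serializer such as json (``backend`` is a hypothetical configured
instance)::

    backend.prepare_exception(KeyError('job'))
    # -> {'exc_type': 'KeyError', 'exc_message': "'job'"}

    exc = backend.exception_to_python(
        {'exc_type': 'KeyError', 'exc_message': "'job'"})
    # exc is an instance of a dynamically recreated KeyError class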
+ + If `timeout` is not :const:`None`, this raises the + :class:`celery.exceptions.TimeoutError` exception if the operation + takes longer than `timeout` seconds. + + """ + + time_elapsed = 0.0 + + while 1: + status = self.get_status(task_id) + if status == states.SUCCESS: + return self.get_result(task_id) + elif status in states.PROPAGATE_STATES: + result = self.get_result(task_id) + if propagate: + raise result + return result + # avoid hammering the CPU checking status. + time.sleep(interval) + time_elapsed += interval + if timeout and time_elapsed >= timeout: + raise TimeoutError('The operation timed out.') + + def cleanup(self): + """Backend cleanup. Is run by + :class:`celery.task.DeleteExpiredTaskMetaTask`.""" + pass + + def process_cleanup(self): + """Cleanup actions to do at the end of a task worker process.""" + pass + + def get_status(self, task_id): + """Get the status of a task.""" + raise NotImplementedError( + 'get_status is not supported by this backend.') + + def get_result(self, task_id): + """Get the result of a task.""" + raise NotImplementedError( + 'get_result is not supported by this backend.') + + def get_children(self, task_id): + raise NotImplementedError( + 'get_children is not supported by this backend.') + + def get_traceback(self, task_id): + """Get the traceback for a failed task.""" + raise NotImplementedError( + 'get_traceback is not supported by this backend.') + + def save_group(self, group_id, result): + """Store the result and status of a task.""" + + raise NotImplementedError( + 'save_group is not supported by %s.' % (type(self).__name__, )) + + def restore_group(self, group_id, cache=True): + """Get the result of a group.""" + raise NotImplementedError( + 'restore_group is not supported by this backend.') + + def delete_group(self, group_id): + raise NotImplementedError( + 'delete_group is not supported by this backend.') + + def reload_task_result(self, task_id): + """Reload task result, even if it has been previously fetched.""" + raise NotImplementedError( + 'reload_task_result is not supported by this backend.') + + def reload_group_result(self, task_id): + """Reload group result, even if it has been previously fetched.""" + raise NotImplementedError( + 'reload_group_result is not supported by this backend.') + + def on_chord_part_return(self, task, propagate=True): + pass + + def fallback_chord_unlock(self, group_id, body, result=None, + countdown=1, **kwargs): + kwargs['result'] = [r.id for r in result] + self.app.tasks['celery.chord_unlock'].apply_async( + (group_id, body, ), kwargs, countdown=countdown, + ) + on_chord_apply = fallback_chord_unlock + + def current_task_children(self): + current = current_task() + if current: + return [r.serializable() for r in current.request.children] + + def __reduce__(self, args=(), kwargs={}): + return (unpickle_backend, (self.__class__, args, kwargs)) + + def is_cached(self, task_id): + return False + + +class BaseDictBackend(BaseBackend): + + def __init__(self, *args, **kwargs): + super(BaseDictBackend, self).__init__(*args, **kwargs) + self._cache = LRUCache(limit=kwargs.get('max_cached_results') or + self.app.conf.CELERY_MAX_CACHED_RESULTS) + + def is_cached(self, task_id): + return task_id in self._cache + + def store_result(self, task_id, result, status, traceback=None, **kwargs): + """Store task result and status.""" + result = self.encode_result(result, status) + self._store_result(task_id, result, status, traceback, **kwargs) + return result + + def forget(self, task_id): + self._cache.pop(task_id, 
None) + self._forget(task_id) + + def _forget(self, task_id): + raise NotImplementedError('%s does not implement forget.' % ( + self.__class__)) + + def get_status(self, task_id): + """Get the status of a task.""" + return self.get_task_meta(task_id)['status'] + + def get_traceback(self, task_id): + """Get the traceback for a failed task.""" + return self.get_task_meta(task_id).get('traceback') + + def get_result(self, task_id): + """Get the result of a task.""" + meta = self.get_task_meta(task_id) + if meta['status'] in self.EXCEPTION_STATES: + return self.exception_to_python(meta['result']) + else: + return meta['result'] + + def get_children(self, task_id): + """Get the list of subtasks sent by a task.""" + try: + return self.get_task_meta(task_id)['children'] + except KeyError: + pass + + def get_task_meta(self, task_id, cache=True): + if cache: + try: + return self._cache[task_id] + except KeyError: + pass + + meta = self._get_task_meta_for(task_id) + if cache and meta.get('status') == states.SUCCESS: + self._cache[task_id] = meta + return meta + + def reload_task_result(self, task_id): + self._cache[task_id] = self.get_task_meta(task_id, cache=False) + + def reload_group_result(self, group_id): + self._cache[group_id] = self.get_group_meta(group_id, + cache=False) + + def get_group_meta(self, group_id, cache=True): + if cache: + try: + return self._cache[group_id] + except KeyError: + pass + + meta = self._restore_group(group_id) + if cache and meta is not None: + self._cache[group_id] = meta + return meta + + def restore_group(self, group_id, cache=True): + """Get the result for a group.""" + meta = self.get_group_meta(group_id, cache=cache) + if meta: + return meta['result'] + + def save_group(self, group_id, result): + """Store the result of an executed group.""" + return self._save_group(group_id, result) + + def delete_group(self, group_id): + self._cache.pop(group_id, None) + return self._delete_group(group_id) + + +class KeyValueStoreBackend(BaseDictBackend): + task_keyprefix = ensure_bytes('celery-task-meta-') + group_keyprefix = ensure_bytes('celery-taskset-meta-') + chord_keyprefix = ensure_bytes('chord-unlock-') + implements_incr = False + + def get(self, key): + raise NotImplementedError('Must implement the get method.') + + def mget(self, keys): + raise NotImplementedError('Does not support get_many') + + def set(self, key, value): + raise NotImplementedError('Must implement the set method.') + + def delete(self, key): + raise NotImplementedError('Must implement the delete method') + + def incr(self, key): + raise NotImplementedError('Does not implement incr') + + def expire(self, key, value): + pass + + def get_key_for_task(self, task_id): + """Get the cache key for a task by id.""" + return self.task_keyprefix + ensure_bytes(task_id) + + def get_key_for_group(self, group_id): + """Get the cache key for a group by id.""" + return self.group_keyprefix + ensure_bytes(group_id) + + def get_key_for_chord(self, group_id): + """Get the cache key for the chord waiting on group with given id.""" + return self.chord_keyprefix + ensure_bytes(group_id) + + def _strip_prefix(self, key): + """Takes bytes, emits string.""" + key = ensure_bytes(key) + for prefix in self.task_keyprefix, self.group_keyprefix: + if key.startswith(prefix): + return bytes_to_str(key[len(prefix):]) + return bytes_to_str(key) + + def _mget_to_results(self, values, keys): + if hasattr(values, 'items'): + # client returns dict so mapping preserved. 
+ return dict((self._strip_prefix(k), self.decode(v)) + for k, v in values.iteritems() + if v is not None) + else: + # client returns list so need to recreate mapping. + return dict((bytes_to_str(keys[i]), self.decode(value)) + for i, value in enumerate(values) + if value is not None) + + def get_many(self, task_ids, timeout=None, interval=0.5): + ids = set(task_ids) + cached_ids = set() + for task_id in ids: + try: + cached = self._cache[task_id] + except KeyError: + pass + else: + if cached['status'] in states.READY_STATES: + yield bytes_to_str(task_id), cached + cached_ids.add(task_id) + + ids ^= cached_ids + iterations = 0 + while ids: + keys = list(ids) + r = self._mget_to_results(self.mget([self.get_key_for_task(k) + for k in keys]), keys) + self._cache.update(r) + ids ^= set(bytes_to_str(v) for v in r) + for key, value in r.iteritems(): + yield bytes_to_str(key), value + if timeout and iterations * interval >= timeout: + raise TimeoutError('Operation timed out (%s)' % (timeout, )) + time.sleep(interval) # don't busy loop. + iterations += 1 + + def _forget(self, task_id): + self.delete(self.get_key_for_task(task_id)) + + def _store_result(self, task_id, result, status, traceback=None): + meta = {'status': status, 'result': result, 'traceback': traceback, + 'children': self.current_task_children()} + self.set(self.get_key_for_task(task_id), self.encode(meta)) + return result + + def _save_group(self, group_id, result): + self.set(self.get_key_for_group(group_id), + self.encode({'result': result.serializable()})) + return result + + def _delete_group(self, group_id): + self.delete(self.get_key_for_group(group_id)) + + def _get_task_meta_for(self, task_id): + """Get task metadata for a task by id.""" + meta = self.get(self.get_key_for_task(task_id)) + if not meta: + return {'status': states.PENDING, 'result': None} + return self.decode(meta) + + def _restore_group(self, group_id): + """Get task metadata for a task by id.""" + meta = self.get(self.get_key_for_group(group_id)) + # previously this was always pickled, but later this + # was extended to support other serializers, so the + # structure is kind of weird. 
+ if meta: + meta = self.decode(meta) + result = meta['result'] + if isinstance(result, (list, tuple)): + return {'result': from_serializable(result, self.app)} + return meta + + def on_chord_apply(self, group_id, body, result=None, **kwargs): + if self.implements_incr: + self.save_group(group_id, self.app.GroupResult(group_id, result)) + else: + self.fallback_chord_unlock(group_id, body, result, **kwargs) + + def on_chord_part_return(self, task, propagate=None): + if not self.implements_incr: + return + from celery import subtask + from celery.result import GroupResult + app = self.app + if propagate is None: + propagate = self.app.conf.CELERY_CHORD_PROPAGATES + gid = task.request.group + if not gid: + return + key = self.get_key_for_chord(gid) + deps = GroupResult.restore(gid, backend=task.backend) + val = self.incr(key) + if val >= len(deps): + j = deps.join_native if deps.supports_native_join else deps.join + callback = subtask(task.request.chord) + try: + ret = j(propagate=propagate) + except Exception, exc: + try: + culprit = deps._failed_join_report().next() + reason = 'Dependency %s raised %r' % (culprit.id, exc) + except StopIteration: + reason = repr(exc) + app._tasks[callback.task].backend.fail_from_current_stack( + callback.id, exc=ChordError(reason), + ) + else: + try: + callback.delay(ret) + except Exception, exc: + app._tasks[callback.task].backend.fail_from_current_stack( + callback.id, + exc=ChordError('Callback error: %r' % (exc, )), + ) + finally: + deps.delete() + self.client.delete(key) + else: + self.expire(key, 86400) + + +class DisabledBackend(BaseBackend): + _cache = {} # need this attribute to reset cache in tests. + + def store_result(self, *args, **kwargs): + pass + + def _is_disabled(self, *args, **kwargs): + raise NotImplementedError( + 'No result backend configured. ' + 'Please see the documentation for more information.') + wait_for = get_status = get_result = get_traceback = _is_disabled diff --git a/awx/lib/site-packages/celery/backends/cache.py b/awx/lib/site-packages/celery/backends/cache.py new file mode 100644 index 0000000000..411a07a780 --- /dev/null +++ b/awx/lib/site-packages/celery/backends/cache.py @@ -0,0 +1,135 @@ +# -*- coding: utf-8 -*- +""" + celery.backends.cache + ~~~~~~~~~~~~~~~~~~~~~ + + Memcache and in-memory cache result backend. 
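What the incr-based chord protocol above enables, as a hedged sketch
(``add`` and ``tsum`` are hypothetical tasks; the result backend must set
``implements_incr``, e.g. the cache backend)::

    from celery import chord

    res = chord(add.s(i, i) for i in xrange(10))(tsum.s())
    res.get()   # 90 -- the body fires once the counter stored at
                # 'chord-unlock-<group id>' reaches len(header)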
+ +""" +from __future__ import absolute_import + +from kombu.utils import cached_property + +from celery.datastructures import LRUCache +from celery.exceptions import ImproperlyConfigured + +from .base import KeyValueStoreBackend + +_imp = [None] + +REQUIRES_BACKEND = """\ +The memcached backend requires either pylibmc or python-memcached.\ +""" + +UNKNOWN_BACKEND = """\ +The cache backend %r is unknown, +Please use one of the following backends instead: %s\ +""" + + +def import_best_memcache(): + if _imp[0] is None: + is_pylibmc = False + try: + import pylibmc as memcache + is_pylibmc = True + except ImportError: + try: + import memcache # noqa + except ImportError: + raise ImproperlyConfigured(REQUIRES_BACKEND) + _imp[0] = (is_pylibmc, memcache) + return _imp[0] + + +def get_best_memcache(*args, **kwargs): + behaviors = kwargs.pop('behaviors', None) + is_pylibmc, memcache = import_best_memcache() + client = memcache.Client(*args, **kwargs) + if is_pylibmc and behaviors is not None: + client.behaviors = behaviors + return client + + +class DummyClient(object): + + def __init__(self, *args, **kwargs): + self.cache = LRUCache(limit=5000) + + def get(self, key, *args, **kwargs): + return self.cache.get(key) + + def get_multi(self, keys): + cache = self.cache + return dict((k, cache[k]) for k in keys if k in cache) + + def set(self, key, value, *args, **kwargs): + self.cache[key] = value + + def delete(self, key, *args, **kwargs): + self.cache.pop(key, None) + + def incr(self, key, delta=1): + return self.cache.incr(key, delta) + + +backends = {'memcache': lambda: get_best_memcache, + 'memcached': lambda: get_best_memcache, + 'pylibmc': lambda: get_best_memcache, + 'memory': lambda: DummyClient} + + +class CacheBackend(KeyValueStoreBackend): + servers = None + supports_autoexpire = True + supports_native_join = True + implements_incr = True + + def __init__(self, expires=None, backend=None, options={}, **kwargs): + super(CacheBackend, self).__init__(self, **kwargs) + + self.options = dict(self.app.conf.CELERY_CACHE_BACKEND_OPTIONS, + **options) + + self.backend = backend or self.app.conf.CELERY_CACHE_BACKEND + if self.backend: + self.backend, _, servers = self.backend.partition('://') + self.servers = servers.rstrip('/').split(';') + self.expires = self.prepare_expires(expires, type=int) + try: + self.Client = backends[self.backend]() + except KeyError: + raise ImproperlyConfigured(UNKNOWN_BACKEND % ( + self.backend, ', '.join(backends))) + + def get(self, key): + return self.client.get(key) + + def mget(self, keys): + return self.client.get_multi(keys) + + def set(self, key, value): + return self.client.set(key, value, self.expires) + + def delete(self, key): + return self.client.delete(key) + + def on_chord_apply(self, group_id, body, result=None, **kwargs): + self.client.set(self.get_key_for_chord(group_id), '0', time=86400) + self.save_group(group_id, self.app.GroupResult(group_id, result)) + + def incr(self, key): + return self.client.incr(key) + + @cached_property + def client(self): + return self.Client(self.servers, **self.options) + + def __reduce__(self, args=(), kwargs={}): + servers = ';'.join(self.servers) + backend = '%s://%s/' % (self.backend, servers) + kwargs.update( + dict(backend=backend, + expires=self.expires, + options=self.options)) + return super(CacheBackend, self).__reduce__(args, kwargs) diff --git a/awx/lib/site-packages/celery/backends/cassandra.py b/awx/lib/site-packages/celery/backends/cassandra.py new file mode 100644 index 0000000000..66ae7b2851 --- /dev/null 
+++ b/awx/lib/site-packages/celery/backends/cassandra.py @@ -0,0 +1,188 @@ +# -*- coding: utf-8 -*- +""" + celery.backends.cassandra + ~~~~~~~~~~~~~~~~~~~~~~~~~ + + Apache Cassandra result store backend. + +""" +from __future__ import absolute_import + +try: # pragma: no cover + import pycassa + from thrift import Thrift + C = pycassa.cassandra.ttypes +except ImportError: # pragma: no cover + pycassa = None # noqa + +import socket +import time + +from celery import states +from celery.exceptions import ImproperlyConfigured +from celery.utils.log import get_logger +from celery.utils.timeutils import maybe_timedelta, timedelta_seconds + +from .base import BaseDictBackend + +logger = get_logger(__name__) + + +class CassandraBackend(BaseDictBackend): + """Highly fault tolerant Cassandra backend. + + .. attribute:: servers + + List of Cassandra servers with format: ``hostname:port``. + + :raises celery.exceptions.ImproperlyConfigured: if + module :mod:`pycassa` is not available. + + """ + servers = [] + keyspace = None + column_family = None + detailed_mode = False + _retry_timeout = 300 + _retry_wait = 3 + supports_autoexpire = True + + def __init__(self, servers=None, keyspace=None, column_family=None, + cassandra_options=None, detailed_mode=False, **kwargs): + """Initialize Cassandra backend. + + Raises :class:`celery.exceptions.ImproperlyConfigured` if + the :setting:`CASSANDRA_SERVERS` setting is not set. + + """ + super(CassandraBackend, self).__init__(**kwargs) + + self.expires = kwargs.get('expires') or maybe_timedelta( + self.app.conf.CELERY_TASK_RESULT_EXPIRES) + + if not pycassa: + raise ImproperlyConfigured( + 'You need to install the pycassa library to use the ' + 'Cassandra backend. See https://github.com/pycassa/pycassa') + + conf = self.app.conf + self.servers = (servers or + conf.get('CASSANDRA_SERVERS') or + self.servers) + self.keyspace = (keyspace or + conf.get('CASSANDRA_KEYSPACE') or + self.keyspace) + self.column_family = (column_family or + conf.get('CASSANDRA_COLUMN_FAMILY') or + self.column_family) + self.cassandra_options = dict(conf.get('CASSANDRA_OPTIONS') or {}, + **cassandra_options or {}) + self.detailed_mode = (detailed_mode or + conf.get('CASSANDRA_DETAILED_MODE') or + self.detailed_mode) + read_cons = conf.get('CASSANDRA_READ_CONSISTENCY') or 'LOCAL_QUORUM' + write_cons = conf.get('CASSANDRA_WRITE_CONSISTENCY') or 'LOCAL_QUORUM' + try: + self.read_consistency = getattr(pycassa.ConsistencyLevel, + read_cons) + except AttributeError: + self.read_consistency = pycassa.ConsistencyLevel.LOCAL_QUORUM + try: + self.write_consistency = getattr(pycassa.ConsistencyLevel, + write_cons) + except AttributeError: + self.write_consistency = pycassa.ConsistencyLevel.LOCAL_QUORUM + + if not self.servers or not self.keyspace or not self.column_family: + raise ImproperlyConfigured( + 'Cassandra backend not configured.') + + self._column_family = None + + def _retry_on_error(self, fun, *args, **kwargs): + ts = time.time() + self._retry_timeout + while 1: + try: + return fun(*args, **kwargs) + except (pycassa.InvalidRequestException, + pycassa.TimedOutException, + pycassa.UnavailableException, + pycassa.AllServersUnavailable, + socket.error, + socket.timeout, + Thrift.TException), exc: + if time.time() > ts: + raise + logger.warning('Cassandra error: %r. 
Retrying...', exc) + time.sleep(self._retry_wait) + + def _get_column_family(self): + if self._column_family is None: + conn = pycassa.ConnectionPool(self.keyspace, + server_list=self.servers, + **self.cassandra_options) + self._column_family = pycassa.ColumnFamily( + conn, self.column_family, + read_consistency_level=self.read_consistency, + write_consistency_level=self.write_consistency, + ) + return self._column_family + + def process_cleanup(self): + if self._column_family is not None: + self._column_family = None + + def _store_result(self, task_id, result, status, traceback=None): + """Store return value and status of an executed task.""" + + def _do_store(): + cf = self._get_column_family() + date_done = self.app.now() + meta = {'status': status, + 'date_done': date_done.strftime('%Y-%m-%dT%H:%M:%SZ'), + 'traceback': self.encode(traceback), + 'children': self.encode(self.current_task_children())} + if self.detailed_mode: + meta['result'] = result + cf.insert(task_id, {date_done: self.encode(meta)}, + ttl=self.expires and timedelta_seconds(self.expires)) + else: + meta['result'] = self.encode(result) + cf.insert(task_id, meta, + ttl=self.expires and timedelta_seconds(self.expires)) + + return self._retry_on_error(_do_store) + + def _get_task_meta_for(self, task_id): + """Get task metadata for a task by id.""" + + def _do_get(): + cf = self._get_column_family() + try: + if self.detailed_mode: + row = cf.get(task_id, column_reversed=True, column_count=1) + meta = self.decode(row.values()[0]) + meta['task_id'] = task_id + else: + obj = cf.get(task_id) + meta = { + 'task_id': task_id, + 'status': obj['status'], + 'result': self.decode(obj['result']), + 'date_done': obj['date_done'], + 'traceback': self.decode(obj['traceback']), + 'children': self.decode(obj['children']), + } + except (KeyError, pycassa.NotFoundException): + meta = {'status': states.PENDING, 'result': None} + return meta + + return self._retry_on_error(_do_get) + + def __reduce__(self, args=(), kwargs={}): + kwargs.update( + dict(servers=self.servers, + keyspace=self.keyspace, + column_family=self.column_family, + cassandra_options=self.cassandra_options)) + return super(CassandraBackend, self).__reduce__(args, kwargs) diff --git a/awx/lib/site-packages/celery/backends/database/__init__.py b/awx/lib/site-packages/celery/backends/database/__init__.py new file mode 100644 index 0000000000..7bb98cfc2e --- /dev/null +++ b/awx/lib/site-packages/celery/backends/database/__init__.py @@ -0,0 +1,182 @@ +# -*- coding: utf-8 -*- +""" + celery.backends.database + ~~~~~~~~~~~~~~~~~~~~~~~~ + + SQLAlchemy result store backend. + +""" +from __future__ import absolute_import + +from functools import wraps + +from celery import states +from celery.exceptions import ImproperlyConfigured +from celery.utils.timeutils import maybe_timedelta + +from celery.backends.base import BaseDictBackend + +from .models import Task, TaskSet +from .session import ResultSession + + +def _sqlalchemy_installed(): + try: + import sqlalchemy + except ImportError: + raise ImproperlyConfigured( + 'The database result backend requires SQLAlchemy to be installed.' 
+ 'See http://pypi.python.org/pypi/SQLAlchemy') + return sqlalchemy +_sqlalchemy_installed() + +from sqlalchemy.exc import DatabaseError, OperationalError + + +def retry(fun): + + @wraps(fun) + def _inner(*args, **kwargs): + max_retries = kwargs.pop('max_retries', 3) + + for retries in xrange(max_retries + 1): + try: + return fun(*args, **kwargs) + except (DatabaseError, OperationalError): + if retries + 1 > max_retries: + raise + + return _inner + + +class DatabaseBackend(BaseDictBackend): + """The database result backend.""" + # ResultSet.iterate should sleep this much between each pool, + # to not bombard the database with queries. + subpolling_interval = 0.5 + + def __init__(self, dburi=None, expires=None, + engine_options=None, **kwargs): + super(DatabaseBackend, self).__init__(**kwargs) + conf = self.app.conf + self.expires = maybe_timedelta(self.prepare_expires(expires)) + self.dburi = dburi or conf.CELERY_RESULT_DBURI + self.engine_options = dict( + engine_options or {}, + **conf.CELERY_RESULT_ENGINE_OPTIONS or {}) + self.short_lived_sessions = kwargs.get( + 'short_lived_sessions', + conf.CELERY_RESULT_DB_SHORT_LIVED_SESSIONS, + ) + if not self.dburi: + raise ImproperlyConfigured( + 'Missing connection string! Do you have ' + 'CELERY_RESULT_DBURI set to a real value?') + + def ResultSession(self): + return ResultSession( + dburi=self.dburi, + short_lived_sessions=self.short_lived_sessions, + **self.engine_options + ) + + @retry + def _store_result(self, task_id, result, status, + traceback=None, max_retries=3): + """Store return value and status of an executed task.""" + session = self.ResultSession() + try: + task = session.query(Task).filter(Task.task_id == task_id).first() + if not task: + task = Task(task_id) + session.add(task) + session.flush() + task.result = result + task.status = status + task.traceback = traceback + session.commit() + return result + finally: + session.close() + + @retry + def _get_task_meta_for(self, task_id): + """Get task metadata for a task by id.""" + session = self.ResultSession() + try: + task = session.query(Task).filter(Task.task_id == task_id).first() + if task is None: + task = Task(task_id) + task.status = states.PENDING + task.result = None + return task.to_dict() + finally: + session.close() + + @retry + def _save_group(self, group_id, result): + """Store the result of an executed group.""" + session = self.ResultSession() + try: + group = TaskSet(group_id, result) + session.add(group) + session.flush() + session.commit() + return result + finally: + session.close() + + @retry + def _restore_group(self, group_id): + """Get metadata for group by id.""" + session = self.ResultSession() + try: + group = session.query(TaskSet).filter( + TaskSet.taskset_id == group_id).first() + if group: + return group.to_dict() + finally: + session.close() + + @retry + def _delete_group(self, group_id): + """Delete metadata for group by id.""" + session = self.ResultSession() + try: + session.query(TaskSet).filter( + TaskSet.taskset_id == group_id).delete() + session.flush() + session.commit() + finally: + session.close() + + @retry + def _forget(self, task_id): + """Forget about result.""" + session = self.ResultSession() + try: + session.query(Task).filter(Task.task_id == task_id).delete() + session.commit() + finally: + session.close() + + def cleanup(self): + """Delete expired metadata.""" + session = self.ResultSession() + expires = self.expires + now = self.app.now() + try: + session.query(Task).filter( + Task.date_done < (now - expires)).delete() + 
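            # Expired TaskSet (group) rows are removed in the same
+            # transaction: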
+            session.query(TaskSet).filter(
+                TaskSet.date_done < (now - expires)).delete()
+            session.commit()
+        finally:
+            session.close()
+
+    def __reduce__(self, args=(), kwargs={}):
+        kwargs.update(
+            dict(dburi=self.dburi,
+                 expires=self.expires,
+                 engine_options=self.engine_options))
+        return super(DatabaseBackend, self).__reduce__(args, kwargs)
diff --git a/awx/lib/site-packages/celery/backends/database/a805d4bd.py b/awx/lib/site-packages/celery/backends/database/a805d4bd.py
new file mode 100644
index 0000000000..e0d14a1fea
--- /dev/null
+++ b/awx/lib/site-packages/celery/backends/database/a805d4bd.py
@@ -0,0 +1,71 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.backends.database.a805d4bd
+    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+    This module fixes a bug with pickling and relative imports in Python < 2.6.
+
+    The problem is with pickling an e.g. `exceptions.KeyError` instance.
+    As SQLAlchemy has its own `exceptions` module, pickle will try to
+    lookup :exc:`KeyError` in the wrong module, resulting in this exception::
+
+        cPickle.PicklingError: Can't pickle <type 'exceptions.KeyError'>:
+            attribute lookup exceptions.KeyError failed
+
+    doing `import exceptions` just before the dump in `sqlalchemy.types`
+    reveals the source of the bug::
+
+        EXCEPTIONS: <module 'sqlalchemy.exc' from '...'>
+
+    Hence the random module name 'a805d4bd' is taken to decrease the chances of
+    a collision.
+
+"""
+from __future__ import absolute_import
+
+from sqlalchemy.types import PickleType as _PickleType
+
+
+class PickleType(_PickleType):  # pragma: no cover
+
+    def bind_processor(self, dialect):
+        impl_processor = self.impl.bind_processor(dialect)
+        dumps = self.pickler.dumps
+        protocol = self.protocol
+        if impl_processor:
+
+            def process(value):
+                if value is not None:
+                    value = dumps(value, protocol)
+                return impl_processor(value)
+
+        else:
+
+            def process(value):  # noqa
+                if value is not None:
+                    value = dumps(value, protocol)
+                return value
+        return process
+
+    def result_processor(self, dialect, coltype):
+        impl_processor = self.impl.result_processor(dialect, coltype)
+        loads = self.pickler.loads
+        if impl_processor:
+
+            def process(value):
+                value = impl_processor(value)
+                if value is not None:
+                    return loads(value)
+        else:
+
+            def process(value):  # noqa
+                if value is not None:
+                    return loads(value)
+        return process
+
+    def copy_value(self, value):
+        if self.mutable:
+            return self.pickler.loads(self.pickler.dumps(value, self.protocol))
+        else:
+            return value
diff --git a/awx/lib/site-packages/celery/backends/database/dfd042c7.py b/awx/lib/site-packages/celery/backends/database/dfd042c7.py
new file mode 100644
index 0000000000..ea932a74c9
--- /dev/null
+++ b/awx/lib/site-packages/celery/backends/database/dfd042c7.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.backends.database.dfd042c7
+    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+    SQLAlchemy 0.5.8 version of :mod:`~celery.backends.database.a805d4bd`,
+    see the docstring of that module for an explanation of why we need
+    this workaround.
+ +""" +from __future__ import absolute_import + +from sqlalchemy.types import PickleType as _PickleType +from sqlalchemy import util + + +class PickleType(_PickleType): # pragma: no cover + + def process_bind_param(self, value, dialect): + dumps = self.pickler.dumps + protocol = self.protocol + if value is not None: + return dumps(value, protocol) + + def process_result_value(self, value, dialect): + loads = self.pickler.loads + if value is not None: + return loads(str(value)) + + def copy_value(self, value): + if self.mutable: + return self.pickler.loads(self.pickler.dumps(value, self.protocol)) + else: + return value + + def compare_values(self, x, y): + if self.comparator: + return self.comparator(x, y) + elif self.mutable and not hasattr(x, '__eq__') and x is not None: + util.warn_deprecated( + 'Objects stored with PickleType when mutable=True ' + 'must implement __eq__() for reliable comparison.') + a = self.pickler.dumps(x, self.protocol) + b = self.pickler.dumps(y, self.protocol) + return a == b + else: + return x == y + + def is_mutable(self): + return self.mutable diff --git a/awx/lib/site-packages/celery/backends/database/models.py b/awx/lib/site-packages/celery/backends/database/models.py new file mode 100644 index 0000000000..f34d7252c7 --- /dev/null +++ b/awx/lib/site-packages/celery/backends/database/models.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +""" + celery.backends.database.models + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Database tables for the SQLAlchemy result store backend. + +""" +from __future__ import absolute_import + +from datetime import datetime + +import sqlalchemy as sa + +from celery import states + +from .session import ResultModelBase + +# See docstring of a805d4bd for an explanation for this workaround ;) +if sa.__version__.startswith('0.5'): + from .dfd042c7 import PickleType +else: + from .a805d4bd import PickleType # noqa + + +class Task(ResultModelBase): + """Task result/status.""" + __tablename__ = 'celery_taskmeta' + __table_args__ = {'sqlite_autoincrement': True} + + id = sa.Column(sa.Integer, sa.Sequence('task_id_sequence'), + primary_key=True, + autoincrement=True) + task_id = sa.Column(sa.String(255), unique=True) + status = sa.Column(sa.String(50), default=states.PENDING) + result = sa.Column(PickleType, nullable=True) + date_done = sa.Column(sa.DateTime, default=datetime.utcnow, + onupdate=datetime.utcnow, nullable=True) + traceback = sa.Column(sa.Text, nullable=True) + + def __init__(self, task_id): + self.task_id = task_id + + def to_dict(self): + return {'task_id': self.task_id, + 'status': self.status, + 'result': self.result, + 'traceback': self.traceback, + 'date_done': self.date_done} + + def __repr__(self): + return '' % (self.task_id, self.status) + + +class TaskSet(ResultModelBase): + """TaskSet result""" + __tablename__ = 'celery_tasksetmeta' + __table_args__ = {'sqlite_autoincrement': True} + + id = sa.Column(sa.Integer, sa.Sequence('taskset_id_sequence'), + autoincrement=True, primary_key=True) + taskset_id = sa.Column(sa.String(255), unique=True) + result = sa.Column(sa.PickleType, nullable=True) + date_done = sa.Column(sa.DateTime, default=datetime.utcnow, + nullable=True) + + def __init__(self, taskset_id, result): + self.taskset_id = taskset_id + self.result = result + + def to_dict(self): + return {'taskset_id': self.taskset_id, + 'result': self.result, + 'date_done': self.date_done} + + def __repr__(self): + return '' % (self.taskset_id, ) diff --git a/awx/lib/site-packages/celery/backends/database/session.py 
b/awx/lib/site-packages/celery/backends/database/session.py new file mode 100644 index 0000000000..ce1808ee63 --- /dev/null +++ b/awx/lib/site-packages/celery/backends/database/session.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +""" + celery.backends.database.session + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + SQLAlchemy sessions. + +""" +from __future__ import absolute_import + +from collections import defaultdict + +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker +from sqlalchemy.ext.declarative import declarative_base + +ResultModelBase = declarative_base() + +_SETUP = defaultdict(lambda: False) +_ENGINES = {} +_SESSIONS = {} + + +def get_engine(dburi, **kwargs): + if dburi not in _ENGINES: + _ENGINES[dburi] = create_engine(dburi, **kwargs) + return _ENGINES[dburi] + + +def create_session(dburi, short_lived_sessions=False, **kwargs): + engine = get_engine(dburi, **kwargs) + if short_lived_sessions or dburi not in _SESSIONS: + _SESSIONS[dburi] = sessionmaker(bind=engine) + return engine, _SESSIONS[dburi] + + +def setup_results(engine): + if not _SETUP['results']: + ResultModelBase.metadata.create_all(engine) + _SETUP['results'] = True + + +def ResultSession(dburi, **kwargs): + engine, session = create_session(dburi, **kwargs) + setup_results(engine) + return session() diff --git a/awx/lib/site-packages/celery/backends/mongodb.py b/awx/lib/site-packages/celery/backends/mongodb.py new file mode 100644 index 0000000000..2027d66b3b --- /dev/null +++ b/awx/lib/site-packages/celery/backends/mongodb.py @@ -0,0 +1,223 @@ +# -*- coding: utf-8 -*- +""" + celery.backends.mongodb + ~~~~~~~~~~~~~~~~~~~~~~~ + + MongoDB result store backend. + +""" +from __future__ import absolute_import + +from datetime import datetime + +try: + import pymongo +except ImportError: # pragma: no cover + pymongo = None # noqa + +if pymongo: + try: + from bson.binary import Binary + except ImportError: # pragma: no cover + from pymongo.binary import Binary # noqa +else: # pragma: no cover + Binary = None # noqa + +from kombu.utils import cached_property + +from celery import states +from celery.exceptions import ImproperlyConfigured +from celery.utils.timeutils import maybe_timedelta + +from .base import BaseDictBackend + + +class Bunch(object): + + def __init__(self, **kw): + self.__dict__.update(kw) + + +class MongoBackend(BaseDictBackend): + mongodb_host = 'localhost' + mongodb_port = 27017 + mongodb_user = None + mongodb_password = None + mongodb_database = 'celery' + mongodb_taskmeta_collection = 'celery_taskmeta' + mongodb_max_pool_size = 10 + mongodb_options = None + + supports_autoexpire = False + + def __init__(self, *args, **kwargs): + """Initialize MongoDB backend instance. + + :raises celery.exceptions.ImproperlyConfigured: if + module :mod:`pymongo` is not available. 
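+
+        Settings are read from the ``CELERY_MONGODB_BACKEND_SETTINGS``
+        dict; the keys below are the ones consumed here, the values are
+        illustrative only::
+
+            CELERY_MONGODB_BACKEND_SETTINGS = {
+                'host': 'mongo.example.com',
+                'port': 27017,
+                'database': 'celery',
+                'taskmeta_collection': 'celery_taskmeta',
+            }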
+ + """ + super(MongoBackend, self).__init__(*args, **kwargs) + self.expires = kwargs.get('expires') or maybe_timedelta( + self.app.conf.CELERY_TASK_RESULT_EXPIRES) + + if not pymongo: + raise ImproperlyConfigured( + 'You need to install the pymongo library to use the ' + 'MongoDB backend.') + + config = self.app.conf.get('CELERY_MONGODB_BACKEND_SETTINGS', None) + if config is not None: + if not isinstance(config, dict): + raise ImproperlyConfigured( + 'MongoDB backend settings should be grouped in a dict') + + self.mongodb_host = config.get('host', self.mongodb_host) + self.mongodb_port = int(config.get('port', self.mongodb_port)) + self.mongodb_user = config.get('user', self.mongodb_user) + self.mongodb_options = config.get('options', {}) + self.mongodb_password = config.get( + 'password', self.mongodb_password) + self.mongodb_database = config.get( + 'database', self.mongodb_database) + self.mongodb_taskmeta_collection = config.get( + 'taskmeta_collection', self.mongodb_taskmeta_collection) + self.mongodb_max_pool_size = config.get( + 'max_pool_size', self.mongodb_max_pool_size) + + self._connection = None + + def _get_connection(self): + """Connect to the MongoDB server.""" + if self._connection is None: + from pymongo.connection import Connection + + # The first pymongo.Connection() argument (host) can be + # a list of ['host:port'] elements or a mongodb connection + # URI. If this is the case, don't use self.mongodb_port + # but let pymongo get the port(s) from the URI instead. + # This enables the use of replica sets and sharding. + # See pymongo.Connection() for more info. + args = [self.mongodb_host] + kwargs = {'max_pool_size': self.mongodb_max_pool_size} + if isinstance(self.mongodb_host, basestring) \ + and not self.mongodb_host.startswith('mongodb://'): + args.append(self.mongodb_port) + + self._connection = Connection( + *args, **dict(kwargs, **self.mongodb_options or {}) + ) + + return self._connection + + def process_cleanup(self): + if self._connection is not None: + # MongoDB connection will be closed automatically when object + # goes out of scope + self._connection = None + + def _store_result(self, task_id, result, status, traceback=None): + """Store return value and status of an executed task.""" + meta = {'_id': task_id, + 'status': status, + 'result': Binary(self.encode(result)), + 'date_done': datetime.utcnow(), + 'traceback': Binary(self.encode(traceback)), + 'children': Binary(self.encode(self.current_task_children()))} + self.collection.save(meta, safe=True) + + return result + + def _get_task_meta_for(self, task_id): + """Get task metadata for a task by id.""" + + obj = self.collection.find_one({'_id': task_id}) + if not obj: + return {'status': states.PENDING, 'result': None} + + meta = { + 'task_id': obj['_id'], + 'status': obj['status'], + 'result': self.decode(obj['result']), + 'date_done': obj['date_done'], + 'traceback': self.decode(obj['traceback']), + 'children': self.decode(obj['children']), + } + + return meta + + def _save_group(self, group_id, result): + """Save the group result.""" + meta = {'_id': group_id, + 'result': Binary(self.encode(result)), + 'date_done': datetime.utcnow()} + self.collection.save(meta, safe=True) + + return result + + def _restore_group(self, group_id): + """Get the result for a group by id.""" + obj = self.collection.find_one({'_id': group_id}) + if not obj: + return + + meta = { + 'task_id': obj['_id'], + 'result': self.decode(obj['result']), + 'date_done': obj['date_done'], + } + + return meta + + def _delete_group(self, 
group_id): + """Delete a group by id.""" + self.collection.remove({'_id': group_id}) + + def _forget(self, task_id): + """ + Remove result from MongoDB. + + :raises celery.exceptions.OperationsError: if the task_id could not be + removed. + """ + # By using safe=True, this will wait until it receives a response from + # the server. Likewise, it will raise an OperationsError if the + # response was unable to be completed. + self.collection.remove({'_id': task_id}, safe=True) + + def cleanup(self): + """Delete expired metadata.""" + self.collection.remove( + {'date_done': {'$lt': self.app.now() - self.expires}}, + ) + + def __reduce__(self, args=(), kwargs={}): + kwargs.update( + dict(expires=self.expires)) + return super(MongoBackend, self).__reduce__(args, kwargs) + + def _get_database(self): + conn = self._get_connection() + db = conn[self.mongodb_database] + if self.mongodb_user and self.mongodb_password: + if not db.authenticate(self.mongodb_user, + self.mongodb_password): + raise ImproperlyConfigured( + 'Invalid MongoDB username or password.') + return db + + @cached_property + def database(self): + """Get database from MongoDB connection and perform authentication + if necessary.""" + return self._get_database() + + @cached_property + def collection(self): + """Get the metadata task collection.""" + collection = self.database[self.mongodb_taskmeta_collection] + + # Ensure an index on date_done is there, if not process the index + # in the background. Once completed cleanup will be much faster + collection.ensure_index('date_done', background='true') + return collection diff --git a/awx/lib/site-packages/celery/backends/redis.py b/awx/lib/site-packages/celery/backends/redis.py new file mode 100644 index 0000000000..38583de31a --- /dev/null +++ b/awx/lib/site-packages/celery/backends/redis.py @@ -0,0 +1,121 @@ +# -*- coding: utf-8 -*- +""" + celery.backends.redis + ~~~~~~~~~~~~~~~~~~~~~ + + Redis result store backend. + +""" +from __future__ import absolute_import + +from kombu.utils import cached_property +from kombu.utils.url import _parse_url + +from celery.exceptions import ImproperlyConfigured + +from .base import KeyValueStoreBackend + +try: + import redis + from redis.exceptions import ConnectionError +except ImportError: # pragma: no cover + redis = None # noqa + ConnectionError = None # noqa + + +class RedisBackend(KeyValueStoreBackend): + """Redis task result store.""" + + #: redis-py client module. + redis = redis + + #: default Redis server hostname (`localhost`). + host = 'localhost' + + #: default Redis server port (6379) + port = 6379 + + #: default Redis db number (0) + db = 0 + + #: default Redis password (:const:`None`) + password = None + + #: Maximium number of connections in the pool. + max_connections = None + + supports_autoexpire = True + supports_native_join = True + implements_incr = True + + def __init__(self, host=None, port=None, db=None, password=None, + expires=None, max_connections=None, url=None, **kwargs): + super(RedisBackend, self).__init__(**kwargs) + conf = self.app.conf + if self.redis is None: + raise ImproperlyConfigured( + 'You need to install the redis library in order to use ' + 'the Redis result store backend.') + + # For compatibility with the old REDIS_* configuration keys. 
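+        # E.g. _get('HOST') returns conf['CELERY_REDIS_HOST'] if it is
+        # set, then falls back to conf['REDIS_HOST'], and returns None
+        # when neither key exists.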
+ def _get(key): + for prefix in 'CELERY_REDIS_%s', 'REDIS_%s': + try: + return conf[prefix % key] + except KeyError: + pass + if host and '://' in host: + url, host = host, None + self.url = url + uhost = uport = upass = udb = None + if url: + _, uhost, uport, _, upass, udb, _ = _parse_url(url) + udb = udb.strip('/') if udb else 0 + self.host = uhost or host or _get('HOST') or self.host + self.port = int(uport or port or _get('PORT') or self.port) + self.db = udb or db or _get('DB') or self.db + self.password = upass or password or _get('PASSWORD') or self.password + self.expires = self.prepare_expires(expires, type=int) + self.max_connections = (max_connections + or _get('MAX_CONNECTIONS') + or self.max_connections) + + def get(self, key): + return self.client.get(key) + + def mget(self, keys): + return self.client.mget(keys) + + def set(self, key, value): + client = self.client + if self.expires is not None: + client.setex(key, value, self.expires) + else: + client.set(key, value) + client.publish(key, value) + + def delete(self, key): + self.client.delete(key) + + def incr(self, key): + return self.client.incr(key) + + def expire(self, key, value): + return self.client.expire(key, value) + + @cached_property + def client(self): + pool = self.redis.ConnectionPool(host=self.host, port=self.port, + db=self.db, password=self.password, + max_connections=self.max_connections) + return self.redis.Redis(connection_pool=pool) + + def __reduce__(self, args=(), kwargs={}): + kwargs.update( + dict(host=self.host, + port=self.port, + db=self.db, + password=self.password, + expires=self.expires, + max_connections=self.max_connections)) + return super(RedisBackend, self).__reduce__(args, kwargs) diff --git a/awx/lib/site-packages/celery/beat.py b/awx/lib/site-packages/celery/beat.py new file mode 100644 index 0000000000..207985c9f5 --- /dev/null +++ b/awx/lib/site-packages/celery/beat.py @@ -0,0 +1,510 @@ +# -*- coding: utf-8 -*- +""" + celery.beat + ~~~~~~~~~~~ + + The periodic task scheduler. + +""" +from __future__ import absolute_import +from __future__ import with_statement + +import errno +import os +import time +import shelve +import sys +import traceback + +from billiard import Process, ensure_multiprocessing +from kombu.utils import cached_property, reprcall +from kombu.utils.functional import maybe_promise + +from . import __version__ +from . import platforms +from . import signals +from . import current_app +from .app import app_or_default +from .schedules import maybe_schedule, crontab +from .utils.imports import instantiate +from .utils.threads import Event, Thread +from .utils.timeutils import humanize_seconds +from .utils.log import get_logger + +logger = get_logger(__name__) +debug, info, error, warning = (logger.debug, logger.info, + logger.error, logger.warning) + +DEFAULT_MAX_INTERVAL = 300 # 5 minutes + + +class SchedulingError(Exception): + """An error occured while scheduling a task.""" + + +class ScheduleEntry(object): + """An entry in the scheduler. + + :keyword name: see :attr:`name`. + :keyword schedule: see :attr:`schedule`. + :keyword args: see :attr:`args`. + :keyword kwargs: see :attr:`kwargs`. + :keyword options: see :attr:`options`. + :keyword last_run_at: see :attr:`last_run_at`. + :keyword total_run_count: see :attr:`total_run_count`. + :keyword relative: Is the time relative to when the server starts? + + """ + + #: The task name + name = None + + #: The schedule (run_every/crontab) + schedule = None + + #: Positional arguments to apply. 
+    args = None
+
+    #: Keyword arguments to apply.
+    kwargs = None
+
+    #: Task execution options.
+    options = None
+
+    #: The time and date of when this task was last scheduled.
+    last_run_at = None
+
+    #: Total number of times this task has been scheduled.
+    total_run_count = 0
+
+    def __init__(self, name=None, task=None, last_run_at=None,
+                 total_run_count=None, schedule=None, args=(), kwargs={},
+                 options={}, relative=False):
+        self.name = name
+        self.task = task
+        self.args = args
+        self.kwargs = kwargs
+        self.options = options
+        self.schedule = maybe_schedule(schedule, relative)
+        self.last_run_at = last_run_at or self._default_now()
+        self.total_run_count = total_run_count or 0
+
+    def _default_now(self):
+        return self.schedule.now() if self.schedule else current_app.now()
+
+    def _next_instance(self, last_run_at=None):
+        """Returns a new instance of the same class, but with
+        its date and count fields updated."""
+        return self.__class__(**dict(
+            self,
+            last_run_at=last_run_at or self._default_now(),
+            total_run_count=self.total_run_count + 1,
+        ))
+    __next__ = next = _next_instance  # for 2to3
+
+    def update(self, other):
+        """Update values from another entry.
+
+        Does only update "editable" fields (task, schedule, args, kwargs,
+        options).
+
+        """
+        self.__dict__.update({'task': other.task, 'schedule': other.schedule,
+                              'args': other.args, 'kwargs': other.kwargs,
+                              'options': other.options})
+
+    def is_due(self):
+        """See :meth:`~celery.schedule.schedule.is_due`."""
+        return self.schedule.is_due(self.last_run_at)
+
+    def __iter__(self):
+        return vars(self).iteritems()
+
+    def __repr__(self):
+        return '<Entry: %s %s {%s}' % (
+            self.name,
+            reprcall(self.task, self.args or (), self.kwargs or {}),
+            self.schedule,
+        )
+
+
+class Scheduler(object):
+    """Scheduler for periodic tasks.
+
+    :keyword schedule: see :attr:`schedule`.
+    :keyword max_interval: see :attr:`max_interval`.
+
+    """
+    Entry = ScheduleEntry
+
+    #: The schedule dict/shelve.
+    schedule = None
+
+    #: Maximum time to sleep between re-checking the schedule.
+    max_interval = DEFAULT_MAX_INTERVAL
+
+    #: How often to sync the schedule (3 minutes by default)
+    sync_every = 3 * 60
+
+    _last_sync = None
+
+    logger = logger  # compat
+
+    def __init__(self, schedule=None, max_interval=None,
+                 app=None, Publisher=None, lazy=False, **kwargs):
+        app = self.app = app_or_default(app)
+        self.data = maybe_promise({} if schedule is None else schedule)
+        self.max_interval = (max_interval
+                             or app.conf.CELERYBEAT_MAX_LOOP_INTERVAL
+                             or self.max_interval)
+        self.Publisher = Publisher or app.amqp.TaskProducer
+        if not lazy:
+            self.setup_schedule()
+
+    def install_default_entries(self, data):
+        entries = {}
+        if self.app.conf.CELERY_TASK_RESULT_EXPIRES and \
+                not self.app.backend.supports_autoexpire:
+            if 'celery.backend_cleanup' not in data:
+                entries['celery.backend_cleanup'] = {
+                    'task': 'celery.backend_cleanup',
+                    'schedule': crontab('0', '4', '*'),
+                    'options': {'expires': 12 * 3600}}
+        self.update_from_dict(entries)
+
+    def maybe_due(self, entry, publisher=None):
+        is_due, next_time_to_run = entry.is_due()
+
+        if is_due:
+            info('Scheduler: Sending due task %s (%s)',
+                 entry.name, entry.task)
+            try:
+                result = self.apply_async(entry, publisher=publisher)
+            except Exception, exc:
+                error('Message Error: %s\n%s',
+                      exc, traceback.format_stack(), exc_info=True)
+            else:
+                debug('%s sent. id->%s', entry.task, result.id)
+        return next_time_to_run
+
+    def tick(self):
+        """Run a tick, that is one iteration of the scheduler.
+
+        Executes all due tasks.
+
+        """
+        remaining_times = []
+        try:
+            for entry in self.schedule.itervalues():
+                next_time_to_run = self.maybe_due(entry, self.publisher)
+                if next_time_to_run:
+                    remaining_times.append(next_time_to_run)
+        except RuntimeError:
+            pass
+
+        return min(remaining_times + [self.max_interval])
+
+    def should_sync(self):
+        return (not self._last_sync or
+                (time.time() - self._last_sync) > self.sync_every)
+
+    def reserve(self, entry):
+        new_entry = self.schedule[entry.name] = entry.next()
+        return new_entry
+
+    def apply_async(self, entry, publisher=None, **kwargs):
+        # Update timestamps and run counts before we actually execute,
+        # so we have that done if an exception is raised (doesn't schedule
+        # forever.)
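+        # reserve() stores entry.next() back into the schedule, with
+        # last_run_at and total_run_count already advanced, so a failure
+        # after this point cannot re-send the same run indefinitely.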
+ entry = self.reserve(entry) + task = self.app.tasks.get(entry.task) + + try: + if task: + result = task.apply_async(entry.args, entry.kwargs, + publisher=publisher, + **entry.options) + else: + result = self.send_task(entry.task, entry.args, entry.kwargs, + publisher=publisher, + **entry.options) + except Exception, exc: + raise SchedulingError, SchedulingError( + "Couldn't apply scheduled task %s: %s" % ( + entry.name, exc)), sys.exc_info()[2] + finally: + if self.should_sync(): + self._do_sync() + return result + + def send_task(self, *args, **kwargs): + return self.app.send_task(*args, **kwargs) + + def setup_schedule(self): + self.install_default_entries(self.data) + + def _do_sync(self): + try: + debug('Celerybeat: Synchronizing schedule...') + self.sync() + finally: + self._last_sync = time.time() + + def sync(self): + pass + + def close(self): + self.sync() + + def add(self, **kwargs): + entry = self.Entry(**kwargs) + self.schedule[entry.name] = entry + return entry + + def _maybe_entry(self, name, entry): + if isinstance(entry, self.Entry): + return entry + return self.Entry(**dict(entry, name=name)) + + def update_from_dict(self, dict_): + self.schedule.update(dict( + (name, self._maybe_entry(name, entry)) + for name, entry in dict_.items())) + + def merge_inplace(self, b): + schedule = self.schedule + A, B = set(schedule), set(b) + + # Remove items from disk not in the schedule anymore. + for key in A ^ B: + schedule.pop(key, None) + + # Update and add new items in the schedule + for key in B: + entry = self.Entry(**dict(b[key], name=key)) + if schedule.get(key): + schedule[key].update(entry) + else: + schedule[key] = entry + + def _ensure_connected(self): + # callback called for each retry while the connection + # can't be established. + def _error_handler(exc, interval): + error('Celerybeat: Connection error: %s. ' + 'Trying again in %s seconds...', exc, interval) + + return self.connection.ensure_connection( + _error_handler, self.app.conf.BROKER_CONNECTION_MAX_RETRIES + ) + + def get_schedule(self): + return self.data + + def set_schedule(self, schedule): + self.data = schedule + schedule = property(get_schedule, set_schedule) + + @cached_property + def connection(self): + return self.app.connection() + + @cached_property + def publisher(self): + return self.Publisher(self._ensure_connected()) + + @property + def info(self): + return '' + + +class PersistentScheduler(Scheduler): + persistence = shelve + known_suffixes = ('', '.db', '.dat', '.bak', '.dir') + + _store = None + + def __init__(self, *args, **kwargs): + self.schedule_filename = kwargs.get('schedule_filename') + Scheduler.__init__(self, *args, **kwargs) + + def _remove_db(self): + for suffix in self.known_suffixes: + with platforms.ignore_errno(errno.ENOENT): + os.remove(self.schedule_filename + suffix) + + def setup_schedule(self): + try: + self._store = self.persistence.open(self.schedule_filename, + writeback=True) + entries = self._store.setdefault('entries', {}) + except Exception, exc: + error('Removing corrupted schedule file %r: %r', + self.schedule_filename, exc, exc_info=True) + self._remove_db() + self._store = self.persistence.open(self.schedule_filename, + writeback=True) + else: + if '__version__' not in self._store: + warning('Reset: Account for new __version__ field') + self._store.clear() # remove schedule at 2.2.2 upgrade. 
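+            # Each of the checks below resets the on-disk schedule when a
+            # field introduced by a newer release is missing, since that
+            # means the file was written by an older version.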
+ if 'tz' not in self._store: + warning('Reset: Account for new tz field') + self._store.clear() # remove schedule at 3.0.8 upgrade + if 'utc_enabled' not in self._store: + warning('Reset: Account for new utc_enabled field') + self._store.clear() # remove schedule at 3.0.9 upgrade + + tz = self.app.conf.CELERY_TIMEZONE + stored_tz = self._store.get('tz') + if stored_tz is not None and stored_tz != tz: + warning('Reset: Timezone changed from %r to %r', stored_tz, tz) + self._store.clear() # Timezone changed, reset db! + utc = self.app.conf.CELERY_ENABLE_UTC + stored_utc = self._store.get('utc_enabled') + if stored_utc is not None and stored_utc != utc: + choices = {True: 'enabled', False: 'disabled'} + warning('Reset: UTC changed from %s to %s', + choices[stored_utc], choices[utc]) + self._store.clear() # UTC setting changed, reset db! + entries = self._store.setdefault('entries', {}) + self.merge_inplace(self.app.conf.CELERYBEAT_SCHEDULE) + self.install_default_entries(self.schedule) + self._store.update(__version__=__version__, tz=tz, utc_enabled=utc) + self.sync() + debug('Current schedule:\n' + '\n'.join( + repr(entry) for entry in entries.itervalues())) + + def get_schedule(self): + return self._store['entries'] + + def set_schedule(self, schedule): + self._store['entries'] = schedule + schedule = property(get_schedule, set_schedule) + + def sync(self): + if self._store is not None: + self._store.sync() + + def close(self): + self.sync() + self._store.close() + + @property + def info(self): + return ' . db -> %s' % (self.schedule_filename, ) + + +class Service(object): + scheduler_cls = PersistentScheduler + + def __init__(self, max_interval=None, schedule_filename=None, + scheduler_cls=None, app=None): + app = self.app = app_or_default(app) + self.max_interval = (max_interval + or app.conf.CELERYBEAT_MAX_LOOP_INTERVAL) + self.scheduler_cls = scheduler_cls or self.scheduler_cls + self.schedule_filename = ( + schedule_filename or app.conf.CELERYBEAT_SCHEDULE_FILENAME) + + self._is_shutdown = Event() + self._is_stopped = Event() + + def __reduce__(self): + return self.__class__, (self.max_interval, self.schedule_filename, + self.scheduler_cls, self.app) + + def start(self, embedded_process=False): + info('Celerybeat: Starting...') + debug('Celerybeat: Ticking with max interval->%s', + humanize_seconds(self.scheduler.max_interval)) + + signals.beat_init.send(sender=self) + if embedded_process: + signals.beat_embedded_init.send(sender=self) + platforms.set_process_title('celerybeat') + + try: + while not self._is_shutdown.is_set(): + interval = self.scheduler.tick() + debug('Celerybeat: Waking up %s.', + humanize_seconds(interval, prefix='in ')) + time.sleep(interval) + except (KeyboardInterrupt, SystemExit): + self._is_shutdown.set() + finally: + self.sync() + + def sync(self): + self.scheduler.close() + self._is_stopped.set() + + def stop(self, wait=False): + info('Celerybeat: Shutting down...') + self._is_shutdown.set() + wait and self._is_stopped.wait() # block until shutdown done. 
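+
+    # Minimal usage sketch: assumes an already-configured app instance
+    # `my_app` (not defined here); start() blocks running the tick loop
+    # until stop() is called from another thread or the process is
+    # interrupted:
+    #
+    #     service = Service(app=my_app)
+    #     service.start()
+    #     ...
+    #     service.stop(wait=True)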
+ + def get_scheduler(self, lazy=False): + filename = self.schedule_filename + scheduler = instantiate(self.scheduler_cls, + app=self.app, + schedule_filename=filename, + max_interval=self.max_interval, + lazy=lazy) + return scheduler + + @cached_property + def scheduler(self): + return self.get_scheduler() + + +class _Threaded(Thread): + """Embedded task scheduler using threading.""" + + def __init__(self, *args, **kwargs): + super(_Threaded, self).__init__() + self.service = Service(*args, **kwargs) + self.daemon = True + self.name = 'Beat' + + def run(self): + self.service.start() + + def stop(self): + self.service.stop(wait=True) + + +try: + ensure_multiprocessing() +except NotImplementedError: # pragma: no cover + _Process = None +else: + class _Process(Process): # noqa + + def __init__(self, *args, **kwargs): + super(_Process, self).__init__() + self.service = Service(*args, **kwargs) + self.name = 'Beat' + + def run(self): + platforms.signals.reset('SIGTERM') + self.service.start(embedded_process=True) + + def stop(self): + self.service.stop() + self.terminate() + + +def EmbeddedService(*args, **kwargs): + """Return embedded clock service. + + :keyword thread: Run threaded instead of as a separate process. + Default is :const:`False`. + + """ + if kwargs.pop('thread', False) or _Process is None: + # Need short max interval to be able to stop thread + # in reasonable time. + kwargs.setdefault('max_interval', 1) + return _Threaded(*args, **kwargs) + return _Process(*args, **kwargs) diff --git a/awx/lib/site-packages/celery/bin/__init__.py b/awx/lib/site-packages/celery/bin/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/celery/bin/base.py b/awx/lib/site-packages/celery/bin/base.py new file mode 100644 index 0000000000..9517f6821c --- /dev/null +++ b/awx/lib/site-packages/celery/bin/base.py @@ -0,0 +1,392 @@ +# -*- coding: utf-8 -*- +""" + +.. _preload-options: + +Preload Options +--------------- + +These options are supported by all commands, +and usually parsed before command-specific arguments. + +.. cmdoption:: -A, --app + + app instance to use (e.g. module.attr_name) + +.. cmdoption:: -b, --broker + + url to broker. default is 'amqp://guest@localhost//' + +.. cmdoption:: --loader + + name of custom loader class to use. + +.. cmdoption:: --config + + Name of the configuration module + +.. _daemon-options: + +Daemon Options +-------------- + +These options are supported by commands that can detach +into the background (daemon). They will be present +in any command that also has a `--detach` option. + +.. cmdoption:: -f, --logfile + + Path to log file. If no logfile is specified, `stderr` is used. + +.. cmdoption:: --pidfile + + Optional file used to store the process pid. + + The program will not start if this file already exists + and the pid is still alive. + +.. cmdoption:: --uid + + User id, or user name of the user to run as after detaching. + +.. cmdoption:: --gid + + Group id, or group name of the main group to change to after + detaching. + +.. cmdoption:: --umask + + Effective umask of the process after detaching. Default is 0. + +.. cmdoption:: --workdir + + Optional directory to change to after detaching. 
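+
+Example (paths and user/group names are illustrative)::
+
+    celery beat --detach --logfile=/var/log/celerybeat.log \
+        --pidfile=/var/run/celerybeat.pid --uid=celery --gid=celery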
+ +""" +from __future__ import absolute_import + +import os +import re +import sys +import warnings + +from collections import defaultdict +from optparse import OptionParser, IndentedHelpFormatter, make_option as Option +from types import ModuleType + +import celery +from celery.exceptions import CDeprecationWarning, CPendingDeprecationWarning +from celery.platforms import EX_FAILURE, EX_USAGE, maybe_patch_concurrency +from celery.utils import text +from celery.utils.imports import symbol_by_name, import_from_cwd + +# always enable DeprecationWarnings, so our users can see them. +for warning in (CDeprecationWarning, CPendingDeprecationWarning): + warnings.simplefilter('once', warning, 0) + +ARGV_DISABLED = """ +Unrecognized command line arguments: %s + +Try --help? +""" + +find_long_opt = re.compile(r'.+?(--.+?)(?:\s|,|$)') +find_rst_ref = re.compile(r':\w+:`(.+?)`') + + +class HelpFormatter(IndentedHelpFormatter): + + def format_epilog(self, epilog): + if epilog: + return '\n%s\n\n' % epilog + return '' + + def format_description(self, description): + return text.ensure_2lines(text.fill_paragraphs( + text.dedent(description), self.width)) + + +class Command(object): + """Base class for command line applications. + + :keyword app: The current app. + :keyword get_app: Callable returning the current app if no app provided. + + """ + Parser = OptionParser + + #: Arg list used in help. + args = '' + + #: Application version. + version = celery.VERSION_BANNER + + #: If false the parser will raise an exception if positional + #: args are provided. + supports_args = True + + #: List of options (without preload options). + option_list = () + + # module Rst documentation to parse help from (if any) + doc = None + + # Some programs (multi) does not want to load the app specified + # (Issue #1008). + respects_app_option = True + + #: List of options to parse before parsing other options. + preload_options = ( + Option('-A', '--app', default=None), + Option('-b', '--broker', default=None), + Option('--loader', default=None), + Option('--config', default=None), + Option('--workdir', default=None, dest='working_directory'), + ) + + #: Enable if the application should support config from the cmdline. + enable_config_from_cmdline = False + + #: Default configuration namespace. + namespace = 'celery' + + #: Text to print at end of --help + epilog = None + + #: Text to print in --help before option list. + description = '' + + #: Set to true if this command doesn't have subcommands + leaf = True + + def __init__(self, app=None, get_app=None): + self.app = app + self.get_app = get_app or self._get_default_app + + def run(self, *args, **options): + """This is the body of the command called by :meth:`handle_argv`.""" + raise NotImplementedError('subclass responsibility') + + def execute_from_commandline(self, argv=None): + """Execute application from command line. + + :keyword argv: The list of command line arguments. + Defaults to ``sys.argv``. + + """ + if argv is None: + argv = list(sys.argv) + # Should we load any special concurrency environment? + self.maybe_patch_concurrency(argv) + self.on_concurrency_setup() + + # Dump version and exit if '--version' arg set. 
+ self.early_version(argv) + argv = self.setup_app_from_commandline(argv) + prog_name = os.path.basename(argv[0]) + return self.handle_argv(prog_name, argv[1:]) + + def run_from_argv(self, prog_name, argv=None): + return self.handle_argv(prog_name, sys.argv if argv is None else argv) + + def maybe_patch_concurrency(self, argv=None): + argv = argv or sys.argv + pool_option = self.with_pool_option(argv) + if pool_option: + maybe_patch_concurrency(argv, *pool_option) + short_opts, long_opts = pool_option + + def on_concurrency_setup(self): + pass + + def usage(self, command): + """Returns the command line usage string for this app.""" + return '%%prog [options] %s' % (self.args, ) + + def get_options(self): + """Get supported command line options.""" + return self.option_list + + def expanduser(self, value): + if isinstance(value, basestring): + return os.path.expanduser(value) + return value + + def handle_argv(self, prog_name, argv): + """Parses command line arguments from ``argv`` and dispatches + to :meth:`run`. + + :param prog_name: The program name (``argv[0]``). + :param argv: Command arguments. + + Exits with an error message if :attr:`supports_args` is disabled + and ``argv`` contains positional arguments. + + """ + options, args = self.prepare_args(*self.parse_options(prog_name, argv)) + return self.run(*args, **options) + + def prepare_args(self, options, args): + if options: + options = dict((k, self.expanduser(v)) + for k, v in vars(options).iteritems() + if not k.startswith('_')) + args = [self.expanduser(arg) for arg in args] + self.check_args(args) + return options, args + + def check_args(self, args): + if not self.supports_args and args: + self.die(ARGV_DISABLED % (', '.join(args, )), EX_USAGE) + + def die(self, msg, status=EX_FAILURE): + sys.stderr.write(msg + '\n') + sys.exit(status) + + def early_version(self, argv): + if '--version' in argv: + sys.stdout.write('%s\n' % self.version) + sys.exit(0) + + def parse_options(self, prog_name, arguments): + """Parse the available options.""" + # Don't want to load configuration to just print the version, + # so we handle --version manually here. + parser = self.create_parser(prog_name) + return parser.parse_args(arguments) + + def create_parser(self, prog_name, command=None): + return self.prepare_parser(self.Parser( + prog=prog_name, + usage=self.usage(command), + version=self.version, + epilog=self.epilog, + formatter=HelpFormatter(), + description=self.description, + option_list=(self.preload_options + self.get_options()))) + + def prepare_parser(self, parser): + docs = [self.parse_doc(doc) for doc in (self.doc, __doc__) if doc] + for doc in docs: + for long_opt, help in doc.iteritems(): + option = parser.get_option(long_opt) + if option is not None: + option.help = ' '.join(help) % {'default': option.default} + return parser + + def setup_app_from_commandline(self, argv): + preload_options = self.parse_preload_options(argv) + workdir = preload_options.get('working_directory') + if workdir: + os.chdir(workdir) + app = (preload_options.get('app') or + os.environ.get('CELERY_APP') or + self.app) + preload_loader = preload_options.get('loader') + if preload_loader: + # Default app takes loader from this env (Issue #1066). 
+ os.environ['CELERY_LOADER'] = preload_loader + loader = (preload_loader, + os.environ.get('CELERY_LOADER') or + 'default') + broker = preload_options.get('broker', None) + if broker: + os.environ['CELERY_BROKER_URL'] = broker + config = preload_options.get('config') + if config: + os.environ['CELERY_CONFIG_MODULE'] = config + if self.respects_app_option: + if app and self.respects_app_option: + self.app = self.find_app(app) + elif self.app is None: + self.app = self.get_app(loader=loader) + if self.enable_config_from_cmdline: + argv = self.process_cmdline_config(argv) + else: + self.app = celery.Celery() + return argv + + def find_app(self, app): + try: + sym = self.symbol_by_name(app) + except AttributeError: + # last part was not an attribute, but a module + sym = import_from_cwd(app) + if isinstance(sym, ModuleType): + if getattr(sym, '__path__', None): + return self.find_app('%s.celery:' % (app.replace(':', ''), )) + return sym.celery + return sym + + def symbol_by_name(self, name): + return symbol_by_name(name, imp=import_from_cwd) + get_cls_by_name = symbol_by_name # XXX compat + + def process_cmdline_config(self, argv): + try: + cargs_start = argv.index('--') + except ValueError: + return argv + argv, cargs = argv[:cargs_start], argv[cargs_start + 1:] + self.app.config_from_cmdline(cargs, namespace=self.namespace) + return argv + + def parse_preload_options(self, args): + acc = {} + opts = {} + for opt in self.preload_options: + for t in (opt._long_opts, opt._short_opts): + opts.update(dict(zip(t, [opt.dest] * len(t)))) + index = 0 + length = len(args) + while index < length: + arg = args[index] + if arg.startswith('--') and '=' in arg: + key, value = arg.split('=', 1) + dest = opts.get(key) + if dest: + acc[dest] = value + elif arg.startswith('-'): + dest = opts.get(arg) + if dest: + acc[dest] = args[index + 1] + index += 1 + index += 1 + return acc + + def parse_doc(self, doc): + options, in_option = defaultdict(list), None + for line in doc.splitlines(): + if line.startswith('.. cmdoption::'): + m = find_long_opt.match(line) + if m: + in_option = m.groups()[0].strip() + assert in_option, 'missing long opt' + elif in_option and line.startswith(' ' * 4): + options[in_option].append( + find_rst_ref.sub(r'\1', line.strip()).replace('`', '')) + return options + + def with_pool_option(self, argv): + """Returns tuple of ``(short_opts, long_opts)`` if the command + supports a pool argument, and used to monkey patch eventlet/gevent + environments as early as possible. + + E.g:: + has_pool_option = (['-P'], ['--pool']) + """ + pass + + def _get_default_app(self, *args, **kwargs): + from celery._state import get_current_app + return get_current_app() # omit proxy + + +def daemon_options(default_pidfile=None, default_logfile=None): + return ( + Option('-f', '--logfile', default=default_logfile), + Option('--pidfile', default=default_pidfile), + Option('--uid', default=None), + Option('--gid', default=None), + Option('--umask', default=0, type='int'), + ) diff --git a/awx/lib/site-packages/celery/bin/camqadm.py b/awx/lib/site-packages/celery/bin/camqadm.py new file mode 100644 index 0000000000..366df42960 --- /dev/null +++ b/awx/lib/site-packages/celery/bin/camqadm.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +""" +The :program:`celery amqp` command. + +.. 
program:: celery amqp + +""" +from __future__ import absolute_import + +import cmd +import sys +import shlex +import pprint + +from itertools import count + +try: + import amqp +except ImportError: + from amqplib import client_0_8 as amqp # noqa + +from celery.app import app_or_default +from celery.utils.functional import padlist + +from celery.bin.base import Command +from celery.utils import strtobool + +# Map to coerce strings to other types. +COERCE = {bool: strtobool} + +HELP_HEADER = """ +Commands +-------- +""".rstrip() + +EXAMPLE_TEXT = """ +Example: + -> queue.delete myqueue yes no +""" + + +def say(m, fh=sys.stderr): + fh.write('%s\n' % (m, )) + + +class Spec(object): + """AMQP Command specification. + + Used to convert arguments to Python values and display various help + and tooltips. + + :param args: see :attr:`args`. + :keyword returns: see :attr:`returns`. + + .. attribute args:: + + List of arguments this command takes. Should + contain `(argument_name, argument_type)` tuples. + + .. attribute returns: + + Helpful human string representation of what this command returns. + May be :const:`None`, to signify the return type is unknown. + + """ + def __init__(self, *args, **kwargs): + self.args = args + self.returns = kwargs.get('returns') + + def coerce(self, index, value): + """Coerce value for argument at index. + + E.g. if :attr:`args` is `[('is_active', bool)]`: + + >>> coerce(0, 'False') + False + + """ + arg_info = self.args[index] + arg_type = arg_info[1] + # Might be a custom way to coerce the string value, + # so look in the coercion map. + return COERCE.get(arg_type, arg_type)(value) + + def str_args_to_python(self, arglist): + """Process list of string arguments to values according to spec. + + e.g: + + >>> spec = Spec([('queue', str), ('if_unused', bool)]) + >>> spec.str_args_to_python('pobox', 'true') + ('pobox', True) + + """ + return tuple( + self.coerce(index, value) for index, value in enumerate(arglist)) + + def format_response(self, response): + """Format the return value of this command in a human-friendly way.""" + if not self.returns: + if response is None: + return 'ok.' + return response + if callable(self.returns): + return self.returns(response) + return self.returns % (response, ) + + def format_arg(self, name, type, default_value=None): + if default_value is not None: + return '%s:%s' % (name, default_value) + return name + + def format_signature(self): + return ' '.join(self.format_arg(*padlist(list(arg), 3)) + for arg in self.args) + + +def dump_message(message): + if message is None: + return 'No messages in queue. basic.publish something.' + return {'body': message.body, + 'properties': message.properties, + 'delivery_info': message.delivery_info} + + +def format_declare_queue(ret): + return 'ok. queue:%s messages:%s consumers:%s.' % ret + + +class AMQShell(cmd.Cmd): + """AMQP API Shell. + + :keyword connect: Function used to connect to the server, must return + connection object. + + :keyword silent: If :const:`True`, the commands won't have annoying + output not relevant when running in non-shell mode. + + + .. attribute: builtins + + Mapping of built-in command names -> method names + + .. attribute:: amqp + + Mapping of AMQP API commands and their :class:`Spec`. + + """ + conn = None + chan = None + prompt_fmt = '%d> ' + identchars = cmd.IDENTCHARS = '.' 
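+    # Including '.' among the identifier characters lets dotted AMQP
+    # method names such as 'queue.declare' be read as a single command
+    # word by the cmd module.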
+    needs_reconnect = False
+    counter = 1
+    inc_counter = count(2).next
+
+    builtins = {'EOF': 'do_exit',
+                'exit': 'do_exit',
+                'help': 'do_help'}
+
+    amqp = {
+        'exchange.declare': Spec(('exchange', str),
+                                 ('type', str),
+                                 ('passive', bool, 'no'),
+                                 ('durable', bool, 'no'),
+                                 ('auto_delete', bool, 'no'),
+                                 ('internal', bool, 'no')),
+        'exchange.delete': Spec(('exchange', str),
+                                ('if_unused', bool)),
+        'queue.bind': Spec(('queue', str),
+                           ('exchange', str),
+                           ('routing_key', str)),
+        'queue.declare': Spec(('queue', str),
+                              ('passive', bool, 'no'),
+                              ('durable', bool, 'no'),
+                              ('exclusive', bool, 'no'),
+                              ('auto_delete', bool, 'no'),
+                              returns=format_declare_queue),
+        'queue.delete': Spec(('queue', str),
+                             ('if_unused', bool, 'no'),
+                             ('if_empty', bool, 'no'),
+                             returns='ok. %d messages deleted.'),
+        'queue.purge': Spec(('queue', str),
+                            returns='ok. %d messages deleted.'),
+        'basic.get': Spec(('queue', str),
+                          ('no_ack', bool, 'off'),
+                          returns=dump_message),
+        'basic.publish': Spec(('msg', amqp.Message),
+                              ('exchange', str),
+                              ('routing_key', str),
+                              ('mandatory', bool, 'no'),
+                              ('immediate', bool, 'no')),
+        'basic.ack': Spec(('delivery_tag', int)),
+    }
+
+    def __init__(self, *args, **kwargs):
+        self.connect = kwargs.pop('connect')
+        self.silent = kwargs.pop('silent', False)
+        self.out = kwargs.pop('out', sys.stderr)
+        cmd.Cmd.__init__(self, *args, **kwargs)
+        self._reconnect()
+
+    def note(self, m):
+        """Say something to the user. Disabled if :attr:`silent`."""
+        if not self.silent:
+            say(m, fh=self.out)
+
+    def say(self, m):
+        say(m, fh=self.out)
+
+    def get_amqp_api_command(self, cmd, arglist):
+        """With a command name and a list of arguments, convert the arguments
+        to Python values and find the corresponding method on the AMQP channel
+        object.
+
+        :returns: tuple of `(method, processed_args)`.
+
+        Example:
+
+            >>> get_amqp_api_command('queue.delete', ['pobox', 'yes', 'no'])
+            (<bound method Channel.queue_delete of
+             <amqp.channel.Channel object at 0x...>>,
+             ('testfoo', True, False))
+
+        """
+        spec = self.amqp[cmd]
+        args = spec.str_args_to_python(arglist)
+        attr_name = cmd.replace('.', '_')
+        if self.needs_reconnect:
+            self._reconnect()
+        return getattr(self.chan, attr_name), args, spec.format_response
+
+    def do_exit(self, *args):
+        """The `'exit'` command."""
+        self.note("\n-> please, don't leave!")
+        sys.exit(0)
+
+    def display_command_help(self, cmd, short=False):
+        spec = self.amqp[cmd]
+        self.say('%s %s' % (cmd, spec.format_signature()))
+
+    def do_help(self, *args):
+        if not args:
+            self.say(HELP_HEADER)
+            for cmd_name in self.amqp:
+                self.display_command_help(cmd_name, short=True)
+            self.say(EXAMPLE_TEXT)
+        else:
+            self.display_command_help(args[0])
+
+    def default(self, line):
+        self.say("unknown syntax: '%s'. how about some 'help'?" % line)
+
+    def get_names(self):
+        return set(self.builtins) | set(self.amqp)
+
+    def completenames(self, text, *ignored):
+        """Return all commands starting with `text`, for tab-completion."""
+        names = self.get_names()
+        first = [cmd for cmd in names
+                 if cmd.startswith(text.replace('_', '.'))]
+        if first:
+            return first
+        return [cmd for cmd in names
+                if cmd.partition('.')[2].startswith(text)]
+
+    def dispatch(self, cmd, argline):
+        """Dispatch and execute the command.
+
+        Lookup order is: :attr:`builtins` -> :attr:`amqp`.
+
+        """
+        arglist = shlex.split(argline)
+        if cmd in self.builtins:
+            return getattr(self, self.builtins[cmd])(*arglist)
+        fun, args, formatter = self.get_amqp_api_command(cmd, arglist)
+        return formatter(fun(*args))
+
+    def parseline(self, line):
+        """Parse input line.
+ + :returns: tuple of three items: + `(command_name, arglist, original_line)` + + E.g:: + + >>> parseline('queue.delete A 'B' C') + ('queue.delete', 'A 'B' C', 'queue.delete A 'B' C') + + """ + parts = line.split() + if parts: + return parts[0], ' '.join(parts[1:]), line + return '', '', line + + def onecmd(self, line): + """Parse line and execute command.""" + cmd, arg, line = self.parseline(line) + if not line: + return self.emptyline() + if cmd is None: + return self.default(line) + self.lastcmd = line + if cmd == '': + return self.default(line) + else: + self.counter = self.inc_counter() + try: + self.respond(self.dispatch(cmd, arg)) + except (AttributeError, KeyError), exc: + self.default(line) + except Exception, exc: + self.say(exc) + self.needs_reconnect = True + + def respond(self, retval): + """What to do with the return value of a command.""" + if retval is not None: + if isinstance(retval, basestring): + self.say(retval) + else: + self.say(pprint.pformat(retval)) + + def _reconnect(self): + """Re-establish connection to the AMQP server.""" + self.conn = self.connect(self.conn) + self.chan = self.conn.default_channel + self.needs_reconnect = False + + @property + def prompt(self): + return self.prompt_fmt % self.counter + + +class AMQPAdmin(object): + """The celery :program:`camqadm` utility.""" + Shell = AMQShell + + def __init__(self, *args, **kwargs): + self.app = app_or_default(kwargs.get('app')) + self.out = kwargs.setdefault('out', sys.stderr) + self.silent = kwargs.get('silent') + self.args = args + + def connect(self, conn=None): + if conn: + conn.close() + conn = self.app.connection() + self.note('-> connecting to %s.' % conn.as_uri()) + conn.connect() + self.note('-> connected.') + return conn + + def run(self): + shell = self.Shell(connect=self.connect, out=self.out) + if self.args: + return shell.onecmd(' '.join(self.args)) + try: + return shell.cmdloop() + except KeyboardInterrupt: + self.note('(bibi)') + pass + + def note(self, m): + if not self.silent: + say(m, fh=self.out) + + +class AMQPAdminCommand(Command): + + def run(self, *args, **options): + options['app'] = self.app + return AMQPAdmin(*args, **options).run() + + +def camqadm(*args, **options): + AMQPAdmin(*args, **options).run() + + +def main(): + AMQPAdminCommand().execute_from_commandline() + +if __name__ == '__main__': # pragma: no cover + main() diff --git a/awx/lib/site-packages/celery/bin/celery.py b/awx/lib/site-packages/celery/bin/celery.py new file mode 100644 index 0000000000..d92a497ed6 --- /dev/null +++ b/awx/lib/site-packages/celery/bin/celery.py @@ -0,0 +1,963 @@ +# -*- coding: utf-8 -*- +""" + +The :program:`celery` umbrella command. + +.. program:: celery + +""" +from __future__ import absolute_import +from __future__ import with_statement + +import anyjson +import heapq +import os +import sys +import warnings + +from importlib import import_module +from pprint import pformat + +from celery.platforms import EX_OK, EX_FAILURE, EX_UNAVAILABLE, EX_USAGE +from celery.utils import term +from celery.utils import text +from celery.utils.functional import memoize +from celery.utils.imports import symbol_by_name +from celery.utils.timeutils import maybe_iso8601 + +from celery.bin.base import Command as BaseCommand, Option + +HELP = """ +---- -- - - ---- Commands- -------------- --- ------------ + +%(commands)s +---- -- - - --------- -- - -------------- --- ------------ + +Type '%(prog_name)s --help' for help using a specific command. 
+""" + +commands = {} + +command_classes = [ + ('Main', ['worker', 'events', 'beat', 'shell', 'multi', 'amqp'], 'green'), + ('Remote Control', ['status', 'inspect', 'control'], 'blue'), + ('Utils', ['purge', 'list', 'migrate', 'call', 'result', 'report'], None), +] + + +@memoize() +def _get_extension_classes(): + extensions = [] + command_classes.append(('Extensions', extensions, 'magenta')) + return extensions + + +def ensure_broadcast_supported(app): + if app.connection().transport.driver_type == 'sql': + raise Error('SQL broker transports does not support broadcast') + + +class Error(Exception): + + def __init__(self, reason, status=EX_FAILURE): + self.reason = reason + self.status = status + super(Error, self).__init__(reason, status) + + def __str__(self): + return self.reason + + +def command(fun, name=None, sortpri=0): + commands[name or fun.__name__] = fun + fun.sortpri = sortpri + return fun + + +def load_extension_commands(namespace='celery.commands'): + try: + from pkg_resources import iter_entry_points + except ImportError: + return + + for ep in iter_entry_points(namespace): + sym = ':'.join([ep.module_name, ep.attrs[0]]) + try: + cls = symbol_by_name(sym) + except (ImportError, SyntaxError), exc: + warnings.warn('Cannot load extension %r: %r' % (sym, exc)) + else: + heapq.heappush(_get_extension_classes(), ep.name) + command(cls, name=ep.name) + + +class Command(BaseCommand): + help = '' + args = '' + prog_name = 'celery' + show_body = True + show_reply = True + + option_list = ( + Option('--quiet', '-q', action='store_true'), + Option('--no-color', '-C', action='store_true', default=None), + ) + + def __init__(self, app=None, no_color=False, stdout=sys.stdout, + stderr=sys.stderr, show_reply=True): + super(Command, self).__init__(app=app) + self.colored = term.colored(enabled=not no_color) + self.stdout = stdout + self.stderr = stderr + self.quiet = False + if show_reply is not None: + self.show_reply = show_reply + + def __call__(self, *args, **kwargs): + try: + ret = self.run(*args, **kwargs) + except Error, exc: + self.error(self.colored.red('Error: %s' % exc)) + return exc.status + + return ret if ret is not None else EX_OK + + def show_help(self, command): + self.run_from_argv(self.prog_name, [command, '--help']) + return EX_USAGE + + def error(self, s): + self.out(s, fh=self.stderr) + + def out(self, s, fh=None): + s = str(s) + if not s.endswith('\n'): + s += '\n' + (fh or self.stdout).write(s) + + def run_from_argv(self, prog_name, argv): + self.prog_name = prog_name + self.command = argv[0] + self.arglist = argv[1:] + self.parser = self.create_parser(self.prog_name, self.command) + options, args = self.prepare_args( + *self.parser.parse_args(self.arglist)) + self.colored = term.colored(enabled=not options['no_color']) + self.quiet = options.get('quiet', False) + self.show_body = options.get('show_body', True) + return self(*args, **options) + + def usage(self, command): + return '%%prog %s [options] %s' % (command, self.args) + + def prettify_list(self, n): + c = self.colored + if not n: + return '- empty -' + return '\n'.join(str(c.reset(c.white('*'), ' %s' % (item, ))) + for item in n) + + def prettify_dict_ok_error(self, n): + c = self.colored + try: + return (c.green('OK'), + text.indent(self.prettify(n['ok'])[1], 4)) + except KeyError: + pass + return (c.red('ERROR'), + text.indent(self.prettify(n['error'])[1], 4)) + + def say_remote_command_reply(self, replies): + c = self.colored + node = iter(replies).next() # <-- take first. 
+ reply = replies[node] + status, preply = self.prettify(reply) + self.say_chat('->', c.cyan(node, ': ') + status, + text.indent(preply, 4) if self.show_reply else '') + + def prettify(self, n): + OK = str(self.colored.green('OK')) + if isinstance(n, list): + return OK, self.prettify_list(n) + if isinstance(n, dict): + if 'ok' in n or 'error' in n: + return self.prettify_dict_ok_error(n) + if isinstance(n, basestring): + return OK, unicode(n) + return OK, pformat(n) + + def say_chat(self, direction, title, body=''): + c = self.colored + if direction == '<-' and self.quiet: + return + dirstr = not self.quiet and c.bold(c.white(direction), ' ') or '' + self.out(c.reset(dirstr, title)) + if body and self.show_body: + self.out(body) + + @property + def description(self): + return self.__doc__ + + +class Delegate(Command): + + def __init__(self, *args, **kwargs): + super(Delegate, self).__init__(*args, **kwargs) + + self.target = symbol_by_name(self.Command)(app=self.app) + self.args = self.target.args + + def get_options(self): + return self.option_list + self.target.get_options() + + def create_parser(self, prog_name, command): + parser = super(Delegate, self).create_parser(prog_name, command) + return self.target.prepare_parser(parser) + + def run(self, *args, **kwargs): + self.target.check_args(args) + return self.target.run(*args, **kwargs) + + +class multi(Command): + """Start multiple worker instances.""" + respects_app_option = False + + def get_options(self): + return () + + def run_from_argv(self, prog_name, argv): + from celery.bin.celeryd_multi import MultiTool + return MultiTool().execute_from_commandline(argv, prog_name) +multi = command(multi) + + +class worker(Delegate): + """Start worker instance. + + Examples:: + + celery worker --app=proj -l info + celery worker -A proj -l info -Q hipri,lopri + + celery worker -A proj --concurrency=4 + celery worker -A proj --concurrency=1000 -P eventlet + + celery worker --autoscale=10,0 + """ + Command = 'celery.bin.celeryd:WorkerCommand' +worker = command(worker, sortpri=01) + + +class events(Delegate): + """Event-stream utilities. + + Commands:: + + celery events --app=proj + start graphical monitor (requires curses) + celery events -d --app=proj + dump events to screen. + celery events -b amqp:// + celery events -C [options] + run snapshot camera. + + Examples:: + + celery events + celery events -d + celery events -C mod.attr -F 1.0 --detach --maxrate=100/m -l info + """ + Command = 'celery.bin.celeryev:EvCommand' +events = command(events, sortpri=10) + + +class beat(Delegate): + """Start the celerybeat periodic task scheduler. + + Examples:: + + celery beat -l info + celery beat -s /var/run/celerybeat/schedule --detach + celery beat -S djcelery.schedulers.DatabaseScheduler + + """ + Command = 'celery.bin.celerybeat:BeatCommand' +beat = command(beat, sortpri=20) + + +class amqp(Delegate): + """AMQP Administration Shell. + + Also works for non-amqp transports. + + Examples:: + + celery amqp + start shell mode + celery amqp help + show list of commands + + celery amqp exchange.delete name + celery amqp queue.delete queue + celery amqp queue.delete queue yes yes + + """ + Command = 'celery.bin.camqadm:AMQPAdminCommand' +amqp = command(amqp, sortpri=30) + + +class list_(Command): + """Get info from broker. + + Examples:: + + celery list bindings + + NOTE: For RabbitMQ the management plugin is required. 
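+    (enable it with: rabbitmq-plugins enable rabbitmq_management)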
+
+    """
+    args = '[bindings]'
+
+    def list_bindings(self, management):
+        try:
+            bindings = management.get_bindings()
+        except NotImplementedError:
+            raise Error('Your transport cannot list bindings.')
+
+        fmt = lambda q, e, r: self.out('%s %s %s' % (q.ljust(28),
+                                                     e.ljust(28), r))
+        fmt('Queue', 'Exchange', 'Routing Key')
+        fmt('-' * 16, '-' * 16, '-' * 16)
+        for b in bindings:
+            fmt(b['destination'], b['source'], b['routing_key'])
+
+    def run(self, what=None, *_, **kw):
+        topics = {'bindings': self.list_bindings}
+        available = ', '.join(topics)
+        if not what:
+            raise Error('You must specify what to list (%s)' % available)
+        if what not in topics:
+            raise Error('unknown topic %r (choose one of: %s)' % (
+                what, available))
+        with self.app.connection() as conn:
+            self.app.amqp.TaskConsumer(conn).declare()
+            topics[what](conn.manager)
+list_ = command(list_, 'list')
+
+
+class call(Command):
+    """Call a task by name.
+
+    Examples::
+
+        celery call tasks.add --args='[2, 2]'
+        celery call tasks.add --args='[2, 2]' --countdown=10
+    """
+    args = '<task_name>'
+    option_list = Command.option_list + (
+        Option('--args', '-a', help='positional arguments (json).'),
+        Option('--kwargs', '-k', help='keyword arguments (json).'),
+        Option('--eta', help='scheduled time (ISO-8601).'),
+        Option('--countdown', type='float',
+               help='eta in seconds from now (float/int).'),
+        Option('--expires', help='expiry time (ISO-8601/float/int).'),
+        Option('--serializer', default='json', help='defaults to json.'),
+        Option('--queue', help='custom queue name.'),
+        Option('--exchange', help='custom exchange name.'),
+        Option('--routing-key', help='custom routing key.'),
+    )
+
+    def run(self, name, *_, **kw):
+        # Positional args.
+        args = kw.get('args') or ()
+        if isinstance(args, basestring):
+            args = anyjson.loads(args)
+
+        # Keyword args.
+        kwargs = kw.get('kwargs') or {}
+        if isinstance(kwargs, basestring):
+            kwargs = anyjson.loads(kwargs)
+
+        # Expires can be int/float.
+        expires = kw.get('expires') or None
+        try:
+            expires = float(expires)
+        except (TypeError, ValueError):
+            # or a string describing an ISO 8601 datetime.
+            try:
+                expires = maybe_iso8601(expires)
+            except (TypeError, ValueError):
+                raise
+
+        res = self.app.send_task(name, args=args, kwargs=kwargs,
+                                 countdown=kw.get('countdown'),
+                                 serializer=kw.get('serializer'),
+                                 queue=kw.get('queue'),
+                                 exchange=kw.get('exchange'),
+                                 routing_key=kw.get('routing_key'),
+                                 eta=maybe_iso8601(kw.get('eta')),
+                                 expires=expires)
+        self.out(res.id)
+call = command(call)
+
+
+class purge(Command):
+    """Erase all messages from all known task queues.
+
+    WARNING: There is no undo operation for this command.
+
+    """
+    def run(self, *args, **kwargs):
+        queues = len(self.app.amqp.queues)
+        messages_removed = self.app.control.purge()
+        if messages_removed:
+            self.out('Purged %s %s from %s known task %s.' % (
+                messages_removed, text.pluralize(messages_removed, 'message'),
+                queues, text.pluralize(queues, 'queue')))
+        else:
+            self.out('No messages purged from %s known %s' % (
+                queues, text.pluralize(queues, 'queue')))
+purge = command(purge)
+
+
+class result(Command):
+    """Gives the return value for a given task id.
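+    (waits for the task to finish if it is not already done)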
+
+    Examples::
+
+        celery result 8f511516-e2f5-4da4-9d2f-0fb83a86e500
+        celery result 8f511516-e2f5-4da4-9d2f-0fb83a86e500 -t tasks.add
+        celery result 8f511516-e2f5-4da4-9d2f-0fb83a86e500 --traceback
+
+    """
+    args = '<task_id>'
+    option_list = Command.option_list + (
+        Option('--task', '-t', help='name of task (if custom backend)'),
+        Option('--traceback', action='store_true',
+               help='show traceback instead'),
+    )
+
+    def run(self, task_id, *args, **kwargs):
+        result_cls = self.app.AsyncResult
+        task = kwargs.get('task')
+        traceback = kwargs.get('traceback', False)
+
+        if task:
+            result_cls = self.app.tasks[task].AsyncResult
+        result = result_cls(task_id)
+        if traceback:
+            value = result.traceback
+        else:
+            value = result.get()
+        self.out(self.prettify(value)[1])
+result = command(result)
+
+
+class _RemoteControl(Command):
+    name = None
+    choices = None
+    leaf = False
+    option_list = Command.option_list + (
+        Option('--timeout', '-t', type='float',
+               help='Timeout in seconds (float) waiting for reply'),
+        Option('--destination', '-d',
+               help='Comma separated list of destination node names.'))
+
+    @classmethod
+    def get_command_info(self, command,
+                         indent=0, prefix='', color=None, help=False):
+        if help:
+            help = '|' + text.indent(self.choices[command][1], indent + 4)
+        else:
+            help = None
+        try:
+            # see if it uses args.
+            meth = getattr(self, command)
+            return text.join([
+                '|' + text.indent('%s%s %s' % (prefix, color(command),
+                                               meth.__doc__), indent), help,
+            ])
+
+        except AttributeError:
+            return text.join([
+                '|' + text.indent(prefix + str(color(command)), indent), help,
+            ])
+
+    @classmethod
+    def list_commands(self, indent=0, prefix='', color=None, help=False):
+        color = color if color else lambda x: x
+        prefix = prefix + ' ' if prefix else ''
+        return '\n'.join(self.get_command_info(c, indent, prefix, color, help)
+                         for c in sorted(self.choices))
+
+    @property
+    def epilog(self):
+        return '\n'.join([
+            '[Commands]',
+            self.list_commands(indent=4, help=True)
+        ])
+
+    def usage(self, command):
+        return '%%prog %s [options] %s [arg1 .. argN]' % (
+            command, self.args)
+
+    def call(self, *args, **kwargs):
+        raise NotImplementedError('get_obj')
+
+    def run(self, *args, **kwargs):
+        if not args:
+            raise Error('Missing %s method. See --help' % self.name)
+        return self.do_call_method(args, **kwargs)
+
+    def do_call_method(self, args, **kwargs):
+        method = args[0]
+        if method == 'help':
+            raise Error("Did you mean '%s --help'?" 
% self.name) + if method not in self.choices: + raise Error('Unknown %s method %s' % (self.name, method)) + + ensure_broadcast_supported(self.app) + + destination = kwargs.get('destination') + timeout = kwargs.get('timeout') or self.choices[method][0] + if destination and isinstance(destination, basestring): + destination = [v.strip() for v in destination.split(',')] + + try: + handler = getattr(self, method) + except AttributeError: + handler = self.call + + # XXX Python 2.5 does not support X(*args, foo=1) + kwargs = {"timeout": timeout, "destination": destination, + "callback": self.say_remote_command_reply} + replies = handler(method, *args[1:], **kwargs) + if not replies: + raise Error('No nodes replied within time constraint.', + status=EX_UNAVAILABLE) + return replies + + def say(self, direction, title, body=''): + c = self.colored + if direction == '<-' and self.quiet: + return + dirstr = not self.quiet and c.bold(c.white(direction), ' ') or '' + self.out(c.reset(dirstr, title)) + if body and self.show_body: + self.out(body) + + +class inspect(_RemoteControl): + """Inspect the worker at runtime. + + Availability: RabbitMQ (amqp), Redis, and MongoDB transports. + + Examples:: + + celery inspect active --timeout=5 + celery inspect scheduled -d worker1.example.com + celery inspect revoked -d w1.e.com,w2.e.com + + """ + name = 'inspect' + choices = { + 'active': (1.0, 'dump active tasks (being processed)'), + 'active_queues': (1.0, 'dump queues being consumed from'), + 'scheduled': (1.0, 'dump scheduled tasks (eta/countdown/retry)'), + 'reserved': (1.0, 'dump reserved tasks (waiting to be processed)'), + 'stats': (1.0, 'dump worker statistics'), + 'revoked': (1.0, 'dump of revoked task ids'), + 'registered': (1.0, 'dump of registered tasks'), + 'ping': (0.2, 'ping worker(s)'), + 'report': (1.0, 'get bugreport info') + } + + def call(self, method, *args, **options): + i = self.app.control.inspect(**options) + return getattr(i, method)(*args) +inspect = command(inspect) + + +class control(_RemoteControl): + """Workers remote control. + + Availability: RabbitMQ (amqp), Redis, and MongoDB transports. 
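+    (remote control commands use broadcast messaging, so the transport
+    must support fanout; SQL transports do not)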
+
+    Examples::
+
+        celery control enable_events --timeout=5
+        celery control -d worker1.example.com enable_events
+        celery control -d w1.e.com,w2.e.com enable_events
+
+        celery control -d w1.e.com add_consumer queue_name
+        celery control -d w1.e.com cancel_consumer queue_name
+
+        celery control -d w1.e.com add_consumer queue exchange direct rkey
+
+    """
+    name = 'control'
+    choices = {
+        'enable_events': (1.0, 'tell worker(s) to enable events'),
+        'disable_events': (1.0, 'tell worker(s) to disable events'),
+        'add_consumer': (1.0, 'tell worker(s) to start consuming a queue'),
+        'cancel_consumer': (1.0, 'tell worker(s) to stop consuming a queue'),
+        'rate_limit': (
+            1.0, 'tell worker(s) to modify the rate limit for a task type'),
+        'time_limit': (
+            1.0, 'tell worker(s) to modify the time limit for a task type.'),
+        'autoscale': (1.0, 'change autoscale settings'),
+        'pool_grow': (1.0, 'start more pool processes'),
+        'pool_shrink': (1.0, 'use less pool processes'),
+    }
+
+    def call(self, method, *args, **options):
+        # XXX Python 2.5 doesn't support X(*args, reply=True, **kwargs)
+        return getattr(self.app.control, method)(
+            *args, **dict(options, retry=True))
+
+    def pool_grow(self, method, n=1, **kwargs):
+        """[N=1]"""
+        return self.call(method, n, **kwargs)
+
+    def pool_shrink(self, method, n=1, **kwargs):
+        """[N=1]"""
+        return self.call(method, n, **kwargs)
+
+    def autoscale(self, method, max=None, min=None, **kwargs):
+        """[max] [min]"""
+        return self.call(method, max, min, **kwargs)
+
+    def rate_limit(self, method, task_name, rate_limit, **kwargs):
+        """<task_name> <rate_limit (e.g. 5/s | 5/m | 5/h)>"""
+        return self.call(method, task_name, rate_limit, reply=True, **kwargs)
+
+    def time_limit(self, method, task_name, soft, hard=None, **kwargs):
+        """<task_name> <soft_secs> [hard_secs]"""
+        return self.call(method, task_name, soft, hard, reply=True, **kwargs)
+
+    def add_consumer(self, method, queue, exchange=None,
+                     exchange_type='direct', routing_key=None, **kwargs):
+        """<queue> [exchange [type [routing_key]]]"""
+        return self.call(method, queue, exchange,
+                         exchange_type, routing_key, reply=True, **kwargs)
+
+    def cancel_consumer(self, method, queue, **kwargs):
+        """<queue>"""
+        return self.call(method, queue, reply=True, **kwargs)
+control = command(control)
+
+
+class status(Command):
+    """Show list of workers that are online."""
+    option_list = inspect.option_list
+
+    def run(self, *args, **kwargs):
+        replies = inspect(
+            app=self.app,
+            no_color=kwargs.get('no_color', False),
+            stdout=self.stdout, stderr=self.stderr,
+            show_reply=False).run(
+                'ping', **dict(kwargs, quiet=True, show_body=False))
+        if not replies:
+            raise Error('No nodes replied within time constraint',
+                        status=EX_UNAVAILABLE)
+        nodecount = len(replies)
+        if not kwargs.get('quiet', False):
+            self.out('\n%s %s online.' % (nodecount,
+                                          text.pluralize(nodecount, 'node')))
+status = command(status)
+
+
+class migrate(Command):
+    """Migrate tasks from one broker to another.
+
+    Examples::
+
+        celery migrate redis://localhost amqp://guest@localhost//
+        celery migrate django:// redis://localhost
+
+    NOTE: This command is experimental, make sure you have
+    a backup of the tasks before you continue.
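+    Pass -F/--forever to keep migrating tasks until interrupted.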
+
+    """
+    args = '<source_url> <dest_url>'
+    option_list = Command.option_list + (
+        Option('--limit', '-n', type='int',
+               help='Number of tasks to consume (int)'),
+        Option('--timeout', '-t', type='float', default=1.0,
+               help='Timeout in seconds (float) waiting for tasks'),
+        Option('--ack-messages', '-a', action='store_true',
+               help='Ack messages from source broker.'),
+        Option('--tasks', '-T',
+               help='List of task names to filter on.'),
+        Option('--queues', '-Q',
+               help='List of queues to migrate.'),
+        Option('--forever', '-F', action='store_true',
+               help='Continually migrate tasks until killed.'),
+    )
+
+    def on_migrate_task(self, state, body, message):
+        self.out('Migrating task %s/%s: %s[%s]' % (
+            state.count, state.strtotal, body['task'], body['id']))
+
+    def run(self, *args, **kwargs):
+        if len(args) != 2:
+            return self.show_help('migrate')
+        from kombu import Connection
+        from celery.contrib.migrate import migrate_tasks
+
+        migrate_tasks(Connection(args[0]),
+                      Connection(args[1]),
+                      callback=self.on_migrate_task,
+                      **kwargs)
+migrate = command(migrate)
+
+
+class shell(Command):  # pragma: no cover
+    """Start shell session with convenient access to celery symbols.
+
+    The following symbols will be added to the main globals:
+
+        - celery: the current application.
+        - chord, group, chain, chunks,
+          xmap, xstarmap, subtask, Task
+        - all registered tasks.
+
+    Example Session:
+
+    .. code-block:: bash
+
+        $ celery shell
+
+        >>> celery
+
+        >>> add
+        <@task: tasks.add>
+        >>> add.delay(2, 2)
+
+    """
+    option_list = Command.option_list + (
+        Option('--ipython', '-I',
+               action='store_true', dest='force_ipython',
+               help='force iPython.'),
+        Option('--bpython', '-B',
+               action='store_true', dest='force_bpython',
+               help='force bpython.'),
+        Option('--python', '-P',
+               action='store_true', dest='force_python',
+               help='force default Python shell.'),
+        Option('--without-tasks', '-T', action='store_true',
+               help="don't add tasks to locals."),
+        Option('--eventlet', action='store_true',
+               help='use eventlet.'),
+        Option('--gevent', action='store_true', help='use gevent.'),
+    )
+
+    def run(self, force_ipython=False, force_bpython=False,
+            force_python=False, without_tasks=False, eventlet=False,
+            gevent=False, **kwargs):
+        sys.path.insert(0, os.getcwd())
+        if eventlet:
+            import_module('celery.concurrency.eventlet')
+        if gevent:
+            import_module('celery.concurrency.gevent')
+        import celery
+        import celery.task.base
+        self.app.loader.import_default_modules()
+        self.locals = {'celery': self.app,
+                       'Task': celery.Task,
+                       'chord': celery.chord,
+                       'group': celery.group,
+                       'chain': celery.chain,
+                       'chunks': celery.chunks,
+                       'xmap': celery.xmap,
+                       'xstarmap': celery.xstarmap,
+                       'subtask': celery.subtask}
+
+        if not without_tasks:
+            self.locals.update(dict(
+                (task.__name__, task) for task in self.app.tasks.itervalues()
+                if not task.name.startswith('celery.')),
+            )
+
+        if force_python:
+            return self.invoke_fallback_shell()
+        elif force_bpython:
+            return self.invoke_bpython_shell()
+        elif force_ipython:
+            return self.invoke_ipython_shell()
+        return self.invoke_default_shell()
+
+    def invoke_default_shell(self):
+        try:
+            import IPython  # noqa
+        except ImportError:
+            try:
+                import bpython  # noqa
+            except ImportError:
+                return self.invoke_fallback_shell()
+            else:
+                return self.invoke_bpython_shell()
+        else:
+            return self.invoke_ipython_shell()
+
+    def invoke_fallback_shell(self):
+        import code
+        try:
+            import readline
+        except ImportError:
+            pass
+        else:
+            import rlcompleter
+            readline.set_completer(
+
rlcompleter.Completer(self.locals).complete) + readline.parse_and_bind('tab:complete') + code.interact(local=self.locals) + + def invoke_ipython_shell(self): + try: + from IPython.frontend.terminal import embed + embed.TerminalInteractiveShell(user_ns=self.locals).mainloop() + except ImportError: # ipython < 0.11 + from IPython.Shell import IPShell + IPShell(argv=[], user_ns=self.locals).mainloop() + + def invoke_bpython_shell(self): + import bpython + bpython.embed(self.locals) + +shell = command(shell) + + +class help(Command): + """Show help screen and exit.""" + + def usage(self, command): + return '%%prog [options] %s' % (self.args, ) + + def run(self, *args, **kwargs): + self.parser.print_help() + self.out(HELP % {'prog_name': self.prog_name, + 'commands': CeleryCommand.list_commands()}) + + return EX_USAGE +help = command(help) + + +class report(Command): + """Shows information useful to include in bugreports.""" + + def run(self, *args, **kwargs): + self.out(self.app.bugreport()) + return EX_OK +report = command(report) + + +class CeleryCommand(BaseCommand): + commands = commands + enable_config_from_cmdline = True + prog_name = 'celery' + + def execute(self, command, argv=None): + try: + cls = self.commands[command] + except KeyError: + cls, argv = self.commands['help'], ['help'] + cls = self.commands.get(command) or self.commands['help'] + try: + return cls(app=self.app).run_from_argv(self.prog_name, argv) + except (TypeError, Error): + return self.execute('help', argv) + + def remove_options_at_beginning(self, argv, index=0): + if argv: + while index < len(argv): + value = argv[index] + if value.startswith('--'): + pass + elif value.startswith('-'): + index += 1 + else: + return argv[index:] + index += 1 + return [] + + def handle_argv(self, prog_name, argv): + self.prog_name = prog_name + argv = self.remove_options_at_beginning(argv) + _, argv = self.prepare_args(None, argv) + try: + command = argv[0] + except IndexError: + command, argv = 'help', ['help'] + return self.execute(command, argv) + + def execute_from_commandline(self, argv=None): + argv = sys.argv if argv is None else argv + if 'multi' in argv[1:3]: # Issue 1008 + self.respects_app_option = False + try: + sys.exit(determine_exit_status( + super(CeleryCommand, self).execute_from_commandline(argv))) + except KeyboardInterrupt: + sys.exit(EX_FAILURE) + + @classmethod + def get_command_info(self, command, indent=0, color=None): + colored = term.colored().names[color] if color else lambda x: x + obj = self.commands[command] + if obj.leaf: + return '|' + text.indent('celery %s' % colored(command), indent) + return text.join([ + ' ', + '|' + text.indent('celery %s --help' % colored(command), indent), + obj.list_commands(indent, 'celery %s' % command, colored), + ]) + + @classmethod + def list_commands(self, indent=0): + white = term.colored().white + ret = [] + for cls, commands, color in command_classes: + ret.extend([ + text.indent('+ %s: ' % white(cls), indent), + '\n'.join(self.get_command_info(command, indent + 4, color) + for command in commands), + '' + ]) + return '\n'.join(ret).strip() + + def with_pool_option(self, argv): + if len(argv) > 1 and argv[1] == 'worker': + # this command supports custom pools + # that may have to be loaded as early as possible. 
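+            # (returning the flags here lets maybe_patch_concurrency()
+            # scan argv for -P/--pool before full option parsing runs)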
+ return (['-P'], ['--pool']) + + def on_concurrency_setup(self): + load_extension_commands() + + +def determine_exit_status(ret): + if isinstance(ret, int): + return ret + return EX_OK if ret else EX_FAILURE + + +def main(argv=None): + # Fix for setuptools generated scripts, so that it will + # work with multiprocessing fork emulation. + # (see multiprocessing.forking.get_preparation_data()) + try: + if __name__ != '__main__': # pragma: no cover + sys.modules['__main__'] = sys.modules[__name__] + cmd = CeleryCommand() + cmd.maybe_patch_concurrency() + from billiard import freeze_support + freeze_support() + cmd.execute_from_commandline(argv) + except KeyboardInterrupt: + pass + + +if __name__ == '__main__': # pragma: no cover + main() diff --git a/awx/lib/site-packages/celery/bin/celerybeat.py b/awx/lib/site-packages/celery/bin/celerybeat.py new file mode 100644 index 0000000000..8b16e00075 --- /dev/null +++ b/awx/lib/site-packages/celery/bin/celerybeat.py @@ -0,0 +1,86 @@ +# -*- coding: utf-8 -*- +""" + +The :program:`celery beat` command. + +.. program:: celery beat + +.. seealso:: + + See :ref:`preload-options` and :ref:`daemon-options`. + +.. cmdoption:: --detach + + Detach and run in the background as a daemon. + +.. cmdoption:: -s, --schedule + + Path to the schedule database. Defaults to `celerybeat-schedule`. + The extension '.db' may be appended to the filename. + Default is %(default)s. + +.. cmdoption:: -S, --scheduler + + Scheduler class to use. + Default is :class:`celery.beat.PersistentScheduler`. + +.. cmdoption:: max-interval + + Max seconds to sleep between schedule iterations. + +.. cmdoption:: -f, --logfile + + Path to log file. If no logfile is specified, `stderr` is used. + +.. cmdoption:: -l, --loglevel + + Logging level, choose between `DEBUG`, `INFO`, `WARNING`, + `ERROR`, `CRITICAL`, or `FATAL`. + +""" +from __future__ import with_statement +from __future__ import absolute_import + +from functools import partial + +from celery.platforms import detached + +from celery.bin.base import Command, Option, daemon_options + + +class BeatCommand(Command): + doc = __doc__ + enable_config_from_cmdline = True + supports_args = False + + def run(self, detach=False, logfile=None, pidfile=None, uid=None, + gid=None, umask=None, working_directory=None, **kwargs): + workdir = working_directory + kwargs.pop('app', None) + beat = partial(self.app.Beat, + logfile=logfile, pidfile=pidfile, **kwargs) + + if detach: + with detached(logfile, pidfile, uid, gid, umask, workdir): + return beat().run() + else: + return beat().run() + + def get_options(self): + c = self.app.conf + + return ( + Option('--detach', action='store_true'), + Option('-s', '--schedule', default=c.CELERYBEAT_SCHEDULE_FILENAME), + Option('--max-interval', type='float'), + Option('-S', '--scheduler', dest='scheduler_cls'), + Option('-l', '--loglevel', default=c.CELERYBEAT_LOG_LEVEL), + ) + daemon_options(default_pidfile='celerybeat.pid') + + +def main(): + beat = BeatCommand() + beat.execute_from_commandline() + +if __name__ == '__main__': # pragma: no cover + main() diff --git a/awx/lib/site-packages/celery/bin/celeryctl.py b/awx/lib/site-packages/celery/bin/celeryctl.py new file mode 100644 index 0000000000..b653924ebd --- /dev/null +++ b/awx/lib/site-packages/celery/bin/celeryctl.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +""" + celery.bin.celeryctl + ~~~~~~~~~~~~~~~~~~~~ + + Now replaced by the :program:`celery` command. 
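+    It is kept as a backwards-compatible alias that simply imports
+    from :mod:`celery.bin.celery`.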
+ +""" +from __future__ import absolute_import + +from celery.bin.celery import ( # noqa + CeleryCommand as celeryctl, Command, main, +) + +if __name__ == '__main__': # pragma: no cover + main() diff --git a/awx/lib/site-packages/celery/bin/celeryd.py b/awx/lib/site-packages/celery/bin/celeryd.py new file mode 100644 index 0000000000..172c73239e --- /dev/null +++ b/awx/lib/site-packages/celery/bin/celeryd.py @@ -0,0 +1,206 @@ +# -*- coding: utf-8 -*- +""" + +The :program:`celery worker` command (previously known as ``celeryd``) + +.. program:: celery worker + +.. seealso:: + + See :ref:`preload-options`. + +.. cmdoption:: -c, --concurrency + + Number of child processes processing the queue. The default + is the number of CPUs available on your system. + +.. cmdoption:: -P, --pool + + Pool implementation: + + processes (default), eventlet, gevent, solo or threads. + +.. cmdoption:: -f, --logfile + + Path to log file. If no logfile is specified, `stderr` is used. + +.. cmdoption:: -l, --loglevel + + Logging level, choose between `DEBUG`, `INFO`, `WARNING`, + `ERROR`, `CRITICAL`, or `FATAL`. + +.. cmdoption:: -n, --hostname + + Set custom hostname, e.g. 'foo.example.com'. + +.. cmdoption:: -B, --beat + + Also run the `celerybeat` periodic task scheduler. Please note that + there must only be one instance of this service. + +.. cmdoption:: -Q, --queues + + List of queues to enable for this worker, separated by comma. + By default all configured queues are enabled. + Example: `-Q video,image` + +.. cmdoption:: -I, --include + + Comma separated list of additional modules to import. + Example: -I foo.tasks,bar.tasks + +.. cmdoption:: -s, --schedule + + Path to the schedule database if running with the `-B` option. + Defaults to `celerybeat-schedule`. The extension ".db" may be + appended to the filename. + +.. cmdoption:: --scheduler + + Scheduler class to use. Default is celery.beat.PersistentScheduler + +.. cmdoption:: -S, --statedb + + Path to the state database. The extension '.db' may + be appended to the filename. Default: %(default)s + +.. cmdoption:: -E, --events + + Send events that can be captured by monitors like :program:`celeryev`, + `celerymon`, and others. + +.. cmdoption:: --purge + + Purges all waiting tasks before the daemon is started. + **WARNING**: This is unrecoverable, and the tasks will be + deleted from the messaging server. + +.. cmdoption:: --time-limit + + Enables a hard time limit (in seconds int/float) for tasks. + +.. cmdoption:: --soft-time-limit + + Enables a soft time limit (in seconds int/float) for tasks. + +.. cmdoption:: --maxtasksperchild + + Maximum number of tasks a pool worker can execute before it's + terminated and replaced by a new worker. + +.. cmdoption:: --pidfile + + Optional file used to store the workers pid. + + The worker will not start if this file already exists + and the pid is still alive. + +.. cmdoption:: --autoscale + + Enable autoscaling by providing + max_concurrency, min_concurrency. Example:: + + --autoscale=10,3 + + (always keep 3 processes, but grow to 10 if necessary) + +.. cmdoption:: --autoreload + + Enable autoreloading. + +.. cmdoption:: --no-execv + + Don't do execv after multiprocessing child fork. + +""" +from __future__ import absolute_import + +import sys + +from celery import concurrency +from celery.bin.base import Command, Option +from celery.utils.log import LOG_LEVELS, mlevel + + +class WorkerCommand(Command): + doc = __doc__ # parse help from this. 
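+    # (the base Command parses the module docstring above to build
+    # the --help output)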
+ namespace = 'celeryd' + enable_config_from_cmdline = True + supports_args = False + + def execute_from_commandline(self, argv=None): + if argv is None: + argv = list(sys.argv) + return super(WorkerCommand, self).execute_from_commandline(argv) + + def run(self, *args, **kwargs): + kwargs.pop('app', None) + # Pools like eventlet/gevent needs to patch libs as early + # as possible. + kwargs['pool_cls'] = concurrency.get_implementation( + kwargs.get('pool_cls') or self.app.conf.CELERYD_POOL) + if self.app.IS_WINDOWS and kwargs.get('beat'): + self.die('-B option does not work on Windows. ' + 'Please run celerybeat as a separate service.') + loglevel = kwargs.get('loglevel') + if loglevel: + try: + kwargs['loglevel'] = mlevel(loglevel) + except KeyError: # pragma: no cover + self.die('Unknown level %r. Please use one of %s.' % ( + loglevel, '|'.join(l for l in LOG_LEVELS + if isinstance(l, basestring)))) + return self.app.Worker(**kwargs).run() + + def with_pool_option(self, argv): + # this command support custom pools + # that may have to be loaded as early as possible. + return (['-P'], ['--pool']) + + def get_options(self): + conf = self.app.conf + return ( + Option('-c', '--concurrency', + default=conf.CELERYD_CONCURRENCY, type='int'), + Option('-P', '--pool', default=conf.CELERYD_POOL, dest='pool_cls'), + Option('--purge', '--discard', default=False, action='store_true'), + Option('-f', '--logfile', default=conf.CELERYD_LOG_FILE), + Option('-l', '--loglevel', default=conf.CELERYD_LOG_LEVEL), + Option('-n', '--hostname'), + Option('-B', '--beat', action='store_true'), + Option('-s', '--schedule', dest='schedule_filename', + default=conf.CELERYBEAT_SCHEDULE_FILENAME), + Option('--scheduler', dest='scheduler_cls'), + Option('-S', '--statedb', + default=conf.CELERYD_STATE_DB, dest='state_db'), + Option('-E', '--events', default=conf.CELERY_SEND_EVENTS, + action='store_true', dest='send_events'), + Option('--time-limit', type='float', dest='task_time_limit', + default=conf.CELERYD_TASK_TIME_LIMIT), + Option('--soft-time-limit', dest='task_soft_time_limit', + default=conf.CELERYD_TASK_SOFT_TIME_LIMIT, type='float'), + Option('--maxtasksperchild', dest='max_tasks_per_child', + default=conf.CELERYD_MAX_TASKS_PER_CHILD, type='int'), + Option('--queues', '-Q', default=[]), + Option('--include', '-I', default=[]), + Option('--pidfile'), + Option('--autoscale'), + Option('--autoreload', action='store_true'), + Option('--no-execv', action='store_true', default=False), + ) + + +def main(): + # Fix for setuptools generated scripts, so that it will + # work with multiprocessing fork emulation. + # (see multiprocessing.forking.get_preparation_data()) + if __name__ != '__main__': # pragma: no cover + sys.modules['__main__'] = sys.modules[__name__] + from billiard import freeze_support + freeze_support() + worker = WorkerCommand() + worker.execute_from_commandline() + + +if __name__ == '__main__': # pragma: no cover + main() diff --git a/awx/lib/site-packages/celery/bin/celeryd_detach.py b/awx/lib/site-packages/celery/bin/celeryd_detach.py new file mode 100644 index 0000000000..f2462952ff --- /dev/null +++ b/awx/lib/site-packages/celery/bin/celeryd_detach.py @@ -0,0 +1,154 @@ +# -*- coding: utf-8 -*- +""" + celery.bin.celeryd_detach + ~~~~~~~~~~~~~~~~~~~~~~~~~ + + Program used to daemonize celeryd. 
+ + Using :func:`os.execv` because forking and multiprocessing + leads to weird issues (it was a long time ago now, but it + could have something to do with the threading mutex bug) + +""" +from __future__ import absolute_import +from __future__ import with_statement + +import celery +import os +import sys + +from optparse import OptionParser, BadOptionError + +from celery.platforms import EX_FAILURE, detached +from celery.utils.log import get_logger + +from celery.bin.base import daemon_options, Option + +logger = get_logger(__name__) + +OPTION_LIST = daemon_options(default_pidfile='celeryd.pid') + ( + Option('--fake', + default=False, action='store_true', dest='fake', + help="Don't fork (for debugging purposes)"), +) + + +def detach(path, argv, logfile=None, pidfile=None, uid=None, + gid=None, umask=0, working_directory=None, fake=False, ): + with detached(logfile, pidfile, uid, gid, umask, working_directory, fake): + try: + os.execv(path, [path] + argv) + except Exception: + from celery import current_app + current_app.log.setup_logging_subsystem('ERROR', logfile) + logger.critical("Can't exec %r", ' '.join([path] + argv), + exc_info=True) + return EX_FAILURE + + +class PartialOptionParser(OptionParser): + + def __init__(self, *args, **kwargs): + self.leftovers = [] + OptionParser.__init__(self, *args, **kwargs) + + def _process_long_opt(self, rargs, values): + arg = rargs.pop(0) + + if '=' in arg: + opt, next_arg = arg.split('=', 1) + rargs.insert(0, next_arg) + had_explicit_value = True + else: + opt = arg + had_explicit_value = False + + try: + opt = self._match_long_opt(opt) + option = self._long_opt.get(opt) + except BadOptionError: + option = None + + if option: + if option.takes_value(): + nargs = option.nargs + if len(rargs) < nargs: + if nargs == 1: + self.error('%s option requires an argument' % opt) + else: + self.error('%s option requires %d arguments' % ( + opt, nargs)) + elif nargs == 1: + value = rargs.pop(0) + else: + value = tuple(rargs[0:nargs]) + del rargs[0:nargs] + + elif had_explicit_value: + self.error('%s option does not take a value' % opt) + else: + value = None + option.process(opt, value, values, self) + else: + self.leftovers.append(arg) + + def _process_short_opts(self, rargs, values): + arg = rargs[0] + try: + OptionParser._process_short_opts(self, rargs, values) + except BadOptionError: + self.leftovers.append(arg) + if rargs and not rargs[0][0] == '-': + self.leftovers.append(rargs.pop(0)) + + +class detached_celeryd(object): + option_list = OPTION_LIST + usage = '%prog [options] [celeryd options]' + version = celery.VERSION_BANNER + description = ('Detaches Celery worker nodes. 
See `celeryd --help` '
+                   'for the list of supported worker arguments.')
+    command = sys.executable
+    execv_path = sys.executable
+    execv_argv = ['-m', 'celery.bin.celeryd']
+
+    def Parser(self, prog_name):
+        return PartialOptionParser(prog=prog_name,
+                                   option_list=self.option_list,
+                                   usage=self.usage,
+                                   description=self.description,
+                                   version=self.version)
+
+    def parse_options(self, prog_name, argv):
+        parser = self.Parser(prog_name)
+        options, values = parser.parse_args(argv)
+        if options.logfile:
+            parser.leftovers.append('--logfile=%s' % (options.logfile, ))
+        if options.pidfile:
+            parser.leftovers.append('--pidfile=%s' % (options.pidfile, ))
+        return options, values, parser.leftovers
+
+    def execute_from_commandline(self, argv=None):
+        if argv is None:
+            argv = sys.argv
+        config = []
+        seen_cargs = 0
+        for arg in argv:
+            if seen_cargs:
+                config.append(arg)
+            else:
+                if arg == '--':
+                    seen_cargs = 1
+                    config.append(arg)
+        prog_name = os.path.basename(argv[0])
+        options, values, leftovers = self.parse_options(prog_name, argv[1:])
+        sys.exit(detach(path=self.execv_path,
+                        argv=self.execv_argv + leftovers + config,
+                        **vars(options)))
+
+
+def main():
+    detached_celeryd().execute_from_commandline()
+
+if __name__ == '__main__':  # pragma: no cover
+    main()
diff --git a/awx/lib/site-packages/celery/bin/celeryd_multi.py b/awx/lib/site-packages/celery/bin/celeryd_multi.py
new file mode 100644
index 0000000000..5d9344c866
--- /dev/null
+++ b/awx/lib/site-packages/celery/bin/celeryd_multi.py
@@ -0,0 +1,567 @@
+# -*- coding: utf-8 -*-
+"""
+
+.. program:: celeryd-multi
+
+Examples
+========
+
+.. code-block:: bash
+
+    # Single worker with explicit name and events enabled.
+    $ celeryd-multi start Leslie -E
+
+    # Pidfiles and logfiles are stored in the current directory
+    # by default. Use the --pidfile and --logfile arguments to change
+    # this. The abbreviation %n will be expanded to the current
+    # node name.
+    $ celeryd-multi start Leslie -E --pidfile=/var/run/celery/%n.pid
+                                    --logfile=/var/log/celery/%n.log
+
+
+    # You need to add the same arguments when you restart,
+    # as these are not persisted anywhere.
+    $ celeryd-multi restart Leslie -E --pidfile=/var/run/celery/%n.pid
+                                      --logfile=/var/run/celery/%n.log
+
+    # To stop the node, you need to specify the same pidfile.
+    $ celeryd-multi stop Leslie --pidfile=/var/run/celery/%n.pid
+
+    # 3 workers, with 3 processes each
+    $ celeryd-multi start 3 -c 3
+    celeryd -n celeryd1.myhost -c 3
+    celeryd -n celeryd2.myhost -c 3
+    celeryd -n celeryd3.myhost -c 3
+
+    # start 3 named workers
+    $ celeryd-multi start image video data -c 3
+    celeryd -n image.myhost -c 3
+    celeryd -n video.myhost -c 3
+    celeryd -n data.myhost -c 3
+
+    # specify custom hostname
+    $ celeryd-multi start 2 -n worker.example.com -c 3
+    celeryd -n celeryd1.worker.example.com -c 3
+    celeryd -n celeryd2.worker.example.com -c 3
+
+    # Advanced example starting 10 workers in the background:
+    #   * Three of the workers process the images and video queue
+    #   * Two of the workers process the data queue with loglevel DEBUG
+    #   * the rest processes the 'default' queue.
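+    # (an option with a :range/:list suffix, e.g. -Q:1-3 or -L:4,5,
+    # applies only to those node numbers)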
+    $ celeryd-multi start 10 -l INFO -Q:1-3 images,video -Q:4,5 data
+        -Q default -L:4,5 DEBUG
+
+    # You can show the commands necessary to start the workers with
+    # the 'show' command:
+    $ celeryd-multi show 10 -l INFO -Q:1-3 images,video -Q:4,5 data
+        -Q default -L:4,5 DEBUG
+
+    # Additional options are added to each celeryd,
+    # but you can also modify the options for ranges of, or specific workers
+
+    # 3 workers: Two with 3 processes, and one with 10 processes.
+    $ celeryd-multi start 3 -c 3 -c:1 10
+    celeryd -n celeryd1.myhost -c 10
+    celeryd -n celeryd2.myhost -c 3
+    celeryd -n celeryd3.myhost -c 3
+
+    # can also specify options for named workers
+    $ celeryd-multi start image video data -c 3 -c:image 10
+    celeryd -n image.myhost -c 10
+    celeryd -n video.myhost -c 3
+    celeryd -n data.myhost -c 3
+
+    # ranges and lists of workers in options are also allowed:
+    # (-c:1-3 can also be written as -c:1,2,3)
+    $ celeryd-multi start 5 -c 3 -c:1-3 10
+    celeryd -n celeryd1.myhost -c 10
+    celeryd -n celeryd2.myhost -c 10
+    celeryd -n celeryd3.myhost -c 10
+    celeryd -n celeryd4.myhost -c 3
+    celeryd -n celeryd5.myhost -c 3
+
+    # lists also work with named workers
+    $ celeryd-multi start foo bar baz xuzzy -c 3 -c:foo,bar,baz 10
+    celeryd -n foo.myhost -c 10
+    celeryd -n bar.myhost -c 10
+    celeryd -n baz.myhost -c 10
+    celeryd -n xuzzy.myhost -c 3
+
+"""
+from __future__ import absolute_import
+
+import errno
+import os
+import signal
+import socket
+import sys
+
+from collections import defaultdict
+from subprocess import Popen
+from time import sleep
+
+from kombu.utils import cached_property
+from kombu.utils.encoding import from_utf8
+
+from celery import VERSION_BANNER
+from celery.platforms import Pidfile, shellsplit
+from celery.utils import term
+from celery.utils.text import pluralize
+
+SIGNAMES = set(sig for sig in dir(signal)
+               if sig.startswith('SIG') and '_' not in sig)
+SIGMAP = dict((getattr(signal, name), name) for name in SIGNAMES)
+
+USAGE = """\
+usage: %(prog_name)s start <node1 node2 nodeN|range> [celeryd options]
+       %(prog_name)s stop <n1 n2 nN|range> [-SIG (default: -TERM)]
+       %(prog_name)s restart <n1 n2 nN|range> [-SIG] [celeryd options]
+       %(prog_name)s kill <n1 n2 nN|range>
+
+       %(prog_name)s show <n1 n2 nN|range> [celeryd options]
+       %(prog_name)s get hostname <n1 n2 nN|range> [-qv] [celeryd options]
+       %(prog_name)s names <n1 n2 nN|range>
+       %(prog_name)s expand template <n1 n2 nN|range>
+       %(prog_name)s help
+
+additional options (must appear after command name):
+
+    * --nosplash: Don't display program info.
+    * --quiet: Don't show as much output.
+    * --verbose: Show more output.
+    * --no-color: Don't display colors.
+"""
+
+
+def main():
+    sys.exit(MultiTool().execute_from_commandline(sys.argv))
+
+
+class MultiTool(object):
+    retcode = 0  # Final exit code.
+
+    def __init__(self, env=None, fh=None, quiet=False, verbose=False,
+                 no_color=False, nosplash=False):
+        self.fh = fh or sys.stderr
+        self.env = env
+        self.nosplash = nosplash
+        self.quiet = quiet
+        self.verbose = verbose
+        self.no_color = no_color
+        self.prog_name = 'celeryd-multi'
+        self.commands = {'start': self.start,
+                         'show': self.show,
+                         'stop': self.stop,
+                         'stopwait': self.stopwait,
+                         'stop_verify': self.stopwait,  # compat alias
+                         'restart': self.restart,
+                         'kill': self.kill,
+                         'names': self.names,
+                         'expand': self.expand,
+                         'get': self.get,
+                         'help': self.help}
+
+    def execute_from_commandline(self, argv, cmd='celeryd'):
+        argv = list(argv)  # don't modify caller's argv.
+
+        # Reserve the --nosplash|--quiet|-q/--verbose options.
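+        # These are plucked from argv by hand so they take effect no
+        # matter which subcommand handles the remaining arguments.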
+ if '--nosplash' in argv: + self.nosplash = argv.pop(argv.index('--nosplash')) + if '--quiet' in argv: + self.quiet = argv.pop(argv.index('--quiet')) + if '-q' in argv: + self.quiet = argv.pop(argv.index('-q')) + if '--verbose' in argv: + self.verbose = argv.pop(argv.index('--verbose')) + if '--no-color' in argv: + self.no_color = argv.pop(argv.index('--no-color')) + + self.prog_name = os.path.basename(argv.pop(0)) + if not argv or argv[0][0] == '-': + return self.error() + + try: + self.commands[argv[0]](argv[1:], cmd) + except KeyError: + self.error('Invalid command: %s' % argv[0]) + + return self.retcode + + def say(self, m, newline=True): + self.fh.write('%s%s' % (m, '\n' if newline else '')) + + def names(self, argv, cmd): + p = NamespacedOptionParser(argv) + self.say('\n'.join( + hostname for hostname, _, _ in multi_args(p, cmd)), + ) + + def get(self, argv, cmd): + wanted = argv[0] + p = NamespacedOptionParser(argv[1:]) + for name, worker, _ in multi_args(p, cmd): + if name == wanted: + self.say(' '.join(worker)) + return + + def show(self, argv, cmd): + p = NamespacedOptionParser(argv) + self.note('> Starting nodes...') + self.say('\n'.join( + ' '.join(worker) for _, worker, _ in multi_args(p, cmd)), + ) + + def start(self, argv, cmd): + self.splash() + p = NamespacedOptionParser(argv) + self.with_detacher_default_options(p) + retcodes = [] + self.note('> Starting nodes...') + for nodename, argv, _ in multi_args(p, cmd): + self.note('\t> %s: ' % (nodename, ), newline=False) + retcode = self.waitexec(argv) + self.note(retcode and self.FAILED or self.OK) + retcodes.append(retcode) + self.retcode = int(any(retcodes)) + + def with_detacher_default_options(self, p): + p.options.setdefault('--pidfile', 'celeryd@%n.pid') + p.options.setdefault('--logfile', 'celeryd@%n.log') + p.options.setdefault('--cmd', '-m celery.bin.celeryd_detach') + + def signal_node(self, nodename, pid, sig): + try: + os.kill(pid, sig) + except OSError, exc: + if exc.errno != errno.ESRCH: + raise + self.note('Could not signal %s (%s): No such process' % ( + nodename, pid)) + return False + return True + + def node_alive(self, pid): + try: + os.kill(pid, 0) + except OSError, exc: + if exc.errno == errno.ESRCH: + return False + raise + return True + + def shutdown_nodes(self, nodes, sig=signal.SIGTERM, retry=None, + callback=None): + if not nodes: + return + P = set(nodes) + + def on_down(node): + P.discard(node) + if callback: + callback(*node) + + self.note(self.colored.blue('> Stopping nodes...')) + for node in list(P): + if node in P: + nodename, _, pid = node + self.note('\t> %s: %s -> %s' % (nodename, + SIGMAP[sig][3:], + pid)) + if not self.signal_node(nodename, pid, sig): + on_down(node) + + def note_waiting(): + left = len(P) + if left: + pids = ', '.join(str(pid) for _, _, pid in P) + self.note(self.colored.blue('> Waiting for %s %s -> %s...' 
% ( + left, pluralize(left, 'node'), pids)), newline=False) + + if retry: + note_waiting() + its = 0 + while P: + for node in P: + its += 1 + self.note('.', newline=False) + nodename, _, pid = node + if not self.node_alive(pid): + self.note('\n\t> %s: %s' % (nodename, self.OK)) + on_down(node) + note_waiting() + break + if P and not its % len(P): + sleep(float(retry)) + self.note('') + + def getpids(self, p, cmd, callback=None): + pidfile_template = p.options.setdefault('--pidfile', 'celeryd@%n.pid') + + nodes = [] + for nodename, argv, expander in multi_args(p, cmd): + pid = None + pidfile = expander(pidfile_template) + try: + pid = Pidfile(pidfile).read_pid() + except ValueError: + pass + if pid: + nodes.append((nodename, tuple(argv), pid)) + else: + self.note('> %s: %s' % (nodename, self.DOWN)) + if callback: + callback(nodename, argv, pid) + + return nodes + + def kill(self, argv, cmd): + self.splash() + p = NamespacedOptionParser(argv) + for nodename, _, pid in self.getpids(p, cmd): + self.note('Killing node %s (%s)' % (nodename, pid)) + self.signal_node(nodename, pid, signal.SIGKILL) + + def stop(self, argv, cmd, retry=None, callback=None): + self.splash() + p = NamespacedOptionParser(argv) + return self._stop_nodes(p, cmd, retry=retry, callback=callback) + + def _stop_nodes(self, p, cmd, retry=None, callback=None): + restargs = p.args[len(p.values):] + self.shutdown_nodes(self.getpids(p, cmd, callback=callback), + sig=findsig(restargs), + retry=retry, + callback=callback) + + def restart(self, argv, cmd): + self.splash() + p = NamespacedOptionParser(argv) + self.with_detacher_default_options(p) + retvals = [] + + def on_node_shutdown(nodename, argv, pid): + self.note(self.colored.blue( + '> Restarting node %s: ' % nodename), newline=False) + retval = self.waitexec(argv) + self.note(retval and self.FAILED or self.OK) + retvals.append(retval) + + self._stop_nodes(p, cmd, retry=2, callback=on_node_shutdown) + self.retval = int(any(retvals)) + + def stopwait(self, argv, cmd): + self.splash() + p = NamespacedOptionParser(argv) + self.with_detacher_default_options(p) + return self._stop_nodes(p, cmd, retry=2) + stop_verify = stopwait # compat + + def expand(self, argv, cmd=None): + template = argv[0] + p = NamespacedOptionParser(argv[1:]) + for _, _, expander in multi_args(p, cmd): + self.say(expander(template)) + + def help(self, argv, cmd=None): + self.say(__doc__) + + def usage(self): + self.splash() + self.say(USAGE % {'prog_name': self.prog_name}) + + def splash(self): + if not self.nosplash: + c = self.colored + self.note(c.cyan('celeryd-multi v%s' % VERSION_BANNER)) + + def waitexec(self, argv, path=sys.executable): + args = ' '.join([path] + list(argv)) + argstr = shellsplit(from_utf8(args)) + pipe = Popen(argstr, env=self.env) + self.info(' %s' % ' '.join(argstr)) + retcode = pipe.wait() + if retcode < 0: + self.note('* Child was terminated by signal %s' % (-retcode, )) + return -retcode + elif retcode > 0: + self.note('* Child terminated with failure code %s' % (retcode, )) + return retcode + + def error(self, msg=None): + if msg: + self.say(msg) + self.usage() + self.retcode = 1 + return 1 + + def info(self, msg, newline=True): + if self.verbose: + self.note(msg, newline=newline) + + def note(self, msg, newline=True): + if not self.quiet: + self.say(str(msg), newline=newline) + + @cached_property + def colored(self): + return term.colored(enabled=not self.no_color) + + @cached_property + def OK(self): + return str(self.colored.green('OK')) + + @cached_property + def 
FAILED(self): + return str(self.colored.red('FAILED')) + + @cached_property + def DOWN(self): + return str(self.colored.magenta('DOWN')) + + +def multi_args(p, cmd='celeryd', append='', prefix='', suffix=''): + names = p.values + options = dict(p.options) + passthrough = p.passthrough + ranges = len(names) == 1 + if ranges: + try: + noderange = int(names[0]) + except ValueError: + pass + else: + names = [str(v) for v in range(1, noderange + 1)] + prefix = 'celery' + cmd = options.pop('--cmd', cmd) + append = options.pop('--append', append) + hostname = options.pop('--hostname', + options.pop('-n', socket.gethostname())) + prefix = options.pop('--prefix', prefix) or '' + suffix = options.pop('--suffix', suffix) or '.' + hostname + if suffix in ('""', "''"): + suffix = '' + + for ns_name, ns_opts in p.namespaces.items(): + if ',' in ns_name or (ranges and '-' in ns_name): + for subns in parse_ns_range(ns_name, ranges): + p.namespaces[subns].update(ns_opts) + p.namespaces.pop(ns_name) + + for name in names: + this_name = options['-n'] = prefix + name + suffix + expand = abbreviations({'%h': this_name, + '%n': name}) + argv = ([expand(cmd)] + + [format_opt(opt, expand(value)) + for opt, value in p.optmerge(name, options).items()] + + [passthrough]) + if append: + argv.append(expand(append)) + yield this_name, argv, expand + + +class NamespacedOptionParser(object): + + def __init__(self, args): + self.args = args + self.options = {} + self.values = [] + self.passthrough = '' + self.namespaces = defaultdict(lambda: {}) + + self.parse() + + def parse(self): + rargs = list(self.args) + pos = 0 + while pos < len(rargs): + arg = rargs[pos] + if arg == '--': + self.passthrough = ' '.join(rargs[pos:]) + break + elif arg[0] == '-': + if arg[1] == '-': + self.process_long_opt(arg[2:]) + else: + value = None + if len(rargs) > pos + 1 and rargs[pos + 1][0] != '-': + value = rargs[pos + 1] + pos += 1 + self.process_short_opt(arg[1:], value) + else: + self.values.append(arg) + pos += 1 + + def process_long_opt(self, arg, value=None): + if '=' in arg: + arg, value = arg.split('=', 1) + self.add_option(arg, value, short=False) + + def process_short_opt(self, arg, value=None): + self.add_option(arg, value, short=True) + + def optmerge(self, ns, defaults=None): + if defaults is None: + defaults = self.options + return dict(defaults, **self.namespaces[ns]) + + def add_option(self, name, value, short=False, ns=None): + prefix = short and '-' or '--' + dest = self.options + if ':' in name: + name, ns = name.split(':') + dest = self.namespaces[ns] + dest[prefix + name] = value + + +def quote(v): + return "\\'".join("'" + p + "'" for p in v.split("'")) + + +def format_opt(opt, value): + if not value: + return opt + if opt.startswith('--'): + return '%s=%s' % (opt, value) + return '%s %s' % (opt, value) + + +def parse_ns_range(ns, ranges=False): + ret = [] + for space in ',' in ns and ns.split(',') or [ns]: + if ranges and '-' in space: + start, stop = space.split('-') + x = [str(v) for v in range(int(start), int(stop) + 1)] + ret.extend(x) + else: + ret.append(space) + return ret + + +def abbreviations(mapping): + + def expand(S): + ret = S + if S is not None: + for short, long in mapping.items(): + ret = ret.replace(short, long) + return ret + + return expand + + +def findsig(args, default=signal.SIGTERM): + for arg in reversed(args): + if len(arg) == 2 and arg[0] == '-': + try: + return int(arg[1]) + except ValueError: + pass + if arg[0] == '-': + maybe_sig = 'SIG' + arg[1:] + if maybe_sig in SIGNAMES: + return 
getattr(signal, maybe_sig) + return default + +if __name__ == '__main__': # pragma: no cover + main() diff --git a/awx/lib/site-packages/celery/bin/celeryev.py b/awx/lib/site-packages/celery/bin/celeryev.py new file mode 100644 index 0000000000..89408496b6 --- /dev/null +++ b/awx/lib/site-packages/celery/bin/celeryev.py @@ -0,0 +1,117 @@ +# -*- coding: utf-8 -*- +""" + +The :program:`celery events` command. + +.. program:: celery events + +.. seealso:: + + See :ref:`preload-options` and :ref:`daemon-options`. + +.. cmdoption:: -d, --dump + + Dump events to stdout. + +.. cmdoption:: -c, --camera + + Take snapshots of events using this camera. + +.. cmdoption:: --detach + + Camera: Detach and run in the background as a daemon. + +.. cmdoption:: -F, --freq, --frequency + + Camera: Shutter frequency. Default is every 1.0 seconds. + +.. cmdoption:: -r, --maxrate + + Camera: Optional shutter rate limit (e.g. 10/m). + +.. cmdoption:: -l, --loglevel + + Logging level, choose between `DEBUG`, `INFO`, `WARNING`, + `ERROR`, `CRITICAL`, or `FATAL`. Default is INFO. + +""" +from __future__ import absolute_import +from __future__ import with_statement + +import sys + +from functools import partial + +from celery.platforms import detached, set_process_title, strargv +from celery.bin.base import Command, Option, daemon_options + + +class EvCommand(Command): + doc = __doc__ + supports_args = False + + def run(self, dump=False, camera=None, frequency=1.0, maxrate=None, + loglevel='INFO', logfile=None, prog_name='celeryev', + pidfile=None, uid=None, gid=None, umask=None, + working_directory=None, detach=False, **kwargs): + self.prog_name = prog_name + + if dump: + return self.run_evdump() + if camera: + return self.run_evcam(camera, freq=frequency, maxrate=maxrate, + loglevel=loglevel, logfile=logfile, + pidfile=pidfile, uid=uid, gid=gid, + umask=umask, + working_directory=working_directory, + detach=detach) + return self.run_evtop() + + def run_evdump(self): + from celery.events.dumper import evdump + self.set_process_status('dump') + return evdump(app=self.app) + + def run_evtop(self): + from celery.events.cursesmon import evtop + self.set_process_status('top') + return evtop(app=self.app) + + def run_evcam(self, camera, logfile=None, pidfile=None, uid=None, + gid=None, umask=None, working_directory=None, + detach=False, **kwargs): + from celery.events.snapshot import evcam + workdir = working_directory + self.set_process_status('cam') + kwargs['app'] = self.app + cam = partial(evcam, camera, + logfile=logfile, pidfile=pidfile, **kwargs) + + if detach: + with detached(logfile, pidfile, uid, gid, umask, workdir): + return cam() + else: + return cam() + + def set_process_status(self, prog, info=''): + prog = '%s:%s' % (self.prog_name, prog) + info = '%s %s' % (info, strargv(sys.argv)) + return set_process_title(prog, info=info) + + def get_options(self): + return ( + Option('-d', '--dump', action='store_true'), + Option('-c', '--camera'), + Option('--detach', action='store_true'), + Option('-F', '--frequency', '--freq', type='float', default=1.0), + Option('-r', '--maxrate'), + Option('-l', '--loglevel', default='INFO'), + ) + daemon_options(default_pidfile='celeryev.pid') + + +def main(): + ev = EvCommand() + ev.execute_from_commandline() + +if __name__ == '__main__': # pragma: no cover + main() diff --git a/awx/lib/site-packages/celery/canvas.py b/awx/lib/site-packages/celery/canvas.py new file mode 100644 index 0000000000..d8dcfabcad --- /dev/null +++ b/awx/lib/site-packages/celery/canvas.py @@ -0,0 
+1,510 @@ +# -*- coding: utf-8 -*- +""" + celery.canvas + ~~~~~~~~~~~~~ + + Composing task workflows. + + Documentation for these functions are in :mod:`celery`. + You should not import from this module directly. + +""" +from __future__ import absolute_import + +from copy import deepcopy +from functools import partial as _partial +from operator import itemgetter +from itertools import chain as _chain + +from kombu.utils import cached_property, fxrange, kwdict, reprcall, uuid + +from celery._state import current_app +from celery.utils.compat import chain_from_iterable +from celery.result import AsyncResult, GroupResult +from celery.utils.functional import ( + maybe_list, is_list, regen, + chunks as _chunks, +) +from celery.utils.text import truncate + + +class _getitem_property(object): + """Attribute -> dict key descriptor. + + The target object must support ``__getitem__``, + and optionally ``__setitem__``. + + Example: + + class Me(dict): + deep = defaultdict(dict) + + foo = _getitem_property('foo') + deep_thing = _getitem_property('deep.thing') + + + >>> me = Me() + >>> me.foo + None + + >>> me.foo = 10 + >>> me.foo + 10 + >>> me['foo'] + 10 + + >>> me.deep_thing = 42 + >>> me.deep_thinge + 42 + >>> me.deep: + defaultdict(, {'thing': 42}) + + """ + + def __init__(self, keypath): + path, _, self.key = keypath.rpartition('.') + self.path = path.split('.') if path else None + + def _path(self, obj): + return (reduce(lambda d, k: d[k], [obj] + self.path) if self.path + else obj) + + def __get__(self, obj, type=None): + if obj is None: + return type + return self._path(obj).get(self.key) + + def __set__(self, obj, value): + self._path(obj)[self.key] = value + + +class Signature(dict): + """Class that wraps the arguments and execution options + for a single task invocation. + + Used as the parts in a :class:`group` or to safely + pass tasks around as callbacks. + + :param task: Either a task class/instance, or the name of a task. + :keyword args: Positional arguments to apply. + :keyword kwargs: Keyword arguments to apply. + :keyword options: Additional options to :meth:`Task.apply_async`. + + Note that if the first argument is a :class:`dict`, the other + arguments will be ignored and the values in the dict will be used + instead. + + >>> s = subtask('tasks.add', args=(2, 2)) + >>> subtask(s) + {'task': 'tasks.add', args=(2, 2), kwargs={}, options={}} + + """ + TYPES = {} + _type = None + + @classmethod + def register_type(cls, subclass, name=None): + cls.TYPES[name or subclass.__name__] = subclass + return subclass + + @classmethod + def from_dict(self, d): + typ = d.get('subtask_type') + if typ: + return self.TYPES[typ].from_dict(kwdict(d)) + return Signature(d) + + def __init__(self, task=None, args=None, kwargs=None, options=None, + type=None, subtask_type=None, immutable=False, **ex): + init = dict.__init__ + + if isinstance(task, dict): + return init(self, task) # works like dict(d) + + # Also supports using task class/instance instead of string name. 
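+        # (anything with a `name` attribute is treated as a task type;
+        # plain string names fall through via AttributeError below)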
+ try: + task_name = task.name + except AttributeError: + task_name = task + else: + self._type = task + + init(self, + task=task_name, args=tuple(args or ()), + kwargs=kwargs or {}, + options=dict(options or {}, **ex), + subtask_type=subtask_type, + immutable=immutable) + + def __call__(self, *partial_args, **partial_kwargs): + return self.apply_async(partial_args, partial_kwargs) + delay = __call__ + + def apply(self, args=(), kwargs={}, **options): + """Apply this task locally.""" + # For callbacks: extra args are prepended to the stored args. + args, kwargs, options = self._merge(args, kwargs, options) + return self.type.apply(args, kwargs, **options) + + def _merge(self, args=(), kwargs={}, options={}): + if self.immutable: + return self.args, self.kwargs, dict(self.options, **options) + return (tuple(args) + tuple(self.args) if args else self.args, + dict(self.kwargs, **kwargs) if kwargs else self.kwargs, + dict(self.options, **options) if options else self.options) + + def clone(self, args=(), kwargs={}, **opts): + # need to deepcopy options so origins links etc. is not modified. + args, kwargs, opts = self._merge(args, kwargs, opts) + s = Signature.from_dict({'task': self.task, 'args': tuple(args), + 'kwargs': kwargs, 'options': deepcopy(opts), + 'subtask_type': self.subtask_type, + 'immutable': self.immutable}) + s._type = self._type + return s + partial = clone + + def _freeze(self, _id=None): + opts = self.options + try: + tid = opts['task_id'] + except KeyError: + tid = opts['task_id'] = _id or uuid() + return self.AsyncResult(tid) + + def replace(self, args=None, kwargs=None, options=None): + s = self.clone() + if args is not None: + s.args = args + if kwargs is not None: + s.kwargs = kwargs + if options is not None: + s.options = options + return s + + def set(self, immutable=None, **options): + if immutable is not None: + self.immutable = immutable + self.options.update(options) + return self + + def apply_async(self, args=(), kwargs={}, **options): + # For callbacks: extra args are prepended to the stored args. + args, kwargs, options = self._merge(args, kwargs, options) + return self._apply_async(args, kwargs, **options) + + def append_to_list_option(self, key, value): + items = self.options.setdefault(key, []) + if value not in items: + items.append(value) + return value + + def link(self, callback): + return self.append_to_list_option('link', callback) + + def link_error(self, errback): + return self.append_to_list_option('link_error', errback) + + def flatten_links(self): + return list(chain_from_iterable(_chain( + [[self]], + (link.flatten_links() + for link in maybe_list(self.options.get('link')) or []) + ))) + + def __or__(self, other): + if not isinstance(self, chain) and isinstance(other, chain): + return chain((self,) + other.tasks) + elif isinstance(other, chain): + return chain(*self.tasks + other.tasks) + elif isinstance(other, Signature): + if isinstance(self, chain): + return chain(*self.tasks + (other, )) + return chain(self, other) + return NotImplemented + + def __invert__(self): + return self.apply_async().get() + + def __reduce__(self): + # for serialization, the task type is lazily loaded, + # and not stored in the dict itself. 
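+        # (Editor's sketch) i.e. pickling round-trips through subtask():
+        #     s = subtask('tasks.add', (2, 2))
+        #     assert pickle.loads(pickle.dumps(s)) == s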
+ return subtask, (dict(self), ) + + def reprcall(self, *args, **kwargs): + args, kwargs, _ = self._merge(args, kwargs, {}) + return reprcall(self['task'], args, kwargs) + + def __repr__(self): + return self.reprcall() + + @cached_property + def type(self): + return self._type or current_app.tasks[self['task']] + + @cached_property + def AsyncResult(self): + try: + return self.type.AsyncResult + except KeyError: # task not registered + return AsyncResult + + @cached_property + def _apply_async(self): + try: + return self.type.apply_async + except KeyError: + return _partial(current_app.send_task, self['task']) + id = _getitem_property('options.task_id') + task = _getitem_property('task') + args = _getitem_property('args') + kwargs = _getitem_property('kwargs') + options = _getitem_property('options') + subtask_type = _getitem_property('subtask_type') + immutable = _getitem_property('immutable') + + +class chain(Signature): + + def __init__(self, *tasks, **options): + tasks = tasks[0] if len(tasks) == 1 and is_list(tasks[0]) else tasks + Signature.__init__( + self, 'celery.chain', (), {'tasks': tasks}, **options + ) + self.tasks = tasks + self.subtask_type = 'chain' + + def __call__(self, *args, **kwargs): + if self.tasks: + return self.apply_async(args, kwargs) + + @classmethod + def from_dict(self, d): + tasks = d['kwargs']['tasks'] + if d['args'] and tasks: + # partial args passed on to first task in chain (Issue #1057). + tasks[0]['args'] = d['args'] + tasks[0]['args'] + return chain(*d['kwargs']['tasks'], **kwdict(d['options'])) + + @property + def type(self): + return self._type or self.tasks[0].type.app.tasks['celery.chain'] + + def __repr__(self): + return ' | '.join(repr(t) for t in self.tasks) +Signature.register_type(chain) + + +class _basemap(Signature): + _task_name = None + _unpack_args = itemgetter('task', 'it') + + def __init__(self, task, it, **options): + Signature.__init__( + self, self._task_name, (), + {'task': task, 'it': regen(it)}, immutable=True, **options + ) + + def apply_async(self, args=(), kwargs={}, **opts): + # need to evaluate generators + task, it = self._unpack_args(self.kwargs) + return self.type.apply_async( + (), {'task': task, 'it': list(it)}, **opts + ) + + @classmethod + def from_dict(self, d): + return chunks(*self._unpack_args(d['kwargs']), **d['options']) + + +class xmap(_basemap): + _task_name = 'celery.map' + + def __repr__(self): + task, it = self._unpack_args(self.kwargs) + return '[%s(x) for x in %s]' % (task.task, truncate(repr(it), 100)) +Signature.register_type(xmap) + + +class xstarmap(_basemap): + _task_name = 'celery.starmap' + + def __repr__(self): + task, it = self._unpack_args(self.kwargs) + return '[%s(*x) for x in %s]' % (task.task, truncate(repr(it), 100)) +Signature.register_type(xstarmap) + + +class chunks(Signature): + _unpack_args = itemgetter('task', 'it', 'n') + + def __init__(self, task, it, n, **options): + Signature.__init__( + self, 'celery.chunks', (), + {'task': task, 'it': regen(it), 'n': n}, + immutable=True, **options + ) + + @classmethod + def from_dict(self, d): + return chunks(*self._unpack_args(d['kwargs']), **d['options']) + + def apply_async(self, args=(), kwargs={}, **opts): + return self.group().apply_async(args, kwargs, **opts) + + def __call__(self, **options): + return self.group()(**options) + + def group(self): + # need to evaluate generators + task, it, n = self._unpack_args(self.kwargs) + return group(xstarmap(task, part) for part in _chunks(iter(it), n)) + + @classmethod + def apply_chunks(cls, task, 
it, n): + return cls(task, it, n)() +Signature.register_type(chunks) + + +def _maybe_group(tasks): + if isinstance(tasks, group): + tasks = list(tasks.tasks) + elif isinstance(tasks, Signature): + tasks = [tasks] + else: + tasks = regen(tasks) + return tasks + + +class group(Signature): + + def __init__(self, *tasks, **options): + if len(tasks) == 1: + tasks = _maybe_group(tasks[0]) + Signature.__init__( + self, 'celery.group', (), {'tasks': tasks}, **options + ) + self.tasks, self.subtask_type = tasks, 'group' + + @classmethod + def from_dict(self, d): + tasks = d['kwargs']['tasks'] + if d['args'] and tasks: + # partial args passed on to all tasks in the group (Issue #1057). + for task in tasks: + task['args'] = d['args'] + task['args'] + return group(tasks, **kwdict(d['options'])) + + def __call__(self, *partial_args, **options): + tasks = [task.clone() for task in self.tasks] + if not tasks: + return + # taking the app from the first task in the list, + # there may be a better solution to this, e.g. + # consolidate tasks with the same app and apply them in + # batches. + type = tasks[0].type.app.tasks[self['task']] + return type(*type.prepare(options, tasks, partial_args)) + + def _freeze(self, _id=None): + opts = self.options + try: + gid = opts['group'] + except KeyError: + gid = opts['group'] = uuid() + new_tasks, results = [], [] + for task in self.tasks: + task = maybe_subtask(task).clone() + results.append(task._freeze()) + new_tasks.append(task) + self.tasks = self.kwargs['tasks'] = new_tasks + return GroupResult(gid, results) + + def skew(self, start=1.0, stop=None, step=1.0): + _next_skew = fxrange(start, stop, step, repeatlast=True).next + for task in self.tasks: + task.set(countdown=_next_skew()) + return self + + def __iter__(self): + return iter(self.tasks) + + def __repr__(self): + return repr(self.tasks) +Signature.register_type(group) + + +class chord(Signature): + + def __init__(self, header, body=None, task='celery.chord', + args=(), kwargs={}, **options): + Signature.__init__( + self, task, args, + dict(kwargs, header=_maybe_group(header), + body=maybe_subtask(body)), **options + ) + self.subtask_type = 'chord' + + @classmethod + def from_dict(self, d): + args, d['kwargs'] = self._unpack_args(**kwdict(d['kwargs'])) + return self(*args, **kwdict(d)) + + @staticmethod + def _unpack_args(header=None, body=None, **kwargs): + # Python signatures are better at extracting keys from dicts + # than manually popping things off. 
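+        # (Editor's illustration, hypothetical signatures s1..s3):
+        #     _unpack_args(header=[s1, s2], body=s3, countdown=10)
+        #     -> (([s1, s2], s3), {'countdown': 10})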
+        return (header, body), kwargs
+
+    @property
+    def type(self):
+        return self._type or self.tasks[0].type.app.tasks['celery.chord']
+
+    def __call__(self, body=None, **kwargs):
+        _chord = self.type
+        body = (body or self.kwargs['body']).clone()
+        kwargs = dict(self.kwargs, body=body, **kwargs)
+        if _chord.app.conf.CELERY_ALWAYS_EAGER:
+            return self.apply((), kwargs)
+        callback_id = body.options.setdefault('task_id', uuid())
+        return _chord.AsyncResult(callback_id, parent=_chord(**kwargs))
+
+    def clone(self, *args, **kwargs):
+        s = Signature.clone(self, *args, **kwargs)
+        # need to make copy of body
+        try:
+            s.kwargs['body'] = s.kwargs['body'].clone()
+        except (AttributeError, KeyError):
+            pass
+        return s
+
+    def link(self, callback):
+        self.body.link(callback)
+        return callback
+
+    def link_error(self, errback):
+        self.body.link_error(errback)
+        return errback
+
+    def __repr__(self):
+        if self.body:
+            return self.body.reprcall(self.tasks)
+        return '<chord without body: %r>' % (self.tasks, )
+
+    tasks = _getitem_property('kwargs.header')
+    body = _getitem_property('kwargs.body')
+Signature.register_type(chord)
+
+
+def subtask(varies, *args, **kwargs):
+    if not (args or kwargs) and isinstance(varies, dict):
+        if isinstance(varies, Signature):
+            return varies.clone()
+        return Signature.from_dict(varies)
+    return Signature(varies, *args, **kwargs)
+
+
+def maybe_subtask(d):
+    if d is not None and isinstance(d, dict) and not isinstance(d, Signature):
+        return subtask(d)
+    return d
diff --git a/awx/lib/site-packages/celery/concurrency/__init__.py b/awx/lib/site-packages/celery/concurrency/__init__.py
new file mode 100644
index 0000000000..02d222f90b
--- /dev/null
+++ b/awx/lib/site-packages/celery/concurrency/__init__.py
@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.concurrency
+    ~~~~~~~~~~~~~~~~~~
+
+    Pool implementation abstract factory, and alias definitions.
+
+"""
+from __future__ import absolute_import
+
+# Import from kombu directly as it's used
+# early in the import stage, where celery.utils loads
+# too much (e.g. for eventlet patching)
+from kombu.utils import symbol_by_name
+
+ALIASES = {
+    'processes': 'celery.concurrency.processes:TaskPool',
+    'eventlet': 'celery.concurrency.eventlet:TaskPool',
+    'gevent': 'celery.concurrency.gevent:TaskPool',
+    'threads': 'celery.concurrency.threads:TaskPool',
+    'solo': 'celery.concurrency.solo:TaskPool',
+}
+
+
+def get_implementation(cls):
+    return symbol_by_name(cls, ALIASES)
diff --git a/awx/lib/site-packages/celery/concurrency/base.py b/awx/lib/site-packages/celery/concurrency/base.py
new file mode 100644
index 0000000000..2ab7ecba26
--- /dev/null
+++ b/awx/lib/site-packages/celery/concurrency/base.py
@@ -0,0 +1,166 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.concurrency.base
+    ~~~~~~~~~~~~~~~~~~~~~~~
+
+    TaskPool interface.
+
+"""
+from __future__ import absolute_import
+
+import logging
+import os
+import time
+
+from kombu.utils.encoding import safe_repr
+
+from celery.utils import timer2
+from celery.utils.log import get_logger
+
+logger = get_logger('celery.concurrency')
+
+
+def apply_target(target, args=(), kwargs={}, callback=None,
+                 accept_callback=None, pid=None, **_):
+    if accept_callback:
+        accept_callback(pid or os.getpid(), time.time())
+    callback(target(*args, **kwargs))
+
+
+class BasePool(object):
+    RUN = 0x1
+    CLOSE = 0x2
+    TERMINATE = 0x3
+
+    Timer = timer2.Timer
+
+    #: set to true if the pool can be shutdown from within
+    #: a signal handler.
+    signal_safe = True
+
+    #: set to true if pool supports rate limits.
+ #: (this is here for gevent, which currently does not implement + #: the necessary timers). + rlimit_safe = True + + #: set to true if pool requires the use of a mediator + #: thread (e.g. if applying new items can block the current thread). + requires_mediator = False + + #: set to true if pool uses greenlets. + is_green = False + + _state = None + _pool = None + + #: only used by multiprocessing pool + uses_semaphore = False + + def __init__(self, limit=None, putlocks=True, + forking_enable=True, callbacks_propagate=(), **options): + self.limit = limit + self.putlocks = putlocks + self.options = options + self.forking_enable = forking_enable + self.callbacks_propagate = callbacks_propagate + self._does_debug = logger.isEnabledFor(logging.DEBUG) + + def on_start(self): + pass + + def did_start_ok(self): + return True + + def on_stop(self): + pass + + def on_apply(self, *args, **kwargs): + pass + + def on_terminate(self): + pass + + def on_soft_timeout(self, job): + pass + + def on_hard_timeout(self, job): + pass + + def maybe_handle_result(self, *args): + pass + + def maintain_pool(self, *args, **kwargs): + pass + + def terminate_job(self, pid): + raise NotImplementedError( + '%s does not implement kill_job' % (self.__class__, )) + + def restart(self): + raise NotImplementedError( + '%s does not implement restart' % (self.__class__, )) + + def stop(self): + self.on_stop() + self._state = self.TERMINATE + + def terminate(self): + self._state = self.TERMINATE + self.on_terminate() + + def start(self): + self.on_start() + self._state = self.RUN + + def close(self): + self._state = self.CLOSE + self.on_close() + + def on_close(self): + pass + + def init_callbacks(self, **kwargs): + pass + + def apply_async(self, target, args=[], kwargs={}, **options): + """Equivalent of the :func:`apply` built-in function. + + Callbacks should optimally return as soon as possible since + otherwise the thread which handles the result will get blocked. + + """ + if self._does_debug: + logger.debug('TaskPool: Apply %s (args:%s kwargs:%s)', + target, safe_repr(args), safe_repr(kwargs)) + + return self.on_apply(target, args, kwargs, + waitforslot=self.putlocks, + callbacks_propagate=self.callbacks_propagate, + **options) + + def _get_info(self): + return {} + + @property + def info(self): + return self._get_info() + + @property + def active(self): + return self._state == self.RUN + + @property + def num_processes(self): + return self.limit + + @property + def readers(self): + return {} + + @property + def writers(self): + return {} + + @property + def timers(self): + return {} diff --git a/awx/lib/site-packages/celery/concurrency/eventlet.py b/awx/lib/site-packages/celery/concurrency/eventlet.py new file mode 100644 index 0000000000..fd97269f61 --- /dev/null +++ b/awx/lib/site-packages/celery/concurrency/eventlet.py @@ -0,0 +1,153 @@ +# -*- coding: utf-8 -*- +""" + celery.concurrency.eventlet + ~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Eventlet pool implementation. + +""" +from __future__ import absolute_import + +import os +import sys + +EVENTLET_NOPATCH = os.environ.get('EVENTLET_NOPATCH', False) +EVENTLET_DBLOCK = int(os.environ.get('EVENTLET_NOBLOCK', 0)) +W_RACE = """\ +Celery module with %s imported before eventlet patched\ +""" +RACE_MODS = ('billiard.', 'celery.', 'kombu.') + + +#: Warn if we couldn't patch early enough, +#: and thread/socket depending celery modules have already been loaded. 
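+#
+# (Editor's note) The scan below is meant to catch cases like `import
+# celery` running before this module: monkey_patch() further down can
+# not retroactively fix thread/socket references such modules already
+# hold.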
+for mod in (mod for mod in sys.modules if mod.startswith(RACE_MODS)): + for side in ('thread', 'threading', 'socket'): + if getattr(mod, side, None): + import warnings + warnings.warn(RuntimeWarning(W_RACE % side)) + + +PATCHED = [0] +if not EVENTLET_NOPATCH and not PATCHED[0]: + PATCHED[0] += 1 + import eventlet + import eventlet.debug + eventlet.monkey_patch() + eventlet.debug.hub_blocking_detection(EVENTLET_DBLOCK) + +from time import time + +from celery import signals +from celery.utils import timer2 + +from . import base + + +def apply_target(target, args=(), kwargs={}, callback=None, + accept_callback=None, getpid=None): + return base.apply_target(target, args, kwargs, callback, accept_callback, + pid=getpid()) + + +class Schedule(timer2.Schedule): + + def __init__(self, *args, **kwargs): + from eventlet.greenthread import spawn_after + from greenlet import GreenletExit + super(Schedule, self).__init__(*args, **kwargs) + + self.GreenletExit = GreenletExit + self._spawn_after = spawn_after + self._queue = set() + + def _enter(self, eta, priority, entry): + secs = max(eta - time(), 0) + g = self._spawn_after(secs, entry) + self._queue.add(g) + g.link(self._entry_exit, entry) + g.entry = entry + g.eta = eta + g.priority = priority + g.cancelled = False + return g + + def _entry_exit(self, g, entry): + try: + try: + g.wait() + except self.GreenletExit: + entry.cancel() + g.cancelled = True + finally: + self._queue.discard(g) + + def clear(self): + queue = self._queue + while queue: + try: + queue.pop().cancel() + except (KeyError, self.GreenletExit): + pass + + @property + def queue(self): + return [(g.eta, g.priority, g.entry) for g in self._queue] + + +class Timer(timer2.Timer): + Schedule = Schedule + + def ensure_started(self): + pass + + def stop(self): + self.schedule.clear() + + def cancel(self, tref): + try: + tref.cancel() + except self.schedule.GreenletExit: + pass + + def start(self): + pass + + +class TaskPool(base.BasePool): + Timer = Timer + + rlimit_safe = False + signal_safe = False + is_green = True + + def __init__(self, *args, **kwargs): + from eventlet import greenthread + from eventlet.greenpool import GreenPool + self.Pool = GreenPool + self.getcurrent = greenthread.getcurrent + self.getpid = lambda: id(greenthread.getcurrent()) + self.spawn_n = greenthread.spawn_n + + super(TaskPool, self).__init__(*args, **kwargs) + + def on_start(self): + self._pool = self.Pool(self.limit) + signals.eventlet_pool_started.send(sender=self) + self._quick_put = self._pool.spawn_n + self._quick_apply_sig = signals.eventlet_pool_apply.send + + def on_stop(self): + signals.eventlet_pool_preshutdown.send(sender=self) + if self._pool is not None: + self._pool.waitall() + signals.eventlet_pool_postshutdown.send(sender=self) + + def on_apply(self, target, args=None, kwargs=None, callback=None, + accept_callback=None, **_): + self._quick_apply_sig( + sender=self, target=target, args=args, kwargs=kwargs, + ) + self._quick_put(apply_target, target, args, kwargs, + callback, accept_callback, + self.getpid) diff --git a/awx/lib/site-packages/celery/concurrency/gevent.py b/awx/lib/site-packages/celery/concurrency/gevent.py new file mode 100644 index 0000000000..881023746d --- /dev/null +++ b/awx/lib/site-packages/celery/concurrency/gevent.py @@ -0,0 +1,149 @@ +# -*- coding: utf-8 -*- +""" + celery.concurrency.gevent + ~~~~~~~~~~~~~~~~~~~~~~~~~ + + gevent pool implementation. 
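+
+    A minimal invocation sketch (editor's note; flag assumed from the
+    ``gevent`` alias in :mod:`celery.concurrency`)::
+
+        celeryd --pool=gevent --concurrency=100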
+ +""" +from __future__ import absolute_import +from __future__ import with_statement + +import os + +PATCHED = [0] +if not os.environ.get('GEVENT_NOPATCH') and not PATCHED[0]: + PATCHED[0] += 1 + from gevent import monkey, version_info + monkey.patch_all() + if version_info[0] == 0: + # Signals are not working along gevent in version prior 1.0 + # and they are not monkey patch by monkey.patch_all() + from gevent import signal as _gevent_signal + _signal = __import__('signal') + _signal.signal = _gevent_signal + +try: + from gevent import Timeout +except ImportError: + Timeout = None # noqa + +from time import time + +from celery.utils import timer2 + +from .base import apply_target, BasePool + + +def apply_timeout(target, args=(), kwargs={}, callback=None, + accept_callback=None, pid=None, timeout=None, + timeout_callback=None, **rest): + try: + with Timeout(timeout): + return apply_target(target, args, kwargs, callback, + accept_callback, pid, **rest) + except Timeout: + return timeout_callback(False, timeout) + + +class Schedule(timer2.Schedule): + + def __init__(self, *args, **kwargs): + from gevent.greenlet import Greenlet, GreenletExit + + class _Greenlet(Greenlet): + + def cancel(self): + self.kill() + + self._Greenlet = _Greenlet + self._GreenletExit = GreenletExit + super(Schedule, self).__init__(*args, **kwargs) + self._queue = set() + + def _enter(self, eta, priority, entry): + secs = max(eta - time(), 0) + g = self._Greenlet.spawn_later(secs, entry) + self._queue.add(g) + g.link(self._entry_exit) + g.entry = entry + g.eta = eta + g.priority = priority + g.cancelled = False + return g + + def _entry_exit(self, g): + try: + g.kill() + finally: + self._queue.discard(g) + + def clear(self): + queue = self._queue + while queue: + try: + queue.pop().kill() + except KeyError: + pass + + @property + def queue(self): + return [(g.eta, g.priority, g.entry) for g in self._queue] + + +class Timer(timer2.Timer): + Schedule = Schedule + + def ensure_started(self): + pass + + def stop(self): + self.schedule.clear() + + def start(self): + pass + + +class TaskPool(BasePool): + Timer = Timer + + signal_safe = False + rlimit_safe = False + is_green = True + + def __init__(self, *args, **kwargs): + from gevent import spawn_raw + from gevent.pool import Pool + self.Pool = Pool + self.spawn_n = spawn_raw + self.timeout = kwargs.get('timeout') + super(TaskPool, self).__init__(*args, **kwargs) + + def on_start(self): + self._pool = self.Pool(self.limit) + self._quick_put = self._pool.spawn + + def on_stop(self): + if self._pool is not None: + self._pool.join() + + def on_apply(self, target, args=None, kwargs=None, callback=None, + accept_callback=None, timeout=None, + timeout_callback=None, **_): + timeout = self.timeout if timeout is None else timeout + return self._quick_put(apply_timeout if timeout else apply_target, + target, args, kwargs, callback, accept_callback, + timeout=timeout, + timeout_callback=timeout_callback) + + def grow(self, n=1): + self._pool._semaphore.counter += n + self._pool.size += n + + def shrink(self, n=1): + self._pool._semaphore.counter -= n + self._pool.size -= n + + @property + def num_processes(self): + return len(self._pool) diff --git a/awx/lib/site-packages/celery/concurrency/processes/__init__.py b/awx/lib/site-packages/celery/concurrency/processes/__init__.py new file mode 100644 index 0000000000..799f7698ea --- /dev/null +++ b/awx/lib/site-packages/celery/concurrency/processes/__init__.py @@ -0,0 +1,148 @@ +# -*- coding: utf-8 -*- +""" + 
celery.concurrency.processes + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Pool implementation using :mod:`multiprocessing`. + + We use the billiard fork of multiprocessing which contains + numerous improvements. + +""" +from __future__ import absolute_import + +import os + +from billiard import forking_enable +from billiard.pool import Pool, RUN, CLOSE + +from celery import platforms +from celery import signals +from celery._state import set_default_app +from celery.concurrency.base import BasePool +from celery.task import trace + +#: List of signals to reset when a child process starts. +WORKER_SIGRESET = frozenset(['SIGTERM', + 'SIGHUP', + 'SIGTTIN', + 'SIGTTOU', + 'SIGUSR1']) + +#: List of signals to ignore when a child process starts. +WORKER_SIGIGNORE = frozenset(['SIGINT']) + + +def process_initializer(app, hostname): + """Initializes the process so it can be used to process tasks.""" + platforms.signals.reset(*WORKER_SIGRESET) + platforms.signals.ignore(*WORKER_SIGIGNORE) + platforms.set_mp_process_title('celeryd', hostname=hostname) + # This is for Windows and other platforms not supporting + # fork(). Note that init_worker makes sure it's only + # run once per process. + app.loader.init_worker() + app.loader.init_worker_process() + app.log.setup(int(os.environ.get('CELERY_LOG_LEVEL', 0)), + os.environ.get('CELERY_LOG_FILE') or None, + bool(os.environ.get('CELERY_LOG_REDIRECT', False)), + str(os.environ.get('CELERY_LOG_REDIRECT_LEVEL'))) + if os.environ.get('FORKED_BY_MULTIPROCESSING'): + # pool did execv after fork + trace.setup_worker_optimizations(app) + else: + app.set_current() + set_default_app(app) + app.finalize() + trace._tasks = app._tasks # enables fast_trace_task optimization. + from celery.task.trace import build_tracer + for name, task in app.tasks.iteritems(): + task.__trace__ = build_tracer(name, task, app.loader, hostname) + signals.worker_process_init.send(sender=None) + + +class TaskPool(BasePool): + """Multiprocessing Pool implementation.""" + Pool = Pool + + requires_mediator = True + uses_semaphore = True + + def on_start(self): + """Run the task pool. + + Will pre-fork all workers so they're ready to accept tasks. 
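+
+        A rough sketch (editor's illustration; any ``initargs`` for
+        :func:`process_initializer` are assumed to arrive through
+        ``self.options``)::
+
+            pool = TaskPool(limit=4)
+            pool.start()    # forks 4 billiard worker processes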
+ + """ + forking_enable(self.forking_enable) + P = self._pool = self.Pool(processes=self.limit, + initializer=process_initializer, + **self.options) + self.on_apply = P.apply_async + self.on_soft_timeout = P._timeout_handler.on_soft_timeout + self.on_hard_timeout = P._timeout_handler.on_hard_timeout + self.maintain_pool = P.maintain_pool + self.maybe_handle_result = P._result_handler.handle_event + + def did_start_ok(self): + return self._pool.did_start_ok() + + def on_stop(self): + """Gracefully stop the pool.""" + if self._pool is not None and self._pool._state in (RUN, CLOSE): + self._pool.close() + self._pool.join() + self._pool = None + + def on_terminate(self): + """Force terminate the pool.""" + if self._pool is not None: + self._pool.terminate() + self._pool = None + + def on_close(self): + if self._pool is not None and self._pool._state == RUN: + self._pool.close() + + def terminate_job(self, pid, signal=None): + return self._pool.terminate_job(pid, signal) + + def grow(self, n=1): + return self._pool.grow(n) + + def shrink(self, n=1): + return self._pool.shrink(n) + + def restart(self): + self._pool.restart() + + def _get_info(self): + return {'max-concurrency': self.limit, + 'processes': [p.pid for p in self._pool._pool], + 'max-tasks-per-child': self._pool._maxtasksperchild, + 'put-guarded-by-semaphore': self.putlocks, + 'timeouts': (self._pool.soft_timeout, self._pool.timeout)} + + def init_callbacks(self, **kwargs): + for k, v in kwargs.iteritems(): + setattr(self._pool, k, v) + + def handle_timeouts(self): + if self._pool._timeout_handler: + self._pool._timeout_handler.handle_event() + + @property + def num_processes(self): + return self._pool._processes + + @property + def readers(self): + return self._pool.readers + + @property + def writers(self): + return self._pool.writers + + @property + def timers(self): + return {self.maintain_pool: 5.0} diff --git a/awx/lib/site-packages/celery/concurrency/solo.py b/awx/lib/site-packages/celery/concurrency/solo.py new file mode 100644 index 0000000000..51d47c6a29 --- /dev/null +++ b/awx/lib/site-packages/celery/concurrency/solo.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +""" + celery.concurrency.solo + ~~~~~~~~~~~~~~~~~~~~~~~ + + Single-threaded pool implementation. + +""" +from __future__ import absolute_import + +import os + +from .base import BasePool, apply_target + + +class TaskPool(BasePool): + """Solo task pool (blocking, inline, fast).""" + + def __init__(self, *args, **kwargs): + super(TaskPool, self).__init__(*args, **kwargs) + self.on_apply = apply_target + + def _get_info(self): + return {'max-concurrency': 1, + 'processes': [os.getpid()], + 'max-tasks-per-child': None, + 'put-guarded-by-semaphore': True, + 'timeouts': ()} diff --git a/awx/lib/site-packages/celery/concurrency/threads.py b/awx/lib/site-packages/celery/concurrency/threads.py new file mode 100644 index 0000000000..ba5c6416c2 --- /dev/null +++ b/awx/lib/site-packages/celery/concurrency/threads.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +""" + celery.concurrency.threads + ~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Pool implementation using threads. 
+ +""" +from __future__ import absolute_import + +from celery.utils.compat import UserDict + +from .base import apply_target, BasePool + + +class NullDict(UserDict): + + def __setitem__(self, key, value): + pass + + +class TaskPool(BasePool): + + def __init__(self, *args, **kwargs): + try: + import threadpool + except ImportError: + raise ImportError( + 'The threaded pool requires the threadpool module.') + self.WorkRequest = threadpool.WorkRequest + self.ThreadPool = threadpool.ThreadPool + super(TaskPool, self).__init__(*args, **kwargs) + + def on_start(self): + self._pool = self.ThreadPool(self.limit) + # threadpool stores all work requests until they are processed + # we don't need this dict, and it occupies way too much memory. + self._pool.workRequests = NullDict() + self._quick_put = self._pool.putRequest + self._quick_clear = self._pool._results_queue.queue.clear + + def on_stop(self): + self._pool.dismissWorkers(self.limit, do_join=True) + + def on_apply(self, target, args=None, kwargs=None, callback=None, + accept_callback=None, **_): + req = self.WorkRequest(apply_target, (target, args, kwargs, callback, + accept_callback)) + self._quick_put(req) + # threadpool also has callback support, + # but for some reason the callback is not triggered + # before you've collected the results. + # Clear the results (if any), so it doesn't grow too large. + self._quick_clear() + return req diff --git a/awx/lib/site-packages/celery/contrib/__init__.py b/awx/lib/site-packages/celery/contrib/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/celery/contrib/abortable.py b/awx/lib/site-packages/celery/contrib/abortable.py new file mode 100644 index 0000000000..f7d9b79028 --- /dev/null +++ b/awx/lib/site-packages/celery/contrib/abortable.py @@ -0,0 +1,167 @@ +# -*- coding: utf-8 -*- +""" +========================= +Abortable tasks overview +========================= + +For long-running :class:`Task`'s, it can be desirable to support +aborting during execution. Of course, these tasks should be built to +support abortion specifically. + +The :class:`AbortableTask` serves as a base class for all :class:`Task` +objects that should support abortion by producers. + +* Producers may invoke the :meth:`abort` method on + :class:`AbortableAsyncResult` instances, to request abortion. + +* Consumers (workers) should periodically check (and honor!) the + :meth:`is_aborted` method at controlled points in their task's + :meth:`run` method. The more often, the better. + +The necessary intermediate communication is dealt with by the +:class:`AbortableTask` implementation. + +Usage example +------------- + +In the consumer: + +.. code-block:: python + + from celery.contrib.abortable import AbortableTask + from celery.utils.log import get_task_logger + + logger = get_logger(__name__) + + class MyLongRunningTask(AbortableTask): + + def run(self, **kwargs): + results = [] + for x in xrange(100): + # Check after every 5 loops.. + if x % 5 == 0: # alternatively, check when some timer is due + if self.is_aborted(**kwargs): + # Respect the aborted status and terminate + # gracefully + logger.warning('Task aborted.') + return + y = do_something_expensive(x) + results.append(y) + logger.info('Task finished.') + return results + + +In the producer: + +.. 
code-block:: python + + from myproject.tasks import MyLongRunningTask + + def myview(request): + + async_result = MyLongRunningTask.delay() + # async_result is of type AbortableAsyncResult + + # After 10 seconds, abort the task + time.sleep(10) + async_result.abort() + + ... + +After the `async_result.abort()` call, the task execution is not +aborted immediately. In fact, it is not guaranteed to abort at all. Keep +checking the `async_result` status, or call `async_result.wait()` to +have it block until the task is finished. + +.. note:: + + In order to abort tasks, there needs to be communication between the + producer and the consumer. This is currently implemented through the + database backend. Therefore, this class will only work with the + database backends. + +""" +from __future__ import absolute_import + +from celery.task.base import Task +from celery.result import AsyncResult + + +""" +Task States +----------- + +.. state:: ABORTED + +ABORTED +~~~~~~~ + +Task is aborted (typically by the producer) and should be +aborted as soon as possible. + +""" +ABORTED = 'ABORTED' + + +class AbortableAsyncResult(AsyncResult): + """Represents a abortable result. + + Specifically, this gives the `AsyncResult` a :meth:`abort()` method, + which sets the state of the underlying Task to `'ABORTED'`. + + """ + + def is_aborted(self): + """Returns :const:`True` if the task is (being) aborted.""" + return self.state == ABORTED + + def abort(self): + """Set the state of the task to :const:`ABORTED`. + + Abortable tasks monitor their state at regular intervals and + terminate execution if so. + + Be aware that invoking this method does not guarantee when the + task will be aborted (or even if the task will be aborted at + all). + + """ + # TODO: store_result requires all four arguments to be set, + # but only status should be updated here + return self.backend.store_result(self.id, result=None, + status=ABORTED, traceback=None) + + +class AbortableTask(Task): + """A celery task that serves as a base class for all :class:`Task`'s + that support aborting during execution. + + All subclasses of :class:`AbortableTask` must call the + :meth:`is_aborted` method periodically and act accordingly when + the call evaluates to :const:`True`. + + """ + + @classmethod + def AsyncResult(cls, task_id): + """Returns the accompanying AbortableAsyncResult instance.""" + return AbortableAsyncResult(task_id, backend=cls.backend) + + def is_aborted(self, **kwargs): + """Checks against the backend whether this + :class:`AbortableAsyncResult` is :const:`ABORTED`. + + Always returns :const:`False` in case the `task_id` parameter + refers to a regular (non-abortable) :class:`Task`. + + Be aware that invoking this method will cause a hit in the + backend (for example a database query), so find a good balance + between calling it regularly (for responsiveness), but not too + often (for performance). + + """ + task_id = kwargs.get('task_id', self.request.id) + result = self.AsyncResult(task_id) + if not isinstance(result, AbortableAsyncResult): + return False + return result.is_aborted() diff --git a/awx/lib/site-packages/celery/contrib/batches.py b/awx/lib/site-packages/celery/contrib/batches.py new file mode 100644 index 0000000000..4dd78f014e --- /dev/null +++ b/awx/lib/site-packages/celery/contrib/batches.py @@ -0,0 +1,245 @@ +# -*- coding: utf-8 -*- +""" +celery.contrib.batches +====================== + +Experimental task class that buffers messages and processes them as a list. + +.. 
warning::
+
+    For this to work you have to set
+    :setting:`CELERYD_PREFETCH_MULTIPLIER` to zero, or some value where
+    the final multiplied value is higher than ``flush_every``.
+
+    In the future we hope to add the ability to direct batching tasks
+    to a channel with different QoS requirements than the task channel.
+
+**Simple Example**
+
+A click counter that flushes the buffer every 100 messages, and every
+10 seconds.  Does not do anything with the data, but can easily be
+modified to store it in a database.
+
+.. code-block:: python
+
+    # Flush after 100 messages, or 10 seconds.
+    @app.task(base=Batches, flush_every=100, flush_interval=10)
+    def count_click(requests):
+        from collections import Counter
+        count = Counter(request.kwargs['url'] for request in requests)
+        for url, count in count.items():
+            print('>>> Clicks: %s -> %s' % (url, count))
+
+
+Then you can ask for a click to be counted by doing::
+
+    >>> count_click.delay('http://example.com')
+
+**Example returning results**
+
+An interface to the Web of Trust API that flushes the buffer every 100
+messages, and every 10 seconds.
+
+.. code-block:: python
+
+    import requests
+    from urlparse import urlparse
+
+    from celery.contrib.batches import Batches
+
+    wot_api_target = "https://api.mywot.com/0.4/public_link_json"
+
+    @app.task(base=Batches, flush_every=100, flush_interval=10)
+    def wot_api(requests):
+        sig = lambda url: url
+        responses = wot_api_real(
+            (sig(*request.args, **request.kwargs) for request in requests)
+        )
+        # use mark_as_done to manually return response data
+        for response, request in zip(responses, requests):
+            app.backend.mark_as_done(request.id, response)
+
+
+    def wot_api_real(urls):
+        domains = [urlparse(url).netloc for url in urls]
+        response = requests.get(
+            wot_api_target,
+            params={"hosts": ('/').join(set(domains)) + '/'}
+        )
+        return [response.json[domain] for domain in domains]
+
+Using the API is done as follows::
+
+    >>> wot_api.delay('http://example.com')
+
+.. note::
+
+    If you don't have an ``app`` instance then use the current app proxy
+    instead::
+
+        from celery import current_app
+        current_app.backend.mark_as_done(request.id, response)
+
+"""
+from __future__ import absolute_import
+
+from itertools import count
+from Queue import Empty, Queue
+
+from celery.task import Task
+from celery.utils.log import get_logger
+from celery.worker.job import Request
+from celery.utils import noop
+
+logger = get_logger(__name__)
+
+
+def consume_queue(queue):
+    """Iterator yielding all immediately available items in a
+    :class:`Queue.Queue`.
+
+    The iterator stops as soon as the queue raises :exc:`Queue.Empty`.
+
+    *Examples*
+
+        >>> q = Queue()
+        >>> map(q.put, range(4))
+        >>> list(consume_queue(q))
+        [0, 1, 2, 3]
+        >>> list(consume_queue(q))
+        []
+
+    """
+    get = queue.get_nowait
+    while 1:
+        try:
+            yield get()
+        except Empty:
+            break
+
+
+def apply_batches_task(task, args, loglevel, logfile):
+    task.push_request(loglevel=loglevel, logfile=logfile)
+    try:
+        result = task(*args)
+    except Exception, exc:
+        result = None
+        logger.error('Error: %r', exc, exc_info=True)
+    finally:
+        task.pop_request()
+    return result
+
+
+class SimpleRequest(object):
+    """Pickleable request."""
+
+    #: task id
+    id = None
+
+    #: task name
+    name = None
+
+    #: positional arguments
+    args = ()
+
+    #: keyword arguments
+    kwargs = {}
+
+    #: message delivery information.
+ delivery_info = None + + #: worker node name + hostname = None + + def __init__(self, id, name, args, kwargs, delivery_info, hostname): + self.id = id + self.name = name + self.args = args + self.kwargs = kwargs + self.delivery_info = delivery_info + self.hostname = hostname + + @classmethod + def from_request(cls, request): + return cls(request.id, request.name, request.args, + request.kwargs, request.delivery_info, request.hostname) + + +class Batches(Task): + abstract = True + + #: Maximum number of message in buffer. + flush_every = 10 + + #: Timeout in seconds before buffer is flushed anyway. + flush_interval = 30 + + def __init__(self): + self._buffer = Queue() + self._count = count(1).next + self._tref = None + self._pool = None + + def run(self, requests): + raise NotImplementedError('%r must implement run(requests)' % (self, )) + + def Strategy(self, task, app, consumer): + self._pool = consumer.pool + hostname = consumer.hostname + eventer = consumer.event_dispatcher + Req = Request + connection_errors = consumer.connection_errors + timer = consumer.timer + put_buffer = self._buffer.put + flush_buffer = self._do_flush + + def task_message_handler(message, body, ack): + request = Req(body, on_ack=ack, app=app, hostname=hostname, + events=eventer, task=task, + connection_errors=connection_errors, + delivery_info=message.delivery_info) + put_buffer(request) + + if self._tref is None: # first request starts flush timer. + self._tref = timer.apply_interval(self.flush_interval * 1000.0, + flush_buffer) + + if not self._count() % self.flush_every: + flush_buffer() + + return task_message_handler + + def flush(self, requests): + return self.apply_buffer(requests, ([SimpleRequest.from_request(r) + for r in requests], )) + + def _do_flush(self): + logger.debug('Batches: Wake-up to flush buffer...') + requests = None + if self._buffer.qsize(): + requests = list(consume_queue(self._buffer)) + if requests: + logger.debug('Batches: Buffer complete: %s', len(requests)) + self.flush(requests) + if not requests: + logger.debug('Batches: Cancelling timer: Nothing in buffer.') + self._tref.cancel() # cancel timer. + self._tref = None + + def apply_buffer(self, requests, args=(), kwargs={}): + acks_late = [], [] + [acks_late[r.task.acks_late].append(r) for r in requests] + assert requests and (acks_late[True] or acks_late[False]) + + def on_accepted(pid, time_accepted): + [req.acknowledge() for req in acks_late[False]] + + def on_return(result): + [req.acknowledge() for req in acks_late[True]] + + return self._pool.apply_async( + apply_batches_task, + (self, args, 0, None), + accept_callback=on_accepted, + callback=acks_late[True] and on_return or noop, + ) diff --git a/awx/lib/site-packages/celery/contrib/bundles.py b/awx/lib/site-packages/celery/contrib/bundles.py new file mode 100644 index 0000000000..ded9aa7092 --- /dev/null +++ b/awx/lib/site-packages/celery/contrib/bundles.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +""" + celery.contrib.bundles + ~~~~~~~~~~~~~~~~~~~~~~ + + Celery PyPI Bundles. 
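+
+    (Editor's sketch) Each bundle defined below becomes a PyPI
+    distribution that merely pins celery plus one extra requirement,
+    e.g.::
+
+        pip install celery-with-redis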
+ +""" +from __future__ import absolute_import + +from celery import VERSION +from bundle.extensions import Dist + + +defaults = {'author': 'Celery Project', + 'author_email': 'bundles@celeryproject.org', + 'url': 'http://celeryproject.org', + 'license': 'BSD'} +celery = Dist('celery', VERSION, **defaults) +django_celery = Dist('django-celery', VERSION, **defaults) +flask_celery = Dist('Flask-Celery', VERSION, **defaults) + +bundles = [ + celery.Bundle( + 'celery-with-redis', + 'Bundle installing the dependencies for Celery and Redis', + requires=['redis>=2.4.4'], + ), + celery.Bundle( + 'celery-with-mongodb', + 'Bundle installing the dependencies for Celery and MongoDB', + requires=['pymongo'], + ), + celery.Bundle( + 'celery-with-couchdb', + 'Bundle installing the dependencies for Celery and CouchDB', + requires=['couchdb'], + ), + celery.Bundle( + 'celery-with-beanstalk', + 'Bundle installing the dependencies for Celery and Beanstalk', + requires=['beanstalkc'], + ), + + django_celery.Bundle( + 'django-celery-with-redis', + 'Bundle installing the dependencies for Django-Celery and Redis', + requires=['redis>=2.4.4'], + ), + django_celery.Bundle( + 'django-celery-with-mongodb', + 'Bundle installing the dependencies for Django-Celery and MongoDB', + requires=['pymongo'], + ), + django_celery.Bundle( + 'django-celery-with-couchdb', + 'Bundle installing the dependencies for Django-Celery and CouchDB', + requires=['couchdb'], + ), + django_celery.Bundle( + 'django-celery-with-beanstalk', + 'Bundle installing the dependencies for Django-Celery and Beanstalk', + requires=['beanstalkc'], + ), +] diff --git a/awx/lib/site-packages/celery/contrib/methods.py b/awx/lib/site-packages/celery/contrib/methods.py new file mode 100644 index 0000000000..6d771bb56f --- /dev/null +++ b/awx/lib/site-packages/celery/contrib/methods.py @@ -0,0 +1,118 @@ +# -*- coding: utf-8 -*- +""" +celery.contrib.methods +====================== + +Task decorator that supports creating tasks out of methods. + +Examples +-------- + +.. code-block:: python + + from celery.contrib.methods import task + + class X(object): + + @task() + def add(self, x, y): + return x + y + +or with any task decorator: + +.. code-block:: python + + from celery.contrib.methods import task_method + + class X(object): + + @celery.task(filter=task_method) + def add(self, x, y): + return x + y + +.. note:: + + The task must use the new Task base class (:class:`celery.Task`), + and the old base class using classmethods (``celery.task.Task``, + ``celery.task.base.Task``). + + This means that you have to use the task decorator from a Celery app + instance, and not the old-API: + + .. code-block:: python + + + from celery import task # BAD + from celery.task import task # ALSO BAD + + # GOOD: + celery = Celery(...) + + @celery.task(filter=task_method) + def foo(self): pass + + # ALSO GOOD: + from celery import current_app + + @current_app.task(filter=task_method) + def foo(self): pass + +Caveats +------- + +- Automatic naming won't be able to know what the class name is. + + The name will still be module_name + task_name, + so two methods with the same name in the same module will collide + so that only one task can run: + + .. code-block:: python + + class A(object): + + @task() + def add(self, x, y): + return x + y + + class B(object): + + @task() + def add(self, x, y): + return x + y + + would have to be written as: + + .. 
code-block:: python + + class A(object): + @task(name='A.add') + def add(self, x, y): + return x + y + + class B(object): + @task(name='B.add') + def add(self, x, y): + return x + y + +""" + +from __future__ import absolute_import + +from celery import current_app + + +class task_method(object): + + def __init__(self, task, *args, **kwargs): + self.task = task + + def __get__(self, obj, type=None): + if obj is None: + return self.task + task = self.task.__class__() + task.__self__ = obj + return task + + +def task(*args, **kwargs): + return current_app.task(*args, **dict(kwargs, filter=task_method)) diff --git a/awx/lib/site-packages/celery/contrib/migrate.py b/awx/lib/site-packages/celery/contrib/migrate.py new file mode 100644 index 0000000000..9e54979455 --- /dev/null +++ b/awx/lib/site-packages/celery/contrib/migrate.py @@ -0,0 +1,355 @@ +# -*- coding: utf-8 -*- +""" + celery.contrib.migrate + ~~~~~~~~~~~~~~~~~~~~~~ + + Migration tools. + +""" +from __future__ import absolute_import +from __future__ import with_statement + +import socket + +from functools import partial +from itertools import cycle, islice + +from kombu import eventloop, Queue +from kombu.common import maybe_declare +from kombu.exceptions import StdChannelError +from kombu.utils.encoding import ensure_bytes + +from celery.app import app_or_default +from celery.utils import worker_direct + + +class StopFiltering(Exception): + pass + + +class State(object): + count = 0 + filtered = 0 + total_apx = 0 + + @property + def strtotal(self): + if not self.total_apx: + return u'?' + return unicode(self.total_apx) + + def __repr__(self): + if self.filtered: + return '^%s' % self.filtered + return '%s/%s' % (self.count, self.strtotal) + + +def republish(producer, message, exchange=None, routing_key=None, + remove_props=['application_headers', + 'content_type', + 'content_encoding', + 'headers']): + body = ensure_bytes(message.body) # use raw message body. + info, headers, props = (message.delivery_info, + message.headers, message.properties) + exchange = info['exchange'] if exchange is None else exchange + routing_key = info['routing_key'] if routing_key is None else routing_key + ctype, enc = message.content_type, message.content_encoding + # remove compression header, as this will be inserted again + # when the message is recompressed. 
+ compression = headers.pop('compression', None) + + for key in remove_props: + props.pop(key, None) + + producer.publish(ensure_bytes(body), exchange=exchange, + routing_key=routing_key, compression=compression, + headers=headers, content_type=ctype, + content_encoding=enc, **props) + + +def migrate_task(producer, body_, message, queues=None): + info = message.delivery_info + queues = {} if queues is None else queues + republish(producer, message, + exchange=queues.get(info['exchange']), + routing_key=queues.get(info['routing_key'])) + + +def filter_callback(callback, tasks): + + def filtered(body, message): + if tasks and message.payload['task'] not in tasks: + return + + return callback(body, message) + return filtered + + +def migrate_tasks(source, dest, migrate=migrate_task, app=None, + queues=None, **kwargs): + app = app_or_default(app) + queues = prepare_queues(queues) + producer = app.amqp.TaskProducer(dest) + migrate = partial(migrate, producer, queues=queues) + + def on_declare_queue(queue): + new_queue = queue(producer.channel) + new_queue.name = queues.get(queue.name, queue.name) + if new_queue.routing_key == queue.name: + new_queue.routing_key = queues.get(queue.name, + new_queue.routing_key) + if new_queue.exchange.name == queue.name: + new_queue.exchange.name = queues.get(queue.name, queue.name) + new_queue.declare() + + return start_filter(app, source, migrate, queues=queues, + on_declare_queue=on_declare_queue, **kwargs) + + +def _maybe_queue(app, q): + if isinstance(q, basestring): + return app.amqp.queues[q] + return q + + +def move(predicate, connection=None, exchange=None, routing_key=None, + source=None, app=None, callback=None, limit=None, transform=None, + **kwargs): + """Find tasks by filtering them and move the tasks to a new queue. + + :param predicate: Filter function used to decide which messages + to move. Must accept the standard signature of ``(body, message)`` + used by Kombu consumer callbacks. If the predicate wants the message + to be moved it must return either: + + 1) a tuple of ``(exchange, routing_key)``, or + + 2) a :class:`~kombu.entity.Queue` instance, or + + 3) any other true value which means the specified + ``exchange`` and ``routing_key`` arguments will be used. + + :keyword connection: Custom connection to use. + :keyword source: Optional list of source queues to use instead of the + default (which is the queues in :setting:`CELERY_QUEUES`). + This list can also contain new :class:`~kombu.entity.Queue` instances. + :keyword exchange: Default destination exchange. + :keyword routing_key: Default destination routing key. + :keyword limit: Limit number of messages to filter. + :keyword callback: Callback called after message moved, + with signature ``(state, body, message)``. + :keyword transform: Optional function to transform the return + value (destination) of the filter function. + + Also supports the same keyword arguments as :func:`start_filter`. + + To demonstrate, the :func:`move_task_by_id` operation can be implemented + like this: + + .. code-block:: python + + def is_wanted_task(body, message): + if body['id'] == wanted_id: + return Queue('foo', exchange=Exchange('foo'), + routing_key='foo') + + move(is_wanted_task) + + or with a transform: + + .. 
code-block:: python + + def transform(value): + if isinstance(value, basestring): + return Queue(value, Exchange(value), value) + return value + + move(is_wanted_task, transform=transform) + + The predicate may also return a tuple of ``(exchange, routing_key)`` + to specify the destination to where the task should be moved, + or a :class:`~kombu.entitiy.Queue` instance. + Any other true value means that the task will be moved to the + default exchange/routing_key. + + """ + app = app_or_default(app) + queues = [_maybe_queue(app, queue) for queue in source or []] or None + with app.connection_or_acquire(connection, pool=False) as conn: + producer = app.amqp.TaskProducer(conn) + state = State() + + def on_task(body, message): + ret = predicate(body, message) + if ret: + if transform: + ret = transform(ret) + if isinstance(ret, Queue): + maybe_declare(ret, conn.default_channel) + ex, rk = ret.exchange.name, ret.routing_key + else: + ex, rk = expand_dest(ret, exchange, routing_key) + republish(producer, message, + exchange=ex, routing_key=rk) + message.ack() + + state.filtered += 1 + if callback: + callback(state, body, message) + if limit and state.filtered >= limit: + raise StopFiltering() + + return start_filter(app, conn, on_task, consume_from=queues, **kwargs) + + +def expand_dest(ret, exchange, routing_key): + try: + ex, rk = ret + except (TypeError, ValueError): + ex, rk = exchange, routing_key + return ex, rk + + +def task_id_eq(task_id, body, message): + return body['id'] == task_id + + +def task_id_in(ids, body, message): + return body['id'] in ids + + +def prepare_queues(queues): + if isinstance(queues, basestring): + queues = queues.split(',') + if isinstance(queues, list): + queues = dict(tuple(islice(cycle(q.split(':')), None, 2)) + for q in queues) + if queues is None: + queues = {} + return queues + + +def start_filter(app, conn, filter, limit=None, timeout=1.0, + ack_messages=False, tasks=None, queues=None, + callback=None, forever=False, on_declare_queue=None, + consume_from=None, state=None, **kwargs): + state = state or State() + queues = prepare_queues(queues) + if isinstance(tasks, basestring): + tasks = set(tasks.split(',')) + if tasks is None: + tasks = set([]) + + def update_state(body, message): + state.count += 1 + if limit and state.count >= limit: + raise StopFiltering() + + def ack_message(body, message): + message.ack() + + consumer = app.amqp.TaskConsumer(conn, queues=consume_from) + + if tasks: + filter = filter_callback(filter, tasks) + update_state = filter_callback(update_state, tasks) + ack_message = filter_callback(ack_message, tasks) + + consumer.register_callback(filter) + consumer.register_callback(update_state) + if ack_messages: + consumer.register_callback(ack_message) + if callback is not None: + callback = partial(callback, state) + if tasks: + callback = filter_callback(callback, tasks) + consumer.register_callback(callback) + + # declare all queues on the new broker. + for queue in consumer.queues: + if queues and queue.name not in queues: + continue + if on_declare_queue is not None: + on_declare_queue(queue) + try: + _, mcount, _ = queue(consumer.channel).queue_declare(passive=True) + if mcount: + state.total_apx += mcount + except conn.channel_errors + (StdChannelError, ): + pass + + # start migrating messages. 
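+    # (Editor's note) eventloop() below drains `conn` until `timeout`
+    # seconds pass without a message (suppressed when `forever` is set),
+    # or until a callback raises StopFiltering; either way the
+    # accumulated `state` is returned.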
+    with consumer:
+        try:
+            for _ in eventloop(conn,  # pragma: no cover
+                               timeout=timeout, ignore_timeouts=forever):
+                pass
+        except socket.timeout:
+            pass
+        except StopFiltering:
+            pass
+    return state
+
+
+def move_task_by_id(task_id, dest, **kwargs):
+    """Find a task by id and move it to another queue.
+
+    :param task_id: Id of task to move.
+    :param dest: Destination queue.
+
+    Also supports the same keyword arguments as :func:`move`.
+
+    """
+    return move_by_idmap({task_id: dest}, **kwargs)
+
+
+def move_by_idmap(map, **kwargs):
+    """Moves tasks by matching from a ``task_id: queue`` mapping,
+    where ``queue`` is a queue to move the task to.
+
+    Example::
+
+        >>> move_by_idmap({
+        ...     '5bee6e82-f4ac-468e-bd3d-13e8600250bc': Queue(...),
+        ...     'ada8652d-aef3-466b-abd2-becdaf1b82b3': Queue(...),
+        ...     '3a2b140d-7db1-41ba-ac90-c36a0ef4ab1f': Queue(...)},
+        ...     queues=['hipri'])
+
+    """
+    def task_id_in_map(body, message):
+        return map.get(body['id'])
+
+    # adding the limit means that we don't have to consume any more
+    # when we've found everything.
+    return move(task_id_in_map, limit=len(map), **kwargs)
+
+
+def move_by_taskmap(map, **kwargs):
+    """Moves tasks by matching from a ``task_name: queue`` mapping,
+    where ``queue`` is the queue to move the task to.
+
+    Example::
+
+        >>> move_by_taskmap({
+        ...     'tasks.add': Queue(...),
+        ...     'tasks.mul': Queue(...),
+        ... })
+
+    """
+
+    def task_name_in_map(body, message):
+        return map.get(body['task'])  # <- name of task
+
+    return move(task_name_in_map, **kwargs)
+
+
+move_direct = partial(move, transform=worker_direct)
+move_direct_by_id = partial(move_task_by_id, transform=worker_direct)
+move_direct_by_idmap = partial(move_by_idmap, transform=worker_direct)
+move_direct_by_taskmap = partial(move_by_taskmap, transform=worker_direct)
+
+
+def filter_status(state, body, message):
+    print('Moving task %s/%s: %s[%s]' % (
+        state.filtered, state.strtotal, body['task'], body['id']))
diff --git a/awx/lib/site-packages/celery/contrib/rdb.py b/awx/lib/site-packages/celery/contrib/rdb.py
new file mode 100644
index 0000000000..00914ab04b
--- /dev/null
+++ b/awx/lib/site-packages/celery/contrib/rdb.py
@@ -0,0 +1,163 @@
+# -*- coding: utf-8 -*-
+"""
+celery.contrib.rdb
+==================
+
+Remote debugger for Celery tasks running in multiprocessing pool workers.
+Inspired by http://snippets.dzone.com/posts/show/7248
+
+**Usage**
+
+.. code-block:: python
+
+    from celery.contrib import rdb
+    from celery import task
+
+    @task()
+    def add(x, y):
+        result = x + y
+        rdb.set_trace()
+        return result
+
+
+**Environment Variables**
+
+.. envvar:: CELERY_RDB_HOST
+
+    Hostname to bind to.  Default is '127.0.0.1', which means the socket
+    will only be accessible from the local host.
+
+.. envvar:: CELERY_RDB_PORT
+
+    Base port to bind to.  Default is 6899.
+    The debugger will try to find an available port starting from the
+    base port.  The selected port will be logged by the worker.
+
+"""
+from __future__ import absolute_import
+from __future__ import with_statement
+
+import errno
+import os
+import socket
+import sys
+
+from pdb import Pdb
+
+from billiard import current_process
+
+from celery.platforms import ignore_errno
+
+default_port = 6899
+
+CELERY_RDB_HOST = os.environ.get('CELERY_RDB_HOST') or '127.0.0.1'
+CELERY_RDB_PORT = int(os.environ.get('CELERY_RDB_PORT') or default_port)
+
+#: Holds the currently active debugger.
+_current = [None] + +_frame = getattr(sys, '_getframe') + + +class Rdb(Pdb): + me = 'Remote Debugger' + _prev_outs = None + _sock = None + + def __init__(self, host=CELERY_RDB_HOST, port=CELERY_RDB_PORT, + port_search_limit=100, port_skew=+0, out=sys.stdout): + self.active = True + self.out = out + + self._prev_handles = sys.stdin, sys.stdout + + self._sock, this_port = self.get_avail_port( + host, port, port_search_limit, port_skew, + ) + self._sock.setblocking(1) + self._sock.listen(1) + me = '%s:%s' % (self.me, this_port) + context = self.context = {'me': me, 'host': host, 'port': this_port} + self.say('%(me)s: Please telnet %(host)s %(port)s.' + ' Type `exit` in session to continue.' % context) + self.say('%(me)s: Waiting for client...' % context) + + self._client, address = self._sock.accept() + self._client.setblocking(1) + context['remote_addr'] = ':'.join(str(v) for v in address) + self.say('%(me)s: In session with %(remote_addr)s' % context) + self._handle = sys.stdin = sys.stdout = self._client.makefile('rw') + Pdb.__init__(self, completekey='tab', + stdin=self._handle, stdout=self._handle) + + def get_avail_port(self, host, port, search_limit=100, skew=+0): + try: + _, skew = current_process().name.split('-') + skew = int(skew) + except ValueError: + pass + this_port = None + for i in xrange(search_limit): + _sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + this_port = port + skew + i + try: + _sock.bind((host, this_port)) + except socket.error, exc: + if exc.errno in [errno.EADDRINUSE, errno.EINVAL]: + continue + raise + else: + return _sock, this_port + else: + raise Exception( + '%s: Could not find available port. Please set using ' + 'environment variable CELERY_RDB_PORT' % (self.me, )) + + def say(self, m): + self.out.write(m + '\n') + + def _close_session(self): + self.stdin, self.stdout = sys.stdin, sys.stdout = self._prev_handles + self._handle.close() + self._client.close() + self._sock.close() + self.active = False + self.say('%(me)s: Session %(remote_addr)s ended.' % self.context) + + def do_continue(self, arg): + self._close_session() + self.set_continue() + return 1 + do_c = do_cont = do_continue + + def do_quit(self, arg): + self._close_session() + self.set_quit() + return 1 + do_q = do_exit = do_quit + + def set_trace(self, frame=None): + if frame is None: + frame = _frame().f_back + with ignore_errno(errno.ECONNRESET): + Pdb.set_trace(self, frame) + + def set_quit(self): + # this raises a BdbQuit exception that we are unable to catch. + sys.settrace(None) + + +def debugger(): + """Returns the current debugger instance (if any), + or creates a new one.""" + rdb = _current[0] + if rdb is None or not rdb.active: + rdb = _current[0] = Rdb() + return rdb + + +def set_trace(frame=None): + """Set breakpoint at current location, or a specified frame""" + if frame is None: + frame = _frame().f_back + return debugger().set_trace(frame) diff --git a/awx/lib/site-packages/celery/datastructures.py b/awx/lib/site-packages/celery/datastructures.py new file mode 100644 index 0000000000..f3e9c2e2b6 --- /dev/null +++ b/awx/lib/site-packages/celery/datastructures.py @@ -0,0 +1,503 @@ +# -*- coding: utf-8 -*- +""" + celery.datastructures + ~~~~~~~~~~~~~~~~~~~~~ + + Custom types and data structures. 
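+
+    For example, the :class:`DependencyGraph` defined here can be used
+    roughly like this (an illustrative sketch, not from the original
+    docs)::
+
+        >>> graph = DependencyGraph([('C', ['B']), ('B', ['A']), ('A', [])])
+        >>> graph.topsort()
+        ['A', 'B', 'C']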
+
+"""
+from __future__ import absolute_import
+from __future__ import with_statement
+
+import sys
+import time
+
+from collections import defaultdict
+from heapq import heapify, heappush, heappop
+from itertools import chain
+
+try:
+    from collections import Mapping, MutableMapping
+except ImportError:  # pragma: no cover
+    MutableMapping = None  # noqa
+    Mapping = dict  # noqa
+
+from billiard.einfo import ExceptionInfo  # noqa
+from kombu.utils.limits import TokenBucket  # noqa
+
+from .utils.functional import LRUCache, first, uniq  # noqa
+
+
+class CycleError(Exception):
+    """A cycle was detected in an acyclic graph."""
+
+
+class DependencyGraph(object):
+    """A directed acyclic graph of objects and their dependencies.
+
+    Supports a robust topological sort
+    to detect the order in which they must be handled.
+
+    Takes an optional iterator of ``(obj, dependencies)``
+    tuples to build the graph from.
+
+    .. warning::
+
+        Does not support cycle detection.
+
+    """
+
+    def __init__(self, it=None):
+        self.adjacent = {}
+        if it is not None:
+            self.update(it)
+
+    def add_arc(self, obj):
+        """Add an object to the graph."""
+        self.adjacent.setdefault(obj, [])
+
+    def add_edge(self, A, B):
+        """Add an edge from object ``A`` to object ``B``
+        (``A`` depends on ``B``)."""
+        self[A].append(B)
+
+    def topsort(self):
+        """Sort the graph topologically.
+
+        :returns: a list of objects in the order
+            in which they must be handled.
+
+        """
+        graph = DependencyGraph()
+        components = self._tarjan72()
+
+        NC = dict((node, component)
+                  for component in components
+                  for node in component)
+        for component in components:
+            graph.add_arc(component)
+        for node in self:
+            node_c = NC[node]
+            for successor in self[node]:
+                successor_c = NC[successor]
+                if node_c != successor_c:
+                    graph.add_edge(node_c, successor_c)
+        return [t[0] for t in graph._khan62()]
+
+    def valency_of(self, obj):
+        """Returns the valency (degree) of a vertex in the graph."""
+        try:
+            l = [len(self[obj])]
+        except KeyError:
+            return 0
+        for node in self[obj]:
+            l.append(self.valency_of(node))
+        return sum(l)
+
+    def update(self, it):
+        """Update the graph with data from a list
+        of ``(obj, dependencies)`` tuples."""
+        tups = list(it)
+        for obj, _ in tups:
+            self.add_arc(obj)
+        for obj, deps in tups:
+            for dep in deps:
+                self.add_edge(obj, dep)
+
+    def edges(self):
+        """Returns a generator that yields every object in the graph
+        with outgoing edges."""
+        return (obj for obj, adj in self.iteritems() if adj)
+
+    def _khan62(self):
+        """Kahn's simple topological sort algorithm from '62
+
+        See http://en.wikipedia.org/wiki/Topological_sorting
+
+        """
+        count = defaultdict(lambda: 0)
+        result = []
+
+        for node in self:
+            for successor in self[node]:
+                count[successor] += 1
+        ready = [node for node in self if not count[node]]
+
+        while ready:
+            node = ready.pop()
+            result.append(node)
+
+            for successor in self[node]:
+                count[successor] -= 1
+                if count[successor] == 0:
+                    ready.append(successor)
+        result.reverse()
+        return result
+
+    def _tarjan72(self):
+        """Tarjan's algorithm to find strongly connected components.
+
+        See http://bit.ly/vIMv3h.
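+
+        Used by :meth:`topsort` to condense the graph into its strongly
+        connected components before ordering them.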
+ + """ + result, stack, low = [], [], {} + + def visit(node): + if node in low: + return + num = len(low) + low[node] = num + stack_pos = len(stack) + stack.append(node) + + for successor in self[node]: + visit(successor) + low[node] = min(low[node], low[successor]) + + if num == low[node]: + component = tuple(stack[stack_pos:]) + stack[stack_pos:] = [] + result.append(component) + for item in component: + low[item] = len(self) + + for node in self: + visit(node) + + return result + + def to_dot(self, fh, ws=' ' * 4): + """Convert the graph to DOT format. + + :param fh: A file, or a file-like object to write the graph to. + + """ + fh.write('digraph dependencies {\n') + for obj, adjacent in self.iteritems(): + if not adjacent: + fh.write(ws + '"%s"\n' % (obj, )) + for req in adjacent: + fh.write(ws + '"%s" -> "%s"\n' % (obj, req)) + fh.write('}\n') + + def __iter__(self): + return iter(self.adjacent) + + def __getitem__(self, node): + return self.adjacent[node] + + def __len__(self): + return len(self.adjacent) + + def __contains__(self, obj): + return obj in self.adjacent + + def _iterate_items(self): + return self.adjacent.iteritems() + items = iteritems = _iterate_items + + def __repr__(self): + return '\n'.join(self.repr_node(N) for N in self) + + def repr_node(self, obj, level=1): + output = ['%s(%s)' % (obj, self.valency_of(obj))] + if obj in self: + for other in self[obj]: + d = '%s(%s)' % (other, self.valency_of(other)) + output.append(' ' * level + d) + output.extend(self.repr_node(other, level + 1).split('\n')[1:]) + return '\n'.join(output) + + +class AttributeDictMixin(object): + """Adds attribute access to mappings. + + `d.key -> d[key]` + + """ + + def __getattr__(self, k): + """`d.key -> d[key]`""" + try: + return self[k] + except KeyError: + raise AttributeError( + "'%s' object has no attribute '%s'" % (type(self).__name__, k)) + + def __setattr__(self, key, value): + """`d[key] = value -> d.key = value`""" + self[key] = value + + +class AttributeDict(dict, AttributeDictMixin): + """Dict subclass with attribute access.""" + pass + + +class DictAttribute(object): + """Dict interface to attributes. + + `obj[k] -> obj.k` + + """ + obj = None + + def __init__(self, obj): + object.__setattr__(self, 'obj', obj) + + def __getattr__(self, key): + return getattr(self.obj, key) + + def __setattr__(self, key, value): + return setattr(self.obj, key, value) + + def get(self, key, default=None): + try: + return self[key] + except KeyError: + return default + + def setdefault(self, key, default): + try: + return self[key] + except KeyError: + self[key] = default + return default + + def __getitem__(self, key): + try: + return getattr(self.obj, key) + except AttributeError: + raise KeyError(key) + + def __setitem__(self, key, value): + setattr(self.obj, key, value) + + def __contains__(self, key): + return hasattr(self.obj, key) + + def _iterate_keys(self): + return iter(dir(self.obj)) + iterkeys = _iterate_keys + + def __iter__(self): + return self._iterate_keys() + + def _iterate_items(self): + for key in self._iterate_keys(): + yield key, getattr(self.obj, key) + iteritems = _iterate_items + + if sys.version_info[0] == 3: # pragma: no cover + items = _iterate_items + keys = _iterate_keys + else: + + def keys(self): + return list(self) + + def items(self): + return list(self._iterate_items()) + + +class ConfigurationView(AttributeDictMixin): + """A view over an applications configuration dicts. + + If the key does not exist in ``changes``, the ``defaults`` dicts + are consulted. 
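+
+    Example (an illustrative sketch)::
+
+        >>> view = ConfigurationView({'A': 1}, [{'A': 2, 'B': 2}])
+        >>> view['A']   # changes take precedence over defaults
+        1
+        >>> view['B']   # missing keys fall back to the defaults
+        2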
+
+    :param changes: Dict containing changes to the configuration.
+    :param defaults: List of dicts containing the default configuration.
+
+    """
+    changes = None
+    defaults = None
+    _order = None
+
+    def __init__(self, changes, defaults):
+        self.__dict__.update(changes=changes, defaults=defaults,
+                             _order=[changes] + defaults)
+
+    def add_defaults(self, d):
+        if not isinstance(d, Mapping):
+            d = DictAttribute(d)
+        self.defaults.insert(0, d)
+        self._order.insert(1, d)
+
+    def __getitem__(self, key):
+        for d in self._order:
+            try:
+                return d[key]
+            except KeyError:
+                pass
+        raise KeyError(key)
+
+    def __setitem__(self, key, value):
+        self.changes[key] = value
+
+    def first(self, *keys):
+        return first(None, (self.get(key) for key in keys))
+
+    def get(self, key, default=None):
+        try:
+            return self[key]
+        except KeyError:
+            return default
+
+    def setdefault(self, key, default):
+        try:
+            return self[key]
+        except KeyError:
+            self[key] = default
+            return default
+
+    def update(self, *args, **kwargs):
+        return self.changes.update(*args, **kwargs)
+
+    def __contains__(self, key):
+        for d in self._order:
+            if key in d:
+                return True
+        return False
+
+    def __repr__(self):
+        return repr(dict(self.iteritems()))
+
+    def __iter__(self):
+        return self._iterate_keys()
+
+    def __len__(self):
+        # The logic for iterating keys includes uniq(),
+        # so to be safe we count by explicitly iterating
+        return len(self.keys())
+
+    def _iter(self, op):
+        # defaults must be first in the stream, so values in
+        # changes take precedence.
+        return chain(*[op(d) for d in reversed(self._order)])
+
+    def _iterate_keys(self):
+        return uniq(self._iter(lambda d: d))
+    iterkeys = _iterate_keys
+
+    def _iterate_items(self):
+        return ((key, self[key]) for key in self)
+    iteritems = _iterate_items
+
+    def _iterate_values(self):
+        return (self[key] for key in self)
+    itervalues = _iterate_values
+
+    def keys(self):
+        return list(self._iterate_keys())
+
+    def items(self):
+        return list(self._iterate_items())
+
+    def values(self):
+        return list(self._iterate_values())
+if MutableMapping:
+    MutableMapping.register(ConfigurationView)
+
+
+class LimitedSet(object):
+    """Kind-of Set with limitations.
+
+    Good for when you need to test for membership (`a in set`),
+    but the list might become too big, so you want to limit it so it
+    doesn't consume too many resources.
+
+    :keyword maxlen: Maximum number of members before we start
+                     evicting expired members.
+    :keyword expires: Time in seconds, before a membership expires.
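+
+    Example (an illustrative sketch)::
+
+        >>> s = LimitedSet(maxlen=2)
+        >>> for member in ['a', 'b', 'c']:
+        ...     s.add(member)
+        >>> 'a' in s    # oldest member evicted once maxlen was reached
+        False
+        >>> s.first
+        'b'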
+
+    """
+    __slots__ = ('maxlen', 'expires', '_data', '__len__', '_heap')
+
+    def __init__(self, maxlen=None, expires=None, data=None, heap=None):
+        self.maxlen = maxlen
+        self.expires = expires
+        self._data = data or {}
+        self._heap = heap or []
+        self.__len__ = self._data.__len__
+
+    def add(self, value):
+        """Add a new member."""
+        self.purge(1)
+        now = time.time()
+        self._data[value] = now
+        heappush(self._heap, (now, value))
+
+    def __reduce__(self):
+        return self.__class__, (
+            self.maxlen, self.expires, self._data, self._heap,
+        )
+
+    def clear(self):
+        """Remove all members"""
+        self._data.clear()
+        self._heap[:] = []
+
+    def pop_value(self, value):
+        """Remove membership by finding value."""
+        try:
+            itime = self._data[value]
+        except KeyError:
+            return
+        try:
+            # heap entries are stored as ``(timestamp, value)`` by add().
+            self._heap.remove((itime, value))
+        except ValueError:
+            pass
+        self._data.pop(value, None)
+
+    def _expire_item(self):
+        """Hunt down and remove an expired item."""
+        self.purge(1)
+
+    def __contains__(self, value):
+        return value in self._data
+
+    def purge(self, limit=None):
+        H, maxlen = self._heap, self.maxlen
+        if not maxlen:
+            return
+        i = 0
+        while len(self) >= maxlen:
+            if limit and i > limit:
+                break
+            try:
+                item = heappop(H)
+            except IndexError:
+                break
+            if self.expires:
+                if time.time() < item[0] + self.expires:
+                    heappush(H, item)
+                    break
+            self._data.pop(item[1])
+            i += 1
+
+    def update(self, other, heappush=heappush):
+        if isinstance(other, self.__class__):
+            self._data.update(other._data)
+            self._heap.extend(other._heap)
+            heapify(self._heap)
+        else:
+            for obj in other:
+                self.add(obj)
+
+    def as_dict(self):
+        return self._data
+
+    def __iter__(self):
+        return iter(self._data)
+
+    def __repr__(self):
+        return 'LimitedSet(%s)' % (repr(list(self._data))[:100], )
+
+    @property
+    def chronologically(self):
+        return [value for _, value in self._heap]
+
+    @property
+    def first(self):
+        """Get the oldest member."""
+        return self._heap[0][1]
diff --git a/awx/lib/site-packages/celery/events/__init__.py b/awx/lib/site-packages/celery/events/__init__.py
new file mode 100644
index 0000000000..9d053472e1
--- /dev/null
+++ b/awx/lib/site-packages/celery/events/__init__.py
@@ -0,0 +1,286 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.events
+    ~~~~~~~~~~~~~
+
+    Events is a stream of messages sent for certain actions occurring
+    in the worker (and clients if :setting:`CELERY_SEND_TASK_SENT_EVENT`
+    is enabled), used for monitoring purposes.
+
+"""
+from __future__ import absolute_import
+from __future__ import with_statement
+
+import time
+import socket
+import threading
+
+from collections import deque
+from contextlib import contextmanager
+from copy import copy
+
+from kombu import eventloop, Exchange, Queue, Consumer, Producer
+from kombu.utils import cached_property
+
+from celery.app import app_or_default
+from celery.utils import uuid
+
+event_exchange = Exchange('celeryev', type='topic')
+
+
+def get_exchange(conn):
+    ex = copy(event_exchange)
+    if conn.transport.driver_type == 'redis':
+        # quick hack for Issue #436
+        ex.type = 'fanout'
+    return ex
+
+
+def Event(type, _fields=None, **fields):
+    """Create an event.
+
+    An event is a dictionary, the only required field is ``type``.
+
+    """
+    event = dict(_fields or {}, type=type, **fields)
+    if 'timestamp' not in event:
+        event['timestamp'] = time.time()
+    return event
+
+
+class EventDispatcher(object):
+    """Send events as messages.
+
+    :param connection: Connection to the broker.
+ + :keyword hostname: Hostname to identify ourselves as, + by default uses the hostname returned by :func:`socket.gethostname`. + + :keyword enabled: Set to :const:`False` to not actually publish any events, + making :meth:`send` a noop operation. + + :keyword channel: Can be used instead of `connection` to specify + an exact channel to use when sending events. + + :keyword buffer_while_offline: If enabled events will be buffered + while the connection is down. :meth:`flush` must be called + as soon as the connection is re-established. + + You need to :meth:`close` this after use. + + """ + DISABLED_TRANSPORTS = set(['sql']) + + def __init__(self, connection=None, hostname=None, enabled=True, + channel=None, buffer_while_offline=True, app=None, + serializer=None): + self.app = app_or_default(app or self.app) + self.connection = connection + self.channel = channel + self.hostname = hostname or socket.gethostname() + self.buffer_while_offline = buffer_while_offline + self.mutex = threading.Lock() + self.producer = None + self._outbound_buffer = deque() + self.serializer = serializer or self.app.conf.CELERY_EVENT_SERIALIZER + self.on_enabled = set() + self.on_disabled = set() + + self.enabled = enabled + if not connection and channel: + self.connection = channel.connection.client + self.enabled = enabled + conninfo = self.connection or self.app.connection() + self.exchange = get_exchange(conninfo) + if conninfo.transport.driver_type in self.DISABLED_TRANSPORTS: + self.enabled = False + if self.enabled: + self.enable() + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.close() + + def enable(self): + self.producer = Producer(self.channel or self.connection, + exchange=self.exchange, + serializer=self.serializer) + self.enabled = True + for callback in self.on_enabled: + callback() + + def disable(self): + if self.enabled: + self.enabled = False + self.close() + for callback in self.on_disabled: + callback() + + def publish(self, type, fields, producer, retry=False, retry_policy=None): + with self.mutex: + event = Event(type, hostname=self.hostname, + clock=self.app.clock.forward(), **fields) + exchange = self.exchange + producer.publish( + event, + routing_key=type.replace('-', '.'), + exchange=exchange.name, + retry=retry, + retry_policy=retry_policy, + declare=[exchange], + serializer=self.serializer, + ) + + def send(self, type, **fields): + """Send event. + + :param type: Kind of event. + :keyword \*\*fields: Event arguments. + + """ + if self.enabled: + try: + self.publish(type, fields, self.producer) + except Exception, exc: + if not self.buffer_while_offline: + raise + self._outbound_buffer.append((type, fields, exc)) + + def flush(self): + while self._outbound_buffer: + try: + type, fields, _ = self._outbound_buffer.popleft() + except IndexError: + return + self.send(type, **fields) + + def copy_buffer(self, other): + self._outbound_buffer = other._outbound_buffer + + def close(self): + """Close the event dispatcher.""" + self.mutex.locked() and self.mutex.release() + self.producer = None + + def _get_publisher(self): + return self.producer + + def _set_publisher(self, producer): + self.producer = producer + publisher = property(_get_publisher, _set_publisher) # XXX compat + + +class EventReceiver(object): + """Capture events. + + :param connection: Connection to the broker. + :keyword handlers: Event handlers. + + :attr:`handlers` is a dict of event types and their handlers, + the special handler `"*"` captures all events that doesn't have a + handler. 
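+
+    Example (an illustrative sketch; assumes a configured ``app``)::
+
+        def on_event(event):
+            print(event['type'])
+
+        with app.connection() as conn:
+            recv = app.events.Receiver(conn, handlers={'*': on_event})
+            recv.capture(limit=10)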
+ + """ + handlers = {} + + def __init__(self, connection, handlers=None, routing_key='#', + node_id=None, app=None, queue_prefix='celeryev'): + self.app = app_or_default(app) + self.connection = connection + if handlers is not None: + self.handlers = handlers + self.routing_key = routing_key + self.node_id = node_id or uuid() + self.queue_prefix = queue_prefix + self.exchange = get_exchange(self.connection or self.app.connection()) + self.queue = Queue('.'.join([self.queue_prefix, self.node_id]), + exchange=self.exchange, + routing_key=self.routing_key, + auto_delete=True, + durable=False) + + def process(self, type, event): + """Process the received event by dispatching it to the appropriate + handler.""" + handler = self.handlers.get(type) or self.handlers.get('*') + handler and handler(event) + + @contextmanager + def consumer(self, wakeup=True): + """Create event consumer.""" + consumer = Consumer(self.connection, + queues=[self.queue], no_ack=True, + accept=['application/json']) + consumer.register_callback(self._receive) + consumer.consume() + + try: + if wakeup: + self.wakeup_workers(channel=consumer.channel) + yield consumer + finally: + try: + consumer.cancel() + except self.connection.connection_errors: + pass + + def itercapture(self, limit=None, timeout=None, wakeup=True): + with self.consumer(wakeup=wakeup) as consumer: + yield consumer + self.drain_events(limit=limit, timeout=timeout) + + def capture(self, limit=None, timeout=None, wakeup=True): + """Open up a consumer capturing events. + + This has to run in the main process, and it will never + stop unless forced via :exc:`KeyboardInterrupt` or :exc:`SystemExit`. + + """ + list(self.itercapture(limit=limit, timeout=timeout, wakeup=wakeup)) + + def wakeup_workers(self, channel=None): + self.app.control.broadcast('heartbeat', + connection=self.connection, + channel=channel) + + def drain_events(self, **kwargs): + for _ in eventloop(self.connection, **kwargs): + pass + + def _receive(self, body, message): + type = body.pop('type').lower() + clock = body.get('clock') + if clock: + self.app.clock.adjust(clock) + self.process(type, Event(type, body)) + + +class Events(object): + + def __init__(self, app=None): + self.app = app + + @cached_property + def Receiver(self): + return self.app.subclass_with_self(EventReceiver, + reverse='events.Receiver') + + @cached_property + def Dispatcher(self): + return self.app.subclass_with_self(EventDispatcher, + reverse='events.Dispatcher') + + @cached_property + def State(self): + return self.app.subclass_with_self('celery.events.state:State', + reverse='events.State') + + @contextmanager + def default_dispatcher(self, hostname=None, enabled=True, + buffer_while_offline=False): + with self.app.amqp.producer_pool.acquire(block=True) as pub: + with self.Dispatcher(pub.connection, hostname, enabled, + pub.channel, buffer_while_offline) as d: + yield d diff --git a/awx/lib/site-packages/celery/events/cursesmon.py b/awx/lib/site-packages/celery/events/cursesmon.py new file mode 100644 index 0000000000..179c626a5d --- /dev/null +++ b/awx/lib/site-packages/celery/events/cursesmon.py @@ -0,0 +1,527 @@ +# -*- coding: utf-8 -*- +""" + celery.events.cursesmon + ~~~~~~~~~~~~~~~~~~~~~~~ + + Graphical monitor of Celery events using curses. 
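+
+    For example (an illustrative sketch)::
+
+        from celery.events.cursesmon import evtop
+        evtop()   # blocks, drawing the monitor until interrupted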
+ +""" +from __future__ import absolute_import +from __future__ import with_statement + +import curses +import sys +import threading +import time + +from datetime import datetime +from itertools import count +from textwrap import wrap +from math import ceil + +from celery import VERSION_BANNER +from celery import states +from celery.app import app_or_default +from celery.utils.text import abbr, abbrtask + +BORDER_SPACING = 4 +LEFT_BORDER_OFFSET = 3 +UUID_WIDTH = 36 +STATE_WIDTH = 8 +TIMESTAMP_WIDTH = 8 +MIN_WORKER_WIDTH = 15 +MIN_TASK_WIDTH = 16 + +# this module is considered experimental +# we don't care about coverage. + + +class CursesMonitor(object): # pragma: no cover + keymap = {} + win = None + screen_width = None + screen_delay = 10 + selected_task = None + selected_position = 0 + selected_str = 'Selected: ' + foreground = curses.COLOR_BLACK + background = curses.COLOR_WHITE + online_str = 'Workers online: ' + help_title = 'Keys: ' + help = ('j:up k:down i:info t:traceback r:result c:revoke ^c: quit') + greet = 'celeryev %s' % VERSION_BANNER + info_str = 'Info: ' + + def __init__(self, state, keymap=None, app=None): + self.app = app_or_default(app) + self.keymap = keymap or self.keymap + self.state = state + default_keymap = {'J': self.move_selection_down, + 'K': self.move_selection_up, + 'C': self.revoke_selection, + 'T': self.selection_traceback, + 'R': self.selection_result, + 'I': self.selection_info, + 'L': self.selection_rate_limit} + self.keymap = dict(default_keymap, **self.keymap) + + def format_row(self, uuid, task, worker, timestamp, state): + mx = self.display_width + + # include spacing + detail_width = mx - 1 - STATE_WIDTH - 1 - TIMESTAMP_WIDTH + uuid_space = detail_width - 1 - MIN_TASK_WIDTH - 1 - MIN_WORKER_WIDTH + + if uuid_space < UUID_WIDTH: + uuid_width = uuid_space + else: + uuid_width = UUID_WIDTH + + detail_width = detail_width - uuid_width - 1 + task_width = int(ceil(detail_width / 2.0)) + worker_width = detail_width - task_width - 1 + + uuid = abbr(uuid, uuid_width).ljust(uuid_width) + worker = abbr(worker, worker_width).ljust(worker_width) + task = abbrtask(task, task_width).ljust(task_width) + state = abbr(state, STATE_WIDTH).ljust(STATE_WIDTH) + timestamp = timestamp.ljust(TIMESTAMP_WIDTH) + + row = '%s %s %s %s %s ' % (uuid, worker, task, timestamp, state) + if self.screen_width is None: + self.screen_width = len(row[:mx]) + return row[:mx] + + @property + def screen_width(self): + _, mx = self.win.getmaxyx() + return mx + + @property + def screen_height(self): + my, _ = self.win.getmaxyx() + return my + + @property + def display_width(self): + _, mx = self.win.getmaxyx() + return mx - BORDER_SPACING + + @property + def display_height(self): + my, _ = self.win.getmaxyx() + return my - 10 + + @property + def limit(self): + return self.display_height + + def find_position(self): + if not self.tasks: + return 0 + for i, e in enumerate(self.tasks): + if self.selected_task == e[0]: + return i + return 0 + + def move_selection_up(self): + self.move_selection(-1) + + def move_selection_down(self): + self.move_selection(1) + + def move_selection(self, direction=1): + if not self.tasks: + return + pos = self.find_position() + try: + self.selected_task = self.tasks[pos + direction][0] + except IndexError: + self.selected_task = self.tasks[0][0] + + keyalias = {curses.KEY_DOWN: 'J', + curses.KEY_UP: 'K', + curses.KEY_ENTER: 'I'} + + def handle_keypress(self): + try: + key = self.win.getkey().upper() + except: + return + key = self.keyalias.get(key) or key + 
handler = self.keymap.get(key) + if handler is not None: + handler() + + def alert(self, callback, title=None): + self.win.erase() + my, mx = self.win.getmaxyx() + y = blank_line = count(2).next + if title: + self.win.addstr(y(), 3, title, curses.A_BOLD | curses.A_UNDERLINE) + blank_line() + callback(my, mx, y()) + self.win.addstr(my - 1, 0, 'Press any key to continue...', + curses.A_BOLD) + self.win.refresh() + while 1: + try: + return self.win.getkey().upper() + except: + pass + + def selection_rate_limit(self): + if not self.selected_task: + return curses.beep() + task = self.state.tasks[self.selected_task] + if not task.name: + return curses.beep() + + my, mx = self.win.getmaxyx() + r = 'New rate limit: ' + self.win.addstr(my - 2, 3, r, curses.A_BOLD | curses.A_UNDERLINE) + self.win.addstr(my - 2, len(r) + 3, ' ' * (mx - len(r))) + rlimit = self.readline(my - 2, 3 + len(r)) + + if rlimit: + reply = self.app.control.rate_limit(task.name, + rlimit.strip(), reply=True) + self.alert_remote_control_reply(reply) + + def alert_remote_control_reply(self, reply): + + def callback(my, mx, xs): + y = count(xs).next + if not reply: + self.win.addstr( + y(), 3, 'No replies received in 1s deadline.', + curses.A_BOLD + curses.color_pair(2), + ) + return + + for subreply in reply: + curline = y() + + host, response = subreply.items()[0] + host = '%s: ' % host + self.win.addstr(curline, 3, host, curses.A_BOLD) + attr = curses.A_NORMAL + text = '' + if 'error' in response: + text = response['error'] + attr |= curses.color_pair(2) + elif 'ok' in response: + text = response['ok'] + attr |= curses.color_pair(3) + self.win.addstr(curline, 3 + len(host), text, attr) + + return self.alert(callback, 'Remote Control Command Replies') + + def readline(self, x, y): + buffer = str() + curses.echo() + try: + i = 0 + while 1: + ch = self.win.getch(x, y + i) + if ch != -1: + if ch in (10, curses.KEY_ENTER): # enter + break + if ch in (27, ): + buffer = str() + break + buffer += chr(ch) + i += 1 + finally: + curses.noecho() + return buffer + + def revoke_selection(self): + if not self.selected_task: + return curses.beep() + reply = self.app.control.revoke(self.selected_task, reply=True) + self.alert_remote_control_reply(reply) + + def selection_info(self): + if not self.selected_task: + return + + def alert_callback(mx, my, xs): + my, mx = self.win.getmaxyx() + y = count(xs).next + task = self.state.tasks[self.selected_task] + info = task.info(extra=['state']) + infoitems = [('args', info.pop('args', None)), + ('kwargs', info.pop('kwargs', None))] + info.items() + for key, value in infoitems: + if key is None: + continue + value = str(value) + curline = y() + keys = key + ': ' + self.win.addstr(curline, 3, keys, curses.A_BOLD) + wrapped = wrap(value, mx - 2) + if len(wrapped) == 1: + self.win.addstr( + curline, len(keys) + 3, + abbr(wrapped[0], + self.screen_width - (len(keys) + 3))) + else: + for subline in wrapped: + nexty = y() + if nexty >= my - 1: + subline = ' ' * 4 + '[...]' + elif nexty >= my: + break + self.win.addstr( + nexty, 3, + abbr(' ' * 4 + subline, self.screen_width - 4), + curses.A_NORMAL, + ) + + return self.alert( + alert_callback, 'Task details for %s' % self.selected_task, + ) + + def selection_traceback(self): + if not self.selected_task: + return curses.beep() + task = self.state.tasks[self.selected_task] + if task.state not in states.EXCEPTION_STATES: + return curses.beep() + + def alert_callback(my, mx, xs): + y = count(xs).next + for line in task.traceback.split('\n'): + self.win.addstr(y(), 
3, line) + + return self.alert( + alert_callback, + 'Task Exception Traceback for %s' % self.selected_task, + ) + + def selection_result(self): + if not self.selected_task: + return + + def alert_callback(my, mx, xs): + y = count(xs).next + task = self.state.tasks[self.selected_task] + result = (getattr(task, 'result', None) + or getattr(task, 'exception', None)) + for line in wrap(result, mx - 2): + self.win.addstr(y(), 3, line) + + return self.alert( + alert_callback, 'Task Result for %s' % self.selected_task, + ) + + def display_task_row(self, lineno, task): + state_color = self.state_colors.get(task.state) + attr = curses.A_NORMAL + if task.uuid == self.selected_task: + attr = curses.A_STANDOUT + timestamp = datetime.utcfromtimestamp( + task.timestamp or time.time(), + ) + timef = timestamp.strftime('%H:%M:%S') + hostname = task.worker.hostname if task.worker else '*NONE*' + line = self.format_row(task.uuid, task.name, + hostname, + timef, task.state) + self.win.addstr(lineno, LEFT_BORDER_OFFSET, line, attr) + + if state_color: + self.win.addstr(lineno, + len(line) - STATE_WIDTH + BORDER_SPACING - 1, + task.state, state_color | attr) + + def draw(self): + win = self.win + self.handle_keypress() + x = LEFT_BORDER_OFFSET + y = blank_line = count(2).next + my, mx = win.getmaxyx() + win.erase() + win.bkgd(' ', curses.color_pair(1)) + win.border() + win.addstr(1, x, self.greet, curses.A_DIM | curses.color_pair(5)) + blank_line() + win.addstr(y(), x, self.format_row('UUID', 'TASK', + 'WORKER', 'TIME', 'STATE'), + curses.A_BOLD | curses.A_UNDERLINE) + tasks = self.tasks + if tasks: + for row, (uuid, task) in enumerate(tasks): + if row > self.display_height: + break + + if task.uuid: + lineno = y() + self.display_task_row(lineno, task) + + # -- Footer + blank_line() + win.hline(my - 6, x, curses.ACS_HLINE, self.screen_width - 4) + + # Selected Task Info + if self.selected_task: + win.addstr(my - 5, x, self.selected_str, curses.A_BOLD) + info = 'Missing extended info' + detail = '' + try: + selection = self.state.tasks[self.selected_task] + except KeyError: + pass + else: + info = selection.info() + if 'runtime' in info: + info['runtime'] = '%.2fs' % info['runtime'] + if 'result' in info: + info['result'] = abbr(info['result'], 16) + info = ' '.join( + '%s=%s' % (key, value) for key, value in info.items()) + detail = '... 
-> key i' + infowin = abbr(info, + self.screen_width - len(self.selected_str) - 2, + detail) + win.addstr(my - 5, x + len(self.selected_str), infowin) + # Make ellipsis bold + if detail in infowin: + detailpos = len(infowin) - len(detail) + win.addstr(my - 5, x + len(self.selected_str) + detailpos, + detail, curses.A_BOLD) + else: + win.addstr(my - 5, x, 'No task selected', curses.A_NORMAL) + + # Workers + if self.workers: + win.addstr(my - 4, x, self.online_str, curses.A_BOLD) + win.addstr(my - 4, x + len(self.online_str), + ', '.join(sorted(self.workers)), curses.A_NORMAL) + else: + win.addstr(my - 4, x, 'No workers discovered.') + + # Info + win.addstr(my - 3, x, self.info_str, curses.A_BOLD) + win.addstr( + my - 3, x + len(self.info_str), + 'events:%s tasks:%s workers:%s/%s' % ( + self.state.event_count, self.state.task_count, + len([w for w in self.state.workers.values() + if w.alive]), + len(self.state.workers)), + curses.A_DIM, + ) + + # Help + self.safe_add_str(my - 2, x, self.help_title, curses.A_BOLD) + self.safe_add_str(my - 2, x + len(self.help_title), self.help, + curses.A_DIM) + win.refresh() + + def safe_add_str(self, y, x, string, *args, **kwargs): + if x + len(string) > self.screen_width: + string = string[:self.screen_width - x] + self.win.addstr(y, x, string, *args, **kwargs) + + def init_screen(self): + self.win = curses.initscr() + self.win.nodelay(True) + self.win.keypad(True) + curses.start_color() + curses.init_pair(1, self.foreground, self.background) + # exception states + curses.init_pair(2, curses.COLOR_RED, self.background) + # successful state + curses.init_pair(3, curses.COLOR_GREEN, self.background) + # revoked state + curses.init_pair(4, curses.COLOR_MAGENTA, self.background) + # greeting + curses.init_pair(5, curses.COLOR_BLUE, self.background) + # started state + curses.init_pair(6, curses.COLOR_YELLOW, self.foreground) + + self.state_colors = {states.SUCCESS: curses.color_pair(3), + states.REVOKED: curses.color_pair(4), + states.STARTED: curses.color_pair(6)} + for state in states.EXCEPTION_STATES: + self.state_colors[state] = curses.color_pair(2) + + curses.cbreak() + + def resetscreen(self): + curses.nocbreak() + self.win.keypad(False) + curses.echo() + curses.endwin() + + def nap(self): + curses.napms(self.screen_delay) + + @property + def tasks(self): + return self.state.tasks_by_timestamp()[:self.limit] + + @property + def workers(self): + return [hostname for hostname, w in self.state.workers.items() + if w.alive] + + +class DisplayThread(threading.Thread): # pragma: no cover + + def __init__(self, display): + self.display = display + self.shutdown = False + threading.Thread.__init__(self) + + def run(self): + while not self.shutdown: + self.display.draw() + self.display.nap() + + +def capture_events(app, state, display): # pragma: no cover + + def on_connection_error(exc, interval): + sys.stderr.write('Connection Error: %r. Retry in %ss.' 
% ( + exc, interval)) + + while 1: + sys.stderr.write('-> evtop: starting capture...\n') + with app.connection() as conn: + try: + conn.ensure_connection(on_connection_error, + app.conf.BROKER_CONNECTION_MAX_RETRIES) + recv = app.events.Receiver(conn, handlers={'*': state.event}) + display.resetscreen() + display.init_screen() + with recv.consumer(): + recv.drain_events(timeout=1, ignore_timeouts=True) + except (conn.connection_errors, conn.channel_errors), exc: + sys.stderr.write('Connection lost: %r' % (exc, )) + + +def evtop(app=None): # pragma: no cover + app = app_or_default(app) + state = app.events.State() + display = CursesMonitor(state, app=app) + display.init_screen() + refresher = DisplayThread(display) + refresher.start() + try: + capture_events(app, state, display) + except Exception: + refresher.shutdown = True + refresher.join() + display.resetscreen() + raise + except (KeyboardInterrupt, SystemExit): + refresher.shutdown = True + refresher.join() + display.resetscreen() + + +if __name__ == '__main__': # pragma: no cover + evtop() diff --git a/awx/lib/site-packages/celery/events/dumper.py b/awx/lib/site-packages/celery/events/dumper.py new file mode 100644 index 0000000000..02b5f1ade5 --- /dev/null +++ b/awx/lib/site-packages/celery/events/dumper.py @@ -0,0 +1,102 @@ +# -*- coding: utf-8 -*- +""" + celery.events.dumper + ~~~~~~~~~~~~~~~~~~~~ + + This is a simple program that dumps events to the console + as they happen. Think of it like a `tcpdump` for Celery events. + +""" +from __future__ import absolute_import + +import sys + +from datetime import datetime + +from celery.app import app_or_default +from celery.datastructures import LRUCache +from celery.utils.timeutils import humanize_seconds + +TASK_NAMES = LRUCache(limit=0xFFF) + +HUMAN_TYPES = {'worker-offline': 'shutdown', + 'worker-online': 'started', + 'worker-heartbeat': 'heartbeat'} + +CONNECTION_ERROR = """\ +-> Cannot connect to %s: %s. 
+Trying again %s +""" + + +def humanize_type(type): + try: + return HUMAN_TYPES[type.lower()] + except KeyError: + return type.lower().replace('-', ' ') + + +def say(msg, out=sys.stdout): + out.write(msg + '\n') + + +class Dumper(object): + + def __init__(self, out=sys.stdout): + self.out = out + + def say(self, msg): + say(msg, out=self.out) + + def on_event(self, ev): + timestamp = datetime.utcfromtimestamp(ev.pop('timestamp')) + type = ev.pop('type').lower() + hostname = ev.pop('hostname') + if type.startswith('task-'): + uuid = ev.pop('uuid') + if type in ('task-received', 'task-sent'): + task = TASK_NAMES[uuid] = '%s(%s) args=%s kwargs=%s' % ( + ev.pop('name'), uuid, + ev.pop('args'), + ev.pop('kwargs')) + else: + task = TASK_NAMES.get(uuid, '') + return self.format_task_event(hostname, timestamp, + type, task, ev) + fields = ', '.join('%s=%s' % (key, ev[key]) for key in sorted(ev)) + sep = fields and ':' or '' + self.say('%s [%s] %s%s %s' % (hostname, timestamp, + humanize_type(type), sep, fields)) + + def format_task_event(self, hostname, timestamp, type, task, ev): + fields = ', '.join('%s=%s' % (key, ev[key]) for key in sorted(ev)) + sep = fields and ':' or '' + self.say('%s [%s] %s%s %s %s' % ( + hostname, timestamp, humanize_type(type), sep, task, fields, + )) + + +def evdump(app=None, out=sys.stdout): + app = app_or_default(app) + dumper = Dumper(out=out) + dumper.say('-> evdump: starting capture...') + conn = app.connection() + + def _error_handler(exc, interval): + dumper.say(CONNECTION_ERROR % ( + conn.as_uri(), exc, humanize_seconds(interval, 'in', ' ') + )) + + while 1: + try: + conn = conn.clone() + conn.ensure_connection(_error_handler) + recv = app.events.Receiver(conn, handlers={'*': dumper.on_event}) + recv.capture() + except (KeyboardInterrupt, SystemExit): + return conn and conn.close() + except conn.connection_errors + conn.channel_errors: + dumper.say('-> Connection lost, attempting reconnect') + +if __name__ == '__main__': # pragma: no cover + evdump() diff --git a/awx/lib/site-packages/celery/events/snapshot.py b/awx/lib/site-packages/celery/events/snapshot.py new file mode 100644 index 0000000000..9839f24d77 --- /dev/null +++ b/awx/lib/site-packages/celery/events/snapshot.py @@ -0,0 +1,113 @@ +# -*- coding: utf-8 -*- +""" + celery.events.snapshot + ~~~~~~~~~~~~~~~~~~~~~~ + + Consuming the events as a stream is not always suitable + so this module implements a system to take snapshots of the + state of a cluster at regular intervals. There is a full + implementation of this writing the snapshots to a database + in :mod:`djcelery.snapshots` in the `django-celery` distribution. 
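+
+    For example (an illustrative sketch; assumes a configured ``app``)::
+
+        state = app.events.State()
+        with Polaroid(state, freq=2.0) as cam:
+            pass    # consume events here; a snapshot fires every 2s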
+ +""" +from __future__ import absolute_import + +from kombu.utils.limits import TokenBucket + +from celery import platforms +from celery.app import app_or_default +from celery.utils import timer2 +from celery.utils.dispatch import Signal +from celery.utils.imports import instantiate +from celery.utils.log import get_logger +from celery.utils.timeutils import rate + +logger = get_logger('celery.evcam') + + +class Polaroid(object): + timer = timer2 + shutter_signal = Signal(providing_args=('state', )) + cleanup_signal = Signal() + clear_after = False + + _tref = None + _ctref = None + + def __init__(self, state, freq=1.0, maxrate=None, + cleanup_freq=3600.0, timer=None, app=None): + self.app = app_or_default(app) + self.state = state + self.freq = freq + self.cleanup_freq = cleanup_freq + self.timer = timer or self.timer + self.logger = logger + self.maxrate = maxrate and TokenBucket(rate(maxrate)) + + def install(self): + self._tref = self.timer.apply_interval(self.freq * 1000.0, + self.capture) + self._ctref = self.timer.apply_interval(self.cleanup_freq * 1000.0, + self.cleanup) + + def on_shutter(self, state): + pass + + def on_cleanup(self): + pass + + def cleanup(self): + logger.debug('Cleanup: Running...') + self.cleanup_signal.send(None) + self.on_cleanup() + + def shutter(self): + if self.maxrate is None or self.maxrate.can_consume(): + logger.debug('Shutter: %s', self.state) + self.shutter_signal.send(self.state) + self.on_shutter(self.state) + + def capture(self): + self.state.freeze_while(self.shutter, clear_after=self.clear_after) + + def cancel(self): + if self._tref: + self._tref() # flush all received events. + self._tref.cancel() + if self._ctref: + self._ctref.cancel() + + def __enter__(self): + self.install() + return self + + def __exit__(self, *exc_info): + self.cancel() + + +def evcam(camera, freq=1.0, maxrate=None, loglevel=0, + logfile=None, pidfile=None, timer=None, app=None): + app = app_or_default(app) + + if pidfile: + platforms.create_pidlock(pidfile) + + app.log.setup_logging_subsystem(loglevel, logfile) + + logger.info( + '-> evcam: Taking snapshots with %s (every %s secs.)\n' % ( + camera, freq)) + state = app.events.State() + cam = instantiate(camera, state, app=app, freq=freq, + maxrate=maxrate, timer=timer) + cam.install() + conn = app.connection() + recv = app.events.Receiver(conn, handlers={'*': state.event}) + try: + try: + recv.capture(limit=None) + except KeyboardInterrupt: + raise SystemExit + finally: + cam.cancel() + conn.close() diff --git a/awx/lib/site-packages/celery/events/state.py b/awx/lib/site-packages/celery/events/state.py new file mode 100644 index 0000000000..0afbec5383 --- /dev/null +++ b/awx/lib/site-packages/celery/events/state.py @@ -0,0 +1,369 @@ +# -*- coding: utf-8 -*- +""" + celery.events.state + ~~~~~~~~~~~~~~~~~~~ + + This module implements a datastructure used to keep + track of the state of a cluster of workers and the tasks + it is working on (by consuming events). + + For every event consumed the state is updated, + so the state represents the state of the cluster + at the time of the last event. + + Snapshots (:mod:`celery.events.snapshot`) can be used to + take "pictures" of this state at regular intervals + to e.g. store that in a database. 
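+
+    For example (an illustrative sketch; assumes a configured ``app``)::
+
+        state = app.events.State()
+        with app.connection() as conn:
+            recv = app.events.Receiver(conn, handlers={'*': state.event})
+            recv.capture(limit=100)
+        print(state.tasks_by_timestamp())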
+
+"""
+from __future__ import absolute_import
+from __future__ import with_statement
+
+import heapq
+import threading
+
+from time import time
+
+from kombu.utils import kwdict
+
+from celery import states
+from celery.datastructures import AttributeDict, LRUCache
+
+# The window (in percentage) is added to the worker's heartbeat
+# frequency. If the time between updates exceeds this window,
+# then the worker is considered to be offline.
+HEARTBEAT_EXPIRE_WINDOW = 200
+
+
+def heartbeat_expires(timestamp, freq=60,
+                      expire_window=HEARTBEAT_EXPIRE_WINDOW):
+    return timestamp + freq * (expire_window / 1e2)
+
+
+class Element(AttributeDict):
+    """Base class for worker state elements."""
+
+
+class Worker(Element):
+    """Worker State."""
+    heartbeat_max = 4
+    expire_window = HEARTBEAT_EXPIRE_WINDOW
+
+    def __init__(self, **fields):
+        fields.setdefault('freq', 60)
+        super(Worker, self).__init__(**fields)
+        self.heartbeats = []
+
+    def on_online(self, timestamp=None, **kwargs):
+        """Callback for the :event:`worker-online` event."""
+        self.update(**kwargs)
+        self._heartpush(timestamp)
+
+    def on_offline(self, **kwargs):
+        """Callback for the :event:`worker-offline` event."""
+        self.update(**kwargs)
+        self.heartbeats = []
+
+    def on_heartbeat(self, timestamp=None, **kwargs):
+        """Callback for the :event:`worker-heartbeat` event."""
+        self.update(**kwargs)
+        self._heartpush(timestamp)
+
+    def _heartpush(self, timestamp):
+        if timestamp:
+            heapq.heappush(self.heartbeats, timestamp)
+            if len(self.heartbeats) > self.heartbeat_max:
+                self.heartbeats = self.heartbeats[self.heartbeat_max:]
+
+    def __repr__(self):
+        return '<Task: %s(%s) %s>' % (self.name, self.uuid, self.state)
+
+    @property
+    def ready(self):
+        return self.state in states.READY_STATES
+
+
+class State(object):
+    """Records the cluster's state."""
+    event_count = 0
+    task_count = 0
+
+    def __init__(self, callback=None,
+                 max_workers_in_memory=5000, max_tasks_in_memory=10000):
+        self.workers = LRUCache(limit=max_workers_in_memory)
+        self.tasks = LRUCache(limit=max_tasks_in_memory)
+        self.event_callback = callback
+        self._mutex = threading.Lock()
+
+    def freeze_while(self, fun, *args, **kwargs):
+        clear_after = kwargs.pop('clear_after', False)
+        with self._mutex:
+            try:
+                return fun(*args, **kwargs)
+            finally:
+                if clear_after:
+                    self._clear()
+
+    def clear_tasks(self, ready=True):
+        with self._mutex:
+            return self._clear_tasks(ready)
+
+    def _clear_tasks(self, ready=True):
+        if ready:
+            in_progress = dict(
+                (uuid, task) for uuid, task in self.itertasks()
+                if task.state not in states.READY_STATES)
+            self.tasks.clear()
+            self.tasks.update(in_progress)
+        else:
+            self.tasks.clear()
+
+    def _clear(self, ready=True):
+        self.workers.clear()
+        self._clear_tasks(ready)
+        self.event_count = 0
+        self.task_count = 0
+
+    def clear(self, ready=True):
+        with self._mutex:
+            return self._clear(ready)
+
+    def get_or_create_worker(self, hostname, **kwargs):
+        """Get or create worker by hostname."""
+        try:
+            worker = self.workers[hostname]
+            worker.update(kwargs)
+        except KeyError:
+            worker = self.workers[hostname] = Worker(
+                hostname=hostname, **kwargs)
+        return worker
+
+    def get_or_create_task(self, uuid):
+        """Get or create task by uuid."""
+        try:
+            return self.tasks[uuid]
+        except KeyError:
+            task = self.tasks[uuid] = Task(uuid=uuid)
+            return task
+
+    def worker_event(self, type, fields):
+        """Process worker event."""
+        hostname = fields.pop('hostname', None)
+        if hostname:
+            worker = self.get_or_create_worker(hostname)
+            handler = getattr(worker, 'on_%s' % type, None)
+            if handler:
+                handler(**fields)
+
+    def task_event(self, type, fields):
+        """Process task event."""
+        uuid = fields['uuid']
+        hostname = fields['hostname']
+        worker = self.get_or_create_worker(hostname)
+        task = self.get_or_create_task(uuid)
+        handler = getattr(task, 'on_' + type, None)
+        if type == 'received':
+            self.task_count += 1
+        if handler:
+            handler(**fields)
+        else:
+            task.on_unknown_event(type, **fields)
+        task.worker = worker
+
+    def event(self, event):
+        with self._mutex:
+            return self._dispatch_event(event)
+
+    def _dispatch_event(self, event):
+        self.event_count += 1
+        event = kwdict(event)
+        group, _, subject = event['type'].partition('-')
+        getattr(self, group + '_event')(subject, event)
+        if self.event_callback:
+            self.event_callback(self, event)
+
+    def itertasks(self, limit=None):
+        for index, row in enumerate(self.tasks.iteritems()):
+            yield row
+            if limit and index + 1 >= limit:
+                break
+
+    def tasks_by_timestamp(self, limit=None):
+        """Get tasks by timestamp.
+
+        Returns a list of `(uuid, task)` tuples.
+
+        """
+        return self._sort_tasks_by_time(self.itertasks(limit))
+
+    def _sort_tasks_by_time(self, tasks):
+        """Sort task items by time."""
+        return sorted(tasks, key=lambda t: t[1].timestamp,
+                      reverse=True)
+
+    def tasks_by_type(self, name, limit=None):
+        """Get all tasks by type.
+
+        Returns a list of `(uuid, task)` tuples.
+
+        """
+        sorted_tasks = self._sort_tasks_by_time(
+            (uuid, task) for uuid, task in self.tasks.iteritems()
+            if task.name == name)
+
+        return sorted_tasks[0:limit or None]
+
+    def tasks_by_worker(self, hostname, limit=None):
+        """Get all tasks by worker.
+
+        Returns a list of `(uuid, task)` tuples.
+
+        """
+        return self._sort_tasks_by_time(
+            (uuid, task) for uuid, task in self.itertasks(limit)
+            if task.worker.hostname == hostname)
+
+    def task_types(self):
+        """Returns a list of all seen task types."""
+        return list(sorted(set(task.name for task in self.tasks.itervalues())))
+
+    def alive_workers(self):
+        """Returns a list of (seemingly) alive workers."""
+        return [w for w in self.workers.values() if w.alive]
+
+    def __repr__(self):
+        return '<State: events=%s tasks=%s>' % (self.event_count,
+                                                self.task_count)
+
+    def __getstate__(self):
+        d = dict(vars(self))
+        d.pop('_mutex')
+        return d
+
+    def __setstate__(self, state):
+        self.__dict__ = state
+        self._mutex = threading.Lock()
+
+
+state = State()
diff --git a/awx/lib/site-packages/celery/exceptions.py b/awx/lib/site-packages/celery/exceptions.py
new file mode 100644
index 0000000000..cd5dc66a01
--- /dev/null
+++ b/awx/lib/site-packages/celery/exceptions.py
@@ -0,0 +1,130 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.exceptions
+    ~~~~~~~~~~~~~~~~~
+
+    This module contains all exceptions used by the Celery API.
+
+"""
+from __future__ import absolute_import
+
+from billiard.exceptions import (  # noqa
+    SoftTimeLimitExceeded, TimeLimitExceeded, WorkerLostError, Terminated,
+)
+
+UNREGISTERED_FMT = """\
+Task of kind %s is not registered, please make sure it's imported.\
+"""
+
+
+class SecurityError(Exception):
+    """Security related exceptions.
+
+    Handle with care.
+ + """ + + +class Ignore(Exception): + """A task can raise this to ignore doing state updates.""" + + +class SystemTerminate(SystemExit): + """Signals that the worker should terminate.""" + + +class QueueNotFound(KeyError): + """Task routed to a queue not in CELERY_QUEUES.""" + + +class ImproperlyConfigured(ImportError): + """Celery is somehow improperly configured.""" + + +class NotRegistered(KeyError): + """The task is not registered.""" + + def __repr__(self): + return UNREGISTERED_FMT % str(self) + + +class AlreadyRegistered(Exception): + """The task is already registered.""" + + +class TimeoutError(Exception): + """The operation timed out.""" + + +class MaxRetriesExceededError(Exception): + """The tasks max restart limit has been exceeded.""" + + +class RetryTaskError(Exception): + """The task is to be retried later.""" + + #: Optional message describing context of retry. + message = None + + #: Exception (if any) that caused the retry to happen. + exc = None + + #: Time of retry (ETA), either int or :class:`~datetime.datetime`. + when = None + + def __init__(self, message=None, exc=None, when=None, **kwargs): + from kombu.utils.encoding import safe_repr + self.message = message + if isinstance(exc, basestring): + self.exc, self.excs = None, exc + else: + self.exc, self.excs = exc, safe_repr(exc) if exc else None + self.when = when + Exception.__init__(self, exc, when, **kwargs) + + def humanize(self): + if isinstance(self.when, int): + return 'in %ss' % self.when + return 'at %s' % (self.when, ) + + def __str__(self): + if self.message: + return self.message + if self.excs: + return 'Retry %s: %r' % (self.humanize(), self.excs) + return 'Retry %s' % self.humanize() + + def __reduce__(self): + return self.__class__, (self.message, self.excs, self.when) + + +class TaskRevokedError(Exception): + """The task has been revoked, so no result available.""" + + +class NotConfigured(UserWarning): + """Celery has not been configured, as no config module has been found.""" + + +class AlwaysEagerIgnored(UserWarning): + """send_task ignores CELERY_ALWAYS_EAGER option""" + + +class InvalidTaskError(Exception): + """The task has invalid data or is not properly constructed.""" + + +class CPendingDeprecationWarning(PendingDeprecationWarning): + pass + + +class CDeprecationWarning(DeprecationWarning): + pass + + +class IncompleteStream(Exception): + """Found the end of a stream of data, but the data is not yet complete.""" + + +class ChordError(Exception): + """A task part of the chord raised an exception.""" diff --git a/awx/lib/site-packages/celery/loaders/__init__.py b/awx/lib/site-packages/celery/loaders/__init__.py new file mode 100644 index 0000000000..6f8aea72e9 --- /dev/null +++ b/awx/lib/site-packages/celery/loaders/__init__.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +""" + celery.loaders + ~~~~~~~~~~~~~~ + + Loaders define how configuration is read, what happens + when workers start, when tasks are executed and so on. 
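+
+    For example (an illustrative sketch)::
+
+        from celery.loaders import get_loader_cls
+        Loader = get_loader_cls('default')  # celery.loaders.default:Loader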
+ +""" +from __future__ import absolute_import + +from celery._state import current_app +from celery.utils import deprecated +from celery.utils.imports import symbol_by_name + +LOADER_ALIASES = {'app': 'celery.loaders.app:AppLoader', + 'default': 'celery.loaders.default:Loader', + 'django': 'djcelery.loaders:DjangoLoader'} + + +def get_loader_cls(loader): + """Get loader class by name/alias""" + return symbol_by_name(loader, LOADER_ALIASES) + + +@deprecated(deprecation='2.5', removal='4.0', + alternative='celery.current_app.loader') +def current_loader(): + return current_app.loader + + +@deprecated(deprecation='2.5', removal='4.0', + alternative='celery.current_app.conf') +def load_settings(): + return current_app.conf diff --git a/awx/lib/site-packages/celery/loaders/app.py b/awx/lib/site-packages/celery/loaders/app.py new file mode 100644 index 0000000000..54f6853a0e --- /dev/null +++ b/awx/lib/site-packages/celery/loaders/app.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +""" + celery.loaders.app + ~~~~~~~~~~~~~~~~~~ + + The default loader used with custom app instances. + +""" +from __future__ import absolute_import + +from .base import BaseLoader + + +class AppLoader(BaseLoader): + pass diff --git a/awx/lib/site-packages/celery/loaders/base.py b/awx/lib/site-packages/celery/loaders/base.py new file mode 100644 index 0000000000..1c3abd467a --- /dev/null +++ b/awx/lib/site-packages/celery/loaders/base.py @@ -0,0 +1,256 @@ +# -*- coding: utf-8 -*- +""" + celery.loaders.base + ~~~~~~~~~~~~~~~~~~~ + + Loader base class. + +""" +from __future__ import absolute_import + +import anyjson +import importlib +import os +import re +import sys + +from datetime import datetime + +from kombu.utils import cached_property +from kombu.utils.encoding import safe_str + +from celery.datastructures import DictAttribute +from celery.exceptions import ImproperlyConfigured +from celery.utils.imports import ( + import_from_cwd, symbol_by_name, NotAPackage, find_module, +) +from celery.utils.functional import maybe_list + +ERROR_ENVVAR_NOT_SET = """\ +The environment variable %r is not set, +and as such the configuration could not be loaded. +Please set this variable and make it point to +a configuration module.""" + +CONFIG_INVALID_NAME = """ +Error: Module '%(module)s' doesn't exist, or it's not a valid \ +Python module name. +""" + +CONFIG_WITH_SUFFIX = CONFIG_INVALID_NAME + """ +Did you mean '%(suggest)s'? +""" + + +class BaseLoader(object): + """The base class for loaders. + + Loaders handles, + + * Reading celery client/worker configurations. + + * What happens when a task starts? + See :meth:`on_task_init`. + + * What happens when the worker starts? + See :meth:`on_worker_init`. + + * What happens when the worker shuts down? + See :meth:`on_worker_shutdown`. + + * What modules are imported to find tasks? 
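+
+    Subclasses answer these questions by overriding the ``on_*``
+    callbacks and, where needed, :meth:`read_configuration`.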
+ + """ + builtin_modules = frozenset() + configured = False + error_envvar_not_set = ERROR_ENVVAR_NOT_SET + override_backends = {} + worker_initialized = False + + _conf = None + + def __init__(self, app=None, **kwargs): + from celery.app import app_or_default + self.app = app_or_default(app) + self.task_modules = set() + + def now(self, utc=True): + if utc: + return datetime.utcnow() + return datetime.now() + + def on_task_init(self, task_id, task): + """This method is called before a task is executed.""" + pass + + def on_process_cleanup(self): + """This method is called after a task is executed.""" + pass + + def on_worker_init(self): + """This method is called when the worker (:program:`celery worker`) + starts.""" + pass + + def on_worker_shutdown(self): + """This method is called when the worker (:program:`celery worker`) + shuts down.""" + pass + + def on_worker_process_init(self): + """This method is called when a child process starts.""" + pass + + def import_task_module(self, module): + self.task_modules.add(module) + return self.import_from_cwd(module) + + def import_module(self, module, package=None): + return importlib.import_module(module, package=package) + + def import_from_cwd(self, module, imp=None, package=None): + return import_from_cwd( + module, + self.import_module if imp is None else imp, + package=package, + ) + + def import_default_modules(self): + return [ + self.import_task_module(m) for m in ( + tuple(self.builtin_modules) + + tuple(maybe_list(self.app.conf.CELERY_IMPORTS)) + + tuple(maybe_list(self.app.conf.CELERY_INCLUDE)) + ) + ] + + def init_worker(self): + if not self.worker_initialized: + self.worker_initialized = True + self.import_default_modules() + self.on_worker_init() + + def shutdown_worker(self): + self.on_worker_shutdown() + + def init_worker_process(self): + self.on_worker_process_init() + + def config_from_envvar(self, variable_name, silent=False): + module_name = os.environ.get(variable_name) + if not module_name: + if silent: + return False + raise ImproperlyConfigured(self.error_envvar_not_set % module_name) + return self.config_from_object(module_name, silent=silent) + + def config_from_object(self, obj, silent=False): + if isinstance(obj, basestring): + try: + if '.' 
in obj: + obj = symbol_by_name(obj, imp=self.import_from_cwd) + else: + obj = self.import_from_cwd(obj) + except (ImportError, AttributeError): + if silent: + return False + raise + if not hasattr(obj, '__getitem__'): + obj = DictAttribute(obj) + self._conf = obj + return True + + def _import_config_module(self, name): + try: + self.find_module(name) + except NotAPackage: + if name.endswith('.py'): + raise NotAPackage, NotAPackage(CONFIG_WITH_SUFFIX % { + 'module': name, 'suggest': name[:-3]}), sys.exc_info()[2] + raise NotAPackage, NotAPackage( + CONFIG_INVALID_NAME % {'module': name}), sys.exc_info()[2] + else: + return self.import_from_cwd(name) + + def find_module(self, module): + return find_module(module) + + def cmdline_config_parser( + self, args, namespace='celery', + re_type=re.compile(r'\((\w+)\)'), + extra_types={'json': anyjson.loads}, + override_types={'tuple': 'json', + 'list': 'json', + 'dict': 'json'}): + from celery.app.defaults import Option, NAMESPACES + namespace = namespace.upper() + typemap = dict(Option.typemap, **extra_types) + + def getarg(arg): + """Parse a single configuration definition from + the command line.""" + + ## find key/value + # ns.key=value|ns_key=value (case insensitive) + key, value = arg.split('=', 1) + key = key.upper().replace('.', '_') + + ## find namespace. + # .key=value|_key=value expands to default namespace. + if key[0] == '_': + ns, key = namespace, key[1:] + else: + # find namespace part of key + ns, key = key.split('_', 1) + + ns_key = (ns and ns + '_' or '') + key + + # (type)value makes cast to custom type. + cast = re_type.match(value) + if cast: + type_ = cast.groups()[0] + type_ = override_types.get(type_, type_) + value = value[len(cast.group()):] + value = typemap[type_](value) + else: + try: + value = NAMESPACES[ns][key].to_python(value) + except ValueError, exc: + # display key name in error message. + raise ValueError('%r: %s' % (ns_key, exc)) + return ns_key, value + return dict(getarg(v) for v in args) + + def mail_admins(self, subject, body, fail_silently=False, + sender=None, to=None, host=None, port=None, + user=None, password=None, timeout=None, + use_ssl=False, use_tls=False): + message = self.mail.Message(sender=sender, to=to, + subject=safe_str(subject), + body=safe_str(body)) + mailer = self.mail.Mailer(host=host, port=port, + user=user, password=password, + timeout=timeout, use_ssl=use_ssl, + use_tls=use_tls) + mailer.send(message, fail_silently=fail_silently) + + def read_configuration(self): + try: + custom_config = os.environ['CELERY_CONFIG_MODULE'] + except KeyError: + pass + else: + usercfg = self._import_config_module(custom_config) + return DictAttribute(usercfg) + return {} + + @property + def conf(self): + """Loader configuration.""" + if self._conf is None: + self._conf = self.read_configuration() + return self._conf + + @cached_property + def mail(self): + return self.import_module('celery.utils.mail') diff --git a/awx/lib/site-packages/celery/loaders/default.py b/awx/lib/site-packages/celery/loaders/default.py new file mode 100644 index 0000000000..f695538cba --- /dev/null +++ b/awx/lib/site-packages/celery/loaders/default.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +""" + celery.loaders.default + ~~~~~~~~~~~~~~~~~~~~~~ + + The default loader used when no custom app has been initialized. 
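+
+    The configuration module name is read from the
+    :envvar:`CELERY_CONFIG_MODULE` environment variable, falling back
+    to ``celeryconfig``, so the default app can be pointed at a
+    (hypothetical) module like so::
+
+        $ CELERY_CONFIG_MODULE='proj.celeryconfig' celery worker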
+ +""" +from __future__ import absolute_import + +import os +import warnings + +from celery.datastructures import DictAttribute +from celery.exceptions import NotConfigured +from celery.utils import strtobool + +from .base import BaseLoader + +DEFAULT_CONFIG_MODULE = 'celeryconfig' + +#: Warns if configuration file is missing if :envvar:`C_WNOCONF` is set. +C_WNOCONF = strtobool(os.environ.get('C_WNOCONF', False)) + + +class Loader(BaseLoader): + """The loader used by the default app.""" + + def setup_settings(self, settingsdict): + return DictAttribute(settingsdict) + + def read_configuration(self): + """Read configuration from :file:`celeryconfig.py` and configure + celery and Django so it can be used by regular Python.""" + configname = os.environ.get('CELERY_CONFIG_MODULE', + DEFAULT_CONFIG_MODULE) + try: + usercfg = self._import_config_module(configname) + except ImportError: + # billiard sets this if forked using execv + if C_WNOCONF and not os.environ.get('FORKED_BY_MULTIPROCESSING'): + warnings.warn(NotConfigured( + 'No %r module found! Please make sure it exists and ' + 'is available to Python.' % (configname, ))) + return self.setup_settings({}) + else: + self.configured = True + return self.setup_settings(usercfg) diff --git a/awx/lib/site-packages/celery/local.py b/awx/lib/site-packages/celery/local.py new file mode 100644 index 0000000000..f54de36c4b --- /dev/null +++ b/awx/lib/site-packages/celery/local.py @@ -0,0 +1,316 @@ +# -*- coding: utf-8 -*- +""" + celery.local + ~~~~~~~~~~~~ + + This module contains critical utilities that + needs to be loaded as soon as possible, and that + shall not load any third party modules. + + Parts of this module is Copyright by Werkzeug Team. + +""" +from __future__ import absolute_import + +import importlib +import sys + +PY3 = sys.version_info[0] == 3 + +__module__ = __name__ # used by Proxy class body + + +def _default_cls_attr(name, type_, cls_value): + # Proxy uses properties to forward the standard + # class attributes __module__, __name__ and __doc__ to the real + # object, but these needs to be a string when accessed from + # the Proxy class directly. This is a hack to make that work. + # -- See Issue #1087. + + def __new__(cls, getter): + instance = type_.__new__(cls, cls_value) + instance.__getter = getter + return instance + + def __get__(self, obj, cls=None): + return self.__getter(obj) if obj is not None else self + + def __set__(self, obj, value): + raise AttributeError('readonly attribute') + + return type(name, (type_, ), { + '__new__': __new__, '__get__': __get__, '__set__': __set__, + }) + + +class _cls_spec(str): + + def __new__(cls, getter): + s = str.__new__(cls, getter.__module__) + s.__getter = getter + return s + + def __get__(self, obj, cls=None): + if obj is not None: + return self.__getter(obj) + return self + + def __set__(self, obj, value): + raise AttributeError('cannot set attribute') + + +def symbol_by_name(name, aliases={}, imp=None, package=None, + sep='.', default=None, **kwargs): + """Get symbol by qualified name. + + The name should be the full dot-separated path to the class:: + + modulename.ClassName + + Example:: + + celery.concurrency.processes.TaskPool + ^- class name + + or using ':' to separate module and symbol:: + + celery.concurrency.processes:TaskPool + + If `aliases` is provided, a dict containing short name/long name + mappings, the name is looked up in the aliases first. + + Examples: + + >>> symbol_by_name('celery.concurrency.processes.TaskPool') + + + >>> symbol_by_name('default', { + ... 
'default': 'celery.concurrency.processes.TaskPool'}) + + + # Does not try to look up non-string names. + >>> from celery.concurrency.processes import TaskPool + >>> symbol_by_name(TaskPool) is TaskPool + True + + """ + if imp is None: + imp = importlib.import_module + + if not isinstance(name, basestring): + return name # already a class + + name = aliases.get(name) or name + sep = ':' if ':' in name else sep + module_name, _, cls_name = name.rpartition(sep) + if not module_name: + cls_name, module_name = None, package if package else cls_name + try: + try: + module = imp(module_name, package=package, **kwargs) + except ValueError, exc: + raise ValueError, ValueError( + "Couldn't import %r: %s" % (name, exc)), sys.exc_info()[2] + return getattr(module, cls_name) if cls_name else module + except (ImportError, AttributeError): + if default is None: + raise + return default + + +def try_import(module, default=None): + """Try to import and return module, or return + None if the module does not exist.""" + try: + return importlib.import_module(module) + except ImportError: + return default + + +class Proxy(object): + """Proxy to another object.""" + + # Code stolen from werkzeug.local.Proxy. + __slots__ = ('__local', '__args', '__kwargs', '__dict__') + if not PY3: + __slots__ += ('__name__', ) + + def __init__(self, local, args=None, kwargs=None, name=None): + object.__setattr__(self, '_Proxy__local', local) + object.__setattr__(self, '_Proxy__args', args or ()) + object.__setattr__(self, '_Proxy__kwargs', kwargs or {}) + if name is not None: + object.__setattr__(self, '__custom_name__', name) + + @_default_cls_attr('name', str, __name__) + def __name__(self): + try: + return self.__custom_name__ + except AttributeError: + return self._get_current_object().__name__ + + @_default_cls_attr('module', str, __module__) + def __module__(self): + return self._get_current_object().__module__ + + @_default_cls_attr('doc', str, __doc__) + def __doc__(self): + return self._get_current_object().__doc__ + + def _get_class(self): + return self._get_current_object().__class__ + + @property + def __class__(self): + return self._get_class() + + def _get_current_object(self): + """Return the current object. This is useful if you want the real + object behind the proxy at a time for performance reasons or because + you want to pass the object into a different context. 
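+
+        For example, a proxy around a plain callable (one without
+        ``__release_local__``) simply calls it to produce the object::
+
+            >>> p = Proxy(lambda: [1, 2, 3])
+            >>> p._get_current_object()
+            [1, 2, 3]
+            >>> p._get_current_object() is p
+            False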
+ """ + loc = object.__getattribute__(self, '_Proxy__local') + if not hasattr(loc, '__release_local__'): + return loc(*self.__args, **self.__kwargs) + try: + return getattr(loc, self.__name__) + except AttributeError: + raise RuntimeError('no object bound to %s' % self.__name__) + + @property + def __dict__(self): + try: + return self._get_current_object().__dict__ + except RuntimeError: # pragma: no cover + raise AttributeError('__dict__') + + def __repr__(self): + try: + obj = self._get_current_object() + except RuntimeError: # pragma: no cover + return '<%s unbound>' % self.__class__.__name__ + return repr(obj) + + def __nonzero__(self): + try: + return bool(self._get_current_object()) + except RuntimeError: # pragma: no cover + return False + + def __unicode__(self): + try: + return unicode(self._get_current_object()) + except RuntimeError: # pragma: no cover + return repr(self) + + def __dir__(self): + try: + return dir(self._get_current_object()) + except RuntimeError: # pragma: no cover + return [] + + def __getattr__(self, name): + if name == '__members__': + return dir(self._get_current_object()) + return getattr(self._get_current_object(), name) + + def __setitem__(self, key, value): + self._get_current_object()[key] = value + + def __delitem__(self, key): + del self._get_current_object()[key] + + def __setslice__(self, i, j, seq): + self._get_current_object()[i:j] = seq + + def __delslice__(self, i, j): + del self._get_current_object()[i:j] + + __setattr__ = lambda x, n, v: setattr(x._get_current_object(), n, v) + __delattr__ = lambda x, n: delattr(x._get_current_object(), n) + __str__ = lambda x: str(x._get_current_object()) + __lt__ = lambda x, o: x._get_current_object() < o + __le__ = lambda x, o: x._get_current_object() <= o + __eq__ = lambda x, o: x._get_current_object() == o + __ne__ = lambda x, o: x._get_current_object() != o + __gt__ = lambda x, o: x._get_current_object() > o + __ge__ = lambda x, o: x._get_current_object() >= o + __cmp__ = lambda x, o: cmp(x._get_current_object(), o) + __hash__ = lambda x: hash(x._get_current_object()) + __call__ = lambda x, *a, **kw: x._get_current_object()(*a, **kw) + __len__ = lambda x: len(x._get_current_object()) + __getitem__ = lambda x, i: x._get_current_object()[i] + __iter__ = lambda x: iter(x._get_current_object()) + __contains__ = lambda x, i: i in x._get_current_object() + __getslice__ = lambda x, i, j: x._get_current_object()[i:j] + __add__ = lambda x, o: x._get_current_object() + o + __sub__ = lambda x, o: x._get_current_object() - o + __mul__ = lambda x, o: x._get_current_object() * o + __floordiv__ = lambda x, o: x._get_current_object() // o + __mod__ = lambda x, o: x._get_current_object() % o + __divmod__ = lambda x, o: x._get_current_object().__divmod__(o) + __pow__ = lambda x, o: x._get_current_object() ** o + __lshift__ = lambda x, o: x._get_current_object() << o + __rshift__ = lambda x, o: x._get_current_object() >> o + __and__ = lambda x, o: x._get_current_object() & o + __xor__ = lambda x, o: x._get_current_object() ^ o + __or__ = lambda x, o: x._get_current_object() | o + __div__ = lambda x, o: x._get_current_object().__div__(o) + __truediv__ = lambda x, o: x._get_current_object().__truediv__(o) + __neg__ = lambda x: -(x._get_current_object()) + __pos__ = lambda x: +(x._get_current_object()) + __abs__ = lambda x: abs(x._get_current_object()) + __invert__ = lambda x: ~(x._get_current_object()) + __complex__ = lambda x: complex(x._get_current_object()) + __int__ = lambda x: int(x._get_current_object()) + __long__ = 
lambda x: long(x._get_current_object()) + __float__ = lambda x: float(x._get_current_object()) + __oct__ = lambda x: oct(x._get_current_object()) + __hex__ = lambda x: hex(x._get_current_object()) + __index__ = lambda x: x._get_current_object().__index__() + __coerce__ = lambda x, o: x.__coerce__(x, o) + __enter__ = lambda x: x._get_current_object().__enter__() + __exit__ = lambda x, *a, **kw: x._get_current_object().__exit__(*a, **kw) + __reduce__ = lambda x: x._get_current_object().__reduce__() + + +class PromiseProxy(Proxy): + """This is a proxy to an object that has not yet been evaulated. + + :class:`Proxy` will evaluate the object each time, while the + promise will only evaluate it once. + + """ + + def _get_current_object(self): + try: + return object.__getattribute__(self, '__thing') + except AttributeError: + return self.__evaluate__() + + def __evaluated__(self): + try: + object.__getattribute__(self, '__thing') + except AttributeError: + return False + return True + + def __maybe_evaluate__(self): + return self._get_current_object() + + def __evaluate__(self): + try: + thing = Proxy._get_current_object(self) + object.__setattr__(self, '__thing', thing) + return thing + finally: + object.__delattr__(self, '_Proxy__local') + object.__delattr__(self, '_Proxy__args') + object.__delattr__(self, '_Proxy__kwargs') + + +def maybe_evaluate(obj): + try: + return obj.__maybe_evaluate__() + except AttributeError: + return obj diff --git a/awx/lib/site-packages/celery/platforms.py b/awx/lib/site-packages/celery/platforms.py new file mode 100644 index 0000000000..7919cb10b8 --- /dev/null +++ b/awx/lib/site-packages/celery/platforms.py @@ -0,0 +1,687 @@ +# -*- coding: utf-8 -*- +""" + celery.platforms + ~~~~~~~~~~~~~~~~ + + Utilities dealing with platform specifics: signals, daemonization, + users, groups, and so on. + +""" +from __future__ import absolute_import +from __future__ import with_statement + +import atexit +import errno +import os +import platform as _platform +import shlex +import signal as _signal +import sys + +from billiard import current_process +from kombu.utils.encoding import safe_str +from contextlib import contextmanager + +from .local import try_import + +_setproctitle = try_import('setproctitle') +resource = try_import('resource') +pwd = try_import('pwd') +grp = try_import('grp') + +# exitcodes +EX_OK = getattr(os, 'EX_OK', 0) +EX_FAILURE = 1 +EX_UNAVAILABLE = getattr(os, 'EX_UNAVAILABLE', 69) +EX_USAGE = getattr(os, 'EX_USAGE', 64) + +SYSTEM = _platform.system() +IS_OSX = SYSTEM == 'Darwin' +IS_WINDOWS = SYSTEM == 'Windows' + +DAEMON_UMASK = 0 +DAEMON_WORKDIR = '/' + +PIDFILE_FLAGS = os.O_CREAT | os.O_EXCL | os.O_WRONLY +PIDFILE_MODE = ((os.R_OK | os.W_OK) << 6) | ((os.R_OK) << 3) | ((os.R_OK)) + +PIDLOCKED = """ERROR: Pidfile (%s) already exists. +Seems we're already running? (pid: %s)""" + + +def pyimplementation(): + """Returns string identifying the current Python implementation.""" + if hasattr(_platform, 'python_implementation'): + return _platform.python_implementation() + elif sys.platform.startswith('java'): + return 'Jython ' + sys.platform + elif hasattr(sys, 'pypy_version_info'): + v = '.'.join(str(p) for p in sys.pypy_version_info[:3]) + if sys.pypy_version_info[3:]: + v += '-' + ''.join(str(p) for p in sys.pypy_version_info[3:]) + return 'PyPy ' + v + else: + return 'CPython' + + +def _find_option_with_arg(argv, short_opts=None, long_opts=None): + """Search argv for option specifying its short and longopt + alternatives. 
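+
+    For example (hypothetical argv)::
+
+        >>> _find_option_with_arg(
+        ...     ['worker', '-P', 'eventlet'], ['-P'], ['--pool'])
+        'eventlet'
+        >>> _find_option_with_arg(
+        ...     ['worker', '--pool=gevent'], ['-P'], ['--pool'])
+        'gevent'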
+ + Returns the value of the option if found. + + """ + for i, arg in enumerate(argv): + if arg.startswith('-'): + if long_opts and arg.startswith('--'): + name, _, val = arg.partition('=') + if name in long_opts: + return val + if short_opts and arg in short_opts: + return argv[i + 1] + raise KeyError('|'.join(short_opts or [] + long_opts or [])) + + +def maybe_patch_concurrency(argv, short_opts=None, long_opts=None): + """With short and long opt alternatives that specify the command line + option to set the pool, this makes sure that anything that needs + to be patched is completed as early as possible. + (e.g. eventlet/gevent monkey patches).""" + try: + pool = _find_option_with_arg(argv, short_opts, long_opts) + except KeyError: + pass + else: + # set up eventlet/gevent environments ASAP. + from celery import concurrency + concurrency.get_implementation(pool) + + +class LockFailed(Exception): + """Raised if a pidlock can't be acquired.""" + + +def get_fdmax(default=None): + """Returns the maximum number of open file descriptors + on this system. + + :keyword default: Value returned if there's no file + descriptor limit. + + """ + fdmax = resource.getrlimit(resource.RLIMIT_NOFILE)[1] + if fdmax == resource.RLIM_INFINITY: + return default + return fdmax + + +class Pidfile(object): + """Pidfile + + This is the type returned by :func:`create_pidlock`. + + TIP: Use the :func:`create_pidlock` function instead, + which is more convenient and also removes stale pidfiles (when + the process holding the lock is no longer running). + + """ + + #: Path to the pid lock file. + path = None + + def __init__(self, path): + self.path = os.path.abspath(path) + + def acquire(self): + """Acquire lock.""" + try: + self.write_pid() + except OSError, exc: + raise LockFailed, LockFailed(str(exc)), sys.exc_info()[2] + return self + __enter__ = acquire + + def is_locked(self): + """Returns true if the pid lock exists.""" + return os.path.exists(self.path) + + def release(self, *args): + """Release lock.""" + self.remove() + __exit__ = release + + def read_pid(self): + """Reads and returns the current pid.""" + with ignore_errno('ENOENT'): + with open(self.path, 'r') as fh: + line = fh.readline() + if line.strip() == line: # must contain '\n' + raise ValueError( + 'Partially written or invalid pidfile %r' % self.path) + + try: + return int(line.strip()) + except ValueError: + raise ValueError( + 'pidfile %r contents invalid.' % self.path) + + def remove(self): + """Removes the lock.""" + with ignore_errno(errno.ENOENT, errno.EACCES): + os.unlink(self.path) + + def remove_if_stale(self): + """Removes the lock if the process is not running. + (does not respond to signals).""" + try: + pid = self.read_pid() + except ValueError, exc: + sys.stderr.write('Broken pidfile found. Removing it.\n') + self.remove() + return True + if not pid: + self.remove() + return True + + try: + os.kill(pid, 0) + except os.error, exc: + if exc.errno == errno.ESRCH: + sys.stderr.write('Stale pidfile exists. Removing it.\n') + self.remove() + return True + return False + + def write_pid(self): + pid = os.getpid() + content = '%d\n' % (pid, ) + + pidfile_fd = os.open(self.path, PIDFILE_FLAGS, PIDFILE_MODE) + pidfile = os.fdopen(pidfile_fd, 'w') + try: + pidfile.write(content) + # flush and sync so that the re-read below works. 
+ pidfile.flush() + try: + os.fsync(pidfile_fd) + except AttributeError: # pragma: no cover + pass + finally: + pidfile.close() + + rfh = open(self.path) + try: + if rfh.read() != content: + raise LockFailed( + "Inconsistency: Pidfile content doesn't match at re-read") + finally: + rfh.close() +PIDFile = Pidfile # compat alias + + +def create_pidlock(pidfile): + """Create and verify pidfile. + + If the pidfile already exists the program exits with an error message, + however if the process it refers to is not running anymore, the pidfile + is deleted and the program continues. + + This function will automatically install an :mod:`atexit` handler + to release the lock at exit, you can skip this by calling + :func:`_create_pidlock` instead. + + :returns: :class:`Pidfile`. + + **Example**: + + .. code-block:: python + + pidlock = create_pidlock('/var/run/app.pid') + + """ + pidlock = _create_pidlock(pidfile) + atexit.register(pidlock.release) + return pidlock + + +def _create_pidlock(pidfile): + pidlock = Pidfile(pidfile) + if pidlock.is_locked() and not pidlock.remove_if_stale(): + raise SystemExit(PIDLOCKED % (pidfile, pidlock.read_pid())) + pidlock.acquire() + return pidlock + + +def fileno(f): + """Get object fileno, or :const:`None` if not defined.""" + if isinstance(f, int): + return f + try: + return f.fileno() + except AttributeError: + pass + + +def close_open_fds(keep=None): + keep = [fileno(f) for f in keep if fileno(f)] if keep else [] + for fd in reversed(range(get_fdmax(default=2048))): + if fd not in keep: + with ignore_errno(errno.EBADF): + os.close(fd) + + +class DaemonContext(object): + _is_open = False + + def __init__(self, pidfile=None, workdir=None, umask=None, + fake=False, after_chdir=None, **kwargs): + self.workdir = workdir or DAEMON_WORKDIR + self.umask = DAEMON_UMASK if umask is None else umask + self.fake = fake + self.after_chdir = after_chdir + self.stdfds = (sys.stdin, sys.stdout, sys.stderr) + + def redirect_to_null(self, fd): + if fd: + dest = os.open(os.devnull, os.O_RDWR) + os.dup2(dest, fd) + + def open(self): + if not self._is_open: + if not self.fake: + self._detach() + + os.chdir(self.workdir) + os.umask(self.umask) + + if self.after_chdir: + self.after_chdir() + + close_open_fds(self.stdfds) + for fd in self.stdfds: + self.redirect_to_null(fileno(fd)) + + self._is_open = True + __enter__ = open + + def close(self, *args): + if self._is_open: + self._is_open = False + __exit__ = close + + def _detach(self): + if os.fork() == 0: # first child + os.setsid() # create new session + if os.fork() > 0: # second child + os._exit(0) + else: + os._exit(0) + return self + + +def detached(logfile=None, pidfile=None, uid=None, gid=None, umask=0, + workdir=None, fake=False, **opts): + """Detach the current process in the background (daemonize). + + :keyword logfile: Optional log file. The ability to write to this file + will be verified before the process is detached. + :keyword pidfile: Optional pidfile. The pidfile will not be created, + as this is the responsibility of the child. But the process will + exit if the pid lock exists and the pid written is still running. + :keyword uid: Optional user id or user name to change + effective privileges to. + :keyword gid: Optional group id or group name to change effective + privileges to. + :keyword umask: Optional umask that will be effective in the child process. + :keyword workdir: Optional new working directory. + :keyword fake: Don't actually detach, intented for debugging purposes. + :keyword \*\*opts: Ignored. 
+ + **Example**: + + .. code-block:: python + + from celery.platforms import detached, create_pidlock + + with detached(logfile='/var/log/app.log', pidfile='/var/run/app.pid', + uid='nobody'): + # Now in detached child process with effective user set to nobody, + # and we know that our logfile can be written to, and that + # the pidfile is not locked. + pidlock = create_pidlock('/var/run/app.pid') + + # Run the program + program.run(logfile='/var/log/app.log') + + """ + + if not resource: + raise RuntimeError('This platform does not support detach.') + workdir = os.getcwd() if workdir is None else workdir + + signals.reset('SIGCLD') # Make sure SIGCLD is using the default handler. + if not os.geteuid(): + # no point trying to setuid unless we're root. + maybe_drop_privileges(uid=uid, gid=gid) + + def after_chdir_do(): + # Since without stderr any errors will be silently suppressed, + # we need to know that we have access to the logfile. + logfile and open(logfile, 'a').close() + # Doesn't actually create the pidfile, but makes sure it's not stale. + if pidfile: + _create_pidlock(pidfile).release() + + return DaemonContext( + umask=umask, workdir=workdir, fake=fake, after_chdir=after_chdir_do, + ) + + +def parse_uid(uid): + """Parse user id. + + uid can be an integer (uid) or a string (user name), if a user name + the uid is taken from the password file. + + """ + try: + return int(uid) + except ValueError: + try: + return pwd.getpwnam(uid).pw_uid + except (AttributeError, KeyError): + raise KeyError('User does not exist: %r' % (uid, )) + + +def parse_gid(gid): + """Parse group id. + + gid can be an integer (gid) or a string (group name), if a group name + the gid is taken from the password file. + + """ + try: + return int(gid) + except ValueError: + try: + return grp.getgrnam(gid).gr_gid + except (AttributeError, KeyError): + raise KeyError('Group does not exist: %r' % (gid, )) + + +def _setgroups_hack(groups): + """:fun:`setgroups` may have a platform-dependent limit, + and it is not always possible to know in advance what this limit + is, so we use this ugly hack stolen from glibc.""" + groups = groups[:] + + while 1: + try: + return os.setgroups(groups) + except ValueError: # error from Python's check. + if len(groups) <= 1: + raise + groups[:] = groups[:-1] + except OSError, exc: # error from the OS. + if exc.errno != errno.EINVAL or len(groups) <= 1: + raise + groups[:] = groups[:-1] + + +def setgroups(groups): + """Set active groups from a list of group ids.""" + max_groups = None + try: + max_groups = os.sysconf('SC_NGROUPS_MAX') + except Exception: + pass + try: + return _setgroups_hack(groups[:max_groups]) + except OSError, exc: + if exc.errno != errno.EPERM: + raise + if any(group not in groups for group in os.getgroups()): + # we shouldn't be allowed to change to this group. + raise + + +def initgroups(uid, gid): + """Compat version of :func:`os.initgroups` which was first + added to Python 2.7.""" + if not pwd: # pragma: no cover + return + username = pwd.getpwuid(uid)[0] + if hasattr(os, 'initgroups'): # Python 2.7+ + return os.initgroups(username, gid) + groups = [gr.gr_gid for gr in grp.getgrall() + if username in gr.gr_mem] + setgroups(groups) + + +def setgid(gid): + """Version of :func:`os.setgid` supporting group names.""" + os.setgid(parse_gid(gid)) + + +def setuid(uid): + """Version of :func:`os.setuid` supporting usernames.""" + os.setuid(parse_uid(uid)) + + +def maybe_drop_privileges(uid=None, gid=None): + """Change process privileges to new user/group. 
+ + If UID and GID is specified, the real user/group is changed. + + If only UID is specified, the real user is changed, and the group is + changed to the users primary group. + + If only GID is specified, only the group is changed. + + """ + uid = uid and parse_uid(uid) + gid = gid and parse_gid(gid) + + if uid: + # If GID isn't defined, get the primary GID of the user. + if not gid and pwd: + gid = pwd.getpwuid(uid).pw_gid + # Must set the GID before initgroups(), as setgid() + # is known to zap the group list on some platforms. + setgid(gid) + initgroups(uid, gid) + + # at last: + setuid(uid) + else: + gid and setgid(gid) + + +class Signals(object): + """Convenience interface to :mod:`signals`. + + If the requested signal is not supported on the current platform, + the operation will be ignored. + + **Examples**: + + .. code-block:: python + + >>> from celery.platforms import signals + + >>> signals['INT'] = my_handler + + >>> signals['INT'] + my_handler + + >>> signals.supported('INT') + True + + >>> signals.signum('INT') + 2 + + >>> signals.ignore('USR1') + >>> signals['USR1'] == signals.ignored + True + + >>> signals.reset('USR1') + >>> signals['USR1'] == signals.default + True + + >>> signals.update(INT=exit_handler, + ... TERM=exit_handler, + ... HUP=hup_handler) + + """ + + ignored = _signal.SIG_IGN + default = _signal.SIG_DFL + + def supported(self, signal_name): + """Returns true value if ``signal_name`` exists on this platform.""" + try: + return self.signum(signal_name) + except AttributeError: + pass + + def signum(self, signal_name): + """Get signal number from signal name.""" + if isinstance(signal_name, int): + return signal_name + if not isinstance(signal_name, basestring) \ + or not signal_name.isupper(): + raise TypeError('signal name must be uppercase string.') + if not signal_name.startswith('SIG'): + signal_name = 'SIG' + signal_name + return getattr(_signal, signal_name) + + def reset(self, *signal_names): + """Reset signals to the default signal handler. + + Does nothing if the platform doesn't support signals, + or the specified signal in particular. + + """ + self.update((sig, self.default) for sig in signal_names) + + def ignore(self, *signal_names): + """Ignore signal using :const:`SIG_IGN`. + + Does nothing if the platform doesn't support signals, + or the specified signal in particular. + + """ + self.update((sig, self.ignored) for sig in signal_names) + + def __getitem__(self, signal_name): + return _signal.getsignal(self.signum(signal_name)) + + def __setitem__(self, signal_name, handler): + """Install signal handler. + + Does nothing if the current platform doesn't support signals, + or the specified signal in particular. + + """ + try: + _signal.signal(self.signum(signal_name), handler) + except (AttributeError, ValueError): + pass + + def update(self, _d_=None, **sigmap): + """Set signal handlers from a mapping.""" + for signal_name, handler in dict(_d_ or {}, **sigmap).iteritems(): + self[signal_name] = handler + + +signals = Signals() +get_signal = signals.signum # compat +install_signal_handler = signals.__setitem__ # compat +reset_signal = signals.reset # compat +ignore_signal = signals.ignore # compat + + +def strargv(argv): + arg_start = 2 if 'manage' in argv[0] else 1 + if len(argv) > arg_start: + return ' '.join(argv[arg_start:]) + return '' + + +def set_process_title(progname, info=None): + """Set the ps name for the currently running process. + + Only works if :mod:`setproctitle` is installed. 
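+
+    For example::
+
+        >>> set_process_title('celeryd', info='-c 4')
+        '[celeryd] -c 4'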
+ + """ + proctitle = '[%s]' % progname + proctitle = '%s %s' % (proctitle, info) if info else proctitle + if _setproctitle: + _setproctitle.setproctitle(safe_str(proctitle)) + return proctitle + + +if os.environ.get('NOSETPS'): # pragma: no cover + + def set_mp_process_title(*a, **k): + pass +else: + + def set_mp_process_title(progname, info=None, hostname=None): # noqa + """Set the ps name using the multiprocessing process name. + + Only works if :mod:`setproctitle` is installed. + + """ + if hostname: + progname = '%s@%s' % (progname, hostname.split('.')[0]) + return set_process_title( + '%s:%s' % (progname, current_process().name), info=info) + + +def shellsplit(s): + """Compat. version of :func:`shlex.split` that supports + the ``posix`` option which was first added in Python 2.6. + + Posix behavior will be disabled if running under Windows. + + """ + lexer = shlex.shlex(s, posix=not IS_WINDOWS) + lexer.whitespace_split = True + lexer.commenters = '' + return list(lexer) + + +def get_errno(n): + """Get errno for string, e.g. ``ENOENT``.""" + if isinstance(n, basestring): + return getattr(errno, n) + return n + + +@contextmanager +def ignore_errno(*errnos, **kwargs): + """Context manager to ignore specific POSIX error codes. + + Takes a list of error codes to ignore, which can be either + the name of the code, or the code integer itself:: + + >>> with ignore_errno('ENOENT'): + ... with open('foo', 'r'): + ... return r.read() + + >>> with ignore_errno(errno.ENOENT, errno.EPERM): + ... pass + + :keyword types: A tuple of exceptions to ignore (when the errno matches), + defaults to :exc:`Exception`. + """ + types = kwargs.get('types') or (Exception, ) + errnos = [get_errno(errno) for errno in errnos] + try: + yield + except types, exc: + if not hasattr(exc, 'errno'): + raise + if exc.errno not in errnos: + raise diff --git a/awx/lib/site-packages/celery/result.py b/awx/lib/site-packages/celery/result.py new file mode 100644 index 0000000000..1b6af3aec9 --- /dev/null +++ b/awx/lib/site-packages/celery/result.py @@ -0,0 +1,733 @@ +# -*- coding: utf-8 -*- +""" + celery.result + ~~~~~~~~~~~~~ + + Task results/state and groups of results. + +""" +from __future__ import absolute_import +from __future__ import with_statement + +import time + +from collections import deque +from copy import copy + +from kombu.utils import cached_property +from kombu.utils.compat import OrderedDict + +from . import current_app +from . import states +from .app import app_or_default +from .datastructures import DependencyGraph +from .exceptions import IncompleteStream, TimeoutError + + +class ResultBase(object): + """Base class for all results""" + + +class AsyncResult(ResultBase): + """Query task state. + + :param id: see :attr:`id`. + :keyword backend: see :attr:`backend`. + + """ + app = None + + #: Error raised for timeouts. + TimeoutError = TimeoutError + + #: The task's UUID. + id = None + + #: The task result backend to use. + backend = None + + #: Parent result (if part of a chain) + parent = None + + def __init__(self, id, backend=None, task_name=None, + app=None, parent=None): + self.app = app_or_default(app or self.app) + self.id = id + self.backend = backend or self.app.backend + self.task_name = task_name + self.parent = parent + + def serializable(self): + return self.id, None + + def forget(self): + """Forget about (and possibly remove the result of) this task.""" + self.backend.forget(self.id) + + def revoke(self, connection=None, terminate=False, signal=None): + """Send revoke signal to all workers. 
+ + Any worker receiving the task, or having reserved the + task, *must* ignore it. + + :keyword terminate: Also terminate the process currently working + on the task (if any). + :keyword signal: Name of signal to send to process if terminate. + Default is TERM. + + """ + self.app.control.revoke(self.id, connection=connection, + terminate=terminate, signal=signal) + + def get(self, timeout=None, propagate=True, interval=0.5): + """Wait until task is ready, and return its result. + + .. warning:: + + Waiting for tasks within a task may lead to deadlocks. + Please read :ref:`task-synchronous-subtasks`. + + :keyword timeout: How long to wait, in seconds, before the + operation times out. + :keyword propagate: Re-raise exception if the task failed. + :keyword interval: Time to wait (in seconds) before retrying to + retrieve the result. Note that this does not have any effect + when using the amqp result store backend, as it does not + use polling. + + :raises celery.exceptions.TimeoutError: if `timeout` is not + :const:`None` and the result does not arrive within `timeout` + seconds. + + If the remote call raised an exception then that exception will + be re-raised. + + """ + return self.backend.wait_for(self.id, timeout=timeout, + propagate=propagate, + interval=interval) + wait = get # deprecated alias to :meth:`get`. + + def collect(self, intermediate=False, **kwargs): + """Iterator, like :meth:`get` will wait for the task to complete, + but will also follow :class:`AsyncResult` and :class:`ResultSet` + returned by the task, yielding for each result in the tree. + + An example would be having the following tasks: + + .. code-block:: python + + @task() + def A(how_many): + return group(B.s(i) for i in xrange(how_many)) + + @task() + def B(i): + return pow2.delay(i) + + @task() + def pow2(i): + return i ** 2 + + Calling :meth:`collect` would return: + + .. code-block:: python + + >>> result = A.delay(10) + >>> list(result.collect()) + [0, 1, 4, 9, 16, 25, 36, 49, 64, 81] + + """ + for _, R in self.iterdeps(intermediate=intermediate): + yield R, R.get(**kwargs) + + def get_leaf(self): + value = None + for _, R in self.iterdeps(): + value = R.get() + return value + + def iterdeps(self, intermediate=False): + stack = deque([(None, self)]) + + while stack: + parent, node = stack.popleft() + yield parent, node + if node.ready(): + stack.extend((node, child) for child in node.children or []) + else: + if not intermediate: + raise IncompleteStream() + + def ready(self): + """Returns :const:`True` if the task has been executed. + + If the task is still running, pending, or is waiting + for retry then :const:`False` is returned. 
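+
+        For example (hypothetical task id, assuming a result backend
+        is configured)::
+
+            >>> result = AsyncResult('2f6e6a86-1f47-4b9e-9a1c-8a4b37e6b1d0')
+            >>> result.ready()
+            False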
+ + """ + return self.state in self.backend.READY_STATES + + def successful(self): + """Returns :const:`True` if the task executed successfully.""" + return self.state == states.SUCCESS + + def failed(self): + """Returns :const:`True` if the task failed.""" + return self.state == states.FAILURE + + def build_graph(self, intermediate=False): + graph = DependencyGraph() + for parent, node in self.iterdeps(intermediate=intermediate): + if parent: + graph.add_arc(parent) + graph.add_edge(parent, node) + return graph + + def __str__(self): + """`str(self) -> self.id`""" + return str(self.id) + + def __hash__(self): + """`hash(self) -> hash(self.id)`""" + return hash(self.id) + + def __repr__(self): + return '<%s: %s>' % (self.__class__.__name__, self.id) + + def __eq__(self, other): + if isinstance(other, AsyncResult): + return other.id == self.id + elif isinstance(other, basestring): + return other == self.id + return NotImplemented + + def __copy__(self): + r = self.__reduce__() + return r[0](*r[1]) + + def __reduce__(self): + return self.__class__, self.__reduce_args__() + + def __reduce_args__(self): + return self.id, self.backend, self.task_name, None, self.parent + + @cached_property + def graph(self): + return self.build_graph() + + @property + def supports_native_join(self): + return self.backend.supports_native_join + + @property + def children(self): + children = self.backend.get_children(self.id) + if children: + return [from_serializable(r, self.app) for r in children] + + @property + def result(self): + """When the task has been executed, this contains the return value. + If the task raised an exception, this will be the exception + instance.""" + return self.backend.get_result(self.id) + info = result + + @property + def traceback(self): + """Get the traceback of a failed task.""" + return self.backend.get_traceback(self.id) + + @property + def state(self): + """The tasks current state. + + Possible values includes: + + *PENDING* + + The task is waiting for execution. + + *STARTED* + + The task has been started. + + *RETRY* + + The task is to be retried, possibly because of failure. + + *FAILURE* + + The task raised an exception, or has exceeded the retry limit. + The :attr:`result` attribute then contains the + exception raised by the task. + + *SUCCESS* + + The task executed successfully. The :attr:`result` attribute + then contains the tasks return value. + + """ + return self.backend.get_status(self.id) + status = state + + def _get_task_id(self): + return self.id + + def _set_task_id(self, id): + self.id = id + task_id = property(_get_task_id, _set_task_id) +BaseAsyncResult = AsyncResult # for backwards compatibility. + + +class ResultSet(ResultBase): + """Working with more than one result. + + :param results: List of result instances. + + """ + app = None + + #: List of results in in the set. + results = None + + def __init__(self, results, app=None, **kwargs): + self.app = app_or_default(app or self.app) + self.results = results + + def add(self, result): + """Add :class:`AsyncResult` as a new member of the set. + + Does nothing if the result is already a member. + + """ + if result not in self.results: + self.results.append(result) + + def remove(self, result): + """Removes result from the set; it must be a member. + + :raises KeyError: if the result is not a member. 
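+
+        A member may be given either as an :class:`AsyncResult`
+        instance or as a plain task id string (hypothetical id)::
+
+            >>> res.remove('2f6e6a86-1f47-4b9e-9a1c-8a4b37e6b1d0')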
+ + """ + if isinstance(result, basestring): + result = AsyncResult(result) + try: + self.results.remove(result) + except ValueError: + raise KeyError(result) + + def discard(self, result): + """Remove result from the set if it is a member. + + If it is not a member, do nothing. + + """ + try: + self.remove(result) + except KeyError: + pass + + def update(self, results): + """Update set with the union of itself and an iterable with + results.""" + self.results.extend(r for r in results if r not in self.results) + + def clear(self): + """Remove all results from this set.""" + self.results[:] = [] # don't create new list. + + def successful(self): + """Was all of the tasks successful? + + :returns: :const:`True` if all of the tasks finished + successfully (i.e. did not raise an exception). + + """ + return all(result.successful() for result in self.results) + + def failed(self): + """Did any of the tasks fail? + + :returns: :const:`True` if any of the tasks failed. + (i.e., raised an exception) + + """ + return any(result.failed() for result in self.results) + + def waiting(self): + """Are any of the tasks incomplete? + + :returns: :const:`True` if any of the tasks is still + waiting for execution. + + """ + return any(not result.ready() for result in self.results) + + def ready(self): + """Did all of the tasks complete? (either by success of failure). + + :returns: :const:`True` if all of the tasks been + executed. + + """ + return all(result.ready() for result in self.results) + + def completed_count(self): + """Task completion count. + + :returns: the number of tasks completed. + + """ + return sum(int(result.successful()) for result in self.results) + + def forget(self): + """Forget about (and possible remove the result of) all the tasks.""" + for result in self.results: + result.forget() + + def revoke(self, connection=None, terminate=False, signal=None): + """Send revoke signal to all workers for all tasks in the set. + + :keyword terminate: Also terminate the process currently working + on the task (if any). + :keyword signal: Name of signal to send to process if terminate. + Default is TERM. + + """ + with self.app.connection_or_acquire(connection) as conn: + for result in self.results: + result.revoke( + connection=conn, terminate=terminate, signal=signal, + ) + + def __iter__(self): + return self.iterate() + + def __getitem__(self, index): + """`res[i] -> res.results[i]`""" + return self.results[index] + + def iterate(self, timeout=None, propagate=True, interval=0.5): + """Iterate over the return values of the tasks as they finish + one by one. + + :raises: The exception if any of the tasks raised an exception. + + """ + elapsed = 0.0 + results = OrderedDict((result.id, copy(result)) + for result in self.results) + + while results: + removed = set() + for task_id, result in results.iteritems(): + if result.ready(): + yield result.get(timeout=timeout and timeout - elapsed, + propagate=propagate) + removed.add(task_id) + else: + if result.backend.subpolling_interval: + time.sleep(result.backend.subpolling_interval) + for task_id in removed: + results.pop(task_id, None) + time.sleep(interval) + elapsed += interval + if timeout and elapsed >= timeout: + raise TimeoutError("The operation timed out") + + def get(self, timeout=None, propagate=True, interval=0.5): + """See :meth:`join` + + This is here for API compatibility with :class:`AsyncResult`, + in addition it uses :meth:`join_native` if available for the + current result backend. 
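+
+        For example (hypothetical results and return values)::
+
+            >>> res = ResultSet([r1, r2])
+            >>> res.get(timeout=10)
+            [4, 8]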
+ + """ + return (self.join_native if self.supports_native_join else self.join)( + timeout=timeout, propagate=propagate, interval=interval) + + def join(self, timeout=None, propagate=True, interval=0.5): + """Gathers the results of all tasks as a list in order. + + .. note:: + + This can be an expensive operation for result store + backends that must resort to polling (e.g. database). + + You should consider using :meth:`join_native` if your backend + supports it. + + .. warning:: + + Waiting for tasks within a task may lead to deadlocks. + Please see :ref:`task-synchronous-subtasks`. + + :keyword timeout: The number of seconds to wait for results before + the operation times out. + + :keyword propagate: If any of the tasks raises an exception, the + exception will be re-raised. + + :keyword interval: Time to wait (in seconds) before retrying to + retrieve a result from the set. Note that this + does not have any effect when using the amqp + result store backend, as it does not use polling. + + :raises celery.exceptions.TimeoutError: if `timeout` is not + :const:`None` and the operation takes longer than `timeout` + seconds. + + """ + time_start = time.time() + remaining = None + + results = [] + for result in self.results: + remaining = None + if timeout: + remaining = timeout - (time.time() - time_start) + if remaining <= 0.0: + raise TimeoutError('join operation timed out') + results.append(result.get(timeout=remaining, + propagate=propagate, + interval=interval)) + return results + + def iter_native(self, timeout=None, interval=None): + """Backend optimized version of :meth:`iterate`. + + .. versionadded:: 2.2 + + Note that this does not support collecting the results + for different task types using different backends. + + This is currently only supported by the amqp, Redis and cache + result backends. + + """ + if not self.results: + return iter([]) + backend = self.results[0].backend + ids = [result.id for result in self.results] + return backend.get_many(ids, timeout=timeout, interval=interval) + + def join_native(self, timeout=None, propagate=True, interval=0.5): + """Backend optimized version of :meth:`join`. + + .. versionadded:: 2.2 + + Note that this does not support collecting the results + for different task types using different backends. + + This is currently only supported by the amqp, Redis and cache + result backends. + + """ + results = self.results + acc = [None for _ in xrange(len(self))] + for task_id, meta in self.iter_native(timeout=timeout, + interval=interval): + if propagate and meta['status'] in states.PROPAGATE_STATES: + raise meta['result'] + acc[results.index(task_id)] = meta['result'] + return acc + + def _failed_join_report(self): + return (res for res in self.results + if res.backend.is_cached(res.id) and + res.state in states.PROPAGATE_STATES) + + def __len__(self): + return len(self.results) + + def __eq__(self, other): + if isinstance(other, ResultSet): + return other.results == self.results + return NotImplemented + + def __repr__(self): + return '<%s: [%s]>' % (self.__class__.__name__, + ', '.join(r.id for r in self.results)) + + @property + def subtasks(self): + """Deprecated alias to :attr:`results`.""" + return self.results + + @property + def supports_native_join(self): + return self.results[0].supports_native_join + + +class GroupResult(ResultSet): + """Like :class:`ResultSet`, but with an associated id. + + This type is returned by :class:`~celery.group`, and the + deprecated TaskSet, meth:`~celery.task.TaskSet.apply_async` method. 
+ + It enables inspection of the tasks state and return values as + a single entity. + + :param id: The id of the group. + :param results: List of result instances. + + """ + + #: The UUID of the group. + id = None + + #: List/iterator of results in the group + results = None + + def __init__(self, id=None, results=None, **kwargs): + self.id = id + ResultSet.__init__(self, results, **kwargs) + + def save(self, backend=None): + """Save group-result for later retrieval using :meth:`restore`. + + Example:: + + >>> result.save() + >>> result = GroupResult.restore(group_id) + + """ + return (backend or self.app.backend).save_group(self.id, self) + + def delete(self, backend=None): + """Remove this result if it was previously saved.""" + (backend or self.app.backend).delete_group(self.id) + + def __reduce__(self): + return self.__class__, self.__reduce_args__() + + def __reduce_args__(self): + return self.id, self.results + + def __eq__(self, other): + if isinstance(other, GroupResult): + return other.id == self.id and other.results == self.results + return NotImplemented + + def __repr__(self): + return '<%s: %s [%s]>' % (self.__class__.__name__, self.id, + ', '.join(r.id for r in self.results)) + + def serializable(self): + return self.id, [r.serializable() for r in self.results] + + @property + def children(self): + return self.results + + @classmethod + def restore(self, id, backend=None): + """Restore previously saved group result.""" + return (backend or current_app.backend).restore_group(id) + + +class TaskSetResult(GroupResult): + """Deprecated version of :class:`GroupResult`""" + + def __init__(self, taskset_id, results=None, **kwargs): + # XXX supports the taskset_id kwarg. + # XXX previously the "results" arg was named "subtasks". + if 'subtasks' in kwargs: + results = kwargs['subtasks'] + GroupResult.__init__(self, taskset_id, results, **kwargs) + + def itersubtasks(self): + """Deprecated. 
Use ``iter(self.results)`` instead.""" + return iter(self.results) + + @property + def total(self): + """Deprecated: Use ``len(r)``.""" + return len(self) + + def _get_taskset_id(self): + return self.id + + def _set_taskset_id(self, id): + self.id = id + taskset_id = property(_get_taskset_id, _set_taskset_id) + + +class EagerResult(AsyncResult): + """Result that we know has already been executed.""" + task_name = None + + def __init__(self, id, ret_value, state, traceback=None): + self.id = id + self._result = ret_value + self._state = state + self._traceback = traceback + + def __reduce__(self): + return self.__class__, self.__reduce_args__() + + def __reduce_args__(self): + return (self.id, self._result, self._state, self._traceback) + + def __copy__(self): + cls, args = self.__reduce__() + return cls(*args) + + def ready(self): + return True + + def get(self, timeout=None, propagate=True, **kwargs): + if self.successful(): + return self.result + elif self.state in states.PROPAGATE_STATES: + if propagate: + raise self.result + return self.result + wait = get + + def forget(self): + pass + + def revoke(self, *args, **kwargs): + self._state = states.REVOKED + + def __repr__(self): + return "" % self.id + + @property + def result(self): + """The tasks return value""" + return self._result + + @property + def state(self): + """The tasks state.""" + return self._state + status = state + + @property + def traceback(self): + """The traceback if the task failed.""" + return self._traceback + + @property + def supports_native_join(self): + return False + + +def from_serializable(r, app=None): + # earlier backends may just pickle, so check if + # result is already prepared. + app = app_or_default(app) + Result = app.AsyncResult + if not isinstance(r, ResultBase): + if isinstance(r, (list, tuple)): + id, nodes = r + if nodes: + return app.GroupResult(id, [Result(sid) for sid, _ in nodes]) + return Result(id) + else: + return Result(r) + return r diff --git a/awx/lib/site-packages/celery/schedules.py b/awx/lib/site-packages/celery/schedules.py new file mode 100644 index 0000000000..ca0e3aa314 --- /dev/null +++ b/awx/lib/site-packages/celery/schedules.py @@ -0,0 +1,537 @@ +# -*- coding: utf-8 -*- +""" + celery.schedules + ~~~~~~~~~~~~~~~~ + + Schedules define the intervals at which periodic tasks + should run. + +""" +from __future__ import absolute_import + +import re + +from datetime import datetime, timedelta + +from dateutil.relativedelta import relativedelta +from kombu.utils import cached_property + +from . import current_app +from .utils import is_iterable +from .utils.timeutils import ( + timedelta_seconds, weekday, maybe_timedelta, remaining, + humanize_seconds, timezone, maybe_make_aware +) +from .datastructures import AttributeDict + + +def cronfield(s): + return '*' if s is None else s + + +class ParseException(Exception): + """Raised by crontab_parser when the input can't be parsed.""" + + +class schedule(object): + relative = False + + def __init__(self, run_every=None, relative=False, nowfun=None): + self.run_every = maybe_timedelta(run_every) + self.relative = relative + self.nowfun = nowfun + + def now(self): + return (self.nowfun or self.app.now)() + + def remaining_estimate(self, last_run_at): + return remaining(last_run_at, self.run_every, + self.maybe_make_aware(self.now()), self.relative) + + def is_due(self, last_run_at): + """Returns tuple of two items `(is_due, next_time_to_run)`, + where next time to run is in seconds. + + e.g. 
+ + * `(True, 20)`, means the task should be run now, and the next + time to run is in 20 seconds. + + * `(False, 12)`, means the task should be run in 12 seconds. + + You can override this to decide the interval at runtime, + but keep in mind the value of :setting:`CELERYBEAT_MAX_LOOP_INTERVAL`, + which decides the maximum number of seconds celerybeat can sleep + between re-checking the periodic task intervals. So if you + dynamically change the next run at value, and the max interval is + set to 5 minutes, it will take 5 minutes for the change to take + effect, so you may consider lowering the value of + :setting:`CELERYBEAT_MAX_LOOP_INTERVAL` if responsiveness is of + importance to you. + + .. admonition:: Scheduler max interval variance + + The default max loop interval may vary for different schedulers. + For the default scheduler the value is 5 minutes, but for e.g. + the django-celery database scheduler the value is 5 seconds. + + """ + last_run_at = self.maybe_make_aware(last_run_at) + rem_delta = self.remaining_estimate(last_run_at) + rem = timedelta_seconds(rem_delta) + if rem == 0: + return True, self.seconds + return False, rem + + def maybe_make_aware(self, dt): + if self.utc_enabled: + return maybe_make_aware(dt, self.tz) + return dt + + def __repr__(self): + return '' % self.human_seconds + + def __eq__(self, other): + if isinstance(other, schedule): + return self.run_every == other.run_every + return self.run_every == other + + @property + def seconds(self): + return timedelta_seconds(self.run_every) + + @property + def human_seconds(self): + return humanize_seconds(self.seconds) + + @cached_property + def app(self): + return current_app._get_current_object() + + @cached_property + def tz(self): + return timezone.get_timezone(self.app.conf.CELERY_TIMEZONE) + + @cached_property + def utc_enabled(self): + return self.app.conf.CELERY_ENABLE_UTC + + def to_local(self, dt): + if not self.utc_enabled: + return timezone.to_local_fallback(dt, self.tz) + return dt + + +class crontab_parser(object): + """Parser for crontab expressions. Any expression of the form 'groups' + (see BNF grammar below) is accepted and expanded to a set of numbers. + These numbers represent the units of time that the crontab needs to + run on:: + + digit :: '0'..'9' + dow :: 'a'..'z' + number :: digit+ | dow+ + steps :: number + range :: number ( '-' number ) ? + numspec :: '*' | range + expr :: numspec ( '/' steps ) ? + groups :: expr ( ',' expr ) * + + The parser is a general purpose one, useful for parsing hours, minutes and + day_of_week expressions. Example usage:: + + >>> minutes = crontab_parser(60).parse('*/15') + [0, 15, 30, 45] + >>> hours = crontab_parser(24).parse('*/4') + [0, 4, 8, 12, 16, 20] + >>> day_of_week = crontab_parser(7).parse('*') + [0, 1, 2, 3, 4, 5, 6] + + It can also parse day_of_month and month_of_year expressions if initialized + with an minimum of 1. Example usage:: + + >>> days_of_month = crontab_parser(31, 1).parse('*/3') + [1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 31] + >>> months_of_year = crontab_parser(12, 1).parse('*/2') + [1, 3, 5, 7, 9, 11] + >>> months_of_year = crontab_parser(12, 1).parse('2-12/2') + [2, 4, 6, 8, 10, 12] + + The maximum possible expanded value returned is found by the formula:: + + max_ + min_ - 1 + + """ + ParseException = ParseException + + _range = r'(\w+?)-(\w+)' + _steps = r'/(\w+)?' 
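+    # these fragments are combined into ``pats`` in __init__, so that
+    # e.g. '1-10/2' matches _range + _steps and '*/15' matches
+    # _star + _steps; a bare number or weekday name falls through
+    # to _expand_range.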
+ _star = r'\*' + + def __init__(self, max_=60, min_=0): + self.max_ = max_ + self.min_ = min_ + self.pats = ( + (re.compile(self._range + self._steps), self._range_steps), + (re.compile(self._range), self._expand_range), + (re.compile(self._star + self._steps), self._star_steps), + (re.compile('^' + self._star + '$'), self._expand_star), + ) + + def parse(self, spec): + acc = set() + for part in spec.split(','): + if not part: + raise self.ParseException('empty part') + acc |= set(self._parse_part(part)) + return acc + + def _parse_part(self, part): + for regex, handler in self.pats: + m = regex.match(part) + if m: + return handler(m.groups()) + return self._expand_range((part, )) + + def _expand_range(self, toks): + fr = self._expand_number(toks[0]) + if len(toks) > 1: + to = self._expand_number(toks[1]) + if to < fr: # Wrap around max_ if necessary + return range(fr, + self.min_ + self.max_) + range(self.min_, + to + 1) + return range(fr, to + 1) + return [fr] + + def _range_steps(self, toks): + if len(toks) != 3 or not toks[2]: + raise self.ParseException('empty filter') + return self._expand_range(toks[:2])[::int(toks[2])] + + def _star_steps(self, toks): + if not toks or not toks[0]: + raise self.ParseException('empty filter') + return self._expand_star()[::int(toks[0])] + + def _expand_star(self, *args): + return range(self.min_, self.max_ + self.min_) + + def _expand_number(self, s): + if isinstance(s, basestring) and s[0] == '-': + raise self.ParseException('negative numbers not supported') + try: + i = int(s) + except ValueError: + try: + i = weekday(s) + except KeyError: + raise ValueError("Invalid weekday literal '%s'." % s) + + max_val = self.min_ + self.max_ - 1 + if i > max_val: + raise ValueError( + 'Invalid end range: %s > %s.' % (i, max_val)) + if i < self.min_: + raise ValueError( + 'Invalid beginning range: %s < %s.' % (i, self.min_)) + return i + + +class crontab(schedule): + """A crontab can be used as the `run_every` value of a + :class:`PeriodicTask` to add cron-like scheduling. + + Like a :manpage:`cron` job, you can specify units of time of when + you would like the task to execute. It is a reasonably complete + implementation of cron's features, so it should provide a fair + degree of scheduling needs. + + You can specify a minute, an hour, a day of the week, a day of the + month, and/or a month in the year in any of the following formats: + + .. attribute:: minute + + - A (list of) integers from 0-59 that represent the minutes of + an hour of when execution should occur; or + - A string representing a crontab pattern. This may get pretty + advanced, like `minute='*/15'` (for every quarter) or + `minute='1,13,30-45,50-59/2'`. + + .. attribute:: hour + + - A (list of) integers from 0-23 that represent the hours of + a day of when execution should occur; or + - A string representing a crontab pattern. This may get pretty + advanced, like `hour='*/3'` (for every three hours) or + `hour='0,8-17/2'` (at midnight, and every two hours during + office hours). + + .. attribute:: day_of_week + + - A (list of) integers from 0-6, where Sunday = 0 and Saturday = + 6, that represent the days of a week that execution should + occur. + - A string representing a crontab pattern. This may get pretty + advanced, like `day_of_week='mon-fri'` (for weekdays only). + (Beware that `day_of_week='*/2'` does not literally mean + 'every two days', but 'every day that is divisible by two'!) + + .. 
+
+        - A (list of) integers from 1-31 that represent the days of the
+          month on which execution should occur.
+        - A string representing a crontab pattern.  This may get pretty
+          advanced, such as `day_of_month='2-30/3'` (for every third day
+          of the month, starting from the 2nd) or
+          `day_of_month='1-7,15-21'` (for the first and
+          third weeks of the month).
+
+    .. attribute:: month_of_year
+
+        - A (list of) integers from 1-12 that represent the months of
+          the year during which execution can occur.
+        - A string representing a crontab pattern.  This may get pretty
+          advanced, such as `month_of_year='*/3'` (for the first month
+          of every quarter) or `month_of_year='2-12/2'` (for every even
+          numbered month).
+
+    It is important to realize that any day on which execution should
+    occur must be represented by entries in all three of the day and
+    month attributes.  For example, if `day_of_week` is 0 and `day_of_month`
+    is every seventh day, only months that begin on Sunday and are also
+    in the `month_of_year` attribute will have execution events.  Or,
+    `day_of_week` is 1 and `day_of_month` is '1-7,15-21' means every
+    first and third Monday of every month present in `month_of_year`.
+
+    """
+
+    @staticmethod
+    def _expand_cronspec(cronspec, max_, min_=0):
+        """Takes the given cronspec argument in one of the forms::
+
+            int         (like 7)
+            basestring  (like '3-5,*/15', '*', or 'monday')
+            set         (like set([0,15,30,45]))
+            list        (like [8-17])
+
+        And converts it to an (expanded) set representing all time unit
+        values on which the crontab triggers.  Only in case of the base
+        type being 'basestring', parsing occurs.  (It is fast and
+        happens only once for each crontab instance, so there is no
+        significant performance overhead involved.)
+
+        For the other base types, merely Python type conversions happen.
+
+        The argument `max_` is needed to determine the expansion of '*'
+        and ranges.
+        The argument `min_` is needed to determine the expansion of '*'
+        and ranges for 1-based cronspecs, such as day of month or month
+        of year.  The default is sufficient for minute, hour, and day of
+        week.
+
+        """
+        if isinstance(cronspec, int):
+            result = set([cronspec])
+        elif isinstance(cronspec, basestring):
+            result = crontab_parser(max_, min_).parse(cronspec)
+        elif isinstance(cronspec, set):
+            result = cronspec
+        elif is_iterable(cronspec):
+            result = set(cronspec)
+        else:
+            raise TypeError(
+                'Argument cronspec needs to be of any of the '
+                'following types: int, basestring, or an iterable type. '
+                "'%s' was given." % type(cronspec))
+
+        # assure the result does not precede the min or exceed the max
+        for number in result:
+            if number >= max_ + min_ or number < min_:
+                raise ValueError(
+                    'Invalid crontab pattern. Valid '
+                    "range is %d-%d. '%d' was found." % (
+                        min_, max_ - 1 + min_, number))
+
+        return result
+
+    def _delta_to_next(self, last_run_at, next_hour, next_minute):
+        """
+        Takes a datetime of the last run, the next minute and hour, and
+        returns a relativedelta for the next scheduled day and time.
+        Only called when a day_of_month and/or month_of_year cronspec
+        is specified, to further limit scheduled task execution.
+        """
+        from bisect import bisect, bisect_left
+
+        datedata = AttributeDict(year=last_run_at.year)
+        days_of_month = sorted(self.day_of_month)
+        months_of_year = sorted(self.month_of_year)
+
+        def day_out_of_range(year, month, day):
+            try:
+                datetime(year=year, month=month, day=day)
+            except ValueError:
+                return True
+            return False
+
+        def roll_over():
+            while 1:
+                flag = (datedata.dom == len(days_of_month) or
+                        day_out_of_range(datedata.year,
+                                         months_of_year[datedata.moy],
+                                         days_of_month[datedata.dom]))
+                if flag:
+                    datedata.dom = 0
+                    datedata.moy += 1
+                    if datedata.moy == len(months_of_year):
+                        datedata.moy = 0
+                        datedata.year += 1
+                else:
+                    break
+
+        if last_run_at.month in self.month_of_year:
+            datedata.dom = bisect(days_of_month, last_run_at.day)
+            datedata.moy = bisect_left(months_of_year, last_run_at.month)
+        else:
+            datedata.dom = 0
+            datedata.moy = bisect(months_of_year, last_run_at.month)
+            if datedata.moy == len(months_of_year):
+                datedata.moy = 0
+        roll_over()
+
+        while 1:
+            th = datetime(year=datedata.year,
+                          month=months_of_year[datedata.moy],
+                          day=days_of_month[datedata.dom])
+            if th.isoweekday() % 7 in self.day_of_week:
+                break
+            datedata.dom += 1
+            roll_over()
+
+        return relativedelta(year=datedata.year,
+                             month=months_of_year[datedata.moy],
+                             day=days_of_month[datedata.dom],
+                             hour=next_hour,
+                             minute=next_minute,
+                             second=0,
+                             microsecond=0)
+
+    def __init__(self, minute='*', hour='*', day_of_week='*',
+                 day_of_month='*', month_of_year='*', nowfun=None):
+        self._orig_minute = cronfield(minute)
+        self._orig_hour = cronfield(hour)
+        self._orig_day_of_week = cronfield(day_of_week)
+        self._orig_day_of_month = cronfield(day_of_month)
+        self._orig_month_of_year = cronfield(month_of_year)
+        self.hour = self._expand_cronspec(hour, 24)
+        self.minute = self._expand_cronspec(minute, 60)
+        self.day_of_week = self._expand_cronspec(day_of_week, 7)
+        self.day_of_month = self._expand_cronspec(day_of_month, 31, 1)
+        self.month_of_year = self._expand_cronspec(month_of_year, 12, 1)
+        self.nowfun = nowfun
+
+    def now(self):
+        return (self.nowfun or self.app.now)()
+
+    def __repr__(self):
+        return '<crontab: %s %s %s %s %s (m/h/d/dM/MY)>' % (
+            self._orig_minute,
+            self._orig_hour,
+            self._orig_day_of_week,
+            self._orig_day_of_month,
+            self._orig_month_of_year,
+        )
+
+    def __reduce__(self):
+        return (self.__class__, (self._orig_minute,
+                                 self._orig_hour,
+                                 self._orig_day_of_week,
+                                 self._orig_day_of_month,
+                                 self._orig_month_of_year), None)
+
+    def remaining_estimate(self, last_run_at, tz=None):
+        """Returns when the periodic task should run next as a timedelta."""
+        tz = tz or self.tz
+        last_run_at = self.maybe_make_aware(last_run_at)
+        dow_num = last_run_at.isoweekday() % 7  # Sunday is day 0, not day 7
+
+        execute_this_date = (last_run_at.month in self.month_of_year and
+                             last_run_at.day in self.day_of_month and
+                             dow_num in self.day_of_week)
+
+        execute_this_hour = (execute_this_date and
+                             last_run_at.hour in self.hour and
+                             last_run_at.minute < max(self.minute))
+
+        if execute_this_hour:
+            next_minute = min(minute for minute in self.minute
+                              if minute > last_run_at.minute)
+            delta = relativedelta(minute=next_minute,
+                                  second=0,
+                                  microsecond=0)
+        else:
+            next_minute = min(self.minute)
+            execute_today = (execute_this_date and
+                             last_run_at.hour < max(self.hour))
+
+            if execute_today:
+                next_hour = min(hour for hour in self.hour
+                                if hour > last_run_at.hour)
+                delta = relativedelta(hour=next_hour,
+                                      minute=next_minute,
+                                      second=0,
+                                      microsecond=0)
+            else:
+                next_hour = min(self.hour)
+                all_dom_moy = (self._orig_day_of_month
== '*' and + self._orig_month_of_year == '*') + if all_dom_moy: + next_day = min([day for day in self.day_of_week + if day > dow_num] or self.day_of_week) + add_week = next_day == dow_num + + delta = relativedelta(weeks=add_week and 1 or 0, + weekday=(next_day - 1) % 7, + hour=next_hour, + minute=next_minute, + second=0, + microsecond=0) + else: + delta = self._delta_to_next(last_run_at, + next_hour, next_minute) + + now = self.maybe_make_aware(self.now()) + return remaining(self.to_local(last_run_at), delta, + self.to_local(now)) + + def is_due(self, last_run_at): + """Returns tuple of two items `(is_due, next_time_to_run)`, + where next time to run is in seconds. + + See :meth:`celery.schedules.schedule.is_due` for more information. + + """ + rem_delta = self.remaining_estimate(last_run_at) + rem = timedelta_seconds(rem_delta) + due = rem == 0 + if due: + rem_delta = self.remaining_estimate(self.now()) + rem = timedelta_seconds(rem_delta) + return due, rem + + def __eq__(self, other): + if isinstance(other, crontab): + return (other.month_of_year == self.month_of_year and + other.day_of_month == self.day_of_month and + other.day_of_week == self.day_of_week and + other.hour == self.hour and + other.minute == self.minute) + return other is self + + +def maybe_schedule(s, relative=False): + if isinstance(s, int): + s = timedelta(seconds=s) + if isinstance(s, timedelta): + return schedule(s, relative) + return s diff --git a/awx/lib/site-packages/celery/security/__init__.py b/awx/lib/site-packages/celery/security/__init__.py new file mode 100644 index 0000000000..cb2b4ca47d --- /dev/null +++ b/awx/lib/site-packages/celery/security/__init__.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +""" + celery.security + ~~~~~~~~~~~~~~~ + + Module implementing the signing message serializer. + +""" +from __future__ import absolute_import +from __future__ import with_statement + +from kombu.serialization import registry + +from celery import current_app +from celery.exceptions import ImproperlyConfigured + +from .serialization import register_auth + +SSL_NOT_INSTALLED = """\ +You need to install the pyOpenSSL library to use the auth serializer. +Please install by: + + $ pip install pyOpenSSL +""" + +SETTING_MISSING = """\ +Sorry, but you have to configure the + * CELERY_SECURITY_KEY + * CELERY_SECURITY_CERTIFICATE, and the + * CELERY_SECURITY_CERT_STORE +configuration settings to use the auth serializer. + +Please see the configuration reference for more information. +""" + + +def disable_untrusted_serializers(whitelist=None): + for name in set(registry._decoders) - set(whitelist or []): + registry.disable(name) + + +def setup_security(allowed_serializers=None, key=None, cert=None, store=None, + digest='sha1', serializer='json'): + """Setup the message-signing serializer. + + Disables untrusted serializers and if configured to use the ``auth`` + serializer will register the auth serializer with the provided settings + into the Kombu serializer registry. + + :keyword allowed_serializers: List of serializer names, or content_types + that should be exempt from being disabled. + :keyword key: Name of private key file to use. + Defaults to the :setting:`CELERY_SECURITY_KEY` setting. + :keyword cert: Name of certificate file to use. + Defaults to the :setting:`CELERY_SECURITY_CERTIFICATE` setting. + :keyword store: Directory containing certificates. + Defaults to the :setting:`CELERY_SECURITY_CERT_STORE` setting. + :keyword digest: Digest algorithm used when signing messages. + Default is ``sha1``. 
+ :keyword serializer: Serializer used to encode messages after + they have been signed. See :setting:`CELERY_TASK_SERIALIZER` for + the serializers supported. + Default is ``json``. + + """ + + disable_untrusted_serializers(allowed_serializers) + + conf = current_app.conf + if conf.CELERY_TASK_SERIALIZER != 'auth': + return + + try: + from OpenSSL import crypto # noqa + except ImportError: + raise ImproperlyConfigured(SSL_NOT_INSTALLED) + + key = key or conf.CELERY_SECURITY_KEY + cert = cert or conf.CELERY_SECURITY_CERTIFICATE + store = store or conf.CELERY_SECURITY_CERT_STORE + + if not (key and cert and store): + raise ImproperlyConfigured(SETTING_MISSING) + + with open(key) as kf: + with open(cert) as cf: + register_auth(kf.read(), cf.read(), store) diff --git a/awx/lib/site-packages/celery/security/certificate.py b/awx/lib/site-packages/celery/security/certificate.py new file mode 100644 index 0000000000..218f542961 --- /dev/null +++ b/awx/lib/site-packages/celery/security/certificate.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- +""" + celery.security.certificate + ~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + X.509 certificates. + +""" +from __future__ import absolute_import +from __future__ import with_statement + +import glob +import os + +from celery.exceptions import SecurityError + +from .utils import crypto, reraise_errors + + +class Certificate(object): + """X.509 certificate.""" + + def __init__(self, cert): + assert crypto is not None + with reraise_errors('Invalid certificate: %r'): + self._cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert) + + def has_expired(self): + """Check if the certificate has expired.""" + return self._cert.has_expired() + + def get_serial_number(self): + """Returns the certificates serial number.""" + return self._cert.get_serial_number() + + def get_issuer(self): + """Returns issuer (CA) as a string""" + return ' '.join(x[1] for x in + self._cert.get_issuer().get_components()) + + def get_id(self): + """Serial number/issuer pair uniquely identifies a certificate""" + return '%s %s' % (self.get_issuer(), self.get_serial_number()) + + def verify(self, data, signature, digest): + """Verifies the signature for string containing data.""" + with reraise_errors('Bad signature: %r'): + crypto.verify(self._cert, signature, data, digest) + + +class CertStore(object): + """Base class for certificate stores""" + + def __init__(self): + self._certs = {} + + def itercerts(self): + """an iterator over the certificates""" + for c in self._certs.itervalues(): + yield c + + def __getitem__(self, id): + """get certificate by id""" + try: + return self._certs[id] + except KeyError: + raise SecurityError('Unknown certificate: %r' % (id, )) + + def add_cert(self, cert): + if cert.get_id() in self._certs: + raise SecurityError('Duplicate certificate: %r' % (id, )) + self._certs[cert.get_id()] = cert + + +class FSCertStore(CertStore): + """File system certificate store""" + + def __init__(self, path): + CertStore.__init__(self) + if os.path.isdir(path): + path = os.path.join(path, '*') + for p in glob.glob(path): + with open(p) as f: + cert = Certificate(f.read()) + if cert.has_expired(): + raise SecurityError( + 'Expired certificate: %r' % (cert.get_id(), )) + self.add_cert(cert) diff --git a/awx/lib/site-packages/celery/security/key.py b/awx/lib/site-packages/celery/security/key.py new file mode 100644 index 0000000000..528fab9e73 --- /dev/null +++ b/awx/lib/site-packages/celery/security/key.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +""" + celery.security.key + 
~~~~~~~~~~~~~~~~~~~ + + Private key for the security serializer. + +""" +from __future__ import absolute_import +from __future__ import with_statement + +from .utils import crypto, reraise_errors + + +class PrivateKey(object): + + def __init__(self, key): + with reraise_errors('Invalid private key: %r'): + self._key = crypto.load_privatekey(crypto.FILETYPE_PEM, key) + + def sign(self, data, digest): + """sign string containing data.""" + with reraise_errors('Unable to sign data: %r'): + return crypto.sign(self._key, data, digest) diff --git a/awx/lib/site-packages/celery/security/serialization.py b/awx/lib/site-packages/celery/security/serialization.py new file mode 100644 index 0000000000..4284f47480 --- /dev/null +++ b/awx/lib/site-packages/celery/security/serialization.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +""" + celery.security.serialization + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Secure serializer. + +""" +from __future__ import absolute_import +from __future__ import with_statement + +import base64 + +from kombu.serialization import registry, encode, decode +from kombu.utils.encoding import bytes_to_str, str_to_bytes + +from .certificate import Certificate, FSCertStore +from .key import PrivateKey +from .utils import reraise_errors + + +def b64encode(s): + return bytes_to_str(base64.b64encode(str_to_bytes(s))) + + +def b64decode(s): + return base64.b64decode(str_to_bytes(s)) + + +class SecureSerializer(object): + + def __init__(self, key=None, cert=None, cert_store=None, + digest='sha1', serializer='json'): + self._key = key + self._cert = cert + self._cert_store = cert_store + self._digest = digest + self._serializer = serializer + + def serialize(self, data): + """serialize data structure into string""" + assert self._key is not None + assert self._cert is not None + with reraise_errors('Unable to serialize: %r', (Exception, )): + content_type, content_encoding, body = encode( + data, serializer=self._serializer) + # What we sign is the serialized body, not the body itself. + # this way the receiver doesn't have to decode the contents + # to verify the signature (and thus avoiding potential flaws + # in the decoding step). 
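            # [editor's note -- annotation, not part of the upstream diff]
            # Per _pack()/_unpack() further down in this file, the value
            # returned below is a base64 encoding of five fields joined
            # by the '\x00\x01' separator:
            #     signer SEP signature SEP content_type SEP
            #     content_encoding SEP body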
+ return self._pack(body, content_type, content_encoding, + signature=self._key.sign(body, self._digest), + signer=self._cert.get_id()) + + def deserialize(self, data): + """deserialize data structure from string""" + assert self._cert_store is not None + with reraise_errors('Unable to deserialize: %r', (Exception, )): + payload = self._unpack(data) + signature, signer, body = (payload['signature'], + payload['signer'], + payload['body']) + self._cert_store[signer].verify(body, signature, self._digest) + return decode(body, payload['content_type'], + payload['content_encoding'], force=True) + + def _pack(self, body, content_type, content_encoding, signer, signature, + sep='\x00\x01'): + return b64encode(sep.join([signer, signature, + content_type, content_encoding, body])) + + def _unpack(self, payload, sep='\x00\x01', + fields=('signer', 'signature', 'content_type', + 'content_encoding', 'body')): + return dict(zip(fields, b64decode(payload).split(sep))) + + +def register_auth(key=None, cert=None, store=None, digest='sha1', + serializer='json'): + """register security serializer""" + s = SecureSerializer(key and PrivateKey(key), + cert and Certificate(cert), + store and FSCertStore(store), + digest=digest, serializer=serializer) + registry.register('auth', s.serialize, s.deserialize, + content_type='application/data', + content_encoding='utf-8') diff --git a/awx/lib/site-packages/celery/security/utils.py b/awx/lib/site-packages/celery/security/utils.py new file mode 100644 index 0000000000..37b2286a78 --- /dev/null +++ b/awx/lib/site-packages/celery/security/utils.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +""" + celery.security.utils + ~~~~~~~~~~~~~~~~~~~~~ + + Utilities used by the message signing serializer. + +""" +from __future__ import absolute_import + +import sys + +from contextlib import contextmanager + +from celery.exceptions import SecurityError + +try: + from OpenSSL import crypto +except ImportError: # pragma: no cover + crypto = None # noqa + + +@contextmanager +def reraise_errors(msg='%r', errors=None): + assert crypto is not None + errors = (crypto.Error, ) if errors is None else errors + try: + yield + except errors, exc: + raise SecurityError, SecurityError(msg % (exc, )), sys.exc_info()[2] diff --git a/awx/lib/site-packages/celery/signals.py b/awx/lib/site-packages/celery/signals.py new file mode 100644 index 0000000000..3e34be0555 --- /dev/null +++ b/awx/lib/site-packages/celery/signals.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +""" + celery.signals + ~~~~~~~~~~~~~~ + + This module defines the signals (Observer pattern) sent by + both workers and clients. + + Functions can be connected to these signals, and connected + functions are called whenever a signal is called. + + See :ref:`signals` for more information. 
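    # [editor's note -- annotation, not part of the upstream diff]
    # A minimal receiver sketch for the signals defined below; the
    # handler name and body are illustrative only:
    #
    #     from celery.signals import task_postrun
    #
    #     def on_task_done(sender=None, task_id=None, retval=None, **kwargs):
    #         print('task %s finished' % task_id)
    #
    #     task_postrun.connect(on_task_done)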
+ +""" +from __future__ import absolute_import +from .utils.dispatch import Signal + +task_sent = Signal(providing_args=[ + 'task_id', 'task', 'args', 'kwargs', 'eta', 'taskset']) +task_prerun = Signal(providing_args=['task_id', 'task', 'args', 'kwargs']) +task_postrun = Signal(providing_args=[ + 'task_id', 'task', 'args', 'kwargs', 'retval']) +task_success = Signal(providing_args=['result']) +task_retry = Signal(providing_args=[ + 'request', 'reason', 'einfo', +]) +task_failure = Signal(providing_args=[ + 'task_id', 'exception', 'args', 'kwargs', 'traceback', 'einfo']) +task_revoked = Signal(providing_args=['terminated', 'signum', 'expired']) +celeryd_init = Signal(providing_args=['instance', 'conf']) +celeryd_after_setup = Signal(providing_args=['instance', 'conf']) +worker_init = Signal(providing_args=[]) +worker_process_init = Signal(providing_args=[]) +worker_ready = Signal(providing_args=[]) +worker_shutdown = Signal(providing_args=[]) +setup_logging = Signal(providing_args=[ + 'loglevel', 'logfile', 'format', 'colorize']) +after_setup_logger = Signal(providing_args=[ + 'logger', 'loglevel', 'logfile', 'format', 'colorize']) +after_setup_task_logger = Signal(providing_args=[ + 'logger', 'loglevel', 'logfile', 'format', 'colorize']) +beat_init = Signal(providing_args=[]) +beat_embedded_init = Signal(providing_args=[]) +eventlet_pool_started = Signal(providing_args=[]) +eventlet_pool_preshutdown = Signal(providing_args=[]) +eventlet_pool_postshutdown = Signal(providing_args=[]) +eventlet_pool_apply = Signal(providing_args=['target', 'args', 'kwargs']) diff --git a/awx/lib/site-packages/celery/states.py b/awx/lib/site-packages/celery/states.py new file mode 100644 index 0000000000..4563fb72f7 --- /dev/null +++ b/awx/lib/site-packages/celery/states.py @@ -0,0 +1,137 @@ +# -*- coding: utf-8 -*- +""" +celery.states +============= + +Built-in task states. + +.. _states: + +States +------ + +See :ref:`task-states`. + +.. _statesets: + +Sets +---- + +.. state:: READY_STATES + +READY_STATES +~~~~~~~~~~~~ + +Set of states meaning the task result is ready (has been executed). + +.. state:: UNREADY_STATES + +UNREADY_STATES +~~~~~~~~~~~~~~ + +Set of states meaning the task result is not ready (has not been executed). + +.. state:: EXCEPTION_STATES + +EXCEPTION_STATES +~~~~~~~~~~~~~~~~ + +Set of states meaning the task returned an exception. + +.. state:: PROPAGATE_STATES + +PROPAGATE_STATES +~~~~~~~~~~~~~~~~ + +Set of exception states that should propagate exceptions to the user. + +.. state:: ALL_STATES + +ALL_STATES +~~~~~~~~~~ + +Set of all possible states. + + +Misc. +----- + +""" +from __future__ import absolute_import + +#: State precedence. +#: None represents the precedence of an unknown state. +#: Lower index means higher precedence. +PRECEDENCE = ['SUCCESS', + 'FAILURE', + None, + 'REVOKED', + 'STARTED', + 'RECEIVED', + 'RETRY', + 'PENDING'] + + +def precedence(state): + """Get the precedence index for state. + + Lower index means higher precedence. 
+ + """ + try: + return PRECEDENCE.index(state) + except ValueError: + return PRECEDENCE.index(None) + + +class state(str): + """State is a subclass of :class:`str`, implementing comparison + methods adhering to state precedence rules:: + + >>> from celery.states import state, PENDING, SUCCESS + + >>> state(PENDING) < state(SUCCESS) + True + + Any custom state is considered to be lower than :state:`FAILURE` and + :state:`SUCCESS`, but higher than any of the other built-in states:: + + >>> state('PROGRESS') > state(STARTED) + True + + >>> state('PROGRESS') > state('SUCCESS') + False + + """ + + def compare(self, other, fun): + return fun(precedence(self), precedence(other)) + + def __gt__(self, other): + return self.compare(other, lambda a, b: a < b) + + def __ge__(self, other): + return self.compare(other, lambda a, b: a <= b) + + def __lt__(self, other): + return self.compare(other, lambda a, b: a > b) + + def __le__(self, other): + return self.compare(other, lambda a, b: a >= b) + +PENDING = 'PENDING' +RECEIVED = 'RECEIVED' +STARTED = 'STARTED' +SUCCESS = 'SUCCESS' +FAILURE = 'FAILURE' +REVOKED = 'REVOKED' +RETRY = 'RETRY' +IGNORED = 'IGNORED' + +READY_STATES = frozenset([SUCCESS, FAILURE, REVOKED]) +UNREADY_STATES = frozenset([PENDING, RECEIVED, STARTED, RETRY]) +EXCEPTION_STATES = frozenset([RETRY, FAILURE, REVOKED]) +PROPAGATE_STATES = frozenset([FAILURE, REVOKED]) + +ALL_STATES = frozenset([PENDING, RECEIVED, STARTED, + SUCCESS, FAILURE, RETRY, REVOKED]) diff --git a/awx/lib/site-packages/celery/task/__init__.py b/awx/lib/site-packages/celery/task/__init__.py new file mode 100644 index 0000000000..d2a04118b8 --- /dev/null +++ b/awx/lib/site-packages/celery/task/__init__.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +""" + celery.task + ~~~~~~~~~~~ + + This is the old task module, it should not be used anymore, + import from the main 'celery' module instead. + If you're looking for the decorator implementation then that's in + ``celery.app.base.Celery.task``. + +""" +from __future__ import absolute_import + +from celery._state import current_app, current_task as current +from celery.__compat__ import MagicModule, recreate_module +from celery.local import Proxy + +__all__ = [ + 'BaseTask', 'Task', 'PeriodicTask', 'task', 'periodic_task', + 'group', 'chord', 'subtask', 'TaskSet', +] + + +STATICA_HACK = True +globals()['kcah_acitats'[::-1].upper()] = False +if STATICA_HACK: + # This is never executed, but tricks static analyzers (PyDev, PyCharm, + # pylint, etc.) into knowing the types of these symbols, and what + # they contain. 
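    # [editor's note -- annotation, not part of the upstream diff]
    # 'kcah_acitats'[::-1].upper() evaluates to 'STATICA_HACK', so the
    # globals() assignment above flips STATICA_HACK back to False at
    # runtime; the imports below are therefore never executed, only
    # indexed by static analyzers.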
+ from celery.canvas import group, chord, subtask + from .base import BaseTask, Task, PeriodicTask, task, periodic_task + from .sets import TaskSet + + +class module(MagicModule): + + def __call__(self, *args, **kwargs): + return self.task(*args, **kwargs) + + +old_module, new_module = recreate_module( # pragma: no cover + __name__, + by_module={ + 'celery.task.base': ['BaseTask', 'Task', 'PeriodicTask', + 'task', 'periodic_task'], + 'celery.canvas': ['group', 'chord', 'subtask'], + 'celery.task.sets': ['TaskSet'], + }, + base=module, + __package__='celery.task', + __file__=__file__, + __path__=__path__, + __doc__=__doc__, + current=current, + discard_all=Proxy(lambda: current_app.control.purge), + backend_cleanup=Proxy( + lambda: current_app.tasks['celery.backend_cleanup'] + ), +) diff --git a/awx/lib/site-packages/celery/task/base.py b/awx/lib/site-packages/celery/task/base.py new file mode 100644 index 0000000000..85e6a39f1a --- /dev/null +++ b/awx/lib/site-packages/celery/task/base.py @@ -0,0 +1,228 @@ +# -*- coding: utf-8 -*- +""" + celery.task.base + ~~~~~~~~~~~~~~~~ + + The task implementation has been moved to :mod:`celery.app.task`. + + This contains the backward compatible Task class used in the old API, + and shouldn't be used in new applications. + +""" +from __future__ import absolute_import + +from kombu import Exchange + +from celery import current_app +from celery.__compat__ import class_property, reclassmethod +from celery.app.task import Context, TaskType, Task as BaseTask # noqa +from celery.schedules import maybe_schedule +from celery.utils.log import get_task_logger + +#: list of methods that must be classmethods in the old API. +_COMPAT_CLASSMETHODS = ( + 'delay', 'apply_async', 'retry', 'apply', 'subtask_from_request', + 'AsyncResult', 'subtask', '_get_request', +) + + +class Task(BaseTask): + """Deprecated Task base class. + + Modern applications should use :class:`celery.Task` instead. + + """ + abstract = True + __bound__ = False + __v2_compat__ = True + + #- Deprecated compat. attributes -: + + queue = None + routing_key = None + exchange = None + exchange_type = None + delivery_mode = None + mandatory = False # XXX deprecated + immediate = False # XXX deprecated + priority = None + type = 'regular' + error_whitelist = () + disable_error_emails = False + accept_magic_kwargs = False + + from_config = BaseTask.from_config + ( + ('exchange_type', 'CELERY_DEFAULT_EXCHANGE_TYPE'), + ('delivery_mode', 'CELERY_DEFAULT_DELIVERY_MODE'), + ('error_whitelist', 'CELERY_TASK_ERROR_WHITELIST'), + ) + + # In old Celery the @task decorator didn't exist, so one would create + # classes instead and use them directly (e.g. MyTask.apply_async()). + # the use of classmethods was a hack so that it was not necessary + # to instantiate the class before using it, but it has only + # given us pain (like all magic). + for name in _COMPAT_CLASSMETHODS: + locals()[name] = reclassmethod(getattr(BaseTask, name)) + + @class_property + @classmethod + def request(cls): + return cls._get_request() + + @classmethod + def get_logger(self, **kwargs): + return get_task_logger(self.name) + + @classmethod + def establish_connection(self, connect_timeout=None): + """Deprecated method used to get a broker connection. + + Should be replaced with :meth:`@Celery.connection` + instead, or by acquiring connections from the connection pool: + + .. code-block:: python + + # using the connection pool + with celery.pool.acquire(block=True) as conn: + ... 
+ + # establish fresh connection + with celery.connection() as conn: + ... + """ + return self._get_app().connection( + connect_timeout=connect_timeout) + + def get_publisher(self, connection=None, exchange=None, + connect_timeout=None, exchange_type=None, **options): + """Deprecated method to get the task publisher (now called producer). + + Should be replaced with :class:`@amqp.TaskProducer`: + + .. code-block:: python + + with celery.connection() as conn: + with celery.amqp.TaskProducer(conn) as prod: + my_task.apply_async(producer=prod) + + """ + exchange = self.exchange if exchange is None else exchange + if exchange_type is None: + exchange_type = self.exchange_type + connection = connection or self.establish_connection(connect_timeout) + return self._get_app().amqp.TaskProducer( + connection, + exchange=exchange and Exchange(exchange, exchange_type), + routing_key=self.routing_key, **options + ) + + @classmethod + def get_consumer(self, connection=None, queues=None, **kwargs): + """Deprecated method used to get consumer for the queue + this task is sent to. + + Should be replaced with :class:`@amqp.TaskConsumer` instead: + + """ + Q = self._get_app().amqp + connection = connection or self.establish_connection() + if queues is None: + queues = Q.queues[self.queue] if self.queue else Q.default_queue + return Q.TaskConsumer(connection, queues, **kwargs) + + +class PeriodicTask(Task): + """A periodic task is a task that adds itself to the + :setting:`CELERYBEAT_SCHEDULE` setting.""" + abstract = True + ignore_result = True + relative = False + options = None + compat = True + + def __init__(self): + if not hasattr(self, 'run_every'): + raise NotImplementedError( + 'Periodic tasks must have a run_every attribute') + self.run_every = maybe_schedule(self.run_every, self.relative) + super(PeriodicTask, self).__init__() + + @classmethod + def on_bound(cls, app): + app.conf.CELERYBEAT_SCHEDULE[cls.name] = { + 'task': cls.name, + 'schedule': cls.run_every, + 'args': (), + 'kwargs': {}, + 'options': cls.options or {}, + 'relative': cls.relative, + } + + +def task(*args, **kwargs): + """Decorator to create a task class out of any callable. + + **Examples** + + .. code-block:: python + + @task() + def refresh_feed(url): + return Feed.objects.get(url=url).refresh() + + With setting extra options and using retry. + + .. code-block:: python + + @task(max_retries=10) + def refresh_feed(url): + try: + return Feed.objects.get(url=url).refresh() + except socket.error, exc: + refresh_feed.retry(exc=exc) + + Calling the resulting task: + + >>> refresh_feed('http://example.com/rss') # Regular + + >>> refresh_feed.delay('http://example.com/rss') # Async + + """ + return current_app.task(*args, **dict({'accept_magic_kwargs': False, + 'base': Task}, **kwargs)) + + +def periodic_task(*args, **options): + """Decorator to create a task class out of any callable. + + .. admonition:: Examples + + .. code-block:: python + + @task() + def refresh_feed(url): + return Feed.objects.get(url=url).refresh() + + With setting extra options and using retry. + + .. 
code-block:: python + + from celery.task import current + + @task(exchange='feeds') + def refresh_feed(url): + try: + return Feed.objects.get(url=url).refresh() + except socket.error, exc: + current.retry(exc=exc) + + Calling the resulting task: + + >>> refresh_feed('http://example.com/rss') # Regular + + >>> refresh_feed.delay('http://example.com/rss') # Async + + + """ + return task(**dict({'base': PeriodicTask}, **options)) diff --git a/awx/lib/site-packages/celery/task/http.py b/awx/lib/site-packages/celery/task/http.py new file mode 100644 index 0000000000..c9f776aff9 --- /dev/null +++ b/awx/lib/site-packages/celery/task/http.py @@ -0,0 +1,216 @@ +# -*- coding: utf-8 -*- +""" + celery.task.http + ~~~~~~~~~~~~~~~~ + + Webhook task implementation. + +""" +from __future__ import absolute_import + +import anyjson +import sys +import urllib2 + +from urllib import urlencode +from urlparse import urlparse +try: + from urlparse import parse_qsl +except ImportError: # pragma: no cover + from cgi import parse_qsl # noqa + +from celery import __version__ as celery_version +from celery.utils.log import get_task_logger +from .base import Task as BaseTask + +GET_METHODS = frozenset(['GET', 'HEAD']) +logger = get_task_logger(__name__) + + +class InvalidResponseError(Exception): + """The remote server gave an invalid response.""" + + +class RemoteExecuteError(Exception): + """The remote task gave a custom error.""" + + +class UnknownStatusError(InvalidResponseError): + """The remote server gave an unknown status.""" + + +def maybe_utf8(value): + """Encode to utf-8, only if the value is Unicode.""" + if isinstance(value, unicode): + return value.encode('utf-8') + return value + + +if sys.version_info[0] == 3: # pragma: no cover + + def utf8dict(tup): + if not isinstance(tup, dict): + return dict(tup) + return tup +else: + + def utf8dict(tup): # noqa + """With a dict's items() tuple return a new dict with any utf-8 + keys/values encoded.""" + return dict((key.encode('utf-8'), maybe_utf8(value)) + for key, value in tup) + + +def extract_response(raw_response, loads=anyjson.loads): + """Extract the response text from a raw JSON response.""" + if not raw_response: + raise InvalidResponseError('Empty response') + try: + payload = loads(raw_response) + except ValueError, exc: + raise InvalidResponseError, InvalidResponseError( + str(exc)), sys.exc_info()[2] + + status = payload['status'] + if status == 'success': + return payload['retval'] + elif status == 'failure': + raise RemoteExecuteError(payload.get('reason')) + else: + raise UnknownStatusError(str(status)) + + +class MutableURL(object): + """Object wrapping a Uniform Resource Locator. + + Supports editing the query parameter list. + You can convert the object back to a string, the query will be + properly urlencoded. 
+ + Examples + + >>> url = URL('http://www.google.com:6580/foo/bar?x=3&y=4#foo') + >>> url.query + {'x': '3', 'y': '4'} + >>> str(url) + 'http://www.google.com:6580/foo/bar?y=4&x=3#foo' + >>> url.query['x'] = 10 + >>> url.query.update({'George': 'Costanza'}) + >>> str(url) + 'http://www.google.com:6580/foo/bar?y=4&x=10&George=Costanza#foo' + + """ + def __init__(self, url): + self.parts = urlparse(url) + self.query = dict(parse_qsl(self.parts[4])) + + def __str__(self): + scheme, netloc, path, params, query, fragment = self.parts + query = urlencode(utf8dict(self.query.items())) + components = [scheme + '://', netloc, path or '/', + ';%s' % params if params else '', + '?%s' % query if query else '', + '#%s' % fragment if fragment else ''] + return ''.join(c for c in components if c) + + def __repr__(self): + return '<%s: %s>' % (self.__class__.__name__, str(self)) + + +class HttpDispatch(object): + """Make task HTTP request and collect the task result. + + :param url: The URL to request. + :param method: HTTP method used. Currently supported methods are `GET` + and `POST`. + :param task_kwargs: Task keyword arguments. + :param logger: Logger used for user/system feedback. + + """ + user_agent = 'celery/%s' % celery_version + timeout = 5 + + def __init__(self, url, method, task_kwargs, **kwargs): + self.url = url + self.method = method + self.task_kwargs = task_kwargs + self.logger = kwargs.get("logger") or logger + + def make_request(self, url, method, params): + """Makes an HTTP request and returns the response.""" + request = urllib2.Request(url, params) + for key, val in self.http_headers.items(): + request.add_header(key, val) + response = urllib2.urlopen(request) # user catches errors. + return response.read() + + def dispatch(self): + """Dispatch callback and return result.""" + url = MutableURL(self.url) + params = None + if self.method in GET_METHODS: + url.query.update(self.task_kwargs) + else: + params = urlencode(utf8dict(self.task_kwargs.items())) + raw_response = self.make_request(str(url), self.method, params) + return extract_response(raw_response) + + @property + def http_headers(self): + headers = {'User-Agent': self.user_agent} + return headers + + +class HttpDispatchTask(BaseTask): + """Task dispatching to an URL. + + :keyword url: The URL location of the HTTP callback task. + :keyword method: Method to use when dispatching the callback. Usually + `GET` or `POST`. + :keyword \*\*kwargs: Keyword arguments to pass on to the HTTP callback. + + .. attribute:: url + + If this is set, this is used as the default URL for requests. + Default is to require the user of the task to supply the url as an + argument, as this attribute is intended for subclasses. + + .. attribute:: method + + If this is set, this is the default method used for requests. + Default is to require the user of the task to supply the method as an + argument, as this attribute is intended for subclasses. + + """ + + url = None + method = None + accept_magic_kwargs = False + + def run(self, url=None, method='GET', **kwargs): + url = url or self.url + method = method or self.method + return HttpDispatch(url, method, kwargs).dispatch() + + +class URL(MutableURL): + """HTTP Callback URL + + Supports requesting an URL asynchronously. + + :param url: URL to request. + :keyword dispatcher: Class used to dispatch the request. + By default this is :class:`HttpDispatchTask`. 
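    # [editor's note -- annotation, not part of the upstream diff]
    # Usage sketch, assuming a webhook endpoint at the given URL
    # (the endpoint address is hypothetical):
    #
    #     >>> url = URL('http://example.com/tasks/refresh')
    #     >>> result = url.get_async(name='feeds')  # HttpDispatchTask.delay(...)
    #     >>> result.get()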
+
+    """
+    dispatcher = HttpDispatchTask
+
+    def __init__(self, url, dispatcher=None):
+        super(URL, self).__init__(url)
+        self.dispatcher = dispatcher or self.dispatcher
+
+    def get_async(self, **kwargs):
+        return self.dispatcher.delay(str(self), 'GET', **kwargs)
+
+    def post_async(self, **kwargs):
+        return self.dispatcher.delay(str(self), 'POST', **kwargs)
diff --git a/awx/lib/site-packages/celery/task/sets.py b/awx/lib/site-packages/celery/task/sets.py
new file mode 100644
index 0000000000..33630f466e
--- /dev/null
+++ b/awx/lib/site-packages/celery/task/sets.py
@@ -0,0 +1,77 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.task.sets
+    ~~~~~~~~~~~~~~~~
+
+    Old ``group`` implementation; this module should
+    not be used anymore, use :func:`celery.group` instead.
+
+"""
+from __future__ import absolute_import
+from __future__ import with_statement
+
+from celery._state import get_current_worker_task
+from celery.app import app_or_default
+from celery.canvas import subtask, maybe_subtask  # noqa
+from celery.utils import uuid
+
+
+class TaskSet(list):
+    """A task containing several subtasks, making it possible
+    to track how many, or when all of the tasks have been completed.
+
+    :param tasks: A list of :class:`subtask` instances.
+
+    Example::
+
+        >>> urls = ('http://cnn.com/rss', 'http://bbc.co.uk/rss')
+        >>> s = TaskSet(refresh_feed.s(url) for url in urls)
+        >>> taskset_result = s.apply_async()
+        >>> list_of_return_values = taskset_result.join()  # *expensive*
+
+    """
+    app = None
+
+    def __init__(self, tasks=None, app=None, Publisher=None):
+        super(TaskSet, self).__init__(maybe_subtask(t) for t in tasks or [])
+        self.app = app_or_default(app or self.app)
+        self.Publisher = Publisher or self.app.amqp.TaskProducer
+        self.total = len(self)  # XXX compat
+
+    def apply_async(self, connection=None, connect_timeout=None,
+                    publisher=None, taskset_id=None):
+        """Apply TaskSet."""
+        app = self.app
+
+        if app.conf.CELERY_ALWAYS_EAGER:
+            return self.apply(taskset_id=taskset_id)
+
+        with app.connection_or_acquire(connection, connect_timeout) as conn:
+            setid = taskset_id or uuid()
+            pub = publisher or self.Publisher(conn)
+            results = self._async_results(setid, pub)
+
+            result = app.TaskSetResult(setid, results)
+            parent = get_current_worker_task()
+            if parent:
+                parent.request.children.append(result)
+            return result
+
+    def _async_results(self, taskset_id, publisher):
+        return [task.apply_async(taskset_id=taskset_id, publisher=publisher)
+                for task in self]
+
+    def apply(self, taskset_id=None):
+        """Applies the TaskSet locally by blocking until all tasks return."""
+        setid = taskset_id or uuid()
+        return self.app.TaskSetResult(setid, self._sync_results(setid))
+
+    def _sync_results(self, taskset_id):
+        return [task.apply(taskset_id=taskset_id) for task in self]
+
+    def _get_tasks(self):
+        return self
+
+    def _set_tasks(self, tasks):
+        self[:] = tasks
+    tasks = property(_get_tasks, _set_tasks)
diff --git a/awx/lib/site-packages/celery/task/trace.py b/awx/lib/site-packages/celery/task/trace.py
new file mode 100644
index 0000000000..ab1a4d3e08
--- /dev/null
+++ b/awx/lib/site-packages/celery/task/trace.py
@@ -0,0 +1,418 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.task.trace
+    ~~~~~~~~~~~~~~~~~~~~
+
+    This module defines how the task execution is traced:
+    errors are recorded, handlers are applied and so on.
+
+"""
+from __future__ import absolute_import
+
+# ## ---
+# This is the heart of the worker, the inner loop so to speak.
+# It used to be split up into nice little classes and methods,
+# but in the end it only resulted in bad performance and horrible tracebacks,
+# so instead we now use one closure per task class.
+
+import os
+import socket
+import sys
+
+from warnings import warn
+
+from kombu.utils import kwdict
+
+from celery import current_app
+from celery import states, signals
+from celery._state import _task_stack
+from celery.app import set_default_app
+from celery.app.task import Task as BaseTask, Context
+from celery.datastructures import ExceptionInfo
+from celery.exceptions import Ignore, RetryTaskError
+from celery.utils.serialization import get_pickleable_exception
+from celery.utils.log import get_logger
+
+_logger = get_logger(__name__)
+
+send_prerun = signals.task_prerun.send
+prerun_receivers = signals.task_prerun.receivers
+send_postrun = signals.task_postrun.send
+postrun_receivers = signals.task_postrun.receivers
+send_success = signals.task_success.send
+success_receivers = signals.task_success.receivers
+STARTED = states.STARTED
+SUCCESS = states.SUCCESS
+IGNORED = states.IGNORED
+RETRY = states.RETRY
+FAILURE = states.FAILURE
+EXCEPTION_STATES = states.EXCEPTION_STATES
+IGNORE_STATES = frozenset([IGNORED, RETRY])
+
+#: set by :func:`setup_worker_optimizations`
+_tasks = None
+_patched = {}
+
+
+def mro_lookup(cls, attr, stop=(), monkey_patched=[]):
+    """Returns the first node by MRO order that defines an attribute.
+
+    :keyword stop: A list of types that if reached will stop the search.
+    :keyword monkey_patched: Use one of the stop classes if the attr's
+        module origin is not in this list; this is used to detect
+        monkey-patched attributes.
+
+    :returns None: if the attribute was not found.
+
+    """
+    for node in cls.mro():
+        if node in stop:
+            try:
+                attr = node.__dict__[attr]
+                module_origin = attr.__module__
+            except (AttributeError, KeyError):
+                pass
+            else:
+                if module_origin not in monkey_patched:
+                    return node
+            return
+        if attr in node.__dict__:
+            return node
+
+
+def task_has_custom(task, attr):
+    """Returns true if the task or one of its bases
+    defines ``attr`` (excluding the one in BaseTask)."""
+    return mro_lookup(task.__class__, attr, stop=(BaseTask, object),
+                      monkey_patched=['celery.app.task'])
+
+
+class TraceInfo(object):
+    __slots__ = ('state', 'retval')
+
+    def __init__(self, state, retval=None):
+        self.state = state
+        self.retval = retval
+
+    def handle_error_state(self, task, eager=False):
+        store_errors = not eager
+        if task.ignore_result:
+            store_errors = task.store_errors_even_if_ignored
+
+        return {
+            RETRY: self.handle_retry,
+            FAILURE: self.handle_failure,
+        }[self.state](task, store_errors=store_errors)
+
+    def handle_retry(self, task, store_errors=True):
+        """Handle retry exception."""
+        # the exception raised is the RetryTaskError semi-predicate,
+        # and its ``exc`` attribute is the original exception raised (if any).
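        # [editor's note -- annotation, not part of the upstream diff]
        # A hedged sketch of what typically lands here: a task body like
        #
        #     @task(max_retries=3)
        #     def fetch(url):
        #         try:
        #             return open_url(url)      # open_url is hypothetical
        #         except IOError, exc:
        #             fetch.retry(exc=exc)      # raises RetryTaskError
        #
        # so self.retval is the RetryTaskError instance and
        # self.retval.exc is the original IOError (or None).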
+        req = task.request
+        type_, _, tb = sys.exc_info()
+        try:
+            reason = self.retval
+            einfo = ExceptionInfo((type_, reason, tb))
+            if store_errors:
+                task.backend.mark_as_retry(req.id, reason.exc, einfo.traceback)
+            task.on_retry(reason.exc, req.id, req.args, req.kwargs, einfo)
+            signals.task_retry.send(sender=task, request=req,
+                                    reason=reason, einfo=einfo)
+            return einfo
+        finally:
+            del(tb)
+
+    def handle_failure(self, task, store_errors=True):
+        """Handle exception."""
+        req = task.request
+        type_, _, tb = sys.exc_info()
+        try:
+            exc = self.retval
+            einfo = ExceptionInfo((type_, get_pickleable_exception(exc), tb))
+            if store_errors:
+                task.backend.mark_as_failure(req.id, exc, einfo.traceback)
+            task.on_failure(exc, req.id, req.args, req.kwargs, einfo)
+            signals.task_failure.send(sender=task, task_id=req.id,
+                                      exception=exc, args=req.args,
+                                      kwargs=req.kwargs,
+                                      traceback=tb,
+                                      einfo=einfo)
+            return einfo
+        finally:
+            del(tb)
+
+
+def build_tracer(name, task, loader=None, hostname=None, store_errors=True,
+                 Info=TraceInfo, eager=False, propagate=False,
+                 IGNORE_STATES=IGNORE_STATES):
+    """Builds a function that traces the task's execution: it catches all
+    exceptions, and saves the state and result of the task execution
+    to the result backend.
+
+    If the call was successful, it saves the result to the task result
+    backend, and sets the task status to `"SUCCESS"`.
+
+    If the call raises :exc:`~celery.exceptions.RetryTaskError`, it extracts
+    the original exception, uses that as the result and sets the task status
+    to `"RETRY"`.
+
+    If the call results in an exception, it saves the exception as the task
+    result, and sets the task status to `"FAILURE"`.
+
+    Returns a function that takes the following arguments:
+
+        :param uuid: The unique id of the task.
+        :param args: List of positional args to pass on to the function.
+        :param kwargs: Keyword arguments mapping to pass on to the function.
+        :keyword request: Request dict.
+
+    """
+    # If the task doesn't define a custom __call__ method
+    # we optimize it away by simply calling the run method directly,
+    # saving the extra method call and a line less in the stack trace.
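    # [editor's note -- annotation, not part of the upstream diff]
    # i.e. the line below behaves roughly like
    #     fun = task if the task class overrides __call__ else task.run
    # using task_has_custom() (defined above) to detect the override.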
+ fun = task if task_has_custom(task, '__call__') else task.run + + loader = loader or current_app.loader + backend = task.backend + ignore_result = task.ignore_result + track_started = task.track_started + track_started = not eager and (task.track_started and not ignore_result) + publish_result = not eager and not ignore_result + hostname = hostname or socket.gethostname() + + loader_task_init = loader.on_task_init + loader_cleanup = loader.on_process_cleanup + + task_on_success = None + task_after_return = None + if task_has_custom(task, 'on_success'): + task_on_success = task.on_success + if task_has_custom(task, 'after_return'): + task_after_return = task.after_return + + store_result = backend.store_result + backend_cleanup = backend.process_cleanup + + pid = os.getpid() + + request_stack = task.request_stack + push_request = request_stack.push + pop_request = request_stack.pop + push_task = _task_stack.push + pop_task = _task_stack.pop + on_chord_part_return = backend.on_chord_part_return + + from celery import canvas + subtask = canvas.subtask + + def trace_task(uuid, args, kwargs, request=None): + R = I = None + kwargs = kwdict(kwargs) + try: + push_task(task) + task_request = Context(request or {}, args=args, + called_directly=False, kwargs=kwargs) + push_request(task_request) + try: + # -*- PRE -*- + if prerun_receivers: + send_prerun(sender=task, task_id=uuid, task=task, + args=args, kwargs=kwargs) + loader_task_init(uuid, task) + if track_started: + store_result(uuid, {'pid': pid, + 'hostname': hostname}, STARTED) + + # -*- TRACE -*- + try: + R = retval = fun(*args, **kwargs) + state = SUCCESS + except Ignore, exc: + I, R = Info(IGNORED, exc), ExceptionInfo(internal=True) + state, retval = I.state, I.retval + except RetryTaskError, exc: + I = Info(RETRY, exc) + state, retval = I.state, I.retval + R = I.handle_error_state(task, eager=eager) + except Exception, exc: + if propagate: + raise + I = Info(FAILURE, exc) + state, retval = I.state, I.retval + R = I.handle_error_state(task, eager=eager) + [subtask(errback).apply_async((uuid, )) + for errback in task_request.errbacks or []] + except BaseException, exc: + raise + except: # pragma: no cover + # For Python2.5 where raising strings are still allowed + # (but deprecated) + if propagate: + raise + I = Info(FAILURE, None) + state, retval = I.state, I.retval + R = I.handle_error_state(task, eager=eager) + [subtask(errback).apply_async((uuid, )) + for errback in task_request.errbacks or []] + else: + # callback tasks must be applied before the result is + # stored, so that result.children is populated. 
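                # [editor's note -- annotation, not part of the upstream diff]
                # request.callbacks is filled by callers using e.g. the
                # `link` argument: add.apply_async((2, 2), link=mul.s(4));
                # each callback gets the parent's return value prepended.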
+ [subtask(callback).apply_async((retval, )) + for callback in task_request.callbacks or []] + if publish_result: + store_result(uuid, retval, SUCCESS) + if task_on_success: + task_on_success(retval, uuid, args, kwargs) + if success_receivers: + send_success(sender=task, result=retval) + + # -* POST *- + if state not in IGNORE_STATES: + if task_request.chord: + on_chord_part_return(task) + if task_after_return: + task_after_return( + state, retval, uuid, args, kwargs, None, + ) + if postrun_receivers: + send_postrun(sender=task, task_id=uuid, task=task, + args=args, kwargs=kwargs, + retval=retval, state=state) + finally: + pop_task() + pop_request() + if not eager: + try: + backend_cleanup() + loader_cleanup() + except (KeyboardInterrupt, SystemExit, MemoryError): + raise + except Exception, exc: + _logger.error('Process cleanup failed: %r', exc, + exc_info=True) + except Exception, exc: + if eager: + raise + R = report_internal_error(task, exc) + return R, I + + return trace_task + + +def trace_task(task, uuid, args, kwargs, request={}, **opts): + try: + if task.__trace__ is None: + task.__trace__ = build_tracer(task.name, task, **opts) + return task.__trace__(uuid, args, kwargs, request)[0] + except Exception, exc: + return report_internal_error(task, exc) + + +def _trace_task_ret(name, uuid, args, kwargs, request={}, **opts): + return trace_task(current_app.tasks[name], + uuid, args, kwargs, request, **opts) +trace_task_ret = _trace_task_ret + + +def _fast_trace_task(task, uuid, args, kwargs, request={}): + # setup_worker_optimizations will point trace_task_ret to here, + # so this is the function used in the worker. + return _tasks[task].__trace__(uuid, args, kwargs, request)[0] + + +def eager_trace_task(task, uuid, args, kwargs, request=None, **opts): + opts.setdefault('eager', True) + return build_tracer(task.name, task, **opts)( + uuid, args, kwargs, request) + + +def report_internal_error(task, exc): + _type, _value, _tb = sys.exc_info() + try: + _value = task.backend.prepare_exception(exc) + exc_info = ExceptionInfo((_type, _value, _tb), internal=True) + warn(RuntimeWarning( + 'Exception raised outside body: %r:\n%s' % ( + exc, exc_info.traceback))) + return exc_info + finally: + del(_tb) + + +def setup_worker_optimizations(app): + global _tasks + global trace_task_ret + + # make sure custom Task.__call__ methods that calls super + # will not mess up the request/task stack. + _install_stack_protection() + + # all new threads start without a current app, so if an app is not + # passed on to the thread it will fall back to the "default app", + # which then could be the wrong app. So for the worker + # we set this to always return our app. This is a hack, + # and means that only a single app can be used for workers + # running in the same process. + app.set_current() + set_default_app(app) + + # evaluate all task classes by finalizing the app. 
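    # [editor's note -- annotation, not part of the upstream diff]
    # finalize() evaluates the pending task class definitions so that
    # app._tasks (aliased to the module-global _tasks below) is complete
    # before _fast_trace_task starts doing direct registry lookups
    # (assumption inferred from the surrounding code, not upstream docs).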
+ app.finalize() + + # set fast shortcut to task registry + _tasks = app._tasks + + trace_task_ret = _fast_trace_task + try: + job = sys.modules['celery.worker.job'] + except KeyError: + pass + else: + job.trace_task_ret = _fast_trace_task + job.__optimize__() + + +def reset_worker_optimizations(): + global trace_task_ret + trace_task_ret = _trace_task_ret + try: + delattr(BaseTask, '_stackprotected') + except AttributeError: + pass + try: + BaseTask.__call__ = _patched.pop('BaseTask.__call__') + except KeyError: + pass + try: + sys.modules['celery.worker.job'].trace_task_ret = _trace_task_ret + except KeyError: + pass + + +def _install_stack_protection(): + # Patches BaseTask.__call__ in the worker to handle the edge case + # where people override it and also call super. + # + # - The worker optimizes away BaseTask.__call__ and instead + # calls task.run directly. + # - so with the addition of current_task and the request stack + # BaseTask.__call__ now pushes to those stacks so that + # they work when tasks are called directly. + # + # The worker only optimizes away __call__ in the case + # where it has not been overridden, so the request/task stack + # will blow if a custom task class defines __call__ and also + # calls super(). + if not getattr(BaseTask, '_stackprotected', False): + _patched['BaseTask.__call__'] = orig = BaseTask.__call__ + + def __protected_call__(self, *args, **kwargs): + stack = self.request_stack + req = stack.top + if req and not req._protected and \ + len(stack) == 1 and not req.called_directly: + req._protected = 1 + return self.run(*args, **kwargs) + return orig(self, *args, **kwargs) + BaseTask.__call__ = __protected_call__ + BaseTask._stackprotected = True diff --git a/awx/lib/site-packages/celery/tests/__init__.py b/awx/lib/site-packages/celery/tests/__init__.py new file mode 100644 index 0000000000..0284069f0e --- /dev/null +++ b/awx/lib/site-packages/celery/tests/__init__.py @@ -0,0 +1,88 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import logging +import os +import sys +import warnings + +from importlib import import_module + +try: + WindowsError = WindowsError # noqa +except NameError: + + class WindowsError(Exception): + pass + +config_module = os.environ.setdefault( + 'CELERY_TEST_CONFIG_MODULE', 'celery.tests.config', +) + +os.environ.setdefault('CELERY_CONFIG_MODULE', config_module) +os.environ['CELERY_LOADER'] = 'default' +os.environ['EVENTLET_NOPATCH'] = 'yes' +os.environ['GEVENT_NOPATCH'] = 'yes' +os.environ['KOMBU_DISABLE_LIMIT_PROTECTION'] = 'yes' +os.environ['CELERY_BROKER_URL'] = 'memory://' + + +def setup(): + if os.environ.get('COVER_ALL_MODULES') or '--with-coverage3' in sys.argv: + from celery.tests.utils import catch_warnings + with catch_warnings(record=True): + import_all_modules() + warnings.resetwarnings() + + +def teardown(): + # Don't want SUBDEBUG log messages at finalization. + try: + from multiprocessing.util import get_logger + except ImportError: + pass + else: + get_logger().setLevel(logging.WARNING) + + # Make sure test database is removed. + import os + if os.path.exists('test.db'): + try: + os.remove('test.db') + except WindowsError: + pass + + # Make sure there are no remaining threads at shutdown. 
+ import threading + remaining_threads = [thread for thread in threading.enumerate() + if thread.getName() != 'MainThread'] + if remaining_threads: + sys.stderr.write( + '\n\n**WARNING**: Remaining threads at teardown: %r...\n' % ( + remaining_threads)) + + +def find_distribution_modules(name=__name__, file=__file__): + current_dist_depth = len(name.split('.')) - 1 + current_dist = os.path.join(os.path.dirname(file), + *([os.pardir] * current_dist_depth)) + abs = os.path.abspath(current_dist) + dist_name = os.path.basename(abs) + + for dirpath, dirnames, filenames in os.walk(abs): + package = (dist_name + dirpath[len(abs):]).replace('/', '.') + if '__init__.py' in filenames: + yield package + for filename in filenames: + if filename.endswith('.py') and filename != '__init__.py': + yield '.'.join([package, filename])[:-3] + + +def import_all_modules(name=__name__, file=__file__, + skip=['celery.decorators', 'celery.contrib.batches']): + for module in find_distribution_modules(name, file): + if module not in skip: + try: + import_module(module) + except ImportError: + pass diff --git a/awx/lib/site-packages/celery/tests/app/__init__.py b/awx/lib/site-packages/celery/tests/app/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/celery/tests/app/test_amqp.py b/awx/lib/site-packages/celery/tests/app/test_amqp.py new file mode 100644 index 0000000000..06aec7892c --- /dev/null +++ b/awx/lib/site-packages/celery/tests/app/test_amqp.py @@ -0,0 +1,136 @@ +from __future__ import absolute_import +from __future__ import with_statement + +from kombu import Exchange, Queue +from mock import Mock + +from celery.app.amqp import Queues, TaskPublisher +from celery.tests.utils import AppCase + + +class test_TaskProducer(AppCase): + + def test__exit__(self): + publisher = self.app.amqp.TaskProducer(self.app.connection()) + publisher.release = Mock() + with publisher: + pass + publisher.release.assert_called_with() + + def test_declare(self): + publisher = self.app.amqp.TaskProducer(self.app.connection()) + publisher.exchange.name = 'foo' + publisher.declare() + publisher.exchange.name = None + publisher.declare() + + def test_retry_policy(self): + prod = self.app.amqp.TaskProducer(Mock()) + prod.channel.connection.client.declared_entities = set() + prod.publish_task('tasks.add', (2, 2), {}, + retry_policy={'frobulate': 32.4}) + + def test_publish_no_retry(self): + prod = self.app.amqp.TaskProducer(Mock()) + prod.channel.connection.client.declared_entities = set() + prod.publish_task('tasks.add', (2, 2), {}, retry=False, chord=123) + self.assertFalse(prod.connection.ensure.call_count) + + +class test_compat_TaskPublisher(AppCase): + + def test_compat_exchange_is_string(self): + producer = TaskPublisher(exchange='foo', app=self.app) + self.assertIsInstance(producer.exchange, Exchange) + self.assertEqual(producer.exchange.name, 'foo') + self.assertEqual(producer.exchange.type, 'direct') + producer = TaskPublisher(exchange='foo', exchange_type='topic', + app=self.app) + self.assertEqual(producer.exchange.type, 'topic') + + def test_compat_exchange_is_Exchange(self): + producer = TaskPublisher(exchange=Exchange('foo')) + self.assertEqual(producer.exchange.name, 'foo') + + +class test_PublisherPool(AppCase): + + def test_setup_nolimit(self): + L = self.app.conf.BROKER_POOL_LIMIT + self.app.conf.BROKER_POOL_LIMIT = None + try: + delattr(self.app, '_pool') + except AttributeError: + pass + self.app.amqp._producer_pool = None + try: + pool = self.app.amqp.producer_pool + 
self.assertEqual(pool.limit, self.app.pool.limit) + self.assertFalse(pool._resource.queue) + + r1 = pool.acquire() + r2 = pool.acquire() + r1.release() + r2.release() + r1 = pool.acquire() + r2 = pool.acquire() + finally: + self.app.conf.BROKER_POOL_LIMIT = L + + def test_setup(self): + L = self.app.conf.BROKER_POOL_LIMIT + self.app.conf.BROKER_POOL_LIMIT = 2 + try: + delattr(self.app, '_pool') + except AttributeError: + pass + self.app.amqp._producer_pool = None + try: + pool = self.app.amqp.producer_pool + self.assertEqual(pool.limit, self.app.pool.limit) + self.assertTrue(pool._resource.queue) + + p1 = r1 = pool.acquire() + p2 = r2 = pool.acquire() + r1.release() + r2.release() + r1 = pool.acquire() + r2 = pool.acquire() + self.assertIs(p2, r1) + self.assertIs(p1, r2) + r1.release() + r2.release() + finally: + self.app.conf.BROKER_POOL_LIMIT = L + + +class test_Queues(AppCase): + + def test_queues_format(self): + prev, self.app.amqp.queues._consume_from = ( + self.app.amqp.queues._consume_from, {}) + try: + self.assertEqual(self.app.amqp.queues.format(), '') + finally: + self.app.amqp.queues._consume_from = prev + + def test_with_defaults(self): + self.assertEqual(Queues(None), {}) + + def test_add(self): + q = Queues() + q.add('foo', exchange='ex', routing_key='rk') + self.assertIn('foo', q) + self.assertIsInstance(q['foo'], Queue) + self.assertEqual(q['foo'].routing_key, 'rk') + + def test_add_default_exchange(self): + ex = Exchange('fff', 'fanout') + q = Queues(default_exchange=ex) + q.add(Queue('foo')) + self.assertEqual(q['foo'].exchange, ex) + + def test_alias(self): + q = Queues() + q.add(Queue('foo', alias='barfoo')) + self.assertIs(q['barfoo'], q['foo']) diff --git a/awx/lib/site-packages/celery/tests/app/test_annotations.py b/awx/lib/site-packages/celery/tests/app/test_annotations.py new file mode 100644 index 0000000000..10e49bef94 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/app/test_annotations.py @@ -0,0 +1,53 @@ +from __future__ import absolute_import + +from celery.app.annotations import MapAnnotation, prepare +from celery.task import task +from celery.utils.imports import qualname + +from celery.tests.utils import Case + + +@task() +def add(x, y): + return x + y + + +@task() +def mul(x, y): + return x * y + + +class MyAnnotation(object): + foo = 65 + + +class test_MapAnnotation(Case): + + def test_annotate(self): + x = MapAnnotation({add.name: {'foo': 1}}) + self.assertDictEqual(x.annotate(add), {'foo': 1}) + self.assertIsNone(x.annotate(mul)) + + def test_annotate_any(self): + x = MapAnnotation({'*': {'foo': 2}}) + self.assertDictEqual(x.annotate_any(), {'foo': 2}) + + x = MapAnnotation() + self.assertIsNone(x.annotate_any()) + + +class test_prepare(Case): + + def test_dict_to_MapAnnotation(self): + x = prepare({add.name: {'foo': 3}}) + self.assertIsInstance(x[0], MapAnnotation) + + def test_returns_list(self): + self.assertListEqual(prepare(1), [1]) + self.assertListEqual(prepare([1]), [1]) + self.assertListEqual(prepare((1, )), [1]) + self.assertEqual(prepare(None), ()) + + def test_evalutes_qualnames(self): + self.assertEqual(prepare(qualname(MyAnnotation))[0]().foo, 65) + self.assertEqual(prepare([qualname(MyAnnotation)])[0]().foo, 65) diff --git a/awx/lib/site-packages/celery/tests/app/test_app.py b/awx/lib/site-packages/celery/tests/app/test_app.py new file mode 100644 index 0000000000..5e1a15586d --- /dev/null +++ b/awx/lib/site-packages/celery/tests/app/test_app.py @@ -0,0 +1,519 @@ +from __future__ import absolute_import +from __future__ import 
with_statement + +import os + +from mock import Mock, patch +from pickle import loads, dumps + +from kombu import Exchange + +from celery import Celery +from celery import app as _app +from celery import _state +from celery.app import defaults +from celery.loaders.base import BaseLoader +from celery.platforms import pyimplementation +from celery.utils.serialization import pickle + +from celery.tests import config +from celery.tests.utils import (Case, mask_modules, platform_pyimp, + sys_platform, pypy_version) +from celery.utils import uuid +from celery.utils.mail import ErrorMail + +THIS_IS_A_KEY = 'this is a value' + + +class Object(object): + + def __init__(self, **kwargs): + for key, value in kwargs.items(): + setattr(self, key, value) + + +def _get_test_config(): + return dict((key, getattr(config, key)) + for key in dir(config) + if key.isupper() and not key.startswith('_')) + +test_config = _get_test_config() + + +class test_module(Case): + + def test_default_app(self): + self.assertEqual(_app.default_app, _state.default_app) + + def test_bugreport(self): + self.assertTrue(_app.bugreport()) + + +class test_App(Case): + + def setUp(self): + self.app = Celery(set_as_current=False) + self.app.conf.update(test_config) + + def test_task(self): + app = Celery('foozibari', set_as_current=False) + + def fun(): + pass + + fun.__module__ = '__main__' + task = app.task(fun) + self.assertEqual(task.name, app.main + '.fun') + + def test_with_broker(self): + prev = os.environ.get('CELERY_BROKER_URL') + os.environ.pop('CELERY_BROKER_URL', None) + try: + app = Celery(set_as_current=False, broker='foo://baribaz') + self.assertEqual(app.conf.BROKER_HOST, 'foo://baribaz') + finally: + os.environ['CELERY_BROKER_URL'] = prev + + def test_repr(self): + self.assertTrue(repr(self.app)) + + def test_custom_task_registry(self): + app1 = Celery(set_as_current=False) + app2 = Celery(set_as_current=False, tasks=app1.tasks) + self.assertIs(app2.tasks, app1.tasks) + + def test_include_argument(self): + app = Celery(set_as_current=False, include=('foo', 'bar.foo')) + self.assertEqual(app.conf.CELERY_IMPORTS, ('foo', 'bar.foo')) + + def test_set_as_current(self): + current = _state._tls.current_app + try: + app = Celery(set_as_current=True) + self.assertIs(_state._tls.current_app, app) + finally: + _state._tls.current_app = current + + def test_current_task(self): + app = Celery(set_as_current=False) + + @app.task + def foo(): + pass + + _state._task_stack.push(foo) + try: + self.assertEqual(app.current_task.name, foo.name) + finally: + _state._task_stack.pop() + + def test_task_not_shared(self): + with patch('celery.app.base.shared_task') as shared_task: + app = Celery(set_as_current=False) + + @app.task(shared=False) + def foo(): + pass + self.assertFalse(shared_task.called) + + def test_task_compat_with_filter(self): + app = Celery(set_as_current=False, accept_magic_kwargs=True) + check = Mock() + + def filter(task): + check(task) + return task + + @app.task(filter=filter) + def foo(): + pass + check.assert_called_with(foo) + + def test_task_with_filter(self): + app = Celery(set_as_current=False, accept_magic_kwargs=False) + check = Mock() + + def filter(task): + check(task) + return task + + @app.task(filter=filter) + def foo(): + pass + check.assert_called_with(foo) + + def test_task_sets_main_name_MP_MAIN_FILE(self): + from celery import utils as _utils + _utils.MP_MAIN_FILE = __file__ + try: + app = Celery('xuzzy', set_as_current=False) + + @app.task + def foo(): + pass + + self.assertEqual(foo.name, 
'xuzzy.foo') + finally: + _utils.MP_MAIN_FILE = None + + def test_base_task_inherits_magic_kwargs_from_app(self): + from celery.task import Task as OldTask + + class timkX(OldTask): + abstract = True + + app = Celery(set_as_current=False, accept_magic_kwargs=True) + timkX.bind(app) + # see #918 + self.assertFalse(timkX.accept_magic_kwargs) + + from celery import Task as NewTask + + class timkY(NewTask): + abstract = True + + timkY.bind(app) + self.assertFalse(timkY.accept_magic_kwargs) + + def test_annotate_decorator(self): + from celery.app.task import Task + + class adX(Task): + abstract = True + + def run(self, y, z, x): + return y, z, x + + check = Mock() + + def deco(fun): + + def _inner(*args, **kwargs): + check(*args, **kwargs) + return fun(*args, **kwargs) + return _inner + + app = Celery(set_as_current=False) + app.conf.CELERY_ANNOTATIONS = { + adX.name: {'@__call__': deco} + } + adX.bind(app) + self.assertIs(adX.app, app) + + i = adX() + i(2, 4, x=3) + check.assert_called_with(i, 2, 4, x=3) + + i.annotate() + i.annotate() + + def test_apply_async_has__self__(self): + app = Celery(set_as_current=False) + + @app.task(__self__='hello') + def aawsX(): + pass + + with patch('celery.app.amqp.TaskProducer.publish_task') as dt: + aawsX.apply_async((4, 5)) + args = dt.call_args[0][1] + self.assertEqual(args, ('hello', 4, 5)) + + def test_apply_async__connection_arg(self): + app = Celery(set_as_current=False) + + @app.task() + def aacaX(): + pass + + connection = app.connection('asd://') + with self.assertRaises(KeyError): + aacaX.apply_async(connection=connection) + + def test_apply_async_adds_children(self): + from celery._state import _task_stack + app = Celery(set_as_current=False) + + @app.task() + def a3cX1(self): + pass + + @app.task() + def a3cX2(self): + pass + + _task_stack.push(a3cX1) + try: + a3cX1.push_request(called_directly=False) + try: + res = a3cX2.apply_async(add_to_parent=True) + self.assertIn(res, a3cX1.request.children) + finally: + a3cX1.pop_request() + finally: + _task_stack.pop() + + def test_TaskSet(self): + ts = self.app.TaskSet() + self.assertListEqual(ts.tasks, []) + self.assertIs(ts.app, self.app) + + def test_pickle_app(self): + changes = dict(THE_FOO_BAR='bars', + THE_MII_MAR='jars') + self.app.conf.update(changes) + saved = pickle.dumps(self.app) + self.assertLess(len(saved), 2048) + restored = pickle.loads(saved) + self.assertDictContainsSubset(changes, restored.conf) + + def test_worker_main(self): + from celery.bin import celeryd + + class WorkerCommand(celeryd.WorkerCommand): + + def execute_from_commandline(self, argv): + return argv + + prev, celeryd.WorkerCommand = celeryd.WorkerCommand, WorkerCommand + try: + ret = self.app.worker_main(argv=['--version']) + self.assertListEqual(ret, ['--version']) + finally: + celeryd.WorkerCommand = prev + + def test_config_from_envvar(self): + os.environ['CELERYTEST_CONFIG_OBJECT'] = 'celery.tests.app.test_app' + self.app.config_from_envvar('CELERYTEST_CONFIG_OBJECT') + self.assertEqual(self.app.conf.THIS_IS_A_KEY, 'this is a value') + + def test_config_from_object(self): + + class Object(object): + LEAVE_FOR_WORK = True + MOMENT_TO_STOP = True + CALL_ME_BACK = 123456789 + WANT_ME_TO = False + UNDERSTAND_ME = True + + self.app.config_from_object(Object()) + + self.assertTrue(self.app.conf.LEAVE_FOR_WORK) + self.assertTrue(self.app.conf.MOMENT_TO_STOP) + self.assertEqual(self.app.conf.CALL_ME_BACK, 123456789) + self.assertFalse(self.app.conf.WANT_ME_TO) + self.assertTrue(self.app.conf.UNDERSTAND_ME) + + def 
test_config_from_cmdline(self): + cmdline = ['.always_eager=no', + '.result_backend=/dev/null', + '.task_error_whitelist=(list)["a", "b", "c"]', + 'celeryd.prefetch_multiplier=368', + '.foobarstring=(string)300', + '.foobarint=(int)300', + '.result_engine_options=(dict){"foo": "bar"}'] + self.app.config_from_cmdline(cmdline, namespace='celery') + self.assertFalse(self.app.conf.CELERY_ALWAYS_EAGER) + self.assertEqual(self.app.conf.CELERY_RESULT_BACKEND, '/dev/null') + self.assertEqual(self.app.conf.CELERYD_PREFETCH_MULTIPLIER, 368) + self.assertListEqual(self.app.conf.CELERY_TASK_ERROR_WHITELIST, + ['a', 'b', 'c']) + self.assertEqual(self.app.conf.CELERY_FOOBARSTRING, '300') + self.assertEqual(self.app.conf.CELERY_FOOBARINT, 300) + self.assertDictEqual(self.app.conf.CELERY_RESULT_ENGINE_OPTIONS, + {'foo': 'bar'}) + + def test_compat_setting_CELERY_BACKEND(self): + + self.app.config_from_object(Object(CELERY_BACKEND='set_by_us')) + self.assertEqual(self.app.conf.CELERY_RESULT_BACKEND, 'set_by_us') + + def test_setting_BROKER_TRANSPORT_OPTIONS(self): + + _args = {'foo': 'bar', 'spam': 'baz'} + + self.app.config_from_object(Object()) + self.assertEqual(self.app.conf.BROKER_TRANSPORT_OPTIONS, {}) + + self.app.config_from_object(Object(BROKER_TRANSPORT_OPTIONS=_args)) + self.assertEqual(self.app.conf.BROKER_TRANSPORT_OPTIONS, _args) + + def test_Windows_log_color_disabled(self): + self.app.IS_WINDOWS = True + self.assertFalse(self.app.log.supports_color(True)) + + def test_compat_setting_CARROT_BACKEND(self): + self.app.config_from_object(Object(CARROT_BACKEND='set_by_us')) + self.assertEqual(self.app.conf.BROKER_TRANSPORT, 'set_by_us') + + def test_WorkController(self): + x = self.app.WorkController + self.assertIs(x.app, self.app) + + def test_Worker(self): + x = self.app.Worker + self.assertIs(x.app, self.app) + + def test_AsyncResult(self): + x = self.app.AsyncResult('1') + self.assertIs(x.app, self.app) + r = loads(dumps(x)) + # not set as current, so ends up as default app after reduce + self.assertIs(r.app, _state.default_app) + + @patch('celery.bin.celery.CeleryCommand.execute_from_commandline') + def test_start(self, execute): + self.app.start() + self.assertTrue(execute.called) + + def test_mail_admins(self): + + class Loader(BaseLoader): + + def mail_admins(*args, **kwargs): + return args, kwargs + + self.app.loader = Loader() + self.app.conf.ADMINS = None + self.assertFalse(self.app.mail_admins('Subject', 'Body')) + self.app.conf.ADMINS = [('George Costanza', 'george@vandelay.com')] + self.assertTrue(self.app.mail_admins('Subject', 'Body')) + + def test_amqp_get_broker_info(self): + self.assertDictContainsSubset( + {'hostname': 'localhost', + 'userid': 'guest', + 'password': 'guest', + 'virtual_host': '/'}, + self.app.connection('amqp://').info(), + ) + self.app.conf.BROKER_PORT = 1978 + self.app.conf.BROKER_VHOST = 'foo' + self.assertDictContainsSubset( + {'port': 1978, 'virtual_host': 'foo'}, + self.app.connection('amqp://:1978/foo').info(), + ) + conn = self.app.connection('amqp:////value') + self.assertDictContainsSubset({'virtual_host': '/value'}, + conn.info()) + + def test_BROKER_BACKEND_alias(self): + self.assertEqual(self.app.conf.BROKER_BACKEND, + self.app.conf.BROKER_TRANSPORT) + + def test_with_default_connection(self): + + @self.app.with_default_connection + def handler(connection=None, foo=None): + return connection, foo + + connection, foo = handler(foo=42) + self.assertEqual(foo, 42) + self.assertTrue(connection) + + def test_after_fork(self): + p = self.app._pool = 
Mock() + self.app._after_fork(self.app) + p.force_close_all.assert_called_with() + self.assertIsNone(self.app._pool) + self.app._after_fork(self.app) + + def test_pool_no_multiprocessing(self): + with mask_modules('multiprocessing.util'): + pool = self.app.pool + self.assertIs(pool, self.app._pool) + + def test_bugreport(self): + self.assertTrue(self.app.bugreport()) + + def test_send_task_sent_event(self): + + class Dispatcher(object): + sent = [] + + def publish(self, type, fields, *args, **kwargs): + self.sent.append((type, fields)) + + conn = self.app.connection() + chan = conn.channel() + try: + for e in ('foo_exchange', 'moo_exchange', 'bar_exchange'): + chan.exchange_declare(e, 'direct', durable=True) + chan.queue_declare(e, durable=True) + chan.queue_bind(e, e, e) + finally: + chan.close() + assert conn.transport_cls == 'memory' + + prod = self.app.amqp.TaskProducer( + conn, exchange=Exchange('foo_exchange'), + send_sent_event=True, + ) + + dispatcher = Dispatcher() + self.assertTrue(prod.publish_task('footask', (), {}, + exchange='moo_exchange', + routing_key='moo_exchange', + event_dispatcher=dispatcher)) + self.assertTrue(dispatcher.sent) + self.assertEqual(dispatcher.sent[0][0], 'task-sent') + self.assertTrue(prod.publish_task('footask', (), {}, + event_dispatcher=dispatcher, + exchange='bar_exchange', + routing_key='bar_exchange')) + + def test_error_mail_sender(self): + x = ErrorMail.subject % {'name': 'task_name', + 'id': uuid(), + 'exc': 'FOOBARBAZ', + 'hostname': 'lana'} + self.assertTrue(x) + + +class test_defaults(Case): + + def test_str_to_bool(self): + for s in ('false', 'no', '0'): + self.assertFalse(defaults.strtobool(s)) + for s in ('true', 'yes', '1'): + self.assertTrue(defaults.strtobool(s)) + with self.assertRaises(TypeError): + defaults.strtobool('unsure') + + +class test_debugging_utils(Case): + + def test_enable_disable_trace(self): + try: + _app.enable_trace() + self.assertEqual(_app.app_or_default, _app._app_or_default_trace) + _app.disable_trace() + self.assertEqual(_app.app_or_default, _app._app_or_default) + finally: + _app.disable_trace() + + +class test_pyimplementation(Case): + + def test_platform_python_implementation(self): + with platform_pyimp(lambda: 'Xython'): + self.assertEqual(pyimplementation(), 'Xython') + + def test_platform_jython(self): + with platform_pyimp(): + with sys_platform('java 1.6.51'): + self.assertIn('Jython', pyimplementation()) + + def test_platform_pypy(self): + with platform_pyimp(): + with sys_platform('darwin'): + with pypy_version((1, 4, 3)): + self.assertIn('PyPy', pyimplementation()) + with pypy_version((1, 4, 3, 'a4')): + self.assertIn('PyPy', pyimplementation()) + + def test_platform_fallback(self): + with platform_pyimp(): + with sys_platform('darwin'): + with pypy_version(): + self.assertEqual('CPython', pyimplementation()) diff --git a/awx/lib/site-packages/celery/tests/app/test_beat.py b/awx/lib/site-packages/celery/tests/app/test_beat.py new file mode 100644 index 0000000000..10262dfb70 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/app/test_beat.py @@ -0,0 +1,468 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import errno + +from datetime import datetime, timedelta +from mock import Mock, call, patch +from nose import SkipTest + +from celery import current_app +from celery import beat +from celery import task +from celery.result import AsyncResult +from celery.schedules import schedule +from celery.task.base import Task +from celery.utils import uuid +from 
celery.tests.utils import Case, patch_settings
+
+
+class Object(object):
+    pass
+
+
+class MockShelve(dict):
+    closed = False
+    synced = False
+
+    def close(self):
+        self.closed = True
+
+    def sync(self):
+        self.synced = True
+
+
+class MockService(object):
+    started = False
+    stopped = False
+
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def start(self, **kwargs):
+        self.started = True
+
+    def stop(self, **kwargs):
+        self.stopped = True
+
+
+class test_ScheduleEntry(Case):
+    Entry = beat.ScheduleEntry
+
+    def create_entry(self, **kwargs):
+        entry = dict(name='celery.unittest.add',
+                     schedule=schedule(timedelta(seconds=10)),
+                     args=(2, 2),
+                     options={'routing_key': 'cpu'})
+        return self.Entry(**dict(entry, **kwargs))
+
+    def test_next(self):
+        entry = self.create_entry(schedule=10)
+        self.assertTrue(entry.last_run_at)
+        self.assertIsInstance(entry.last_run_at, datetime)
+        self.assertEqual(entry.total_run_count, 0)
+
+        next_run_at = entry.last_run_at + timedelta(seconds=10)
+        next = entry.next(next_run_at)
+        self.assertGreaterEqual(next.last_run_at, next_run_at)
+        self.assertEqual(next.total_run_count, 1)
+
+    def test_is_due(self):
+        entry = self.create_entry(schedule=timedelta(seconds=10))
+        due1, next_time_to_run1 = entry.is_due()
+        self.assertFalse(due1)
+        self.assertGreater(next_time_to_run1, 9)
+
+        next_run_at = entry.last_run_at - timedelta(seconds=10)
+        next = entry.next(next_run_at)
+        due2, next_time_to_run2 = next.is_due()
+        self.assertTrue(due2)
+        self.assertGreater(next_time_to_run2, 9)
+
+    def test_repr(self):
+        entry = self.create_entry()
+        self.assertIn('<Entry:', repr(entry))
+
+
+def create_persistent_scheduler(shelv=None):
+    if shelv is None:
+        shelv = MockShelve()
+
+    class MockPersistentScheduler(beat.PersistentScheduler):
+        sh = shelv
+        persistence = Object()
+        persistence.open = lambda *a, **kw: shelv
+        tick_raises_exit = False
+        shutdown_service = None
+
+        def tick(self):
+            if self.tick_raises_exit:
+                raise SystemExit()
+            if self.shutdown_service:
+                self.shutdown_service._is_shutdown.set()
+            return 0.0
+
+    return MockPersistentScheduler, shelv
+
+
+class test_PersistentScheduler(Case):
+
+    def test_setup_schedule(self):
+        s = create_persistent_scheduler()[0](schedule_filename='schedule')
+        opens = s.persistence.open = Mock()
+        s._remove_db = Mock()
+
+        def effect(*args, **kwargs):
+            if opens.call_count > 1:
+                return s.sh
+            raise OSError()
+        opens.side_effect = effect
+        s.setup_schedule()
+        s._remove_db.assert_called_with()
+
+        s._store = {'__version__': 1}
+        s.setup_schedule()
+
+    def test_get_schedule(self):
+        s = create_persistent_scheduler()[0](schedule_filename='schedule')
+        s._store = {'entries': {}}
+        s.schedule = {'foo': 'bar'}
+        self.assertDictEqual(s.schedule, {'foo': 'bar'})
+        self.assertDictEqual(s._store['entries'], s.schedule)
+
+
+class test_Service(Case):
+
+    def get_service(self):
+        Scheduler, mock_shelve = create_persistent_scheduler()
+        return beat.Service(scheduler_cls=Scheduler), mock_shelve
+
+    def test_start(self):
+        s, sh = self.get_service()
+        schedule = s.scheduler.schedule
+        self.assertIsInstance(schedule, dict)
+        self.assertIsInstance(s.scheduler, beat.Scheduler)
+        scheduled = schedule.keys()
+        for task_name in sh['entries'].keys():
+            self.assertIn(task_name, scheduled)
+
+        s.sync()
+        self.assertTrue(sh.closed)
+        self.assertTrue(sh.synced)
+        self.assertTrue(s._is_stopped.isSet())
+        s.sync()
+        s.stop(wait=False)
+        self.assertTrue(s._is_shutdown.isSet())
+        s.stop(wait=True)
+        self.assertTrue(s._is_shutdown.isSet())
+
+        p = s.scheduler._store
+        s.scheduler._store = None
+        try:
+            s.scheduler.sync()
+        finally:
+            s.scheduler._store = p
+
+    def test_start_embedded_process(self):
+        s, sh = self.get_service()
+        s._is_shutdown.set()
+        s.start(embedded_process=True)
+
+    def test_start_thread(self):
+        s, sh = self.get_service()
+        s._is_shutdown.set()
+        s.start(embedded_process=False)
+
+    def test_start_tick_raises_exit_error(self):
+        s, sh = self.get_service()
+        s.scheduler.tick_raises_exit = True
+        s.start()
+        self.assertTrue(s._is_shutdown.isSet())
+
+    def test_start_manages_one_tick_before_shutdown(self):
+        s, sh = self.get_service()
+        s.scheduler.shutdown_service = s
+        s.start()
+        self.assertTrue(s._is_shutdown.isSet())
+
+
+class test_EmbeddedService(Case):
+
+    def test_start_stop_process(self):
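+        # EmbeddedService runs beat in a separate billiard Process by
+        # default, so the test below is skipped when multiprocessing is
+        # unavailable on this interpreter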
try: + import _multiprocessing # noqa + except ImportError: + raise SkipTest('multiprocessing not available') + + from billiard.process import Process + + s = beat.EmbeddedService() + self.assertIsInstance(s, Process) + self.assertIsInstance(s.service, beat.Service) + s.service = MockService() + + class _Popen(object): + terminated = False + + def terminate(self): + self.terminated = True + + s.run() + self.assertTrue(s.service.started) + + s._popen = _Popen() + s.stop() + self.assertTrue(s.service.stopped) + self.assertTrue(s._popen.terminated) + + def test_start_stop_threaded(self): + s = beat.EmbeddedService(thread=True) + from threading import Thread + self.assertIsInstance(s, Thread) + self.assertIsInstance(s.service, beat.Service) + s.service = MockService() + + s.run() + self.assertTrue(s.service.started) + + s.stop() + self.assertTrue(s.service.stopped) diff --git a/awx/lib/site-packages/celery/tests/app/test_builtins.py b/awx/lib/site-packages/celery/tests/app/test_builtins.py new file mode 100644 index 0000000000..dc09fab2a7 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/app/test_builtins.py @@ -0,0 +1,162 @@ +from __future__ import absolute_import + +from mock import Mock, patch + +from celery import current_app as app, group, task, chord +from celery.app import builtins +from celery._state import _task_stack +from celery.tests.utils import Case + + +@task() +def add(x, y): + return x + y + + +@task() +def xsum(x): + return sum(x) + + +class test_backend_cleanup(Case): + + def test_run(self): + prev = app.backend + app.backend.cleanup = Mock() + app.backend.cleanup.__name__ = 'cleanup' + try: + cleanup_task = builtins.add_backend_cleanup_task(app) + cleanup_task() + self.assertTrue(app.backend.cleanup.called) + finally: + app.backend = prev + + +class test_map(Case): + + def test_run(self): + + @app.task() + def map_mul(x): + return x[0] * x[1] + + res = app.tasks['celery.map'](map_mul, [(2, 2), (4, 4), (8, 8)]) + self.assertEqual(res, [4, 16, 64]) + + +class test_starmap(Case): + + def test_run(self): + + @app.task() + def smap_mul(x, y): + return x * y + + res = app.tasks['celery.starmap'](smap_mul, [(2, 2), (4, 4), (8, 8)]) + self.assertEqual(res, [4, 16, 64]) + + +class test_chunks(Case): + + @patch('celery.canvas.chunks.apply_chunks') + def test_run(self, apply_chunks): + + @app.task() + def chunks_mul(l): + return l + + app.tasks['celery.chunks']( + chunks_mul, [(2, 2), (4, 4), (8, 8)], 1, + ) + self.assertTrue(apply_chunks.called) + + +class test_group(Case): + + def setUp(self): + self.prev = app.tasks.get('celery.group') + self.task = builtins.add_group_task(app)() + + def tearDown(self): + app.tasks['celery.group'] = self.prev + + def test_apply_async_eager(self): + self.task.apply = Mock() + app.conf.CELERY_ALWAYS_EAGER = True + try: + self.task.apply_async() + finally: + app.conf.CELERY_ALWAYS_EAGER = False + self.assertTrue(self.task.apply.called) + + def test_apply(self): + x = group([add.s(4, 4), add.s(8, 8)]) + x.name = self.task.name + res = x.apply() + self.assertEqual(res.get(), [8, 16]) + + def test_apply_async(self): + x = group([add.s(4, 4), add.s(8, 8)]) + x.apply_async() + + def test_apply_async_with_parent(self): + _task_stack.push(add) + try: + add.push_request(called_directly=False) + try: + assert not add.request.children + x = group([add.s(4, 4), add.s(8, 8)]) + res = x() + self.assertTrue(add.request.children) + self.assertIn(res, add.request.children) + self.assertEqual(len(add.request.children), 1) + finally: + add.pop_request() + finally: 
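+            # restore the pushed parent task so later tests start from a
+            # clean task stack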
+ _task_stack.pop() + + +class test_chain(Case): + + def setUp(self): + self.prev = app.tasks.get('celery.chain') + self.task = builtins.add_chain_task(app)() + + def tearDown(self): + app.tasks['celery.chain'] = self.prev + + def test_apply_async(self): + c = add.s(2, 2) | add.s(4) | add.s(8) + result = c.apply_async() + self.assertTrue(result.parent) + self.assertTrue(result.parent.parent) + self.assertIsNone(result.parent.parent.parent) + + +class test_chord(Case): + + def setUp(self): + self.prev = app.tasks.get('celery.chord') + self.task = builtins.add_chord_task(app)() + + def tearDown(self): + app.tasks['celery.chord'] = self.prev + + def test_apply_async(self): + x = chord([add.s(i, i) for i in xrange(10)], body=xsum.s()) + r = x.apply_async() + self.assertTrue(r) + self.assertTrue(r.parent) + + def test_run_header_not_group(self): + self.task([add.s(i, i) for i in xrange(10)], xsum.s()) + + def test_apply_eager(self): + app.conf.CELERY_ALWAYS_EAGER = True + try: + x = chord([add.s(i, i) for i in xrange(10)], body=xsum.s()) + r = x.apply_async() + self.assertEqual(r.get(), 90) + + finally: + app.conf.CELERY_ALWAYS_EAGER = False diff --git a/awx/lib/site-packages/celery/tests/app/test_celery.py b/awx/lib/site-packages/celery/tests/app/test_celery.py new file mode 100644 index 0000000000..b28c5f0575 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/app/test_celery.py @@ -0,0 +1,18 @@ +from __future__ import absolute_import +from celery.tests.utils import Case + +import celery + + +class test_celery_package(Case): + + def test_version(self): + self.assertTrue(celery.VERSION) + self.assertGreaterEqual(len(celery.VERSION), 3) + celery.VERSION = (0, 3, 0) + self.assertGreaterEqual(celery.__version__.count('.'), 2) + + def test_meta(self): + for m in ('__author__', '__contact__', '__homepage__', + '__docformat__'): + self.assertTrue(getattr(celery, m, None)) diff --git a/awx/lib/site-packages/celery/tests/app/test_control.py b/awx/lib/site-packages/celery/tests/app/test_control.py new file mode 100644 index 0000000000..2b94ba09cd --- /dev/null +++ b/awx/lib/site-packages/celery/tests/app/test_control.py @@ -0,0 +1,197 @@ +from __future__ import absolute_import +from __future__ import with_statement + +from functools import wraps + +from kombu.pidbox import Mailbox + +from celery.app import app_or_default +from celery.app import control +from celery.task import task +from celery.utils import uuid +from celery.tests.utils import Case + + +@task() +def mytask(): + pass + + +class MockMailbox(Mailbox): + sent = [] + + def _publish(self, command, *args, **kwargs): + self.__class__.sent.append(command) + + def close(self): + pass + + def _collect(self, *args, **kwargs): + pass + + +class Control(control.Control): + Mailbox = MockMailbox + + +def with_mock_broadcast(fun): + + @wraps(fun) + def _resets(*args, **kwargs): + MockMailbox.sent = [] + try: + return fun(*args, **kwargs) + finally: + MockMailbox.sent = [] + return _resets + + +class test_inspect(Case): + + def setUp(self): + app = self.app = app_or_default() + self.c = Control(app=app) + self.prev, app.control = app.control, self.c + self.i = self.c.inspect() + + def tearDown(self): + self.app.control = self.prev + + def test_prepare_reply(self): + self.assertDictEqual(self.i._prepare([{'w1': {'ok': 1}}, + {'w2': {'ok': 1}}]), + {'w1': {'ok': 1}, 'w2': {'ok': 1}}) + + i = self.c.inspect(destination='w1') + self.assertEqual(i._prepare([{'w1': {'ok': 1}}]), + {'ok': 1}) + + @with_mock_broadcast + def test_active(self): + 
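+        # each inspect method should broadcast the matching remote-control
+        # command through the mocked mailbox, e.g. active() -> 'dump_active'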
self.i.active() + self.assertIn('dump_active', MockMailbox.sent) + + @with_mock_broadcast + def test_scheduled(self): + self.i.scheduled() + self.assertIn('dump_schedule', MockMailbox.sent) + + @with_mock_broadcast + def test_reserved(self): + self.i.reserved() + self.assertIn('dump_reserved', MockMailbox.sent) + + @with_mock_broadcast + def test_stats(self): + self.i.stats() + self.assertIn('stats', MockMailbox.sent) + + @with_mock_broadcast + def test_revoked(self): + self.i.revoked() + self.assertIn('dump_revoked', MockMailbox.sent) + + @with_mock_broadcast + def test_tasks(self): + self.i.registered() + self.assertIn('dump_tasks', MockMailbox.sent) + + @with_mock_broadcast + def test_ping(self): + self.i.ping() + self.assertIn('ping', MockMailbox.sent) + + @with_mock_broadcast + def test_active_queues(self): + self.i.active_queues() + self.assertIn('active_queues', MockMailbox.sent) + + @with_mock_broadcast + def test_report(self): + self.i.report() + self.assertIn('report', MockMailbox.sent) + + +class test_Broadcast(Case): + + def setUp(self): + self.app = app_or_default() + self.control = Control(app=self.app) + self.app.control = self.control + + def tearDown(self): + del(self.app.control) + + def test_purge(self): + self.control.purge() + + @with_mock_broadcast + def test_broadcast(self): + self.control.broadcast('foobarbaz', arguments=[]) + self.assertIn('foobarbaz', MockMailbox.sent) + + @with_mock_broadcast + def test_broadcast_limit(self): + self.control.broadcast( + 'foobarbaz1', arguments=[], limit=None, destination=[1, 2, 3], + ) + self.assertIn('foobarbaz1', MockMailbox.sent) + + @with_mock_broadcast + def test_broadcast_validate(self): + with self.assertRaises(ValueError): + self.control.broadcast('foobarbaz2', + destination='foo') + + @with_mock_broadcast + def test_rate_limit(self): + self.control.rate_limit(mytask.name, '100/m') + self.assertIn('rate_limit', MockMailbox.sent) + + @with_mock_broadcast + def test_time_limit(self): + self.control.time_limit(mytask.name, soft=10, hard=20) + self.assertIn('time_limit', MockMailbox.sent) + + @with_mock_broadcast + def test_add_consumer(self): + self.control.add_consumer('foo') + self.assertIn('add_consumer', MockMailbox.sent) + + @with_mock_broadcast + def test_cancel_consumer(self): + self.control.cancel_consumer('foo') + self.assertIn('cancel_consumer', MockMailbox.sent) + + @with_mock_broadcast + def test_enable_events(self): + self.control.enable_events() + self.assertIn('enable_events', MockMailbox.sent) + + @with_mock_broadcast + def test_disable_events(self): + self.control.disable_events() + self.assertIn('disable_events', MockMailbox.sent) + + @with_mock_broadcast + def test_revoke(self): + self.control.revoke('foozbaaz') + self.assertIn('revoke', MockMailbox.sent) + + @with_mock_broadcast + def test_ping(self): + self.control.ping() + self.assertIn('ping', MockMailbox.sent) + + @with_mock_broadcast + def test_revoke_from_result(self): + self.app.AsyncResult('foozbazzbar').revoke() + self.assertIn('revoke', MockMailbox.sent) + + @with_mock_broadcast + def test_revoke_from_resultset(self): + r = self.app.GroupResult(uuid(), + map(self.app.AsyncResult, + [uuid() for i in range(10)])) + r.revoke() + self.assertIn('revoke', MockMailbox.sent) diff --git a/awx/lib/site-packages/celery/tests/app/test_defaults.py b/awx/lib/site-packages/celery/tests/app/test_defaults.py new file mode 100644 index 0000000000..d74c8e28d3 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/app/test_defaults.py @@ -0,0 +1,55 @@ +from 
__future__ import absolute_import +from __future__ import with_statement + +import sys + +from importlib import import_module +from mock import Mock, patch + +from celery.tests.utils import Case, pypy_version, sys_platform + + +class test_defaults(Case): + + def setUp(self): + self._prev = sys.modules.pop('celery.app.defaults', None) + + def tearDown(self): + if self._prev: + sys.modules['celery.app.defaults'] = self._prev + + def test_any(self): + val = object() + self.assertIs(self.defaults.Option.typemap['any'](val), val) + + def test_default_pool_pypy_14(self): + with sys_platform('darwin'): + with pypy_version((1, 4, 0)): + self.assertEqual(self.defaults.DEFAULT_POOL, 'solo') + + def test_default_pool_pypy_15(self): + with sys_platform('darwin'): + with pypy_version((1, 5, 0)): + self.assertEqual(self.defaults.DEFAULT_POOL, 'processes') + + def test_deprecated(self): + source = Mock() + source.BROKER_INSIST = True + with patch('celery.utils.warn_deprecated') as warn: + self.defaults.find_deprecated_settings(source) + self.assertTrue(warn.called) + + def test_default_pool_jython(self): + with sys_platform('java 1.6.51'): + self.assertEqual(self.defaults.DEFAULT_POOL, 'threads') + + def test_find(self): + find = self.defaults.find + + self.assertEqual(find('server_email')[2].default, 'celery@localhost') + self.assertEqual(find('default_queue')[2].default, 'celery') + self.assertEqual(find('celery_default_exchange')[2], 'celery') + + @property + def defaults(self): + return import_module('celery.app.defaults') diff --git a/awx/lib/site-packages/celery/tests/app/test_loaders.py b/awx/lib/site-packages/celery/tests/app/test_loaders.py new file mode 100644 index 0000000000..4e9f117062 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/app/test_loaders.py @@ -0,0 +1,276 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import os +import sys + +from mock import Mock, patch + +from celery import loaders +from celery.app import app_or_default +from celery.exceptions import ( + NotConfigured, + ImproperlyConfigured, + CPendingDeprecationWarning, +) +from celery.loaders import base +from celery.loaders import default +from celery.loaders.app import AppLoader +from celery.utils.imports import NotAPackage +from celery.utils.mail import SendmailWarning + +from celery.tests.utils import AppCase, Case +from celery.tests.compat import catch_warnings + + +class ObjectConfig(object): + FOO = 1 + BAR = 2 + +object_config = ObjectConfig() +dict_config = dict(FOO=10, BAR=20) + + +class Object(object): + + def __init__(self, **kwargs): + for k, v in kwargs.items(): + setattr(self, k, v) + + +class DummyLoader(base.BaseLoader): + + def read_configuration(self): + return {'foo': 'bar', 'CELERY_IMPORTS': ('os', 'sys')} + + +class test_loaders(AppCase): + + def test_get_loader_cls(self): + + self.assertEqual(loaders.get_loader_cls('default'), + default.Loader) + + def test_current_loader(self): + with self.assertWarnsRegex( + CPendingDeprecationWarning, + r'deprecation'): + self.assertIs(loaders.current_loader(), self.app.loader) + + def test_load_settings(self): + with self.assertWarnsRegex( + CPendingDeprecationWarning, + r'deprecation'): + self.assertIs(loaders.load_settings(), self.app.conf) + + +class test_LoaderBase(Case): + message_options = {'subject': 'Subject', + 'body': 'Body', + 'sender': 'x@x.com', + 'to': 'y@x.com'} + server_options = {'host': 'smtp.x.com', + 'port': 1234, + 'user': 'x', + 'password': 'qwerty', + 'timeout': 3} + + def setUp(self): + 
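+        # DummyLoader (defined above) returns a fixed configuration,
+        # making the conf assertions below deterministic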
self.loader = DummyLoader() + self.app = app_or_default() + + def test_handlers_pass(self): + self.loader.on_task_init('foo.task', 'feedface-cafebabe') + self.loader.on_worker_init() + + def test_import_task_module(self): + self.assertEqual(sys, self.loader.import_task_module('sys')) + + def test_init_worker_process(self): + self.loader.on_worker_process_init() + m = self.loader.on_worker_process_init = Mock() + self.loader.init_worker_process() + m.assert_called_with() + + def test_config_from_object_module(self): + self.loader.import_from_cwd = Mock() + self.loader.config_from_object('module_name') + self.loader.import_from_cwd.assert_called_with('module_name') + + def test_conf_property(self): + self.assertEqual(self.loader.conf['foo'], 'bar') + self.assertEqual(self.loader._conf['foo'], 'bar') + self.assertEqual(self.loader.conf['foo'], 'bar') + + def test_import_default_modules(self): + modnames = lambda l: [m.__name__ for m in l] + prev, self.app.conf.CELERY_IMPORTS = ( + self.app.conf.CELERY_IMPORTS, ('os', 'sys')) + try: + self.assertEqual( + sorted(modnames(self.loader.import_default_modules())), + sorted(modnames([os, sys])), + ) + finally: + self.app.conf.CELERY_IMPORTS = prev + + def test_import_from_cwd_custom_imp(self): + + def imp(module, package=None): + imp.called = True + imp.called = False + + self.loader.import_from_cwd('foo', imp=imp) + self.assertTrue(imp.called) + + @patch('celery.utils.mail.Mailer._send') + def test_mail_admins_errors(self, send): + send.side_effect = KeyError() + opts = dict(self.message_options, **self.server_options) + + with self.assertWarnsRegex(SendmailWarning, r'KeyError'): + self.loader.mail_admins(fail_silently=True, **opts) + + with self.assertRaises(KeyError): + self.loader.mail_admins(fail_silently=False, **opts) + + @patch('celery.utils.mail.Mailer._send') + def test_mail_admins(self, send): + opts = dict(self.message_options, **self.server_options) + self.loader.mail_admins(**opts) + self.assertTrue(send.call_args) + message = send.call_args[0][0] + self.assertEqual(message.to, [self.message_options['to']]) + self.assertEqual(message.subject, self.message_options['subject']) + self.assertEqual(message.sender, self.message_options['sender']) + self.assertEqual(message.body, self.message_options['body']) + + def test_mail_attribute(self): + from celery.utils import mail + loader = base.BaseLoader() + self.assertIs(loader.mail, mail) + + def test_cmdline_config_ValueError(self): + with self.assertRaises(ValueError): + self.loader.cmdline_config_parser(['broker.port=foobar']) + + +class test_DefaultLoader(Case): + + @patch('celery.loaders.base.find_module') + def test_read_configuration_not_a_package(self, find_module): + find_module.side_effect = NotAPackage() + l = default.Loader() + with self.assertRaises(NotAPackage): + l.read_configuration() + + @patch('celery.loaders.base.find_module') + def test_read_configuration_py_in_name(self, find_module): + prev = os.environ['CELERY_CONFIG_MODULE'] + os.environ['CELERY_CONFIG_MODULE'] = 'celeryconfig.py' + try: + find_module.side_effect = NotAPackage() + l = default.Loader() + with self.assertRaises(NotAPackage): + l.read_configuration() + finally: + os.environ['CELERY_CONFIG_MODULE'] = prev + + @patch('celery.loaders.base.find_module') + def test_read_configuration_importerror(self, find_module): + default.C_WNOCONF = True + find_module.side_effect = ImportError() + l = default.Loader() + with self.assertWarnsRegex(NotConfigured, r'make sure it exists'): + l.read_configuration() + + def 
test_read_configuration(self): + from types import ModuleType + + class ConfigModule(ModuleType): + pass + + celeryconfig = ConfigModule('celeryconfig') + celeryconfig.CELERY_IMPORTS = ('os', 'sys') + configname = os.environ.get('CELERY_CONFIG_MODULE') or 'celeryconfig' + + prevconfig = sys.modules.get(configname) + sys.modules[configname] = celeryconfig + try: + l = default.Loader() + settings = l.read_configuration() + self.assertTupleEqual(settings.CELERY_IMPORTS, ('os', 'sys')) + settings = l.read_configuration() + self.assertTupleEqual(settings.CELERY_IMPORTS, ('os', 'sys')) + l.on_worker_init() + finally: + if prevconfig: + sys.modules[configname] = prevconfig + + def test_import_from_cwd(self): + l = default.Loader() + old_path = list(sys.path) + try: + sys.path.remove(os.getcwd()) + except ValueError: + pass + celery = sys.modules.pop('celery', None) + try: + self.assertTrue(l.import_from_cwd('celery')) + sys.modules.pop('celery', None) + sys.path.insert(0, os.getcwd()) + self.assertTrue(l.import_from_cwd('celery')) + finally: + sys.path = old_path + sys.modules['celery'] = celery + + def test_unconfigured_settings(self): + context_executed = [False] + + class _Loader(default.Loader): + + def find_module(self, name): + raise ImportError(name) + + with catch_warnings(record=True): + l = _Loader() + self.assertFalse(l.configured) + context_executed[0] = True + self.assertTrue(context_executed[0]) + + +class test_AppLoader(Case): + + def setUp(self): + self.app = app_or_default() + self.loader = AppLoader(app=self.app) + + def test_config_from_envvar(self, key='CELERY_HARNESS_CFG1'): + self.assertFalse(self.loader.config_from_envvar('HDSAJIHWIQHEWQU', + silent=True)) + with self.assertRaises(ImproperlyConfigured): + self.loader.config_from_envvar('HDSAJIHWIQHEWQU', silent=False) + os.environ[key] = __name__ + '.object_config' + self.assertTrue(self.loader.config_from_envvar(key)) + self.assertEqual(self.loader.conf['FOO'], 1) + self.assertEqual(self.loader.conf['BAR'], 2) + + os.environ[key] = 'unknown_asdwqe.asdwqewqe' + with self.assertRaises(ImportError): + self.loader.config_from_envvar(key, silent=False) + self.assertFalse(self.loader.config_from_envvar(key, silent=True)) + + os.environ[key] = __name__ + '.dict_config' + self.assertTrue(self.loader.config_from_envvar(key)) + self.assertEqual(self.loader.conf['FOO'], 10) + self.assertEqual(self.loader.conf['BAR'], 20) + + def test_on_worker_init(self): + prev, self.app.conf.CELERY_IMPORTS = ( + self.app.conf.CELERY_IMPORTS, ('subprocess', )) + try: + sys.modules.pop('subprocess', None) + self.loader.init_worker() + self.assertIn('subprocess', sys.modules) + finally: + self.app.conf.CELERY_IMPORTS = prev diff --git a/awx/lib/site-packages/celery/tests/app/test_log.py b/awx/lib/site-packages/celery/tests/app/test_log.py new file mode 100644 index 0000000000..75cc68b990 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/app/test_log.py @@ -0,0 +1,274 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import sys +import logging +from tempfile import mktemp + +from mock import patch, Mock +from nose import SkipTest + +from celery import current_app +from celery import signals +from celery.app.log import Logging, TaskFormatter +from celery.utils.log import LoggingProxy +from celery.utils import uuid +from celery.utils.log import ( + get_logger, + ColorFormatter, + logger as base_logger, + get_task_logger, +) +from celery.tests.utils import ( + AppCase, Case, override_stdouts, wrap_logger, 
get_handlers,
+)
+
+log = current_app.log
+
+
+class test_TaskFormatter(Case):
+
+    def test_no_task(self):
+        class Record(object):
+            msg = 'hello world'
+            levelname = 'info'
+            exc_text = exc_info = None
+            stack_info = None
+
+            def getMessage(self):
+                return self.msg
+        record = Record()
+        x = TaskFormatter()
+        x.format(record)
+        self.assertEqual(record.task_name, '???')
+        self.assertEqual(record.task_id, '???')
+
+
+class test_ColorFormatter(Case):
+
+    @patch('celery.utils.log.safe_str')
+    @patch('logging.Formatter.formatException')
+    def test_formatException_not_string(self, fe, safe_str):
+        x = ColorFormatter('HELLO')
+        value = KeyError()
+        fe.return_value = value
+        self.assertIs(x.formatException(value), value)
+        self.assertTrue(fe.called)
+        self.assertFalse(safe_str.called)
+
+    @patch('logging.Formatter.formatException')
+    @patch('celery.utils.log.safe_str')
+    def test_formatException_string(self, safe_str, fe, value='HELLO'):
+        x = ColorFormatter(value)
+        fe.return_value = value
+        self.assertTrue(x.formatException(value))
+        if sys.version_info[0] == 2:
+            self.assertTrue(safe_str.called)
+
+    @patch('celery.utils.log.safe_str')
+    def test_format_raises(self, safe_str):
+        x = ColorFormatter('HELLO')
+
+        def on_safe_str(s):
+            try:
+                raise ValueError('foo')
+            finally:
+                safe_str.side_effect = None
+        safe_str.side_effect = on_safe_str
+
+        class Record(object):
+            levelname = 'ERROR'
+            msg = 'HELLO'
+            exc_text = 'error text'
+            stack_info = None
+
+            def __str__(self):
+                return on_safe_str('')
+
+            def getMessage(self):
+                return self.msg
+
+        record = Record()
+        safe_str.return_value = record
+
+        x.format(record)
+        self.assertIn('<Unrepresentable', record.msg) diff --git a/awx/lib/site-packages/celery/tests/backends/test_amqp.py b/awx/lib/site-packages/celery/tests/backends/test_amqp.py new file mode 100644 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/backends/test_amqp.py
+from __future__ import absolute_import
+from __future__ import with_statement
+
+import pickle
+import socket
+
+from mock import Mock
+from Queue import Empty, Queue
+
+from celery import current_app
+from celery import states
+from celery.app import app_or_default
+from celery.backends.amqp import AMQPBackend
+from celery.exceptions import TimeoutError
+from celery.utils import uuid
+
+from celery.tests.utils import AppCase
+
+
+class test_AMQPBackend(AppCase):
+
+    def create_backend(self, **opts):
+        opts = dict(dict(serializer='pickle', persistent=False), **opts)
+        return AMQPBackend(**opts)
+
+    def test_store_result_retries(self):
+        iterations = [0]
+        stop_raising_at = [5]
+
+        def publish(*args, **kwargs):
+            if iterations[0] > stop_raising_at[0]:
+                return
+            iterations[0] += 1
+            raise KeyError('foo')
+
+        backend = AMQPBackend()
+        from celery.app.amqp import TaskProducer
+        prod, TaskProducer.publish = TaskProducer.publish, publish
+        try:
+            with self.assertRaises(KeyError):
+                backend.retry_policy['max_retries'] = None
+                backend.store_result('foo', 'bar', 'STARTED')
+
+            with self.assertRaises(KeyError):
+                backend.retry_policy['max_retries'] = 10
+                backend.store_result('foo', 'bar', 'STARTED')
+        finally:
+            TaskProducer.publish = prod
+
+    def assertState(self, retval, state):
+        self.assertEqual(retval['status'], state)
+
+    def test_poll_no_messages(self):
+        b = self.create_backend()
+        self.assertState(b.get_task_meta(uuid()), states.PENDING)
+
+    def test_poll_result(self):
+
+        results = Queue()
+
+        class Message(object):
+            acked = 0
+            requeued = 0
+
+            def __init__(self, **merge):
+                self.payload = dict({'status': states.STARTED,
+                                     'result': None}, **merge)
+                self.body = pickle.dumps(self.payload)
+                self.content_type = 'application/x-python-serialize'
+                self.content_encoding = 'binary'
+
+            def ack(self, *args, **kwargs):
+                self.acked += 1
+
+            def requeue(self, *args, **kwargs):
+                self.requeued += 1
+
+        class MockBinding(object):
+
+            def __init__(self, *args, **kwargs):
+                self.channel = Mock()
+
+            def __call__(self, *args, **kwargs):
+                return self
+
+            def declare(self):
+                pass
+
+            def get(self, no_ack=False):
+                try:
+                    return results.get(block=False)
+                except Empty:
+                    pass
+
+            def is_bound(self):
+                return True
+
+        class MockBackend(AMQPBackend):
+            Queue = MockBinding
+
+        backend = MockBackend()
+        backend._republish = Mock()
+
+        # FFWD's to the latest state.
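+        # (each poll drains every pending result message and keeps only
+        # the newest state; the final message is requeued so the latest
+        # state stays visible to other consumers)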
+ state_messages = [ + Message(status=states.RECEIVED, seq=1), + Message(status=states.STARTED, seq=2), + Message(status=states.FAILURE, seq=3), + ] + for state_message in state_messages: + results.put(state_message) + r1 = backend.get_task_meta(uuid()) + self.assertDictContainsSubset({'status': states.FAILURE, + 'seq': 3}, r1, + 'FFWDs to the last state') + + # Caches last known state. + results.put(Message()) + tid = uuid() + backend.get_task_meta(tid) + self.assertIn(tid, backend._cache, 'Caches last known state') + + self.assertTrue(state_messages[-1].requeued) + + # Returns cache if no new states. + results.queue.clear() + assert not results.qsize() + backend._cache[tid] = 'hello' + self.assertEqual(backend.get_task_meta(tid), 'hello', + 'Returns cache if no new states') + + def test_wait_for(self): + b = self.create_backend() + + tid = uuid() + with self.assertRaises(TimeoutError): + b.wait_for(tid, timeout=0.1) + b.store_result(tid, None, states.STARTED) + with self.assertRaises(TimeoutError): + b.wait_for(tid, timeout=0.1) + b.store_result(tid, None, states.RETRY) + with self.assertRaises(TimeoutError): + b.wait_for(tid, timeout=0.1) + b.store_result(tid, 42, states.SUCCESS) + self.assertEqual(b.wait_for(tid, timeout=1), 42) + b.store_result(tid, 56, states.SUCCESS) + self.assertEqual(b.wait_for(tid, timeout=1), 42, + 'result is cached') + self.assertEqual(b.wait_for(tid, timeout=1, cache=False), 56) + b.store_result(tid, KeyError('foo'), states.FAILURE) + with self.assertRaises(KeyError): + b.wait_for(tid, timeout=1, cache=False) + + def test_drain_events_remaining_timeouts(self): + + class Connection(object): + + def drain_events(self, timeout=None): + pass + + b = self.create_backend() + with current_app.pool.acquire_channel(block=False) as (_, channel): + binding = b._create_binding(uuid()) + consumer = b.Consumer(channel, binding, no_ack=True) + with self.assertRaises(socket.timeout): + b.drain_events(Connection(), consumer, timeout=0.1) + + def test_get_many(self): + b = self.create_backend() + + tids = [] + for i in xrange(10): + tid = uuid() + b.store_result(tid, i, states.SUCCESS) + tids.append(tid) + + res = list(b.get_many(tids, timeout=1)) + expected_results = [(tid, {'status': states.SUCCESS, + 'result': i, + 'traceback': None, + 'task_id': tid, + 'children': None}) + for i, tid in enumerate(tids)] + self.assertEqual(sorted(res), sorted(expected_results)) + self.assertDictEqual(b._cache[res[0][0]], res[0][1]) + cached_res = list(b.get_many(tids, timeout=1)) + self.assertEqual(sorted(cached_res), sorted(expected_results)) + b._cache[res[0][0]]['status'] = states.RETRY + with self.assertRaises(socket.timeout): + list(b.get_many(tids, timeout=0.01)) + + def test_test_get_many_raises_outer_block(self): + + class Backend(AMQPBackend): + + def Consumer(*args, **kwargs): + raise KeyError('foo') + + b = Backend() + with self.assertRaises(KeyError): + b.get_many(['id1']).next() + + def test_test_get_many_raises_inner_block(self): + + class Backend(AMQPBackend): + + def drain_events(self, *args, **kwargs): + raise KeyError('foo') + + b = Backend() + with self.assertRaises(KeyError): + b.get_many(['id1']).next() + + def test_no_expires(self): + b = self.create_backend(expires=None) + app = app_or_default() + prev = app.conf.CELERY_AMQP_TASK_RESULT_EXPIRES + app.conf.CELERY_AMQP_TASK_RESULT_EXPIRES = None + try: + b = self.create_backend(expires=None) + with self.assertRaises(KeyError): + b.queue_arguments['x-expires'] + finally: + app.conf.CELERY_AMQP_TASK_RESULT_EXPIRES = prev + 
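+    # the AMQP backend streams each state as a message, e.g. (a rough
+    # sketch, not part of this suite):
+    #
+    #     backend = AMQPBackend()
+    #     tid = uuid()
+    #     backend.store_result(tid, None, states.STARTED)
+    #     backend.get_task_meta(tid)['status']  # -> 'STARTED'
+    #
+    # it keeps no per-process state, so process_cleanup() below should be
+    # a harmless no-op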
+ def test_process_cleanup(self): + self.create_backend().process_cleanup() + + def test_reload_task_result(self): + with self.assertRaises(NotImplementedError): + self.create_backend().reload_task_result('x') + + def test_reload_group_result(self): + with self.assertRaises(NotImplementedError): + self.create_backend().reload_group_result('x') + + def test_save_group(self): + with self.assertRaises(NotImplementedError): + self.create_backend().save_group('x', 'x') + + def test_restore_group(self): + with self.assertRaises(NotImplementedError): + self.create_backend().restore_group('x') + + def test_delete_group(self): + with self.assertRaises(NotImplementedError): + self.create_backend().delete_group('x') diff --git a/awx/lib/site-packages/celery/tests/backends/test_backends.py b/awx/lib/site-packages/celery/tests/backends/test_backends.py new file mode 100644 index 0000000000..467ef5eecd --- /dev/null +++ b/awx/lib/site-packages/celery/tests/backends/test_backends.py @@ -0,0 +1,48 @@ +from __future__ import absolute_import +from __future__ import with_statement + +from mock import patch + +from celery import current_app +from celery import backends +from celery.backends.amqp import AMQPBackend +from celery.backends.cache import CacheBackend +from celery.tests.utils import Case + + +class test_backends(Case): + + def test_get_backend_aliases(self): + expects = [('amqp', AMQPBackend), + ('cache', CacheBackend)] + for expect_name, expect_cls in expects: + self.assertIsInstance(backends.get_backend_cls(expect_name)(), + expect_cls) + + def test_get_backend_cache(self): + backends.get_backend_cls.clear() + hits = backends.get_backend_cls.hits + misses = backends.get_backend_cls.misses + self.assertTrue(backends.get_backend_cls('amqp')) + self.assertEqual(backends.get_backend_cls.misses, misses + 1) + self.assertTrue(backends.get_backend_cls('amqp')) + self.assertEqual(backends.get_backend_cls.hits, hits + 1) + + def test_unknown_backend(self): + with self.assertRaises(ImportError): + backends.get_backend_cls('fasodaopjeqijwqe') + + def test_default_backend(self): + self.assertEqual(backends.default_backend, current_app.backend) + + def test_backend_by_url(self, url='redis://localhost/1'): + from celery.backends.redis import RedisBackend + backend, url_ = backends.get_backend_by_url(url) + self.assertIs(backend, RedisBackend) + self.assertEqual(url_, url) + + def test_sym_raises_ValuError(self): + with patch('celery.backends.symbol_by_name') as sbn: + sbn.side_effect = ValueError() + with self.assertRaises(ValueError): + backends.get_backend_cls('xxx.xxx:foo') diff --git a/awx/lib/site-packages/celery/tests/backends/test_base.py b/awx/lib/site-packages/celery/tests/backends/test_base.py new file mode 100644 index 0000000000..cae67425c8 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/backends/test_base.py @@ -0,0 +1,338 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import sys +import types + +from mock import Mock +from nose import SkipTest + +from celery import current_app +from celery.result import AsyncResult, GroupResult +from celery.utils import serialization +from celery.utils.serialization import subclass_exception +from celery.utils.serialization import \ + find_nearest_pickleable_exception as fnpe +from celery.utils.serialization import UnpickleableExceptionWrapper +from celery.utils.serialization import get_pickleable_exception as gpe + +from celery import states +from celery.backends.base import BaseBackend, KeyValueStoreBackend +from 
celery.backends.base import BaseDictBackend, DisabledBackend +from celery.utils import uuid + +from celery.tests.utils import Case + + +class wrapobject(object): + + def __init__(self, *args, **kwargs): + self.args = args + +if sys.version_info >= (3, 0): + Oldstyle = None +else: + Oldstyle = types.ClassType('Oldstyle', (), {}) +Unpickleable = subclass_exception('Unpickleable', KeyError, 'foo.module') +Impossible = subclass_exception('Impossible', object, 'foo.module') +Lookalike = subclass_exception('Lookalike', wrapobject, 'foo.module') +b = BaseBackend() + + +class test_serialization(Case): + + def test_create_exception_cls(self): + self.assertTrue(serialization.create_exception_cls('FooError', 'm')) + self.assertTrue(serialization.create_exception_cls('FooError', 'm', + KeyError)) + + +class test_BaseBackend_interface(Case): + + def test_get_status(self): + with self.assertRaises(NotImplementedError): + b.get_status('SOMExx-N0Nex1stant-IDxx-') + + def test__forget(self): + with self.assertRaises(NotImplementedError): + b.forget('SOMExx-N0Nex1stant-IDxx-') + + def test_get_children(self): + with self.assertRaises(NotImplementedError): + b.get_children('SOMExx-N0Nex1stant-IDxx-') + + def test_store_result(self): + with self.assertRaises(NotImplementedError): + b.store_result('SOMExx-N0nex1stant-IDxx-', 42, states.SUCCESS) + + def test_mark_as_started(self): + with self.assertRaises(NotImplementedError): + b.mark_as_started('SOMExx-N0nex1stant-IDxx-') + + def test_reload_task_result(self): + with self.assertRaises(NotImplementedError): + b.reload_task_result('SOMExx-N0nex1stant-IDxx-') + + def test_reload_group_result(self): + with self.assertRaises(NotImplementedError): + b.reload_group_result('SOMExx-N0nex1stant-IDxx-') + + def test_get_result(self): + with self.assertRaises(NotImplementedError): + b.get_result('SOMExx-N0nex1stant-IDxx-') + + def test_restore_group(self): + with self.assertRaises(NotImplementedError): + b.restore_group('SOMExx-N0nex1stant-IDxx-') + + def test_delete_group(self): + with self.assertRaises(NotImplementedError): + b.delete_group('SOMExx-N0nex1stant-IDxx-') + + def test_save_group(self): + with self.assertRaises(NotImplementedError): + b.save_group('SOMExx-N0nex1stant-IDxx-', 'blergh') + + def test_get_traceback(self): + with self.assertRaises(NotImplementedError): + b.get_traceback('SOMExx-N0nex1stant-IDxx-') + + def test_forget(self): + with self.assertRaises(NotImplementedError): + b.forget('SOMExx-N0nex1stant-IDxx-') + + def test_on_chord_part_return(self): + b.on_chord_part_return(None) + + def test_on_chord_apply(self, unlock='celery.chord_unlock'): + p, current_app.tasks[unlock] = current_app.tasks.get(unlock), Mock() + try: + b.on_chord_apply('dakj221', 'sdokqweok', + result=map(AsyncResult, [1, 2, 3])) + self.assertTrue(current_app.tasks[unlock].apply_async.call_count) + finally: + current_app.tasks[unlock] = p + + +class test_exception_pickle(Case): + + def test_oldstyle(self): + if Oldstyle is None: + raise SkipTest('py3k does not support old style classes') + self.assertIsNone(fnpe(Oldstyle())) + + def test_BaseException(self): + self.assertIsNone(fnpe(Exception())) + + def test_get_pickleable_exception(self): + exc = Exception('foo') + self.assertEqual(gpe(exc), exc) + + def test_unpickleable(self): + self.assertIsInstance(fnpe(Unpickleable()), KeyError) + self.assertIsNone(fnpe(Impossible())) + + +class test_prepare_exception(Case): + + def test_unpickleable(self): + x = b.prepare_exception(Unpickleable(1, 2, 'foo')) + self.assertIsInstance(x, 
KeyError) + y = b.exception_to_python(x) + self.assertIsInstance(y, KeyError) + + def test_impossible(self): + x = b.prepare_exception(Impossible()) + self.assertIsInstance(x, UnpickleableExceptionWrapper) + self.assertTrue(str(x)) + y = b.exception_to_python(x) + self.assertEqual(y.__class__.__name__, 'Impossible') + if sys.version_info < (2, 5): + self.assertTrue(y.__class__.__module__) + else: + self.assertEqual(y.__class__.__module__, 'foo.module') + + def test_regular(self): + x = b.prepare_exception(KeyError('baz')) + self.assertIsInstance(x, KeyError) + y = b.exception_to_python(x) + self.assertIsInstance(y, KeyError) + + +class KVBackend(KeyValueStoreBackend): + mget_returns_dict = False + + def __init__(self, *args, **kwargs): + self.db = {} + super(KVBackend, self).__init__(KeyValueStoreBackend) + + def get(self, key): + return self.db.get(key) + + def set(self, key, value): + self.db[key] = value + + def mget(self, keys): + if self.mget_returns_dict: + return dict((key, self.get(key)) for key in keys) + else: + return [self.get(k) for k in keys] + + def delete(self, key): + self.db.pop(key, None) + + +class DictBackend(BaseDictBackend): + + def __init__(self, *args, **kwargs): + BaseDictBackend.__init__(self, *args, **kwargs) + self._data = {'can-delete': {'result': 'foo'}} + + def _restore_group(self, group_id): + if group_id == 'exists': + return {'result': 'group'} + + def _get_task_meta_for(self, task_id): + if task_id == 'task-exists': + return {'result': 'task'} + + def _delete_group(self, group_id): + self._data.pop(group_id, None) + + +class test_BaseDictBackend(Case): + + def setUp(self): + self.b = DictBackend() + + def test_delete_group(self): + self.b.delete_group('can-delete') + self.assertNotIn('can-delete', self.b._data) + + def test_prepare_exception_json(self): + x = DictBackend(serializer='json') + e = x.prepare_exception(KeyError('foo')) + self.assertIn('exc_type', e) + e = x.exception_to_python(e) + self.assertEqual(e.__class__.__name__, 'KeyError') + self.assertEqual(str(e), "'foo'") + + def test_save_group(self): + b = BaseDictBackend() + b._save_group = Mock() + b.save_group('foofoo', 'xxx') + b._save_group.assert_called_with('foofoo', 'xxx') + + def test_forget_interface(self): + b = BaseDictBackend() + with self.assertRaises(NotImplementedError): + b.forget('foo') + + def test_restore_group(self): + self.assertIsNone(self.b.restore_group('missing')) + self.assertIsNone(self.b.restore_group('missing')) + self.assertEqual(self.b.restore_group('exists'), 'group') + self.assertEqual(self.b.restore_group('exists'), 'group') + self.assertEqual(self.b.restore_group('exists', cache=False), 'group') + + def test_reload_group_result(self): + self.b._cache = {} + self.b.reload_group_result('exists') + self.b._cache['exists'] = {'result': 'group'} + + def test_reload_task_result(self): + self.b._cache = {} + self.b.reload_task_result('task-exists') + self.b._cache['task-exists'] = {'result': 'task'} + + +class test_KeyValueStoreBackend(Case): + + def setUp(self): + self.b = KVBackend() + + def test_on_chord_part_return(self): + assert not self.b.implements_incr + self.b.on_chord_part_return(None) + + def test_get_store_delete_result(self): + tid = uuid() + self.b.mark_as_done(tid, 'Hello world') + self.assertEqual(self.b.get_result(tid), 'Hello world') + self.assertEqual(self.b.get_status(tid), states.SUCCESS) + self.b.forget(tid) + self.assertEqual(self.b.get_status(tid), states.PENDING) + + def test_strip_prefix(self): + x = self.b.get_key_for_task('x1b34') + 
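+        # _strip_prefix must invert get_key_for_task() and leave bare
+        # task ids untouched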
+        self.assertEqual(self.b._strip_prefix(x), 'x1b34')
+        self.assertEqual(self.b._strip_prefix('x1b34'), 'x1b34')
+
+    def test_get_many(self):
+        for is_dict in True, False:
+            self.b.mget_returns_dict = is_dict
+            ids = dict((uuid(), i) for i in xrange(10))
+            for id, i in ids.items():
+                self.b.mark_as_done(id, i)
+            it = self.b.get_many(ids.keys())
+            for i, (got_id, got_state) in enumerate(it):
+                self.assertEqual(got_state['result'], ids[got_id])
+            self.assertEqual(i, 9)
+            self.assertTrue(list(self.b.get_many(ids.keys())))
+
+    def test_get_missing_meta(self):
+        self.assertIsNone(self.b.get_result('xxx-missing'))
+        self.assertEqual(self.b.get_status('xxx-missing'), states.PENDING)
+
+    def test_save_restore_delete_group(self):
+        tid = uuid()
+        tsr = GroupResult(tid, [AsyncResult(uuid()) for _ in range(10)])
+        self.b.save_group(tid, tsr)
+        stored = self.b.restore_group(tid)
+        self.assertEqual(stored, tsr)
+        self.assertEqual(self.b.restore_group(tid), tsr)
+        self.b.delete_group(tid)
+        self.assertIsNone(self.b.restore_group(tid))
+
+    def test_restore_missing_group(self):
+        self.assertIsNone(self.b.restore_group('xxx-nonexistant'))
+
+
+class test_KeyValueStoreBackend_interface(Case):
+
+    def test_get(self):
+        with self.assertRaises(NotImplementedError):
+            KeyValueStoreBackend().get('a')
+
+    def test_set(self):
+        with self.assertRaises(NotImplementedError):
+            KeyValueStoreBackend().set('a', 1)
+
+    def test_incr(self):
+        with self.assertRaises(NotImplementedError):
+            KeyValueStoreBackend().incr('a')
+
+    def test_cleanup(self):
+        self.assertFalse(KeyValueStoreBackend().cleanup())
+
+    def test_delete(self):
+        with self.assertRaises(NotImplementedError):
+            KeyValueStoreBackend().delete('a')
+
+    def test_mget(self):
+        with self.assertRaises(NotImplementedError):
+            KeyValueStoreBackend().mget(['a'])
+
+    def test_forget(self):
+        with self.assertRaises(NotImplementedError):
+            KeyValueStoreBackend().forget('a')
+
+
+class test_DisabledBackend(Case):
+
+    def test_store_result(self):
+        DisabledBackend().store_result()
+
+    def test_is_disabled(self):
+        with self.assertRaises(NotImplementedError):
+            DisabledBackend().get_status('foo')
diff --git a/awx/lib/site-packages/celery/tests/backends/test_cache.py b/awx/lib/site-packages/celery/tests/backends/test_cache.py
new file mode 100644
index 0000000000..a78aae554d
--- /dev/null
+++ b/awx/lib/site-packages/celery/tests/backends/test_cache.py
@@ -0,0 +1,244 @@
+from __future__ import absolute_import
+from __future__ import with_statement
+
+import sys
+import types
+
+from contextlib import contextmanager
+
+from kombu.utils.encoding import str_to_bytes
+from mock import Mock, patch
+
+from celery import current_app
+from celery import states
+from celery.backends.cache import CacheBackend, DummyClient
+from celery.exceptions import ImproperlyConfigured
+from celery.result import AsyncResult
+from celery.task import subtask
+from celery.utils import uuid
+
+from celery.tests.utils import Case, mask_modules, reset_modules
+
+
+class SomeClass(object):
+
+    def __init__(self, data):
+        self.data = data
+
+
+class test_CacheBackend(Case):
+
+    def setUp(self):
+        self.tb = CacheBackend(backend='memory://')
+        self.tid = uuid()
+
+    def test_mark_as_done(self):
+        self.assertEqual(self.tb.get_status(self.tid), states.PENDING)
+        self.assertIsNone(self.tb.get_result(self.tid))
+
+        self.tb.mark_as_done(self.tid, 42)
+        self.assertEqual(self.tb.get_status(self.tid), states.SUCCESS)
+        self.assertEqual(self.tb.get_result(self.tid), 42)
+
+    def test_is_pickled(self):
+        result = {'foo': 'baz', 'bar': 
SomeClass(12345)} + self.tb.mark_as_done(self.tid, result) + # is serialized properly. + rindb = self.tb.get_result(self.tid) + self.assertEqual(rindb.get('foo'), 'baz') + self.assertEqual(rindb.get('bar').data, 12345) + + def test_mark_as_failure(self): + try: + raise KeyError('foo') + except KeyError, exception: + self.tb.mark_as_failure(self.tid, exception) + self.assertEqual(self.tb.get_status(self.tid), states.FAILURE) + self.assertIsInstance(self.tb.get_result(self.tid), KeyError) + + def test_on_chord_apply(self): + tb = CacheBackend(backend='memory://') + gid, res = uuid(), [AsyncResult(uuid()) for _ in xrange(3)] + tb.on_chord_apply(gid, {}, result=res) + + @patch('celery.result.GroupResult') + def test_on_chord_part_return(self, setresult): + tb = CacheBackend(backend='memory://') + + deps = Mock() + deps.__len__ = Mock() + deps.__len__.return_value = 2 + setresult.restore.return_value = deps + task = Mock() + task.name = 'foobarbaz' + try: + current_app.tasks['foobarbaz'] = task + task.request.chord = subtask(task) + + gid, res = uuid(), [AsyncResult(uuid()) for _ in xrange(3)] + task.request.group = gid + tb.on_chord_apply(gid, {}, result=res) + + self.assertFalse(deps.join_native.called) + tb.on_chord_part_return(task) + self.assertFalse(deps.join_native.called) + + tb.on_chord_part_return(task) + deps.join_native.assert_called_with(propagate=False) + deps.delete.assert_called_with() + + finally: + current_app.tasks.pop('foobarbaz') + + def test_mget(self): + self.tb.set('foo', 1) + self.tb.set('bar', 2) + + self.assertDictEqual(self.tb.mget(['foo', 'bar']), + {'foo': 1, 'bar': 2}) + + def test_forget(self): + self.tb.mark_as_done(self.tid, {'foo': 'bar'}) + x = AsyncResult(self.tid, backend=self.tb) + x.forget() + self.assertIsNone(x.result) + + def test_process_cleanup(self): + self.tb.process_cleanup() + + def test_expires_as_int(self): + tb = CacheBackend(backend='memory://', expires=10) + self.assertEqual(tb.expires, 10) + + def test_unknown_backend_raises_ImproperlyConfigured(self): + with self.assertRaises(ImproperlyConfigured): + CacheBackend(backend='unknown://') + + +class MyMemcachedStringEncodingError(Exception): + pass + + +class MemcachedClient(DummyClient): + + def set(self, key, value, *args, **kwargs): + if isinstance(key, unicode): + raise MyMemcachedStringEncodingError( + 'Keys must be str, not unicode. 
Convert your unicode ' + 'strings using mystring.encode(charset)!') + return super(MemcachedClient, self).set(key, value, *args, **kwargs) + + +class MockCacheMixin(object): + + @contextmanager + def mock_memcache(self): + memcache = types.ModuleType('memcache') + memcache.Client = MemcachedClient + memcache.Client.__module__ = memcache.__name__ + prev, sys.modules['memcache'] = sys.modules.get('memcache'), memcache + try: + yield True + finally: + if prev is not None: + sys.modules['memcache'] = prev + + @contextmanager + def mock_pylibmc(self): + pylibmc = types.ModuleType('pylibmc') + pylibmc.Client = MemcachedClient + pylibmc.Client.__module__ = pylibmc.__name__ + prev = sys.modules.get('pylibmc') + sys.modules['pylibmc'] = pylibmc + try: + yield True + finally: + if prev is not None: + sys.modules['pylibmc'] = prev + + +class test_get_best_memcache(Case, MockCacheMixin): + + def test_pylibmc(self): + with self.mock_pylibmc(): + with reset_modules('celery.backends.cache'): + from celery.backends import cache + cache._imp = [None] + self.assertEqual(cache.get_best_memcache().__module__, + 'pylibmc') + + def test_memcache(self): + with self.mock_memcache(): + with reset_modules('celery.backends.cache'): + with mask_modules('pylibmc'): + from celery.backends import cache + cache._imp = [None] + self.assertEqual(cache.get_best_memcache().__module__, + 'memcache') + + def test_no_implementations(self): + with mask_modules('pylibmc', 'memcache'): + with reset_modules('celery.backends.cache'): + from celery.backends import cache + cache._imp = [None] + with self.assertRaises(ImproperlyConfigured): + cache.get_best_memcache() + + def test_cached(self): + with self.mock_pylibmc(): + with reset_modules('celery.backends.cache'): + from celery.backends import cache + cache._imp = [None] + cache.get_best_memcache(behaviors={'foo': 'bar'}) + self.assertTrue(cache._imp[0]) + cache.get_best_memcache() + + def test_backends(self): + from celery.backends.cache import backends + for name, fun in backends.items(): + self.assertTrue(fun()) + + +class test_memcache_key(Case, MockCacheMixin): + + def test_memcache_unicode_key(self): + with self.mock_memcache(): + with reset_modules('celery.backends.cache'): + with mask_modules('pylibmc'): + from celery.backends import cache + cache._imp = [None] + task_id, result = unicode(uuid()), 42 + b = cache.CacheBackend(backend='memcache') + b.store_result(task_id, result, status=states.SUCCESS) + self.assertEqual(b.get_result(task_id), result) + + def test_memcache_bytes_key(self): + with self.mock_memcache(): + with reset_modules('celery.backends.cache'): + with mask_modules('pylibmc'): + from celery.backends import cache + cache._imp = [None] + task_id, result = str_to_bytes(uuid()), 42 + b = cache.CacheBackend(backend='memcache') + b.store_result(task_id, result, status=states.SUCCESS) + self.assertEqual(b.get_result(task_id), result) + + def test_pylibmc_unicode_key(self): + with reset_modules('celery.backends.cache'): + with self.mock_pylibmc(): + from celery.backends import cache + cache._imp = [None] + task_id, result = unicode(uuid()), 42 + b = cache.CacheBackend(backend='memcache') + b.store_result(task_id, result, status=states.SUCCESS) + self.assertEqual(b.get_result(task_id), result) + + def test_pylibmc_bytes_key(self): + with reset_modules('celery.backends.cache'): + with self.mock_pylibmc(): + from celery.backends import cache + cache._imp = [None] + task_id, result = str_to_bytes(uuid()), 42 + b = cache.CacheBackend(backend='memcache') + 
b.store_result(task_id, result, status=states.SUCCESS)
+                self.assertEqual(b.get_result(task_id), result)
diff --git a/awx/lib/site-packages/celery/tests/backends/test_cassandra.py b/awx/lib/site-packages/celery/tests/backends/test_cassandra.py
new file mode 100644
index 0000000000..3965a61d9d
--- /dev/null
+++ b/awx/lib/site-packages/celery/tests/backends/test_cassandra.py
@@ -0,0 +1,193 @@
+from __future__ import absolute_import
+from __future__ import with_statement
+
+import socket
+
+from mock import Mock
+from pickle import loads, dumps
+
+from celery import Celery
+from celery import states
+from celery.exceptions import ImproperlyConfigured
+from celery.tests.utils import AppCase, mock_module
+
+
+class Object(object):
+    pass
+
+
+def install_exceptions(mod):
+    # py3k: cannot catch exceptions not inheriting from BaseException.
+
+    class NotFoundException(Exception):
+        pass
+
+    class TException(Exception):
+        pass
+
+    class InvalidRequestException(Exception):
+        pass
+
+    class UnavailableException(Exception):
+        pass
+
+    class TimedOutException(Exception):
+        pass
+
+    class AllServersUnavailable(Exception):
+        pass
+
+    mod.NotFoundException = NotFoundException
+    mod.TException = TException
+    mod.InvalidRequestException = InvalidRequestException
+    mod.TimedOutException = TimedOutException
+    mod.UnavailableException = UnavailableException
+    mod.AllServersUnavailable = AllServersUnavailable
+
+
+class test_CassandraBackend(AppCase):
+
+    def test_init_no_pycassa(self):
+        with mock_module('pycassa'):
+            from celery.backends import cassandra as mod
+            prev, mod.pycassa = mod.pycassa, None
+            try:
+                with self.assertRaises(ImproperlyConfigured):
+                    mod.CassandraBackend(app=self.app)
+            finally:
+                mod.pycassa = prev
+
+    def get_app(self):
+        celery = Celery(set_as_current=False)
+        celery.conf.CASSANDRA_SERVERS = ['example.com']
+        celery.conf.CASSANDRA_KEYSPACE = 'keyspace'
+        celery.conf.CASSANDRA_COLUMN_FAMILY = 'columns'
+        return celery
+
+    def test_init_with_and_without_LOCAL_QUORUM(self):
+        with mock_module('pycassa'):
+            from celery.backends import cassandra as mod
+            mod.pycassa = Mock()
+            install_exceptions(mod.pycassa)
+            cons = mod.pycassa.ConsistencyLevel = Object()
+            cons.LOCAL_QUORUM = 'foo'
+
+            app = self.get_app()
+            app.conf.CASSANDRA_READ_CONSISTENCY = 'LOCAL_FOO'
+            app.conf.CASSANDRA_WRITE_CONSISTENCY = 'LOCAL_FOO'
+
+            mod.CassandraBackend(app=app)
+            cons.LOCAL_FOO = 'bar'
+            mod.CassandraBackend(app=app)
+
+            # no servers raises ImproperlyConfigured
+            with self.assertRaises(ImproperlyConfigured):
+                app.conf.CASSANDRA_SERVERS = None
+                mod.CassandraBackend(app=app, keyspace='b', column_family='c')
+
+    def test_reduce(self):
+        with mock_module('pycassa'):
+            from celery.backends.cassandra import CassandraBackend
+            self.assertTrue(loads(dumps(CassandraBackend(app=self.get_app()))))
+
+    def test_get_task_meta_for(self):
+        with mock_module('pycassa'):
+            from celery.backends import cassandra as mod
+            mod.pycassa = Mock()
+            install_exceptions(mod.pycassa)
+            mod.Thrift = Mock()
+            install_exceptions(mod.Thrift)
+            app = self.get_app()
+            x = mod.CassandraBackend(app=app)
+            Get_Column = x._get_column_family = Mock()
+            get_column = Get_Column.return_value = Mock()
+            get = get_column.get
+            META = get.return_value = {
+                'task_id': 'task_id',
+                'status': states.SUCCESS,
+                'result': '1',
+                'date_done': 'date',
+                'traceback': '',
+                'children': None,
+            }
+            x.decode = Mock()
+            x.detailed_mode = False
+            meta = x._get_task_meta_for('task_id')
+            self.assertEqual(meta['status'], states.SUCCESS)
+
+            x.detailed_mode = 
True + row = get.return_value = Mock() + row.values.return_value = [Mock()] + x.decode.return_value = META + meta = x._get_task_meta_for('task_id') + self.assertEqual(meta['status'], states.SUCCESS) + x.decode.return_value = Mock() + + x.detailed_mode = False + get.side_effect = KeyError() + meta = x._get_task_meta_for('task_id') + self.assertEqual(meta['status'], states.PENDING) + + calls = [0] + end = [10] + + def work_eventually(*arg): + try: + if calls[0] > end[0]: + return META + raise socket.error() + finally: + calls[0] += 1 + get.side_effect = work_eventually + x._retry_timeout = 10 + x._retry_wait = 0.01 + meta = x._get_task_meta_for('task') + self.assertEqual(meta['status'], states.SUCCESS) + + x._retry_timeout = 0.1 + calls[0], end[0] = 0, 100 + with self.assertRaises(socket.error): + x._get_task_meta_for('task') + + def test_store_result(self): + with mock_module('pycassa'): + from celery.backends import cassandra as mod + mod.pycassa = Mock() + install_exceptions(mod.pycassa) + mod.Thrift = Mock() + install_exceptions(mod.Thrift) + app = self.get_app() + x = mod.CassandraBackend(app=app) + Get_Column = x._get_column_family = Mock() + cf = Get_Column.return_value = Mock() + x.detailed_mode = False + x._store_result('task_id', 'result', states.SUCCESS) + self.assertTrue(cf.insert.called) + + cf.insert.reset() + x.detailed_mode = True + x._store_result('task_id', 'result', states.SUCCESS) + self.assertTrue(cf.insert.called) + + def test_process_cleanup(self): + with mock_module('pycassa'): + from celery.backends import cassandra as mod + app = self.get_app() + x = mod.CassandraBackend(app=app) + x._column_family = None + x.process_cleanup() + + x._column_family = True + x.process_cleanup() + self.assertIsNone(x._column_family) + + def test_get_column_family(self): + with mock_module('pycassa'): + from celery.backends import cassandra as mod + mod.pycassa = Mock() + install_exceptions(mod.pycassa) + app = self.get_app() + x = mod.CassandraBackend(app=app) + self.assertTrue(x._get_column_family()) + self.assertIsNotNone(x._column_family) + self.assertIs(x._get_column_family(), x._column_family) diff --git a/awx/lib/site-packages/celery/tests/backends/test_database.py b/awx/lib/site-packages/celery/tests/backends/test_database.py new file mode 100644 index 0000000000..1dec60f218 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/backends/test_database.py @@ -0,0 +1,205 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import sys + +from datetime import datetime + +from nose import SkipTest +from pickle import loads, dumps + +from celery import states +from celery.app import app_or_default +from celery.exceptions import ImproperlyConfigured +from celery.result import AsyncResult +from celery.utils import uuid + +from celery.tests.utils import ( + Case, + mask_modules, + skip_if_pypy, + skip_if_jython, +) + +try: + import sqlalchemy # noqa +except ImportError: + DatabaseBackend = Task = TaskSet = None # noqa +else: + from celery.backends.database import DatabaseBackend + from celery.backends.database.models import Task, TaskSet + + +class SomeClass(object): + + def __init__(self, data): + self.data = data + + +class test_DatabaseBackend(Case): + + @skip_if_pypy + @skip_if_jython + def setUp(self): + if DatabaseBackend is None: + raise SkipTest('sqlalchemy not installed') + + def test_missing_SQLAlchemy_raises_ImproperlyConfigured(self): + with mask_modules('sqlalchemy'): + from celery.backends.database import _sqlalchemy_installed + with 
self.assertRaises(ImproperlyConfigured): + _sqlalchemy_installed() + + def test_pickle_hack_for_sqla_05(self): + import sqlalchemy as sa + from celery.backends.database import session + prev_base = session.ResultModelBase + prev_ver, sa.__version__ = sa.__version__, '0.5.0' + prev_models = sys.modules.pop('celery.backends.database.models', None) + try: + from sqlalchemy.ext.declarative import declarative_base + session.ResultModelBase = declarative_base() + from celery.backends.database.dfd042c7 import PickleType as Type1 + from celery.backends.database.models import PickleType as Type2 + self.assertIs(Type1, Type2) + finally: + sys.modules['celery.backends.database.models'] = prev_models + sa.__version__ = prev_ver + session.ResultModelBase = prev_base + + def test_missing_dburi_raises_ImproperlyConfigured(self): + conf = app_or_default().conf + prev, conf.CELERY_RESULT_DBURI = conf.CELERY_RESULT_DBURI, None + try: + with self.assertRaises(ImproperlyConfigured): + DatabaseBackend() + finally: + conf.CELERY_RESULT_DBURI = prev + + def test_missing_task_id_is_PENDING(self): + tb = DatabaseBackend() + self.assertEqual(tb.get_status('xxx-does-not-exist'), states.PENDING) + + def test_missing_task_meta_is_dict_with_pending(self): + tb = DatabaseBackend() + self.assertDictContainsSubset({ + 'status': states.PENDING, + 'task_id': 'xxx-does-not-exist-at-all', + 'result': None, + 'traceback': None, + }, tb.get_task_meta('xxx-does-not-exist-at-all')) + + def test_mark_as_done(self): + tb = DatabaseBackend() + + tid = uuid() + + self.assertEqual(tb.get_status(tid), states.PENDING) + self.assertIsNone(tb.get_result(tid)) + + tb.mark_as_done(tid, 42) + self.assertEqual(tb.get_status(tid), states.SUCCESS) + self.assertEqual(tb.get_result(tid), 42) + + def test_is_pickled(self): + tb = DatabaseBackend() + + tid2 = uuid() + result = {'foo': 'baz', 'bar': SomeClass(12345)} + tb.mark_as_done(tid2, result) + # is serialized properly. 
+ rindb = tb.get_result(tid2) + self.assertEqual(rindb.get('foo'), 'baz') + self.assertEqual(rindb.get('bar').data, 12345) + + def test_mark_as_started(self): + tb = DatabaseBackend() + tid = uuid() + tb.mark_as_started(tid) + self.assertEqual(tb.get_status(tid), states.STARTED) + + def test_mark_as_revoked(self): + tb = DatabaseBackend() + tid = uuid() + tb.mark_as_revoked(tid) + self.assertEqual(tb.get_status(tid), states.REVOKED) + + def test_mark_as_retry(self): + tb = DatabaseBackend() + tid = uuid() + try: + raise KeyError('foo') + except KeyError, exception: + import traceback + trace = '\n'.join(traceback.format_stack()) + tb.mark_as_retry(tid, exception, traceback=trace) + self.assertEqual(tb.get_status(tid), states.RETRY) + self.assertIsInstance(tb.get_result(tid), KeyError) + self.assertEqual(tb.get_traceback(tid), trace) + + def test_mark_as_failure(self): + tb = DatabaseBackend() + + tid3 = uuid() + try: + raise KeyError('foo') + except KeyError, exception: + import traceback + trace = '\n'.join(traceback.format_stack()) + tb.mark_as_failure(tid3, exception, traceback=trace) + self.assertEqual(tb.get_status(tid3), states.FAILURE) + self.assertIsInstance(tb.get_result(tid3), KeyError) + self.assertEqual(tb.get_traceback(tid3), trace) + + def test_forget(self): + tb = DatabaseBackend(backend='memory://') + tid = uuid() + tb.mark_as_done(tid, {'foo': 'bar'}) + tb.mark_as_done(tid, {'foo': 'bar'}) + x = AsyncResult(tid, backend=tb) + x.forget() + self.assertIsNone(x.result) + + def test_process_cleanup(self): + tb = DatabaseBackend() + tb.process_cleanup() + + def test_reduce(self): + tb = DatabaseBackend() + self.assertTrue(loads(dumps(tb))) + + def test_save__restore__delete_group(self): + tb = DatabaseBackend() + + tid = uuid() + res = {u'something': 'special'} + self.assertEqual(tb.save_group(tid, res), res) + + res2 = tb.restore_group(tid) + self.assertEqual(res2, res) + + tb.delete_group(tid) + self.assertIsNone(tb.restore_group(tid)) + + self.assertIsNone(tb.restore_group('xxx-nonexisting-id')) + + def test_cleanup(self): + tb = DatabaseBackend() + for i in range(10): + tb.mark_as_done(uuid(), 42) + tb.save_group(uuid(), {'foo': 'bar'}) + s = tb.ResultSession() + for t in s.query(Task).all(): + t.date_done = datetime.now() - tb.expires * 2 + for t in s.query(TaskSet).all(): + t.date_done = datetime.now() - tb.expires * 2 + s.commit() + s.close() + + tb.cleanup() + + def test_Task__repr__(self): + self.assertIn('foo', repr(Task('foo'))) + + def test_TaskSet__repr__(self): + self.assertIn('foo', repr(TaskSet('foo', None))) diff --git a/awx/lib/site-packages/celery/tests/backends/test_mongodb.py b/awx/lib/site-packages/celery/tests/backends/test_mongodb.py new file mode 100644 index 0000000000..8980176581 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/backends/test_mongodb.py @@ -0,0 +1,321 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import datetime +import uuid + +from mock import MagicMock, Mock, patch, sentinel +from nose import SkipTest +from pickle import loads, dumps + +from celery import Celery +from celery import states +from celery.backends import mongodb as module +from celery.backends.mongodb import MongoBackend, Bunch, pymongo +from celery.exceptions import ImproperlyConfigured +from celery.tests.utils import AppCase + +COLLECTION = 'taskmeta_celery' +TASK_ID = str(uuid.uuid1()) +MONGODB_HOST = 'localhost' +MONGODB_PORT = 27017 +MONGODB_USER = 'mongo' +MONGODB_PASSWORD = '1234' +MONGODB_DATABASE = 'testing' 
+MONGODB_COLLECTION = 'collection1' + + +class test_MongoBackend(AppCase): + + def setUp(self): + if pymongo is None: + raise SkipTest('pymongo is not installed.') + + R = self._reset = {} + R['encode'], MongoBackend.encode = MongoBackend.encode, Mock() + R['decode'], MongoBackend.decode = MongoBackend.decode, Mock() + R['Binary'], module.Binary = module.Binary, Mock() + R['datetime'], datetime.datetime = datetime.datetime, Mock() + + self.backend = MongoBackend() + + def tearDown(self): + MongoBackend.encode = self._reset['encode'] + MongoBackend.decode = self._reset['decode'] + module.Binary = self._reset['Binary'] + datetime.datetime = self._reset['datetime'] + + def test_Bunch(self): + x = Bunch(foo='foo', bar=2) + self.assertEqual(x.foo, 'foo') + self.assertEqual(x.bar, 2) + + def test_init_no_mongodb(self): + prev, module.pymongo = module.pymongo, None + try: + with self.assertRaises(ImproperlyConfigured): + MongoBackend() + finally: + module.pymongo = prev + + def test_init_no_settings(self): + celery = Celery(set_as_current=False) + celery.conf.CELERY_MONGODB_BACKEND_SETTINGS = [] + with self.assertRaises(ImproperlyConfigured): + MongoBackend(app=celery) + + def test_init_settings_is_None(self): + celery = Celery(set_as_current=False) + celery.conf.CELERY_MONGODB_BACKEND_SETTINGS = None + MongoBackend(app=celery) + + def test_restore_group_no_entry(self): + x = MongoBackend() + x.collection = Mock() + fo = x.collection.find_one = Mock() + fo.return_value = None + self.assertIsNone(x._restore_group('1f3fab')) + + def test_reduce(self): + x = MongoBackend() + self.assertTrue(loads(dumps(x))) + + def test_get_connection_connection_exists(self): + + with patch('pymongo.connection.Connection') as mock_Connection: + self.backend._connection = sentinel._connection + + connection = self.backend._get_connection() + + self.assertEquals(sentinel._connection, connection) + self.assertFalse(mock_Connection.called) + + def test_get_connection_no_connection_host(self): + + with patch('pymongo.connection.Connection') as mock_Connection: + self.backend._connection = None + self.backend.mongodb_host = MONGODB_HOST + self.backend.mongodb_port = MONGODB_PORT + mock_Connection.return_value = sentinel.connection + + connection = self.backend._get_connection() + mock_Connection.assert_called_once_with( + MONGODB_HOST, MONGODB_PORT, max_pool_size=10) + self.assertEquals(sentinel.connection, connection) + + def test_get_connection_no_connection_mongodb_uri(self): + + with patch('pymongo.connection.Connection') as mock_Connection: + mongodb_uri = 'mongodb://%s:%d' % (MONGODB_HOST, MONGODB_PORT) + self.backend._connection = None + self.backend.mongodb_host = mongodb_uri + + mock_Connection.return_value = sentinel.connection + + connection = self.backend._get_connection() + mock_Connection.assert_called_once_with( + mongodb_uri, max_pool_size=10) + self.assertEquals(sentinel.connection, connection) + + @patch('celery.backends.mongodb.MongoBackend._get_connection') + def test_get_database_no_existing(self, mock_get_connection): + # Should really check for combinations of these two, to be complete. 
+ self.backend.mongodb_user = MONGODB_USER + self.backend.mongodb_password = MONGODB_PASSWORD + + mock_database = Mock() + mock_connection = MagicMock(spec=['__getitem__']) + mock_connection.__getitem__.return_value = mock_database + mock_get_connection.return_value = mock_connection + + database = self.backend.database + + self.assertTrue(database is mock_database) + self.assertTrue(self.backend.__dict__['database'] is mock_database) + mock_database.authenticate.assert_called_once_with( + MONGODB_USER, MONGODB_PASSWORD) + + @patch('celery.backends.mongodb.MongoBackend._get_connection') + def test_get_database_no_existing_no_auth(self, mock_get_connection): + # Should really check for combinations of these two, to be complete. + self.backend.mongodb_user = None + self.backend.mongodb_password = None + + mock_database = Mock() + mock_connection = MagicMock(spec=['__getitem__']) + mock_connection.__getitem__.return_value = mock_database + mock_get_connection.return_value = mock_connection + + database = self.backend.database + + self.assertTrue(database is mock_database) + self.assertFalse(mock_database.authenticate.called) + self.assertTrue(self.backend.__dict__['database'] is mock_database) + + def test_process_cleanup(self): + self.backend._connection = None + self.backend.process_cleanup() + self.assertEquals(self.backend._connection, None) + + self.backend._connection = 'not none' + self.backend.process_cleanup() + self.assertEquals(self.backend._connection, None) + + @patch('celery.backends.mongodb.MongoBackend._get_database') + def test_store_result(self, mock_get_database): + self.backend.mongodb_taskmeta_collection = MONGODB_COLLECTION + + mock_database = MagicMock(spec=['__getitem__', '__setitem__']) + mock_collection = Mock() + + mock_get_database.return_value = mock_database + mock_database.__getitem__.return_value = mock_collection + + ret_val = self.backend._store_result( + sentinel.task_id, sentinel.result, sentinel.status) + + mock_get_database.assert_called_once_with() + mock_database.__getitem__.assert_called_once_with(MONGODB_COLLECTION) + mock_collection.save.assert_called_once() + self.assertEquals(sentinel.result, ret_val) + + @patch('celery.backends.mongodb.MongoBackend._get_database') + def test_get_task_meta_for(self, mock_get_database): + datetime.datetime = self._reset['datetime'] + self.backend.mongodb_taskmeta_collection = MONGODB_COLLECTION + + mock_database = MagicMock(spec=['__getitem__', '__setitem__']) + mock_collection = Mock() + mock_collection.find_one.return_value = MagicMock() + + mock_get_database.return_value = mock_database + mock_database.__getitem__.return_value = mock_collection + + ret_val = self.backend._get_task_meta_for(sentinel.task_id) + + mock_get_database.assert_called_once_with() + mock_database.__getitem__.assert_called_once_with(MONGODB_COLLECTION) + self.assertEquals( + ['status', 'task_id', 'date_done', 'traceback', 'result', + 'children'], + ret_val.keys()) + + @patch('celery.backends.mongodb.MongoBackend._get_database') + def test_get_task_meta_for_no_result(self, mock_get_database): + self.backend.mongodb_taskmeta_collection = MONGODB_COLLECTION + + mock_database = MagicMock(spec=['__getitem__', '__setitem__']) + mock_collection = Mock() + mock_collection.find_one.return_value = None + + mock_get_database.return_value = mock_database + mock_database.__getitem__.return_value = mock_collection + + ret_val = self.backend._get_task_meta_for(sentinel.task_id) + + mock_get_database.assert_called_once_with() + 
mock_database.__getitem__.assert_called_once_with(MONGODB_COLLECTION) + self.assertEquals({'status': states.PENDING, 'result': None}, ret_val) + + @patch('celery.backends.mongodb.MongoBackend._get_database') + def test_save_group(self, mock_get_database): + self.backend.mongodb_taskmeta_collection = MONGODB_COLLECTION + + mock_database = MagicMock(spec=['__getitem__', '__setitem__']) + mock_collection = Mock() + + mock_get_database.return_value = mock_database + mock_database.__getitem__.return_value = mock_collection + + ret_val = self.backend._save_group( + sentinel.taskset_id, sentinel.result) + + mock_get_database.assert_called_once_with() + mock_database.__getitem__.assert_called_once_with(MONGODB_COLLECTION) + mock_collection.save.assert_called_once() + self.assertEquals(sentinel.result, ret_val) + + @patch('celery.backends.mongodb.MongoBackend._get_database') + def test_restore_group(self, mock_get_database): + self.backend.mongodb_taskmeta_collection = MONGODB_COLLECTION + + mock_database = MagicMock(spec=['__getitem__', '__setitem__']) + mock_collection = Mock() + mock_collection.find_one.return_value = MagicMock() + + mock_get_database.return_value = mock_database + mock_database.__getitem__.return_value = mock_collection + + ret_val = self.backend._restore_group(sentinel.taskset_id) + + mock_get_database.assert_called_once_with() + mock_database.__getitem__.assert_called_once_with(MONGODB_COLLECTION) + mock_collection.find_one.assert_called_once_with( + {'_id': sentinel.taskset_id}) + self.assertEquals(['date_done', 'result', 'task_id'], ret_val.keys()) + + @patch('celery.backends.mongodb.MongoBackend._get_database') + def test_delete_group(self, mock_get_database): + self.backend.mongodb_taskmeta_collection = MONGODB_COLLECTION + + mock_database = MagicMock(spec=['__getitem__', '__setitem__']) + mock_collection = Mock() + + mock_get_database.return_value = mock_database + mock_database.__getitem__.return_value = mock_collection + + self.backend._delete_group(sentinel.taskset_id) + + mock_get_database.assert_called_once_with() + mock_database.__getitem__.assert_called_once_with(MONGODB_COLLECTION) + mock_collection.remove.assert_called_once_with( + {'_id': sentinel.taskset_id}) + + @patch('celery.backends.mongodb.MongoBackend._get_database') + def test_forget(self, mock_get_database): + self.backend.mongodb_taskmeta_collection = MONGODB_COLLECTION + + mock_database = MagicMock(spec=['__getitem__', '__setitem__']) + mock_collection = Mock() + + mock_get_database.return_value = mock_database + mock_database.__getitem__.return_value = mock_collection + + self.backend._forget(sentinel.task_id) + + mock_get_database.assert_called_once_with() + mock_database.__getitem__.assert_called_once_with( + MONGODB_COLLECTION) + mock_collection.remove.assert_called_once_with( + {'_id': sentinel.task_id}, safe=True) + + @patch('celery.backends.mongodb.MongoBackend._get_database') + def test_cleanup(self, mock_get_database): + datetime.datetime = self._reset['datetime'] + self.backend.mongodb_taskmeta_collection = MONGODB_COLLECTION + + mock_database = MagicMock(spec=['__getitem__', '__setitem__']) + mock_collection = Mock() + + mock_get_database.return_value = mock_database + mock_database.__getitem__.return_value = mock_collection + + self.backend.app.now = datetime.datetime.utcnow + self.backend.cleanup() + + mock_get_database.assert_called_once_with() + mock_database.__getitem__.assert_called_once_with( + MONGODB_COLLECTION) + mock_collection.assert_called_once() + + def 
test_get_database_authfailure(self): + x = MongoBackend() + x._get_connection = Mock() + conn = x._get_connection.return_value = {} + db = conn[x.mongodb_database] = Mock() + db.authenticate.return_value = False + x.mongodb_user = 'jerry' + x.mongodb_password = 'cere4l' + with self.assertRaises(ImproperlyConfigured): + x._get_database() + db.authenticate.assert_called_with('jerry', 'cere4l') diff --git a/awx/lib/site-packages/celery/tests/backends/test_redis.py b/awx/lib/site-packages/celery/tests/backends/test_redis.py new file mode 100644 index 0000000000..4281ff51a8 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/backends/test_redis.py @@ -0,0 +1,199 @@ +from __future__ import absolute_import +from __future__ import with_statement + +from datetime import timedelta + +from mock import Mock, patch +from nose import SkipTest +from pickle import loads, dumps + +from kombu.utils import cached_property, uuid + +from celery import current_app +from celery import states +from celery.datastructures import AttributeDict +from celery.exceptions import ImproperlyConfigured +from celery.result import AsyncResult +from celery.task import subtask +from celery.utils.timeutils import timedelta_seconds + +from celery.tests.utils import Case + + +class Redis(object): + + class Connection(object): + connected = True + + def disconnect(self): + self.connected = False + + def __init__(self, host=None, port=None, db=None, password=None, **kw): + self.host = host + self.port = port + self.db = db + self.password = password + self.connection = self.Connection() + self.keyspace = {} + self.expiry = {} + + def get(self, key): + return self.keyspace.get(key) + + def setex(self, key, value, expires): + self.set(key, value) + self.expire(key, expires) + + def set(self, key, value): + self.keyspace[key] = value + + def expire(self, key, expires): + self.expiry[key] = expires + + def delete(self, key): + self.keyspace.pop(key) + + def publish(self, key, value): + pass + + +class redis(object): + Redis = Redis + + class ConnectionPool(object): + + def __init__(self, **kwargs): + pass + + +class test_RedisBackend(Case): + + def get_backend(self): + from celery.backends import redis + + class RedisBackend(redis.RedisBackend): + redis = redis + + return RedisBackend + + def setUp(self): + self.Backend = self.get_backend() + + class MockBackend(self.Backend): + + @cached_property + def client(self): + return Mock() + + self.MockBackend = MockBackend + + def test_reduce(self): + try: + from celery.backends.redis import RedisBackend + x = RedisBackend() + self.assertTrue(loads(dumps(x))) + except ImportError: + raise SkipTest('redis not installed') + + def test_no_redis(self): + self.MockBackend.redis = None + with self.assertRaises(ImproperlyConfigured): + self.MockBackend() + + def test_url(self): + x = self.MockBackend('redis://foobar//1') + self.assertEqual(x.host, 'foobar') + self.assertEqual(x.db, '1') + + def test_conf_raises_KeyError(self): + conf = AttributeDict({'CELERY_RESULT_SERIALIZER': 'json', + 'CELERY_MAX_CACHED_RESULTS': 1, + 'CELERY_TASK_RESULT_EXPIRES': None}) + prev, current_app.conf = current_app.conf, conf + try: + self.MockBackend() + finally: + current_app.conf = prev + + def test_expires_defaults_to_config(self): + conf = current_app.conf + prev = conf.CELERY_TASK_RESULT_EXPIRES + conf.CELERY_TASK_RESULT_EXPIRES = 10 + try: + b = self.Backend(expires=None) + self.assertEqual(b.expires, 10) + finally: + conf.CELERY_TASK_RESULT_EXPIRES = prev + + def test_expires_is_int(self): + b = 
self.Backend(expires=48) + self.assertEqual(b.expires, 48) + + def test_expires_is_None(self): + b = self.Backend(expires=None) + self.assertEqual(b.expires, timedelta_seconds( + current_app.conf.CELERY_TASK_RESULT_EXPIRES)) + + def test_expires_is_timedelta(self): + b = self.Backend(expires=timedelta(minutes=1)) + self.assertEqual(b.expires, 60) + + def test_on_chord_apply(self): + self.Backend().on_chord_apply('group_id', {}, + result=map(AsyncResult, [1, 2, 3])) + + def test_mget(self): + b = self.MockBackend() + self.assertTrue(b.mget(['a', 'b', 'c'])) + b.client.mget.assert_called_with(['a', 'b', 'c']) + + def test_set_no_expire(self): + b = self.MockBackend() + b.expires = None + b.set('foo', 'bar') + + @patch('celery.result.GroupResult') + def test_on_chord_part_return(self, setresult): + b = self.MockBackend() + deps = Mock() + deps.__len__ = Mock() + deps.__len__.return_value = 10 + setresult.restore.return_value = deps + b.client.incr.return_value = 1 + task = Mock() + task.name = 'foobarbaz' + try: + current_app.tasks['foobarbaz'] = task + task.request.chord = subtask(task) + task.request.group = 'group_id' + + b.on_chord_part_return(task) + self.assertTrue(b.client.incr.call_count) + + b.client.incr.return_value = len(deps) + b.on_chord_part_return(task) + deps.join_native.assert_called_with(propagate=False) + deps.delete.assert_called_with() + + self.assertTrue(b.client.expire.call_count) + finally: + current_app.tasks.pop('foobarbaz') + + def test_process_cleanup(self): + self.Backend().process_cleanup() + + def test_get_set_forget(self): + b = self.Backend() + tid = uuid() + b.store_result(tid, 42, states.SUCCESS) + self.assertEqual(b.get_status(tid), states.SUCCESS) + self.assertEqual(b.get_result(tid), 42) + b.forget(tid) + self.assertEqual(b.get_status(tid), states.PENDING) + + def test_set_expires(self): + b = self.Backend(expires=512) + tid = uuid() + key = b.get_key_for_task(tid) + b.store_result(tid, 42, states.SUCCESS) + self.assertEqual(b.client.expiry[key], 512) diff --git a/awx/lib/site-packages/celery/tests/bin/__init__.py b/awx/lib/site-packages/celery/tests/bin/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/celery/tests/bin/test_base.py b/awx/lib/site-packages/celery/tests/bin/test_base.py new file mode 100644 index 0000000000..ae3f13917a --- /dev/null +++ b/awx/lib/site-packages/celery/tests/bin/test_base.py @@ -0,0 +1,145 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import os + +from mock import patch + +from celery.bin.base import Command, Option +from celery.tests.utils import AppCase, override_stdouts + + +class Object(object): + pass + + +class MyApp(object): + pass + +APP = MyApp() # <-- Used by test_with_custom_app + + +class MockCommand(Command): + mock_args = ('arg1', 'arg2', 'arg3') + + def parse_options(self, prog_name, arguments): + options = Object() + options.foo = 'bar' + options.prog_name = prog_name + return options, self.mock_args + + def run(self, *args, **kwargs): + return args, kwargs + + +class test_Command(AppCase): + + def test_get_options(self): + cmd = Command() + cmd.option_list = (1, 2, 3) + self.assertTupleEqual(cmd.get_options(), (1, 2, 3)) + + def test_run_interface(self): + with self.assertRaises(NotImplementedError): + Command().run() + + @patch('sys.stdout') + def test_early_version(self, stdout): + cmd = Command() + with self.assertRaises(SystemExit): + cmd.early_version(['--version']) + stdout.write.assert_called_with(cmd.version + 
'\n') + + def test_execute_from_commandline(self): + cmd = MockCommand() + args1, kwargs1 = cmd.execute_from_commandline() # sys.argv + self.assertTupleEqual(args1, cmd.mock_args) + self.assertDictContainsSubset({'foo': 'bar'}, kwargs1) + self.assertTrue(kwargs1.get('prog_name')) + args2, kwargs2 = cmd.execute_from_commandline(['foo']) # pass list + self.assertTupleEqual(args2, cmd.mock_args) + self.assertDictContainsSubset({'foo': 'bar', 'prog_name': 'foo'}, + kwargs2) + + def test_with_bogus_args(self): + cmd = MockCommand() + cmd.supports_args = False + with override_stdouts() as (_, stderr): + with self.assertRaises(SystemExit): + cmd.execute_from_commandline(argv=['--bogus']) + self.assertTrue(stderr.getvalue()) + self.assertIn('Unrecognized', stderr.getvalue()) + + def test_with_custom_config_module(self): + prev = os.environ.pop('CELERY_CONFIG_MODULE', None) + try: + cmd = MockCommand() + cmd.setup_app_from_commandline(['--config=foo.bar.baz']) + self.assertEqual(os.environ.get('CELERY_CONFIG_MODULE'), + 'foo.bar.baz') + finally: + if prev: + os.environ['CELERY_CONFIG_MODULE'] = prev + else: + os.environ.pop('CELERY_CONFIG_MODULE', None) + + def test_with_custom_broker(self): + prev = os.environ.pop('CELERY_BROKER_URL', None) + try: + cmd = MockCommand() + cmd.setup_app_from_commandline(['--broker=xyzza://']) + self.assertEqual( + os.environ.get('CELERY_BROKER_URL'), 'xyzza://', + ) + finally: + if prev: + os.environ['CELERY_BROKER_URL'] = prev + else: + os.environ.pop('CELERY_BROKER_URL', None) + + def test_with_custom_app(self): + cmd = MockCommand() + app = '.'.join([__name__, 'APP']) + cmd.setup_app_from_commandline(['--app=%s' % (app, ), + '--loglevel=INFO']) + self.assertIs(cmd.app, APP) + + def test_with_cmdline_config(self): + cmd = MockCommand() + try: + cmd.enable_config_from_cmdline = True + cmd.namespace = 'celeryd' + rest = cmd.setup_app_from_commandline(argv=[ + '--loglevel=INFO', '--', + 'broker.url=amqp://broker.example.com', + '.prefetch_multiplier=100']) + self.assertEqual(cmd.app.conf.BROKER_URL, + 'amqp://broker.example.com') + self.assertEqual(cmd.app.conf.CELERYD_PREFETCH_MULTIPLIER, 100) + self.assertListEqual(rest, ['--loglevel=INFO']) + finally: + cmd.app.conf.BROKER_URL = 'memory://' + + def test_find_app(self): + cmd = MockCommand() + with patch('celery.bin.base.symbol_by_name') as sbn: + from types import ModuleType + x = ModuleType('proj') + + def on_sbn(*args, **kwargs): + + def after(*args, **kwargs): + x.celery = 'quick brown fox' + x.__path__ = None + return x + sbn.side_effect = after + return x + sbn.side_effect = on_sbn + x.__path__ = [True] + self.assertEqual(cmd.find_app('proj'), 'quick brown fox') + + def test_parse_preload_options_shortopt(self): + cmd = Command() + cmd.preload_options = (Option('-s', action='store', dest='silent'), ) + acc = cmd.parse_preload_options(['-s', 'yes']) + self.assertEqual(acc.get('silent'), 'yes') diff --git a/awx/lib/site-packages/celery/tests/bin/test_camqadm.py b/awx/lib/site-packages/celery/tests/bin/test_camqadm.py new file mode 100644 index 0000000000..b0b945a046 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/bin/test_camqadm.py @@ -0,0 +1,156 @@ +from __future__ import absolute_import +from __future__ import with_statement + +from mock import Mock, patch + +from celery import Celery +from celery.bin.camqadm import ( + AMQPAdmin, + AMQShell, + dump_message, + AMQPAdminCommand, + camqadm, + main, +) + +from celery.tests.utils import AppCase, WhateverIO + + +class test_AMQShell(AppCase): + + def 
setup(self): + self.fh = WhateverIO() + self.app = Celery(broker='memory://', set_as_current=False) + self.adm = self.create_adm() + self.shell = AMQShell(connect=self.adm.connect, out=self.fh) + + def create_adm(self, *args, **kwargs): + return AMQPAdmin(app=self.app, out=self.fh, *args, **kwargs) + + def test_queue_declare(self): + self.shell.onecmd('queue.declare foo') + self.assertIn('ok', self.fh.getvalue()) + + def test_missing_command(self): + self.shell.onecmd('foo foo') + self.assertIn('unknown syntax', self.fh.getvalue()) + + def RV(self): + raise Exception(self.fh.getvalue()) + + def test_missing_namespace(self): + self.shell.onecmd('ns.cmd arg') + self.assertIn('unknown syntax', self.fh.getvalue()) + + def test_help(self): + self.shell.onecmd('help') + self.assertIn('Example:', self.fh.getvalue()) + + def test_help_command(self): + self.shell.onecmd('help queue.declare') + self.assertIn('passive:no', self.fh.getvalue()) + + def test_help_unknown_command(self): + self.shell.onecmd('help foo.baz') + self.assertIn('unknown syntax', self.fh.getvalue()) + + def test_exit(self): + with self.assertRaises(SystemExit): + self.shell.onecmd('exit') + self.assertIn("don't leave!", self.fh.getvalue()) + + def test_note_silent(self): + self.shell.silent = True + self.shell.note('foo bar') + self.assertNotIn('foo bar', self.fh.getvalue()) + + def test_reconnect(self): + self.shell.onecmd('queue.declare foo') + self.shell.needs_reconnect = True + self.shell.onecmd('queue.delete foo') + + def test_completenames(self): + self.assertEqual( + self.shell.completenames('queue.dec'), + ['queue.declare'], + ) + self.assertEqual( + self.shell.completenames('declare'), + ['queue.declare', 'exchange.declare'], + ) + + def test_empty_line(self): + self.shell.emptyline = Mock() + self.shell.default = Mock() + self.shell.onecmd('') + self.shell.emptyline.assert_called_with() + self.shell.onecmd('foo') + self.shell.default.assert_called_with('foo') + + def test_respond(self): + self.shell.respond({'foo': 'bar'}) + self.assertIn('foo', self.fh.getvalue()) + + def test_prompt(self): + self.assertTrue(self.shell.prompt) + + def test_no_returns(self): + self.shell.onecmd('queue.declare foo') + self.shell.onecmd('exchange.declare bar direct yes') + self.shell.onecmd('queue.bind foo bar baz') + self.shell.onecmd('basic.ack 1') + + def test_dump_message(self): + m = Mock() + m.body = 'the quick brown fox' + m.properties = {'a': 1} + m.delivery_info = {'exchange': 'bar'} + self.assertTrue(dump_message(m)) + + def test_dump_message_no_message(self): + self.assertIn('No messages in queue', dump_message(None)) + + def test_note(self): + self.adm.silent = True + self.adm.note('FOO') + self.assertNotIn('FOO', self.fh.getvalue()) + + def test_run(self): + a = self.create_adm('queue.declare foo') + a.run() + self.assertIn('ok', self.fh.getvalue()) + + def test_run_loop(self): + a = self.create_adm() + a.Shell = Mock() + shell = a.Shell.return_value = Mock() + shell.cmdloop = Mock() + a.run() + shell.cmdloop.assert_called_with() + + shell.cmdloop.side_effect = KeyboardInterrupt() + a.run() + self.assertIn('bibi', self.fh.getvalue()) + + @patch('celery.bin.camqadm.AMQPAdminCommand') + def test_main(self, Command): + c = Command.return_value = Mock() + main() + c.execute_from_commandline.assert_called_with() + + @patch('celery.bin.camqadm.AMQPAdmin') + def test_camqadm(self, cls): + c = cls.return_value = Mock() + camqadm() + c.run.assert_called_with() + + @patch('celery.bin.camqadm.AMQPAdmin') + def 
test_AMQPAdminCommand(self, cls): + c = cls.return_value = Mock() + camqadm() + c.run.assert_called_with() + + x = AMQPAdminCommand(app=self.app) + x.run() + self.assertIs(cls.call_args[1]['app'], self.app) + c.run.assert_called_with() diff --git a/awx/lib/site-packages/celery/tests/bin/test_celery.py b/awx/lib/site-packages/celery/tests/bin/test_celery.py new file mode 100644 index 0000000000..9fc462b7da --- /dev/null +++ b/awx/lib/site-packages/celery/tests/bin/test_celery.py @@ -0,0 +1,347 @@ +from __future__ import absolute_import +from __future__ import with_statement + +from anyjson import dumps +from datetime import datetime +from mock import Mock, patch + +from celery import task +from celery.platforms import EX_FAILURE, EX_USAGE, EX_OK +from celery.bin.celery import ( + Command, + Error, + worker, + list_, + call, + purge, + result, + inspect, + status, + migrate, + help, + report, + CeleryCommand, + determine_exit_status, + main, +) + +from celery.tests.utils import AppCase, WhateverIO + + +@task() +def add(x, y): + return x + y + + +class test_Command(AppCase): + + def test_Error_repr(self): + x = Error('something happened') + self.assertIsNotNone(x.status) + self.assertTrue(x.reason) + self.assertTrue(str(x)) + + def setup(self): + self.out = WhateverIO() + self.err = WhateverIO() + self.cmd = Command(self.app, stdout=self.out, stderr=self.err) + + def test_show_help(self): + self.cmd.run_from_argv = Mock() + self.assertEqual(self.cmd.show_help('foo'), EX_USAGE) + self.cmd.run_from_argv.assert_called_with( + self.cmd.prog_name, ['foo', '--help'] + ) + + def test_error(self): + self.cmd.out = Mock() + self.cmd.error('FOO') + self.assertTrue(self.cmd.out.called) + + def test_out(self): + f = Mock() + self.cmd.out('foo', f) + f.write.assert_called_with('foo\n') + self.cmd.out('foo\n', f) + + def test_call(self): + self.cmd.run = Mock() + self.cmd.run.return_value = None + self.assertEqual(self.cmd(), EX_OK) + + self.cmd.run.side_effect = Error('error', EX_FAILURE) + self.assertEqual(self.cmd(), EX_FAILURE) + + def test_run_from_argv(self): + with self.assertRaises(NotImplementedError): + self.cmd.run_from_argv('prog', ['foo', 'bar']) + self.assertEqual(self.cmd.prog_name, 'prog') + + def test_prettify_list(self): + self.assertEqual(self.cmd.prettify([])[1], '- empty -') + self.assertIn('bar', self.cmd.prettify(['foo', 'bar'])[1]) + + def test_prettify_dict(self): + self.assertIn( + 'OK', + str(self.cmd.prettify({'ok': 'the quick brown fox'})[0]), + ) + self.assertIn( + 'ERROR', + str(self.cmd.prettify({'error': 'the quick brown fox'})[0]), + ) + + def test_prettify(self): + self.assertIn('OK', str(self.cmd.prettify('the quick brown'))) + self.assertIn('OK', str(self.cmd.prettify(object()))) + self.assertIn('OK', str(self.cmd.prettify({'foo': 'bar'}))) + + +class test_Delegate(AppCase): + + def test_get_options(self): + self.assertTrue(worker(app=self.app).get_options()) + + def test_run(self): + w = worker() + w.target.run = Mock() + w.run() + w.target.run.assert_called_with() + + +class test_list(AppCase): + + def test_list_bindings_no_support(self): + l = list_(app=self.app, stderr=WhateverIO()) + management = Mock() + management.get_bindings.side_effect = NotImplementedError() + with self.assertRaises(Error): + l.list_bindings(management) + + def test_run(self): + l = list_(app=self.app, stderr=WhateverIO()) + l.run('bindings') + + with self.assertRaises(Error): + l.run(None) + + with self.assertRaises(Error): + l.run('foo') + + +class test_call(AppCase): + + 
@patch('celery.app.base.Celery.send_task') + def test_run(self, send_task): + a = call(app=self.app, stderr=WhateverIO(), stdout=WhateverIO()) + a.run('tasks.add') + self.assertTrue(send_task.called) + + a.run('tasks.add', + args=dumps([4, 4]), + kwargs=dumps({'x': 2, 'y': 2})) + self.assertEqual(send_task.call_args[1]['args'], [4, 4]) + self.assertEqual(send_task.call_args[1]['kwargs'], {'x': 2, 'y': 2}) + + a.run('tasks.add', expires=10, countdown=10) + self.assertEqual(send_task.call_args[1]['expires'], 10) + self.assertEqual(send_task.call_args[1]['countdown'], 10) + + now = datetime.now() + iso = now.isoformat() + a.run('tasks.add', expires=iso) + self.assertEqual(send_task.call_args[1]['expires'], now) + with self.assertRaises(ValueError): + a.run('tasks.add', expires='foobaribazibar') + + +class test_purge(AppCase): + + @patch('celery.app.control.Control.purge') + def test_run(self, purge_): + out = WhateverIO() + a = purge(app=self.app, stdout=out) + purge_.return_value = 0 + a.run() + self.assertIn('No messages purged', out.getvalue()) + + purge_.return_value = 100 + a.run() + self.assertIn('100 messages', out.getvalue()) + + +class test_result(AppCase): + + @patch('celery.result.AsyncResult.get') + def test_run(self, get): + out = WhateverIO() + r = result(app=self.app, stdout=out) + get.return_value = 'Jerry' + r.run('id') + self.assertIn('Jerry', out.getvalue()) + + get.return_value = 'Elaine' + r.run('id', task=add.name) + self.assertIn('Elaine', out.getvalue()) + + +class test_status(AppCase): + + @patch('celery.bin.celery.inspect') + def test_run(self, inspect_): + out, err = WhateverIO(), WhateverIO() + ins = inspect_.return_value = Mock() + ins.run.return_value = [] + s = status(self.app, stdout=out, stderr=err) + with self.assertRaises(Error): + s.run() + + ins.run.return_value = ['a', 'b', 'c'] + s.run() + self.assertIn('3 nodes online', out.getvalue()) + s.run(quiet=True) + + +class test_migrate(AppCase): + + @patch('celery.contrib.migrate.migrate_tasks') + def test_run(self, migrate_tasks): + out = WhateverIO() + m = migrate(app=self.app, stdout=out, stderr=WhateverIO()) + with self.assertRaises(SystemExit): + m.run() + self.assertFalse(migrate_tasks.called) + + m.run('memory://foo', 'memory://bar') + self.assertTrue(migrate_tasks.called) + + state = Mock() + state.count = 10 + state.strtotal = 30 + m.on_migrate_task(state, {'task': 'tasks.add', 'id': 'ID'}, None) + self.assertIn('10/30', out.getvalue()) + + +class test_report(AppCase): + + def test_run(self): + out = WhateverIO() + r = report(app=self.app, stdout=out) + self.assertEqual(r.run(), EX_OK) + self.assertTrue(out.getvalue()) + + +class test_help(AppCase): + + def test_run(self): + out = WhateverIO() + h = help(app=self.app, stdout=out) + h.parser = Mock() + self.assertEqual(h.run(), EX_USAGE) + self.assertTrue(out.getvalue()) + self.assertTrue(h.usage('help')) + h.parser.print_help.assert_called_with() + + +class test_CeleryCommand(AppCase): + + def test_execute_from_commandline(self): + x = CeleryCommand(app=self.app) + x.handle_argv = Mock() + x.handle_argv.return_value = 1 + with self.assertRaises(SystemExit): + x.execute_from_commandline() + + x.handle_argv.return_value = True + with self.assertRaises(SystemExit): + x.execute_from_commandline() + + x.handle_argv.side_effect = KeyboardInterrupt() + with self.assertRaises(SystemExit): + x.execute_from_commandline() + + def test_determine_exit_status(self): + self.assertEqual(determine_exit_status('true'), EX_OK) + 
self.assertEqual(determine_exit_status(''), EX_FAILURE) + + def test_remove_options_at_beginning(self): + x = CeleryCommand(app=self.app) + self.assertEqual(x.remove_options_at_beginning(None), []) + self.assertEqual(x.remove_options_at_beginning(['-c 3', '--foo']), []) + self.assertEqual(x.remove_options_at_beginning(['--foo', '-c 3']), []) + self.assertEqual(x.remove_options_at_beginning( + ['foo', '--foo=1']), ['foo', '--foo=1']) + + def test_handle_argv(self): + x = CeleryCommand(app=self.app) + x.execute = Mock() + x.handle_argv('celery', []) + x.execute.assert_called_with('help', ['help']) + + x.handle_argv('celery', ['start', 'foo']) + x.execute.assert_called_with('start', ['start', 'foo']) + + def test_execute(self): + x = CeleryCommand(app=self.app) + Help = x.commands['help'] = Mock() + help = Help.return_value = Mock() + x.execute('fooox', ['a']) + help.run_from_argv.assert_called_with(x.prog_name, ['help']) + help.reset() + x.execute('help', ['help']) + help.run_from_argv.assert_called_with(x.prog_name, ['help']) + + Dummy = x.commands['dummy'] = Mock() + dummy = Dummy.return_value = Mock() + dummy.run_from_argv.side_effect = Error('foo', status='EX_FAILURE') + help.reset() + x.execute('dummy', ['dummy']) + dummy.run_from_argv.assert_called_with(x.prog_name, ['dummy']) + help.run_from_argv.assert_called_with(x.prog_name, ['dummy']) + + +class test_inspect(AppCase): + + def test_usage(self): + self.assertTrue(inspect(app=self.app).usage('foo')) + + @patch('celery.app.control.Control.inspect') + def test_run(self, real): + out = WhateverIO() + i = inspect(app=self.app, stdout=out) + with self.assertRaises(Error): + i.run() + with self.assertRaises(Error): + i.run('help') + with self.assertRaises(Error): + i.run('xyzzybaz') + + i.run('ping') + self.assertTrue(real.called) + i.run('ping', destination='foo,bar') + self.assertEqual(real.call_args[1]['destination'], ['foo', 'bar']) + self.assertEqual(real.call_args[1]['timeout'], 0.2) + callback = real.call_args[1]['callback'] + + callback({'foo': {'ok': 'pong'}}) + self.assertIn('OK', out.getvalue()) + + instance = real.return_value = Mock() + instance.ping.return_value = None + with self.assertRaises(Error): + i.run('ping') + + out.seek(0) + out.truncate() + i.quiet = True + i.say('<-', 'hello') + self.assertFalse(out.getvalue()) + + +class test_main(AppCase): + + @patch('celery.bin.celery.CeleryCommand') + def test_main(self, Command): + command = Command.return_value = Mock() + main() + command.execute_from_commandline.assert_called_with(None) diff --git a/awx/lib/site-packages/celery/tests/bin/test_celerybeat.py b/awx/lib/site-packages/celery/tests/bin/test_celerybeat.py new file mode 100644 index 0000000000..5fe35d3e89 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/bin/test_celerybeat.py @@ -0,0 +1,184 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import logging +import sys + +from collections import defaultdict + +from kombu.tests.utils import redirect_stdouts +from mock import patch + +from celery import beat +from celery import platforms +from celery.app import app_or_default +from celery.bin import celerybeat as celerybeat_bin +from celery.apps import beat as beatapp + +from celery.tests.utils import AppCase + + +class MockedShelveModule(object): + shelves = defaultdict(lambda: {}) + + def open(self, filename, *args, **kwargs): + return self.shelves[filename] +mocked_shelve = MockedShelveModule() + + +class MockService(beat.Service): + started = False + in_sync = False + 
persistence = mocked_shelve + + def start(self): + self.__class__.started = True + + def sync(self): + self.__class__.in_sync = True + + +class MockBeat(beatapp.Beat): + running = False + + def run(self): + MockBeat.running = True + + +class MockBeat2(beatapp.Beat): + Service = MockService + + def install_sync_handler(self, b): + pass + + +class MockBeat3(beatapp.Beat): + Service = MockService + + def install_sync_handler(self, b): + raise TypeError('xxx') + + +class test_Beat(AppCase): + + def test_loglevel_string(self): + b = beatapp.Beat(loglevel='DEBUG') + self.assertEqual(b.loglevel, logging.DEBUG) + + b2 = beatapp.Beat(loglevel=logging.DEBUG) + self.assertEqual(b2.loglevel, logging.DEBUG) + + def test_init_loader(self): + b = beatapp.Beat() + b.init_loader() + + def test_process_title(self): + b = beatapp.Beat() + b.set_process_title() + + def test_run(self): + b = MockBeat2() + MockService.started = False + b.run() + self.assertTrue(MockService.started) + + def psig(self, fun, *args, **kwargs): + handlers = {} + + class Signals(platforms.Signals): + + def __setitem__(self, sig, handler): + handlers[sig] = handler + + p, platforms.signals = platforms.signals, Signals() + try: + fun(*args, **kwargs) + return handlers + finally: + platforms.signals = p + + def test_install_sync_handler(self): + b = beatapp.Beat() + clock = MockService() + MockService.in_sync = False + handlers = self.psig(b.install_sync_handler, clock) + with self.assertRaises(SystemExit): + handlers['SIGINT']('SIGINT', object()) + self.assertTrue(MockService.in_sync) + MockService.in_sync = False + + def test_setup_logging(self): + try: + # py3k + delattr(sys.stdout, 'logger') + except AttributeError: + pass + b = beatapp.Beat() + b.redirect_stdouts = False + b.app.log.__class__._setup = False + b.setup_logging() + with self.assertRaises(AttributeError): + sys.stdout.logger + + @redirect_stdouts + @patch('celery.apps.beat.logger') + def test_logs_errors(self, logger, stdout, stderr): + b = MockBeat3(socket_timeout=None) + b.start_scheduler() + self.assertTrue(logger.critical.called) + + @redirect_stdouts + @patch('celery.platforms.create_pidlock') + def test_use_pidfile(self, create_pidlock, stdout, stderr): + b = MockBeat2(pidfile='pidfilelockfilepid', socket_timeout=None) + b.start_scheduler() + self.assertTrue(create_pidlock.called) + + +class MockDaemonContext(object): + opened = False + closed = False + + def __init__(self, *args, **kwargs): + pass + + def open(self): + self.__class__.opened = True + return self + __enter__ = open + + def close(self, *args): + self.__class__.closed = True + __exit__ = close + + +class test_div(AppCase): + + def setup(self): + self.prev, beatapp.Beat = beatapp.Beat, MockBeat + self.ctx, celerybeat_bin.detached = ( + celerybeat_bin.detached, MockDaemonContext) + + def teardown(self): + beatapp.Beat = self.prev + + def test_main(self): + sys.argv = [sys.argv[0], '-s', 'foo'] + try: + celerybeat_bin.main() + self.assertTrue(MockBeat.running) + finally: + MockBeat.running = False + + def test_detach(self): + cmd = celerybeat_bin.BeatCommand() + cmd.app = app_or_default() + cmd.run(detach=True) + self.assertTrue(MockDaemonContext.opened) + self.assertTrue(MockDaemonContext.closed) + + def test_parse_options(self): + cmd = celerybeat_bin.BeatCommand() + cmd.app = app_or_default() + options, args = cmd.parse_options('celerybeat', ['-s', 'foo']) + self.assertEqual(options.schedule, 'foo') diff --git a/awx/lib/site-packages/celery/tests/bin/test_celeryd.py 
b/awx/lib/site-packages/celery/tests/bin/test_celeryd.py new file mode 100644 index 0000000000..9ed3f242ab --- /dev/null +++ b/awx/lib/site-packages/celery/tests/bin/test_celeryd.py @@ -0,0 +1,688 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import logging +import os +import sys + +from functools import wraps + +from mock import Mock, patch +from nose import SkipTest + +from billiard import current_process +from kombu import Exchange, Queue + +from celery import Celery +from celery import platforms +from celery import signals +from celery import current_app +from celery.apps import worker as cd +from celery.bin.celeryd import WorkerCommand, main as celeryd_main +from celery.exceptions import ImproperlyConfigured, SystemTerminate +from celery.task import trace +from celery.utils.log import ensure_process_aware_logger +from celery.worker import state + +from celery.tests.utils import ( + AppCase, + WhateverIO, + skip_if_pypy, + skip_if_jython, +) + +ensure_process_aware_logger() + + +class WorkerAppCase(AppCase): + + def tearDown(self): + super(WorkerAppCase, self).tearDown() + trace.reset_worker_optimizations() + + +def disable_stdouts(fun): + + @wraps(fun) + def disable(*args, **kwargs): + prev_out, prev_err = sys.stdout, sys.stderr + prev_rout, prev_rerr = sys.__stdout__, sys.__stderr__ + sys.stdout = sys.__stdout__ = WhateverIO() + sys.stderr = sys.__stderr__ = WhateverIO() + try: + return fun(*args, **kwargs) + finally: + sys.stdout = prev_out + sys.stderr = prev_err + sys.__stdout__ = prev_rout + sys.__stderr__ = prev_rerr + + return disable + + +class _WorkController(object): + + def __init__(self, *args, **kwargs): + pass + + def start(self): + pass + + +class Worker(cd.Worker): + WorkController = _WorkController + + def __init__(self, *args, **kwargs): + super(Worker, self).__init__(*args, **kwargs) + self.redirect_stdouts = False + + +class test_Worker(WorkerAppCase): + + Worker = Worker + + def teardown(self): + self.app.conf.CELERY_INCLUDE = () + + @disable_stdouts + def test_queues_string(self): + celery = Celery(set_as_current=False) + worker = celery.Worker(queues='foo,bar,baz') + worker.init_queues() + self.assertEqual(worker.use_queues, ['foo', 'bar', 'baz']) + self.assertTrue('foo' in celery.amqp.queues) + + @disable_stdouts + def test_cpu_count(self): + celery = Celery(set_as_current=False) + with patch('celery.apps.worker.cpu_count') as cpu_count: + cpu_count.side_effect = NotImplementedError() + worker = celery.Worker(concurrency=None) + self.assertEqual(worker.concurrency, 2) + worker = celery.Worker(concurrency=5) + self.assertEqual(worker.concurrency, 5) + + @disable_stdouts + def test_windows_B_option(self): + celery = Celery(set_as_current=False) + celery.IS_WINDOWS = True + with self.assertRaises(SystemExit): + WorkerCommand(app=celery).run(beat=True) + + def test_setup_concurrency_very_early(self): + x = WorkerCommand() + x.run = Mock() + with self.assertRaises(ImportError): + x.execute_from_commandline(['celeryd', '-P', 'xyzybox']) + + @disable_stdouts + def test_invalid_loglevel_gives_error(self): + x = WorkerCommand(app=Celery(set_as_current=False)) + with self.assertRaises(SystemExit): + x.run(loglevel='GRIM_REAPER') + + def test_no_loglevel(self): + app = Celery(set_as_current=False) + app.Worker = Mock() + WorkerCommand(app=app).run(loglevel=None) + + def test_tasklist(self): + celery = Celery(set_as_current=False) + worker = celery.Worker() + self.assertTrue(worker.app.tasks) + self.assertTrue(worker.app.finalized) + 
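# Instantiating Worker finalizes the app, so the registry rendered by + # tasklist() below is already fully populated. + 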
self.assertTrue(worker.tasklist(include_builtins=True)) + worker.tasklist(include_builtins=False) + + def test_extra_info(self): + celery = Celery(set_as_current=False) + worker = celery.Worker() + worker.loglevel = logging.WARNING + self.assertFalse(worker.extra_info()) + worker.loglevel = logging.INFO + self.assertTrue(worker.extra_info()) + + @disable_stdouts + def test_loglevel_string(self): + worker = self.Worker(loglevel='INFO') + self.assertEqual(worker.loglevel, logging.INFO) + + def test_run_worker(self): + handlers = {} + + class Signals(platforms.Signals): + + def __setitem__(self, sig, handler): + handlers[sig] = handler + + p = platforms.signals + platforms.signals = Signals() + try: + w = self.Worker() + w._isatty = False + w.run_worker() + for sig in 'SIGINT', 'SIGHUP', 'SIGTERM': + self.assertIn(sig, handlers) + + handlers.clear() + w = self.Worker() + w._isatty = True + w.run_worker() + for sig in 'SIGINT', 'SIGTERM': + self.assertIn(sig, handlers) + self.assertNotIn('SIGHUP', handlers) + finally: + platforms.signals = p + + @disable_stdouts + def test_startup_info(self): + worker = self.Worker() + worker.run() + self.assertTrue(worker.startup_info()) + worker.loglevel = logging.DEBUG + self.assertTrue(worker.startup_info()) + worker.loglevel = logging.INFO + self.assertTrue(worker.startup_info()) + worker.autoscale = 13, 10 + self.assertTrue(worker.startup_info()) + + worker = self.Worker(queues='foo,bar,baz,xuzzy,do,re,mi') + app = worker.app + prev, app.loader = app.loader, Mock() + try: + app.loader.__module__ = 'acme.baked_beans' + self.assertTrue(worker.startup_info()) + finally: + app.loader = prev + + prev, app.loader = app.loader, Mock() + try: + app.loader.__module__ = 'celery.loaders.foo' + self.assertTrue(worker.startup_info()) + finally: + app.loader = prev + + from celery.loaders.app import AppLoader + prev, app.loader = app.loader, AppLoader() + try: + self.assertTrue(worker.startup_info()) + finally: + app.loader = prev + + worker.send_events = True + self.assertTrue(worker.startup_info()) + + # test when there are too few output lines + # to draft the ascii art onto + prev, cd.ARTLINES = cd.ARTLINES, ['the quick brown fox'] + self.assertTrue(worker.startup_info()) + + @disable_stdouts + def test_run(self): + self.Worker().run() + self.Worker(purge=True).run() + worker = self.Worker() + worker.run() + + prev, cd.IGNORE_ERRORS = cd.IGNORE_ERRORS, (KeyError, ) + try: + worker.run_worker = Mock() + worker.run_worker.side_effect = KeyError() + worker.run() + finally: + cd.IGNORE_ERRORS = prev + + @disable_stdouts + def test_purge_messages(self): + self.Worker().purge_messages() + + @disable_stdouts + def test_init_queues(self): + app = current_app + c = app.conf + p, app.amqp.queues = app.amqp.queues, app.amqp.Queues({ + 'celery': {'exchange': 'celery', + 'routing_key': 'celery'}, + 'video': {'exchange': 'video', + 'routing_key': 'video'}}) + try: + worker = self.Worker(queues=['video']) + worker.init_queues() + self.assertIn('video', app.amqp.queues) + self.assertIn('video', app.amqp.queues.consume_from) + self.assertIn('celery', app.amqp.queues) + self.assertNotIn('celery', app.amqp.queues.consume_from) + + c.CELERY_CREATE_MISSING_QUEUES = False + del(app.amqp.queues) + with self.assertRaises(ImproperlyConfigured): + self.Worker(queues=['image']).init_queues() + del(app.amqp.queues) + c.CELERY_CREATE_MISSING_QUEUES = True + worker = self.Worker(queues=['image']) + worker.init_queues() + self.assertIn('image', app.amqp.queues.consume_from) + 
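# With CELERY_CREATE_MISSING_QUEUES the unknown 'image' queue is declared + # on the fly; the next assertion checks that its exchange and routing + # key default to the queue name. + 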
self.assertEqual(Queue('image', Exchange('image'), + routing_key='image'), app.amqp.queues['image']) + finally: + app.amqp.queues = p + + @disable_stdouts + def test_autoscale_argument(self): + worker1 = self.Worker(autoscale='10,3') + self.assertListEqual(worker1.autoscale, [10, 3]) + worker2 = self.Worker(autoscale='10') + self.assertListEqual(worker2.autoscale, [10, 0]) + + def test_include_argument(self): + worker1 = self.Worker(include='some.module') + self.assertListEqual(worker1.include, ['some.module']) + worker2 = self.Worker(include='some.module,another.package') + self.assertListEqual( + worker2.include, + ['some.module', 'another.package'], + ) + self.Worker(include=['os', 'sys']) + + @disable_stdouts + def test_unknown_loglevel(self): + with self.assertRaises(SystemExit): + WorkerCommand(app=self.app).run(loglevel='ALIEN') + worker1 = self.Worker(loglevel=0xFFFF) + self.assertEqual(worker1.loglevel, 0xFFFF) + + def test_warns_if_running_as_privileged_user(self): + app = current_app + if app.IS_WINDOWS: + raise SkipTest('Not applicable on Windows') + + def getuid(): + return 0 + + prev, os.getuid = os.getuid, getuid + try: + with self.assertWarnsRegex( + RuntimeWarning, + r'superuser privileges is discouraged'): + worker = self.Worker() + worker.run() + finally: + os.getuid = prev + + @disable_stdouts + def test_redirect_stdouts(self): + worker = self.Worker() + worker.redirect_stdouts = False + worker.setup_logging() + with self.assertRaises(AttributeError): + sys.stdout.logger + + def test_redirect_stdouts_already_handled(self): + logging_setup = [False] + + @signals.setup_logging.connect + def on_logging_setup(**kwargs): + logging_setup[0] = True + + try: + worker = self.Worker() + worker.app.log.__class__._setup = False + worker.setup_logging() + self.assertTrue(logging_setup[0]) + with self.assertRaises(AttributeError): + sys.stdout.logger + finally: + signals.setup_logging.disconnect(on_logging_setup) + + @disable_stdouts + def test_platform_tweaks_osx(self): + + class OSXWorker(Worker): + proxy_workaround_installed = False + + def osx_proxy_detection_workaround(self): + self.proxy_workaround_installed = True + + worker = OSXWorker(redirect_stdouts=False) + + def install_HUP_nosupport(controller): + controller.hup_not_supported_installed = True + + class Controller(object): + pass + + prev = cd.install_HUP_not_supported_handler + cd.install_HUP_not_supported_handler = install_HUP_nosupport + try: + worker.app.IS_OSX = True + controller = Controller() + worker.install_platform_tweaks(controller) + self.assertTrue(controller.hup_not_supported_installed) + self.assertTrue(worker.proxy_workaround_installed) + finally: + cd.install_HUP_not_supported_handler = prev + + @disable_stdouts + def test_general_platform_tweaks(self): + + restart_worker_handler_installed = [False] + + def install_worker_restart_handler(worker): + restart_worker_handler_installed[0] = True + + class Controller(object): + pass + + prev = cd.install_worker_restart_handler + cd.install_worker_restart_handler = install_worker_restart_handler + try: + worker = self.Worker() + worker.app.IS_OSX = False + worker.install_platform_tweaks(Controller()) + self.assertTrue(restart_worker_handler_installed[0]) + finally: + cd.install_worker_restart_handler = prev + + @disable_stdouts + def test_on_consumer_ready(self): + worker_ready_sent = [False] + + @signals.worker_ready.connect + def on_worker_ready(**kwargs): + worker_ready_sent[0] = True + + self.Worker().on_consumer_ready(object()) + 
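# on_consumer_ready is expected to dispatch the worker_ready signal, + # flipping the flag set by the receiver above. + 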
self.assertTrue(worker_ready_sent[0]) + + +class test_funs(WorkerAppCase): + + def test_active_thread_count(self): + self.assertTrue(cd.active_thread_count()) + + @disable_stdouts + def test_set_process_status(self): + try: + __import__('setproctitle') + except ImportError: + raise SkipTest('setproctitle not installed') + worker = Worker(hostname='xyzza') + prev1, sys.argv = sys.argv, ['Arg0'] + try: + st = worker.set_process_status('Running') + self.assertIn('celeryd', st) + self.assertIn('xyzza', st) + self.assertIn('Running', st) + prev2, sys.argv = sys.argv, ['Arg0', 'Arg1'] + try: + st = worker.set_process_status('Running') + self.assertIn('celeryd', st) + self.assertIn('xyzza', st) + self.assertIn('Running', st) + self.assertIn('Arg1', st) + finally: + sys.argv = prev2 + finally: + sys.argv = prev1 + + @disable_stdouts + def test_parse_options(self): + cmd = WorkerCommand() + cmd.app = current_app + opts, args = cmd.parse_options('celeryd', ['--concurrency=512']) + self.assertEqual(opts.concurrency, 512) + + @disable_stdouts + def test_main(self): + p, cd.Worker = cd.Worker, Worker + s, sys.argv = sys.argv, ['celeryd', '--discard'] + try: + celeryd_main() + finally: + cd.Worker = p + sys.argv = s + + +class test_signal_handlers(WorkerAppCase): + + class _Worker(object): + stopped = False + terminated = False + + def stop(self, in_sighandler=False): + self.stopped = True + + def terminate(self, in_sighandler=False): + self.terminated = True + + def psig(self, fun, *args, **kwargs): + handlers = {} + + class Signals(platforms.Signals): + def __setitem__(self, sig, handler): + handlers[sig] = handler + + p, platforms.signals = platforms.signals, Signals() + try: + fun(*args, **kwargs) + return handlers + finally: + platforms.signals = p + + @disable_stdouts + def test_worker_int_handler(self): + worker = self._Worker() + handlers = self.psig(cd.install_worker_int_handler, worker) + next_handlers = {} + state.should_stop = False + state.should_terminate = False + + class Signals(platforms.Signals): + + def __setitem__(self, sig, handler): + next_handlers[sig] = handler + + with patch('celery.apps.worker.active_thread_count') as c: + c.return_value = 3 + p, platforms.signals = platforms.signals, Signals() + try: + handlers['SIGINT']('SIGINT', object()) + self.assertTrue(state.should_stop) + finally: + platforms.signals = p + state.should_stop = False + + try: + next_handlers['SIGINT']('SIGINT', object()) + self.assertTrue(state.should_terminate) + finally: + state.should_terminate = False + + with patch('celery.apps.worker.active_thread_count') as c: + c.return_value = 1 + p, platforms.signals = platforms.signals, Signals() + try: + with self.assertRaises(SystemExit): + handlers['SIGINT']('SIGINT', object()) + finally: + platforms.signals = p + + with self.assertRaises(SystemTerminate): + next_handlers['SIGINT']('SIGINT', object()) + + @disable_stdouts + def test_worker_int_handler_only_stop_MainProcess(self): + try: + import _multiprocessing # noqa + except ImportError: + raise SkipTest('only relevant for multiprocessing') + process = current_process() + name, process.name = process.name, 'OtherProcess' + with patch('celery.apps.worker.active_thread_count') as c: + c.return_value = 3 + try: + worker = self._Worker() + handlers = self.psig(cd.install_worker_int_handler, worker) + handlers['SIGINT']('SIGINT', object()) + self.assertTrue(state.should_stop) + finally: + process.name = name + state.should_stop = False + + with patch('celery.apps.worker.active_thread_count') as c: + 
c.return_value = 1 + try: + worker = self._Worker() + handlers = self.psig(cd.install_worker_int_handler, worker) + with self.assertRaises(SystemExit): + handlers['SIGINT']('SIGINT', object()) + finally: + process.name = name + state.should_stop = False + + @disable_stdouts + def test_install_HUP_not_supported_handler(self): + worker = self._Worker() + handlers = self.psig(cd.install_HUP_not_supported_handler, worker) + handlers['SIGHUP']('SIGHUP', object()) + + @disable_stdouts + def test_worker_term_hard_handler_only_stop_MainProcess(self): + try: + import _multiprocessing # noqa + except ImportError: + raise SkipTest('only relevant for multiprocessing') + process = current_process() + name, process.name = process.name, 'OtherProcess' + try: + with patch('celery.apps.worker.active_thread_count') as c: + c.return_value = 3 + worker = self._Worker() + handlers = self.psig( + cd.install_worker_term_hard_handler, worker) + try: + handlers['SIGQUIT']('SIGQUIT', object()) + self.assertTrue(state.should_terminate) + finally: + state.should_terminate = False + with patch('celery.apps.worker.active_thread_count') as c: + c.return_value = 1 + worker = self._Worker() + handlers = self.psig( + cd.install_worker_term_hard_handler, worker) + with self.assertRaises(SystemTerminate): + handlers['SIGQUIT']('SIGQUIT', object()) + finally: + process.name = name + + @disable_stdouts + def test_worker_term_handler_when_threads(self): + with patch('celery.apps.worker.active_thread_count') as c: + c.return_value = 3 + worker = self._Worker() + handlers = self.psig(cd.install_worker_term_handler, worker) + try: + handlers['SIGTERM']('SIGTERM', object()) + self.assertTrue(state.should_stop) + finally: + state.should_stop = False + + @disable_stdouts + def test_worker_term_handler_when_single_thread(self): + with patch('celery.apps.worker.active_thread_count') as c: + c.return_value = 1 + worker = self._Worker() + handlers = self.psig(cd.install_worker_term_handler, worker) + try: + with self.assertRaises(SystemExit): + handlers['SIGTERM']('SIGTERM', object()) + finally: + state.should_stop = False + + @patch('sys.__stderr__') + @skip_if_pypy + @skip_if_jython + def test_worker_cry_handler(self, stderr): + if sys.version_info > (2, 5): + handlers = self.psig(cd.install_cry_handler) + self.assertIsNone(handlers['SIGUSR1']('SIGUSR1', object())) + self.assertTrue(stderr.write.called) + else: + raise SkipTest('Needs Python 2.5 or later') + + @disable_stdouts + def test_worker_term_handler_only_stop_MainProcess(self): + try: + import _multiprocessing # noqa + except ImportError: + raise SkipTest('only relevant for multiprocessing') + process = current_process() + name, process.name = process.name, 'OtherProcess' + try: + with patch('celery.apps.worker.active_thread_count') as c: + c.return_value = 3 + worker = self._Worker() + handlers = self.psig(cd.install_worker_term_handler, worker) + handlers['SIGTERM']('SIGTERM', object()) + self.assertTrue(state.should_stop) + with patch('celery.apps.worker.active_thread_count') as c: + c.return_value = 1 + worker = self._Worker() + handlers = self.psig(cd.install_worker_term_handler, worker) + with self.assertRaises(SystemExit): + handlers['SIGTERM']('SIGTERM', object()) + finally: + process.name = name + state.should_stop = False + + @disable_stdouts + @patch('atexit.register') + @patch('os.fork') + @patch('os.close') + def test_worker_restart_handler(self, _close, fork, register): + fork.return_value = 0 + if getattr(os, 'execv', None) is None: + raise SkipTest('platform 
does not have execv') + argv = [] + + def _execv(*args): + argv.extend(args) + + execv, os.execv = os.execv, _execv + try: + worker = self._Worker() + handlers = self.psig(cd.install_worker_restart_handler, worker) + handlers['SIGHUP']('SIGHUP', object()) + self.assertTrue(state.should_stop) + self.assertTrue(register.called) + callback = register.call_args[0][0] + callback() + self.assertTrue(argv) + argv[:] = [] + fork.return_value = 1 + callback() + self.assertFalse(argv) + finally: + os.execv = execv + state.should_stop = False + + @disable_stdouts + def test_worker_term_hard_handler_when_threaded(self): + with patch('celery.apps.worker.active_thread_count') as c: + c.return_value = 3 + worker = self._Worker() + handlers = self.psig(cd.install_worker_term_hard_handler, worker) + try: + handlers['SIGQUIT']('SIGQUIT', object()) + self.assertTrue(state.should_terminate) + finally: + state.should_terminate = False + + @disable_stdouts + def test_worker_term_hard_handler_when_single_threaded(self): + with patch('celery.apps.worker.active_thread_count') as c: + c.return_value = 1 + worker = self._Worker() + handlers = self.psig(cd.install_worker_term_hard_handler, worker) + with self.assertRaises(SystemTerminate): + handlers['SIGQUIT']('SIGQUIT', object()) diff --git a/awx/lib/site-packages/celery/tests/bin/test_celeryd_detach.py b/awx/lib/site-packages/celery/tests/bin/test_celeryd_detach.py new file mode 100644 index 0000000000..d0d46c79ec --- /dev/null +++ b/awx/lib/site-packages/celery/tests/bin/test_celeryd_detach.py @@ -0,0 +1,100 @@ +from __future__ import absolute_import +from __future__ import with_statement + +from mock import Mock, patch + +from celery import current_app +from celery.bin.celeryd_detach import ( + detach, + detached_celeryd, + main, +) + +from celery.tests.utils import Case, override_stdouts + + +if not current_app.IS_WINDOWS: + class test_detached(Case): + + @patch('celery.bin.celeryd_detach.detached') + @patch('os.execv') + @patch('celery.bin.celeryd_detach.logger') + @patch('celery.app.log.Logging.setup_logging_subsystem') + def test_execs(self, setup_logs, logger, execv, detached): + context = detached.return_value = Mock() + context.__enter__ = Mock() + context.__exit__ = Mock() + + detach('/bin/boo', ['a', 'b', 'c'], logfile='/var/log', + pidfile='/var/pid') + detached.assert_called_with('/var/log', '/var/pid', None, None, 0, + None, False) + execv.assert_called_with('/bin/boo', ['/bin/boo', 'a', 'b', 'c']) + + execv.side_effect = Exception('foo') + r = detach('/bin/boo', ['a', 'b', 'c'], + logfile='/var/log', pidfile='/var/pid') + context.__enter__.assert_called_with() + self.assertTrue(logger.critical.called) + setup_logs.assert_called_with('ERROR', '/var/log') + self.assertEqual(r, 1) + + +class test_PartialOptionParser(Case): + + def test_parser(self): + x = detached_celeryd() + p = x.Parser('celeryd_detach') + options, values = p.parse_args(['--logfile=foo', '--fake', '--enable', + 'a', 'b', '-c1', '-d', '2']) + self.assertEqual(options.logfile, 'foo') + self.assertEqual(values, ['a', 'b']) + self.assertEqual(p.leftovers, ['--enable', '-c1', '-d', '2']) + + with override_stdouts(): + with self.assertRaises(SystemExit): + p.parse_args(['--logfile']) + p.get_option('--logfile').nargs = 2 + with self.assertRaises(SystemExit): + p.parse_args(['--logfile=a']) + with self.assertRaises(SystemExit): + p.parse_args(['--fake=abc']) + + assert p.get_option('--logfile').nargs == 2 + p.parse_args(['--logfile=a', 'b']) + p.get_option('--logfile').nargs = 1 + + +class 
test_Command(Case): + argv = ['--autoscale=10,2', '-c', '1', + '--logfile=/var/log', '-lDEBUG', + '--', '.disable_rate_limits=1'] + + def test_parse_options(self): + x = detached_celeryd() + o, v, l = x.parse_options('cd', self.argv) + self.assertEqual(o.logfile, '/var/log') + self.assertEqual(l, ['--autoscale=10,2', '-c', '1', + '-lDEBUG', '--logfile=/var/log', + '--pidfile=celeryd.pid']) + x.parse_options('cd', []) # no args + + @patch('sys.exit') + @patch('celery.bin.celeryd_detach.detach') + def test_execute_from_commandline(self, detach, exit): + x = detached_celeryd() + x.execute_from_commandline(self.argv) + self.assertTrue(exit.called) + detach.assert_called_with( + path=x.execv_path, uid=None, gid=None, + umask=0, fake=False, logfile='/var/log', pidfile='celeryd.pid', + argv=['-m', 'celery.bin.celeryd', '-c', '1', '-lDEBUG', + '--logfile=/var/log', '--pidfile=celeryd.pid', + '--', '.disable_rate_limits=1'], + ) + + @patch('celery.bin.celeryd_detach.detached_celeryd') + def test_main(self, command): + c = command.return_value = Mock() + main() + c.execute_from_commandline.assert_called_with() diff --git a/awx/lib/site-packages/celery/tests/bin/test_celeryd_multi.py b/awx/lib/site-packages/celery/tests/bin/test_celeryd_multi.py new file mode 100644 index 0000000000..bb03a6b36b --- /dev/null +++ b/awx/lib/site-packages/celery/tests/bin/test_celeryd_multi.py @@ -0,0 +1,451 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import errno +import signal +import sys + +from mock import Mock, patch + +from celery.bin.celeryd_multi import ( + main, + MultiTool, + findsig, + abbreviations, + parse_ns_range, + format_opt, + quote, + NamespacedOptionParser, + multi_args, + __doc__ as doc, +) + +from celery.tests.utils import Case, WhateverIO + + +class test_functions(Case): + + def test_findsig(self): + self.assertEqual(findsig(['a', 'b', 'c', '-1']), 1) + self.assertEqual(findsig(['--foo=1', '-9']), 9) + self.assertEqual(findsig(['-INT']), signal.SIGINT) + self.assertEqual(findsig([]), signal.SIGTERM) + self.assertEqual(findsig(['-s']), signal.SIGTERM) + self.assertEqual(findsig(['-log']), signal.SIGTERM) + + def test_abbreviations(self): + expander = abbreviations({'%s': 'START', + '%x': 'STOP'}) + self.assertEqual(expander('foo%s'), 'fooSTART') + self.assertEqual(expander('foo%x'), 'fooSTOP') + self.assertEqual(expander('foo%y'), 'foo%y') + self.assertIsNone(expander(None)) + + def test_parse_ns_range(self): + self.assertEqual(parse_ns_range('1-3', True), ['1', '2', '3']) + self.assertEqual(parse_ns_range('1-3', False), ['1-3']) + self.assertEqual(parse_ns_range( + '1-3,10,11,20', True), + ['1', '2', '3', '10', '11', '20'], + ) + + def test_format_opt(self): + self.assertEqual(format_opt('--foo', None), '--foo') + self.assertEqual(format_opt('-c', 1), '-c 1') + self.assertEqual(format_opt('--log', 'foo'), '--log=foo') + + def test_quote(self): + self.assertEqual(quote("the 'quick"), "'the '\\''quick'") + + +class test_NamespacedOptionParser(Case): + + def test_parse(self): + x = NamespacedOptionParser(['-c:1,3', '4']) + self.assertEqual(x.namespaces.get('1,3'), {'-c': '4'}) + x = NamespacedOptionParser(['-c:jerry,elaine', '5', + '--loglevel:kramer=DEBUG', + '--flag', + '--logfile=foo', '-Q', 'bar', 'a', 'b', + '--', '.disable_rate_limits=1']) + self.assertEqual(x.options, {'--logfile': 'foo', + '-Q': 'bar', + '--flag': None}) + self.assertEqual(x.values, ['a', 'b']) + self.assertEqual(x.namespaces.get('jerry,elaine'), {'-c': '5'}) + 
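# The 'OPT:a,b VALUE' form scopes an option to the named nodes only, e.g. + # celeryd-multi start jerry elaine kramer -c:jerry,elaine 5 + # gives jerry and elaine '-c 5' while kramer keeps the default. + 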
self.assertEqual(x.namespaces.get('kramer'), {'--loglevel': 'DEBUG'}) + self.assertEqual(x.passthrough, '-- .disable_rate_limits=1') + + +class test_multi_args(Case): + + @patch('socket.gethostname') + def test_parse(self, gethostname): + p = NamespacedOptionParser([ + '-c:jerry,elaine', '5', + '--loglevel:kramer=DEBUG', + '--flag', + '--logfile=foo', '-Q', 'bar', 'jerry', + 'elaine', 'kramer', + '--', '.disable_rate_limits=1', + ]) + it = multi_args(p, cmd='COMMAND', append='*AP*', + prefix='*P*', suffix='*S*') + names = list(it) + self.assertEqual( + names[0][0:2], + ('*P*jerry*S*', [ + 'COMMAND', '-n *P*jerry*S*', '-Q bar', + '-c 5', '--flag', '--logfile=foo', + '-- .disable_rate_limits=1', '*AP*', + ]), + ) + self.assertEqual( + names[1][0:2], + ('*P*elaine*S*', [ + 'COMMAND', '-n *P*elaine*S*', '-Q bar', + '-c 5', '--flag', '--logfile=foo', + '-- .disable_rate_limits=1', '*AP*', + ]), + ) + self.assertEqual( + names[2][0:2], + ('*P*kramer*S*', [ + 'COMMAND', '--loglevel=DEBUG', '-n *P*kramer*S*', + '-Q bar', '--flag', '--logfile=foo', + '-- .disable_rate_limits=1', '*AP*', + ]), + ) + expand = names[0][2] + self.assertEqual(expand('%h'), '*P*jerry*S*') + self.assertEqual(expand('%n'), 'jerry') + names2 = list(multi_args(p, cmd='COMMAND', append='', + prefix='*P*', suffix='*S*')) + self.assertEqual(names2[0][1][-1], '-- .disable_rate_limits=1') + + gethostname.return_value = 'example.com' + p2 = NamespacedOptionParser(['10', '-c:1', '5']) + names3 = list(multi_args(p2, cmd='COMMAND')) + self.assertEqual(len(names3), 10) + self.assertEqual( + names3[0][0:2], + ('celery1.example.com', + ['COMMAND', '-n celery1.example.com', '-c 5', '']), + ) + for i, worker in enumerate(names3[1:]): + self.assertEqual( + worker[0:2], + ('celery%s.example.com' % (i + 2), + ['COMMAND', '-n celery%s.example.com' % (i + 2), '']), + ) + + names4 = list(multi_args(p2, cmd='COMMAND', suffix='""')) + self.assertEqual(len(names4), 10) + self.assertEqual( + names4[0][0:2], + ('celery1', ['COMMAND', '-n celery1', '-c 5', '']), + ) + + p3 = NamespacedOptionParser(['foo', '-c:foo', '5']) + names5 = list(multi_args(p3, cmd='COMMAND', suffix='""')) + self.assertEqual( + names5[0][0:2], + ('foo', ['COMMAND', '-n foo', '-c 5', '']), + ) + + +class test_MultiTool(Case): + + def setUp(self): + self.fh = WhateverIO() + self.env = {} + self.t = MultiTool(env=self.env, fh=self.fh) + + def test_note(self): + self.t.note('hello world') + self.assertEqual(self.fh.getvalue(), 'hello world\n') + + def test_note_quiet(self): + self.t.quiet = True + self.t.note('hello world') + self.assertFalse(self.fh.getvalue()) + + def test_info(self): + self.t.verbose = True + self.t.info('hello info') + self.assertEqual(self.fh.getvalue(), 'hello info\n') + + def test_info_not_verbose(self): + self.t.verbose = False + self.t.info('hello info') + self.assertFalse(self.fh.getvalue()) + + def test_error(self): + self.t.say = Mock() + self.t.usage = Mock() + self.assertEqual(self.t.error('foo'), 1) + self.t.say.assert_called_with('foo') + self.t.usage.assert_called_with() + + self.t.say = Mock() + self.assertEqual(self.t.error(), 1) + self.assertFalse(self.t.say.called) + + self.assertEqual(self.t.retcode, 1) + + @patch('celery.bin.celeryd_multi.Popen') + def test_waitexec(self, Popen): + self.t.note = Mock() + pipe = Popen.return_value = Mock() + pipe.wait.return_value = -10 + self.assertEqual(self.t.waitexec(['-m', 'foo'], 'path'), 10) + Popen.assert_called_with(['path', '-m', 'foo'], env=self.t.env) + self.t.note.assert_called_with('* Child was 
terminated by signal 10') + + pipe.wait.return_value = 2 + self.assertEqual(self.t.waitexec(['-m', 'foo'], 'path'), 2) + self.t.note.assert_called_with( + '* Child terminated with failure code 2') + + pipe.wait.return_value = 0 + self.assertFalse(self.t.waitexec(['-m', 'foo', 'path'])) + + def test_nosplash(self): + self.t.nosplash = True + self.t.splash() + self.assertFalse(self.fh.getvalue()) + + def test_splash(self): + self.t.nosplash = False + self.t.splash() + self.assertIn('celeryd-multi', self.fh.getvalue()) + + def test_usage(self): + self.t.usage() + self.assertTrue(self.fh.getvalue()) + + def test_help(self): + self.t.help([]) + self.assertIn(doc, self.fh.getvalue()) + + def test_expand(self): + self.t.expand(['foo%n', 'ask', 'klask', 'dask']) + self.assertEqual( + self.fh.getvalue(), 'fooask\nfooklask\nfoodask\n', + ) + + def test_restart(self): + stop = self.t._stop_nodes = Mock() + self.t.restart(['jerry', 'george'], 'celeryd') + waitexec = self.t.waitexec = Mock() + self.assertTrue(stop.called) + callback = stop.call_args[1]['callback'] + self.assertTrue(callback) + + waitexec.return_value = 0 + callback('jerry', ['arg'], 13) + waitexec.assert_called_with(['arg']) + self.assertIn('OK', self.fh.getvalue()) + self.fh.seek(0) + self.fh.truncate() + + waitexec.return_value = 1 + callback('jerry', ['arg'], 13) + self.assertIn('FAILED', self.fh.getvalue()) + + def test_stop(self): + self.t.getpids = Mock() + self.t.getpids.return_value = [2, 3, 4] + self.t.shutdown_nodes = Mock() + self.t.stop(['a', 'b', '-INT'], 'celeryd') + self.t.shutdown_nodes.assert_called_with( + [2, 3, 4], sig=signal.SIGINT, retry=None, callback=None, + + ) + + def test_kill(self): + self.t.getpids = Mock() + self.t.getpids.return_value = [ + ('a', None, 10), + ('b', None, 11), + ('c', None, 12) + ] + sig = self.t.signal_node = Mock() + + self.t.kill(['a', 'b', 'c'], 'celeryd') + + sigs = sig.call_args_list + self.assertEqual(len(sigs), 3) + self.assertEqual(sigs[0][0], ('a', 10, signal.SIGKILL)) + self.assertEqual(sigs[1][0], ('b', 11, signal.SIGKILL)) + self.assertEqual(sigs[2][0], ('c', 12, signal.SIGKILL)) + + def prepare_pidfile_for_getpids(self, Pidfile): + class pids(object): + + def __init__(self, path): + self.path = path + + def read_pid(self): + try: + return {'celeryd@foo.pid': 10, + 'celeryd@bar.pid': 11}[self.path] + except KeyError: + raise ValueError() + Pidfile.side_effect = pids + + @patch('celery.bin.celeryd_multi.Pidfile') + @patch('socket.gethostname') + def test_getpids(self, gethostname, Pidfile): + gethostname.return_value = 'e.com' + self.prepare_pidfile_for_getpids(Pidfile) + callback = Mock() + + p = NamespacedOptionParser(['foo', 'bar', 'baz']) + nodes = self.t.getpids(p, 'celeryd', callback=callback) + self.assertEqual(nodes, [ + ('foo.e.com', + ('celeryd', '--pidfile=celeryd@foo.pid', '-n foo.e.com', ''), + 10), + ('bar.e.com', + ('celeryd', '--pidfile=celeryd@bar.pid', '-n bar.e.com', ''), + 11), + ]) + self.assertTrue(callback.called) + callback.assert_called_with( + 'baz.e.com', + ['celeryd', '--pidfile=celeryd@baz.pid', '-n baz.e.com', ''], + None, + ) + self.assertIn('DOWN', self.fh.getvalue()) + + # without callback, should work + nodes = self.t.getpids(p, 'celeryd', callback=None) + + @patch('celery.bin.celeryd_multi.Pidfile') + @patch('socket.gethostname') + @patch('celery.bin.celeryd_multi.sleep') + def test_shutdown_nodes(self, slepp, gethostname, Pidfile): + gethostname.return_value = 'e.com' + self.prepare_pidfile_for_getpids(Pidfile) + 
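# Only foo and bar get readable pidfiles from the helper above; baz + # raises ValueError on read, so at most two nodes can be signalled. + 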
self.assertIsNone(self.t.shutdown_nodes([])) + self.t.signal_node = Mock() + node_alive = self.t.node_alive = Mock() + self.t.node_alive.return_value = False + + callback = Mock() + self.t.stop(['foo', 'bar', 'baz'], 'celeryd', callback=callback) + sigs = self.t.signal_node.call_args_list + self.assertEqual(len(sigs), 2) + self.assertEqual(sigs[0][0], ('foo.e.com', 10, signal.SIGTERM)) + self.assertEqual(sigs[1][0], ('bar.e.com', 11, signal.SIGTERM)) + self.t.signal_node.return_value = False + self.assertTrue(callback.called) + self.t.stop(['foo', 'bar', 'baz'], 'celeryd', callback=None) + + def on_node_alive(pid): + if node_alive.call_count > 4: + return True + return False + self.t.signal_node.return_value = True + self.t.node_alive.side_effect = on_node_alive + self.t.stop(['foo', 'bar', 'baz'], 'celeryd', retry=True) + + @patch('os.kill') + def test_node_alive(self, kill): + kill.return_value = True + self.assertTrue(self.t.node_alive(13)) + esrch = OSError() + esrch.errno = errno.ESRCH + kill.side_effect = esrch + self.assertFalse(self.t.node_alive(13)) + kill.assert_called_with(13, 0) + + enoent = OSError() + enoent.errno = errno.ENOENT + kill.side_effect = enoent + with self.assertRaises(OSError): + self.t.node_alive(13) + + @patch('os.kill') + def test_signal_node(self, kill): + kill.return_value = True + self.assertTrue(self.t.signal_node('foo', 13, 9)) + esrch = OSError() + esrch.errno = errno.ESRCH + kill.side_effect = esrch + self.assertFalse(self.t.signal_node('foo', 13, 9)) + kill.assert_called_with(13, 9) + self.assertIn('Could not signal foo', self.fh.getvalue()) + + enoent = OSError() + enoent.errno = errno.ENOENT + kill.side_effect = enoent + with self.assertRaises(OSError): + self.t.signal_node('foo', 13, 9) + + def test_start(self): + self.t.waitexec = Mock() + self.t.waitexec.return_value = 0 + self.assertFalse(self.t.start(['foo', 'bar', 'baz'], 'celeryd')) + + self.t.waitexec.return_value = 1 + self.assertFalse(self.t.start(['foo', 'bar', 'baz'], 'celeryd')) + + def test_show(self): + self.t.show(['foo', 'bar', 'baz'], 'celeryd') + self.assertTrue(self.fh.getvalue()) + + @patch('socket.gethostname') + def test_get(self, gethostname): + gethostname.return_value = 'e.com' + self.t.get(['xuzzy.e.com', 'foo', 'bar', 'baz'], 'celeryd') + self.assertFalse(self.fh.getvalue()) + self.t.get(['foo.e.com', 'foo', 'bar', 'baz'], 'celeryd') + self.assertTrue(self.fh.getvalue()) + + @patch('socket.gethostname') + def test_names(self, gethostname): + gethostname.return_value = 'e.com' + self.t.names(['foo', 'bar', 'baz'], 'celeryd') + self.assertIn('foo.e.com\nbar.e.com\nbaz.e.com', self.fh.getvalue()) + + def test_execute_from_commandline(self): + start = self.t.commands['start'] = Mock() + self.t.error = Mock() + self.t.execute_from_commandline(['multi', 'start', 'foo', 'bar']) + self.assertFalse(self.t.error.called) + start.assert_called_with(['foo', 'bar'], 'celeryd') + + self.t.error = Mock() + self.t.execute_from_commandline(['multi', 'frob', 'foo', 'bar']) + self.t.error.assert_called_with('Invalid command: frob') + + self.t.error = Mock() + self.t.execute_from_commandline(['multi']) + self.t.error.assert_called_with() + + self.t.error = Mock() + self.t.execute_from_commandline(['multi', '-foo']) + self.t.error.assert_called_with() + + self.t.execute_from_commandline( + ['multi', 'start', 'foo', + '--nosplash', '--quiet', '-q', '--verbose', '--no-color'], + ) + self.assertTrue(self.t.nosplash) + self.assertTrue(self.t.quiet) + self.assertTrue(self.t.verbose) + 
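# Global flags such as --nosplash/--quiet/--verbose from the argv above + # are stored as attributes on the tool itself before the subcommand runs. + 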
self.assertTrue(self.t.no_color) + + def test_stopwait(self): + self.t._stop_nodes = Mock() + self.t.stopwait(['foo', 'bar', 'baz'], 'celeryd') + self.assertEqual(self.t._stop_nodes.call_args[1]['retry'], 2) + + @patch('celery.bin.celeryd_multi.MultiTool') + def test_main(self, MultiTool): + m = MultiTool.return_value = Mock() + with self.assertRaises(SystemExit): + main() + m.execute_from_commandline.assert_called_with(sys.argv) diff --git a/awx/lib/site-packages/celery/tests/bin/test_celeryev.py b/awx/lib/site-packages/celery/tests/bin/test_celeryev.py new file mode 100644 index 0000000000..02313970c3 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/bin/test_celeryev.py @@ -0,0 +1,76 @@ +from __future__ import absolute_import +from __future__ import with_statement + +from nose import SkipTest +from mock import patch as mpatch + +from celery.app import app_or_default +from celery.bin import celeryev + +from celery.tests.utils import Case, patch + + +class MockCommand(object): + executed = [] + + def execute_from_commandline(self, **kwargs): + self.executed.append(True) + + +def proctitle(prog, info=None): + proctitle.last = (prog, info) +proctitle.last = () + + +class test_EvCommand(Case): + + def setUp(self): + self.app = app_or_default() + self.ev = celeryev.EvCommand(app=self.app) + + @patch('celery.events.dumper', 'evdump', lambda **kw: 'me dumper, you?') + @patch('celery.bin.celeryev', 'set_process_title', proctitle) + def test_run_dump(self): + self.assertEqual(self.ev.run(dump=True), 'me dumper, you?') + self.assertIn('celeryev:dump', proctitle.last[0]) + + def test_run_top(self): + try: + import curses # noqa + except ImportError: + raise SkipTest('curses monitor requires curses') + + @patch('celery.events.cursesmon', 'evtop', lambda **kw: 'me top, you?') + @patch('celery.bin.celeryev', 'set_process_title', proctitle) + def _inner(): + self.assertEqual(self.ev.run(), 'me top, you?') + self.assertIn('celeryev:top', proctitle.last[0]) + return _inner() + + @patch('celery.events.snapshot', 'evcam', lambda *a, **k: (a, k)) + @patch('celery.bin.celeryev', 'set_process_title', proctitle) + def test_run_cam(self): + a, kw = self.ev.run(camera='foo.bar.baz', logfile='logfile') + self.assertEqual(a[0], 'foo.bar.baz') + self.assertEqual(kw['freq'], 1.0) + self.assertIsNone(kw['maxrate']) + self.assertEqual(kw['loglevel'], 'INFO') + self.assertEqual(kw['logfile'], 'logfile') + self.assertIn('celeryev:cam', proctitle.last[0]) + + @mpatch('celery.events.snapshot.evcam') + @mpatch('celery.bin.celeryev.detached') + def test_run_cam_detached(self, detached, evcam): + self.ev.prog_name = 'celeryev' + self.ev.run_evcam('myapp.Camera', detach=True) + self.assertTrue(detached.called) + self.assertTrue(evcam.called) + + def test_get_options(self): + self.assertTrue(self.ev.get_options()) + + @patch('celery.bin.celeryev', 'EvCommand', MockCommand) + def test_main(self): + MockCommand.executed = [] + celeryev.main() + self.assertTrue(MockCommand.executed) diff --git a/awx/lib/site-packages/celery/tests/bin/test_celeryevdump.py b/awx/lib/site-packages/celery/tests/bin/test_celeryevdump.py new file mode 100644 index 0000000000..b04f85a60f --- /dev/null +++ b/awx/lib/site-packages/celery/tests/bin/test_celeryevdump.py @@ -0,0 +1,47 @@ +from __future__ import absolute_import + +from mock import patch +from time import time + +from celery.events.dumper import ( + humanize_type, + Dumper, + evdump, +) + +from celery.tests.utils import Case, WhateverIO + + +class test_Dumper(Case): + + def setUp(self): + 
self.out = WhateverIO() + self.dumper = Dumper(out=self.out) + + def test_humanize_type(self): + self.assertEqual(humanize_type('worker-offline'), 'shutdown') + self.assertEqual(humanize_type('task-started'), 'task started') + + def test_format_task_event(self): + self.dumper.format_task_event( + 'worker.example.com', time(), 'task-started', 'tasks.add', {}) + self.assertTrue(self.out.getvalue()) + + def test_on_event(self): + event = { + 'hostname': 'worker.example.com', + 'timestamp': time(), + 'uuid': '1ef', + 'name': 'tasks.add', + 'args': '(2, 2)', + 'kwargs': '{}', + } + self.dumper.on_event(dict(event, type='task-received')) + self.assertTrue(self.out.getvalue()) + self.dumper.on_event(dict(event, type='task-revoked')) + self.dumper.on_event(dict(event, type='worker-online')) + + @patch('celery.events.EventReceiver.capture') + def test_evdump(self, capture): + capture.side_effect = KeyboardInterrupt() + evdump() diff --git a/awx/lib/site-packages/celery/tests/compat.py b/awx/lib/site-packages/celery/tests/compat.py new file mode 100644 index 0000000000..30eb853b0d --- /dev/null +++ b/awx/lib/site-packages/celery/tests/compat.py @@ -0,0 +1,85 @@ +from __future__ import absolute_import + +import sys + + +class WarningMessage(object): + + """Holds the result of a single showwarning() call.""" + + _WARNING_DETAILS = ('message', 'category', 'filename', 'lineno', 'file', + 'line') + + def __init__(self, message, category, filename, lineno, file=None, + line=None): + local_values = locals() + for attr in self._WARNING_DETAILS: + setattr(self, attr, local_values[attr]) + + self._category_name = category and category.__name__ or None + + def __str__(self): + return ('{message : %r, category : %r, filename : %r, lineno : %s, ' + 'line : %r}' % (self.message, self._category_name, + self.filename, self.lineno, self.line)) + + +class catch_warnings(object): + + """A context manager that copies and restores the warnings filter upon + exiting the context. + + The 'record' argument specifies whether warnings should be captured by a + custom implementation of warnings.showwarning() and be appended to a list + returned by the context manager. Otherwise None is returned by the context + manager. The objects appended to the list are arguments whose attributes + mirror the arguments to showwarning(). + + The 'module' argument is to specify an alternative module to the module + named 'warnings' and imported under that name. This argument is only + useful when testing the warnings module itself. + + """ + + def __init__(self, record=False, module=None): + """Specify whether to record warnings and if an alternative module + should be used other than sys.modules['warnings']. + + For compatibility with Python 3.0, please consider all arguments to be + keyword-only. 
+ + """ + self._record = record + self._module = module is None and sys.modules['warnings'] or module + self._entered = False + + def __repr__(self): + args = [] + if self._record: + args.append('record=True') + if self._module is not sys.modules['warnings']: + args.append('module=%r' % self._module) + name = type(self).__name__ + return '%s(%s)' % (name, ', '.join(args)) + + def __enter__(self): + if self._entered: + raise RuntimeError('Cannot enter %r twice' % self) + self._entered = True + self._filters = self._module.filters + self._module.filters = self._filters[:] + self._showwarning = self._module.showwarning + if self._record: + log = [] + + def showwarning(*args, **kwargs): + log.append(WarningMessage(*args, **kwargs)) + + self._module.showwarning = showwarning + return log + + def __exit__(self, *exc_info): + if not self._entered: + raise RuntimeError('Cannot exit %r without entering first' % self) + self._module.filters = self._filters + self._module.showwarning = self._showwarning diff --git a/awx/lib/site-packages/celery/tests/compat_modules/__init__.py b/awx/lib/site-packages/celery/tests/compat_modules/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/celery/tests/compat_modules/test_decorators.py b/awx/lib/site-packages/celery/tests/compat_modules/test_decorators.py new file mode 100644 index 0000000000..b8e3c4ee62 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/compat_modules/test_decorators.py @@ -0,0 +1,33 @@ +from __future__ import absolute_import +from __future__ import with_statement + +from celery.task import base + +from celery.tests.compat import catch_warnings +from celery.tests.utils import Case + + +def add(x, y): + return x + y + + +class test_decorators(Case): + + def setUp(self): + with catch_warnings(record=True): + from celery import decorators + self.decorators = decorators + + def assertCompatDecorator(self, decorator, type, **opts): + task = decorator(**opts)(add) + self.assertEqual(task(8, 8), 16) + self.assertTrue(task.accept_magic_kwargs) + self.assertIsInstance(task, type) + + def test_task(self): + self.assertCompatDecorator(self.decorators.task, base.BaseTask) + + def test_periodic_task(self): + self.assertCompatDecorator(self.decorators.periodic_task, + base.BaseTask, + run_every=1) diff --git a/awx/lib/site-packages/celery/tests/compat_modules/test_messaging.py b/awx/lib/site-packages/celery/tests/compat_modules/test_messaging.py new file mode 100644 index 0000000000..8e606ceaf4 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/compat_modules/test_messaging.py @@ -0,0 +1,19 @@ +from __future__ import absolute_import + +from celery import messaging +from celery.tests.utils import Case + + +class test_compat_messaging_module(Case): + + def test_with_connection(self): + + def foo(**kwargs): + pass + + self.assertTrue(messaging.with_connection(foo)) + + def test_get_consume_set(self): + conn = messaging.establish_connection() + messaging.get_consumer_set(conn).close() + conn.close() diff --git a/awx/lib/site-packages/celery/tests/concurrency/__init__.py b/awx/lib/site-packages/celery/tests/concurrency/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/celery/tests/concurrency/test_concurrency.py b/awx/lib/site-packages/celery/tests/concurrency/test_concurrency.py new file mode 100644 index 0000000000..30c675ee15 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/concurrency/test_concurrency.py @@ -0,0 +1,88 @@ +from __future__ import 
absolute_import +from __future__ import with_statement + +import os + +from itertools import count + +from celery.concurrency.base import apply_target, BasePool +from celery.tests.utils import Case + + +class test_BasePool(Case): + + def test_apply_target(self): + + scratch = {} + counter = count(0).next + + def gen_callback(name, retval=None): + + def callback(*args): + scratch[name] = (counter(), args) + return retval + + return callback + + apply_target(gen_callback('target', 42), + args=(8, 16), + callback=gen_callback('callback'), + accept_callback=gen_callback('accept_callback')) + + self.assertDictContainsSubset( + {'target': (1, (8, 16)), 'callback': (2, (42, ))}, + scratch, + ) + pa1 = scratch['accept_callback'] + self.assertEqual(0, pa1[0]) + self.assertEqual(pa1[1][0], os.getpid()) + self.assertTrue(pa1[1][1]) + + # No accept callback + scratch.clear() + apply_target(gen_callback('target', 42), + args=(8, 16), + callback=gen_callback('callback'), + accept_callback=None) + self.assertDictEqual(scratch, + {'target': (3, (8, 16)), + 'callback': (4, (42, ))}) + + def test_does_not_debug(self): + x = BasePool(10) + x._does_debug = False + x.apply_async(object) + + def test_num_processes(self): + self.assertEqual(BasePool(7).num_processes, 7) + + def test_interface_on_start(self): + BasePool(10).on_start() + + def test_interface_on_stop(self): + BasePool(10).on_stop() + + def test_interface_on_apply(self): + BasePool(10).on_apply() + + def test_interface_info(self): + self.assertDictEqual(BasePool(10).info, {}) + + def test_active(self): + p = BasePool(10) + self.assertFalse(p.active) + p._state = p.RUN + self.assertTrue(p.active) + + def test_restart(self): + p = BasePool(10) + with self.assertRaises(NotImplementedError): + p.restart() + + def test_interface_on_terminate(self): + p = BasePool(10) + p.on_terminate() + + def test_interface_terminate_job(self): + with self.assertRaises(NotImplementedError): + BasePool(10).terminate_job(101) diff --git a/awx/lib/site-packages/celery/tests/concurrency/test_eventlet.py b/awx/lib/site-packages/celery/tests/concurrency/test_eventlet.py new file mode 100644 index 0000000000..54ae5c8b14 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/concurrency/test_eventlet.py @@ -0,0 +1,130 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import os +import sys + +from nose import SkipTest +from mock import patch, Mock + +from celery.app.defaults import is_pypy +from celery.concurrency.eventlet import ( + apply_target, + Schedule, + Timer, + TaskPool, +) + +from celery.tests.utils import Case, mock_module, patch_many, skip_if_pypy + + +class EventletCase(Case): + + @skip_if_pypy + def setUp(self): + if is_pypy: + raise SkipTest('mock_modules not working on PyPy1.9') + try: + self.eventlet = __import__('eventlet') + except ImportError: + raise SkipTest( + 'eventlet not installed, skipping related tests.') + + @skip_if_pypy + def tearDown(self): + for mod in [mod for mod in sys.modules if mod.startswith('eventlet')]: + try: + del(sys.modules[mod]) + except KeyError: + pass + + +class test_aaa_eventlet_patch(EventletCase): + + def test_aaa_is_patched(self): + raise SkipTest("side effects") + monkey_patched = [] + prev_monkey_patch = self.eventlet.monkey_patch + self.eventlet.monkey_patch = lambda: monkey_patched.append(True) + prev_eventlet = sys.modules.pop('celery.concurrency.eventlet', None) + os.environ.pop('EVENTLET_NOPATCH') + try: + import celery.concurrency.eventlet # noqa + self.assertTrue(monkey_patched) + 
finally: + sys.modules['celery.concurrency.eventlet'] = prev_eventlet + os.environ['EVENTLET_NOPATCH'] = 'yes' + self.eventlet.monkey_patch = prev_monkey_patch + + +eventlet_modules = ( + 'eventlet', + 'eventlet.debug', + 'eventlet.greenthread', + 'eventlet.greenpool', + 'greenlet', +) + + +class test_Schedule(EventletCase): + + def test_sched(self): + with mock_module(*eventlet_modules): + with patch_many('eventlet.greenthread.spawn_after', + 'greenlet.GreenletExit') as (spawn_after, + GreenletExit): + x = Schedule() + x.GreenletExit = KeyError + entry = Mock() + g = x._enter(1, 0, entry) + self.assertTrue(x.queue) + + x._entry_exit(g, entry) + g.wait.side_effect = KeyError() + x._entry_exit(g, entry) + entry.cancel.assert_called_with() + self.assertFalse(x._queue) + + x._queue.add(g) + x.clear() + x._queue.add(g) + g.cancel.side_effect = KeyError() + x.clear() + + +class test_TaskPool(EventletCase): + + def test_pool(self): + with mock_module(*eventlet_modules): + with patch_many('eventlet.greenpool.GreenPool', + 'eventlet.greenthread') as (GreenPool, + greenthread): + x = TaskPool() + x.on_start() + x.on_stop() + x.on_apply(Mock()) + x._pool = None + x.on_stop() + self.assertTrue(x.getpid()) + + @patch('celery.concurrency.eventlet.base') + def test_apply_target(self, base): + apply_target(Mock(), getpid=Mock()) + self.assertTrue(base.apply_target.called) + + +class test_Timer(EventletCase): + + def test_timer(self): + x = Timer() + x.ensure_started() + x.schedule = Mock() + x.start() + x.stop() + x.schedule.clear.assert_called_with() + + tref = Mock() + x.cancel(tref) + x.schedule.GreenletExit = KeyError + tref.cancel.side_effect = KeyError() + x.cancel(tref) diff --git a/awx/lib/site-packages/celery/tests/concurrency/test_gevent.py b/awx/lib/site-packages/celery/tests/concurrency/test_gevent.py new file mode 100644 index 0000000000..a6661e23e9 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/concurrency/test_gevent.py @@ -0,0 +1,121 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import os +import sys + +from nose import SkipTest +from mock import Mock + +from celery.concurrency.gevent import ( + Schedule, + Timer, + TaskPool, +) + +from celery.tests.utils import Case, mock_module, patch_many, skip_if_pypy +gevent_modules = ( + 'gevent', + 'gevent.monkey', + 'gevent.greenlet', + 'gevent.pool', + 'greenlet', +) + + +class GeventCase(Case): + + @skip_if_pypy + def setUp(self): + try: + self.gevent = __import__('gevent') + except ImportError: + raise SkipTest( + 'gevent not installed, skipping related tests.') + + +class test_gevent_patch(GeventCase): + + def test_is_patched(self): + with mock_module(*gevent_modules): + monkey_patched = [] + import gevent + from gevent import monkey + gevent.version_info = (1, 0, 0) + prev_monkey_patch = monkey.patch_all + monkey.patch_all = lambda: monkey_patched.append(True) + prev_gevent = sys.modules.pop('celery.concurrency.gevent', None) + os.environ.pop('GEVENT_NOPATCH') + try: + import celery.concurrency.gevent # noqa + self.assertTrue(monkey_patched) + finally: + sys.modules['celery.concurrency.gevent'] = prev_gevent + os.environ['GEVENT_NOPATCH'] = 'yes' + monkey.patch_all = prev_monkey_patch + + +class test_Schedule(Case): + + def test_sched(self): + with mock_module(*gevent_modules): + with patch_many('gevent.greenlet', + 'gevent.greenlet.GreenletExit') as (greenlet, + GreenletExit): + greenlet.Greenlet = object + x = Schedule() + greenlet.Greenlet = Mock() + x._Greenlet.spawn_later = Mock() + 
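# Substituting KeyError for _GreenletExit (next line) lets the exit + # path run under plain mocks, without real greenlets. + 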
x._GreenletExit = KeyError + entry = Mock() + g = x._enter(1, 0, entry) + self.assertTrue(x.queue) + + x._entry_exit(g) + g.kill.assert_called_with() + self.assertFalse(x._queue) + + x._queue.add(g) + x.clear() + x._queue.add(g) + g.kill.side_effect = KeyError() + x.clear() + + +class test_TasKPool(Case): + + def test_pool(self): + with mock_module(*gevent_modules): + with patch_many('gevent.spawn_raw', 'gevent.pool.Pool') as ( + spawn_raw, Pool): + x = TaskPool() + x.on_start() + x.on_stop() + x.on_apply(Mock()) + x._pool = None + x.on_stop() + + x._pool = Mock() + x._pool._semaphore.counter = 1 + x._pool.size = 1 + x.grow() + self.assertEqual(x._pool.size, 2) + self.assertEqual(x._pool._semaphore.counter, 2) + x.shrink() + self.assertEqual(x._pool.size, 1) + self.assertEqual(x._pool._semaphore.counter, 1) + + x._pool = [4, 5, 6] + self.assertEqual(x.num_processes, 3) + + +class test_Timer(Case): + + def test_timer(self): + with mock_module(*gevent_modules): + x = Timer() + x.ensure_started() + x.schedule = Mock() + x.start() + x.stop() + x.schedule.clear.assert_called_with() diff --git a/awx/lib/site-packages/celery/tests/concurrency/test_pool.py b/awx/lib/site-packages/celery/tests/concurrency/test_pool.py new file mode 100644 index 0000000000..97441ba1ed --- /dev/null +++ b/awx/lib/site-packages/celery/tests/concurrency/test_pool.py @@ -0,0 +1,83 @@ +from __future__ import absolute_import + +import time +import itertools + +from nose import SkipTest + +from celery.datastructures import ExceptionInfo +from celery.tests.utils import Case + + +def do_something(i): + return i * i + + +def long_something(): + time.sleep(1) + + +def raise_something(i): + try: + raise KeyError('FOO EXCEPTION') + except KeyError: + return ExceptionInfo() + + +class test_TaskPool(Case): + + def setUp(self): + try: + __import__('multiprocessing') + except ImportError: + raise SkipTest('multiprocessing not supported') + from celery.concurrency.processes import TaskPool + self.TaskPool = TaskPool + + def test_attrs(self): + p = self.TaskPool(2) + self.assertEqual(p.limit, 2) + self.assertIsNone(p._pool) + + def x_apply(self): + p = self.TaskPool(2) + p.start() + scratchpad = {} + proc_counter = itertools.count().next + + def mycallback(ret_value): + process = proc_counter() + scratchpad[process] = {} + scratchpad[process]['ret_value'] = ret_value + + myerrback = mycallback + + res = p.apply_async(do_something, args=[10], callback=mycallback) + res2 = p.apply_async(raise_something, args=[10], errback=myerrback) + res3 = p.apply_async(do_something, args=[20], callback=mycallback) + + self.assertEqual(res.get(), 100) + time.sleep(0.5) + self.assertDictContainsSubset({'ret_value': 100}, + scratchpad.get(0)) + + self.assertIsInstance(res2.get(), ExceptionInfo) + self.assertTrue(scratchpad.get(1)) + time.sleep(1) + self.assertIsInstance(scratchpad[1]['ret_value'], + ExceptionInfo) + self.assertEqual(scratchpad[1]['ret_value'].exception.args, + ('FOO EXCEPTION', )) + + self.assertEqual(res3.get(), 400) + time.sleep(0.5) + self.assertDictContainsSubset({'ret_value': 400}, + scratchpad.get(2)) + + res3 = p.apply_async(do_something, args=[30], callback=mycallback) + + self.assertEqual(res3.get(), 900) + time.sleep(0.5) + self.assertDictContainsSubset({'ret_value': 900}, + scratchpad.get(3)) + p.stop() diff --git a/awx/lib/site-packages/celery/tests/concurrency/test_processes.py b/awx/lib/site-packages/celery/tests/concurrency/test_processes.py new file mode 100644 index 0000000000..fb35bc70eb --- /dev/null +++ 
b/awx/lib/site-packages/celery/tests/concurrency/test_processes.py @@ -0,0 +1,191 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import time + +from itertools import cycle + +from mock import Mock +from nose import SkipTest + +from celery.utils.functional import noop +from celery.tests.utils import Case +try: + from celery.concurrency import processes as mp +except ImportError: + + class _mp(object): + RUN = 0x1 + + class TaskPool(object): + _pool = Mock() + + def __init__(self, *args, **kwargs): + pass + + def start(self): + pass + + def stop(self): + pass + + def apply_async(self, *args, **kwargs): + pass + mp = _mp() # noqa + + +class Object(object): # for writeable attributes. + + def __init__(self, **kwargs): + [setattr(self, k, v) for k, v in kwargs.items()] + + +class MockResult(object): + + def __init__(self, value, pid): + self.value = value + self.pid = pid + + def worker_pids(self): + return [self.pid] + + def get(self): + return self.value + + +class MockPool(object): + started = False + closed = False + joined = False + terminated = False + _state = None + + def __init__(self, *args, **kwargs): + self.started = True + self._timeout_handler = Mock() + self._result_handler = Mock() + self.maintain_pool = Mock() + self._state = mp.RUN + self._processes = kwargs.get('processes') + self._pool = [Object(pid=i) for i in range(self._processes)] + self._current_proc = cycle(xrange(self._processes)).next + + def close(self): + self.closed = True + self._state = 'CLOSE' + + def join(self): + self.joined = True + + def terminate(self): + self.terminated = True + + def grow(self, n=1): + self._processes += n + + def shrink(self, n=1): + self._processes -= n + + def apply_async(self, *args, **kwargs): + pass + + +class ExeMockPool(MockPool): + + def apply_async(self, target, args=(), kwargs={}, callback=noop): + from threading import Timer + res = target(*args, **kwargs) + Timer(0.1, callback, (res, )).start() + return MockResult(res, self._current_proc()) + + +class TaskPool(mp.TaskPool): + Pool = MockPool + + +class ExeMockTaskPool(mp.TaskPool): + Pool = ExeMockPool + + +class test_TaskPool(Case): + + def setUp(self): + try: + import multiprocessing # noqa + except ImportError: + raise SkipTest('multiprocessing not supported') + + def test_start(self): + pool = TaskPool(10) + pool.start() + self.assertTrue(pool._pool.started) + self.assertTrue(pool._pool._state == mp.RUN) + + _pool = pool._pool + pool.stop() + self.assertTrue(_pool.closed) + self.assertTrue(_pool.joined) + pool.stop() + + pool.start() + _pool = pool._pool + pool.terminate() + pool.terminate() + self.assertTrue(_pool.terminated) + + def test_apply_async(self): + pool = TaskPool(10) + pool.start() + pool.apply_async(lambda x: x, (2, ), {}) + + def test_terminate_job(self): + pool = TaskPool(10) + pool._pool = Mock() + pool.terminate_job(1341) + pool._pool.terminate_job.assert_called_with(1341, None) + + def test_grow_shrink(self): + pool = TaskPool(10) + pool.start() + self.assertEqual(pool._pool._processes, 10) + pool.grow() + self.assertEqual(pool._pool._processes, 11) + pool.shrink(2) + self.assertEqual(pool._pool._processes, 9) + + def test_info(self): + pool = TaskPool(10) + procs = [Object(pid=i) for i in range(pool.limit)] + pool._pool = Object(_pool=procs, + _maxtasksperchild=None, + timeout=10, + soft_timeout=5) + info = pool.info + self.assertEqual(info['max-concurrency'], pool.limit) + self.assertIsNone(info['max-tasks-per-child']) + self.assertEqual(info['timeouts'], (5, 
10)) + + def test_num_processes(self): + pool = TaskPool(7) + pool.start() + self.assertEqual(pool.num_processes, 7) + + def test_restart_pool(self): + pool = TaskPool() + pool._pool = Mock() + pool.restart() + pool._pool.restart.assert_called_with() + + def test_restart(self): + raise SkipTest('functional test') + + def get_pids(pool): + return set([p.pid for p in pool._pool._pool]) + + tp = self.TaskPool(5) + time.sleep(0.5) + tp.start() + pids = get_pids(tp) + tp.restart() + time.sleep(0.5) + self.assertEqual(pids, get_pids(tp)) diff --git a/awx/lib/site-packages/celery/tests/concurrency/test_solo.py b/awx/lib/site-packages/celery/tests/concurrency/test_solo.py new file mode 100644 index 0000000000..ba420b60a5 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/concurrency/test_solo.py @@ -0,0 +1,24 @@ +from __future__ import absolute_import + +import operator + +from celery.concurrency import solo +from celery.utils.functional import noop +from celery.tests.utils import Case + + +class test_solo_TaskPool(Case): + + def test_on_start(self): + x = solo.TaskPool() + x.on_start() + + def test_on_apply(self): + x = solo.TaskPool() + x.on_start() + x.on_apply(operator.add, (2, 2), {}, noop, noop) + + def test_info(self): + x = solo.TaskPool() + x.on_start() + self.assertTrue(x.info) diff --git a/awx/lib/site-packages/celery/tests/concurrency/test_threads.py b/awx/lib/site-packages/celery/tests/concurrency/test_threads.py new file mode 100644 index 0000000000..4443c52946 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/concurrency/test_threads.py @@ -0,0 +1,63 @@ +from __future__ import absolute_import +from __future__ import with_statement + +from mock import Mock + +from celery.concurrency.threads import NullDict, TaskPool, apply_target + +from celery.tests.utils import Case, mask_modules, mock_module + + +class test_NullDict(Case): + + def test_setitem(self): + x = NullDict() + x['foo'] = 1 + with self.assertRaises(KeyError): + x['foo'] + + +class test_TaskPool(Case): + + def test_without_threadpool(self): + + with mask_modules('threadpool'): + with self.assertRaises(ImportError): + TaskPool() + + def test_with_threadpool(self): + with mock_module('threadpool'): + x = TaskPool() + self.assertTrue(x.ThreadPool) + self.assertTrue(x.WorkRequest) + + def test_on_start(self): + with mock_module('threadpool'): + x = TaskPool() + x.on_start() + self.assertTrue(x._pool) + self.assertIsInstance(x._pool.workRequests, NullDict) + + def test_on_stop(self): + with mock_module('threadpool'): + x = TaskPool() + x.on_start() + x.on_stop() + x._pool.dismissWorkers.assert_called_with(x.limit, do_join=True) + + def test_on_apply(self): + with mock_module('threadpool'): + x = TaskPool() + x.on_start() + callback = Mock() + accept_callback = Mock() + target = Mock() + req = x.on_apply(target, args=(1, 2), kwargs={'a': 10}, + callback=callback, + accept_callback=accept_callback) + x.WorkRequest.assert_called_with( + apply_target, + (target, (1, 2), {'a': 10}, callback, accept_callback), + ) + x._pool.putRequest.assert_called_with(req) + x._pool._results_queue.queue.clear.assert_called_with() diff --git a/awx/lib/site-packages/celery/tests/config.py b/awx/lib/site-packages/celery/tests/config.py new file mode 100644 index 0000000000..c874674232 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/config.py @@ -0,0 +1,54 @@ +from __future__ import absolute_import + +import os + +from kombu import Queue + +BROKER_URL = 'memory://' + +#: warn if config module not found +os.environ['C_WNOCONF'] = 'yes' + 
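+#: The suite runs entirely in memory (the 'memory://' broker above and
+#: the 'cache' result backend with a 'memory' cache below), so no
+#: external broker or result store needs to be running; the Tyrant,
+#: Redis and Mongo settings further down are only exercised when those
+#: services are installed and running.
+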
+#: Don't want log output when running suite. +CELERYD_HIJACK_ROOT_LOGGER = False + +CELERY_RESULT_BACKEND = 'cache' +CELERY_CACHE_BACKEND = 'memory' +CELERY_RESULT_DBURI = 'sqlite:///test.db' +CELERY_SEND_TASK_ERROR_EMAILS = False + +CELERY_DEFAULT_QUEUE = 'testcelery' +CELERY_DEFAULT_EXCHANGE = 'testcelery' +CELERY_DEFAULT_ROUTING_KEY = 'testcelery' +CELERY_QUEUES = ( + Queue('testcelery', routing_key='testcelery'), +) + +CELERY_ENABLE_UTC = True +CELERY_TIMEZONE = 'UTC' + +CELERYD_LOG_COLOR = False + +# Tyrant results tests (only executed if installed and running) +TT_HOST = os.environ.get('TT_HOST') or 'localhost' +TT_PORT = int(os.environ.get('TT_PORT') or 1978) + +# Redis results tests (only executed if installed and running) +CELERY_REDIS_HOST = os.environ.get('REDIS_HOST') or 'localhost' +CELERY_REDIS_PORT = int(os.environ.get('REDIS_PORT') or 6379) +CELERY_REDIS_DB = os.environ.get('REDIS_DB') or 0 +CELERY_REDIS_PASSWORD = os.environ.get('REDIS_PASSWORD') + +# Mongo results tests (only executed if installed and running) +CELERY_MONGODB_BACKEND_SETTINGS = { + 'host': os.environ.get('MONGO_HOST') or 'localhost', + 'port': os.environ.get('MONGO_PORT') or 27017, + 'database': os.environ.get('MONGO_DB') or 'celery_unittests', + 'taskmeta_collection': (os.environ.get('MONGO_TASKMETA_COLLECTION') + or 'taskmeta_collection'), +} +if os.environ.get('MONGO_USER'): + CELERY_MONGODB_BACKEND_SETTINGS['user'] = os.environ.get('MONGO_USER') +if os.environ.get('MONGO_PASSWORD'): + CELERY_MONGODB_BACKEND_SETTINGS['password'] = \ + os.environ.get('MONGO_PASSWORD') diff --git a/awx/lib/site-packages/celery/tests/contrib/__init__.py b/awx/lib/site-packages/celery/tests/contrib/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/celery/tests/contrib/test_abortable.py b/awx/lib/site-packages/celery/tests/contrib/test_abortable.py new file mode 100644 index 0000000000..a72f645347 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/contrib/test_abortable.py @@ -0,0 +1,51 @@ +from __future__ import absolute_import + +from celery.contrib.abortable import AbortableTask, AbortableAsyncResult +from celery.result import AsyncResult +from celery.tests.utils import Case + + +class MyAbortableTask(AbortableTask): + + def run(self, **kwargs): + return True + + +class test_AbortableTask(Case): + + def test_async_result_is_abortable(self): + t = MyAbortableTask() + result = t.apply_async() + tid = result.id + self.assertIsInstance(t.AsyncResult(tid), AbortableAsyncResult) + + def test_is_not_aborted(self): + t = MyAbortableTask() + t.push_request() + try: + result = t.apply_async() + tid = result.id + self.assertFalse(t.is_aborted(task_id=tid)) + finally: + t.pop_request() + + def test_is_aborted_not_abort_result(self): + t = MyAbortableTask() + t.AsyncResult = AsyncResult + t.push_request() + try: + t.request.id = 'foo' + self.assertFalse(t.is_aborted()) + finally: + t.pop_request() + + def test_abort_yields_aborted(self): + t = MyAbortableTask() + t.push_request() + try: + result = t.apply_async() + result.abort() + tid = result.id + self.assertTrue(t.is_aborted(task_id=tid)) + finally: + t.pop_request() diff --git a/awx/lib/site-packages/celery/tests/contrib/test_migrate.py b/awx/lib/site-packages/celery/tests/contrib/test_migrate.py new file mode 100644 index 0000000000..ce9ead6af9 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/contrib/test_migrate.py @@ -0,0 +1,111 @@ +from __future__ import absolute_import +from __future__ import with_statement + +from 
kombu import Connection, Producer, Queue, Exchange +from kombu.exceptions import StdChannelError +from mock import patch + +from celery.contrib.migrate import ( + State, + migrate_task, + migrate_tasks, +) +from celery.utils.encoding import bytes_t, ensure_bytes +from celery.tests.utils import AppCase, Case, Mock + + +def Message(body, exchange='exchange', routing_key='rkey', + compression=None, content_type='application/json', + content_encoding='utf-8'): + return Mock( + attrs={ + 'body': body, + 'delivery_info': { + 'exchange': exchange, + 'routing_key': routing_key, + }, + 'headers': { + 'compression': compression, + }, + 'content_type': content_type, + 'content_encoding': content_encoding, + 'properties': {} + }, + ) + + +class test_State(Case): + + def test_strtotal(self): + x = State() + self.assertEqual(x.strtotal, u'?') + x.total_apx = 100 + self.assertEqual(x.strtotal, u'100') + + +class test_migrate_task(Case): + + def test_removes_compression_header(self): + x = Message('foo', compression='zlib') + producer = Mock() + migrate_task(producer, x.body, x) + self.assertTrue(producer.publish.called) + args, kwargs = producer.publish.call_args + self.assertIsInstance(args[0], bytes_t) + self.assertNotIn('compression', kwargs['headers']) + self.assertEqual(kwargs['compression'], 'zlib') + self.assertEqual(kwargs['content_type'], 'application/json') + self.assertEqual(kwargs['content_encoding'], 'utf-8') + self.assertEqual(kwargs['exchange'], 'exchange') + self.assertEqual(kwargs['routing_key'], 'rkey') + + +class test_migrate_tasks(AppCase): + + def test_migrate(self, name='testcelery'): + x = Connection('memory://foo') + y = Connection('memory://foo') + # use separate state + x.default_channel.queues = {} + y.default_channel.queues = {} + + ex = Exchange(name, 'direct') + q = Queue(name, exchange=ex, routing_key=name) + q(x.default_channel).declare() + Producer(x).publish('foo', exchange=name, routing_key=name) + Producer(x).publish('bar', exchange=name, routing_key=name) + Producer(x).publish('baz', exchange=name, routing_key=name) + self.assertTrue(x.default_channel.queues) + self.assertFalse(y.default_channel.queues) + + migrate_tasks(x, y) + + yq = q(y.default_channel) + self.assertEqual(yq.get().body, ensure_bytes('foo')) + self.assertEqual(yq.get().body, ensure_bytes('bar')) + self.assertEqual(yq.get().body, ensure_bytes('baz')) + + Producer(x).publish('foo', exchange=name, routing_key=name) + callback = Mock() + migrate_tasks(x, y, callback=callback) + self.assertTrue(callback.called) + migrate = Mock() + Producer(x).publish('baz', exchange=name, routing_key=name) + migrate_tasks(x, y, callback=callback, migrate=migrate) + self.assertTrue(migrate.called) + + with patch('kombu.transport.virtual.Channel.queue_declare') as qd: + + def effect(*args, **kwargs): + if kwargs.get('passive'): + raise StdChannelError() + return 0, 3, 0 + qd.side_effect = effect + migrate_tasks(x, y) + + x = Connection('memory://') + x.default_channel.queues = {} + y.default_channel.queues = {} + callback = Mock() + migrate_tasks(x, y, callback=callback) + self.assertFalse(callback.called) diff --git a/awx/lib/site-packages/celery/tests/contrib/test_rdb.py b/awx/lib/site-packages/celery/tests/contrib/test_rdb.py new file mode 100644 index 0000000000..ff50f03933 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/contrib/test_rdb.py @@ -0,0 +1,101 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import errno +import socket + +from mock import Mock, patch + +from 
celery.contrib.rdb import ( + Rdb, + debugger, + set_trace, +) +from celery.tests.utils import Case, WhateverIO, skip_if_pypy + + +class test_Rdb(Case): + + @patch('celery.contrib.rdb.Rdb') + def test_debugger(self, Rdb): + x = debugger() + self.assertTrue(x) + self.assertIs(x, debugger()) + + @patch('celery.contrib.rdb.debugger') + @patch('celery.contrib.rdb._frame') + def test_set_trace(self, _frame, debugger): + self.assertTrue(set_trace(Mock())) + self.assertTrue(set_trace()) + self.assertTrue(debugger.return_value.set_trace.called) + + @patch('celery.contrib.rdb.Rdb.get_avail_port') + @skip_if_pypy + def test_rdb(self, get_avail_port): + sock = Mock() + get_avail_port.return_value = (sock, 8000) + sock.accept.return_value = (Mock(), ['helu']) + out = WhateverIO() + rdb = Rdb(out=out) + self.assertTrue(get_avail_port.called) + self.assertIn('helu', out.getvalue()) + + # set_quit + with patch('sys.settrace') as settrace: + rdb.set_quit() + settrace.assert_called_with(None) + + # set_trace + with patch('celery.contrib.rdb.Pdb.set_trace') as pset: + with patch('celery.contrib.rdb._frame'): + rdb.set_trace() + rdb.set_trace(Mock()) + pset.side_effect = socket.error + pset.side_effect.errno = errno.ECONNRESET + rdb.set_trace() + pset.side_effect.errno = errno.ENOENT + with self.assertRaises(socket.error): + rdb.set_trace() + + # _close_session + rdb._close_session() + + # do_continue + rdb.set_continue = Mock() + rdb.do_continue(Mock()) + rdb.set_continue.assert_called_with() + + # do_quit + rdb.set_quit = Mock() + rdb.do_quit(Mock()) + rdb.set_quit.assert_called_with() + + @patch('socket.socket') + @skip_if_pypy + def test_get_avail_port(self, sock): + out = WhateverIO() + sock.return_value.accept.return_value = (Mock(), ['helu']) + Rdb(out=out) + + with patch('celery.contrib.rdb.current_process') as curproc: + curproc.return_value.name = 'PoolWorker-10' + Rdb(out=out) + + err = sock.return_value.bind.side_effect = socket.error() + err.errno = errno.ENOENT + with self.assertRaises(socket.error): + Rdb(out=out) + err.errno = errno.EADDRINUSE + with self.assertRaises(Exception): + Rdb(out=out) + called = [0] + + def effect(*a, **kw): + try: + if called[0] > 50: + return True + raise err + finally: + called[0] += 1 + sock.return_value.bind.side_effect = effect + Rdb(out=out) diff --git a/awx/lib/site-packages/celery/tests/events/__init__.py b/awx/lib/site-packages/celery/tests/events/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/celery/tests/events/test_cursesmon.py b/awx/lib/site-packages/celery/tests/events/test_cursesmon.py new file mode 100644 index 0000000000..e242fed951 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/events/test_cursesmon.py @@ -0,0 +1,72 @@ +from __future__ import absolute_import + +from nose import SkipTest + +from celery.tests.utils import Case + + +class MockWindow(object): + + def getmaxyx(self): + return self.y, self.x + + +class test_CursesDisplay(Case): + + def setUp(self): + try: + import curses # noqa + except ImportError: + raise SkipTest('curses monitor requires curses') + + from celery.events import cursesmon + self.monitor = cursesmon.CursesMonitor(object()) + self.win = MockWindow() + self.monitor.win = self.win + + def test_format_row_with_default_widths(self): + self.win.x, self.win.y = 91, 24 + row = self.monitor.format_row( + '783da208-77d0-40ca-b3d6-37dd6dbb55d3', + 'task.task.task.task.task.task.task.task.task.tas', + 'workerworkerworkerworkerworkerworkerworkerworker', + '21:13:20', + 
'SUCCESS') + self.assertEqual('783da208-77d0-40ca-b3d6-37dd6dbb55d3 ' + 'workerworker... task.task.[.]tas 21:13:20 SUCCESS ', + row) + + def test_format_row_with_truncated_uuid(self): + self.win.x, self.win.y = 80, 24 + row = self.monitor.format_row( + '783da208-77d0-40ca-b3d6-37dd6dbb55d3', + 'task.task.task.task.task.task.task.task.task.tas', + 'workerworkerworkerworkerworkerworkerworkerworker', + '21:13:20', + 'SUCCESS') + self.assertEqual('783da208-77d0-40ca-b3d... workerworker... ' + 'task.task.[.]tas 21:13:20 SUCCESS ', + row) + + def test_format_title_row(self): + self.win.x, self.win.y = 80, 24 + row = self.monitor.format_row('UUID', 'TASK', + 'WORKER', 'TIME', 'STATE') + self.assertEqual('UUID WORKER ' + 'TASK TIME STATE ', + row) + + def test_format_row_for_wide_screen_with_short_uuid(self): + self.win.x, self.win.y = 140, 24 + row = self.monitor.format_row( + '783da208-77d0-40ca-b3d6-37dd6dbb55d3', + 'task.task.task.task.task.task.task.task.task.tas', + 'workerworkerworkerworkerworkerworkerworkerworker', + '21:13:20', + 'SUCCESS') + self.assertEqual(136, len(row)) + self.assertEqual('783da208-77d0-40ca-b3d6-37dd6dbb55d3 ' + 'workerworkerworkerworkerworkerworker... ' + 'task.task.task.task.task.task.task.[.]tas ' + '21:13:20 SUCCESS ', + row) diff --git a/awx/lib/site-packages/celery/tests/events/test_events.py b/awx/lib/site-packages/celery/tests/events/test_events.py new file mode 100644 index 0000000000..332fc60bab --- /dev/null +++ b/awx/lib/site-packages/celery/tests/events/test_events.py @@ -0,0 +1,245 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import socket + +from mock import Mock + +from celery import Celery +from celery import events +from celery.tests.utils import AppCase + + +class MockProducer(object): + raise_on_publish = False + + def __init__(self, *args, **kwargs): + self.sent = [] + + def publish(self, msg, *args, **kwargs): + if self.raise_on_publish: + raise KeyError() + self.sent.append(msg) + + def close(self): + pass + + def has_event(self, kind): + for event in self.sent: + if event['type'] == kind: + return event + return False + + +class test_Event(AppCase): + + def test_constructor(self): + event = events.Event('world war II') + self.assertEqual(event['type'], 'world war II') + self.assertTrue(event['timestamp']) + + +class test_EventDispatcher(AppCase): + + def test_redis_uses_fanout_exchange(self): + with Celery(set_as_current=False) as app: + app.connection = Mock() + conn = app.connection.return_value = Mock() + conn.transport.driver_type = 'redis' + + dispatcher = app.events.Dispatcher(conn, enabled=False) + self.assertEqual(dispatcher.exchange.type, 'fanout') + + def test_others_use_topic_exchange(self): + with Celery(set_as_current=False) as app: + app.connection = Mock() + conn = app.connection.return_value = Mock() + conn.transport.driver_type = 'amqp' + dispatcher = app.events.Dispatcher(conn, enabled=False) + self.assertEqual(dispatcher.exchange.type, 'topic') + + def test_send(self): + producer = MockProducer() + producer.connection = self.app.connection() + connection = Mock() + connection.transport.driver_type = 'amqp' + eventer = self.app.events.Dispatcher(connection, enabled=False, + buffer_while_offline=False) + eventer.producer = producer + eventer.enabled = True + eventer.send('World War II', ended=True) + self.assertTrue(producer.has_event('World War II')) + eventer.enabled = False + eventer.send('World War III') + self.assertFalse(producer.has_event('World War III')) + + evs = ('Event 1', 
'Event 2', 'Event 3') + eventer.enabled = True + eventer.producer.raise_on_publish = True + eventer.buffer_while_offline = False + with self.assertRaises(KeyError): + eventer.send('Event X') + eventer.buffer_while_offline = True + for ev in evs: + eventer.send(ev) + eventer.producer.raise_on_publish = False + eventer.flush() + for ev in evs: + self.assertTrue(producer.has_event(ev)) + + buf = eventer._outbound_buffer = Mock() + buf.popleft.side_effect = IndexError() + eventer.flush() + + def test_enter_exit(self): + with self.app.connection() as conn: + d = self.app.events.Dispatcher(conn) + d.close = Mock() + with d as _d: + self.assertTrue(_d) + d.close.assert_called_with() + + def test_enable_disable_callbacks(self): + on_enable = Mock() + on_disable = Mock() + with self.app.connection() as conn: + with self.app.events.Dispatcher(conn, enabled=False) as d: + d.on_enabled.add(on_enable) + d.on_disabled.add(on_disable) + d.enable() + on_enable.assert_called_with() + d.disable() + on_disable.assert_called_with() + + def test_enabled_disable(self): + connection = self.app.connection() + channel = connection.channel() + try: + dispatcher = self.app.events.Dispatcher(connection, + enabled=True) + dispatcher2 = self.app.events.Dispatcher(connection, + enabled=True, + channel=channel) + self.assertTrue(dispatcher.enabled) + self.assertTrue(dispatcher.producer.channel) + self.assertEqual(dispatcher.producer.serializer, + self.app.conf.CELERY_EVENT_SERIALIZER) + + created_channel = dispatcher.producer.channel + dispatcher.disable() + dispatcher.disable() # Disable with no active producer + dispatcher2.disable() + self.assertFalse(dispatcher.enabled) + self.assertIsNone(dispatcher.producer) + self.assertFalse(dispatcher2.channel.closed, + 'does not close manually provided channel') + + dispatcher.enable() + self.assertTrue(dispatcher.enabled) + self.assertTrue(dispatcher.producer) + + # XXX test compat attribute + self.assertIs(dispatcher.publisher, dispatcher.producer) + prev, dispatcher.publisher = dispatcher.producer, 42 + try: + self.assertEqual(dispatcher.producer, 42) + finally: + dispatcher.producer = prev + finally: + channel.close() + connection.close() + self.assertTrue(created_channel.closed) + + +class test_EventReceiver(AppCase): + + def test_process(self): + + message = {'type': 'world-war'} + + got_event = [False] + + def my_handler(event): + got_event[0] = True + + connection = Mock() + connection.transport_cls = 'memory' + r = events.EventReceiver(connection, + handlers={'world-war': my_handler}, + node_id='celery.tests') + r._receive(message, object()) + self.assertTrue(got_event[0]) + + def test_catch_all_event(self): + + message = {'type': 'world-war'} + + got_event = [False] + + def my_handler(event): + got_event[0] = True + + connection = Mock() + connection.transport_cls = 'memory' + r = events.EventReceiver(connection, node_id='celery.tests') + events.EventReceiver.handlers['*'] = my_handler + try: + r._receive(message, object()) + self.assertTrue(got_event[0]) + finally: + events.EventReceiver.handlers = {} + + def test_itercapture(self): + connection = self.app.connection() + try: + r = self.app.events.Receiver(connection, node_id='celery.tests') + it = r.itercapture(timeout=0.0001, wakeup=False) + consumer = it.next() + self.assertTrue(consumer.queues) + self.assertEqual(consumer.callbacks[0], r._receive) + + with self.assertRaises(socket.timeout): + it.next() + + with self.assertRaises(socket.timeout): + r.capture(timeout=0.00001) + finally: + connection.close() + + def 
test_itercapture_limit(self): + connection = self.app.connection() + channel = connection.channel() + try: + events_received = [0] + + def handler(event): + events_received[0] += 1 + + producer = self.app.events.Dispatcher(connection, + enabled=True, + channel=channel) + r = self.app.events.Receiver(connection, + handlers={'*': handler}, + node_id='celery.tests') + evs = ['ev1', 'ev2', 'ev3', 'ev4', 'ev5'] + for ev in evs: + producer.send(ev) + it = r.itercapture(limit=4, wakeup=True) + it.next() # skip consumer (see itercapture) + list(it) + self.assertEqual(events_received[0], 4) + finally: + channel.close() + connection.close() + + +class test_misc(AppCase): + + def test_State(self): + state = self.app.events.State() + self.assertDictEqual(dict(state.workers), {}) + + def test_default_dispatcher(self): + with self.app.events.default_dispatcher() as d: + self.assertTrue(d) + self.assertTrue(d.connection) diff --git a/awx/lib/site-packages/celery/tests/events/test_snapshot.py b/awx/lib/site-packages/celery/tests/events/test_snapshot.py new file mode 100644 index 0000000000..ecbe77213e --- /dev/null +++ b/awx/lib/site-packages/celery/tests/events/test_snapshot.py @@ -0,0 +1,138 @@ +from __future__ import absolute_import +from __future__ import with_statement + +from mock import patch + +from celery.app import app_or_default +from celery.events import Events +from celery.events.snapshot import Polaroid, evcam +from celery.tests.utils import Case + + +class TRef(object): + active = True + called = False + + def __call__(self): + self.called = True + + def cancel(self): + self.active = False + + +class MockTimer(object): + installed = [] + + def apply_interval(self, msecs, fun, *args, **kwargs): + self.installed.append(fun) + return TRef() +timer = MockTimer() + + +class test_Polaroid(Case): + + def setUp(self): + self.app = app_or_default() + self.state = self.app.events.State() + + def test_constructor(self): + x = Polaroid(self.state, app=self.app) + self.assertIs(x.app, self.app) + self.assertIs(x.state, self.state) + self.assertTrue(x.freq) + self.assertTrue(x.cleanup_freq) + self.assertTrue(x.logger) + self.assertFalse(x.maxrate) + + def test_install_timers(self): + x = Polaroid(self.state, app=self.app) + x.timer = timer + x.__exit__() + x.__enter__() + self.assertIn(x.capture, MockTimer.installed) + self.assertIn(x.cleanup, MockTimer.installed) + self.assertTrue(x._tref.active) + self.assertTrue(x._ctref.active) + x.__exit__() + self.assertFalse(x._tref.active) + self.assertFalse(x._ctref.active) + self.assertTrue(x._tref.called) + self.assertFalse(x._ctref.called) + + def test_cleanup(self): + x = Polaroid(self.state, app=self.app) + cleanup_signal_sent = [False] + + def handler(**kwargs): + cleanup_signal_sent[0] = True + + x.cleanup_signal.connect(handler) + x.cleanup() + self.assertTrue(cleanup_signal_sent[0]) + + def test_shutter__capture(self): + x = Polaroid(self.state, app=self.app) + shutter_signal_sent = [False] + + def handler(**kwargs): + shutter_signal_sent[0] = True + + x.shutter_signal.connect(handler) + x.shutter() + self.assertTrue(shutter_signal_sent[0]) + + shutter_signal_sent[0] = False + x.capture() + self.assertTrue(shutter_signal_sent[0]) + + def test_shutter_maxrate(self): + x = Polaroid(self.state, app=self.app, maxrate='1/h') + shutter_signal_sent = [0] + + def handler(**kwargs): + shutter_signal_sent[0] += 1 + + x.shutter_signal.connect(handler) + for i in range(30): + x.shutter() + x.shutter() + x.shutter() + self.assertEqual(shutter_signal_sent[0], 1) + + 
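+# evcam() is the event-camera entry point (this is what the
+# `celeryev --camera` command runs): it wires an event Receiver to a
+# Polaroid subclass so that cluster state is snapshotted on a timer.
+# MockEvents/MockReceiver below stand in for the real receiver so the
+# tests never open a broker connection.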
+class test_evcam(Case): + + class MockReceiver(object): + raise_keyboard_interrupt = False + + def capture(self, **kwargs): + if self.__class__.raise_keyboard_interrupt: + raise KeyboardInterrupt() + + class MockEvents(Events): + + def Receiver(self, *args, **kwargs): + return test_evcam.MockReceiver() + + def setUp(self): + self.app = app_or_default() + self.prev, self.app.events = self.app.events, self.MockEvents() + self.app.events.app = self.app + + def tearDown(self): + self.app.events = self.prev + + def test_evcam(self): + evcam(Polaroid, timer=timer) + evcam(Polaroid, timer=timer, loglevel='CRITICAL') + self.MockReceiver.raise_keyboard_interrupt = True + try: + with self.assertRaises(SystemExit): + evcam(Polaroid, timer=timer) + finally: + self.MockReceiver.raise_keyboard_interrupt = False + + @patch('celery.platforms.create_pidlock') + def test_evcam_pidfile(self, create_pidlock): + evcam(Polaroid, timer=timer, pidfile='/var/pid') + create_pidlock.assert_called_with('/var/pid') diff --git a/awx/lib/site-packages/celery/tests/events/test_state.py b/awx/lib/site-packages/celery/tests/events/test_state.py new file mode 100644 index 0000000000..18bdcbfb78 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/events/test_state.py @@ -0,0 +1,347 @@ +from __future__ import absolute_import + +from time import time + +from itertools import count + +from celery import states +from celery.events import Event +from celery.events.state import State, Worker, Task, HEARTBEAT_EXPIRE_WINDOW +from celery.utils import uuid +from celery.tests.utils import Case + + +class replay(object): + + def __init__(self, state): + self.state = state + self.rewind() + self.setup() + + def setup(self): + pass + + def __iter__(self): + return self + + def __next__(self): + try: + self.state.event(self.events[self.position()]) + except IndexError: + raise StopIteration() + next = __next__ + + def rewind(self): + self.position = count(0).next + return self + + def play(self): + for _ in self: + pass + + +class ev_worker_online_offline(replay): + + def setup(self): + self.events = [ + Event('worker-online', hostname='utest1'), + Event('worker-offline', hostname='utest1'), + ] + + +class ev_worker_heartbeats(replay): + + def setup(self): + self.events = [ + Event('worker-heartbeat', hostname='utest1', + timestamp=time() - HEARTBEAT_EXPIRE_WINDOW * 2), + Event('worker-heartbeat', hostname='utest1'), + ] + + +class ev_task_states(replay): + + def setup(self): + tid = self.tid = uuid() + self.events = [ + Event('task-received', uuid=tid, name='task1', + args='(2, 2)', kwargs="{'foo': 'bar'}", + retries=0, eta=None, hostname='utest1'), + Event('task-started', uuid=tid, hostname='utest1'), + Event('task-revoked', uuid=tid, hostname='utest1'), + Event('task-retried', uuid=tid, exception="KeyError('bar')", + traceback='line 2 at main', hostname='utest1'), + Event('task-failed', uuid=tid, exception="KeyError('foo')", + traceback='line 1 at main', hostname='utest1'), + Event('task-succeeded', uuid=tid, result='4', + runtime=0.1234, hostname='utest1'), + ] + + +class ev_snapshot(replay): + + def setup(self): + self.events = [ + Event('worker-online', hostname='utest1'), + Event('worker-online', hostname='utest2'), + Event('worker-online', hostname='utest3'), + ] + for i in range(20): + worker = not i % 2 and 'utest2' or 'utest1' + type = not i % 2 and 'task2' or 'task1' + self.events.append(Event('task-received', name=type, + uuid=uuid(), hostname=worker)) + + +class test_Worker(Case): + + def 
test_survives_missing_timestamp(self): + worker = Worker(hostname='foo') + worker.on_heartbeat(timestamp=None) + self.assertEqual(worker.heartbeats, []) + + def test_repr(self): + self.assertTrue(repr(Worker(hostname='foo'))) + + +class test_Task(Case): + + def test_info(self): + task = Task(uuid='abcdefg', + name='tasks.add', + args='(2, 2)', + kwargs='{}', + retries=2, + result=42, + eta=1, + runtime=0.0001, + expires=1, + exception=1, + received=time() - 10, + started=time() - 8, + exchange='celery', + routing_key='celery', + succeeded=time()) + self.assertEqual(sorted(list(task._info_fields)), + sorted(task.info().keys())) + + self.assertEqual(sorted(list(task._info_fields + ('received', ))), + sorted(task.info(extra=('received', )))) + + self.assertEqual(sorted(['args', 'kwargs']), + sorted(task.info(['args', 'kwargs']).keys())) + + def test_ready(self): + task = Task(uuid='abcdefg', + name='tasks.add') + task.on_received(timestamp=time()) + self.assertFalse(task.ready) + task.on_succeeded(timestamp=time()) + self.assertTrue(task.ready) + + def test_sent(self): + task = Task(uuid='abcdefg', + name='tasks.add') + task.on_sent(timestamp=time()) + self.assertEqual(task.state, states.PENDING) + + def test_merge(self): + task = Task() + task.on_failed(timestamp=time()) + task.on_started(timestamp=time()) + task.on_received(timestamp=time(), name='tasks.add', args=(2, 2)) + self.assertEqual(task.state, states.FAILURE) + self.assertEqual(task.name, 'tasks.add') + self.assertTupleEqual(task.args, (2, 2)) + task.on_retried(timestamp=time()) + self.assertEqual(task.state, states.RETRY) + + def test_repr(self): + self.assertTrue(repr(Task(uuid='xxx', name='tasks.add'))) + + +class test_State(Case): + + def test_repr(self): + self.assertTrue(repr(State())) + + def test_worker_online_offline(self): + r = ev_worker_online_offline(State()) + r.next() + self.assertTrue(r.state.alive_workers()) + self.assertTrue(r.state.workers['utest1'].alive) + r.play() + self.assertFalse(r.state.alive_workers()) + self.assertFalse(r.state.workers['utest1'].alive) + + def test_itertasks(self): + s = State() + s.tasks = {'a': 'a', 'b': 'b', 'c': 'c', 'd': 'd'} + self.assertEqual(len(list(s.itertasks(limit=2))), 2) + + def test_worker_heartbeat_expire(self): + r = ev_worker_heartbeats(State()) + r.next() + self.assertFalse(r.state.alive_workers()) + self.assertFalse(r.state.workers['utest1'].alive) + r.play() + self.assertTrue(r.state.alive_workers()) + self.assertTrue(r.state.workers['utest1'].alive) + + def test_task_states(self): + r = ev_task_states(State()) + + # RECEIVED + r.next() + self.assertTrue(r.tid in r.state.tasks) + task = r.state.tasks[r.tid] + self.assertEqual(task.state, states.RECEIVED) + self.assertTrue(task.received) + self.assertEqual(task.timestamp, task.received) + self.assertEqual(task.worker.hostname, 'utest1') + + # STARTED + r.next() + self.assertTrue(r.state.workers['utest1'].alive, + 'any task event adds worker heartbeat') + self.assertEqual(task.state, states.STARTED) + self.assertTrue(task.started) + self.assertEqual(task.timestamp, task.started) + self.assertEqual(task.worker.hostname, 'utest1') + + # REVOKED + r.next() + self.assertEqual(task.state, states.REVOKED) + self.assertTrue(task.revoked) + self.assertEqual(task.timestamp, task.revoked) + self.assertEqual(task.worker.hostname, 'utest1') + + # RETRY + r.next() + self.assertEqual(task.state, states.RETRY) + self.assertTrue(task.retried) + self.assertEqual(task.timestamp, task.retried) + self.assertEqual(task.worker.hostname, 
'utest1') + self.assertEqual(task.exception, "KeyError('bar')") + self.assertEqual(task.traceback, 'line 2 at main') + + # FAILURE + r.next() + self.assertEqual(task.state, states.FAILURE) + self.assertTrue(task.failed) + self.assertEqual(task.timestamp, task.failed) + self.assertEqual(task.worker.hostname, 'utest1') + self.assertEqual(task.exception, "KeyError('foo')") + self.assertEqual(task.traceback, 'line 1 at main') + + # SUCCESS + r.next() + self.assertEqual(task.state, states.SUCCESS) + self.assertTrue(task.succeeded) + self.assertEqual(task.timestamp, task.succeeded) + self.assertEqual(task.worker.hostname, 'utest1') + self.assertEqual(task.result, '4') + self.assertEqual(task.runtime, 0.1234) + + def assertStateEmpty(self, state): + self.assertFalse(state.tasks) + self.assertFalse(state.workers) + self.assertFalse(state.event_count) + self.assertFalse(state.task_count) + + def assertState(self, state): + self.assertTrue(state.tasks) + self.assertTrue(state.workers) + self.assertTrue(state.event_count) + self.assertTrue(state.task_count) + + def test_freeze_while(self): + s = State() + r = ev_snapshot(s) + r.play() + + def work(): + pass + + s.freeze_while(work, clear_after=True) + self.assertFalse(s.event_count) + + s2 = State() + r = ev_snapshot(s2) + r.play() + s2.freeze_while(work, clear_after=False) + self.assertTrue(s2.event_count) + + def test_clear_tasks(self): + s = State() + r = ev_snapshot(s) + r.play() + self.assertTrue(s.tasks) + s.clear_tasks(ready=False) + self.assertFalse(s.tasks) + + def test_clear(self): + r = ev_snapshot(State()) + r.play() + self.assertTrue(r.state.event_count) + self.assertTrue(r.state.workers) + self.assertTrue(r.state.tasks) + self.assertTrue(r.state.task_count) + + r.state.clear() + self.assertFalse(r.state.event_count) + self.assertFalse(r.state.workers) + self.assertTrue(r.state.tasks) + self.assertFalse(r.state.task_count) + + r.state.clear(False) + self.assertFalse(r.state.tasks) + + def test_task_types(self): + r = ev_snapshot(State()) + r.play() + self.assertEqual(sorted(r.state.task_types()), ['task1', 'task2']) + + def test_tasks_by_timestamp(self): + r = ev_snapshot(State()) + r.play() + self.assertEqual(len(r.state.tasks_by_timestamp()), 20) + + def test_tasks_by_type(self): + r = ev_snapshot(State()) + r.play() + self.assertEqual(len(r.state.tasks_by_type('task1')), 10) + self.assertEqual(len(r.state.tasks_by_type('task2')), 10) + + def test_alive_workers(self): + r = ev_snapshot(State()) + r.play() + self.assertEqual(len(r.state.alive_workers()), 3) + + def test_tasks_by_worker(self): + r = ev_snapshot(State()) + r.play() + self.assertEqual(len(r.state.tasks_by_worker('utest1')), 10) + self.assertEqual(len(r.state.tasks_by_worker('utest2')), 10) + + def test_survives_unknown_worker_event(self): + s = State() + s.worker_event('worker-unknown-event-xxx', {'foo': 'bar'}) + s.worker_event('worker-unknown-event-xxx', {'hostname': 'xxx', + 'foo': 'bar'}) + + def test_survives_unknown_task_event(self): + s = State() + s.task_event('task-unknown-event-xxx', {'foo': 'bar', + 'uuid': 'x', + 'hostname': 'y'}) + + def test_callback(self): + scratch = {} + + def callback(state, event): + scratch['recv'] = True + + s = State(callback=callback) + s.event({'type': 'worker-online'}) + self.assertTrue(scratch.get('recv')) diff --git a/awx/lib/site-packages/celery/tests/functional/__init__.py b/awx/lib/site-packages/celery/tests/functional/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git 
a/awx/lib/site-packages/celery/tests/functional/case.py b/awx/lib/site-packages/celery/tests/functional/case.py new file mode 100644 index 0000000000..a11fbc5413 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/functional/case.py @@ -0,0 +1,174 @@ +from __future__ import absolute_import + +import atexit +import logging +import os +import signal +import socket +import sys +import traceback + +from itertools import count +from time import time + +from celery.exceptions import TimeoutError +from celery.task.control import ping, flatten_reply, inspect +from celery.utils.imports import qualname + +from celery.tests.utils import Case + +HOSTNAME = socket.gethostname() + + +def say(msg): + sys.stderr.write('%s\n' % msg) + + +def try_while(fun, reason='Timed out', timeout=10, interval=0.5): + time_start = time() + for iterations in count(0): + if time() - time_start >= timeout: + raise TimeoutError() + ret = fun() + if ret: + return ret + + +class Worker(object): + started = False + next_worker_id = count(1).next + _shutdown_called = False + + def __init__(self, hostname, loglevel='error'): + self.hostname = hostname + self.loglevel = loglevel + + def start(self): + if not self.started: + self._fork_and_exec() + self.started = True + + def _fork_and_exec(self): + pid = os.fork() + if pid == 0: + from celery import current_app + current_app.worker_main(['celeryd', '--loglevel=INFO', + '-n', self.hostname, + '-P', 'solo']) + os._exit(0) + self.pid = pid + + def is_alive(self, timeout=1): + r = ping(destination=[self.hostname], + timeout=timeout) + return self.hostname in flatten_reply(r) + + def wait_until_started(self, timeout=10, interval=0.5): + try_while( + lambda: self.is_alive(interval), + "Worker won't start (after %s secs.)" % timeout, + interval=interval, timeout=timeout, + ) + say('--WORKER %s IS ONLINE--' % self.hostname) + + def ensure_shutdown(self, timeout=10, interval=0.5): + os.kill(self.pid, signal.SIGTERM) + try_while( + lambda: not self.is_alive(interval), + "Worker won't shutdown (after %s secs.)" % timeout, + timeout=10, interval=0.5, + ) + say('--WORKER %s IS SHUTDOWN--' % self.hostname) + self._shutdown_called = True + + def ensure_started(self): + self.start() + self.wait_until_started() + + @classmethod + def managed(cls, hostname=None, caller=None): + hostname = hostname or socket.gethostname() + if caller: + hostname = '.'.join([qualname(caller), hostname]) + else: + hostname += str(cls.next_worker_id()) + worker = cls(hostname) + worker.ensure_started() + stack = traceback.format_stack() + + @atexit.register + def _ensure_shutdown_once(): + if not worker._shutdown_called: + say('-- Found worker not stopped at shutdown: %s\n%s' % ( + worker.hostname, + '\n'.join(stack))) + worker.ensure_shutdown() + + return worker + + +class WorkerCase(Case): + hostname = HOSTNAME + worker = None + + @classmethod + def setUpClass(cls): + logging.getLogger('amqp').setLevel(logging.ERROR) + cls.worker = Worker.managed(cls.hostname, caller=cls) + + @classmethod + def tearDownClass(cls): + cls.worker.ensure_shutdown() + + def assertWorkerAlive(self, timeout=1): + self.assertTrue(self.worker.is_alive) + + def inspect(self, timeout=1): + return inspect([self.worker.hostname], timeout=timeout) + + def my_response(self, response): + return flatten_reply(response)[self.worker.hostname] + + def is_accepted(self, task_id, interval=0.5): + active = self.inspect(timeout=interval).active() + if active: + for task in active[self.worker.hostname]: + if task['id'] == task_id: + return True + return 
False + + def is_reserved(self, task_id, interval=0.5): + reserved = self.inspect(timeout=interval).reserved() + if reserved: + for task in reserved[self.worker.hostname]: + if task['id'] == task_id: + return True + return False + + def is_scheduled(self, task_id, interval=0.5): + schedule = self.inspect(timeout=interval).scheduled() + if schedule: + for item in schedule[self.worker.hostname]: + if item['request']['id'] == task_id: + return True + return False + + def is_received(self, task_id, interval=0.5): + return (self.is_reserved(task_id, interval) or + self.is_scheduled(task_id, interval) or + self.is_accepted(task_id, interval)) + + def ensure_accepted(self, task_id, interval=0.5, timeout=10): + return try_while(lambda: self.is_accepted(task_id, interval), + 'Task not accepted within timeout', + interval=0.5, timeout=10) + + def ensure_received(self, task_id, interval=0.5, timeout=10): + return try_while(lambda: self.is_received(task_id, interval), + 'Task not receied within timeout', + interval=0.5, timeout=10) + + def ensure_scheduled(self, task_id, interval=0.5, timeout=10): + return try_while(lambda: self.is_scheduled(task_id, interval), + 'Task not scheduled within timeout', + interval=0.5, timeout=10) diff --git a/awx/lib/site-packages/celery/tests/functional/tasks.py b/awx/lib/site-packages/celery/tests/functional/tasks.py new file mode 100644 index 0000000000..b094667057 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/functional/tasks.py @@ -0,0 +1,24 @@ +from __future__ import absolute_import + +import time + +from celery import task, subtask + + +@task() +def add(x, y): + return x + y + + +@task() +def add_cb(x, y, callback=None): + result = x + y + if callback: + return subtask(callback).apply_async(result) + return result + + +@task() +def sleeptask(i): + time.sleep(i) + return i diff --git a/awx/lib/site-packages/celery/tests/security/__init__.py b/awx/lib/site-packages/celery/tests/security/__init__.py new file mode 100644 index 0000000000..6aa1c2760b --- /dev/null +++ b/awx/lib/site-packages/celery/tests/security/__init__.py @@ -0,0 +1,61 @@ +from __future__ import absolute_import + +KEY1 = """-----BEGIN RSA PRIVATE KEY----- +MIICXgIBAAKBgQDCsmLC+eqL4z6bhtv0nzbcnNXuQrZUoh827jGfDI3kxNZ2LbEy +kJOn7GIl2tPpcY2Dm1sOM8G1XLm/8Izprp4ifpF4Gi0mqz0GquY5dcMNASG9zkRO +J1z8dQUyp3PIUHdQdrKbYQVifkA4dh6Kg27k8/IcdY1lHsaIju4bX7MADwIDAQAB +AoGBAKWpCRWjdiluwu+skO0Up6aRIAop42AhzfN8OuZ81SMJRP2rJTHECI8COATD +rDneb63Ce3ibG0BI1Jf3gr624D806xVqK/SVHZNbfWx0daE3Q43DDk1UdhRF5+0X +HPqqU/IdeW1YGyWJi+IhMTXyGqhZ1BTN+4vHL7NlRpDt6JOpAkEA+xvfRO4Ca7Lw +NEgvW7n+/L9b+xygQBtOA5s260pO+8jMrXvOdCjISaKHD8HZGFN9oUmLsDXXBhjh +j0WCMdsHbQJBAMZ9OIw6M/Uxv5ANPCD58p6PZTb0knXVPMYBFQ7Y/h2HZzqbEyiI +DLGZpAa9/IhVkoCULd/TNytz5rl27KEni+sCQArFQEdZmhr6etkTO4zIpoo6vvw/ +VxRI14jKEIn5Dvg3vae3RryuvyCBax+e5evoMNxJJkexl354dLxLc/ElfuUCQQCq +U14pBvD7ITuPM6w7aAEIi2iBZhIgR2GlT9xwJ0i4si6lHdms2EJ8TKlyl6mSnEvh +RkavYSJgiU6eLC0WhUcNAkEA7vuNcz/uuckmq870qfSzUQJIYLzwVOadEdEEAVy0 +L0usztlKmAH8U/ceQMMJLMI9W4m680JrMf3iS7f+SkgUTA== +-----END RSA PRIVATE KEY-----""" + +KEY2 = """-----BEGIN RSA PRIVATE KEY----- +MIICXQIBAAKBgQDH22L8b9AmST9ABDmQTQ2DWMdDmK5YXZt4AIY81IcsTQ/ccM0C +fwXEP9tdkYwtcxMCWdASwY5pfMy9vFp0hyrRQMSNfuoxAgONuNWPyQoIvY3ZXRe6 +rS+hb/LN4+vdjX+oxmYiQ2HmSB9rh2bepE6Cw+RLJr5sXXq+xZJ+BLt5tQIDAQAB +AoGBAMGBO0Arip/nP6Rd8tYypKjN5nEefX/1cjgoWdC//fj4zCil1vlZv12abm0U +JWNEDd2y0/G1Eow0V5BFtFcrIFowU44LZEiSf7sKXlNHRHlbZmDgNXFZOt7nVbHn +6SN+oCYjaPjji8idYeb3VQXPtqMoMn73MuyxD3k3tWmVLonpAkEA6hsu62qhUk5k 
+Nt88UZOauU1YizxsWvT0bHioaceE4TEsbO3NZs7dmdJIcRFcU787lANaaIq7Rw26 +qcumME9XhwJBANqMOzsYQ6BX54UzS6x99Jjlq9MEbTCbAEZr/yjopb9f617SwfuE +AEKnIq3HL6/Tnhv3V8Zy3wYHgDoGNeTVe+MCQQDi/nyeNAQ8RFqTgh2Ak/jAmCi0 +yV/fSgj+bHgQKS/FEuMas/IoL4lbrzQivkyhv5lLSX0ORQaWPM+z+A0qZqRdAkBh +XE+Wx/x4ljCh+nQf6AzrgIXHgBVUrfi1Zq9Jfjs4wnaMy793WRr0lpiwaigoYFHz +i4Ei+1G30eeh8dpYk3KZAkB0ucTOsQynDlL5rLGYZ+IcfSfH3w2l5EszY47kKQG9 +Fxeq/HOp9JYw4gRu6Ycvqu57KHwpHhR0FCXRBxuYcJ5V +-----END RSA PRIVATE KEY-----""" + +CERT1 = """-----BEGIN CERTIFICATE----- +MIICATCCAWoCCQCR6B3XQcBOvjANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJB +VTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0 +cyBQdHkgTHRkMB4XDTExMDcxOTA5MDgyMloXDTEyMDcxODA5MDgyMlowRTELMAkG +A1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoMGEludGVybmV0 +IFdpZGdpdHMgUHR5IEx0ZDCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAwrJi +wvnqi+M+m4bb9J823JzV7kK2VKIfNu4xnwyN5MTWdi2xMpCTp+xiJdrT6XGNg5tb +DjPBtVy5v/CM6a6eIn6ReBotJqs9BqrmOXXDDQEhvc5ETidc/HUFMqdzyFB3UHay +m2EFYn5AOHYeioNu5PPyHHWNZR7GiI7uG1+zAA8CAwEAATANBgkqhkiG9w0BAQUF +AAOBgQA4+OiJ+pyq9lbEMFYC9K2+e77noHJkwUOs4wO6p1R14ZqSmoIszQ7KEBiH +2HHPMUY6kt4GL1aX4Vr1pUlXXdH5WaEk0fvDYZemILDMqIQJ9ettx8KihZjFGC4k +Y4Sy5xmqdE9Kjjd854gTRRnzpMnJp6+74Ki2X8GHxn3YBM+9Ng== +-----END CERTIFICATE-----""" + +CERT2 = """-----BEGIN CERTIFICATE----- +MIICATCCAWoCCQCV/9A2ZBM37TANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJB +VTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0 +cyBQdHkgTHRkMB4XDTExMDcxOTA5MDkwMloXDTEyMDcxODA5MDkwMlowRTELMAkG +A1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoMGEludGVybmV0 +IFdpZGdpdHMgUHR5IEx0ZDCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAx9ti +/G/QJkk/QAQ5kE0Ng1jHQ5iuWF2beACGPNSHLE0P3HDNAn8FxD/bXZGMLXMTAlnQ +EsGOaXzMvbxadIcq0UDEjX7qMQIDjbjVj8kKCL2N2V0Xuq0voW/yzePr3Y1/qMZm +IkNh5kgfa4dm3qROgsPkSya+bF16vsWSfgS7ebUCAwEAATANBgkqhkiG9w0BAQUF +AAOBgQBzaZ5vBkzksPhnWb2oobuy6Ne/LMEtdQ//qeVY4sKl2tOJUCSdWRen9fqP +e+zYdEdkFCd8rp568Eiwkq/553uy4rlE927/AEqs/+KGYmAtibk/9vmi+/+iZXyS +WWZybzzDZFncq1/N1C3Y/hrCBNDFO4TsnTLAhWtZ4c0vDAiacw== +-----END CERTIFICATE-----""" diff --git a/awx/lib/site-packages/celery/tests/security/case.py b/awx/lib/site-packages/celery/tests/security/case.py new file mode 100644 index 0000000000..5d2828a794 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/security/case.py @@ -0,0 +1,14 @@ +from __future__ import absolute_import + +from nose import SkipTest + +from celery.tests.utils import Case + + +class SecurityCase(Case): + + def setUp(self): + try: + from OpenSSL import crypto # noqa + except ImportError: + raise SkipTest('OpenSSL.crypto not installed') diff --git a/awx/lib/site-packages/celery/tests/security/test_certificate.py b/awx/lib/site-packages/celery/tests/security/test_certificate.py new file mode 100644 index 0000000000..5708ff1b49 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/security/test_certificate.py @@ -0,0 +1,80 @@ +from __future__ import absolute_import +from __future__ import with_statement + +from celery.exceptions import SecurityError +from celery.security.certificate import Certificate, CertStore, FSCertStore + +from mock import Mock, patch + +from . 
import CERT1, CERT2, KEY1 +from .case import SecurityCase + +from celery.tests.utils import mock_open + + +class test_Certificate(SecurityCase): + + def test_valid_certificate(self): + Certificate(CERT1) + Certificate(CERT2) + + def test_invalid_certificate(self): + self.assertRaises(TypeError, Certificate, None) + self.assertRaises(SecurityError, Certificate, '') + self.assertRaises(SecurityError, Certificate, 'foo') + self.assertRaises(SecurityError, Certificate, CERT1[:20] + CERT1[21:]) + self.assertRaises(SecurityError, Certificate, KEY1) + + def test_has_expired(self): + self.assertTrue(Certificate(CERT1).has_expired()) + + +class test_CertStore(SecurityCase): + + def test_itercerts(self): + cert1 = Certificate(CERT1) + cert2 = Certificate(CERT2) + certstore = CertStore() + for c in certstore.itercerts(): + self.assertTrue(False) + certstore.add_cert(cert1) + certstore.add_cert(cert2) + for c in certstore.itercerts(): + self.assertIn(c, (cert1, cert2)) + + def test_duplicate(self): + cert1 = Certificate(CERT1) + certstore = CertStore() + certstore.add_cert(cert1) + self.assertRaises(SecurityError, certstore.add_cert, cert1) + + +class test_FSCertStore(SecurityCase): + + @patch('os.path.isdir') + @patch('glob.glob') + @patch('celery.security.certificate.Certificate') + def test_init(self, Certificate, glob, isdir): + cert = Certificate.return_value = Mock() + cert.has_expired.return_value = False + isdir.return_value = True + glob.return_value = ['foo.cert'] + with mock_open(): + cert.get_id.return_value = 1 + x = FSCertStore('/var/certs') + self.assertIn(1, x._certs) + glob.assert_called_with('/var/certs/*') + + # they both end up with the same id + glob.return_value = ['foo.cert', 'bar.cert'] + with self.assertRaises(SecurityError): + x = FSCertStore('/var/certs') + glob.return_value = ['foo.cert'] + + cert.has_expired.return_value = True + with self.assertRaises(SecurityError): + x = FSCertStore('/var/certs') + + isdir.return_value = False + with self.assertRaises(SecurityError): + x = FSCertStore('/var/certs') diff --git a/awx/lib/site-packages/celery/tests/security/test_key.py b/awx/lib/site-packages/celery/tests/security/test_key.py new file mode 100644 index 0000000000..9e286e98b0 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/security/test_key.py @@ -0,0 +1,26 @@ +from __future__ import absolute_import + +from celery.exceptions import SecurityError +from celery.security.key import PrivateKey + +from . import CERT1, KEY1, KEY2 +from .case import SecurityCase + + +class test_PrivateKey(SecurityCase): + + def test_valid_private_key(self): + PrivateKey(KEY1) + PrivateKey(KEY2) + + def test_invalid_private_key(self): + self.assertRaises(TypeError, PrivateKey, None) + self.assertRaises(SecurityError, PrivateKey, '') + self.assertRaises(SecurityError, PrivateKey, 'foo') + self.assertRaises(SecurityError, PrivateKey, KEY1[:20] + KEY1[21:]) + self.assertRaises(SecurityError, PrivateKey, CERT1) + + def test_sign(self): + pkey = PrivateKey(KEY1) + pkey.sign('test', 'sha1') + self.assertRaises(ValueError, pkey.sign, 'test', 'unknown') diff --git a/awx/lib/site-packages/celery/tests/security/test_security.py b/awx/lib/site-packages/celery/tests/security/test_security.py new file mode 100644 index 0000000000..afe840c4d2 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/security/test_security.py @@ -0,0 +1,98 @@ +""" +Keys and certificates for tests (KEY1 is a private key of CERT1, etc.) + +Generated with: + +.. 
code-block:: bash
+
+    $ openssl genrsa -des3 -passout pass:test -out key1.key 1024
+    $ openssl req -new -key key1.key -out key1.csr -passin pass:test
+    $ cp key1.key key1.key.org
+    $ openssl rsa -in key1.key.org -out key1.key -passin pass:test
+    $ openssl x509 -req -days 365 -in key1.csr \
+          -signkey key1.key -out cert1.crt
+    $ rm key1.key.org key1.csr
+
+"""
+from __future__ import absolute_import
+from __future__ import with_statement
+
+import __builtin__
+
+from mock import Mock, patch
+
+from celery import current_app
+from celery.exceptions import ImproperlyConfigured
+from celery.security import setup_security, disable_untrusted_serializers
+from kombu.serialization import registry
+
+from .case import SecurityCase
+
+from celery.tests.utils import mock_open
+
+
+class test_security(SecurityCase):
+
+    def tearDown(self):
+        registry._disabled_content_types.clear()
+
+    def test_disable_untrusted_serializers(self):
+        disabled = registry._disabled_content_types
+        self.assertEqual(0, len(disabled))
+
+        disable_untrusted_serializers(
+            ['application/json', 'application/x-python-serialize'])
+        self.assertIn('application/x-yaml', disabled)
+        self.assertNotIn('application/json', disabled)
+        self.assertNotIn('application/x-python-serialize', disabled)
+        disabled.clear()
+
+        disable_untrusted_serializers()
+        self.assertIn('application/x-yaml', disabled)
+        self.assertIn('application/json', disabled)
+        self.assertIn('application/x-python-serialize', disabled)
+
+    def test_setup_security(self):
+        disabled = registry._disabled_content_types
+        self.assertEqual(0, len(disabled))
+
+        current_app.conf.CELERY_TASK_SERIALIZER = 'json'
+
+        setup_security()
+        self.assertIn('application/x-python-serialize', disabled)
+        disabled.clear()
+
+    @patch('celery.security.register_auth')
+    @patch('celery.security.disable_untrusted_serializers')
+    def test_setup_registry_complete(self, dis, reg, key='KEY', cert='CERT'):
+        calls = [0]
+
+        def effect(*args):
+            try:
+                m = Mock()
+                m.read.return_value = 'B' if calls[0] else 'A'
+                return m
+            finally:
+                calls[0] += 1
+
+        with mock_open(side_effect=effect):
+            store = Mock()
+            setup_security(['json'], key, cert, store)
+            dis.assert_called_with(['json'])
+            reg.assert_called_with('A', 'B', store)
+
+    def test_security_conf(self):
+        current_app.conf.CELERY_TASK_SERIALIZER = 'auth'
+
+        self.assertRaises(ImproperlyConfigured, setup_security)
+
+        _import = __builtin__.__import__
+
+        def import_hook(name, *args, **kwargs):
+            if name == 'OpenSSL':
+                raise ImportError
+            return _import(name, *args, **kwargs)
+
+        __builtin__.__import__ = import_hook
+        self.assertRaises(ImproperlyConfigured, setup_security)
+        __builtin__.__import__ = _import
diff --git a/awx/lib/site-packages/celery/tests/security/test_serialization.py b/awx/lib/site-packages/celery/tests/security/test_serialization.py
new file mode 100644
index 0000000000..3f0704d077
--- /dev/null
+++ b/awx/lib/site-packages/celery/tests/security/test_serialization.py
@@ -0,0 +1,55 @@
+from __future__ import absolute_import
+
+from celery.exceptions import SecurityError
+
+from celery.security.serialization import SecureSerializer, register_auth
+from celery.security.certificate import Certificate, CertStore
+from celery.security.key import PrivateKey
+from kombu.serialization import registry
+
+from . import CERT1, CERT2, KEY1, KEY2
+from .case import SecurityCase
+
+
+class test_SecureSerializer(SecurityCase):
+
+    def _get_s(self, key, cert, certs):
+        store = CertStore()
+        for c in certs:
+            store.add_cert(Certificate(c))
+        return SecureSerializer(PrivateKey(key), Certificate(cert), store)
+
+    def test_serialize(self):
+        s = self._get_s(KEY1, CERT1, [CERT1])
+        self.assertEqual(s.deserialize(s.serialize('foo')), 'foo')
+
+    def test_deserialize(self):
+        s = self._get_s(KEY1, CERT1, [CERT1])
+        self.assertRaises(SecurityError, s.deserialize, 'bad data')
+
+    def test_unmatched_key_cert(self):
+        s = self._get_s(KEY1, CERT2, [CERT1, CERT2])
+        self.assertRaises(SecurityError,
+                          s.deserialize, s.serialize('foo'))
+
+    def test_unknown_source(self):
+        s1 = self._get_s(KEY1, CERT1, [CERT2])
+        s2 = self._get_s(KEY1, CERT1, [])
+        self.assertRaises(SecurityError,
+                          s1.deserialize, s1.serialize('foo'))
+        self.assertRaises(SecurityError,
+                          s2.deserialize, s2.serialize('foo'))
+
+    def test_self_send(self):
+        s1 = self._get_s(KEY1, CERT1, [CERT1])
+        s2 = self._get_s(KEY1, CERT1, [CERT1])
+        self.assertEqual(s2.deserialize(s1.serialize('foo')), 'foo')
+
+    def test_separate_ends(self):
+        s1 = self._get_s(KEY1, CERT1, [CERT2])
+        s2 = self._get_s(KEY2, CERT2, [CERT1])
+        self.assertEqual(s2.deserialize(s1.serialize('foo')), 'foo')
+
+    def test_register_auth(self):
+        register_auth(KEY1, CERT1, '')
+        self.assertIn('application/data', registry._decoders)
diff --git a/awx/lib/site-packages/celery/tests/slow/__init__.py b/awx/lib/site-packages/celery/tests/slow/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/awx/lib/site-packages/celery/tests/slow/test_buckets.py b/awx/lib/site-packages/celery/tests/slow/test_buckets.py
new file mode 100644
index 0000000000..22b7c8bcf9
--- /dev/null
+++ b/awx/lib/site-packages/celery/tests/slow/test_buckets.py
@@ -0,0 +1,346 @@
+from __future__ import absolute_import
+from __future__ import with_statement
+
+import sys
+import time
+
+from functools import partial
+from itertools import chain, izip
+from Queue import Empty
+
+from mock import Mock, patch
+
+from celery.app.registry import TaskRegistry
+from celery.task.base import Task
+from celery.utils import timeutils
+from celery.utils import uuid
+from celery.worker import buckets
+
+from celery.tests.utils import Case, skip_if_environ, mock_context
+
+skip_if_disabled = partial(skip_if_environ('SKIP_RLIMITS'))
+
+
+class MockJob(object):
+
+    def __init__(self, id, name, args, kwargs):
+        self.id = id
+        self.name = name
+        self.args = args
+        self.kwargs = kwargs
+
+    def __eq__(self, other):
+        if isinstance(other, self.__class__):
+            return bool(self.id == other.id
+                        and self.name == other.name
+                        and self.args == other.args
+                        and self.kwargs == other.kwargs)
+        else:
+            return self == other
+
+    def __repr__(self):
+        return '<MockJob: task:%s id:%s args:%s kwargs:%s>' % (
+            self.name, self.id, self.args, self.kwargs)
+        self.assertEqual(repr(ok_res), '<AsyncResult: %s>' % (
+            self.task1['id']))
+        self.assertEqual(repr(ok2_res), '<AsyncResult: %s>' % (
+            self.task2['id']))
+        self.assertEqual(repr(nok_res), '<AsyncResult: %s>' % (
+            self.task3['id']))
+
+        pending_id = uuid()
+        pending_res = AsyncResult(pending_id)
+        self.assertEqual(repr(pending_res), '<AsyncResult: %s>' % (
+            pending_id))
+
+    def test_hash(self):
+        self.assertEqual(hash(AsyncResult('x0w991')),
+                         hash(AsyncResult('x0w991')))
+        self.assertNotEqual(hash(AsyncResult('x0w991')),
+                            hash(AsyncResult('x1w991')))
+
+    def test_get_traceback(self):
+        ok_res = AsyncResult(self.task1['id'])
+        nok_res = AsyncResult(self.task3['id'])
+        nok_res2 = AsyncResult(self.task4['id'])
+        self.assertFalse(ok_res.traceback)
+        self.assertTrue(nok_res.traceback)
+
self.assertTrue(nok_res2.traceback) + + pending_res = AsyncResult(uuid()) + self.assertFalse(pending_res.traceback) + + def test_get(self): + ok_res = AsyncResult(self.task1['id']) + ok2_res = AsyncResult(self.task2['id']) + nok_res = AsyncResult(self.task3['id']) + nok2_res = AsyncResult(self.task4['id']) + + self.assertEqual(ok_res.get(), 'the') + self.assertEqual(ok2_res.get(), 'quick') + with self.assertRaises(KeyError): + nok_res.get() + self.assertTrue(nok_res.get(propagate=False)) + self.assertIsInstance(nok2_res.result, KeyError) + self.assertEqual(ok_res.info, 'the') + + def test_get_timeout(self): + res = AsyncResult(self.task4['id']) # has RETRY state + with self.assertRaises(TimeoutError): + res.get(timeout=0.1) + + pending_res = AsyncResult(uuid()) + with self.assertRaises(TimeoutError): + pending_res.get(timeout=0.1) + + @skip_if_quick + def test_get_timeout_longer(self): + res = AsyncResult(self.task4['id']) # has RETRY state + with self.assertRaises(TimeoutError): + res.get(timeout=1) + + def test_ready(self): + oks = (AsyncResult(self.task1['id']), + AsyncResult(self.task2['id']), + AsyncResult(self.task3['id'])) + self.assertTrue(all(result.ready() for result in oks)) + self.assertFalse(AsyncResult(self.task4['id']).ready()) + + self.assertFalse(AsyncResult(uuid()).ready()) + + +class test_ResultSet(AppCase): + + def test_resultset_repr(self): + self.assertTrue(repr(ResultSet(map(AsyncResult, ['1', '2', '3'])))) + + def test_eq_other(self): + self.assertFalse(ResultSet([1, 3, 3]) == 1) + self.assertTrue(ResultSet([1]) == ResultSet([1])) + + def test_get(self): + x = ResultSet(map(AsyncResult, [1, 2, 3])) + b = x.results[0].backend = Mock() + b.supports_native_join = False + x.join_native = Mock() + x.join = Mock() + x.get() + self.assertTrue(x.join.called) + b.supports_native_join = True + x.get() + self.assertTrue(x.join_native.called) + + def test_add(self): + x = ResultSet([1]) + x.add(2) + self.assertEqual(len(x), 2) + x.add(2) + self.assertEqual(len(x), 2) + + def test_add_discard(self): + x = ResultSet([]) + x.add(AsyncResult('1')) + self.assertIn(AsyncResult('1'), x.results) + x.discard(AsyncResult('1')) + x.discard(AsyncResult('1')) + x.discard('1') + self.assertNotIn(AsyncResult('1'), x.results) + + x.update([AsyncResult('2')]) + + def test_clear(self): + x = ResultSet([]) + r = x.results + x.clear() + self.assertIs(x.results, r) + + +class MockAsyncResultFailure(AsyncResult): + + @property + def result(self): + return KeyError('baz') + + @property + def state(self): + return states.FAILURE + + def get(self, propagate=True, **kwargs): + if propagate: + raise self.result + return self.result + + +class MockAsyncResultSuccess(AsyncResult): + forgotten = False + + def forget(self): + self.forgotten = True + + @property + def result(self): + return 42 + + @property + def state(self): + return states.SUCCESS + + def get(self, **kwargs): + return self.result + + +class SimpleBackend(object): + ids = [] + + def __init__(self, ids=[]): + self.ids = ids + + def get_many(self, *args, **kwargs): + return ((id, {'result': i, 'status': states.SUCCESS}) + for i, id in enumerate(self.ids)) + + +class test_TaskSetResult(AppCase): + + def setup(self): + self.size = 10 + self.ts = TaskSetResult(uuid(), make_mock_group(self.size)) + + def test_total(self): + self.assertEqual(self.ts.total, self.size) + + def test_compat_properties(self): + self.assertEqual(self.ts.taskset_id, self.ts.id) + self.ts.taskset_id = 'foo' + self.assertEqual(self.ts.taskset_id, 'foo') + + def 
test_compat_subtasks_kwarg(self): + x = TaskSetResult(uuid(), subtasks=[1, 2, 3]) + self.assertEqual(x.results, [1, 2, 3]) + + def test_itersubtasks(self): + it = self.ts.itersubtasks() + + for i, t in enumerate(it): + self.assertEqual(t.get(), i) + + +class test_GroupResult(AppCase): + + def setup(self): + self.size = 10 + self.ts = GroupResult(uuid(), make_mock_group(self.size)) + + def test_len(self): + self.assertEqual(len(self.ts), self.size) + + def test_eq_other(self): + self.assertFalse(self.ts == 1) + + def test_reduce(self): + self.assertTrue(loads(dumps(self.ts))) + + def test_iterate_raises(self): + ar = MockAsyncResultFailure(uuid()) + ts = GroupResult(uuid(), [ar]) + it = iter(ts) + with self.assertRaises(KeyError): + it.next() + + def test_forget(self): + subs = [MockAsyncResultSuccess(uuid()), + MockAsyncResultSuccess(uuid())] + ts = GroupResult(uuid(), subs) + ts.forget() + for sub in subs: + self.assertTrue(sub.forgotten) + + def test_getitem(self): + subs = [MockAsyncResultSuccess(uuid()), + MockAsyncResultSuccess(uuid())] + ts = GroupResult(uuid(), subs) + self.assertIs(ts[0], subs[0]) + + def test_save_restore(self): + subs = [MockAsyncResultSuccess(uuid()), + MockAsyncResultSuccess(uuid())] + ts = GroupResult(uuid(), subs) + ts.save() + with self.assertRaises(AttributeError): + ts.save(backend=object()) + self.assertEqual(GroupResult.restore(ts.id).subtasks, + ts.subtasks) + ts.delete() + self.assertIsNone(GroupResult.restore(ts.id)) + with self.assertRaises(AttributeError): + GroupResult.restore(ts.id, backend=object()) + + def test_join_native(self): + backend = SimpleBackend() + subtasks = [AsyncResult(uuid(), backend=backend) + for i in range(10)] + ts = GroupResult(uuid(), subtasks) + backend.ids = [subtask.id for subtask in subtasks] + res = ts.join_native() + self.assertEqual(res, range(10)) + + def test_iter_native(self): + backend = SimpleBackend() + subtasks = [AsyncResult(uuid(), backend=backend) + for i in range(10)] + ts = GroupResult(uuid(), subtasks) + backend.ids = [subtask.id for subtask in subtasks] + self.assertEqual(len(list(ts.iter_native())), 10) + + def test_iterate_yields(self): + ar = MockAsyncResultSuccess(uuid()) + ar2 = MockAsyncResultSuccess(uuid()) + ts = GroupResult(uuid(), [ar, ar2]) + it = iter(ts) + self.assertEqual(it.next(), 42) + self.assertEqual(it.next(), 42) + + def test_iterate_eager(self): + ar1 = EagerResult(uuid(), 42, states.SUCCESS) + ar2 = EagerResult(uuid(), 42, states.SUCCESS) + ts = GroupResult(uuid(), [ar1, ar2]) + it = iter(ts) + self.assertEqual(it.next(), 42) + self.assertEqual(it.next(), 42) + + def test_join_timeout(self): + ar = MockAsyncResultSuccess(uuid()) + ar2 = MockAsyncResultSuccess(uuid()) + ar3 = AsyncResult(uuid()) + ts = GroupResult(uuid(), [ar, ar2, ar3]) + with self.assertRaises(TimeoutError): + ts.join(timeout=0.0000001) + + def test___iter__(self): + it = iter(self.ts) + results = sorted(list(it)) + self.assertListEqual(results, list(xrange(self.size))) + + def test_join(self): + joined = self.ts.join() + self.assertListEqual(joined, list(xrange(self.size))) + + def test_successful(self): + self.assertTrue(self.ts.successful()) + + def test_failed(self): + self.assertFalse(self.ts.failed()) + + def test_waiting(self): + self.assertFalse(self.ts.waiting()) + + def test_ready(self): + self.assertTrue(self.ts.ready()) + + def test_completed_count(self): + self.assertEqual(self.ts.completed_count(), len(self.ts)) + + +class test_pending_AsyncResult(AppCase): + + def setup(self): + self.task = 
AsyncResult(uuid()) + + def test_result(self): + self.assertIsNone(self.task.result) + + +class test_failed_AsyncResult(test_GroupResult): + + def setup(self): + self.size = 11 + subtasks = make_mock_group(10) + failed = mock_task('ts11', states.FAILURE, KeyError('Baz')) + save_result(failed) + failed_res = AsyncResult(failed['id']) + self.ts = GroupResult(uuid(), subtasks + [failed_res]) + + def test_completed_count(self): + self.assertEqual(self.ts.completed_count(), len(self.ts) - 1) + + def test___iter__(self): + it = iter(self.ts) + + def consume(): + return list(it) + + with self.assertRaises(KeyError): + consume() + + def test_join(self): + with self.assertRaises(KeyError): + self.ts.join() + + def test_successful(self): + self.assertFalse(self.ts.successful()) + + def test_failed(self): + self.assertTrue(self.ts.failed()) + + +class test_pending_Group(AppCase): + + def setup(self): + self.ts = GroupResult(uuid(), [AsyncResult(uuid()), + AsyncResult(uuid())]) + + def test_completed_count(self): + self.assertEqual(self.ts.completed_count(), 0) + + def test_ready(self): + self.assertFalse(self.ts.ready()) + + def test_waiting(self): + self.assertTrue(self.ts.waiting()) + + def x_join(self): + with self.assertRaises(TimeoutError): + self.ts.join(timeout=0.001) + + @skip_if_quick + def x_join_longer(self): + with self.assertRaises(TimeoutError): + self.ts.join(timeout=1) + + +class RaisingTask(Task): + + def run(self, x, y): + raise KeyError('xy') + + +class test_EagerResult(AppCase): + + def test_wait_raises(self): + res = RaisingTask.apply(args=[3, 3]) + with self.assertRaises(KeyError): + res.wait() + self.assertTrue(res.wait(propagate=False)) + + def test_wait(self): + res = EagerResult('x', 'x', states.RETRY) + res.wait() + self.assertEqual(res.state, states.RETRY) + self.assertEqual(res.status, states.RETRY) + + def test_forget(self): + res = EagerResult('x', 'x', states.RETRY) + res.forget() + + def test_revoke(self): + res = RaisingTask.apply(args=[3, 3]) + self.assertFalse(res.revoke()) + + +class test_serializable(AppCase): + + def test_AsyncResult(self): + x = AsyncResult(uuid()) + self.assertEqual(x, from_serializable(x.serializable(), self.app)) + self.assertEqual(x, from_serializable(x, self.app)) + + def test_GroupResult(self): + x = GroupResult(uuid(), [AsyncResult(uuid()) for _ in range(10)]) + self.assertEqual(x, from_serializable(x.serializable(), self.app)) + self.assertEqual(x, from_serializable(x, self.app)) diff --git a/awx/lib/site-packages/celery/tests/tasks/test_sets.py b/awx/lib/site-packages/celery/tests/tasks/test_sets.py new file mode 100644 index 0000000000..7258ae2ddc --- /dev/null +++ b/awx/lib/site-packages/celery/tests/tasks/test_sets.py @@ -0,0 +1,194 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import anyjson + +from celery import current_app +from celery.task import Task +from celery.task.sets import subtask, TaskSet +from celery.canvas import Signature + +from celery.tests.utils import Case + + +class MockTask(Task): + name = 'tasks.add' + + def run(self, x, y, **kwargs): + return x + y + + @classmethod + def apply_async(cls, args, kwargs, **options): + return (args, kwargs, options) + + @classmethod + def apply(cls, args, kwargs, **options): + return (args, kwargs, options) + + +class test_subtask(Case): + + def test_behaves_like_type(self): + s = subtask('tasks.add', (2, 2), {'cache': True}, + {'routing_key': 'CPU-bound'}) + self.assertDictEqual(subtask(s), s) + + def 
test_task_argument_can_be_task_cls(self): + s = subtask(MockTask, (2, 2)) + self.assertEqual(s.task, MockTask.name) + + def test_apply_async(self): + s = MockTask.subtask( + (2, 2), {'cache': True}, {'routing_key': 'CPU-bound'}, + ) + args, kwargs, options = s.apply_async() + self.assertTupleEqual(args, (2, 2)) + self.assertDictEqual(kwargs, {'cache': True}) + self.assertDictEqual(options, {'routing_key': 'CPU-bound'}) + + def test_delay_argmerge(self): + s = MockTask.subtask( + (2, ), {'cache': True}, {'routing_key': 'CPU-bound'}, + ) + args, kwargs, options = s.delay(10, cache=False, other='foo') + self.assertTupleEqual(args, (10, 2)) + self.assertDictEqual(kwargs, {'cache': False, 'other': 'foo'}) + self.assertDictEqual(options, {'routing_key': 'CPU-bound'}) + + def test_apply_async_argmerge(self): + s = MockTask.subtask( + (2, ), {'cache': True}, {'routing_key': 'CPU-bound'}, + ) + args, kwargs, options = s.apply_async((10, ), + {'cache': False, 'other': 'foo'}, + routing_key='IO-bound', + exchange='fast') + + self.assertTupleEqual(args, (10, 2)) + self.assertDictEqual(kwargs, {'cache': False, 'other': 'foo'}) + self.assertDictEqual(options, {'routing_key': 'IO-bound', + 'exchange': 'fast'}) + + def test_apply_argmerge(self): + s = MockTask.subtask( + (2, ), {'cache': True}, {'routing_key': 'CPU-bound'}, + ) + args, kwargs, options = s.apply((10, ), + {'cache': False, 'other': 'foo'}, + routing_key='IO-bound', + exchange='fast') + + self.assertTupleEqual(args, (10, 2)) + self.assertDictEqual(kwargs, {'cache': False, 'other': 'foo'}) + self.assertDictEqual( + options, {'routing_key': 'IO-bound', 'exchange': 'fast'}, + ) + + def test_is_JSON_serializable(self): + s = MockTask.subtask( + (2, ), {'cache': True}, {'routing_key': 'CPU-bound'}, + ) + s.args = list(s.args) # tuples are not preserved + # but this doesn't matter. 
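A hedged sketch of the JSON round-trip this test performs, assuming the vendored celery 3.0.x and anyjson; 'tasks.add' is just a hypothetical task name here. Tuples come back as lists, but the signature's fields survive:

    import anyjson
    from celery.task.sets import subtask

    s = subtask('tasks.add', (2, 2), {'debug': True})
    # Signature is a dict subclass, so it serializes directly to JSON.
    s2 = subtask(anyjson.loads(anyjson.dumps(s)))
    assert s2.task == 'tasks.add'
    assert list(s2.args) == [2, 2]   # JSON turned the args tuple into a list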
+ self.assertEqual(s, subtask(anyjson.loads(anyjson.dumps(s)))) + + def test_repr(self): + s = MockTask.subtask((2, ), {'cache': True}) + self.assertIn('2', repr(s)) + self.assertIn('cache=True', repr(s)) + + def test_reduce(self): + s = MockTask.subtask((2, ), {'cache': True}) + cls, args = s.__reduce__() + self.assertDictEqual(dict(cls(*args)), dict(s)) + + +class test_TaskSet(Case): + + def test_task_arg_can_be_iterable__compat(self): + ts = TaskSet([MockTask.subtask((i, i)) + for i in (2, 4, 8)]) + self.assertEqual(len(ts), 3) + + def test_respects_ALWAYS_EAGER(self): + app = current_app + + class MockTaskSet(TaskSet): + applied = 0 + + def apply(self, *args, **kwargs): + self.applied += 1 + + ts = MockTaskSet( + [MockTask.subtask((i, i)) for i in (2, 4, 8)], + ) + app.conf.CELERY_ALWAYS_EAGER = True + try: + ts.apply_async() + finally: + app.conf.CELERY_ALWAYS_EAGER = False + self.assertEqual(ts.applied, 1) + + def test_apply_async(self): + + applied = [0] + + class mocksubtask(Signature): + + def apply_async(self, *args, **kwargs): + applied[0] += 1 + + ts = TaskSet([mocksubtask(MockTask, (i, i)) + for i in (2, 4, 8)]) + ts.apply_async() + self.assertEqual(applied[0], 3) + + class Publisher(object): + + def send(self, *args, **kwargs): + pass + + ts.apply_async(publisher=Publisher()) + + # setting current_task + + @current_app.task + def xyz(): + pass + from celery._state import _task_stack + xyz.push_request() + _task_stack.push(xyz) + try: + ts.apply_async(publisher=Publisher()) + finally: + _task_stack.pop() + xyz.pop_request() + + def test_apply(self): + + applied = [0] + + class mocksubtask(Signature): + + def apply(self, *args, **kwargs): + applied[0] += 1 + + ts = TaskSet([mocksubtask(MockTask, (i, i)) + for i in (2, 4, 8)]) + ts.apply() + self.assertEqual(applied[0], 3) + + def test_set_app(self): + ts = TaskSet([]) + ts.app = 42 + self.assertEqual(ts.app, 42) + + def test_set_tasks(self): + ts = TaskSet([]) + ts.tasks = [1, 2, 3] + self.assertEqual(ts, [1, 2, 3]) + + def test_set_Publisher(self): + ts = TaskSet([]) + ts.Publisher = 42 + self.assertEqual(ts.Publisher, 42) diff --git a/awx/lib/site-packages/celery/tests/tasks/test_states.py b/awx/lib/site-packages/celery/tests/tasks/test_states.py new file mode 100644 index 0000000000..4acf8aafee --- /dev/null +++ b/awx/lib/site-packages/celery/tests/tasks/test_states.py @@ -0,0 +1,31 @@ +from __future__ import absolute_import + +from celery.states import state +from celery import states +from celery.tests.utils import Case + + +class test_state_precedence(Case): + + def test_gt(self): + self.assertGreater(state(states.SUCCESS), + state(states.PENDING)) + self.assertGreater(state(states.FAILURE), + state(states.RECEIVED)) + self.assertGreater(state(states.REVOKED), + state(states.STARTED)) + self.assertGreater(state(states.SUCCESS), + state('CRASHED')) + self.assertGreater(state(states.FAILURE), + state('CRASHED')) + self.assertFalse(state(states.REVOKED) > state('CRASHED')) + + def test_lt(self): + self.assertLess(state(states.PENDING), state(states.SUCCESS)) + self.assertLess(state(states.RECEIVED), state(states.FAILURE)) + self.assertLess(state(states.STARTED), state(states.REVOKED)) + self.assertLess(state('CRASHED'), state(states.SUCCESS)) + self.assertLess(state('CRASHED'), state(states.FAILURE)) + self.assertTrue(state(states.REVOKED) < state('CRASHED')) + self.assertTrue(state(states.REVOKED) <= state('CRASHED')) + self.assertTrue(state('CRASHED') >= state(states.REVOKED)) diff --git 
a/awx/lib/site-packages/celery/tests/tasks/test_tasks.py b/awx/lib/site-packages/celery/tests/tasks/test_tasks.py new file mode 100644 index 0000000000..dd53e50faf --- /dev/null +++ b/awx/lib/site-packages/celery/tests/tasks/test_tasks.py @@ -0,0 +1,1227 @@ +from __future__ import absolute_import +from __future__ import with_statement + +from datetime import datetime, timedelta +from functools import wraps +from mock import patch +from pickle import loads, dumps + +from celery.task import ( + current, + task, + Task, + BaseTask, + TaskSet, + periodic_task, + PeriodicTask +) +from celery import current_app +from celery.app import app_or_default +from celery.exceptions import RetryTaskError +from celery.execute import send_task +from celery.result import EagerResult +from celery.schedules import crontab, crontab_parser, ParseException +from celery.utils import uuid +from celery.utils.timeutils import parse_iso8601, timedelta_seconds + +from celery.tests.utils import Case, with_eager_tasks, WhateverIO + + +def now(): + return current_app.now() + + +def return_True(*args, **kwargs): + # Task run functions can't be closures/lambdas, as they're pickled. + return True + + +return_True_task = task()(return_True) + + +def raise_exception(self, **kwargs): + raise Exception('%s error' % self.__class__) + + +class MockApplyTask(Task): + applied = 0 + + def run(self, x, y): + return x * y + + @classmethod + def apply_async(self, *args, **kwargs): + self.applied += 1 + + +@task(name='c.unittest.increment_counter_task', count=0) +def increment_counter(increment_by=1): + increment_counter.count += increment_by or 1 + return increment_counter.count + + +@task(name='c.unittest.raising_task') +def raising(): + raise KeyError('foo') + + +@task(max_retries=3, iterations=0) +def retry_task(arg1, arg2, kwarg=1, max_retries=None, care=True): + current.iterations += 1 + rmax = current.max_retries if max_retries is None else max_retries + + assert repr(current.request) + retries = current.request.retries + if care and retries >= rmax: + return arg1 + else: + raise current.retry(countdown=0, max_retries=rmax) + + +@task(max_retries=3, iterations=0, accept_magic_kwargs=True) +def retry_task_noargs(**kwargs): + current.iterations += 1 + + retries = kwargs['task_retries'] + if retries >= 3: + return 42 + else: + raise current.retry(countdown=0) + + +@task(max_retries=3, iterations=0, base=MockApplyTask, + accept_magic_kwargs=True) +def retry_task_mockapply(arg1, arg2, kwarg=1, **kwargs): + current.iterations += 1 + + retries = kwargs['task_retries'] + if retries >= 3: + return arg1 + else: + kwargs.update(kwarg=kwarg) + raise current.retry(countdown=0) + + +class MyCustomException(Exception): + """Random custom exception.""" + + +@task(max_retries=3, iterations=0, accept_magic_kwargs=True) +def retry_task_customexc(arg1, arg2, kwarg=1, **kwargs): + current.iterations += 1 + + retries = kwargs['task_retries'] + if retries >= 3: + return arg1 + kwarg + else: + try: + raise MyCustomException('Elaine Marie Benes') + except MyCustomException, exc: + kwargs.update(kwarg=kwarg) + raise current.retry(countdown=0, exc=exc) + + +class test_task_retries(Case): + + def test_retry(self): + retry_task.__class__.max_retries = 3 + retry_task.iterations = 0 + retry_task.apply([0xFF, 0xFFFF]) + self.assertEqual(retry_task.iterations, 4) + + retry_task.__class__.max_retries = 3 + retry_task.iterations = 0 + retry_task.apply([0xFF, 0xFFFF], {'max_retries': 10}) + self.assertEqual(retry_task.iterations, 11) + + def 
test_retry_no_args(self): + assert retry_task_noargs.accept_magic_kwargs + retry_task_noargs.__class__.max_retries = 3 + retry_task_noargs.iterations = 0 + retry_task_noargs.apply() + self.assertEqual(retry_task_noargs.iterations, 4) + + def test_retry_kwargs_can_be_empty(self): + retry_task_mockapply.push_request() + try: + with self.assertRaises(RetryTaskError): + retry_task_mockapply.retry(args=[4, 4], kwargs=None) + finally: + retry_task_mockapply.pop_request() + + def test_retry_not_eager(self): + retry_task_mockapply.push_request() + try: + retry_task_mockapply.request.called_directly = False + exc = Exception('baz') + try: + retry_task_mockapply.retry( + args=[4, 4], kwargs={'task_retries': 0}, + exc=exc, throw=False, + ) + self.assertTrue(retry_task_mockapply.__class__.applied) + finally: + retry_task_mockapply.__class__.applied = 0 + + try: + with self.assertRaises(RetryTaskError): + retry_task_mockapply.retry( + args=[4, 4], kwargs={'task_retries': 0}, + exc=exc, throw=True) + self.assertTrue(retry_task_mockapply.__class__.applied) + finally: + retry_task_mockapply.__class__.applied = 0 + finally: + retry_task_mockapply.pop_request() + + def test_retry_with_kwargs(self): + retry_task_customexc.__class__.max_retries = 3 + retry_task_customexc.iterations = 0 + retry_task_customexc.apply([0xFF, 0xFFFF], {'kwarg': 0xF}) + self.assertEqual(retry_task_customexc.iterations, 4) + + def test_retry_with_custom_exception(self): + retry_task_customexc.__class__.max_retries = 2 + retry_task_customexc.iterations = 0 + result = retry_task_customexc.apply([0xFF, 0xFFFF], {'kwarg': 0xF}) + with self.assertRaises(MyCustomException): + result.get() + self.assertEqual(retry_task_customexc.iterations, 3) + + def test_max_retries_exceeded(self): + retry_task.__class__.max_retries = 2 + retry_task.iterations = 0 + result = retry_task.apply([0xFF, 0xFFFF], {'care': False}) + with self.assertRaises(retry_task.MaxRetriesExceededError): + result.get() + self.assertEqual(retry_task.iterations, 3) + + retry_task.__class__.max_retries = 1 + retry_task.iterations = 0 + result = retry_task.apply([0xFF, 0xFFFF], {'care': False}) + with self.assertRaises(retry_task.MaxRetriesExceededError): + result.get() + self.assertEqual(retry_task.iterations, 2) + + +class test_canvas_utils(Case): + + def test_si(self): + self.assertTrue(retry_task.si()) + self.assertTrue(retry_task.si().immutable) + + def test_chunks(self): + self.assertTrue(retry_task.chunks(range(100), 10)) + + def test_map(self): + self.assertTrue(retry_task.map(range(100))) + + def test_starmap(self): + self.assertTrue(retry_task.starmap(range(100))) + + def test_on_success(self): + retry_task.on_success(1, 1, (), {}) + + +class test_tasks(Case): + + def test_unpickle_task(self): + import pickle + + @task + def xxx(): + pass + + self.assertIs(pickle.loads(pickle.dumps(xxx)), xxx.app.tasks[xxx.name]) + + def createTask(self, name): + return task(__module__=self.__module__, name=name)(return_True) + + def test_AsyncResult(self): + task_id = uuid() + result = retry_task.AsyncResult(task_id) + self.assertEqual(result.backend, retry_task.backend) + self.assertEqual(result.id, task_id) + + def assertNextTaskDataEqual(self, consumer, presult, task_name, + test_eta=False, test_expires=False, **kwargs): + next_task = consumer.queues[0].get() + task_data = next_task.decode() + self.assertEqual(task_data['id'], presult.id) + self.assertEqual(task_data['task'], task_name) + task_kwargs = task_data.get('kwargs', {}) + if test_eta: + 
self.assertIsInstance(task_data.get('eta'), basestring) + to_datetime = parse_iso8601(task_data.get('eta')) + self.assertIsInstance(to_datetime, datetime) + if test_expires: + self.assertIsInstance(task_data.get('expires'), basestring) + to_datetime = parse_iso8601(task_data.get('expires')) + self.assertIsInstance(to_datetime, datetime) + for arg_name, arg_value in kwargs.items(): + self.assertEqual(task_kwargs.get(arg_name), arg_value) + + def test_incomplete_task_cls(self): + + class IncompleteTask(Task): + name = 'c.unittest.t.itask' + + with self.assertRaises(NotImplementedError): + IncompleteTask().run() + + def test_task_kwargs_must_be_dictionary(self): + with self.assertRaises(ValueError): + increment_counter.apply_async([], 'str') + + def test_task_args_must_be_list(self): + with self.assertRaises(ValueError): + increment_counter.apply_async('str', {}) + + def test_regular_task(self): + T1 = self.createTask('c.unittest.t.t1') + self.assertIsInstance(T1, BaseTask) + self.assertTrue(T1.run()) + self.assertTrue(callable(T1), 'Task class is callable()') + self.assertTrue(T1(), 'Task class runs run() when called') + + consumer = T1.get_consumer() + with self.assertRaises(NotImplementedError): + consumer.receive('foo', 'foo') + consumer.purge() + self.assertIsNone(consumer.queues[0].get()) + + # Without arguments. + presult = T1.delay() + self.assertNextTaskDataEqual(consumer, presult, T1.name) + + # With arguments. + presult2 = T1.apply_async(kwargs=dict(name='George Costanza')) + self.assertNextTaskDataEqual( + consumer, presult2, T1.name, name='George Costanza', + ) + + # send_task + sresult = send_task(T1.name, kwargs=dict(name='Elaine M. Benes')) + self.assertNextTaskDataEqual( + consumer, sresult, T1.name, name='Elaine M. Benes', + ) + + # With eta. + presult2 = T1.apply_async( + kwargs=dict(name='George Costanza'), + eta=now() + timedelta(days=1), + expires=now() + timedelta(days=2), + ) + self.assertNextTaskDataEqual( + consumer, presult2, T1.name, + name='George Costanza', test_eta=True, test_expires=True, + ) + + # With countdown. + presult2 = T1.apply_async(kwargs=dict(name='George Costanza'), + countdown=10, expires=12) + self.assertNextTaskDataEqual( + consumer, presult2, T1.name, + name='George Costanza', test_eta=True, test_expires=True, + ) + + # Discarding all tasks. 
+ consumer.purge() + T1.apply_async() + self.assertEqual(consumer.purge(), 1) + self.assertIsNone(consumer.queues[0].get()) + + self.assertFalse(presult.successful()) + T1.backend.mark_as_done(presult.id, result=None) + self.assertTrue(presult.successful()) + + publisher = T1.get_publisher() + self.assertTrue(publisher.exchange) + + def test_context_get(self): + task = self.createTask('c.unittest.t.c.g') + task.push_request() + try: + request = task.request + request.foo = 32 + self.assertEqual(request.get('foo'), 32) + self.assertEqual(request.get('bar', 36), 36) + request.clear() + finally: + task.pop_request() + + def test_task_class_repr(self): + task = self.createTask('c.unittest.t.repr') + self.assertIn('class Task of', repr(task.app.Task)) + prev, task.app.Task._app = task.app.Task._app, None + try: + self.assertIn('unbound', repr(task.app.Task, )) + finally: + task.app.Task._app = prev + + def test_bind_no_magic_kwargs(self): + task = self.createTask('c.unittest.t.magic_kwargs') + task.__class__.accept_magic_kwargs = None + task.bind(task.app) + + def test_annotate(self): + with patch('celery.app.task.resolve_all_annotations') as anno: + anno.return_value = [{'FOO': 'BAR'}] + Task.annotate() + self.assertEqual(Task.FOO, 'BAR') + + def test_after_return(self): + task = self.createTask('c.unittest.t.after_return') + task.push_request() + try: + task.request.chord = return_True_task.s() + task.after_return('SUCCESS', 1.0, 'foobar', (), {}, None) + task.request.clear() + finally: + task.pop_request() + + def test_send_task_sent_event(self): + T1 = self.createTask('c.unittest.t.t1') + app = T1.app + with app.connection() as conn: + app.conf.CELERY_SEND_TASK_SENT_EVENT = True + del(app.amqp.__dict__['TaskProducer']) + try: + self.assertTrue(app.amqp.TaskProducer(conn).send_sent_event) + finally: + app.conf.CELERY_SEND_TASK_SENT_EVENT = False + del(app.amqp.__dict__['TaskProducer']) + + def test_get_publisher(self): + connection = app_or_default().connection() + p = increment_counter.get_publisher(connection, auto_declare=False, + exchange='foo') + self.assertEqual(p.exchange.name, 'foo') + p = increment_counter.get_publisher(connection, auto_declare=False, + exchange='foo', + exchange_type='fanout') + self.assertEqual(p.exchange.type, 'fanout') + + def test_update_state(self): + + @task + def yyy(): + pass + + yyy.push_request() + try: + tid = uuid() + yyy.update_state(tid, 'FROBULATING', {'fooz': 'baaz'}) + self.assertEqual(yyy.AsyncResult(tid).status, 'FROBULATING') + self.assertDictEqual(yyy.AsyncResult(tid).result, {'fooz': 'baaz'}) + + yyy.request.id = tid + yyy.update_state(state='FROBUZATING', meta={'fooz': 'baaz'}) + self.assertEqual(yyy.AsyncResult(tid).status, 'FROBUZATING') + self.assertDictEqual(yyy.AsyncResult(tid).result, {'fooz': 'baaz'}) + finally: + yyy.pop_request() + + def test_repr(self): + + @task + def task_test_repr(): + pass + + self.assertIn('task_test_repr', repr(task_test_repr)) + + def test_has___name__(self): + + @task + def yyy2(): + pass + + self.assertTrue(yyy2.__name__) + + def test_get_logger(self): + t1 = self.createTask('c.unittest.t.t1') + t1.push_request() + try: + logfh = WhateverIO() + logger = t1.get_logger(logfile=logfh, loglevel=0) + self.assertTrue(logger) + + t1.request.loglevel = 3 + logger = t1.get_logger(logfile=logfh, loglevel=None) + self.assertTrue(logger) + finally: + t1.pop_request() + + +class test_TaskSet(Case): + + @with_eager_tasks + def test_function_taskset(self): + subtasks = [return_True_task.s(i) for i in range(1, 6)] + ts = 
TaskSet(subtasks) + res = ts.apply_async() + self.assertListEqual(res.join(), [True, True, True, True, True]) + + def test_counter_taskset(self): + increment_counter.count = 0 + ts = TaskSet(tasks=[ + increment_counter.s(), + increment_counter.s(increment_by=2), + increment_counter.s(increment_by=3), + increment_counter.s(increment_by=4), + increment_counter.s(increment_by=5), + increment_counter.s(increment_by=6), + increment_counter.s(increment_by=7), + increment_counter.s(increment_by=8), + increment_counter.s(increment_by=9), + ]) + self.assertEqual(ts.total, 9) + + consumer = increment_counter.get_consumer() + consumer.purge() + consumer.close() + taskset_res = ts.apply_async() + subtasks = taskset_res.subtasks + taskset_id = taskset_res.taskset_id + consumer = increment_counter.get_consumer() + for subtask in subtasks: + m = consumer.queues[0].get().payload + self.assertDictContainsSubset({'taskset': taskset_id, + 'task': increment_counter.name, + 'id': subtask.id}, m) + increment_counter( + increment_by=m.get('kwargs', {}).get('increment_by')) + self.assertEqual(increment_counter.count, sum(xrange(1, 10))) + + def test_named_taskset(self): + prefix = 'test_named_taskset-' + ts = TaskSet([return_True_task.subtask([1])]) + res = ts.apply(taskset_id=prefix + uuid()) + self.assertTrue(res.taskset_id.startswith(prefix)) + + +class test_apply_task(Case): + + def test_apply_throw(self): + with self.assertRaises(KeyError): + raising.apply(throw=True) + + def test_apply_no_magic_kwargs(self): + increment_counter.accept_magic_kwargs = False + try: + increment_counter.apply() + finally: + increment_counter.accept_magic_kwargs = True + + def test_apply_with_CELERY_EAGER_PROPAGATES_EXCEPTIONS(self): + raising.app.conf.CELERY_EAGER_PROPAGATES_EXCEPTIONS = True + try: + with self.assertRaises(KeyError): + raising.apply() + finally: + raising.app.conf.CELERY_EAGER_PROPAGATES_EXCEPTIONS = False + + def test_apply(self): + increment_counter.count = 0 + + e = increment_counter.apply() + self.assertIsInstance(e, EagerResult) + self.assertEqual(e.get(), 1) + + e = increment_counter.apply(args=[1]) + self.assertEqual(e.get(), 2) + + e = increment_counter.apply(kwargs={'increment_by': 4}) + self.assertEqual(e.get(), 6) + + self.assertTrue(e.successful()) + self.assertTrue(e.ready()) + self.assertTrue(repr(e).startswith('<EagerResult:')) >= 3: + raise + else: + break + + def test_every_minute_execution_is_due(self): + last_ran = self.now - timedelta(seconds=61) + due, remaining = every_minute.run_every.is_due(last_ran) + self.assertTrue(due) + self.seconds_almost_equal(remaining, self.next_minute, 1) + + def test_every_minute_execution_is_not_due(self): + last_ran = self.now - timedelta(seconds=self.now.second) + due, remaining = every_minute.run_every.is_due(last_ran) + self.assertFalse(due) + self.seconds_almost_equal(remaining, self.next_minute, 1) + + # 29th of May 2010 is a saturday + @patch_crontab_nowfun(hourly, datetime(2010, 5, 29, 10, 30)) + def test_execution_is_due_on_saturday(self): + last_ran = self.now - timedelta(seconds=61) + due, remaining = every_minute.run_every.is_due(last_ran) + self.assertTrue(due) + self.seconds_almost_equal(remaining, self.next_minute, 1) + + # 30th of May 2010 is a sunday + @patch_crontab_nowfun(hourly, datetime(2010, 5, 30, 10, 30)) + def test_execution_is_due_on_sunday(self): + last_ran = self.now - timedelta(seconds=61) + due, remaining = every_minute.run_every.is_due(last_ran) + self.assertTrue(due) + self.seconds_almost_equal(remaining, self.next_minute, 1) + + # 31st of May
2010 is a monday + @patch_crontab_nowfun(hourly, datetime(2010, 5, 31, 10, 30)) + def test_execution_is_due_on_monday(self): + last_ran = self.now - timedelta(seconds=61) + due, remaining = every_minute.run_every.is_due(last_ran) + self.assertTrue(due) + self.seconds_almost_equal(remaining, self.next_minute, 1) + + @patch_crontab_nowfun(hourly, datetime(2010, 5, 10, 10, 30)) + def test_every_hour_execution_is_due(self): + due, remaining = hourly.run_every.is_due( + datetime(2010, 5, 10, 6, 30)) + self.assertTrue(due) + self.assertEqual(remaining, 60 * 60) + + @patch_crontab_nowfun(hourly, datetime(2010, 5, 10, 10, 29)) + def test_every_hour_execution_is_not_due(self): + due, remaining = hourly.run_every.is_due( + datetime(2010, 5, 10, 9, 30)) + self.assertFalse(due) + self.assertEqual(remaining, 60) + + @patch_crontab_nowfun(quarterly, datetime(2010, 5, 10, 10, 15)) + def test_first_quarter_execution_is_due(self): + due, remaining = quarterly.run_every.is_due( + datetime(2010, 5, 10, 6, 30)) + self.assertTrue(due) + self.assertEqual(remaining, 15 * 60) + + @patch_crontab_nowfun(quarterly, datetime(2010, 5, 10, 10, 30)) + def test_second_quarter_execution_is_due(self): + due, remaining = quarterly.run_every.is_due( + datetime(2010, 5, 10, 6, 30)) + self.assertTrue(due) + self.assertEqual(remaining, 15 * 60) + + @patch_crontab_nowfun(quarterly, datetime(2010, 5, 10, 10, 14)) + def test_first_quarter_execution_is_not_due(self): + due, remaining = quarterly.run_every.is_due( + datetime(2010, 5, 10, 10, 0)) + self.assertFalse(due) + self.assertEqual(remaining, 60) + + @patch_crontab_nowfun(quarterly, datetime(2010, 5, 10, 10, 29)) + def test_second_quarter_execution_is_not_due(self): + due, remaining = quarterly.run_every.is_due( + datetime(2010, 5, 10, 10, 15)) + self.assertFalse(due) + self.assertEqual(remaining, 60) + + @patch_crontab_nowfun(daily, datetime(2010, 5, 10, 7, 30)) + def test_daily_execution_is_due(self): + due, remaining = daily.run_every.is_due( + datetime(2010, 5, 9, 7, 30)) + self.assertTrue(due) + self.assertEqual(remaining, 24 * 60 * 60) + + @patch_crontab_nowfun(daily, datetime(2010, 5, 10, 10, 30)) + def test_daily_execution_is_not_due(self): + due, remaining = daily.run_every.is_due( + datetime(2010, 5, 10, 7, 30)) + self.assertFalse(due) + self.assertEqual(remaining, 21 * 60 * 60) + + @patch_crontab_nowfun(weekly, datetime(2010, 5, 6, 7, 30)) + def test_weekly_execution_is_due(self): + due, remaining = weekly.run_every.is_due( + datetime(2010, 4, 30, 7, 30)) + self.assertTrue(due) + self.assertEqual(remaining, 7 * 24 * 60 * 60) + + @patch_crontab_nowfun(weekly, datetime(2010, 5, 7, 10, 30)) + def test_weekly_execution_is_not_due(self): + due, remaining = weekly.run_every.is_due( + datetime(2010, 5, 6, 7, 30)) + self.assertFalse(due) + self.assertEqual(remaining, 6 * 24 * 60 * 60 - 3 * 60 * 60) + + @patch_crontab_nowfun(monthly, datetime(2010, 5, 13, 7, 30)) + def test_monthly_execution_is_due(self): + due, remaining = monthly.run_every.is_due( + datetime(2010, 4, 8, 7, 30)) + self.assertTrue(due) + self.assertEqual(remaining, 28 * 24 * 60 * 60) + + @patch_crontab_nowfun(monthly, datetime(2010, 5, 9, 10, 30)) + def test_monthly_execution_is_not_due(self): + due, remaining = monthly.run_every.is_due( + datetime(2010, 4, 8, 7, 30)) + self.assertFalse(due) + self.assertEqual(remaining, 4 * 24 * 60 * 60 - 3 * 60 * 60) + + @patch_crontab_nowfun(yearly, datetime(2010, 3, 11, 7, 30)) + def test_yearly_execution_is_due(self): + due, remaining = yearly.run_every.is_due( + 
datetime(2009, 3, 12, 7, 30)) + self.assertTrue(due) + self.assertEqual(remaining, 364 * 24 * 60 * 60) + + @patch_crontab_nowfun(yearly, datetime(2010, 3, 7, 10, 30)) + def test_yearly_execution_is_not_due(self): + due, remaining = yearly.run_every.is_due( + datetime(2009, 3, 12, 7, 30)) + self.assertFalse(due) + self.assertEqual(remaining, 4 * 24 * 60 * 60 - 3 * 60 * 60) diff --git a/awx/lib/site-packages/celery/tests/tasks/test_trace.py b/awx/lib/site-packages/celery/tests/tasks/test_trace.py new file mode 100644 index 0000000000..558ba832f1 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/tasks/test_trace.py @@ -0,0 +1,88 @@ +from __future__ import absolute_import +from __future__ import with_statement + +from mock import patch + +from celery import current_app +from celery import states +from celery.exceptions import RetryTaskError +from celery.task.trace import TraceInfo, eager_trace_task, trace_task +from celery.tests.utils import Case, Mock + + +@current_app.task +def add(x, y): + return x + y + + +@current_app.task(ignore_result=True) +def add_cast(x, y): + return x + y + + +@current_app.task +def raises(exc): + raise exc + + +def trace(task, args=(), kwargs={}, propagate=False): + return eager_trace_task(task, 'id-1', args, kwargs, + propagate=propagate) + + +class test_trace(Case): + + def test_trace_successful(self): + retval, info = trace(add, (2, 2), {}) + self.assertIsNone(info) + self.assertEqual(retval, 4) + + def test_trace_SystemExit(self): + with self.assertRaises(SystemExit): + trace(raises, (SystemExit(), ), {}) + + def test_trace_RetryTaskError(self): + exc = RetryTaskError('foo', 'bar') + _, info = trace(raises, (exc, ), {}) + self.assertEqual(info.state, states.RETRY) + self.assertIs(info.retval, exc) + + def test_trace_exception(self): + exc = KeyError('foo') + _, info = trace(raises, (exc, ), {}) + self.assertEqual(info.state, states.FAILURE) + self.assertIs(info.retval, exc) + + def test_trace_exception_propagate(self): + with self.assertRaises(KeyError): + trace(raises, (KeyError('foo'), ), {}, propagate=True) + + @patch('celery.task.trace.build_tracer') + @patch('celery.task.trace.report_internal_error') + def test_outside_body_error(self, report_internal_error, build_tracer): + tracer = Mock() + tracer.side_effect = KeyError('foo') + build_tracer.return_value = tracer + + @current_app.task + def xtask(): + pass + + trace_task(xtask, 'uuid', (), {}) + self.assertTrue(report_internal_error.call_count) + self.assertIs(xtask.__trace__, tracer) + + +class test_TraceInfo(Case): + + class TI(TraceInfo): + __slots__ = TraceInfo.__slots__ + ('__dict__', ) + + def test_handle_error_state(self): + x = self.TI(states.FAILURE) + x.handle_failure = Mock() + x.handle_error_state(add_cast) + x.handle_failure.assert_called_with( + add_cast, + store_errors=add_cast.store_errors_even_if_ignored, + ) diff --git a/awx/lib/site-packages/celery/tests/utilities/__init__.py b/awx/lib/site-packages/celery/tests/utilities/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/celery/tests/utilities/test_compat.py b/awx/lib/site-packages/celery/tests/utilities/test_compat.py new file mode 100644 index 0000000000..07ec3b5951 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/utilities/test_compat.py @@ -0,0 +1,59 @@ +from __future__ import absolute_import + + +import celery +from celery.app.task import Task as ModernTask +from celery.task.base import Task as CompatTask + +from celery.tests.utils import Case + + +class 
test_MagicModule(Case): + + def test_class_property_set_without_type(self): + self.assertTrue(ModernTask.__dict__['app'].__get__(CompatTask())) + + def test_class_property_set_on_class(self): + self.assertIs(ModernTask.__dict__['app'].__set__(None, None), + ModernTask.__dict__['app']) + + def test_class_property_set(self): + + class X(CompatTask): + pass + + app = celery.Celery(set_as_current=False) + ModernTask.__dict__['app'].__set__(X(), app) + self.assertEqual(X.app, app) + + def test_dir(self): + self.assertTrue(dir(celery.messaging)) + + def test_direct(self): + import sys + prev_celery = sys.modules.pop('celery', None) + prev_task = sys.modules.pop('celery.task', None) + try: + import celery + self.assertTrue(celery.task) + finally: + sys.modules['celery'] = prev_celery + sys.modules['celery.task'] = prev_task + + def test_app_attrs(self): + self.assertEqual(celery.task.control.broadcast, + celery.current_app.control.broadcast) + + def test_decorators_task(self): + @celery.decorators.task + def _test_decorators_task(): + pass + + self.assertTrue(_test_decorators_task.accept_magic_kwargs) + + def test_decorators_periodic_task(self): + @celery.decorators.periodic_task(run_every=3600) + def _test_decorators_ptask(): + pass + + self.assertTrue(_test_decorators_ptask.accept_magic_kwargs) diff --git a/awx/lib/site-packages/celery/tests/utilities/test_datastructures.py b/awx/lib/site-packages/celery/tests/utilities/test_datastructures.py new file mode 100644 index 0000000000..873be505d7 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/utilities/test_datastructures.py @@ -0,0 +1,313 @@ +from __future__ import absolute_import +from __future__ import with_statement + +from celery.datastructures import ( + ExceptionInfo, + LRUCache, + LimitedSet, + AttributeDict, + DictAttribute, + ConfigurationView, + DependencyGraph, +) +from celery.utils.compat import THREAD_TIMEOUT_MAX +from celery.tests.utils import Case, WhateverIO + + +class Object(object): + pass + + +class test_DictAttribute(Case): + + def test_get_set(self): + x = DictAttribute(Object()) + x['foo'] = 'The quick brown fox' + self.assertEqual(x['foo'], 'The quick brown fox') + self.assertEqual(x['foo'], x.obj.foo) + self.assertEqual(x.get('foo'), 'The quick brown fox') + self.assertIsNone(x.get('bar')) + with self.assertRaises(KeyError): + x['bar'] + + def test_setdefault(self): + x = DictAttribute(Object()) + self.assertEqual(x.setdefault('foo', 'NEW'), 'NEW') + self.assertEqual(x.setdefault('foo', 'XYZ'), 'NEW') + + def test_contains(self): + x = DictAttribute(Object()) + x['foo'] = 1 + self.assertIn('foo', x) + self.assertNotIn('bar', x) + + def test_items(self): + obj = Object() + obj.attr1 = 1 + x = DictAttribute(obj) + x['attr2'] = 2 + self.assertEqual(x['attr1'], 1) + self.assertEqual(x['attr2'], 2) + + +class test_ConfigurationView(Case): + + def setUp(self): + self.view = ConfigurationView({'changed_key': 1, + 'both': 2}, + [{'default_key': 1, + 'both': 1}]) + + def test_setdefault(self): + self.assertEqual(self.view.setdefault('both', 36), 2) + self.assertEqual(self.view.setdefault('new', 36), 36) + + def test_get(self): + self.assertEqual(self.view.get('both'), 2) + sp = object() + self.assertIs(self.view.get('nonexisting', sp), sp) + + def test_update(self): + changes = dict(self.view.changes) + self.view.update(a=1, b=2, c=3) + self.assertDictEqual(self.view.changes, + dict(changes, a=1, b=2, c=3)) + + def test_contains(self): + self.assertIn('changed_key', self.view) + self.assertIn('default_key', self.view) + 
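A self-contained sketch of the precedence rules these ConfigurationView assertions cover, assuming the vendored celery 3.0.x (fresh objects here, not the fixture above): changes shadow defaults, and writes land in the changes mapping.

    from celery.datastructures import ConfigurationView

    view = ConfigurationView({'both': 2}, [{'both': 1, 'default_key': 1}])
    assert view['both'] == 2           # a changed key shadows the default
    assert view['default_key'] == 1    # unknown keys fall back to defaults
    view['new'] = 3                    # writes go to the changes dict
    assert view.changes['new'] == 3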
self.assertNotIn('new', self.view) + + def test_repr(self): + self.assertIn('changed_key', repr(self.view)) + self.assertIn('default_key', repr(self.view)) + + def test_iter(self): + expected = {'changed_key': 1, + 'default_key': 1, + 'both': 2} + self.assertDictEqual(dict(self.view.items()), expected) + self.assertItemsEqual(list(iter(self.view)), + expected.keys()) + self.assertItemsEqual(self.view.keys(), expected.keys()) + self.assertItemsEqual(self.view.values(), expected.values()) + + def test_isa_mapping(self): + from collections import Mapping + self.assertTrue(issubclass(ConfigurationView, Mapping)) + + def test_isa_mutable_mapping(self): + from collections import MutableMapping + self.assertTrue(issubclass(ConfigurationView, MutableMapping)) + + +class test_ExceptionInfo(Case): + + def test_exception_info(self): + + try: + raise LookupError('The quick brown fox jumps...') + except Exception: + einfo = ExceptionInfo() + self.assertEqual(str(einfo), einfo.traceback) + self.assertIsInstance(einfo.exception, LookupError) + self.assertTupleEqual( + einfo.exception.args, ('The quick brown fox jumps...', ), + ) + self.assertTrue(einfo.traceback) + + r = repr(einfo) + self.assertTrue(r) + + +class test_LimitedSet(Case): + + def test_add(self): + s = LimitedSet(maxlen=2) + s.add('foo') + s.add('bar') + for n in 'foo', 'bar': + self.assertIn(n, s) + s.add('baz') + for n in 'bar', 'baz': + self.assertIn(n, s) + self.assertNotIn('foo', s) + + def test_iter(self): + s = LimitedSet(maxlen=2) + items = 'foo', 'bar' + for item in items: + s.add(item) + l = list(iter(s)) + for item in items: + self.assertIn(item, l) + + def test_repr(self): + s = LimitedSet(maxlen=2) + items = 'foo', 'bar' + for item in items: + s.add(item) + self.assertIn('LimitedSet(', repr(s)) + + def test_clear(self): + s = LimitedSet(maxlen=2) + s.add('foo') + s.add('bar') + self.assertEqual(len(s), 2) + s.clear() + self.assertFalse(s) + + def test_update(self): + s1 = LimitedSet(maxlen=2) + s1.add('foo') + s1.add('bar') + + s2 = LimitedSet(maxlen=2) + s2.update(s1) + self.assertItemsEqual(list(s2), ['foo', 'bar']) + + s2.update(['bla']) + self.assertItemsEqual(list(s2), ['bla', 'bar']) + + s2.update(['do', 're']) + self.assertItemsEqual(list(s2), ['do', 're']) + + def test_as_dict(self): + s = LimitedSet(maxlen=2) + s.add('foo') + self.assertIsInstance(s.as_dict(), dict) + + +class test_LRUCache(Case): + + def test_expires(self): + limit = 100 + x = LRUCache(limit=limit) + slots = list(xrange(limit * 2)) + for i in slots: + x[i] = i + self.assertListEqual(x.keys(), list(slots[limit:])) + + def test_update_expires(self): + limit = 100 + x = LRUCache(limit=limit) + slots = list(xrange(limit * 2)) + for i in slots: + x.update({i: i}) + + self.assertListEqual(list(x.keys()), list(slots[limit:])) + + def test_least_recently_used(self): + x = LRUCache(3) + + x[1], x[2], x[3] = 1, 2, 3 + self.assertEqual(x.keys(), [1, 2, 3]) + + x[4], x[5] = 4, 5 + self.assertEqual(x.keys(), [3, 4, 5]) + + # access 3, which makes it the last used key. 
+ x[3] + x[6] = 6 + self.assertEqual(x.keys(), [5, 3, 6]) + + x[7] = 7 + self.assertEqual(x.keys(), [3, 6, 7]) + + def assertSafeIter(self, method, interval=0.01, size=10000): + from threading import Thread, Event + from time import sleep + x = LRUCache(size) + x.update(zip(xrange(size), xrange(size))) + + class Burglar(Thread): + + def __init__(self, cache): + self.cache = cache + self._is_shutdown = Event() + self._is_stopped = Event() + Thread.__init__(self) + + def run(self): + while not self._is_shutdown.isSet(): + try: + self.cache.data.popitem(last=False) + except KeyError: + break + self._is_stopped.set() + + def stop(self): + self._is_shutdown.set() + self._is_stopped.wait() + self.join(THREAD_TIMEOUT_MAX) + + burglar = Burglar(x) + burglar.start() + try: + for _ in getattr(x, method)(): + sleep(0.0001) + finally: + burglar.stop() + + def test_safe_to_remove_while_iteritems(self): + self.assertSafeIter('iteritems') + + def test_safe_to_remove_while_keys(self): + self.assertSafeIter('keys') + + def test_safe_to_remove_while_itervalues(self): + self.assertSafeIter('itervalues') + + def test_items(self): + c = LRUCache() + c.update(a=1, b=2, c=3) + self.assertTrue(c.items()) + + +class test_AttributeDict(Case): + + def test_getattr__setattr(self): + x = AttributeDict({'foo': 'bar'}) + self.assertEqual(x['foo'], 'bar') + with self.assertRaises(AttributeError): + x.bar + x.bar = 'foo' + self.assertEqual(x['bar'], 'foo') + + +class test_DependencyGraph(Case): + + def graph1(self): + return DependencyGraph([ + ('A', []), + ('B', []), + ('C', ['A']), + ('D', ['C', 'B']), + ]) + + def test_repr(self): + self.assertTrue(repr(self.graph1())) + + def test_topsort(self): + order = self.graph1().topsort() + # C must start before D + self.assertLess(order.index('C'), order.index('D')) + # and B must start before D + self.assertLess(order.index('B'), order.index('D')) + # and A must start before C + self.assertLess(order.index('A'), order.index('C')) + + def test_edges(self): + self.assertListEqual(list(self.graph1().edges()), + ['C', 'D']) + + def test_items(self): + self.assertDictEqual( + dict(self.graph1().items()), + {'A': [], 'B': [], 'C': ['A'], 'D': ['C', 'B']}, + ) + + def test_to_dot(self): + s = WhateverIO() + self.graph1().to_dot(s) + self.assertTrue(s.getvalue()) diff --git a/awx/lib/site-packages/celery/tests/utilities/test_dispatcher.py b/awx/lib/site-packages/celery/tests/utilities/test_dispatcher.py new file mode 100644 index 0000000000..159351979b --- /dev/null +++ b/awx/lib/site-packages/celery/tests/utilities/test_dispatcher.py @@ -0,0 +1,139 @@ +from __future__ import absolute_import + + +import gc +import sys +import time + +from celery.utils.dispatch import Signal +from celery.tests.utils import Case + + +if sys.platform.startswith('java'): + + def garbage_collect(): + # Some JVM GCs will execute finalizers in a different thread, meaning + # we need to wait for that to complete before we go on looking for the + # effects of that. + gc.collect() + time.sleep(0.1) + +elif hasattr(sys, 'pypy_version_info'): + + def garbage_collect(): # noqa + # Collecting weakreferences can take two collections on PyPy. 
+ gc.collect() + gc.collect() +else: + + def garbage_collect(): # noqa + gc.collect() + + +def receiver_1_arg(val, **kwargs): + return val + + +class Callable(object): + + def __call__(self, val, **kwargs): + return val + + def a(self, val, **kwargs): + return val + +a_signal = Signal(providing_args=['val']) + + +class DispatcherTests(Case): + """Test suite for dispatcher (barely started)""" + + def _testIsClean(self, signal): + """Assert that everything has been cleaned up automatically""" + self.assertEqual(signal.receivers, []) + + # force cleanup just in case + signal.receivers = [] + + def testExact(self): + a_signal.connect(receiver_1_arg, sender=self) + expected = [(receiver_1_arg, 'test')] + result = a_signal.send(sender=self, val='test') + self.assertEqual(result, expected) + a_signal.disconnect(receiver_1_arg, sender=self) + self._testIsClean(a_signal) + + def testIgnoredSender(self): + a_signal.connect(receiver_1_arg) + expected = [(receiver_1_arg, 'test')] + result = a_signal.send(sender=self, val='test') + self.assertEqual(result, expected) + a_signal.disconnect(receiver_1_arg) + self._testIsClean(a_signal) + + def testGarbageCollected(self): + a = Callable() + a_signal.connect(a.a, sender=self) + expected = [] + del a + garbage_collect() + result = a_signal.send(sender=self, val='test') + self.assertEqual(result, expected) + self._testIsClean(a_signal) + + def testMultipleRegistration(self): + a = Callable() + a_signal.connect(a) + a_signal.connect(a) + a_signal.connect(a) + a_signal.connect(a) + a_signal.connect(a) + a_signal.connect(a) + result = a_signal.send(sender=self, val='test') + self.assertEqual(len(result), 1) + self.assertEqual(len(a_signal.receivers), 1) + del a + del result + garbage_collect() + self._testIsClean(a_signal) + + def testUidRegistration(self): + + def uid_based_receiver_1(**kwargs): + pass + + def uid_based_receiver_2(**kwargs): + pass + + a_signal.connect(uid_based_receiver_1, dispatch_uid='uid') + a_signal.connect(uid_based_receiver_2, dispatch_uid='uid') + self.assertEqual(len(a_signal.receivers), 1) + a_signal.disconnect(dispatch_uid='uid') + self._testIsClean(a_signal) + + def testRobust(self): + """Test the sendRobust function""" + + def fails(val, **kwargs): + raise ValueError('this') + + a_signal.connect(fails) + result = a_signal.send_robust(sender=self, val='test') + err = result[0][1] + self.assertTrue(isinstance(err, ValueError)) + self.assertEqual(err.args, ('this',)) + a_signal.disconnect(fails) + self._testIsClean(a_signal) + + def testDisconnection(self): + receiver_1 = Callable() + receiver_2 = Callable() + receiver_3 = Callable() + a_signal.connect(receiver_1) + a_signal.connect(receiver_2) + a_signal.connect(receiver_3) + a_signal.disconnect(receiver_1) + del receiver_2 + garbage_collect() + a_signal.disconnect(receiver_3) + self._testIsClean(a_signal) diff --git a/awx/lib/site-packages/celery/tests/utilities/test_encoding.py b/awx/lib/site-packages/celery/tests/utilities/test_encoding.py new file mode 100644 index 0000000000..a7bd28a411 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/utilities/test_encoding.py @@ -0,0 +1,36 @@ +from __future__ import absolute_import + +import sys + +from nose import SkipTest + +from celery.utils import encoding +from celery.tests.utils import Case + + +class test_encoding(Case): + + def test_safe_str(self): + self.assertTrue(encoding.safe_str(object())) + self.assertTrue(encoding.safe_str('foo')) + self.assertTrue(encoding.safe_str(u'foo')) + + def test_safe_str_UnicodeDecodeError(self): 
+ if sys.version_info >= (3, 0): + raise SkipTest('py3k: not relevant') + + class foo(unicode): + + def encode(self, *args, **kwargs): + raise UnicodeDecodeError('foo') + + self.assertIn('<Unrepresentable', encoding.safe_str(foo())) QUEUE_FORMAT1 = """.> queue1: exchange:exchange1(type1) binding:bind1""" +QUEUE_FORMAT2 = """.> queue2: exchange:exchange2(type2) binding:bind2""" + + +class test_Info(Case): + + def test_textindent(self): + self.assertEqual(indent(RANDTEXT, 4), RANDTEXT_RES) + + def test_format_queues(self): + celery = Celery(set_as_current=False) + celery.amqp.queues = celery.amqp.Queues(QUEUES) + self.assertEqual(sorted(celery.amqp.queues.format().split('\n')), + sorted([QUEUE_FORMAT1, QUEUE_FORMAT2])) diff --git a/awx/lib/site-packages/celery/tests/utilities/test_local.py b/awx/lib/site-packages/celery/tests/utilities/test_local.py new file mode 100644 index 0000000000..b1d2ea6768 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/utilities/test_local.py @@ -0,0 +1,289 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import sys + +from nose import SkipTest + +from celery.local import Proxy, PromiseProxy, maybe_evaluate, try_import + +from celery.tests.utils import Case + + +class test_try_import(Case): + + def test_imports(self): + self.assertTrue(try_import(__name__)) + + def test_when_default(self): + default = object() + self.assertIs(try_import('foobar.awqewqe.asdwqewq', default), default) + + +class test_Proxy(Case): + + def test_std_class_attributes(self): + self.assertEqual(Proxy.__name__, 'Proxy') + self.assertEqual(Proxy.__module__, 'celery.local') + self.assertIsInstance(Proxy.__doc__, str) + + def test_name(self): + + def real(): + """real function""" + return 'REAL' + + x = Proxy(lambda: real, name='xyz') + self.assertEqual(x.__name__, 'xyz') + + y = Proxy(lambda: real) + self.assertEqual(y.__name__, 'real') + + self.assertEqual(x.__doc__, 'real function') + + self.assertEqual(x.__class__, type(real)) + self.assertEqual(x.__dict__, real.__dict__) + self.assertEqual(repr(x), repr(real)) + + def test_nonzero(self): + + class X(object): + + def __nonzero__(self): + return False + + x = Proxy(lambda: X()) + self.assertFalse(x) + + def test_slots(self): + + class X(object): + __slots__ = () + + x = Proxy(X) + with self.assertRaises(AttributeError): + x.__dict__ + + def test_unicode(self): + + class X(object): + + def __unicode__(self): + return u'UNICODE' + + def __repr__(self): + return 'REPR' + + x = Proxy(lambda: X()) + self.assertEqual(unicode(x), u'UNICODE') + del(X.__unicode__) + self.assertEqual(unicode(x), 'REPR') + + def test_dir(self): + if sys.version_info < (2, 6): + raise SkipTest('Not relevant for Py2.5') + + class X(object): + + def __dir__(self): + return ['a', 'b', 'c'] + + x = Proxy(lambda: X()) + self.assertListEqual(dir(x), ['a', 'b', 'c']) + + class Y(object): + + def __dir__(self): + raise RuntimeError() + y = Proxy(lambda: Y()) + self.assertListEqual(dir(y), []) + + def test_getsetdel_attr(self): + if sys.version_info < (2, 6): + raise SkipTest('Not relevant for Py2.5') + + class X(object): + a = 1 + b = 2 + c = 3 + + def __dir__(self): + return ['a', 'b', 'c'] + + v = X() + + x = Proxy(lambda: v) + self.assertListEqual(x.__members__, ['a', 'b', 'c']) + self.assertEqual(x.a, 1) + self.assertEqual(x.b, 2) + self.assertEqual(x.c, 3) + + setattr(x, 'a', 10) + self.assertEqual(x.a, 10) + + del(x.a) + self.assertEqual(x.a, 1) + + def test_dictproxy(self): + v = {} + x = Proxy(lambda: v) + x['foo'] = 42 + self.assertEqual(x['foo'], 42) + self.assertEqual(len(x), 1) +
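A hedged aside on what `Proxy` is doing in these container tests, assuming the vendored celery 3.0.x: every attribute access and operator is forwarded to whatever object the wrapped callable returns.

    from celery.local import Proxy

    underlying = {'foo': 42}
    p = Proxy(lambda: underlying)    # the callable is evaluated per access
    assert p['foo'] == 42
    p['bar'] = 1                     # __setitem__ is forwarded...
    assert underlying['bar'] == 1    # ...so the real dict is mutated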
self.assertIn('foo', x) + del(x['foo']) + with self.assertRaises(KeyError): + x['foo'] + self.assertTrue(iter(x)) + + def test_listproxy(self): + v = [] + x = Proxy(lambda: v) + x.append(1) + x.extend([2, 3, 4]) + self.assertEqual(x[0], 1) + self.assertEqual(x[:-1], [1, 2, 3]) + del(x[-1]) + self.assertEqual(x[:-1], [1, 2]) + x[0] = 10 + self.assertEqual(x[0], 10) + self.assertIn(10, x) + self.assertEqual(len(x), 3) + self.assertTrue(iter(x)) + + def test_int(self): + self.assertEqual(Proxy(lambda: 10) + 1, Proxy(lambda: 11)) + self.assertEqual(Proxy(lambda: 10) - 1, Proxy(lambda: 9)) + self.assertEqual(Proxy(lambda: 10) * 2, Proxy(lambda: 20)) + self.assertEqual(Proxy(lambda: 10) ** 2, Proxy(lambda: 100)) + self.assertEqual(Proxy(lambda: 20) / 2, Proxy(lambda: 10)) + self.assertEqual(Proxy(lambda: 20) // 2, Proxy(lambda: 10)) + self.assertEqual(Proxy(lambda: 11) % 2, Proxy(lambda: 1)) + self.assertEqual(Proxy(lambda: 10) << 2, Proxy(lambda: 40)) + self.assertEqual(Proxy(lambda: 10) >> 2, Proxy(lambda: 2)) + self.assertEqual(Proxy(lambda: 10) ^ 7, Proxy(lambda: 13)) + self.assertEqual(Proxy(lambda: 10) | 40, Proxy(lambda: 42)) + self.assertEqual(~Proxy(lambda: 10), Proxy(lambda: -11)) + self.assertEqual(-Proxy(lambda: 10), Proxy(lambda: -10)) + self.assertEqual(+Proxy(lambda: -10), Proxy(lambda: -10)) + self.assertTrue(Proxy(lambda: 10) < Proxy(lambda: 20)) + self.assertTrue(Proxy(lambda: 20) > Proxy(lambda: 10)) + self.assertTrue(Proxy(lambda: 10) >= Proxy(lambda: 10)) + self.assertTrue(Proxy(lambda: 10) <= Proxy(lambda: 10)) + self.assertTrue(Proxy(lambda: 10) == Proxy(lambda: 10)) + self.assertTrue(Proxy(lambda: 20) != Proxy(lambda: 10)) + + x = Proxy(lambda: 10) + x -= 1 + self.assertEqual(x, 9) + x = Proxy(lambda: 9) + x += 1 + self.assertEqual(x, 10) + x = Proxy(lambda: 10) + x *= 2 + self.assertEqual(x, 20) + x = Proxy(lambda: 20) + x /= 2 + self.assertEqual(x, 10) + x = Proxy(lambda: 10) + x %= 2 + self.assertEqual(x, 0) + x = Proxy(lambda: 10) + x <<= 3 + self.assertEqual(x, 80) + x = Proxy(lambda: 80) + x >>= 4 + self.assertEqual(x, 5) + x = Proxy(lambda: 5) + x ^= 1 + self.assertEqual(x, 4) + x = Proxy(lambda: 4) + x **= 4 + self.assertEqual(x, 256) + x = Proxy(lambda: 256) + x //= 2 + self.assertEqual(x, 128) + x = Proxy(lambda: 128) + x |= 2 + self.assertEqual(x, 130) + x = Proxy(lambda: 130) + x &= 10 + self.assertEqual(x, 2) + + x = Proxy(lambda: 10) + self.assertEqual(type(x.__float__()), float) + self.assertEqual(type(x.__int__()), int) + self.assertEqual(type(x.__long__()), long) + self.assertTrue(hex(x)) + self.assertTrue(oct(x)) + + def test_hash(self): + + class X(object): + + def __hash__(self): + return 1234 + + self.assertEqual(hash(Proxy(lambda: X())), 1234) + + def test_call(self): + + class X(object): + + def __call__(self): + return 1234 + + self.assertEqual(Proxy(lambda: X())(), 1234) + + def test_context(self): + + class X(object): + entered = exited = False + + def __enter__(self): + self.entered = True + return 1234 + + def __exit__(self, *exc_info): + self.exited = True + + v = X() + x = Proxy(lambda: v) + with x as val: + self.assertEqual(val, 1234) + self.assertTrue(x.entered) + self.assertTrue(x.exited) + + def test_reduce(self): + + class X(object): + + def __reduce__(self): + return 123 + + x = Proxy(lambda: X()) + self.assertEqual(x.__reduce__(), 123) + + +class test_PromiseProxy(Case): + + def test_only_evaluated_once(self): + + class X(object): + attr = 123 + evals = 0 + + def __init__(self): + self.__class__.evals += 1 + + p = PromiseProxy(X) + 
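By contrast, `PromiseProxy` memoizes its target. A minimal sketch of the single-evaluation contract this test pins down, assuming the vendored celery 3.0.x; `build` is a hypothetical factory:

    from celery.local import PromiseProxy, maybe_evaluate

    calls = []

    def build():
        calls.append(1)     # record each evaluation of the promise
        return 30

    p = PromiseProxy(build)
    assert maybe_evaluate(p) == 30
    assert maybe_evaluate(p) == 30
    assert len(calls) == 1  # the factory ran exactly once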
+        self.assertEqual(p.attr, 123)
+        self.assertEqual(p.attr, 123)
+        self.assertEqual(X.evals, 1)
+
+    def test_maybe_evaluate(self):
+        x = PromiseProxy(lambda: 30)
+        self.assertEqual(maybe_evaluate(x), 30)
+        self.assertEqual(maybe_evaluate(x), 30)
+
+        self.assertEqual(maybe_evaluate(30), 30)
diff --git a/awx/lib/site-packages/celery/tests/utilities/test_mail.py b/awx/lib/site-packages/celery/tests/utilities/test_mail.py
new file mode 100644
index 0000000000..a47a3d8600
--- /dev/null
+++ b/awx/lib/site-packages/celery/tests/utilities/test_mail.py
@@ -0,0 +1,66 @@
+from __future__ import absolute_import
+
+from mock import Mock, patch
+
+from celery.utils.mail import Message, Mailer
+
+from celery.tests.utils import Case
+
+
+msg = Message(to='george@vandelay.com', sender='elaine@pendant.com',
+              subject="What's up with Jerry?", body='???!')
+
+
+class test_Message(Case):
+
+    def test_repr(self):
+        self.assertTrue(repr(msg))
+
+    def test_str(self):
+        self.assertTrue(str(msg))
+
+
+class test_Mailer(Case):
+
+    def test_send_supports_timeout(self):
+        mailer = Mailer()
+        mailer.supports_timeout = True
+        mailer._send = Mock()
+        mailer.send(msg)
+        mailer._send.assert_called_with(msg, timeout=2)
+
+    @patch('socket.setdefaulttimeout')
+    @patch('socket.getdefaulttimeout')
+    def test_send_no_timeout(self, get, set):
+        mailer = Mailer()
+        mailer.supports_timeout = False
+        mailer._send = Mock()
+        get.return_value = 10
+        mailer.send(msg)
+        get.assert_called_with()
+        sets = set.call_args_list
+        self.assertEqual(sets[0][0], (2, ))
+        self.assertEqual(sets[1][0], (10, ))
+        mailer._send.assert_called_with(msg)
+
+    @patch('smtplib.SMTP_SSL', create=True)
+    def test_send_ssl_tls(self, SMTP_SSL):
+        mailer = Mailer(use_ssl=True, use_tls=True)
+        client = SMTP_SSL.return_value = Mock()
+        mailer._send(msg)
+        self.assertTrue(client.starttls.called)
+        self.assertEqual(client.ehlo.call_count, 2)
+        client.quit.assert_called_with()
+        client.sendmail.assert_called_with(msg.sender, msg.to, str(msg))
+        mailer = Mailer(use_ssl=True, use_tls=True, user='foo',
+                        password='bar')
+        mailer._send(msg)
+        client.login.assert_called_with('foo', 'bar')
+
+    @patch('smtplib.SMTP')
+    def test_send(self, SMTP):
+        client = SMTP.return_value = Mock()
+        mailer = Mailer(use_ssl=False, use_tls=False)
+        mailer._send(msg)
+
+        client.sendmail.assert_called_with(msg.sender, msg.to, str(msg))
diff --git a/awx/lib/site-packages/celery/tests/utilities/test_pickle.py b/awx/lib/site-packages/celery/tests/utilities/test_pickle.py
new file mode 100644
index 0000000000..580286fc46
--- /dev/null
+++ b/awx/lib/site-packages/celery/tests/utilities/test_pickle.py
@@ -0,0 +1,51 @@
+from __future__ import absolute_import
+
+from celery.utils.serialization import pickle
+from celery.tests.utils import Case
+
+
+class RegularException(Exception):
+    pass
+
+
+class ArgOverrideException(Exception):
+
+    def __init__(self, message, status_code=10):
+        self.status_code = status_code
+        Exception.__init__(self, message, status_code)
+
+
+class test_Pickle(Case):
+
+    def test_pickle_regular_exception(self):
+        exc = None
+        try:
+            raise RegularException('RegularException raised')
+        except RegularException, exc_:
+            exc = exc_
+
+        pickled = pickle.dumps({'exception': exc})
+        unpickled = pickle.loads(pickled)
+        exception = unpickled.get('exception')
+        self.assertTrue(exception)
+        self.assertIsInstance(exception, RegularException)
+        self.assertTupleEqual(exception.args, ('RegularException raised', ))
+
+    def test_pickle_arg_override_exception(self):
+
+        exc = None
+        try:
+            raise ArgOverrideException(
+                'ArgOverrideException raised', status_code=100,
+            )
+        except ArgOverrideException, exc_:
+            exc = exc_
+
+        pickled = pickle.dumps({'exception': exc})
+        unpickled = pickle.loads(pickled)
+        exception = unpickled.get('exception')
+        self.assertTrue(exception)
+        self.assertIsInstance(exception, ArgOverrideException)
+        self.assertTupleEqual(exception.args, (
+            'ArgOverrideException raised', 100))
+        self.assertEqual(exception.status_code, 100)
diff --git a/awx/lib/site-packages/celery/tests/utilities/test_platforms.py b/awx/lib/site-packages/celery/tests/utilities/test_platforms.py
new file mode 100644
index 0000000000..b7f22deddf
--- /dev/null
+++ b/awx/lib/site-packages/celery/tests/utilities/test_platforms.py
@@ -0,0 +1,628 @@
+from __future__ import absolute_import
+from __future__ import with_statement
+
+import errno
+import os
+import resource
+import signal
+
+from mock import Mock, patch
+
+from celery import current_app
+from celery import platforms
+from celery.platforms import (
+    get_fdmax,
+    shellsplit,
+    ignore_errno,
+    set_process_title,
+    signals,
+    maybe_drop_privileges,
+    setuid,
+    setgid,
+    initgroups,
+    parse_uid,
+    parse_gid,
+    detached,
+    DaemonContext,
+    create_pidlock,
+    Pidfile,
+    LockFailed,
+    setgroups,
+    _setgroups_hack
+)
+
+from celery.tests.utils import Case, WhateverIO, override_stdouts, mock_open
+
+
+class test_ignore_errno(Case):
+
+    def test_raises_EBADF(self):
+        with ignore_errno('EBADF'):
+            exc = OSError()
+            exc.errno = errno.EBADF
+            raise exc
+
+    def test_otherwise(self):
+        with self.assertRaises(OSError):
+            with ignore_errno('EBADF'):
+                exc = OSError()
+                exc.errno = errno.ENOENT
+                raise exc
+
+
+class test_shellsplit(Case):
+
+    def test_split(self):
+        self.assertEqual(
+            shellsplit("the 'quick' brown fox"),
+            ['the', 'quick', 'brown', 'fox'],
+        )
+
+
+class test_set_process_title(Case):
+
+    def when_no_setps(self):
+        prev, platforms._setproctitle = platforms._setproctitle, None
+        try:
+            set_process_title('foo')
+        finally:
+            platforms._setproctitle = prev
+
+
+class test_Signals(Case):
+
+    @patch('signal.getsignal')
+    def test_getitem(self, getsignal):
+        signals['SIGINT']
+        getsignal.assert_called_with(signal.SIGINT)
+
+    def test_supported(self):
+        self.assertTrue(signals.supported('INT'))
+        self.assertFalse(signals.supported('SIGIMAGINARY'))
+
+    def test_signum(self):
+        self.assertEqual(signals.signum(13), 13)
+        self.assertEqual(signals.signum('INT'), signal.SIGINT)
+        self.assertEqual(signals.signum('SIGINT'), signal.SIGINT)
+        with self.assertRaises(TypeError):
+            signals.signum('int')
+            signals.signum(object())
+
+    @patch('signal.signal')
+    def test_ignore(self, set):
+        signals.ignore('SIGINT')
+        set.assert_called_with(signals.signum('INT'), signals.ignored)
+        signals.ignore('SIGTERM')
+        set.assert_called_with(signals.signum('TERM'), signals.ignored)
+
+    @patch('signal.signal')
+    def test_setitem(self, set):
+        handle = lambda *a: a
+        signals['INT'] = handle
+        set.assert_called_with(signal.SIGINT, handle)
+
+    @patch('signal.signal')
+    def test_setitem_raises(self, set):
+        set.side_effect = ValueError()
+        signals['INT'] = lambda *a: a
+
+
+if not current_app.IS_WINDOWS:
+
+    class test_get_fdmax(Case):
+
+        @patch('resource.getrlimit')
+        def test_when_infinity(self, getrlimit):
+            getrlimit.return_value = [None, resource.RLIM_INFINITY]
+            default = object()
+            self.assertIs(get_fdmax(default), default)
+
+        @patch('resource.getrlimit')
+        def test_when_actual(self, getrlimit):
+            getrlimit.return_value = [None, 13]
self.assertEqual(get_fdmax(None), 13) + + class test_maybe_drop_privileges(Case): + + @patch('celery.platforms.parse_uid') + @patch('pwd.getpwuid') + @patch('celery.platforms.setgid') + @patch('celery.platforms.setuid') + @patch('celery.platforms.initgroups') + def test_with_uid(self, initgroups, setuid, setgid, + getpwuid, parse_uid): + + class pw_struct(object): + pw_gid = 50001 + getpwuid.return_value = pw_struct() + parse_uid.return_value = 5001 + maybe_drop_privileges(uid='user') + parse_uid.assert_called_with('user') + getpwuid.assert_called_with(5001) + setgid.assert_called_with(50001) + initgroups.assert_called_with(5001, 50001) + setuid.assert_called_with(5001) + + @patch('celery.platforms.parse_uid') + @patch('celery.platforms.parse_gid') + @patch('celery.platforms.setgid') + @patch('celery.platforms.setuid') + @patch('celery.platforms.initgroups') + def test_with_guid(self, initgroups, setuid, setgid, + parse_gid, parse_uid): + parse_uid.return_value = 5001 + parse_gid.return_value = 50001 + maybe_drop_privileges(uid='user', gid='group') + parse_uid.assert_called_with('user') + parse_gid.assert_called_with('group') + setgid.assert_called_with(50001) + initgroups.assert_called_with(5001, 50001) + setuid.assert_called_with(5001) + + @patch('celery.platforms.setuid') + @patch('celery.platforms.setgid') + @patch('celery.platforms.parse_gid') + def test_only_gid(self, parse_gid, setgid, setuid): + parse_gid.return_value = 50001 + maybe_drop_privileges(gid='group') + parse_gid.assert_called_with('group') + setgid.assert_called_with(50001) + self.assertFalse(setuid.called) + + class test_setget_uid_gid(Case): + + @patch('celery.platforms.parse_uid') + @patch('os.setuid') + def test_setuid(self, _setuid, parse_uid): + parse_uid.return_value = 5001 + setuid('user') + parse_uid.assert_called_with('user') + _setuid.assert_called_with(5001) + + @patch('celery.platforms.parse_gid') + @patch('os.setgid') + def test_setgid(self, _setgid, parse_gid): + parse_gid.return_value = 50001 + setgid('group') + parse_gid.assert_called_with('group') + _setgid.assert_called_with(50001) + + def test_parse_uid_when_int(self): + self.assertEqual(parse_uid(5001), 5001) + + @patch('pwd.getpwnam') + def test_parse_uid_when_existing_name(self, getpwnam): + + class pwent(object): + pw_uid = 5001 + + getpwnam.return_value = pwent() + self.assertEqual(parse_uid('user'), 5001) + + @patch('pwd.getpwnam') + def test_parse_uid_when_nonexisting_name(self, getpwnam): + getpwnam.side_effect = KeyError('user') + + with self.assertRaises(KeyError): + parse_uid('user') + + def test_parse_gid_when_int(self): + self.assertEqual(parse_gid(50001), 50001) + + @patch('grp.getgrnam') + def test_parse_gid_when_existing_name(self, getgrnam): + + class grent(object): + gr_gid = 50001 + + getgrnam.return_value = grent() + self.assertEqual(parse_gid('group'), 50001) + + @patch('grp.getgrnam') + def test_parse_gid_when_nonexisting_name(self, getgrnam): + getgrnam.side_effect = KeyError('group') + + with self.assertRaises(KeyError): + parse_gid('group') + + class test_initgroups(Case): + + @patch('pwd.getpwuid') + @patch('os.initgroups', create=True) + def test_with_initgroups(self, initgroups_, getpwuid): + getpwuid.return_value = ['user'] + initgroups(5001, 50001) + initgroups_.assert_called_with('user', 50001) + + @patch('celery.platforms.setgroups') + @patch('grp.getgrall') + @patch('pwd.getpwuid') + def test_without_initgroups(self, getpwuid, getgrall, setgroups): + prev = getattr(os, 'initgroups', None) + try: + delattr(os, 
'initgroups') + except AttributeError: + pass + try: + getpwuid.return_value = ['user'] + + class grent(object): + gr_mem = ['user'] + + def __init__(self, gid): + self.gr_gid = gid + + getgrall.return_value = [grent(1), grent(2), grent(3)] + initgroups(5001, 50001) + setgroups.assert_called_with([1, 2, 3]) + finally: + if prev: + os.initgroups = prev + + class test_detached(Case): + + def test_without_resource(self): + prev, platforms.resource = platforms.resource, None + try: + with self.assertRaises(RuntimeError): + detached() + finally: + platforms.resource = prev + + @patch('celery.platforms._create_pidlock') + @patch('celery.platforms.signals') + @patch('celery.platforms.maybe_drop_privileges') + @patch('os.geteuid') + @patch('__builtin__.open') + def test_default(self, open, geteuid, maybe_drop, + signals, pidlock): + geteuid.return_value = 0 + context = detached(uid='user', gid='group') + self.assertIsInstance(context, DaemonContext) + signals.reset.assert_called_with('SIGCLD') + maybe_drop.assert_called_with(uid='user', gid='group') + open.return_value = Mock() + + geteuid.return_value = 5001 + context = detached(uid='user', gid='group', logfile='/foo/bar') + self.assertIsInstance(context, DaemonContext) + self.assertTrue(context.after_chdir) + context.after_chdir() + open.assert_called_with('/foo/bar', 'a') + open.return_value.close.assert_called_with() + + context = detached(pidfile='/foo/bar/pid') + self.assertIsInstance(context, DaemonContext) + self.assertTrue(context.after_chdir) + context.after_chdir() + pidlock.assert_called_with('/foo/bar/pid') + + class test_DaemonContext(Case): + + @patch('os.fork') + @patch('os.setsid') + @patch('os._exit') + @patch('os.chdir') + @patch('os.umask') + @patch('os.close') + @patch('os.open') + @patch('os.dup2') + def test_open(self, dup2, open, close, umask, + chdir, _exit, setsid, fork): + x = DaemonContext(workdir='/opt/workdir') + + fork.return_value = 0 + with x: + self.assertTrue(x._is_open) + with x: + pass + self.assertEqual(fork.call_count, 2) + setsid.assert_called_with() + self.assertFalse(_exit.called) + + chdir.assert_called_with(x.workdir) + umask.assert_called_with(x.umask) + self.assertTrue(dup2.called) + + fork.reset_mock() + fork.return_value = 1 + x = DaemonContext(workdir='/opt/workdir') + with x: + pass + self.assertEqual(fork.call_count, 1) + _exit.assert_called_with(0) + + x = DaemonContext(workdir='/opt/workdir', fake=True) + x._detach = Mock() + with x: + pass + self.assertFalse(x._detach.called) + + class test_Pidfile(Case): + + @patch('celery.platforms.Pidfile') + def test_create_pidlock(self, Pidfile): + p = Pidfile.return_value = Mock() + p.is_locked.return_value = True + p.remove_if_stale.return_value = False + with self.assertRaises(SystemExit): + create_pidlock('/var/pid') + + p.remove_if_stale.return_value = True + ret = create_pidlock('/var/pid') + self.assertIs(ret, p) + + def test_context(self): + p = Pidfile('/var/pid') + p.write_pid = Mock() + p.remove = Mock() + + with p as _p: + self.assertIs(_p, p) + p.write_pid.assert_called_with() + p.remove.assert_called_with() + + def test_acquire_raises_LockFailed(self): + p = Pidfile('/var/pid') + p.write_pid = Mock() + p.write_pid.side_effect = OSError() + + with self.assertRaises(LockFailed): + with p: + pass + + @patch('os.path.exists') + def test_is_locked(self, exists): + p = Pidfile('/var/pid') + exists.return_value = True + self.assertTrue(p.is_locked()) + exists.return_value = False + self.assertFalse(p.is_locked()) + + def test_read_pid(self): + with 
mock_open() as s: + s.write('1816\n') + s.seek(0) + p = Pidfile('/var/pid') + self.assertEqual(p.read_pid(), 1816) + + def test_read_pid_partially_written(self): + with mock_open() as s: + s.write('1816') + s.seek(0) + p = Pidfile('/var/pid') + with self.assertRaises(ValueError): + p.read_pid() + + def test_read_pid_raises_ENOENT(self): + exc = IOError() + exc.errno = errno.ENOENT + with mock_open(side_effect=exc): + p = Pidfile('/var/pid') + self.assertIsNone(p.read_pid()) + + def test_read_pid_raises_IOError(self): + exc = IOError() + exc.errno = errno.EAGAIN + with mock_open(side_effect=exc): + p = Pidfile('/var/pid') + with self.assertRaises(IOError): + p.read_pid() + + def test_read_pid_bogus_pidfile(self): + with mock_open() as s: + s.write('eighteensixteen\n') + s.seek(0) + p = Pidfile('/var/pid') + with self.assertRaises(ValueError): + p.read_pid() + + @patch('os.unlink') + def test_remove(self, unlink): + unlink.return_value = True + p = Pidfile('/var/pid') + p.remove() + unlink.assert_called_with(p.path) + + @patch('os.unlink') + def test_remove_ENOENT(self, unlink): + exc = OSError() + exc.errno = errno.ENOENT + unlink.side_effect = exc + p = Pidfile('/var/pid') + p.remove() + unlink.assert_called_with(p.path) + + @patch('os.unlink') + def test_remove_EACCES(self, unlink): + exc = OSError() + exc.errno = errno.EACCES + unlink.side_effect = exc + p = Pidfile('/var/pid') + p.remove() + unlink.assert_called_with(p.path) + + @patch('os.unlink') + def test_remove_OSError(self, unlink): + exc = OSError() + exc.errno = errno.EAGAIN + unlink.side_effect = exc + p = Pidfile('/var/pid') + with self.assertRaises(OSError): + p.remove() + unlink.assert_called_with(p.path) + + @patch('os.kill') + def test_remove_if_stale_process_alive(self, kill): + p = Pidfile('/var/pid') + p.read_pid = Mock() + p.read_pid.return_value = 1816 + kill.return_value = 0 + self.assertFalse(p.remove_if_stale()) + kill.assert_called_with(1816, 0) + p.read_pid.assert_called_with() + + kill.side_effect = OSError() + kill.side_effect.errno = errno.ENOENT + self.assertFalse(p.remove_if_stale()) + + @patch('os.kill') + def test_remove_if_stale_process_dead(self, kill): + with override_stdouts(): + p = Pidfile('/var/pid') + p.read_pid = Mock() + p.read_pid.return_value = 1816 + p.remove = Mock() + exc = OSError() + exc.errno = errno.ESRCH + kill.side_effect = exc + self.assertTrue(p.remove_if_stale()) + kill.assert_called_with(1816, 0) + p.remove.assert_called_with() + + def test_remove_if_stale_broken_pid(self): + with override_stdouts(): + p = Pidfile('/var/pid') + p.read_pid = Mock() + p.read_pid.side_effect = ValueError() + p.remove = Mock() + + self.assertTrue(p.remove_if_stale()) + p.remove.assert_called_with() + + def test_remove_if_stale_no_pidfile(self): + p = Pidfile('/var/pid') + p.read_pid = Mock() + p.read_pid.return_value = None + p.remove = Mock() + + self.assertTrue(p.remove_if_stale()) + p.remove.assert_called_with() + + @patch('os.fsync') + @patch('os.getpid') + @patch('os.open') + @patch('os.fdopen') + @patch('__builtin__.open') + def test_write_pid(self, open_, fdopen, osopen, getpid, fsync): + getpid.return_value = 1816 + osopen.return_value = 13 + w = fdopen.return_value = WhateverIO() + w.close = Mock() + r = open_.return_value = WhateverIO() + r.write('1816\n') + r.seek(0) + + p = Pidfile('/var/pid') + p.write_pid() + w.seek(0) + self.assertEqual(w.readline(), '1816\n') + self.assertTrue(w.close.called) + getpid.assert_called_with() + osopen.assert_called_with(p.path, platforms.PIDFILE_FLAGS, + 
platforms.PIDFILE_MODE) + fdopen.assert_called_with(13, 'w') + fsync.assert_called_with(13) + open_.assert_called_with(p.path) + + @patch('os.fsync') + @patch('os.getpid') + @patch('os.open') + @patch('os.fdopen') + @patch('__builtin__.open') + def test_write_reread_fails(self, open_, fdopen, + osopen, getpid, fsync): + getpid.return_value = 1816 + osopen.return_value = 13 + w = fdopen.return_value = WhateverIO() + w.close = Mock() + r = open_.return_value = WhateverIO() + r.write('11816\n') + r.seek(0) + + p = Pidfile('/var/pid') + with self.assertRaises(LockFailed): + p.write_pid() + + class test_setgroups(Case): + + @patch('os.setgroups', create=True) + def test_setgroups_hack_ValueError(self, setgroups): + + def on_setgroups(groups): + if len(groups) <= 200: + setgroups.return_value = True + return + raise ValueError() + setgroups.side_effect = on_setgroups + _setgroups_hack(range(400)) + + setgroups.side_effect = ValueError() + with self.assertRaises(ValueError): + _setgroups_hack(range(400)) + + @patch('os.setgroups', create=True) + def test_setgroups_hack_OSError(self, setgroups): + exc = OSError() + exc.errno = errno.EINVAL + + def on_setgroups(groups): + if len(groups) <= 200: + setgroups.return_value = True + return + raise exc + setgroups.side_effect = on_setgroups + + _setgroups_hack(range(400)) + + setgroups.side_effect = exc + with self.assertRaises(OSError): + _setgroups_hack(range(400)) + + exc2 = OSError() + exc.errno = errno.ESRCH + setgroups.side_effect = exc2 + with self.assertRaises(OSError): + _setgroups_hack(range(400)) + + @patch('os.sysconf') + @patch('celery.platforms._setgroups_hack') + def test_setgroups(self, hack, sysconf): + sysconf.return_value = 100 + setgroups(range(400)) + hack.assert_called_with(range(100)) + + @patch('os.sysconf') + @patch('celery.platforms._setgroups_hack') + def test_setgroups_sysconf_raises(self, hack, sysconf): + sysconf.side_effect = ValueError() + setgroups(range(400)) + hack.assert_called_with(range(400)) + + @patch('os.getgroups') + @patch('os.sysconf') + @patch('celery.platforms._setgroups_hack') + def test_setgroups_raises_ESRCH(self, hack, sysconf, getgroups): + sysconf.side_effect = ValueError() + esrch = OSError() + esrch.errno = errno.ESRCH + hack.side_effect = esrch + with self.assertRaises(OSError): + setgroups(range(400)) + + @patch('os.getgroups') + @patch('os.sysconf') + @patch('celery.platforms._setgroups_hack') + def test_setgroups_raises_EPERM(self, hack, sysconf, getgroups): + sysconf.side_effect = ValueError() + eperm = OSError() + eperm.errno = errno.EPERM + hack.side_effect = eperm + getgroups.return_value = range(400) + setgroups(range(400)) + getgroups.assert_called_with() + + getgroups.return_value = [1000] + with self.assertRaises(OSError): + setgroups(range(400)) + getgroups.assert_called_with() diff --git a/awx/lib/site-packages/celery/tests/utilities/test_saferef.py b/awx/lib/site-packages/celery/tests/utilities/test_saferef.py new file mode 100644 index 0000000000..1f5ebba550 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/utilities/test_saferef.py @@ -0,0 +1,79 @@ +from __future__ import absolute_import + +from celery.utils.dispatch.saferef import safe_ref +from celery.tests.utils import Case + + +class Class1(object): + + def x(self): + pass + + +def fun(obj): + pass + + +class Class2(object): + + def __call__(self, obj): + pass + + +class SaferefTests(Case): + + def setUp(self): + ts = [] + ss = [] + for x in xrange(5000): + t = Class1() + ts.append(t) + s = safe_ref(t.x, self._closure) + 
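The saferef setUp starting here builds thousands of weak references to bound methods. A plain weakref.ref cannot track a bound method (each attribute access creates a fresh method object that dies immediately), which is the gap safe_ref exists to fill; for ordinary objects the underlying weak-reference behaviour it builds on looks like this (a sketch of the stdlib primitive, not the BoundMethodWeakref implementation):

    import weakref

    class Owner(object):
        def ping(self):
            return 'pong'

    owner = Owner()
    ref = weakref.ref(owner)   # does not keep owner alive
    assert ref() is owner
    del owner                  # referent collected (CPython refcounting)
    assert ref() is None       # the dead reference answers None

The setUp body continues below.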
ss.append(s) + ts.append(fun) + ss.append(safe_ref(fun, self._closure)) + for x in xrange(30): + t = Class2() + ts.append(t) + s = safe_ref(t, self._closure) + ss.append(s) + self.ts = ts + self.ss = ss + self.closureCount = 0 + + def tearDown(self): + del self.ts + del self.ss + + def testIn(self): + """Test the "in" operator for safe references (cmp)""" + for t in self.ts[:50]: + self.assertTrue(safe_ref(t.x) in self.ss) + + def testValid(self): + """Test that the references are valid (return instance methods)""" + for s in self.ss: + self.assertTrue(s()) + + def testShortCircuit(self): + """Test that creation short-circuits to reuse existing references""" + sd = {} + for s in self.ss: + sd[s] = 1 + for t in self.ts: + if hasattr(t, 'x'): + self.assertIn(safe_ref(t.x), sd) + else: + self.assertIn(safe_ref(t), sd) + + def testRepresentation(self): + """Test that the reference object's representation works + + XXX Doesn't currently check the results, just that no error + is raised + """ + repr(self.ss[-1]) + + def _closure(self, ref): + """Dumb utility mechanism to increment deletion counter""" + self.closureCount += 1 diff --git a/awx/lib/site-packages/celery/tests/utilities/test_serialization.py b/awx/lib/site-packages/celery/tests/utilities/test_serialization.py new file mode 100644 index 0000000000..e76f39871e --- /dev/null +++ b/awx/lib/site-packages/celery/tests/utilities/test_serialization.py @@ -0,0 +1,19 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import sys + +from celery.tests.utils import Case, mask_modules + + +class test_AAPickle(Case): + + def test_no_cpickle(self): + prev = sys.modules.pop('celery.utils.serialization', None) + try: + with mask_modules('cPickle'): + from celery.utils.serialization import pickle + import pickle as orig_pickle + self.assertIs(pickle.dumps, orig_pickle.dumps) + finally: + sys.modules['celery.utils.serialization'] = prev diff --git a/awx/lib/site-packages/celery/tests/utilities/test_term.py b/awx/lib/site-packages/celery/tests/utilities/test_term.py new file mode 100644 index 0000000000..ce1285701d --- /dev/null +++ b/awx/lib/site-packages/celery/tests/utilities/test_term.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import + +from celery.utils import term +from celery.utils.term import colored, fg + +from celery.tests.utils import Case + + +class test_colored(Case): + + def test_colors(self): + colors = ( + ('black', term.BLACK), + ('red', term.RED), + ('green', term.GREEN), + ('yellow', term.YELLOW), + ('blue', term.BLUE), + ('magenta', term.MAGENTA), + ('cyan', term.CYAN), + ('white', term.WHITE), + ) + + for name, key in colors: + self.assertIn(fg(30 + key), str(colored().names[name]('foo'))) + + self.assertTrue(str(colored().bold('f'))) + self.assertTrue(str(colored().underline('f'))) + self.assertTrue(str(colored().blink('f'))) + self.assertTrue(str(colored().reverse('f'))) + self.assertTrue(str(colored().bright('f'))) + self.assertTrue(str(colored().ired('f'))) + self.assertTrue(str(colored().igreen('f'))) + self.assertTrue(str(colored().iyellow('f'))) + self.assertTrue(str(colored().iblue('f'))) + self.assertTrue(str(colored().imagenta('f'))) + self.assertTrue(str(colored().icyan('f'))) + self.assertTrue(str(colored().iwhite('f'))) + self.assertTrue(str(colored().reset('f'))) + + self.assertTrue(str(colored().green(u'∂bar'))) + + self.assertTrue( + colored().red(u'éefoo') + colored().green(u'∂bar')) + + self.assertEqual( + colored().red('foo').no_color(), 'foo') + + 
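The colored() assertions around here look for ANSI Select Graphic Rendition escape sequences, in which the standard foreground colours occupy codes 30-37 and code 0 resets. A sketch of that convention (sgr is an illustrative helper, not the celery.utils.term API):

    def sgr(code):
        # ESC [ <code> m -- Select Graphic Rendition
        return '\x1b[%dm' % code

    BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8)
    print(sgr(30 + RED) + 'alert' + sgr(0))  # fg(30 + key) in the tests

The remaining colour assertions follow.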
self.assertTrue( + repr(colored().blue(u'åfoo'))) + + self.assertEqual(repr(colored()), "''") + + c = colored() + s = c.red('foo', c.blue('bar'), c.green('baz')) + self.assertTrue(s.no_color()) + + c._fold_no_color(s, u'øfoo') + c._fold_no_color(u'fooå', s) + + c = colored().red(u'åfoo') + self.assertEqual( + c._add(c, u'baræ'), + u'\x1b[1;31m\xe5foo\x1b[0mbar\xe6', + ) + + c2 = colored().blue(u'ƒƒz') + c3 = c._add(c, c2) + self.assertEqual( + c3, + u'\x1b[1;31m\xe5foo\x1b[0m\x1b[1;34m\u0192\u0192z\x1b[0m', + ) diff --git a/awx/lib/site-packages/celery/tests/utilities/test_timer2.py b/awx/lib/site-packages/celery/tests/utilities/test_timer2.py new file mode 100644 index 0000000000..6a6dd45183 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/utilities/test_timer2.py @@ -0,0 +1,186 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import sys +import time + +from kombu.tests.utils import redirect_stdouts +from mock import Mock, patch + +import celery.utils.timer2 as timer2 + +from celery.tests.utils import Case, skip_if_quick + + +class test_Entry(Case): + + def test_call(self): + scratch = [None] + + def timed(x, y, moo='foo'): + scratch[0] = (x, y, moo) + + tref = timer2.Entry(timed, (4, 4), {'moo': 'baz'}) + tref() + + self.assertTupleEqual(scratch[0], (4, 4, 'baz')) + + def test_cancel(self): + tref = timer2.Entry(lambda x: x, (1, ), {}) + tref.cancel() + self.assertTrue(tref.cancelled) + + +class test_Schedule(Case): + + def test_supports_Timer_interface(self): + x = timer2.Schedule() + x.stop() + + tref = Mock() + x.cancel(tref) + tref.cancel.assert_called_with() + + def test_handle_error(self): + from datetime import datetime + to_timestamp = timer2.to_timestamp + scratch = [None] + + def _overflow(x): + raise OverflowError(x) + + def on_error(exc_info): + scratch[0] = exc_info + + s = timer2.Schedule(on_error=on_error) + + timer2.to_timestamp = _overflow + try: + s.enter(timer2.Entry(lambda: None, (), {}), + eta=datetime.now()) + s.enter(timer2.Entry(lambda: None, (), {}), + eta=None) + s.on_error = None + with self.assertRaises(OverflowError): + s.enter(timer2.Entry(lambda: None, (), {}), + eta=datetime.now()) + finally: + timer2.to_timestamp = to_timestamp + + exc = scratch[0] + self.assertIsInstance(exc, OverflowError) + + +class test_Timer(Case): + + @skip_if_quick + def test_enter_after(self): + t = timer2.Timer() + try: + done = [False] + + def set_done(): + done[0] = True + + t.apply_after(300, set_done) + mss = 0 + while not done[0]: + if mss >= 2.0: + raise Exception('test timed out') + time.sleep(0.1) + mss += 0.1 + finally: + t.stop() + + def test_exit_after(self): + t = timer2.Timer() + t.apply_after = Mock() + t.exit_after(300, priority=10) + t.apply_after.assert_called_with(300, sys.exit, 10) + + def test_apply_interval(self): + t = timer2.Timer() + try: + t.schedule.enter_after = Mock() + + myfun = Mock() + myfun.__name__ = 'myfun' + t.apply_interval(30, myfun) + + self.assertEqual(t.schedule.enter_after.call_count, 1) + args1, _ = t.schedule.enter_after.call_args_list[0] + msec1, tref1, _ = args1 + self.assertEqual(msec1, 30) + tref1() + + self.assertEqual(t.schedule.enter_after.call_count, 2) + args2, _ = t.schedule.enter_after.call_args_list[1] + msec2, tref2, _ = args2 + self.assertEqual(msec2, 30) + tref2.cancelled = True + tref2() + + self.assertEqual(t.schedule.enter_after.call_count, 2) + finally: + t.stop() + + @patch('celery.utils.timer2.logger') + def test_apply_entry_error_handled(self, logger): + t = timer2.Timer() + 
t.schedule.on_error = None + + fun = Mock() + fun.side_effect = ValueError() + + t.schedule.apply_entry(fun) + self.assertTrue(logger.error.called) + + @redirect_stdouts + def test_apply_entry_error_not_handled(self, stdout, stderr): + t = timer2.Timer() + t.schedule.on_error = Mock() + + fun = Mock() + fun.side_effect = ValueError() + t.schedule.apply_entry(fun) + fun.assert_called_with() + self.assertFalse(stderr.getvalue()) + + @patch('os._exit') + def test_thread_crash(self, _exit): + t = timer2.Timer() + t._next_entry = Mock() + t._next_entry.side_effect = OSError(131) + t.run() + _exit.assert_called_with(1) + + def test_gc_race_lost(self): + t = timer2.Timer() + t._is_stopped.set = Mock() + t._is_stopped.set.side_effect = TypeError() + + t._is_shutdown.set() + t.run() + t._is_stopped.set.assert_called_with() + + def test_to_timestamp(self): + self.assertIs(timer2.to_timestamp(3.13), 3.13) + + def test_test_enter(self): + t = timer2.Timer() + t._do_enter = Mock() + e = Mock() + t.enter(e, 13, 0) + t._do_enter.assert_called_with('enter', e, 13, priority=0) + + def test_test_enter_after(self): + t = timer2.Timer() + t._do_enter = Mock() + t.enter_after() + t._do_enter.assert_called_with('enter_after') + + def test_cancel(self): + t = timer2.Timer() + tref = Mock() + t.cancel(tref) + tref.cancel.assert_called_with() diff --git a/awx/lib/site-packages/celery/tests/utilities/test_timeutils.py b/awx/lib/site-packages/celery/tests/utilities/test_timeutils.py new file mode 100644 index 0000000000..ed49eac76a --- /dev/null +++ b/awx/lib/site-packages/celery/tests/utilities/test_timeutils.py @@ -0,0 +1,91 @@ +from __future__ import absolute_import +from __future__ import with_statement + +from datetime import datetime, timedelta + +from mock import Mock + +from celery.exceptions import ImproperlyConfigured +from celery.utils import timeutils +from celery.utils.timeutils import timezone +from celery.tests.utils import Case + + +class test_timeutils(Case): + + def test_delta_resolution(self): + D = timeutils.delta_resolution + + dt = datetime(2010, 3, 30, 11, 50, 58, 41065) + deltamap = ((timedelta(days=2), datetime(2010, 3, 30, 0, 0)), + (timedelta(hours=2), datetime(2010, 3, 30, 11, 0)), + (timedelta(minutes=2), datetime(2010, 3, 30, 11, 50)), + (timedelta(seconds=2), dt)) + for delta, shoulda in deltamap: + self.assertEqual(D(dt, delta), shoulda) + + def test_timedelta_seconds(self): + deltamap = ((timedelta(seconds=1), 1), + (timedelta(seconds=27), 27), + (timedelta(minutes=3), 3 * 60), + (timedelta(hours=4), 4 * 60 * 60), + (timedelta(days=3), 3 * 86400)) + for delta, seconds in deltamap: + self.assertEqual(timeutils.timedelta_seconds(delta), seconds) + + def test_timedelta_seconds_returns_0_on_negative_time(self): + delta = timedelta(days=-2) + self.assertEqual(timeutils.timedelta_seconds(delta), 0) + + def test_humanize_seconds(self): + t = ((4 * 60 * 60 * 24, '4.00 days'), + (1 * 60 * 60 * 24, '1.00 day'), + (4 * 60 * 60, '4.00 hours'), + (1 * 60 * 60, '1.00 hour'), + (4 * 60, '4.00 minutes'), + (1 * 60, '1.00 minute'), + (4, '4.00 seconds'), + (1, '1.00 second'), + (4.3567631221, '4.36 seconds'), + (0, 'now')) + + for seconds, human in t: + self.assertEqual(timeutils.humanize_seconds(seconds), human) + + self.assertEqual(timeutils.humanize_seconds(4, prefix='about '), + 'about 4.00 seconds') + + def test_maybe_iso8601_datetime(self): + now = datetime.now() + self.assertIs(timeutils.maybe_iso8601(now), now) + + def test_maybe_timedelta(self): + D = timeutils.maybe_timedelta + + for i in 
(30, 30.6): + self.assertEqual(D(i), timedelta(seconds=i)) + + self.assertEqual(D(timedelta(days=2)), timedelta(days=2)) + + def test_remaining_relative(self): + timeutils.remaining(datetime.utcnow(), timedelta(hours=1), + relative=True) + + +class test_timezone(Case): + + def test_get_timezone_with_pytz(self): + prev, timeutils.pytz = timeutils.pytz, Mock() + try: + self.assertTrue(timezone.get_timezone('UTC')) + finally: + timeutils.pytz = prev + + def test_get_timezone_without_pytz(self): + prev, timeutils.pytz = timeutils.pytz, None + try: + self.assertTrue(timezone.get_timezone('UTC')) + with self.assertRaises(ImproperlyConfigured): + timezone.get_timezone('Europe/Oslo') + finally: + timeutils.pytz = prev diff --git a/awx/lib/site-packages/celery/tests/utilities/test_utils.py b/awx/lib/site-packages/celery/tests/utilities/test_utils.py new file mode 100644 index 0000000000..4446d91e5d --- /dev/null +++ b/awx/lib/site-packages/celery/tests/utilities/test_utils.py @@ -0,0 +1,165 @@ +from __future__ import absolute_import +from __future__ import with_statement + +from kombu.utils.functional import promise + +from mock import patch + +from celery import utils +from celery.utils import text +from celery.utils import functional +from celery.utils.functional import mpromise, maybe_list +from celery.utils.threads import bgThread +from celery.tests.utils import Case + + +def double(x): + return x * 2 + + +class test_bgThread_interface(Case): + + def test_body(self): + x = bgThread() + with self.assertRaises(NotImplementedError): + x.body() + + +class test_chunks(Case): + + def test_chunks(self): + + # n == 2 + x = utils.chunks(iter([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), 2) + self.assertListEqual( + list(x), + [[0, 1], [2, 3], [4, 5], [6, 7], [8, 9], [10]], + ) + + # n == 3 + x = utils.chunks(iter([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), 3) + self.assertListEqual( + list(x), + [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10]], + ) + + # n == 2 (exact) + x = utils.chunks(iter([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), 2) + self.assertListEqual( + list(x), + [[0, 1], [2, 3], [4, 5], [6, 7], [8, 9]], + ) + + +class test_utils(Case): + + def test_is_iterable(self): + for a in 'f', ['f'], ('f', ), {'f': 'f'}: + self.assertTrue(utils.is_iterable(a)) + for b in object(), 1: + self.assertFalse(utils.is_iterable(b)) + + def test_padlist(self): + self.assertListEqual( + functional.padlist(['George', 'Costanza', 'NYC'], 3), + ['George', 'Costanza', 'NYC'], + ) + self.assertListEqual( + functional.padlist(['George', 'Costanza'], 3), + ['George', 'Costanza', None], + ) + self.assertListEqual( + functional.padlist(['George', 'Costanza', 'NYC'], 4, + default='Earth'), + ['George', 'Costanza', 'NYC', 'Earth'], + ) + + def test_firstmethod_AttributeError(self): + self.assertIsNone(functional.firstmethod('foo')([object()])) + + def test_firstmethod_promises(self): + + class A(object): + + def __init__(self, value=None): + self.value = value + + def m(self): + return self.value + + self.assertEqual('four', functional.firstmethod('m')([ + A(), A(), A(), A('four'), A('five')])) + self.assertEqual('four', functional.firstmethod('m')([ + A(), A(), A(), promise(lambda: A('four')), A('five')])) + + def test_first(self): + iterations = [0] + + def predicate(value): + iterations[0] += 1 + if value == 5: + return True + return False + + self.assertEqual(5, functional.first(predicate, xrange(10))) + self.assertEqual(iterations[0], 6) + + iterations[0] = 0 + self.assertIsNone(functional.first(predicate, xrange(10, 20))) + 
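functional.first above must call the predicate only until the first hit (six calls to find 5) and exhaust the iterable on a miss, as the call-count assertion continuing just below verifies. A minimal sketch of that short-circuit scan (first_sketch is an illustrative name):

    def first_sketch(predicate, it):
        # Stop at the first satisfying item; None if nothing matches.
        for item in it:
            if predicate(item):
                return item
        return None

    assert first_sketch(lambda x: x == 5, range(10)) == 5
    assert first_sketch(lambda x: x > 99, range(10)) is None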
self.assertEqual(iterations[0], 10) + + def test_truncate_text(self): + self.assertEqual(text.truncate('ABCDEFGHI', 3), 'ABC...') + self.assertEqual(text.truncate('ABCDEFGHI', 10), 'ABCDEFGHI') + + def test_abbr(self): + self.assertEqual(text.abbr(None, 3), '???') + self.assertEqual(text.abbr('ABCDEFGHI', 6), 'ABC...') + self.assertEqual(text.abbr('ABCDEFGHI', 20), 'ABCDEFGHI') + self.assertEqual(text.abbr('ABCDEFGHI', 6, None), 'ABCDEF') + + def test_abbrtask(self): + self.assertEqual(text.abbrtask(None, 3), '???') + self.assertEqual( + text.abbrtask('feeds.tasks.refresh', 10), + '[.]refresh', + ) + self.assertEqual( + text.abbrtask('feeds.tasks.refresh', 30), + 'feeds.tasks.refresh', + ) + + def test_pretty(self): + self.assertTrue(text.pretty(('a', 'b', 'c'))) + + def test_cached_property(self): + + def fun(obj): + return fun.value + + x = utils.cached_property(fun) + self.assertIs(x.__get__(None), x) + self.assertIs(x.__set__(None, None), x) + self.assertIs(x.__delete__(None), x) + + def test_maybe_list(self): + self.assertEqual(maybe_list(1), [1]) + self.assertEqual(maybe_list([1]), [1]) + self.assertIsNone(maybe_list(None)) + + @patch('warnings.warn') + def test_warn_deprecated(self, warn): + utils.warn_deprecated('Foo') + self.assertTrue(warn.called) + + +class test_mpromise(Case): + + def test_is_memoized(self): + + it = iter(xrange(20, 30)) + p = mpromise(it.next) + self.assertEqual(p(), 20) + self.assertTrue(p.evaluated) + self.assertEqual(p(), 20) + self.assertEqual(repr(p), '20') diff --git a/awx/lib/site-packages/celery/tests/utils.py b/awx/lib/site-packages/celery/tests/utils.py new file mode 100644 index 0000000000..37fd8c575d --- /dev/null +++ b/awx/lib/site-packages/celery/tests/utils.py @@ -0,0 +1,594 @@ +from __future__ import absolute_import +from __future__ import with_statement + +try: + import unittest # noqa + unittest.skip + from unittest.util import safe_repr, unorderable_list_difference +except AttributeError: + import unittest2 as unittest # noqa + from unittest2.util import safe_repr, unorderable_list_difference # noqa + +import importlib +import logging +import os +import platform +import re +import sys +import time +import warnings +try: + import __builtin__ as builtins +except ImportError: # py3k + import builtins # noqa + +from contextlib import contextmanager +from functools import partial, wraps +from types import ModuleType + +import mock +from nose import SkipTest +from kombu.log import NullHandler +from kombu.utils import nested + +from celery.app import app_or_default +from celery.utils.compat import WhateverIO +from celery.utils.functional import noop + +from .compat import catch_warnings + + +class Mock(mock.Mock): + + def __init__(self, *args, **kwargs): + attrs = kwargs.pop('attrs', None) or {} + super(Mock, self).__init__(*args, **kwargs) + for attr_name, attr_value in attrs.items(): + setattr(self, attr_name, attr_value) + + +def skip_unless_module(module): + + def _inner(fun): + + @wraps(fun) + def __inner(*args, **kwargs): + try: + importlib.import_module(module) + except ImportError: + raise SkipTest('Does not have %s' % (module, )) + + return fun(*args, **kwargs) + + return __inner + return _inner + + +# -- adds assertWarns from recent unittest2, not in Python 2.7. 
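The context-manager classes that follow backport assertWarns for the Python 2.6/2.7 unittest used here. From a Case subclass it reads like the later stdlib version; a hypothetical test body, not one of the tests in this patch:

    import warnings

    def test_emits_deprecation(self):     # illustrative method on a Case
        with self.assertWarns(DeprecationWarning):
            warnings.warn('old api', DeprecationWarning)

The backport itself follows.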
+ +class _AssertRaisesBaseContext(object): + + def __init__(self, expected, test_case, callable_obj=None, + expected_regex=None): + self.expected = expected + self.failureException = test_case.failureException + self.obj_name = None + if isinstance(expected_regex, basestring): + expected_regex = re.compile(expected_regex) + self.expected_regex = expected_regex + + +class _AssertWarnsContext(_AssertRaisesBaseContext): + """A context manager used to implement TestCase.assertWarns* methods.""" + + def __enter__(self): + # The __warningregistry__'s need to be in a pristine state for tests + # to work properly. + warnings.resetwarnings() + for v in sys.modules.values(): + if getattr(v, '__warningregistry__', None): + v.__warningregistry__ = {} + self.warnings_manager = catch_warnings(record=True) + self.warnings = self.warnings_manager.__enter__() + warnings.simplefilter('always', self.expected) + return self + + def __exit__(self, exc_type, exc_value, tb): + self.warnings_manager.__exit__(exc_type, exc_value, tb) + if exc_type is not None: + # let unexpected exceptions pass through + return + try: + exc_name = self.expected.__name__ + except AttributeError: + exc_name = str(self.expected) + first_matching = None + for m in self.warnings: + w = m.message + if not isinstance(w, self.expected): + continue + if first_matching is None: + first_matching = w + if (self.expected_regex is not None and + not self.expected_regex.search(str(w))): + continue + # store warning for later retrieval + self.warning = w + self.filename = m.filename + self.lineno = m.lineno + return + # Now we simply try to choose a helpful failure message + if first_matching is not None: + raise self.failureException( + '%r does not match %r' % ( + self.expected_regex.pattern, str(first_matching))) + if self.obj_name: + raise self.failureException( + '%s not triggered by %s' % (exc_name, self.obj_name)) + else: + raise self.failureException('%s not triggered' % exc_name) + + +class Case(unittest.TestCase): + + def assertWarns(self, expected_warning): + return _AssertWarnsContext(expected_warning, self, None) + + def assertWarnsRegex(self, expected_warning, expected_regex): + return _AssertWarnsContext(expected_warning, self, + None, expected_regex) + + def assertDictContainsSubset(self, expected, actual, msg=None): + missing, mismatched = [], [] + + for key, value in expected.iteritems(): + if key not in actual: + missing.append(key) + elif value != actual[key]: + mismatched.append('%s, expected: %s, actual: %s' % ( + safe_repr(key), safe_repr(value), + safe_repr(actual[key]))) + + if not (missing or mismatched): + return + + standard_msg = '' + if missing: + standard_msg = 'Missing: %s' % ','.join(map(safe_repr, missing)) + + if mismatched: + if standard_msg: + standard_msg += '; ' + standard_msg += 'Mismatched values: %s' % ( + ','.join(mismatched)) + + self.fail(self._formatMessage(msg, standard_msg)) + + def assertItemsEqual(self, expected_seq, actual_seq, msg=None): + missing = unexpected = None + try: + expected = sorted(expected_seq) + actual = sorted(actual_seq) + except TypeError: + # Unsortable items (example: set(), complex(), ...) 
+ expected = list(expected_seq) + actual = list(actual_seq) + missing, unexpected = unorderable_list_difference( + expected, actual) + else: + return self.assertSequenceEqual(expected, actual, msg=msg) + + errors = [] + if missing: + errors.append( + 'Expected, but missing:\n %s' % (safe_repr(missing), ), + ) + if unexpected: + errors.append( + 'Unexpected, but present:\n %s' % (safe_repr(unexpected), ), + ) + if errors: + standardMsg = '\n'.join(errors) + self.fail(self._formatMessage(msg, standardMsg)) + + +class AppCase(Case): + + def setUp(self): + from celery.app import current_app + from celery.backends.cache import CacheBackend, DummyClient + app = self.app = self._current_app = current_app() + if isinstance(app.backend, CacheBackend): + if isinstance(app.backend.client, DummyClient): + app.backend.client.cache.clear() + app.backend._cache.clear() + self.setup() + + def tearDown(self): + self.teardown() + self._current_app.set_current() + + def setup(self): + pass + + def teardown(self): + pass + + +def get_handlers(logger): + return [h for h in logger.handlers if not isinstance(h, NullHandler)] + + +@contextmanager +def wrap_logger(logger, loglevel=logging.ERROR): + old_handlers = get_handlers(logger) + sio = WhateverIO() + siohandler = logging.StreamHandler(sio) + logger.handlers = [siohandler] + + try: + yield sio + finally: + logger.handlers = old_handlers + + +@contextmanager +def eager_tasks(): + app = app_or_default() + + prev = app.conf.CELERY_ALWAYS_EAGER + app.conf.CELERY_ALWAYS_EAGER = True + try: + yield True + finally: + app.conf.CELERY_ALWAYS_EAGER = prev + + +def with_eager_tasks(fun): + + @wraps(fun) + def _inner(*args, **kwargs): + app = app_or_default() + prev = app.conf.CELERY_ALWAYS_EAGER + app.conf.CELERY_ALWAYS_EAGER = True + try: + return fun(*args, **kwargs) + finally: + app.conf.CELERY_ALWAYS_EAGER = prev + + +def with_environ(env_name, env_value): + + def _envpatched(fun): + + @wraps(fun) + def _patch_environ(*args, **kwargs): + prev_val = os.environ.get(env_name) + os.environ[env_name] = env_value + try: + return fun(*args, **kwargs) + finally: + if prev_val is not None: + os.environ[env_name] = prev_val + + return _patch_environ + return _envpatched + + +def sleepdeprived(module=time): + + def _sleepdeprived(fun): + + @wraps(fun) + def __sleepdeprived(*args, **kwargs): + old_sleep = module.sleep + module.sleep = noop + try: + return fun(*args, **kwargs) + finally: + module.sleep = old_sleep + + return __sleepdeprived + + return _sleepdeprived + + +def skip_if_environ(env_var_name): + + def _wrap_test(fun): + + @wraps(fun) + def _skips_if_environ(*args, **kwargs): + if os.environ.get(env_var_name): + raise SkipTest('SKIP %s: %s set\n' % ( + fun.__name__, env_var_name)) + return fun(*args, **kwargs) + + return _skips_if_environ + + return _wrap_test + + +def skip_if_quick(fun): + return skip_if_environ('QUICKTEST')(fun) + + +def _skip_test(reason, sign): + + def _wrap_test(fun): + + @wraps(fun) + def _skipped_test(*args, **kwargs): + raise SkipTest('%s: %s' % (sign, reason)) + + return _skipped_test + return _wrap_test + + +def todo(reason): + """TODO test decorator.""" + return _skip_test(reason, 'TODO') + + +def skip(reason): + """Skip test decorator.""" + return _skip_test(reason, 'SKIP') + + +def skip_if(predicate, reason): + """Skip test if predicate is :const:`True`.""" + + def _inner(fun): + return predicate and skip(reason)(fun) or fun + + return _inner + + +def skip_unless(predicate, reason): + """Skip test if predicate is :const:`False`.""" + 
return skip_if(not predicate, reason) + + +# Taken from +# http://bitbucket.org/runeh/snippets/src/tip/missing_modules.py +@contextmanager +def mask_modules(*modnames): + """Ban some modules from being importable inside the context + + For example: + + >>> with missing_modules('sys'): + ... try: + ... import sys + ... except ImportError: + ... print 'sys not found' + sys not found + + >>> import sys + >>> sys.version + (2, 5, 2, 'final', 0) + + """ + + realimport = builtins.__import__ + + def myimp(name, *args, **kwargs): + if name in modnames: + raise ImportError('No module named %s' % name) + else: + return realimport(name, *args, **kwargs) + + builtins.__import__ = myimp + try: + yield True + finally: + builtins.__import__ = realimport + + +@contextmanager +def override_stdouts(): + """Override `sys.stdout` and `sys.stderr` with `WhateverIO`.""" + prev_out, prev_err = sys.stdout, sys.stderr + mystdout, mystderr = WhateverIO(), WhateverIO() + sys.stdout = sys.__stdout__ = mystdout + sys.stderr = sys.__stderr__ = mystderr + + try: + yield mystdout, mystderr + finally: + sys.stdout = sys.__stdout__ = prev_out + sys.stderr = sys.__stderr__ = prev_err + + +def patch(module, name, mocked): + module = importlib.import_module(module) + + def _patch(fun): + + @wraps(fun) + def __patched(*args, **kwargs): + prev = getattr(module, name) + setattr(module, name, mocked) + try: + return fun(*args, **kwargs) + finally: + setattr(module, name, prev) + return __patched + return _patch + + +@contextmanager +def replace_module_value(module, name, value=None): + has_prev = hasattr(module, name) + prev = getattr(module, name, None) + if value: + setattr(module, name, value) + else: + try: + delattr(module, name) + except AttributeError: + pass + try: + yield + finally: + if prev is not None: + setattr(sys, name, prev) + if not has_prev: + try: + delattr(module, name) + except AttributeError: + pass +pypy_version = partial( + replace_module_value, sys, 'pypy_version_info', +) +platform_pyimp = partial( + replace_module_value, platform, 'python_implementation', +) + + +@contextmanager +def sys_platform(value): + prev, sys.platform = sys.platform, value + try: + yield + finally: + sys.platform = prev + + +@contextmanager +def reset_modules(*modules): + prev = dict((k, sys.modules.pop(k)) for k in modules if k in sys.modules) + try: + yield + finally: + sys.modules.update(prev) + + +@contextmanager +def patch_modules(*modules): + prev = {} + for mod in modules: + prev[mod], sys.modules[mod] = sys.modules[mod], ModuleType(mod) + try: + yield + finally: + for name, mod in prev.iteritems(): + if mod is None: + sys.modules.pop(name, None) + else: + sys.modules[name] = mod + + +@contextmanager +def mock_module(*names): + prev = {} + + class MockModule(ModuleType): + + def __getattr__(self, attr): + setattr(self, attr, Mock()) + return ModuleType.__getattribute__(self, attr) + + mods = [] + for name in names: + try: + prev[name] = sys.modules[name] + except KeyError: + pass + mod = sys.modules[name] = MockModule(name) + mods.append(mod) + try: + yield mods + finally: + for name in names: + try: + sys.modules[name] = prev[name] + except KeyError: + try: + del(sys.modules[name]) + except KeyError: + pass + + +@contextmanager +def mock_context(mock, typ=Mock): + context = mock.return_value = Mock() + context.__enter__ = typ() + context.__exit__ = typ() + + def on_exit(*x): + if x[0]: + raise x[0], x[1], x[2] + context.__exit__.side_effect = on_exit + context.__enter__.return_value = context + try: + yield context + 
finally: + context.reset() + + +@contextmanager +def mock_open(typ=WhateverIO, side_effect=None): + with mock.patch('__builtin__.open') as open_: + with mock_context(open_) as context: + if side_effect is not None: + context.__enter__.side_effect = side_effect + val = context.__enter__.return_value = typ() + val.__exit__ = Mock() + yield val + + +def patch_many(*targets): + return nested(*[mock.patch(target) for target in targets]) + + +@contextmanager +def patch_settings(app=None, **config): + if app is None: + from celery import current_app + app = current_app + prev = {} + for key, value in config.iteritems(): + try: + prev[key] = getattr(app.conf, key) + except AttributeError: + pass + setattr(app.conf, key, value) + + try: + yield app.conf + finally: + for key, value in prev.iteritems(): + setattr(app.conf, key, value) + + +@contextmanager +def assert_signal_called(signal, **expected): + handler = Mock() + call_handler = partial(handler) + signal.connect(call_handler) + try: + yield handler + finally: + signal.disconnect(call_handler) + handler.assert_called_with(signal=signal, **expected) + + +def skip_if_pypy(fun): + + @wraps(fun) + def _inner(*args, **kwargs): + if getattr(sys, 'pypy_version_info', None): + raise SkipTest('does not work on PyPy') + return fun(*args, **kwargs) + return _inner + + +def skip_if_jython(fun): + + @wraps(fun) + def _inner(*args, **kwargs): + if sys.platform.startswith('java'): + raise SkipTest('does not work on Jython') + return fun(*args, **kwargs) + return _inner diff --git a/awx/lib/site-packages/celery/tests/worker/__init__.py b/awx/lib/site-packages/celery/tests/worker/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/celery/tests/worker/test_autoreload.py b/awx/lib/site-packages/celery/tests/worker/test_autoreload.py new file mode 100644 index 0000000000..38d8c9ed80 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/worker/test_autoreload.py @@ -0,0 +1,259 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import errno +import select +import sys + +from mock import Mock, patch +from time import time + +from celery.worker import autoreload +from celery.worker.autoreload import ( + WorkerComponent, + file_hash, + BaseMonitor, + StatMonitor, + KQueueMonitor, + InotifyMonitor, + default_implementation, + Autoreloader, +) + +from celery.tests.utils import AppCase, Case, mock_open + + +class test_WorkerComponent(AppCase): + + def test_create_threaded(self): + w = Mock() + w.use_eventloop = False + x = WorkerComponent(w) + x.instantiate = Mock() + r = x.create(w) + x.instantiate.assert_called_with(w.autoreloader_cls, w) + self.assertIs(r, w.autoreloader) + + @patch('select.kevent', create=True) + @patch('select.kqueue', create=True) + def test_create_ev(self, kqueue, kevent): + w = Mock() + w.use_eventloop = True + x = WorkerComponent(w) + x.instantiate = Mock() + r = x.create(w) + x.instantiate.assert_called_with(w.autoreloader_cls, w) + self.assertIsNone(r) + w.hub.on_init.append.assert_called_with(w.autoreloader.on_poll_init) + w.hub.on_close.append.assert_called_with(w.autoreloader.on_poll_close) + + +class test_file_hash(Case): + + def test_hash(self): + with mock_open() as a: + a.write('the quick brown fox\n') + a.seek(0) + A = file_hash('foo') + with mock_open() as b: + b.write('the quick brown bar\n') + b.seek(0) + B = file_hash('bar') + self.assertNotEqual(A, B) + + +class test_BaseMonitor(Case): + + def test_start_stop_on_change(self): + x = BaseMonitor(['a', 
'b']) + + with self.assertRaises(NotImplementedError): + x.start() + x.stop() + x.on_change([]) + x._on_change = Mock() + x.on_change('foo') + x._on_change.assert_called_with('foo') + + +class test_StatMonitor(Case): + + @patch('os.stat') + def test_start(self, stat): + + class st(object): + st_mtime = time() + stat.return_value = st() + x = StatMonitor(['a', 'b']) + + def on_is_set(): + if x.shutdown_event.is_set.call_count > 3: + return True + return False + x.shutdown_event = Mock() + x.shutdown_event.is_set.side_effect = on_is_set + + x.start() + x.shutdown_event = Mock() + stat.side_effect = OSError() + x.start() + + +class test_KQueueMontior(Case): + + @patch('select.kqueue', create=True) + @patch('os.close') + def test_stop(self, close, kqueue): + x = KQueueMonitor(['a', 'b']) + x.poller = Mock() + x.filemap['a'] = 10 + x.stop() + x.poller.close.assert_called_with() + close.assert_called_with(10) + + close.side_effect = OSError() + close.side_effect.errno = errno.EBADF + x.stop() + + @patch('kombu.utils.eventio.kqueue', create=True) + @patch('kombu.utils.eventio.kevent', create=True) + @patch('os.open') + @patch('select.kqueue', create=True) + def test_start(self, _kq, osopen, kevent, kqueue): + from kombu.utils import eventio + prev_poll, eventio.poll = eventio.poll, kqueue + prev = {} + flags = ['KQ_FILTER_VNODE', 'KQ_EV_ADD', 'KQ_EV_ENABLE', + 'KQ_EV_CLEAR', 'KQ_NOTE_WRITE', 'KQ_NOTE_EXTEND'] + for i, flag in enumerate(flags): + prev[flag] = getattr(eventio, flag, None) + if not prev[flag]: + setattr(eventio, flag, i) + try: + kq = kqueue.return_value = Mock() + + class ev(object): + ident = 10 + filter = eventio.KQ_FILTER_VNODE + fflags = eventio.KQ_NOTE_WRITE + kq.control.return_value = [ev()] + x = KQueueMonitor(['a']) + osopen.return_value = 10 + calls = [0] + + def on_is_set(): + calls[0] += 1 + if calls[0] > 2: + return True + return False + x.shutdown_event = Mock() + x.shutdown_event.is_set.side_effect = on_is_set + x.start() + finally: + for flag in flags: + if prev[flag]: + setattr(eventio, flag, prev[flag]) + else: + delattr(eventio, flag) + eventio.poll = prev_poll + + +class test_InotifyMonitor(Case): + + @patch('celery.worker.autoreload.pyinotify') + def test_start(self, inotify): + x = InotifyMonitor(['a']) + inotify.IN_MODIFY = 1 + inotify.IN_ATTRIB = 2 + x.start() + + inotify.WatchManager.side_effect = ValueError() + with self.assertRaises(ValueError): + x.start() + x.stop() + + x._on_change = None + x.process_(Mock()) + x._on_change = Mock() + x.process_(Mock()) + self.assertTrue(x._on_change.called) + + +class test_default_implementation(Case): + + @patch('select.kqueue', create=True) + def test_kqueue(self, kqueue): + self.assertEqual(default_implementation(), 'kqueue') + + @patch('celery.worker.autoreload.pyinotify') + def test_inotify(self, pyinotify): + kq = getattr(select, 'kqueue', None) + try: + delattr(select, 'kqueue') + except AttributeError: + pass + platform, sys.platform = sys.platform, 'linux' + try: + self.assertEqual(default_implementation(), 'inotify') + ino, autoreload.pyinotify = autoreload.pyinotify, None + try: + self.assertEqual(default_implementation(), 'stat') + finally: + autoreload.pyinotify = ino + finally: + if kq: + select.kqueue = kq + sys.platform = platform + + +class test_Autoreloader(AppCase): + + @patch('celery.worker.autoreload.file_hash') + def test_start(self, fhash): + x = Autoreloader(Mock(), modules=[__name__]) + x.Monitor = Mock() + mon = x.Monitor.return_value = Mock() + mon.start.side_effect = OSError() + 
mon.start.side_effect.errno = errno.EINTR + x.body() + mon.start.side_effect.errno = errno.ENOENT + with self.assertRaises(OSError): + x.body() + mon.start.side_effect = None + x.body() + + @patch('celery.worker.autoreload.file_hash') + @patch('os.path.exists') + def test_maybe_modified(self, exists, fhash): + exists.return_value = True + fhash.return_value = 'abcd' + x = Autoreloader(Mock(), modules=[__name__]) + x._hashes = {} + x._hashes[__name__] = 'dcba' + self.assertTrue(x._maybe_modified(__name__)) + x._hashes[__name__] = 'abcd' + self.assertFalse(x._maybe_modified(__name__)) + + def test_on_change(self): + x = Autoreloader(Mock(), modules=[__name__]) + mm = x._maybe_modified = Mock(0) + mm.return_value = True + x._reload = Mock() + x.file_to_module[__name__] = __name__ + x.on_change([__name__]) + self.assertTrue(x._reload.called) + mm.return_value = False + x.on_change([__name__]) + + def test_reload(self): + x = Autoreloader(Mock(), modules=[__name__]) + x._reload([__name__]) + x.controller.reload.assert_called_with([__name__], reload=True) + + def test_stop(self): + x = Autoreloader(Mock(), modules=[__name__]) + x._monitor = None + x.stop() + x._monitor = Mock() + x.stop() + x._monitor.stop.assert_called_with() diff --git a/awx/lib/site-packages/celery/tests/worker/test_autoscale.py b/awx/lib/site-packages/celery/tests/worker/test_autoscale.py new file mode 100644 index 0000000000..f82aa7c326 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/worker/test_autoscale.py @@ -0,0 +1,163 @@ +from __future__ import absolute_import + +import sys + +from time import time + +from mock import Mock, patch + +from celery.concurrency.base import BasePool +from celery.worker import state +from celery.worker import autoscale +from celery.tests.utils import Case, sleepdeprived + + +class Object(object): + pass + + +class MockPool(BasePool): + shrink_raises_exception = False + shrink_raises_ValueError = False + + def __init__(self, *args, **kwargs): + super(MockPool, self).__init__(*args, **kwargs) + self._pool = Object() + self._pool._processes = self.limit + + def grow(self, n=1): + self._pool._processes += n + + def shrink(self, n=1): + if self.shrink_raises_exception: + raise KeyError('foo') + if self.shrink_raises_ValueError: + raise ValueError('foo') + self._pool._processes -= n + + @property + def num_processes(self): + return self._pool._processes + + +class test_Autoscaler(Case): + + def setUp(self): + self.pool = MockPool(3) + + def test_stop(self): + + class Scaler(autoscale.Autoscaler): + alive = True + joined = False + + def is_alive(self): + return self.alive + + def join(self, timeout=None): + self.joined = True + + x = Scaler(self.pool, 10, 3) + x._is_stopped.set() + x.stop() + self.assertTrue(x.joined) + x.joined = False + x.alive = False + x.stop() + self.assertFalse(x.joined) + + @sleepdeprived(autoscale) + def test_body(self): + x = autoscale.Autoscaler(self.pool, 10, 3) + x.body() + self.assertEqual(x.pool.num_processes, 3) + for i in range(20): + state.reserved_requests.add(i) + x.body() + x.body() + self.assertEqual(x.pool.num_processes, 10) + state.reserved_requests.clear() + x.body() + self.assertEqual(x.pool.num_processes, 10) + x._last_action = time() - 10000 + x.body() + self.assertEqual(x.pool.num_processes, 3) + + def test_run(self): + + class Scaler(autoscale.Autoscaler): + scale_called = False + + def body(self): + self.scale_called = True + self._is_shutdown.set() + + x = Scaler(self.pool, 10, 3) + x.run() + self.assertTrue(x._is_shutdown.isSet()) + 
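test_maybe_modified above stubs file_hash and compares its result against a stored digest; that comparison is the autoreloader's entire change-detection contract. A sketch of such a digest helper (file_hash_sketch is illustrative, not the celery.worker.autoreload implementation):

    import hashlib

    def file_hash_sketch(filename, algorithm='md5'):
        # One digest per module file; a later mismatch means "modified".
        digest = hashlib.new(algorithm)
        with open(filename, 'rb') as f:
            digest.update(f.read())
        return digest.hexdigest()

The autoscale test file diff continues below.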
self.assertTrue(x._is_stopped.isSet()) + self.assertTrue(x.scale_called) + + def test_shrink_raises_exception(self): + x = autoscale.Autoscaler(self.pool, 10, 3) + x.scale_up(3) + x._last_action = time() - 10000 + x.pool.shrink_raises_exception = True + x.scale_down(1) + + @patch('celery.worker.autoscale.debug') + def test_shrink_raises_ValueError(self, debug): + x = autoscale.Autoscaler(self.pool, 10, 3) + x.scale_up(3) + x._last_action = time() - 10000 + x.pool.shrink_raises_ValueError = True + x.scale_down(1) + self.assertTrue(debug.call_count) + + def test_update_and_force(self): + x = autoscale.Autoscaler(self.pool, 10, 3) + self.assertEqual(x.processes, 3) + x.force_scale_up(5) + self.assertEqual(x.processes, 8) + x.update(5, None) + self.assertEqual(x.processes, 5) + x.force_scale_down(3) + self.assertEqual(x.processes, 2) + x.update(3, None) + self.assertEqual(x.processes, 3) + x.force_scale_down(1000) + self.assertEqual(x.min_concurrency, 0) + self.assertEqual(x.processes, 0) + x.force_scale_up(1000) + x.min_concurrency = 1 + x.force_scale_down(1) + + x.update(max=300, min=10) + x.update(max=300, min=2) + x.update(max=None, min=None) + + def test_info(self): + x = autoscale.Autoscaler(self.pool, 10, 3) + info = x.info() + self.assertEqual(info['max'], 10) + self.assertEqual(info['min'], 3) + self.assertEqual(info['current'], 3) + + @patch('os._exit') + def test_thread_crash(self, _exit): + + class _Autoscaler(autoscale.Autoscaler): + + def body(self): + self._is_shutdown.set() + raise OSError('foo') + x = _Autoscaler(self.pool, 10, 3) + + stderr = Mock() + p, sys.stderr = sys.stderr, stderr + try: + x.run() + finally: + sys.stderr = p + _exit.assert_called_with(1) + self.assertTrue(stderr.write.call_count) diff --git a/awx/lib/site-packages/celery/tests/worker/test_bootsteps.py b/awx/lib/site-packages/celery/tests/worker/test_bootsteps.py new file mode 100644 index 0000000000..6b50204e83 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/worker/test_bootsteps.py @@ -0,0 +1,225 @@ +from __future__ import absolute_import +from __future__ import with_statement + +from mock import Mock + +from celery.worker import bootsteps + +from celery.tests.utils import AppCase, Case + + +class test_Component(Case): + + class Def(bootsteps.Component): + name = 'test_Component.Def' + + def test_components_must_be_named(self): + with self.assertRaises(NotImplementedError): + + class X(bootsteps.Component): + pass + + class Y(bootsteps.Component): + abstract = True + + def test_namespace_name(self, ns='test_namespace_name'): + + class X(bootsteps.Component): + namespace = ns + name = 'X' + self.assertEqual(X.namespace, ns) + self.assertEqual(X.name, 'X') + + class Y(bootsteps.Component): + name = '%s.Y' % (ns, ) + self.assertEqual(Y.namespace, ns) + self.assertEqual(Y.name, 'Y') + + def test_init(self): + self.assertTrue(self.Def(self)) + + def test_create(self): + self.Def(self).create(self) + + def test_include_if(self): + x = self.Def(self) + x.enabled = True + self.assertTrue(x.include_if(self)) + + x.enabled = False + self.assertFalse(x.include_if(self)) + + def test_instantiate(self): + self.assertIsInstance(self.Def(self).instantiate(self.Def, self), + self.Def) + + def test_include_when_enabled(self): + x = self.Def(self) + x.create = Mock() + x.create.return_value = 'George' + self.assertTrue(x.include(self)) + + self.assertEqual(x.obj, 'George') + x.create.assert_called_with(self) + + def test_include_when_disabled(self): + x = self.Def(self) + x.enabled = False + x.create = Mock() + + 
self.assertFalse(x.include(self)) + self.assertFalse(x.create.call_count) + + +class test_StartStopComponent(Case): + + class Def(bootsteps.StartStopComponent): + name = 'test_StartStopComponent.Def' + + def setUp(self): + self.components = [] + + def test_start__stop(self): + x = self.Def(self) + x.create = Mock() + + # include creates the underlying object and sets + # its x.obj attribute to it, as well as appending + # it to the parent.components list. + x.include(self) + self.assertTrue(self.components) + self.assertIs(self.components[0], x.obj) + + x.start() + x.obj.start.assert_called_with() + + x.stop() + x.obj.stop.assert_called_with() + + def test_include_when_disabled(self): + x = self.Def(self) + x.enabled = False + x.include(self) + self.assertFalse(self.components) + + def test_terminate_when_terminable(self): + x = self.Def(self) + x.terminable = True + x.create = Mock() + + x.include(self) + x.terminate() + x.obj.terminate.assert_called_with() + self.assertFalse(x.obj.stop.call_count) + + def test_terminate_calls_stop_when_not_terminable(self): + x = self.Def(self) + x.terminable = False + x.create = Mock() + + x.include(self) + x.terminate() + x.obj.stop.assert_called_with() + self.assertFalse(x.obj.terminate.call_count) + + +class test_Namespace(AppCase): + + class NS(bootsteps.Namespace): + name = 'test_Namespace' + + class ImportingNS(bootsteps.Namespace): + + def __init__(self, *args, **kwargs): + bootsteps.Namespace.__init__(self, *args, **kwargs) + self.imported = [] + + def modules(self): + return ['A', 'B', 'C'] + + def import_module(self, module): + self.imported.append(module) + + def test_components_added_to_unclaimed(self): + + class tnA(bootsteps.Component): + name = 'test_Namespace.A' + + class tnB(bootsteps.Component): + name = 'test_Namespace.B' + + class xxA(bootsteps.Component): + name = 'xx.A' + + self.assertIn('A', self.NS._unclaimed['test_Namespace']) + self.assertIn('B', self.NS._unclaimed['test_Namespace']) + self.assertIn('A', self.NS._unclaimed['xx']) + self.assertNotIn('B', self.NS._unclaimed['xx']) + + def test_init(self): + ns = self.NS(app=self.app) + self.assertIs(ns.app, self.app) + self.assertEqual(ns.name, 'test_Namespace') + self.assertFalse(ns.services) + + def test_interface_modules(self): + self.NS(app=self.app).modules() + + def test_load_modules(self): + x = self.ImportingNS(app=self.app) + x.load_modules() + self.assertListEqual(x.imported, ['A', 'B', 'C']) + + def test_apply(self): + + class MyNS(bootsteps.Namespace): + name = 'test_apply' + + def modules(self): + return ['A', 'B'] + + class A(bootsteps.Component): + name = 'test_apply.A' + requires = ['C'] + + class B(bootsteps.Component): + name = 'test_apply.B' + + class C(bootsteps.Component): + name = 'test_apply.C' + requires = ['B'] + + class D(bootsteps.Component): + name = 'test_apply.D' + last = True + + x = MyNS(app=self.app) + x.import_module = Mock() + x.apply(self) + + self.assertItemsEqual(x.components.values(), [A, B, C, D]) + self.assertTrue(x.import_module.call_count) + + for boot_step in x.boot_steps: + self.assertEqual(boot_step.namespace, x) + + self.assertIsInstance(x.boot_steps[0], B) + self.assertIsInstance(x.boot_steps[1], C) + self.assertIsInstance(x.boot_steps[2], A) + self.assertIsInstance(x.boot_steps[3], D) + + self.assertIs(x['A'], A) + + def test_import_module(self): + x = self.NS(app=self.app) + import os + self.assertIs(x.import_module('os'), os) + + def test_find_last_but_no_components(self): + + class MyNS(bootsteps.Namespace): + name = 
'qwejwioqjewoqiej' + + x = MyNS(app=self.app) + x.apply(self) + self.assertIsNone(x._find_last()) diff --git a/awx/lib/site-packages/celery/tests/worker/test_control.py b/awx/lib/site-packages/celery/tests/worker/test_control.py new file mode 100644 index 0000000000..b743586bba --- /dev/null +++ b/awx/lib/site-packages/celery/tests/worker/test_control.py @@ -0,0 +1,494 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import sys +import socket + +from datetime import datetime, timedelta + +from kombu import pidbox +from mock import Mock, patch + +from celery import current_app +from celery.datastructures import AttributeDict +from celery.task import task +from celery.utils import uuid +from celery.utils.timer2 import Timer +from celery.worker import WorkController as _WC +from celery.worker import consumer +from celery.worker import control +from celery.worker import state +from celery.worker.buckets import FastQueue +from celery.worker.job import TaskRequest +from celery.worker.state import revoked +from celery.worker.control import Panel +from celery.tests.utils import Case + +hostname = socket.gethostname() + + +@task(rate_limit=200) # for extra info in dump_tasks +def mytask(): + pass + + +class WorkController(object): + autoscaler = None + + +class Consumer(consumer.Consumer): + + def __init__(self): + self.ready_queue = FastQueue() + self.timer = Timer() + self.app = current_app + self.event_dispatcher = Mock() + self.controller = WorkController() + self.task_consumer = Mock() + + from celery.concurrency.base import BasePool + self.pool = BasePool(10) + + @property + def info(self): + return {'xyz': 'XYZ'} + + +class test_ControlPanel(Case): + + def setUp(self): + self.app = current_app + self.panel = self.create_panel(consumer=Consumer()) + + def create_state(self, **kwargs): + kwargs.setdefault('app', self.app) + return AttributeDict(kwargs) + + def create_panel(self, **kwargs): + return self.app.control.mailbox.Node(hostname=hostname, + state=self.create_state(**kwargs), + handlers=Panel.data) + + def test_enable_events(self): + consumer = Consumer() + panel = self.create_panel(consumer=consumer) + consumer.event_dispatcher.enabled = False + panel.handle('enable_events') + self.assertTrue(consumer.event_dispatcher.enable.call_count) + self.assertIn( + ('worker-online', ), + consumer.event_dispatcher.send.call_args, + ) + consumer.event_dispatcher.enabled = True + self.assertIn('already enabled', panel.handle('enable_events')['ok']) + + def test_disable_events(self): + consumer = Consumer() + panel = self.create_panel(consumer=consumer) + consumer.event_dispatcher.enabled = True + panel.handle('disable_events') + self.assertTrue(consumer.event_dispatcher.disable.call_count) + self.assertIn(('worker-offline', ), + consumer.event_dispatcher.send.call_args) + consumer.event_dispatcher.enabled = False + self.assertIn('already disabled', panel.handle('disable_events')['ok']) + + def test_heartbeat(self): + consumer = Consumer() + panel = self.create_panel(consumer=consumer) + consumer.event_dispatcher.enabled = True + panel.handle('heartbeat') + self.assertIn(('worker-heartbeat', ), + consumer.event_dispatcher.send.call_args) + + def test_time_limit(self): + panel = self.create_panel(consumer=Mock()) + th, ts = mytask.time_limit, mytask.soft_time_limit + try: + r = panel.handle('time_limit', arguments=dict( + task_name=mytask.name, hard=30, soft=10)) + self.assertEqual((mytask.time_limit, mytask.soft_time_limit), + (30, 10)) + self.assertIn('ok', r) + 
r = panel.handle('time_limit', arguments=dict( + task_name=mytask.name, hard=None, soft=None)) + self.assertEqual((mytask.time_limit, mytask.soft_time_limit), + (None, None)) + self.assertIn('ok', r) + + r = panel.handle('time_limit', arguments=dict( + task_name='248e8afya9s8dh921eh928', hard=30)) + self.assertIn('error', r) + finally: + mytask.time_limit, mytask.soft_time_limit = th, ts + + def test_active_queues(self): + import kombu + + x = kombu.Consumer(current_app.connection(), + [kombu.Queue('foo', kombu.Exchange('foo'), 'foo'), + kombu.Queue('bar', kombu.Exchange('bar'), 'bar')], + auto_declare=False) + consumer = Mock() + consumer.task_consumer = x + panel = self.create_panel(consumer=consumer) + r = panel.handle('active_queues') + self.assertListEqual(list(sorted(q['name'] for q in r)), + ['bar', 'foo']) + + def test_dump_tasks(self): + info = '\n'.join(self.panel.handle('dump_tasks')) + self.assertIn('mytask', info) + self.assertIn('rate_limit=200', info) + + def test_stats(self): + prev_count, state.total_count = state.total_count, 100 + try: + self.assertDictContainsSubset({'total': 100, + 'consumer': {'xyz': 'XYZ'}}, + self.panel.handle('stats')) + self.panel.state.consumer = Mock() + self.panel.handle('stats') + self.assertTrue( + self.panel.state.consumer.controller.autoscaler.info.called) + finally: + state.total_count = prev_count + + def test_report(self): + self.panel.handle('report') + + def test_active(self): + r = TaskRequest(mytask.name, 'do re mi', (), {}) + state.active_requests.add(r) + try: + self.assertTrue(self.panel.handle('dump_active')) + finally: + state.active_requests.discard(r) + + def test_pool_grow(self): + + class MockPool(object): + + def __init__(self, size=1): + self.size = size + + def grow(self, n=1): + self.size += n + + def shrink(self, n=1): + self.size -= n + + consumer = Consumer() + consumer.pool = MockPool() + panel = self.create_panel(consumer=consumer) + + panel.handle('pool_grow') + self.assertEqual(consumer.pool.size, 2) + panel.handle('pool_shrink') + self.assertEqual(consumer.pool.size, 1) + + panel.state.consumer = Mock() + panel.state.consumer.controller = Mock() + sc = panel.state.consumer.controller.autoscaler = Mock() + panel.handle('pool_grow') + self.assertTrue(sc.force_scale_up.called) + panel.handle('pool_shrink') + self.assertTrue(sc.force_scale_down.called) + + def test_add__cancel_consumer(self): + + class MockConsumer(object): + queues = [] + cancelled = [] + consuming = False + + def add_queue(self, queue): + self.queues.append(queue.name) + + def consume(self): + self.consuming = True + + def cancel_by_queue(self, queue): + self.cancelled.append(queue) + + def consuming_from(self, queue): + return queue in self.queues + + consumer = Consumer() + consumer.task_consumer = MockConsumer() + panel = self.create_panel(consumer=consumer) + + panel.handle('add_consumer', {'queue': 'MyQueue'}) + self.assertIn('MyQueue', consumer.task_consumer.queues) + self.assertTrue(consumer.task_consumer.consuming) + panel.handle('add_consumer', {'queue': 'MyQueue'}) + panel.handle('cancel_consumer', {'queue': 'MyQueue'}) + self.assertIn('MyQueue', consumer.task_consumer.cancelled) + + def test_revoked(self): + state.revoked.clear() + state.revoked.add('a1') + state.revoked.add('a2') + + try: + self.assertEqual(sorted(self.panel.handle('dump_revoked')), + ['a1', 'a2']) + finally: + state.revoked.clear() + + def test_dump_schedule(self): + consumer = Consumer() + panel = self.create_panel(consumer=consumer) + 
self.assertFalse(panel.handle('dump_schedule')) + r = TaskRequest(mytask.name, 'CAFEBABE', (), {}) + consumer.timer.schedule.enter( + consumer.timer.Entry(lambda x: x, (r, )), + datetime.now() + timedelta(seconds=10)) + self.assertTrue(panel.handle('dump_schedule')) + + def test_dump_reserved(self): + from celery.worker import state + consumer = Consumer() + state.reserved_requests.add( + TaskRequest(mytask.name, uuid(), args=(2, 2), kwargs={}), + ) + try: + panel = self.create_panel(consumer=consumer) + response = panel.handle('dump_reserved', {'safe': True}) + self.assertDictContainsSubset( + {'name': mytask.name, + 'args': (2, 2), + 'kwargs': {}, + 'hostname': socket.gethostname()}, + response[0], + ) + state.reserved_requests.clear() + self.assertFalse(panel.handle('dump_reserved')) + finally: + state.reserved_requests.clear() + + def test_rate_limit_when_disabled(self): + app = current_app + app.conf.CELERY_DISABLE_RATE_LIMITS = True + try: + e = self.panel.handle( + 'rate_limit', + arguments={'task_name': mytask.name, + 'rate_limit': '100/m'}) + self.assertIn('rate limits disabled', e.get('error')) + finally: + app.conf.CELERY_DISABLE_RATE_LIMITS = False + + def test_rate_limit_invalid_rate_limit_string(self): + e = self.panel.handle('rate_limit', arguments=dict( + task_name='tasks.add', rate_limit='x1240301#%!')) + self.assertIn('Invalid rate limit string', e.get('error')) + + def test_rate_limit(self): + + class Consumer(object): + + class ReadyQueue(object): + fresh = False + + def refresh(self): + self.fresh = True + + def __init__(self): + self.ready_queue = self.ReadyQueue() + + consumer = Consumer() + panel = self.create_panel(app=current_app, consumer=consumer) + + task = current_app.tasks[mytask.name] + old_rate_limit = task.rate_limit + try: + panel.handle('rate_limit', arguments=dict(task_name=task.name, + rate_limit='100/m')) + self.assertEqual(task.rate_limit, '100/m') + self.assertTrue(consumer.ready_queue.fresh) + consumer.ready_queue.fresh = False + panel.handle('rate_limit', arguments=dict(task_name=task.name, + rate_limit=0)) + self.assertEqual(task.rate_limit, 0) + self.assertTrue(consumer.ready_queue.fresh) + finally: + task.rate_limit = old_rate_limit + + def test_rate_limit_nonexistent_task(self): + self.panel.handle('rate_limit', arguments={ + 'task_name': 'xxxx.does.not.exist', + 'rate_limit': '1000/s'}) + + def test_unexposed_command(self): + with self.assertRaises(KeyError): + self.panel.handle('foo', arguments={}) + + def test_revoke_with_name(self): + tid = uuid() + m = {'method': 'revoke', + 'destination': hostname, + 'arguments': {'task_id': tid, + 'task_name': mytask.name}} + self.panel.handle_message(m, None) + self.assertIn(tid, revoked) + + def test_revoke_with_name_not_in_registry(self): + tid = uuid() + m = {'method': 'revoke', + 'destination': hostname, + 'arguments': {'task_id': tid, + 'task_name': 'xxxxxxxxx33333333388888'}} + self.panel.handle_message(m, None) + self.assertIn(tid, revoked) + + def test_revoke(self): + tid = uuid() + m = {'method': 'revoke', + 'destination': hostname, + 'arguments': {'task_id': tid}} + self.panel.handle_message(m, None) + self.assertIn(tid, revoked) + + m = {'method': 'revoke', + 'destination': 'does.not.exist', + 'arguments': {'task_id': tid + 'xxx'}} + self.panel.handle_message(m, None) + self.assertNotIn(tid + 'xxx', revoked) + + def test_revoke_terminate(self): + request = Mock() + request.id = tid = uuid() + state.reserved_requests.add(request) + try: + r = control.revoke(Mock(), tid, terminate=True) +
self.assertIn(tid, revoked) + self.assertTrue(request.terminate.call_count) + self.assertIn('terminating', r['ok']) + # unknown task id only revokes + r = control.revoke(Mock(), uuid(), terminate=True) + self.assertIn('not found', r['ok']) + finally: + state.reserved_requests.discard(request) + + def test_autoscale(self): + self.panel.state.consumer = Mock() + self.panel.state.consumer.controller = Mock() + sc = self.panel.state.consumer.controller.autoscaler = Mock() + sc.update.return_value = 10, 2 + m = {'method': 'autoscale', + 'destination': hostname, + 'arguments': {'max': '10', 'min': '2'}} + r = self.panel.handle_message(m, None) + self.assertIn('ok', r) + + self.panel.state.consumer.controller.autoscaler = None + r = self.panel.handle_message(m, None) + self.assertIn('error', r) + + def test_ping(self): + m = {'method': 'ping', + 'destination': hostname} + r = self.panel.handle_message(m, None) + self.assertEqual(r, 'pong') + + def test_shutdown(self): + m = {'method': 'shutdown', + 'destination': hostname} + with self.assertRaises(SystemExit): + self.panel.handle_message(m, None) + + def test_panel_reply(self): + + replies = [] + + class _Node(pidbox.Node): + + def reply(self, data, exchange, routing_key, **kwargs): + replies.append(data) + + panel = _Node(hostname=hostname, + state=self.create_state(consumer=Consumer()), + handlers=Panel.data, + mailbox=self.app.control.mailbox) + r = panel.dispatch('ping', reply_to={'exchange': 'x', + 'routing_key': 'x'}) + self.assertEqual(r, 'pong') + self.assertDictEqual(replies[0], {panel.hostname: 'pong'}) + + def test_pool_restart(self): + consumer = Consumer() + consumer.controller = _WC(app=current_app) + consumer.controller.pool.restart = Mock() + panel = self.create_panel(consumer=consumer) + panel.app = self.app + _import = panel.app.loader.import_from_cwd = Mock() + _reload = Mock() + + with self.assertRaises(ValueError): + panel.handle('pool_restart', {'reloader': _reload}) + + current_app.conf.CELERYD_POOL_RESTARTS = True + try: + panel.handle('pool_restart', {'reloader': _reload}) + self.assertTrue(consumer.controller.pool.restart.called) + self.assertFalse(_reload.called) + self.assertFalse(_import.called) + finally: + current_app.conf.CELERYD_POOL_RESTARTS = False + + def test_pool_restart_import_modules(self): + consumer = Consumer() + consumer.controller = _WC(app=current_app) + consumer.controller.pool.restart = Mock() + panel = self.create_panel(consumer=consumer) + panel.app = self.app + _import = consumer.controller.app.loader.import_from_cwd = Mock() + _reload = Mock() + + current_app.conf.CELERYD_POOL_RESTARTS = True + try: + panel.handle('pool_restart', {'modules': ['foo', 'bar'], + 'reloader': _reload}) + self.assertTrue(consumer.controller.pool.restart.called) + self.assertFalse(_reload.called) + self.assertEqual( + [(('foo',), {}), (('bar',), {})], + _import.call_args_list, + ) + finally: + current_app.conf.CELERYD_POOL_RESTARTS = False + + def test_pool_restart_reload_modules(self): + consumer = Consumer() + consumer.controller = _WC(app=current_app) + consumer.controller.pool.restart = Mock() + panel = self.create_panel(consumer=consumer) + panel.app = self.app + _import = panel.app.loader.import_from_cwd = Mock() + _reload = Mock() + + current_app.conf.CELERYD_POOL_RESTARTS = True + try: + with patch.dict(sys.modules, {'foo': None}): + panel.handle('pool_restart', {'modules': ['foo'], + 'reload': False, + 'reloader': _reload}) + + self.assertTrue(consumer.controller.pool.restart.called) +
self.assertFalse(_reload.called) + self.assertFalse(_import.called) + + _import.reset_mock() + _reload.reset_mock() + consumer.controller.pool.restart.reset_mock() + + panel.handle('pool_restart', {'modules': ['foo'], + 'reload': True, + 'reloader': _reload}) + + self.assertTrue(consumer.controller.pool.restart.called) + self.assertTrue(_reload.called) + self.assertFalse(_import.called) + finally: + current_app.conf.CELERYD_POOL_RESTARTS = False diff --git a/awx/lib/site-packages/celery/tests/worker/test_heartbeat.py b/awx/lib/site-packages/celery/tests/worker/test_heartbeat.py new file mode 100644 index 0000000000..1446ce65ac --- /dev/null +++ b/awx/lib/site-packages/celery/tests/worker/test_heartbeat.py @@ -0,0 +1,72 @@ +from __future__ import absolute_import + +from celery.worker.heartbeat import Heart +from celery.tests.utils import Case, sleepdeprived + + +class MockDispatcher(object): + heart = None + next_iter = 0 + + def __init__(self): + self.sent = [] + self.on_enabled = set() + self.on_disabled = set() + self.enabled = True + + def send(self, msg, **_fields): + self.sent.append(msg) + if self.heart: + if self.next_iter > 10: + self.heart._shutdown.set() + self.next_iter += 1 + + +class MockDispatcherRaising(object): + + def send(self, msg): + if msg == 'worker-offline': + raise Exception('foo') + + +class MockTimer(object): + + def apply_interval(self, msecs, fun, args=(), kwargs={}): + + class entry(tuple): + cancelled = False + + def cancel(self): + self.cancelled = True + + return entry((msecs, fun, args, kwargs)) + + def cancel(self, entry): + entry.cancel() + + +class test_Heart(Case): + + def test_stop(self): + timer = MockTimer() + eventer = MockDispatcher() + h = Heart(timer, eventer, interval=1) + h.start() + self.assertTrue(h.tref) + h.stop() + self.assertIsNone(h.tref) + h.stop() + + @sleepdeprived + def test_run_manages_cycle(self): + eventer = MockDispatcher() + heart = Heart(MockTimer(), eventer, interval=0.1) + eventer.heart = heart + heart.start() + msecs, fun, args, kwargs = tref = heart.tref + self.assertEqual(msecs, 0.1 * 1000) + self.assertEqual(tref.fun, eventer.send) + self.assertTrue(tref.args) + self.assertTrue(tref.kwargs) + heart.stop() + self.assertTrue(tref.cancelled) diff --git a/awx/lib/site-packages/celery/tests/worker/test_hub.py b/awx/lib/site-packages/celery/tests/worker/test_hub.py new file mode 100644 index 0000000000..e66ecb7a09 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/worker/test_hub.py @@ -0,0 +1,269 @@ +from __future__ import absolute_import +from __future__ import with_statement + +from celery.worker.hub import ( + DummyLock, + BoundedSemaphore, + Hub, +) + +from mock import Mock, call, patch + +from celery.tests.utils import Case + + +class File(object): + + def __init__(self, fd): + self.fd = fd + + def fileno(self): + return self.fd + + def __eq__(self, other): + if isinstance(other, File): + return self.fd == other.fd + return NotImplemented + + +class test_DummyLock(Case): + + def test_context(self): + mutex = DummyLock() + with mutex: + pass + + +class test_BoundedSemaphore(Case): + + def test_acquire_release(self): + x = BoundedSemaphore(2) + + c1 = Mock() + x.acquire(c1, 1) + self.assertEqual(x.value, 1) + c1.assert_called_with(1) + + c2 = Mock() + x.acquire(c2, 2) + self.assertEqual(x.value, 0) + c2.assert_called_with(2) + + c3 = Mock() + x.acquire(c3, 3) + self.assertEqual(x.value, 0) + self.assertFalse(c3.called) + + x.release() + self.assertEqual(x.value, 1) + c3.assert_called_with(3) + + def test_bounded(self): 
+ x = BoundedSemaphore(2) + for i in xrange(100): + x.release() + self.assertEqual(x.value, 2) + + def test_grow_shrink(self): + x = BoundedSemaphore(1) + self.assertEqual(x.initial_value, 1) + cb1 = Mock() + x.acquire(cb1, 1) + cb1.assert_called_with(1) + self.assertEqual(x.value, 0) + + cb2 = Mock() + x.acquire(cb2, 2) + self.assertFalse(cb2.called) + self.assertEqual(x.value, 0) + + cb3 = Mock() + x.acquire(cb3, 3) + self.assertFalse(cb3.called) + + x.grow(2) + cb2.assert_called_with(2) + cb3.assert_called_with(3) + self.assertEqual(x.value, 3) + self.assertEqual(x.initial_value, 3) + + self.assertFalse(x._waiting) + x.grow(3) + for i in xrange(x.initial_value): + self.assertTrue(x.acquire(Mock())) + self.assertFalse(x.acquire(Mock())) + x.clear() + + x.shrink(3) + for i in xrange(x.initial_value): + self.assertTrue(x.acquire(Mock())) + self.assertFalse(x.acquire(Mock())) + self.assertEqual(x.value, 0) + + for i in xrange(100): + x.release() + self.assertEqual(x.value, x.initial_value) + + def test_clear(self): + x = BoundedSemaphore(10) + for i in xrange(11): + x.acquire(Mock()) + self.assertTrue(x._waiting) + self.assertEqual(x.value, 0) + + x.clear() + self.assertFalse(x._waiting) + self.assertEqual(x.value, x.initial_value) + + +class test_Hub(Case): + + @patch('kombu.utils.eventio.poll') + def test_start_stop(self, poll): + hub = Hub() + hub.start() + poll.assert_called_with() + + hub.stop() + hub.poller.close.assert_called_with() + + def test_init(self): + hub = Hub() + cb1 = Mock() + cb2 = Mock() + hub.on_init.extend([cb1, cb2]) + + hub.init() + cb1.assert_called_with(hub) + cb2.assert_called_with(hub) + + def test_fire_timers(self): + hub = Hub() + hub.timer = Mock() + hub.timer._queue = [] + self.assertEqual(hub.fire_timers(min_delay=42.324, + max_delay=32.321), 32.321) + + hub.timer._queue = [1] + hub.scheduler = Mock() + hub.scheduler.next.return_value = 3.743, None + self.assertEqual(hub.fire_timers(), 3.743) + + e1, e2, e3 = Mock(), Mock(), Mock() + entries = [e1, e2, e3] + + reset = lambda: [m.reset() for m in [e1, e2, e3]] + + def se(): + if entries: + return None, entries.pop() + return 3.982, None + hub.scheduler.next = Mock() + hub.scheduler.next.side_effect = se + + self.assertEqual(hub.fire_timers(max_timers=10), 3.982) + for E in [e3, e2, e1]: + E.assert_called_with() + reset() + + entries[:] = [Mock() for _ in xrange(11)] + keep = list(entries) + self.assertEqual(hub.fire_timers(max_timers=10, min_delay=1.13), 1.13) + for E in reversed(keep[1:]): + E.assert_called_with() + reset() + self.assertEqual(hub.fire_timers(max_timers=10), 3.982) + keep[0].assert_called_with() + + def test_update_readers(self): + hub = Hub() + P = hub.poller = Mock() + + read_A = Mock() + read_B = Mock() + hub.update_readers({10: read_A, File(11): read_B}) + + P.register.assert_has_calls([ + call(10, hub.READ | hub.ERR), + call(File(11), hub.READ | hub.ERR), + ], any_order=True) + + self.assertIs(hub.readers[10], read_A) + self.assertIs(hub.readers[11], read_B) + + hub.remove(10) + self.assertNotIn(10, hub.readers) + hub.remove(File(11)) + self.assertNotIn(11, hub.readers) + P.unregister.assert_has_calls([ + call(10), call(File(11)), + ]) + + def test_can_remove_unknown_fds(self): + hub = Hub() + hub.poller = Mock() + hub.remove(30) + hub.remove(File(301)) + + def test_remove__unregister_raises(self): + hub = Hub() + hub.poller = Mock() + hub.poller.unregister.side_effect = OSError() + + hub.remove(313) + + def test_update_writers(self): + hub = Hub() + P = hub.poller = Mock() + + write_A = 
Mock() + write_B = Mock() + hub.update_writers({20: write_A, File(21): write_B}) + + P.register.assert_has_calls([ + call(20, hub.WRITE), + call(File(21), hub.WRITE), + ], any_order=True) + + self.assertIs(hub.writers[20], write_A) + self.assertIs(hub.writers[21], write_B) + + hub.remove(20) + self.assertNotIn(20, hub.writers) + hub.remove(File(21)) + self.assertNotIn(21, hub.writers) + P.unregister.assert_has_calls([ + call(20), call(File(21)), + ]) + + def test_enter__exit(self): + hub = Hub() + P = hub.poller = Mock() + hub.init = Mock() + + on_close = Mock() + hub.on_close.append(on_close) + + with hub: + hub.init.assert_called_with() + + read_A = Mock() + read_B = Mock() + hub.update_readers({10: read_A, File(11): read_B}) + write_A = Mock() + write_B = Mock() + hub.update_writers({20: write_A, File(21): write_B}) + self.assertTrue(hub.readers) + self.assertTrue(hub.writers) + self.assertFalse(hub.readers) + self.assertFalse(hub.writers) + + P.unregister.assert_has_calls([ + call(10), call(11), call(20), call(21), + ], any_order=True) + + on_close.assert_called_with(hub) + + def test_scheduler_property(self): + hub = Hub(timer=[1, 2, 3]) + self.assertEqual(list(hub.scheduler), [1, 2, 3]) diff --git a/awx/lib/site-packages/celery/tests/worker/test_mediator.py b/awx/lib/site-packages/celery/tests/worker/test_mediator.py new file mode 100644 index 0000000000..66511f6162 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/worker/test_mediator.py @@ -0,0 +1,113 @@ +from __future__ import absolute_import + +import sys + +from Queue import Queue + +from mock import Mock, patch + +from celery.worker.mediator import Mediator +from celery.worker.state import revoked as revoked_tasks +from celery.tests.utils import Case + + +class MockTask(object): + hostname = 'harness.com' + id = 1234 + name = 'mocktask' + + def __init__(self, value, **kwargs): + self.value = value + + on_ack = Mock() + + def revoked(self): + if self.id in revoked_tasks: + self.on_ack() + return True + return False + + +class test_Mediator(Case): + + def test_mediator_start__stop(self): + ready_queue = Queue() + m = Mediator(ready_queue, lambda t: t) + m.start() + self.assertFalse(m._is_shutdown.isSet()) + self.assertFalse(m._is_stopped.isSet()) + m.stop() + m.join() + self.assertTrue(m._is_shutdown.isSet()) + self.assertTrue(m._is_stopped.isSet()) + + def test_mediator_body(self): + ready_queue = Queue() + got = {} + + def mycallback(value): + got['value'] = value.value + + m = Mediator(ready_queue, mycallback) + ready_queue.put(MockTask('George Costanza')) + + m.body() + + self.assertEqual(got['value'], 'George Costanza') + + ready_queue.put(MockTask('Jerry Seinfeld')) + m._does_debug = False + m.body() + self.assertEqual(got['value'], 'Jerry Seinfeld') + + @patch('os._exit') + def test_mediator_crash(self, _exit): + ms = [None] + + class _Mediator(Mediator): + + def body(self): + try: + raise KeyError('foo') + finally: + ms[0]._is_shutdown.set() + + ready_queue = Queue() + ms[0] = m = _Mediator(ready_queue, None) + ready_queue.put(MockTask('George Costanza')) + + stderr = Mock() + p, sys.stderr = sys.stderr, stderr + try: + m.run() + finally: + sys.stderr = p + self.assertTrue(_exit.call_count) + self.assertTrue(stderr.write.call_count) + + def test_mediator_body_exception(self): + ready_queue = Queue() + + def mycallback(value): + raise KeyError('foo') + + m = Mediator(ready_queue, mycallback) + ready_queue.put(MockTask('Elaine M. 
Benes')) + + m.body() + + def test_run(self): + ready_queue = Queue() + + condition = [None] + + def mycallback(value): + condition[0].set() + + m = Mediator(ready_queue, mycallback) + condition[0] = m._is_shutdown + ready_queue.put(MockTask('Elaine M. Benes')) + + m.run() + self.assertTrue(m._is_shutdown.isSet()) + self.assertTrue(m._is_stopped.isSet()) diff --git a/awx/lib/site-packages/celery/tests/worker/test_request.py b/awx/lib/site-packages/celery/tests/worker/test_request.py new file mode 100644 index 0000000000..b83768bf3d --- /dev/null +++ b/awx/lib/site-packages/celery/tests/worker/test_request.py @@ -0,0 +1,851 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +from __future__ import with_statement + +import anyjson +import os +import signal +import sys +import time + +from datetime import datetime, timedelta + +from kombu.transport.base import Message +from kombu.utils.encoding import from_utf8, default_encode +from mock import Mock, patch +from nose import SkipTest + +from celery import current_app +from celery import states +from celery.app import app_or_default +from celery.concurrency.base import BasePool +from celery.datastructures import ExceptionInfo +from celery.exceptions import ( + RetryTaskError, + WorkerLostError, + InvalidTaskError, + TaskRevokedError, +) +from celery.task.trace import ( + trace_task, + _trace_task_ret, + TraceInfo, + mro_lookup, + build_tracer, + setup_worker_optimizations, + reset_worker_optimizations, +) +from celery.result import AsyncResult +from celery.signals import task_revoked +from celery.task import task as task_dec +from celery.task.base import Task +from celery.utils import uuid +from celery.worker import job as module +from celery.worker.job import Request, TaskRequest +from celery.worker.state import revoked + +from celery.tests.utils import AppCase, Case, assert_signal_called + +scratch = {'ACK': False} +some_kwargs_scratchpad = {} + + +class test_mro_lookup(Case): + + def test_order(self): + + class A(object): + pass + + class B(A): + pass + + class C(B): + pass + + class D(C): + + @classmethod + def mro(cls): + return () + + A.x = 10 + self.assertEqual(mro_lookup(C, 'x'), A) + self.assertIsNone(mro_lookup(C, 'x', stop=(A, ))) + B.x = 10 + self.assertEqual(mro_lookup(C, 'x'), B) + C.x = 10 + self.assertEqual(mro_lookup(C, 'x'), C) + self.assertIsNone(mro_lookup(D, 'x')) + + +def jail(task_id, name, args, kwargs): + request = {'id': task_id} + task = current_app.tasks[name] + task.__trace__ = None # rebuild + return trace_task( + task, task_id, args, kwargs, request=request, eager=False, + ) + + +def on_ack(*args, **kwargs): + scratch['ACK'] = True + + +@task_dec(accept_magic_kwargs=False) +def mytask(i, **kwargs): + return i ** i + + +@task_dec # traverses coverage for decorator without parens +def mytask_no_kwargs(i): + return i ** i + + +class MyTaskIgnoreResult(Task): + ignore_result = True + + def run(self, i): + return i ** i + + +@task_dec(accept_magic_kwargs=True) +def mytask_some_kwargs(i, task_id): + some_kwargs_scratchpad['task_id'] = task_id + return i ** i + + +@task_dec(accept_magic_kwargs=False) +def mytask_raising(i): + raise KeyError(i) + + +class test_default_encode(Case): + + def setUp(self): + if sys.version_info >= (3, 0): + raise SkipTest('py3k: not relevant') + + def test_jython(self): + prev, sys.platform = sys.platform, 'java 1.6.1' + try: + self.assertEqual(default_encode('foo'), 'foo') + finally: + sys.platform = prev + + def test_cython(self): + prev, sys.platform = sys.platform, 
'darwin' + gfe, sys.getfilesystemencoding = ( + sys.getfilesystemencoding, + lambda: 'utf-8', + ) + try: + self.assertEqual(default_encode('foo'), 'foo') + finally: + sys.platform = prev + sys.getfilesystemencoding = gfe + + +class test_RetryTaskError(Case): + + def test_retry_task_error(self): + try: + raise Exception('foo') + except Exception, exc: + ret = RetryTaskError('Retrying task', exc) + self.assertEqual(ret.exc, exc) + + +class test_trace_task(Case): + + @patch('celery.task.trace._logger') + def test_process_cleanup_fails(self, _logger): + backend = mytask.backend + mytask.backend = Mock() + mytask.backend.process_cleanup = Mock(side_effect=KeyError()) + try: + tid = uuid() + ret = jail(tid, mytask.name, [2], {}) + self.assertEqual(ret, 4) + mytask.backend.store_result.assert_called_with(tid, 4, + states.SUCCESS) + self.assertIn('Process cleanup failed', + _logger.error.call_args[0][0]) + finally: + mytask.backend = backend + + def test_process_cleanup_BaseException(self): + backend = mytask.backend + mytask.backend = Mock() + mytask.backend.process_cleanup = Mock(side_effect=SystemExit()) + try: + with self.assertRaises(SystemExit): + jail(uuid(), mytask.name, [2], {}) + finally: + mytask.backend = backend + + def test_execute_jail_success(self): + ret = jail(uuid(), mytask.name, [2], {}) + self.assertEqual(ret, 4) + + def test_marked_as_started(self): + + class Backend(mytask.backend.__class__): + _started = [] + + def store_result(self, tid, meta, state): + if state == states.STARTED: + self._started.append(tid) + + prev, mytask.backend = mytask.backend, Backend() + mytask.track_started = True + + try: + tid = uuid() + jail(tid, mytask.name, [2], {}) + self.assertIn(tid, Backend._started) + + mytask.ignore_result = True + tid = uuid() + jail(tid, mytask.name, [2], {}) + self.assertNotIn(tid, Backend._started) + finally: + mytask.backend = prev + mytask.track_started = False + mytask.ignore_result = False + + def test_execute_jail_failure(self): + ret = jail(uuid(), mytask_raising.name, + [4], {}) + self.assertIsInstance(ret, ExceptionInfo) + self.assertTupleEqual(ret.exception.args, (4, )) + + def test_execute_ignore_result(self): + task_id = uuid() + ret = jail(task_id, MyTaskIgnoreResult.name, [4], {}) + self.assertEqual(ret, 256) + self.assertFalse(AsyncResult(task_id).ready()) + + +class MockEventDispatcher(object): + + def __init__(self): + self.sent = [] + self.enabled = True + + def send(self, event, **fields): + self.sent.append(event) + + +class test_TaskRequest(AppCase): + + def test_task_wrapper_repr(self): + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}) + self.assertTrue(repr(tw)) + + @patch('celery.worker.job.kwdict') + def test_kwdict(self, kwdict): + + prev, module.NEEDS_KWDICT = module.NEEDS_KWDICT, True + try: + TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}) + self.assertTrue(kwdict.called) + finally: + module.NEEDS_KWDICT = prev + + def test_sets_store_errors(self): + mytask.ignore_result = True + try: + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}) + self.assertFalse(tw.store_errors) + mytask.store_errors_even_if_ignored = True + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}) + self.assertTrue(tw.store_errors) + finally: + mytask.ignore_result = False + mytask.store_errors_even_if_ignored = False + + def test_send_event(self): + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}) + tw.eventer = MockEventDispatcher() + tw.send_event('task-frobulated') + self.assertIn('task-frobulated', tw.eventer.sent) + + def 
test_on_retry(self): + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}) + tw.eventer = MockEventDispatcher() + try: + raise RetryTaskError('foo', KeyError('moofoobar')) + except: + einfo = ExceptionInfo() + tw.on_failure(einfo) + self.assertIn('task-retried', tw.eventer.sent) + prev, module._does_info = module._does_info, False + try: + tw.on_failure(einfo) + finally: + module._does_info = prev + einfo.internal = True + tw.on_failure(einfo) + + def test_compat_properties(self): + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}) + self.assertEqual(tw.task_id, tw.id) + self.assertEqual(tw.task_name, tw.name) + tw.task_id = 'ID' + self.assertEqual(tw.id, 'ID') + tw.task_name = 'NAME' + self.assertEqual(tw.name, 'NAME') + + def test_terminate__task_started(self): + pool = Mock() + signum = signal.SIGKILL + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}) + with assert_signal_called(task_revoked, sender=tw.task, + terminated=True, + expired=False, + signum=signum): + tw.time_start = time.time() + tw.worker_pid = 313 + tw.terminate(pool, signal='KILL') + pool.terminate_job.assert_called_with(tw.worker_pid, signum) + + def test_terminate__task_reserved(self): + pool = Mock() + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}) + tw.time_start = None + tw.terminate(pool, signal='KILL') + self.assertFalse(pool.terminate_job.called) + self.assertTupleEqual(tw._terminate_on_ack, (pool, 'KILL')) + tw.terminate(pool, signal='KILL') + + def test_revoked_expires_expired(self): + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}, + expires=datetime.utcnow() - timedelta(days=1)) + with assert_signal_called(task_revoked, sender=tw.task, + terminated=False, + expired=True, + signum=None): + tw.revoked() + self.assertIn(tw.id, revoked) + self.assertEqual(mytask.backend.get_status(tw.id), + states.REVOKED) + + def test_revoked_expires_not_expired(self): + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}, + expires=datetime.utcnow() + timedelta(days=1)) + tw.revoked() + self.assertNotIn(tw.id, revoked) + self.assertNotEqual( + mytask.backend.get_status(tw.id), + states.REVOKED, + ) + + def test_revoked_expires_ignore_result(self): + mytask.ignore_result = True + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}, + expires=datetime.utcnow() - timedelta(days=1)) + try: + tw.revoked() + self.assertIn(tw.id, revoked) + self.assertNotEqual(mytask.backend.get_status(tw.id), + states.REVOKED) + + finally: + mytask.ignore_result = False + + def test_send_email(self): + app = app_or_default() + old_mail_admins = app.mail_admins + old_enable_mails = mytask.send_error_emails + mail_sent = [False] + + def mock_mail_admins(*args, **kwargs): + mail_sent[0] = True + + def get_ei(): + try: + raise KeyError('moofoobar') + except: + return ExceptionInfo() + + app.mail_admins = mock_mail_admins + mytask.send_error_emails = True + try: + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}) + + einfo = get_ei() + tw.on_failure(einfo) + self.assertTrue(mail_sent[0]) + + einfo = get_ei() + mail_sent[0] = False + mytask.send_error_emails = False + tw.on_failure(einfo) + self.assertFalse(mail_sent[0]) + + einfo = get_ei() + mail_sent[0] = False + mytask.send_error_emails = True + mytask.error_whitelist = [KeyError] + tw.on_failure(einfo) + self.assertTrue(mail_sent[0]) + + einfo = get_ei() + mail_sent[0] = False + mytask.send_error_emails = True + mytask.error_whitelist = [SyntaxError] + tw.on_failure(einfo) + self.assertFalse(mail_sent[0]) + + finally: + app.mail_admins = old_mail_admins + 
mytask.send_error_emails = old_enable_mails + mytask.error_whitelist = () + + def test_already_revoked(self): + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}) + tw._already_revoked = True + self.assertTrue(tw.revoked()) + + def test_revoked(self): + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}) + with assert_signal_called(task_revoked, sender=tw.task, + terminated=False, + expired=False, + signum=None): + revoked.add(tw.id) + self.assertTrue(tw.revoked()) + self.assertTrue(tw._already_revoked) + self.assertTrue(tw.acknowledged) + + def test_execute_does_not_execute_revoked(self): + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}) + revoked.add(tw.id) + tw.execute() + + def test_execute_acks_late(self): + mytask_raising.acks_late = True + tw = TaskRequest(mytask_raising.name, uuid(), [1]) + try: + tw.execute() + self.assertTrue(tw.acknowledged) + tw.task.accept_magic_kwargs = False + tw.execute() + finally: + mytask_raising.acks_late = False + + def test_execute_using_pool_does_not_execute_revoked(self): + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}) + revoked.add(tw.id) + with self.assertRaises(TaskRevokedError): + tw.execute_using_pool(None) + + def test_on_accepted_acks_early(self): + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}) + tw.on_accepted(pid=os.getpid(), time_accepted=time.time()) + self.assertTrue(tw.acknowledged) + prev, module._does_debug = module._does_debug, False + try: + tw.on_accepted(pid=os.getpid(), time_accepted=time.time()) + finally: + module._does_debug = prev + + def test_on_accepted_acks_late(self): + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}) + mytask.acks_late = True + try: + tw.on_accepted(pid=os.getpid(), time_accepted=time.time()) + self.assertFalse(tw.acknowledged) + finally: + mytask.acks_late = False + + def test_on_accepted_terminates(self): + signum = signal.SIGKILL + pool = Mock() + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}) + with assert_signal_called(task_revoked, sender=tw.task, + terminated=True, + expired=False, + signum=signum): + tw.terminate(pool, signal='KILL') + self.assertFalse(pool.terminate_job.call_count) + tw.on_accepted(pid=314, time_accepted=time.time()) + pool.terminate_job.assert_called_with(314, signum) + + def test_on_success_acks_early(self): + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}) + tw.time_start = 1 + tw.on_success(42) + prev, module._does_info = module._does_info, False + try: + tw.on_success(42) + self.assertFalse(tw.acknowledged) + finally: + module._does_info = prev + + def test_on_success_BaseException(self): + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}) + tw.time_start = 1 + with self.assertRaises(SystemExit): + try: + raise SystemExit() + except SystemExit: + tw.on_success(ExceptionInfo()) + else: + assert False + + def test_on_success_eventer(self): + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}) + tw.time_start = 1 + tw.eventer = Mock() + tw.send_event = Mock() + tw.on_success(42) + self.assertTrue(tw.send_event.called) + + def test_on_success_when_failure(self): + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}) + tw.time_start = 1 + tw.on_failure = Mock() + try: + raise KeyError('foo') + except Exception: + tw.on_success(ExceptionInfo()) + self.assertTrue(tw.on_failure.called) + + def test_on_success_acks_late(self): + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}) + tw.time_start = 1 + mytask.acks_late = True + try: + tw.on_success(42) + self.assertTrue(tw.acknowledged) + finally: + mytask.acks_late = 
False + + def test_on_failure_WorkerLostError(self): + + def get_ei(): + try: + raise WorkerLostError('do re mi') + except WorkerLostError: + return ExceptionInfo() + + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}) + exc_info = get_ei() + tw.on_failure(exc_info) + self.assertEqual(mytask.backend.get_status(tw.id), + states.FAILURE) + + mytask.ignore_result = True + try: + exc_info = get_ei() + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}) + tw.on_failure(exc_info) + self.assertEqual(mytask.backend.get_status(tw.id), + states.PENDING) + finally: + mytask.ignore_result = False + + def test_on_failure_acks_late(self): + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}) + tw.time_start = 1 + mytask.acks_late = True + try: + try: + raise KeyError('foo') + except KeyError: + exc_info = ExceptionInfo() + tw.on_failure(exc_info) + self.assertTrue(tw.acknowledged) + finally: + mytask.acks_late = False + + def test_from_message_invalid_kwargs(self): + body = dict(task=mytask.name, id=1, args=(), kwargs='foo') + with self.assertRaises(InvalidTaskError): + TaskRequest.from_message(None, body) + + @patch('celery.worker.job.error') + @patch('celery.worker.job.warn') + def test_on_timeout(self, warn, error): + + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}) + tw.on_timeout(soft=True, timeout=1337) + self.assertIn('Soft time limit', warn.call_args[0][0]) + tw.on_timeout(soft=False, timeout=1337) + self.assertIn('Hard time limit', error.call_args[0][0]) + self.assertEqual(mytask.backend.get_status(tw.id), + states.FAILURE) + + mytask.ignore_result = True + try: + tw = TaskRequest(mytask.name, uuid(), [1], {'f': 'x'}) + tw.on_timeout(soft=True, timeout=1336) + self.assertEqual(mytask.backend.get_status(tw.id), + states.PENDING) + finally: + mytask.ignore_result = False + + def test_fast_trace_task(self): + from celery.task import trace + setup_worker_optimizations(self.app) + self.assertIs(trace.trace_task_ret, trace._fast_trace_task) + try: + mytask.__trace__ = build_tracer(mytask.name, mytask, + self.app.loader, 'test') + res = trace.trace_task_ret(mytask.name, uuid(), [4], {}) + self.assertEqual(res, 4 ** 4) + finally: + reset_worker_optimizations() + self.assertIs(trace.trace_task_ret, trace._trace_task_ret) + delattr(mytask, '__trace__') + res = trace.trace_task_ret(mytask.name, uuid(), [4], {}) + self.assertEqual(res, 4 ** 4) + + def test_trace_task_ret(self): + mytask.__trace__ = build_tracer(mytask.name, mytask, + self.app.loader, 'test') + res = _trace_task_ret(mytask.name, uuid(), [4], {}) + self.assertEqual(res, 4 ** 4) + + def test_trace_task_ret__no_trace(self): + try: + delattr(mytask, '__trace__') + except AttributeError: + pass + res = _trace_task_ret(mytask.name, uuid(), [4], {}) + self.assertEqual(res, 4 ** 4) + + def test_execute_safe_catches_exception(self): + + def _error_exec(self, *args, **kwargs): + raise KeyError('baz') + + @task_dec(request=None) + def raising(): + raise KeyError('baz') + + with self.assertWarnsRegex( + RuntimeWarning, r'Exception raised outside'): + res = trace_task(raising, uuid(), [], {}) + self.assertIsInstance(res, ExceptionInfo) + + def test_worker_task_trace_handle_retry(self): + from celery.exceptions import RetryTaskError + tid = uuid() + mytask.push_request(id=tid) + try: + raise ValueError('foo') + except Exception, exc: + try: + raise RetryTaskError(str(exc), exc=exc) + except RetryTaskError, exc: + w = TraceInfo(states.RETRY, exc) + w.handle_retry(mytask, store_errors=False) + 
self.assertEqual(mytask.backend.get_status(tid), + states.PENDING) + w.handle_retry(mytask, store_errors=True) + self.assertEqual(mytask.backend.get_status(tid), + states.RETRY) + finally: + mytask.pop_request() + + def test_worker_task_trace_handle_failure(self): + tid = uuid() + mytask.push_request() + try: + mytask.request.id = tid + try: + raise ValueError('foo') + except Exception, exc: + w = TraceInfo(states.FAILURE, exc) + w.handle_failure(mytask, store_errors=False) + self.assertEqual(mytask.backend.get_status(tid), + states.PENDING) + w.handle_failure(mytask, store_errors=True) + self.assertEqual(mytask.backend.get_status(tid), + states.FAILURE) + finally: + mytask.pop_request() + + def test_task_wrapper_mail_attrs(self): + tw = TaskRequest(mytask.name, uuid(), [], {}) + x = tw.success_msg % { + 'name': tw.name, + 'id': tw.id, + 'return_value': 10, + 'runtime': 0.3641, + } + self.assertTrue(x) + x = tw.error_msg % { + 'name': tw.name, + 'id': tw.id, + 'exc': 'FOOBARBAZ', + 'traceback': 'foobarbaz', + } + self.assertTrue(x) + + def test_from_message(self): + us = u'æØåveéðƒeæ' + body = {'task': mytask.name, 'id': uuid(), + 'args': [2], 'kwargs': {us: 'bar'}} + m = Message(None, body=anyjson.dumps(body), backend='foo', + content_type='application/json', + content_encoding='utf-8') + tw = TaskRequest.from_message(m, m.decode()) + self.assertIsInstance(tw, Request) + self.assertEqual(tw.name, body['task']) + self.assertEqual(tw.id, body['id']) + self.assertEqual(tw.args, body['args']) + us = from_utf8(us) + if sys.version_info < (2, 6): + self.assertEqual(tw.kwargs.keys()[0], us) + self.assertIsInstance(tw.kwargs.keys()[0], str) + + def test_from_message_empty_args(self): + body = {'task': mytask.name, 'id': uuid()} + m = Message(None, body=anyjson.dumps(body), backend='foo', + content_type='application/json', + content_encoding='utf-8') + tw = TaskRequest.from_message(m, m.decode()) + self.assertIsInstance(tw, Request) + self.assertEqual(tw.args, []) + self.assertEqual(tw.kwargs, {}) + + def test_from_message_missing_required_fields(self): + body = {} + m = Message(None, body=anyjson.dumps(body), backend='foo', + content_type='application/json', + content_encoding='utf-8') + with self.assertRaises(KeyError): + TaskRequest.from_message(m, m.decode()) + + def test_from_message_nonexistent_task(self): + body = {'task': 'cu.mytask.doesnotexist', 'id': uuid(), + 'args': [2], 'kwargs': {u'æØåveéðƒeæ': 'bar'}} + m = Message(None, body=anyjson.dumps(body), backend='foo', + content_type='application/json', + content_encoding='utf-8') + with self.assertRaises(KeyError): + TaskRequest.from_message(m, m.decode()) + + def test_execute(self): + tid = uuid() + tw = TaskRequest(mytask.name, tid, [4], {'f': 'x'}) + self.assertEqual(tw.execute(), 256) + meta = mytask.backend.get_task_meta(tid) + self.assertEqual(meta['result'], 256) + self.assertEqual(meta['status'], states.SUCCESS) + + def test_execute_success_no_kwargs(self): + tid = uuid() + tw = TaskRequest(mytask_no_kwargs.name, tid, [4], {}) + self.assertEqual(tw.execute(), 256) + meta = mytask_no_kwargs.backend.get_task_meta(tid) + self.assertEqual(meta['result'], 256) + self.assertEqual(meta['status'], states.SUCCESS) + + def test_execute_success_some_kwargs(self): + tid = uuid() + tw = TaskRequest(mytask_some_kwargs.name, tid, [4], {}) + self.assertEqual(tw.execute(), 256) + meta = mytask_some_kwargs.backend.get_task_meta(tid) + self.assertEqual(some_kwargs_scratchpad.get('task_id'), tid) + self.assertEqual(meta['result'], 256) +
self.assertEqual(meta['status'], states.SUCCESS) + + def test_execute_ack(self): + tid = uuid() + tw = TaskRequest(mytask.name, tid, [4], {'f': 'x'}, + on_ack=on_ack) + self.assertEqual(tw.execute(), 256) + meta = mytask.backend.get_task_meta(tid) + self.assertTrue(scratch['ACK']) + self.assertEqual(meta['result'], 256) + self.assertEqual(meta['status'], states.SUCCESS) + + def test_execute_fail(self): + tid = uuid() + tw = TaskRequest(mytask_raising.name, tid, [4]) + self.assertIsInstance(tw.execute(), ExceptionInfo) + meta = mytask_raising.backend.get_task_meta(tid) + self.assertEqual(meta['status'], states.FAILURE) + self.assertIsInstance(meta['result'], KeyError) + + def test_execute_using_pool(self): + tid = uuid() + tw = TaskRequest(mytask.name, tid, [4], {'f': 'x'}) + + class MockPool(BasePool): + target = None + args = None + kwargs = None + + def __init__(self, *args, **kwargs): + pass + + def apply_async(self, target, args=None, kwargs=None, + *margs, **mkwargs): + self.target = target + self.args = args + self.kwargs = kwargs + + p = MockPool() + tw.execute_using_pool(p) + self.assertTrue(p.target) + self.assertEqual(p.args[0], mytask.name) + self.assertEqual(p.args[1], tid) + self.assertEqual(p.args[2], [4]) + self.assertIn('f', p.args[3]) + self.assertIn([4], p.args) + + tw.task.accept_magic_kwargs = False + tw.execute_using_pool(p) + + def test_default_kwargs(self): + tid = uuid() + tw = TaskRequest(mytask.name, tid, [4], {'f': 'x'}) + self.assertDictEqual( + tw.extend_with_default_kwargs(), { + 'f': 'x', + 'logfile': None, + 'loglevel': None, + 'task_id': tw.id, + 'task_retries': 0, + 'task_is_eager': False, + 'delivery_info': { + 'exchange': None, + 'routing_key': None, + 'priority': None, + }, + 'task_name': tw.name}) + + @patch('celery.worker.job.logger') + def _test_on_failure(self, exception, logger): + app = app_or_default() + tid = uuid() + tw = TaskRequest(mytask.name, tid, [4], {'f': 'x'}) + try: + raise exception + except Exception: + exc_info = ExceptionInfo() + app.conf.CELERY_SEND_TASK_ERROR_EMAILS = True + try: + tw.on_failure(exc_info) + self.assertTrue(logger.log.called) + context = logger.log.call_args[0][2] + self.assertEqual(mytask.name, context['name']) + self.assertIn(tid, context['id']) + finally: + app.conf.CELERY_SEND_TASK_ERROR_EMAILS = False + + def test_on_failure(self): + self._test_on_failure(Exception('Inside unit tests')) + + def test_on_failure_unicode_exception(self): + self._test_on_failure(Exception(u'Бобры атакуют')) + + def test_on_failure_utf8_exception(self): + self._test_on_failure(Exception( + from_utf8(u'Бобры атакуют'))) diff --git a/awx/lib/site-packages/celery/tests/worker/test_revoke.py b/awx/lib/site-packages/celery/tests/worker/test_revoke.py new file mode 100644 index 0000000000..61c7fb6d44 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/worker/test_revoke.py @@ -0,0 +1,13 @@ +from __future__ import absolute_import + +from celery.worker import state +from celery.tests.utils import Case + + +class test_revoked(Case): + + def test_is_working(self): + state.revoked.add('foo') + self.assertIn('foo', state.revoked) + state.revoked.pop_value('foo') + self.assertNotIn('foo', state.revoked) diff --git a/awx/lib/site-packages/celery/tests/worker/test_state.py b/awx/lib/site-packages/celery/tests/worker/test_state.py new file mode 100644 index 0000000000..161db03df8 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/worker/test_state.py @@ -0,0 +1,117 @@ +from __future__ import absolute_import + +from celery.datastructures 
import LimitedSet +from celery.worker import state +from celery.tests.utils import Case + + +class StateResetCase(Case): + + def setUp(self): + self.reset_state() + self.on_setup() + + def tearDown(self): + self.reset_state() + self.on_teardown() + + def reset_state(self): + state.active_requests.clear() + state.revoked.clear() + state.total_count.clear() + + def on_setup(self): + pass + + def on_teardown(self): + pass + + +class MockShelve(dict): + filename = None + in_sync = False + closed = False + + def open(self, filename, **kwargs): + self.filename = filename + return self + + def sync(self): + self.in_sync = True + + def close(self): + self.closed = True + + +class MyPersistent(state.Persistent): + storage = MockShelve() + + +class test_Persistent(StateResetCase): + + def on_setup(self): + self.p = MyPersistent(filename='celery-state') + + def test_close_twice(self): + self.p._is_open = False + self.p.close() + + def test_constructor(self): + self.assertDictEqual(self.p.db, {}) + self.assertEqual(self.p.db.filename, self.p.filename) + + def test_save(self): + self.p.db['foo'] = 'bar' + self.p.save() + self.assertTrue(self.p.db.in_sync) + self.assertTrue(self.p.db.closed) + + def add_revoked(self, *ids): + for id in ids: + self.p.db.setdefault('revoked', LimitedSet()).add(id) + + def test_merge(self, data=['foo', 'bar', 'baz']): + self.add_revoked(*data) + self.p.merge(self.p.db) + for item in data: + self.assertIn(item, state.revoked) + + def test_sync(self, data1=['foo', 'bar', 'baz'], + data2=['baz', 'ini', 'koz']): + self.add_revoked(*data1) + for item in data2: + state.revoked.add(item) + self.p.sync(self.p.db) + + for item in data2: + self.assertIn(item, self.p.db['revoked']) + + +class SimpleReq(object): + + def __init__(self, name): + self.name = name + + +class test_state(StateResetCase): + + def test_accepted(self, requests=[SimpleReq('foo'), + SimpleReq('bar'), + SimpleReq('baz'), + SimpleReq('baz')]): + for request in requests: + state.task_accepted(request) + for req in requests: + self.assertIn(req, state.active_requests) + self.assertEqual(state.total_count['foo'], 1) + self.assertEqual(state.total_count['bar'], 1) + self.assertEqual(state.total_count['baz'], 2) + + def test_ready(self, requests=[SimpleReq('foo'), + SimpleReq('bar')]): + for request in requests: + state.task_accepted(request) + self.assertEqual(len(state.active_requests), 2) + for request in requests: + state.task_ready(request) + self.assertEqual(len(state.active_requests), 0) diff --git a/awx/lib/site-packages/celery/tests/worker/test_worker.py b/awx/lib/site-packages/celery/tests/worker/test_worker.py new file mode 100644 index 0000000000..006488dbf5 --- /dev/null +++ b/awx/lib/site-packages/celery/tests/worker/test_worker.py @@ -0,0 +1,1128 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import socket +import sys + +from collections import deque +from datetime import datetime, timedelta +from Queue import Empty + +from billiard.exceptions import WorkerLostError +from kombu import Connection +from kombu.exceptions import StdChannelError +from kombu.transport.base import Message +from mock import Mock, patch +from nose import SkipTest + +from celery import current_app +from celery.app.defaults import DEFAULTS +from celery.concurrency.base import BasePool +from celery.datastructures import AttributeDict +from celery.exceptions import SystemTerminate +from celery.task import task as task_dec +from celery.task import periodic_task as periodic_task_dec +from 
celery.utils import uuid +from celery.worker import WorkController, Queues, Timers, EvLoop, Pool +from celery.worker.buckets import FastQueue, AsyncTaskBucket +from celery.worker.job import Request +from celery.worker.consumer import BlockingConsumer +from celery.worker.consumer import QoS, RUN, PREFETCH_COUNT_MAX, CLOSE +from celery.utils.serialization import pickle +from celery.utils.timer2 import Timer +from celery.utils.threads import Event + +from celery.tests.utils import AppCase, Case + + +class PlaceHolder(object): + pass + + +class MyKombuConsumer(BlockingConsumer): + broadcast_consumer = Mock() + task_consumer = Mock() + + def __init__(self, *args, **kwargs): + kwargs.setdefault('pool', BasePool(2)) + super(MyKombuConsumer, self).__init__(*args, **kwargs) + + def restart_heartbeat(self): + self.heart = None + + +class MockNode(object): + commands = [] + + def handle_message(self, body, message): + self.commands.append(body.pop('command', None)) + + +class MockEventDispatcher(object): + sent = [] + closed = False + flushed = False + _outbound_buffer = [] + + def send(self, event, *args, **kwargs): + self.sent.append(event) + + def close(self): + self.closed = True + + def flush(self): + self.flushed = True + + +class MockHeart(object): + closed = False + + def stop(self): + self.closed = True + + +@task_dec() +def foo_task(x, y, z, **kwargs): + return x * y * z + + +@periodic_task_dec(run_every=60) +def foo_periodic_task(): + return 'foo' + + +def create_message(channel, **data): + data.setdefault('id', uuid()) + channel.no_ack_consumers = set() + return Message(channel, body=pickle.dumps(dict(**data)), + content_type='application/x-python-serialize', + content_encoding='binary', + delivery_info={'consumer_tag': 'mock'}) + + +class test_QoS(Case): + + class _QoS(QoS): + def __init__(self, value): + self.value = value + QoS.__init__(self, None, value) + + def set(self, value): + return value + + def test_qos_increment_decrement(self): + qos = self._QoS(10) + self.assertEqual(qos.increment_eventually(), 11) + self.assertEqual(qos.increment_eventually(3), 14) + self.assertEqual(qos.increment_eventually(-30), 14) + self.assertEqual(qos.decrement_eventually(7), 7) + self.assertEqual(qos.decrement_eventually(), 6) + + def test_qos_disabled_increment_decrement(self): + qos = self._QoS(0) + self.assertEqual(qos.increment_eventually(), 0) + self.assertEqual(qos.increment_eventually(3), 0) + self.assertEqual(qos.increment_eventually(-30), 0) + self.assertEqual(qos.decrement_eventually(7), 0) + self.assertEqual(qos.decrement_eventually(), 0) + self.assertEqual(qos.decrement_eventually(10), 0) + + def test_qos_thread_safe(self): + qos = self._QoS(10) + + def add(): + for i in xrange(1000): + qos.increment_eventually() + + def sub(): + for i in xrange(1000): + qos.decrement_eventually() + + def threaded(funs): + from threading import Thread + threads = [Thread(target=fun) for fun in funs] + for thread in threads: + thread.start() + for thread in threads: + thread.join() + + threaded([add, add]) + self.assertEqual(qos.value, 2010) + + qos.value = 1000 + threaded([add, sub]) # n = 2 + self.assertEqual(qos.value, 1000) + + def test_exceeds_short(self): + qos = QoS(Mock(), PREFETCH_COUNT_MAX - 1) + qos.update() + self.assertEqual(qos.value, PREFETCH_COUNT_MAX - 1) + qos.increment_eventually() + self.assertEqual(qos.value, PREFETCH_COUNT_MAX) + qos.increment_eventually() + self.assertEqual(qos.value, PREFETCH_COUNT_MAX + 1) + qos.decrement_eventually() + self.assertEqual(qos.value, 
PREFETCH_COUNT_MAX) + qos.decrement_eventually() + self.assertEqual(qos.value, PREFETCH_COUNT_MAX - 1) + + def test_consumer_increment_decrement(self): + consumer = Mock() + qos = QoS(consumer, 10) + qos.update() + self.assertEqual(qos.value, 10) + consumer.qos.assert_called_with(prefetch_count=10) + qos.decrement_eventually() + qos.update() + self.assertEqual(qos.value, 9) + consumer.qos.assert_called_with(prefetch_count=9) + qos.decrement_eventually() + self.assertEqual(qos.value, 8) + consumer.qos.assert_called_with(prefetch_count=9) + self.assertIn({'prefetch_count': 9}, consumer.qos.call_args) + + # Does not decrement 0 value + qos.value = 0 + qos.decrement_eventually() + self.assertEqual(qos.value, 0) + qos.increment_eventually() + self.assertEqual(qos.value, 0) + + def test_consumer_decrement_eventually(self): + consumer = Mock() + qos = QoS(consumer, 10) + qos.decrement_eventually() + self.assertEqual(qos.value, 9) + qos.value = 0 + qos.decrement_eventually() + self.assertEqual(qos.value, 0) + + def test_set(self): + consumer = Mock() + qos = QoS(consumer, 10) + qos.set(12) + self.assertEqual(qos.prev, 12) + qos.set(qos.prev) + + +class test_Consumer(Case): + + def setUp(self): + self.ready_queue = FastQueue() + self.timer = Timer() + + def tearDown(self): + self.timer.stop() + + def test_info(self): + l = MyKombuConsumer(self.ready_queue, timer=self.timer) + l.qos = QoS(l.task_consumer, 10) + info = l.info + self.assertEqual(info['prefetch_count'], 10) + self.assertFalse(info['broker']) + + l.connection = current_app.connection() + info = l.info + self.assertTrue(info['broker']) + + def test_start_when_closed(self): + l = MyKombuConsumer(self.ready_queue, timer=self.timer) + l._state = CLOSE + l.start() + + def test_connection(self): + l = MyKombuConsumer(self.ready_queue, timer=self.timer) + + l.reset_connection() + self.assertIsInstance(l.connection, Connection) + + l._state = RUN + l.event_dispatcher = None + l.stop_consumers(close_connection=False) + self.assertTrue(l.connection) + + l._state = RUN + l.stop_consumers() + self.assertIsNone(l.connection) + self.assertIsNone(l.task_consumer) + + l.reset_connection() + self.assertIsInstance(l.connection, Connection) + l.stop_consumers() + + l.stop() + l.close_connection() + self.assertIsNone(l.connection) + self.assertIsNone(l.task_consumer) + + def test_close_connection(self): + l = MyKombuConsumer(self.ready_queue, timer=self.timer) + l._state = RUN + l.close_connection() + + l = MyKombuConsumer(self.ready_queue, timer=self.timer) + eventer = l.event_dispatcher = Mock() + eventer.enabled = True + heart = l.heart = MockHeart() + l._state = RUN + l.stop_consumers() + self.assertTrue(eventer.close.call_count) + self.assertTrue(heart.closed) + + @patch('celery.worker.consumer.warn') + def test_receive_message_unknown(self, warn): + l = MyKombuConsumer(self.ready_queue, timer=self.timer) + backend = Mock() + m = create_message(backend, unknown={'baz': '!!!'}) + l.event_dispatcher = Mock() + l.pidbox_node = MockNode() + + l.receive_message(m.decode(), m) + self.assertTrue(warn.call_count) + + @patch('celery.worker.consumer.to_timestamp') + def test_receive_message_eta_OverflowError(self, to_timestamp): + to_timestamp.side_effect = OverflowError() + l = MyKombuConsumer(self.ready_queue, timer=self.timer) + m = create_message(Mock(), task=foo_task.name, + args=('2, 2'), + kwargs={}, + eta=datetime.now().isoformat()) + l.event_dispatcher = Mock() + l.pidbox_node = MockNode() + l.update_strategies() + l.qos = Mock() + + 
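# [Editor's note -- descriptive comment, not part of the vendored diff]
# to_timestamp is patched to raise OverflowError, so the ETA can never be
# scheduled on the timer; the assertions that follow check that the message
# is still acknowledged rather than being redelivered forever.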
l.receive_message(m.decode(), m) + self.assertTrue(to_timestamp.called) + self.assertTrue(m.acknowledged) + + @patch('celery.worker.consumer.error') + def test_receive_message_InvalidTaskError(self, error): + l = MyKombuConsumer(self.ready_queue, timer=self.timer) + m = create_message(Mock(), task=foo_task.name, + args=(1, 2), kwargs='foobarbaz', id=1) + l.update_strategies() + l.event_dispatcher = Mock() + l.pidbox_node = MockNode() + + l.receive_message(m.decode(), m) + self.assertIn('Received invalid task message', error.call_args[0][0]) + + @patch('celery.worker.consumer.crit') + def test_on_decode_error(self, crit): + l = MyKombuConsumer(self.ready_queue, timer=self.timer) + + class MockMessage(Mock): + content_type = 'application/x-msgpack' + content_encoding = 'binary' + body = 'foobarbaz' + + message = MockMessage() + l.on_decode_error(message, KeyError('foo')) + self.assertTrue(message.ack.call_count) + self.assertIn("Can't decode message body", crit.call_args[0][0]) + + def test_receieve_message(self): + l = MyKombuConsumer(self.ready_queue, timer=self.timer) + m = create_message(Mock(), task=foo_task.name, + args=[2, 4, 8], kwargs={}) + l.update_strategies() + + l.event_dispatcher = Mock() + l.receive_message(m.decode(), m) + + in_bucket = self.ready_queue.get_nowait() + self.assertIsInstance(in_bucket, Request) + self.assertEqual(in_bucket.name, foo_task.name) + self.assertEqual(in_bucket.execute(), 2 * 4 * 8) + self.assertTrue(self.timer.empty()) + + def test_start_connection_error(self): + + class MockConsumer(BlockingConsumer): + iterations = 0 + + def consume_messages(self): + if not self.iterations: + self.iterations = 1 + raise KeyError('foo') + raise SyntaxError('bar') + + l = MockConsumer(self.ready_queue, timer=self.timer, + send_events=False, pool=BasePool()) + l.connection_errors = (KeyError, ) + with self.assertRaises(SyntaxError): + l.start() + l.heart.stop() + l.timer.stop() + + def test_start_channel_error(self): + # Regression test for AMQPChannelExceptions that can occur within the + # consumer. (i.e. 
404 errors) + + class MockConsumer(BlockingConsumer): + iterations = 0 + + def consume_messages(self): + if not self.iterations: + self.iterations = 1 + raise KeyError('foo') + raise SyntaxError('bar') + + l = MockConsumer(self.ready_queue, timer=self.timer, + send_events=False, pool=BasePool()) + + l.channel_errors = (KeyError, ) + self.assertRaises(SyntaxError, l.start) + l.heart.stop() + l.timer.stop() + + def test_consume_messages_ignores_socket_timeout(self): + + class Connection(current_app.connection().__class__): + obj = None + + def drain_events(self, **kwargs): + self.obj.connection = None + raise socket.timeout(10) + + l = MyKombuConsumer(self.ready_queue, timer=self.timer) + l.connection = Connection() + l.task_consumer = Mock() + l.connection.obj = l + l.qos = QoS(l.task_consumer, 10) + l.consume_messages() + + def test_consume_messages_when_socket_error(self): + + class Connection(current_app.connection().__class__): + obj = None + + def drain_events(self, **kwargs): + self.obj.connection = None + raise socket.error('foo') + + l = MyKombuConsumer(self.ready_queue, timer=self.timer) + l._state = RUN + c = l.connection = Connection() + l.connection.obj = l + l.task_consumer = Mock() + l.qos = QoS(l.task_consumer, 10) + with self.assertRaises(socket.error): + l.consume_messages() + + l._state = CLOSE + l.connection = c + l.consume_messages() + + def test_consume_messages(self): + + class Connection(current_app.connection().__class__): + obj = None + + def drain_events(self, **kwargs): + self.obj.connection = None + + l = MyKombuConsumer(self.ready_queue, timer=self.timer) + l.connection = Connection() + l.connection.obj = l + l.task_consumer = Mock() + l.qos = QoS(l.task_consumer, 10) + + l.consume_messages() + l.consume_messages() + self.assertTrue(l.task_consumer.consume.call_count) + l.task_consumer.qos.assert_called_with(prefetch_count=10) + l.task_consumer.qos = Mock() + self.assertEqual(l.qos.value, 10) + l.qos.decrement_eventually() + self.assertEqual(l.qos.value, 9) + l.qos.update() + self.assertEqual(l.qos.value, 9) + l.task_consumer.qos.assert_called_with(prefetch_count=9) + + def test_maybe_conn_error(self): + l = MyKombuConsumer(self.ready_queue, timer=self.timer) + l.connection_errors = (KeyError, ) + l.channel_errors = (SyntaxError, ) + l.maybe_conn_error(Mock(side_effect=AttributeError('foo'))) + l.maybe_conn_error(Mock(side_effect=KeyError('foo'))) + l.maybe_conn_error(Mock(side_effect=SyntaxError('foo'))) + with self.assertRaises(IndexError): + l.maybe_conn_error(Mock(side_effect=IndexError('foo'))) + + def test_apply_eta_task(self): + from celery.worker import state + l = MyKombuConsumer(self.ready_queue, timer=self.timer) + l.qos = QoS(None, 10) + + task = object() + qos = l.qos.value + l.apply_eta_task(task) + self.assertIn(task, state.reserved_requests) + self.assertEqual(l.qos.value, qos - 1) + self.assertIs(self.ready_queue.get_nowait(), task) + + def test_receieve_message_eta_isoformat(self): + if sys.version_info < (2, 6): + raise SkipTest('test broken on Python 2.5') + l = MyKombuConsumer(self.ready_queue, timer=self.timer) + m = create_message(Mock(), task=foo_task.name, + eta=datetime.now().isoformat(), + args=[2, 4, 8], kwargs={}) + + l.task_consumer = Mock() + l.qos = QoS(l.task_consumer, l.initial_prefetch_count) + current_pcount = l.qos.value + l.event_dispatcher = Mock() + l.enabled = False + l.update_strategies() + l.receive_message(m.decode(), m) + l.timer.stop() + l.timer.join(1) + + items = [entry[2] for entry in self.timer.queue] + found = 0 
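# [Editor's note -- descriptive comment, not part of the vendored diff]
# ETA tasks are not pushed onto the ready queue; they are scheduled on the
# consumer's timer, so this scan walks the timer's queue entries looking
# for the task by name.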
+ for item in items: + if item.args[0].name == foo_task.name: + found = True + self.assertTrue(found) + self.assertGreater(l.qos.value, current_pcount) + l.timer.stop() + + def test_on_control(self): + l = MyKombuConsumer(self.ready_queue, timer=self.timer) + l.pidbox_node = Mock() + l.reset_pidbox_node = Mock() + + l.on_control('foo', 'bar') + l.pidbox_node.handle_message.assert_called_with('foo', 'bar') + + l.pidbox_node = Mock() + l.pidbox_node.handle_message.side_effect = KeyError('foo') + l.on_control('foo', 'bar') + l.pidbox_node.handle_message.assert_called_with('foo', 'bar') + + l.pidbox_node = Mock() + l.pidbox_node.handle_message.side_effect = ValueError('foo') + l.on_control('foo', 'bar') + l.pidbox_node.handle_message.assert_called_with('foo', 'bar') + l.reset_pidbox_node.assert_called_with() + + def test_revoke(self): + ready_queue = FastQueue() + l = MyKombuConsumer(ready_queue, timer=self.timer) + backend = Mock() + id = uuid() + t = create_message(backend, task=foo_task.name, args=[2, 4, 8], + kwargs={}, id=id) + from celery.worker.state import revoked + revoked.add(id) + + l.receive_message(t.decode(), t) + self.assertTrue(ready_queue.empty()) + + def test_receieve_message_not_registered(self): + l = MyKombuConsumer(self.ready_queue, timer=self.timer) + backend = Mock() + m = create_message(backend, task='x.X.31x', args=[2, 4, 8], kwargs={}) + + l.event_dispatcher = Mock() + self.assertFalse(l.receive_message(m.decode(), m)) + with self.assertRaises(Empty): + self.ready_queue.get_nowait() + self.assertTrue(self.timer.empty()) + + @patch('celery.worker.consumer.warn') + @patch('celery.worker.consumer.logger') + def test_receieve_message_ack_raises(self, logger, warn): + l = MyKombuConsumer(self.ready_queue, timer=self.timer) + backend = Mock() + m = create_message(backend, args=[2, 4, 8], kwargs={}) + + l.event_dispatcher = Mock() + l.connection_errors = (socket.error, ) + m.reject = Mock() + m.reject.side_effect = socket.error('foo') + self.assertFalse(l.receive_message(m.decode(), m)) + self.assertTrue(warn.call_count) + with self.assertRaises(Empty): + self.ready_queue.get_nowait() + self.assertTrue(self.timer.empty()) + m.reject.assert_called_with() + self.assertTrue(logger.critical.call_count) + + def test_receieve_message_eta(self): + l = MyKombuConsumer(self.ready_queue, timer=self.timer) + l.event_dispatcher = Mock() + l.event_dispatcher._outbound_buffer = deque() + backend = Mock() + m = create_message( + backend, task=foo_task.name, + args=[2, 4, 8], kwargs={}, + eta=(datetime.now() + timedelta(days=1)).isoformat(), + ) + + l.reset_connection() + p = l.app.conf.BROKER_CONNECTION_RETRY + l.app.conf.BROKER_CONNECTION_RETRY = False + try: + l.reset_connection() + finally: + l.app.conf.BROKER_CONNECTION_RETRY = p + l.stop_consumers() + l.event_dispatcher = Mock() + l.receive_message(m.decode(), m) + l.timer.stop() + in_hold = l.timer.queue[0] + self.assertEqual(len(in_hold), 3) + eta, priority, entry = in_hold + task = entry.args[0] + self.assertIsInstance(task, Request) + self.assertEqual(task.name, foo_task.name) + self.assertEqual(task.execute(), 2 * 4 * 8) + with self.assertRaises(Empty): + self.ready_queue.get_nowait() + + def test_reset_pidbox_node(self): + l = MyKombuConsumer(self.ready_queue, timer=self.timer) + l.pidbox_node = Mock() + chan = l.pidbox_node.channel = Mock() + l.connection = Mock() + chan.close.side_effect = socket.error('foo') + l.connection_errors = (socket.error, ) + l.reset_pidbox_node() + chan.close.assert_called_with() + + def 
test_reset_pidbox_node_green(self): + l = MyKombuConsumer(self.ready_queue, timer=self.timer) + l.pool = Mock() + l.pool.is_green = True + l.reset_pidbox_node() + l.pool.spawn_n.assert_called_with(l._green_pidbox_node) + + def test__green_pidbox_node(self): + l = MyKombuConsumer(self.ready_queue, timer=self.timer) + l.pidbox_node = Mock() + + class BConsumer(Mock): + + def __enter__(self): + self.consume() + return self + + def __exit__(self, *exc_info): + self.cancel() + + l.pidbox_node.listen = BConsumer() + connections = [] + + class Connection(object): + calls = 0 + + def __init__(self, obj): + connections.append(self) + self.obj = obj + self.default_channel = self.channel() + self.closed = False + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.close() + + def channel(self): + return Mock() + + def as_uri(self): + return 'dummy://' + + def drain_events(self, **kwargs): + if not self.calls: + self.calls += 1 + raise socket.timeout() + self.obj.connection = None + self.obj._pidbox_node_shutdown.set() + + def close(self): + self.closed = True + + l.connection = Mock() + l._open_connection = lambda: Connection(obj=l) + l._green_pidbox_node() + + l.pidbox_node.listen.assert_called_with(callback=l.on_control) + self.assertTrue(l.broadcast_consumer) + l.broadcast_consumer.consume.assert_called_with() + + self.assertIsNone(l.connection) + self.assertTrue(connections[0].closed) + + @patch('kombu.connection.Connection._establish_connection') + @patch('kombu.utils.sleep') + def test_open_connection_errback(self, sleep, connect): + l = MyKombuConsumer(self.ready_queue, timer=self.timer) + from kombu.transport.memory import Transport + Transport.connection_errors = (StdChannelError, ) + + def effect(): + if connect.call_count > 1: + return + raise StdChannelError() + connect.side_effect = effect + l._open_connection() + connect.assert_called_with() + + def test_stop_pidbox_node(self): + l = MyKombuConsumer(self.ready_queue, timer=self.timer) + l._pidbox_node_stopped = Event() + l._pidbox_node_shutdown = Event() + l._pidbox_node_stopped.set() + l.stop_pidbox_node() + + def test_start__consume_messages(self): + + class _QoS(object): + prev = 3 + value = 4 + + def update(self): + self.prev = self.value + + class _Consumer(MyKombuConsumer): + iterations = 0 + + def reset_connection(self): + if self.iterations >= 1: + raise KeyError('foo') + + init_callback = Mock() + l = _Consumer(self.ready_queue, timer=self.timer, + init_callback=init_callback) + l.task_consumer = Mock() + l.broadcast_consumer = Mock() + l.qos = _QoS() + l.connection = Connection() + l.iterations = 0 + + def raises_KeyError(limit=None): + l.iterations += 1 + if l.qos.prev != l.qos.value: + l.qos.update() + if l.iterations >= 2: + raise KeyError('foo') + + l.consume_messages = raises_KeyError + with self.assertRaises(KeyError): + l.start() + self.assertTrue(init_callback.call_count) + self.assertEqual(l.iterations, 1) + self.assertEqual(l.qos.prev, l.qos.value) + + init_callback.reset_mock() + l = _Consumer(self.ready_queue, timer=self.timer, + send_events=False, init_callback=init_callback) + l.qos = _QoS() + l.task_consumer = Mock() + l.broadcast_consumer = Mock() + l.connection = Connection() + l.consume_messages = Mock(side_effect=socket.error('foo')) + with self.assertRaises(socket.error): + l.start() + self.assertTrue(init_callback.call_count) + self.assertTrue(l.consume_messages.call_count) + + def test_reset_connection_with_no_node(self): + l = BlockingConsumer(self.ready_queue, 
timer=self.timer) + self.assertEqual(None, l.pool) + l.reset_connection() + + def test_on_task_revoked(self): + l = BlockingConsumer(self.ready_queue, timer=self.timer) + task = Mock() + task.revoked.return_value = True + l.on_task(task) + + def test_on_task_no_events(self): + l = BlockingConsumer(self.ready_queue, timer=self.timer) + task = Mock() + task.revoked.return_value = False + l.event_dispatcher = Mock() + l.event_dispatcher.enabled = False + task.eta = None + l._does_info = False + l.on_task(task) + + +class test_WorkController(AppCase): + + def setup(self): + self.worker = self.create_worker() + from celery import worker + self._logger = worker.logger + self.logger = worker.logger = Mock() + + def teardown(self): + from celery import worker + worker.logger = self._logger + + def create_worker(self, **kw): + worker = self.app.WorkController(concurrency=1, loglevel=0, **kw) + worker._shutdown_complete.set() + return worker + + @patch('celery.platforms.create_pidlock') + def test_use_pidfile(self, create_pidlock): + create_pidlock.return_value = Mock() + worker = self.create_worker(pidfile='pidfilelockfilepid') + worker.components = [] + worker.start() + self.assertTrue(create_pidlock.called) + worker.stop() + self.assertTrue(worker.pidlock.release.called) + + @patch('celery.platforms.signals') + @patch('celery.platforms.set_mp_process_title') + def test_process_initializer(self, set_mp_process_title, _signals): + from celery import Celery + from celery import signals + from celery._state import _tls + from celery.concurrency.processes import process_initializer + from celery.concurrency.processes import (WORKER_SIGRESET, + WORKER_SIGIGNORE) + + def on_worker_process_init(**kwargs): + on_worker_process_init.called = True + on_worker_process_init.called = False + signals.worker_process_init.connect(on_worker_process_init) + + loader = Mock() + loader.override_backends = {} + app = Celery(loader=loader, set_as_current=False) + app.loader = loader + app.conf = AttributeDict(DEFAULTS) + process_initializer(app, 'awesome.worker.com') + _signals.ignore.assert_any_call(*WORKER_SIGIGNORE) + _signals.reset.assert_any_call(*WORKER_SIGRESET) + self.assertTrue(app.loader.init_worker.call_count) + self.assertTrue(on_worker_process_init.called) + self.assertIs(_tls.current_app, app) + set_mp_process_title.assert_called_with( + 'celeryd', hostname='awesome.worker.com', + ) + + def test_with_rate_limits_disabled(self): + worker = WorkController(concurrency=1, loglevel=0, + disable_rate_limits=True) + self.assertTrue(hasattr(worker.ready_queue, 'put')) + + def test_attrs(self): + worker = self.worker + self.assertIsInstance(worker.timer, Timer) + self.assertTrue(worker.timer) + self.assertTrue(worker.pool) + self.assertTrue(worker.consumer) + self.assertTrue(worker.mediator) + self.assertTrue(worker.components) + + def test_with_embedded_celerybeat(self): + worker = WorkController(concurrency=1, loglevel=0, beat=True) + self.assertTrue(worker.beat) + self.assertIn(worker.beat, worker.components) + + def test_with_autoscaler(self): + worker = self.create_worker( + autoscale=[10, 3], send_events=False, + timer_cls='celery.utils.timer2.Timer', + ) + self.assertTrue(worker.autoscaler) + + def test_dont_stop_or_terminate(self): + worker = WorkController(concurrency=1, loglevel=0) + worker.stop() + self.assertNotEqual(worker._state, worker.CLOSE) + worker.terminate() + self.assertNotEqual(worker._state, worker.CLOSE) + + sigsafe, worker.pool.signal_safe = worker.pool.signal_safe, False + try: + 
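# [Editor's note -- descriptive comment, not part of the vendored diff]
# With pool.signal_safe forced to False, stop()/terminate() invoked from
# inside a signal handler (in_sighandler=True) must leave the pool alone,
# so the worker is expected to remain in the RUN state below.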
worker._state = worker.RUN + worker.stop(in_sighandler=True) + self.assertNotEqual(worker._state, worker.CLOSE) + worker.terminate(in_sighandler=True) + self.assertNotEqual(worker._state, worker.CLOSE) + finally: + worker.pool.signal_safe = sigsafe + + def test_on_timer_error(self): + worker = WorkController(concurrency=1, loglevel=0) + + try: + raise KeyError('foo') + except KeyError, exc: + Timers(worker).on_timer_error(exc) + msg, args = self.logger.error.call_args[0] + self.assertIn('KeyError', msg % args) + + def test_on_timer_tick(self): + worker = WorkController(concurrency=1, loglevel=10) + + Timers(worker).on_timer_tick(30.0) + xargs = self.logger.debug.call_args[0] + fmt, arg = xargs[0], xargs[1] + self.assertEqual(30.0, arg) + self.assertIn('Next eta %s secs', fmt) + + def test_process_task(self): + worker = self.worker + worker.pool = Mock() + backend = Mock() + m = create_message(backend, task=foo_task.name, args=[4, 8, 10], + kwargs={}) + task = Request.from_message(m, m.decode()) + worker.process_task(task) + self.assertEqual(worker.pool.apply_async.call_count, 1) + worker.pool.stop() + + def test_process_task_raise_base(self): + worker = self.worker + worker.pool = Mock() + worker.pool.apply_async.side_effect = KeyboardInterrupt('Ctrl+C') + backend = Mock() + m = create_message(backend, task=foo_task.name, args=[4, 8, 10], + kwargs={}) + task = Request.from_message(m, m.decode()) + worker.components = [] + worker._state = worker.RUN + with self.assertRaises(KeyboardInterrupt): + worker.process_task(task) + self.assertEqual(worker._state, worker.TERMINATE) + + def test_process_task_raise_SystemTerminate(self): + worker = self.worker + worker.pool = Mock() + worker.pool.apply_async.side_effect = SystemTerminate() + backend = Mock() + m = create_message(backend, task=foo_task.name, args=[4, 8, 10], + kwargs={}) + task = Request.from_message(m, m.decode()) + worker.components = [] + worker._state = worker.RUN + with self.assertRaises(SystemExit): + worker.process_task(task) + self.assertEqual(worker._state, worker.TERMINATE) + + def test_process_task_raise_regular(self): + worker = self.worker + worker.pool = Mock() + worker.pool.apply_async.side_effect = KeyError('some exception') + backend = Mock() + m = create_message(backend, task=foo_task.name, args=[4, 8, 10], + kwargs={}) + task = Request.from_message(m, m.decode()) + worker.process_task(task) + worker.pool.stop() + + def test_start_catches_base_exceptions(self): + worker1 = self.create_worker() + stc = Mock() + stc.start.side_effect = SystemTerminate() + worker1.components = [stc] + worker1.start() + self.assertTrue(stc.terminate.call_count) + + worker2 = self.create_worker() + sec = Mock() + sec.start.side_effect = SystemExit() + sec.terminate = None + worker2.components = [sec] + worker2.start() + self.assertTrue(sec.stop.call_count) + + def test_state_db(self): + from celery.worker import state + Persistent = state.Persistent + + state.Persistent = Mock() + try: + worker = self.create_worker(state_db='statefilename') + self.assertTrue(worker._persistence) + finally: + state.Persistent = Persistent + + def test_disable_rate_limits_solo(self): + worker = self.create_worker(disable_rate_limits=True, + pool_cls='solo') + self.assertIsInstance(worker.ready_queue, FastQueue) + self.assertIsNone(worker.mediator) + self.assertEqual(worker.ready_queue.put, worker.process_task) + + def test_enable_rate_limits_eventloop(self): + try: + worker = self.create_worker(disable_rate_limits=False, + use_eventloop=True, + 
pool_cls='processes') + except ImportError: + raise SkipTest('multiprocessing not supported') + self.assertIsInstance(worker.ready_queue, AsyncTaskBucket) + self.assertFalse(worker.mediator) + self.assertNotEqual(worker.ready_queue.put, worker.process_task) + + def test_disable_rate_limits_processes(self): + try: + worker = self.create_worker(disable_rate_limits=True, + use_eventloop=False, + pool_cls='processes') + except ImportError: + raise SkipTest('multiprocessing not supported') + self.assertIsInstance(worker.ready_queue, FastQueue) + self.assertFalse(worker.mediator) + self.assertEqual(worker.ready_queue.put, worker.process_task) + + def test_process_task_sem(self): + worker = self.worker + worker.semaphore = Mock() + worker._quick_acquire = worker.semaphore.acquire + + req = Mock() + worker.process_task_sem(req) + worker.semaphore.acquire.assert_called_with(worker.process_task, req) + + def test_signal_consumer_close(self): + worker = self.worker + worker.consumer = Mock() + + worker.signal_consumer_close() + worker.consumer.close.assert_called_with() + + worker.consumer.close.side_effect = AttributeError() + worker.signal_consumer_close() + + def test_start__stop(self): + worker = self.worker + worker._shutdown_complete.set() + worker.components = [Mock(), Mock(), Mock(), Mock()] + + worker.start() + for w in worker.components: + self.assertTrue(w.start.call_count) + worker.stop() + for component in worker.components: + self.assertTrue(w.stop.call_count) + + # Doesn't close pool if no pool. + worker.start() + worker.pool = None + worker.stop() + + # test that stop of None is not attempted + worker.components[-1] = None + worker.start() + worker.stop() + + def test_component_raises(self): + worker = self.worker + comp = Mock() + worker.components = [comp] + comp.start.side_effect = TypeError() + worker.stop = Mock() + worker.start() + worker.stop.assert_called_with() + + def test_state(self): + self.assertTrue(self.worker.state) + + def test_start__terminate(self): + worker = self.worker + worker._shutdown_complete.set() + worker.components = [Mock(), Mock(), Mock(), Mock(), Mock()] + for component in worker.components[:3]: + component.terminate = None + + worker.start() + for w in worker.components[:3]: + self.assertTrue(w.start.call_count) + self.assertTrue(worker._running, len(worker.components)) + self.assertEqual(worker._state, RUN) + worker.terminate() + for component in worker.components[:3]: + self.assertTrue(component.stop.call_count) + self.assertTrue(worker.components[4].terminate.call_count) + + def test_Queues_pool_not_rlimit_safe(self): + w = Mock() + w.pool_cls.rlimit_safe = False + Queues(w).create(w) + self.assertTrue(w.disable_rate_limits) + + def test_Queues_pool_no_sem(self): + w = Mock() + w.pool_cls.uses_semaphore = False + Queues(w).create(w) + self.assertIs(w.ready_queue.put, w.process_task) + + def test_EvLoop_crate(self): + w = Mock() + x = EvLoop(w) + hub = x.create(w) + self.assertTrue(w.timer.max_interval) + self.assertIs(w.hub, hub) + + def test_Pool_crate_threaded(self): + w = Mock() + w._conninfo.connection_errors = w._conninfo.channel_errors = () + w.pool_cls = Mock() + w.use_eventloop = False + pool = Pool(w) + pool.create(w) + + def test_Pool_create(self): + from celery.worker.hub import BoundedSemaphore + w = Mock() + w._conninfo.connection_errors = w._conninfo.channel_errors = () + w.hub = Mock() + w.hub.on_init = [] + w.pool_cls = Mock() + P = w.pool_cls.return_value = Mock() + P.timers = {Mock(): 30} + w.use_eventloop = True + 
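# [Editor's note -- descriptive comment, not part of the vendored diff]
# With use_eventloop enabled, the Pool component is expected to register
# an on_init hook on the hub; the remainder of this test drives those
# callbacks (process up/down, timeout set/cancel) by hand with mocks.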
w.consumer.restart_count = -1 + pool = Pool(w) + pool.create(w) + self.assertIsInstance(w.semaphore, BoundedSemaphore) + self.assertTrue(w.hub.on_init) + + hub = Mock() + w.hub.on_init[0](hub) + + cbs = w.pool.init_callbacks.call_args[1] + w = Mock() + cbs['on_process_up'](w) + hub.add_reader.assert_called_with(w.sentinel, P.maintain_pool) + + cbs['on_process_down'](w) + hub.remove.assert_called_with(w.sentinel) + + result = Mock() + tref = result._tref + + cbs['on_timeout_cancel'](result) + tref.cancel.assert_called_with() + cbs['on_timeout_cancel'](result) # no more tref + + cbs['on_timeout_set'](result, 10, 20) + tsoft, callback = hub.timer.apply_after.call_args[0] + callback() + + cbs['on_timeout_set'](result, 10, None) + tsoft, callback = hub.timer.apply_after.call_args[0] + callback() + cbs['on_timeout_set'](result, None, 10) + cbs['on_timeout_set'](result, None, None) + + with self.assertRaises(WorkerLostError): + P.did_start_ok.return_value = False + w.consumer.restart_count = 0 + pool.on_poll_init(P, w, hub) diff --git a/awx/lib/site-packages/celery/utils/__init__.py b/awx/lib/site-packages/celery/utils/__init__.py new file mode 100644 index 0000000000..0f360a9f6c --- /dev/null +++ b/awx/lib/site-packages/celery/utils/__init__.py @@ -0,0 +1,260 @@ +# -*- coding: utf-8 -*- +""" + celery.utils + ~~~~~~~~~~~~ + + Utility functions. + +""" +from __future__ import absolute_import +from __future__ import with_statement + +import os +import sys +import traceback +import warnings +import types +import datetime + +from functools import wraps +from inspect import getargspec +from pprint import pprint + +from kombu.entity import Exchange, Queue + +from celery.exceptions import CPendingDeprecationWarning, CDeprecationWarning +from .compat import StringIO + +from .functional import noop + +PENDING_DEPRECATION_FMT = """ + %(description)s is scheduled for deprecation in \ + version %(deprecation)s and removal in version v%(removal)s. \ + %(alternative)s +""" + +DEPRECATION_FMT = """ + %(description)s is deprecated and scheduled for removal in + version %(removal)s. %(alternative)s +""" + +#: Billiard sets this when execv is enabled. +#: We use it to find out the name of the original ``__main__`` +#: module, so that we can properly rewrite the name of the +#: task to be that of ``App.main``. +MP_MAIN_FILE = os.environ.get('MP_MAIN_FILE') or None + +#: Exchange for worker direct queues. +WORKER_DIRECT_EXCHANGE = Exchange('C.dq') + +#: Format for worker direct queue names. 
+WORKER_DIRECT_QUEUE_FORMAT = '%s.dq' + + +def worker_direct(hostname): + if isinstance(hostname, Queue): + return hostname + return Queue(WORKER_DIRECT_QUEUE_FORMAT % hostname, + WORKER_DIRECT_EXCHANGE, + hostname, + auto_delete=True) + + +def warn_deprecated(description=None, deprecation=None, + removal=None, alternative=None): + ctx = {'description': description, + 'deprecation': deprecation, 'removal': removal, + 'alternative': alternative} + if deprecation is not None: + w = CPendingDeprecationWarning(PENDING_DEPRECATION_FMT % ctx) + else: + w = CDeprecationWarning(DEPRECATION_FMT % ctx) + warnings.warn(w) + + +def deprecated(description=None, deprecation=None, + removal=None, alternative=None): + + def _inner(fun): + + @wraps(fun) + def __inner(*args, **kwargs): + from .imports import qualname + warn_deprecated(description=description or qualname(fun), + deprecation=deprecation, + removal=removal, + alternative=alternative) + return fun(*args, **kwargs) + return __inner + return _inner + + +def lpmerge(L, R): + """In-place left-precedent dictionary merge. + + Keeps values from `L`, if the value in `R` is :const:`None`.""" + set = L.__setitem__ + [set(k, v) for k, v in R.iteritems() if v is not None] + return L + + +def is_iterable(obj): + try: + iter(obj) + except TypeError: + return False + return True + + +def fun_takes_kwargs(fun, kwlist=[]): + """Given a function and a list of keyword arguments, returns the + arguments in the list which the function takes. + + If the object has an `argspec` attribute, that is used instead + of the :meth:`inspect.getargspec` introspection. + + :param fun: The function to inspect arguments of. + :param kwlist: The list of keyword arguments. + + Examples + + >>> def foo(self, x, y, logfile=None, loglevel=None): + ... return x * y + >>> fun_takes_kwargs(foo, ['logfile', 'loglevel', 'task_id']) + ['logfile', 'loglevel'] + + >>> def foo(self, x, y, **kwargs): + ... pass + >>> fun_takes_kwargs(foo, ['logfile', 'loglevel', 'task_id']) + ['logfile', 'loglevel', 'task_id'] + + """ + argspec = getattr(fun, 'argspec', getargspec(fun)) + args, _varargs, keywords, _defaults = argspec + if keywords is not None: + return kwlist + return [kw for kw in kwlist if kw in args] + + +def isatty(fh): + # Fixes bug with mod_wsgi: + # mod_wsgi.Log object has no attribute isatty. + return getattr(fh, 'isatty', None) and fh.isatty() + + +def cry(): # pragma: no cover + """Return stacktrace of all active threads.
+ + From https://gist.github.com/737056 + + """ + import threading + + tmap = {} + main_thread = None + # get a map of threads by their ID so we can print their names + # during the traceback dump + for t in threading.enumerate(): + if getattr(t, 'ident', None): + tmap[t.ident] = t + else: + main_thread = t + + out = StringIO() + sep = '=' * 49 + '\n' + for tid, frame in sys._current_frames().iteritems(): + thread = tmap.get(tid, main_thread) + if not thread: + # skip old junk (left-overs from a fork) + continue + out.write('%s\n' % (thread.getName(), )) + out.write(sep) + traceback.print_stack(frame, file=out) + out.write(sep) + out.write('LOCAL VARIABLES\n') + out.write(sep) + pprint(frame.f_locals, stream=out) + out.write('\n\n') + return out.getvalue() + + +def maybe_reraise(): + """Reraise if an exception is currently being handled, or return + otherwise.""" + exc_info = sys.exc_info() + try: + if exc_info[2]: + raise exc_info[0], exc_info[1], exc_info[2] + finally: + # see http://docs.python.org/library/sys.html#sys.exc_info + del(exc_info) + + +def strtobool(term, table={'false': False, 'no': False, '0': False, + 'true': True, 'yes': True, '1': True, + 'on': True, 'off': False}): + if isinstance(term, basestring): + try: + return table[term.lower()] + except KeyError: + raise TypeError('Cannot coerce %r to type bool' % (term, )) + return term + + +def jsonify(obj): + "Transforms object making it suitable for json serialization" + if isinstance(obj, (int, float, basestring, types.NoneType)): + return obj + elif isinstance(obj, (tuple, list)): + return [jsonify(v) for v in obj] + elif isinstance(obj, dict): + return dict((k, jsonify(v)) for k, v in obj.iteritems()) + # See "Date Time String Format" in the ECMA-262 specification. + elif isinstance(obj, datetime.datetime): + r = obj.isoformat() + if obj.microsecond: + r = r[:23] + r[26:] + if r.endswith('+00:00'): + r = r[:-6] + 'Z' + return r + elif isinstance(obj, datetime.date): + return obj.isoformat() + elif isinstance(obj, datetime.time): + r = obj.isoformat() + if obj.microsecond: + r = r[:12] + return r + elif isinstance(obj, datetime.timedelta): + return str(obj) + else: + raise ValueError("Unsupported type: %s" % type(obj)) + + +def gen_task_name(app, name, module_name): + try: + module = sys.modules[module_name] + except KeyError: + # Fix for manage.py shell_plus (Issue #366) + module = None + + if module is not None: + module_name = module.__name__ + # - If the task module is used as the __main__ script + # - we need to rewrite the module part of the task name + # - to match App.main. + if MP_MAIN_FILE and module.__file__ == MP_MAIN_FILE: + # - see comment about :envvar:`MP_MAIN_FILE` above. 
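# [Editor's note -- an illustrative sketch, not part of the vendored diff;
# 'proj' and 'add' are hypothetical names] When a task module runs as the
# __main__ script, the module part of the task name is rewritten to
# App.main, e.g.:
#
#     >>> app = Celery(main='proj')
#     >>> gen_task_name(app, 'add', '__main__')
#     'proj.add'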
+ module_name = '__main__' + if module_name == '__main__' and app.main: + return '.'.join([app.main, name]) + return '.'.join(p for p in (module_name, name) if p) + +# ------------------------------------------------------------------------ # +# > XXX Compat +from .log import LOG_LEVELS # noqa +from .imports import ( # noqa + qualname as get_full_cls_name, symbol_by_name as get_cls_by_name, + instantiate, import_from_cwd +) +from .functional import chunks, noop # noqa +from kombu.utils import cached_property, kwdict, uuid # noqa +gen_unique_id = uuid diff --git a/awx/lib/site-packages/celery/utils/compat.py b/awx/lib/site-packages/celery/utils/compat.py new file mode 100644 index 0000000000..fcbdc5b9fa --- /dev/null +++ b/awx/lib/site-packages/celery/utils/compat.py @@ -0,0 +1,172 @@ +# -*- coding: utf-8 -*- +""" + celery.utils.compat + ~~~~~~~~~~~~~~~~~~~ + + Compatibility implementations of features + only available in newer Python versions. + + +""" +from __future__ import absolute_import + +############## py3k ######################################################### +import sys +is_py3k = sys.version_info[0] == 3 + +try: + reload = reload # noqa +except NameError: # pragma: no cover + from imp import reload # noqa + +try: + from UserList import UserList # noqa +except ImportError: # pragma: no cover + from collections import UserList # noqa + +try: + from UserDict import UserDict # noqa +except ImportError: # pragma: no cover + from collections import UserDict # noqa + +if is_py3k: # pragma: no cover + from io import StringIO, BytesIO + from .encoding import bytes_to_str + + class WhateverIO(StringIO): + + def write(self, data): + StringIO.write(self, bytes_to_str(data)) +else: + from StringIO import StringIO # noqa + BytesIO = WhateverIO = StringIO # noqa + + +############## collections.OrderedDict ###################################### +# was moved to kombu +from kombu.utils.compat import OrderedDict # noqa + +############## threading.TIMEOUT_MAX ####################################### +try: + from threading import TIMEOUT_MAX as THREAD_TIMEOUT_MAX +except ImportError: + THREAD_TIMEOUT_MAX = 1e10 # noqa + +############## itertools.zip_longest ####################################### + +try: + from itertools import izip_longest as zip_longest +except ImportError: # pragma: no cover + import itertools + + def zip_longest(*args, **kwds): # noqa + fillvalue = kwds.get('fillvalue') + + def sentinel(counter=([fillvalue] * (len(args) - 1)).pop): + yield counter() # yields the fillvalue, or raises IndexError + + fillers = itertools.repeat(fillvalue) + iters = [itertools.chain(it, sentinel(), fillers) + for it in args] + try: + for tup in itertools.izip(*iters): + yield tup + except IndexError: + pass + + +############## itertools.chain.from_iterable ################################ +from itertools import chain + + +def _compat_chain_from_iterable(iterables): # pragma: no cover + for it in iterables: + for element in it: + yield element + +try: + chain_from_iterable = getattr(chain, 'from_iterable') +except AttributeError: # pragma: no cover + chain_from_iterable = _compat_chain_from_iterable + + +############## logging.handlers.WatchedFileHandler ########################## +import logging +import os +from stat import ST_DEV, ST_INO +import platform as _platform + +if _platform.system() == 'Windows': # pragma: no cover + #since windows doesn't go with WatchedFileHandler use FileHandler instead + WatchedFileHandler = logging.FileHandler +else: + try: + from logging.handlers import 
WatchedFileHandler + except ImportError: # pragma: no cover + class WatchedFileHandler(logging.FileHandler): # noqa + """ + A handler for logging to a file, which watches the file + to see if it has changed while in use. This can happen because of + usage of programs such as newsyslog and logrotate which perform + log file rotation. This handler, intended for use under Unix, + watches the file to see if it has changed since the last emit. + (A file has changed if its device or inode have changed.) + If it has changed, the old file stream is closed, and the file + opened to get a new stream. + + This handler is not appropriate for use under Windows, because + under Windows open files cannot be moved or renamed - logging + opens the files with exclusive locks - and so there is no need + for such a handler. Furthermore, ST_INO is not supported under + Windows; stat always returns zero for this value. + + This handler is based on a suggestion and patch by Chad J. + Schroeder. + """ + def __init__(self, *args, **kwargs): + logging.FileHandler.__init__(self, *args, **kwargs) + + if not os.path.exists(self.baseFilename): + self.dev, self.ino = -1, -1 + else: + stat = os.stat(self.baseFilename) + self.dev, self.ino = stat[ST_DEV], stat[ST_INO] + + def emit(self, record): + """ + Emit a record. + + First check if the underlying file has changed, and if it + has, close the old stream and reopen the file to get the + current stream. + """ + if not os.path.exists(self.baseFilename): + stat = None + changed = 1 + else: + stat = os.stat(self.baseFilename) + changed = ((stat[ST_DEV] != self.dev) or + (stat[ST_INO] != self.ino)) + if changed and self.stream is not None: + self.stream.flush() + self.stream.close() + self.stream = self._open() + if stat is None: + stat = os.stat(self.baseFilename) + self.dev, self.ino = stat[ST_DEV], stat[ST_INO] + logging.FileHandler.emit(self, record) + + +############## format(int, ',d') ########################## + +if sys.version_info >= (2, 7): # pragma: no cover + def format_d(i): + return format(i, ',d') +else: # pragma: no cover + def format_d(i): # noqa + s = '%d' % i + groups = [] + while s and s[-1].isdigit(): + groups.append(s[-3:]) + s = s[:-3] + return s + ','.join(reversed(groups)) diff --git a/awx/lib/site-packages/celery/utils/debug.py b/awx/lib/site-packages/celery/utils/debug.py new file mode 100644 index 0000000000..54c988877f --- /dev/null +++ b/awx/lib/site-packages/celery/utils/debug.py @@ -0,0 +1,82 @@ +# -*- coding: utf-8 -*- +""" + celery.utils.debug + ~~~~~~~~~~~~~~~~~~ + + Utilities for debugging memory usage. + +""" +from __future__ import absolute_import + +import os + +from .compat import format_d + +try: + from psutil import Process +except ImportError: + Process = None # noqa + +_process = None +_mem_sample = [] + + +def sample_mem(): + """Sample RSS memory usage. + + Statistics can then be output by calling :func:`memdump`. + + """ + _mem_sample.append(mem_rss()) + + +def memdump(samples=10): + """Dump memory statistics. + + Will print a sample of all RSS memory samples added by + calling :func:`sample_mem`, and in addition print + used RSS memory after :func:`gc.collect`. + + """ + if ps() is None: + print('- rss: (psutil not installed).') + return + if any(_mem_sample): + print('- rss (sample):') + for mem in sample(_mem_sample, samples): + print('- > %s,' % mem) + _mem_sample[:] = [] + import gc + gc.collect() + print('- rss (end): %s.' 
% (mem_rss())) + + +def sample(x, n, k=0): + """Given a list `x` a sample of length ``n`` of that list is returned. + + E.g. if `n` is 10, and `x` has 100 items, a list of every 10th + item is returned. + + ``k`` can be used as offset. + + """ + j = len(x) // n + for _ in xrange(n): + yield x[k] + k += j + + +def mem_rss(): + """Returns RSS memory usage as a humanized string.""" + p = ps() + if p is not None: + return '%sMB' % (format_d(p.get_memory_info().rss // 1024), ) + + +def ps(): + """Returns the global :class:`psutil.Process` instance, + or :const:`None` if :mod:`psutil` is not installed.""" + global _process + if _process is None and Process is not None: + _process = Process(os.getpid()) + return _process diff --git a/awx/lib/site-packages/celery/utils/dispatch/__init__.py b/awx/lib/site-packages/celery/utils/dispatch/__init__.py new file mode 100644 index 0000000000..888fe6d8af --- /dev/null +++ b/awx/lib/site-packages/celery/utils/dispatch/__init__.py @@ -0,0 +1,4 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import + +from .signal import Signal # noqa diff --git a/awx/lib/site-packages/celery/utils/dispatch/saferef.py b/awx/lib/site-packages/celery/utils/dispatch/saferef.py new file mode 100644 index 0000000000..7aa2fd0e6b --- /dev/null +++ b/awx/lib/site-packages/celery/utils/dispatch/saferef.py @@ -0,0 +1,281 @@ +# -*- coding: utf-8 -*- +""" +"Safe weakrefs", originally from pyDispatcher. + +Provides a way to safely weakref any function, including bound methods (which +aren't handled by the core weakref module). +""" +from __future__ import absolute_import + +import weakref +import traceback + + +def safe_ref(target, on_delete=None): # pragma: no cover + """Return a *safe* weak reference to a callable target + + :param target: the object to be weakly referenced, if it's a + bound method reference, will create a :class:`BoundMethodWeakref`, + otherwise creates a simple :class:`weakref.ref`. + + :keyword on_delete: if provided, will have a hard reference stored + to the callable to be called after the safe reference + goes out of scope with the reference object, (either a + :class:`weakref.ref` or a :class:`BoundMethodWeakref`) as argument. + """ + if getattr(target, "im_self", None) is not None: + # Turn a bound method into a BoundMethodWeakref instance. + # Keep track of these instances for lookup by disconnect(). + assert hasattr(target, 'im_func'), \ + """safe_ref target %r has im_self, but no im_func, " \ + "don't know how to create reference""" % (target, ) + return get_bound_method_weakref(target=target, + on_delete=on_delete) + if callable(on_delete): + return weakref.ref(target, on_delete) + else: + return weakref.ref(target) + + +class BoundMethodWeakref(object): # pragma: no cover + """'Safe' and reusable weak references to instance methods. + + BoundMethodWeakref objects provide a mechanism for + referencing a bound method without requiring that the + method object itself (which is normally a transient + object) is kept alive. Instead, the BoundMethodWeakref + object keeps weak references to both the object and the + function which together define the instance method. + + .. attribute:: key + + the identity key for the reference, calculated + by the class's :meth:`calculate_key` method applied to the + target instance method + + .. attribute:: deletion_methods + + sequence of callable objects taking + single argument, a reference to this object which + will be called when *either* the target object or + target function is garbage collected (i.e. 
when + this object becomes invalid). These are specified + as the on_delete parameters of :func:`safe_ref` calls. + + .. attribute:: weak_self + + weak reference to the target object + + .. attribute:: weak_func + + weak reference to the target function + + .. attribute:: _all_instances + + class attribute pointing to all live + BoundMethodWeakref objects indexed by the class's + `calculate_key(target)` method applied to the target + objects. This weak value dictionary is used to + short-circuit creation so that multiple references + to the same (object, function) pair produce the + same BoundMethodWeakref instance. + + """ + + _all_instances = weakref.WeakValueDictionary() + + def __new__(cls, target, on_delete=None, *arguments, **named): + """Create new instance or return current instance + + Basically this method of construction allows us to + short-circuit creation of references to already- + referenced instance methods. The key corresponding + to the target is calculated, and if there is already + an existing reference, that is returned, with its + deletionMethods attribute updated. Otherwise the + new instance is created and registered in the table + of already-referenced methods. + + """ + key = cls.calculate_key(target) + current = cls._all_instances.get(key) + if current is not None: + current.deletion_methods.append(on_delete) + return current + else: + base = super(BoundMethodWeakref, cls).__new__(cls) + cls._all_instances[key] = base + base.__init__(target, on_delete, *arguments, **named) + return base + + def __init__(self, target, on_delete=None): + """Return a weak-reference-like instance for a bound method + + :param target: the instance-method target for the weak + reference, must have `im_self` and `im_func` attributes + and be reconstructable via:: + + target.im_func.__get__(target.im_self) + + which is true of built-in instance methods. + + :keyword on_delete: optional callback which will be called + when this weak reference ceases to be valid + (i.e. either the object or the function is garbage + collected). Should take a single argument, + which will be passed a pointer to this object. + + """ + def remove(weak, self=self): + """Set self.is_dead to true when method or instance is destroyed""" + methods = self.deletion_methods[:] + del(self.deletion_methods[:]) + try: + del(self.__class__._all_instances[self.key]) + except KeyError: + pass + for function in methods: + try: + if callable(function): + function(self) + except Exception, exc: + try: + traceback.print_exc() + except AttributeError: + print("Exception during saferef %s cleanup function " + "%s: %s" % (self, function, exc)) + + self.deletion_methods = [on_delete] + self.key = self.calculate_key(target) + self.weak_self = weakref.ref(target.im_self, remove) + self.weak_func = weakref.ref(target.im_func, remove) + self.self_name = str(target.im_self) + self.func_name = str(target.im_func.__name__) + + def calculate_key(cls, target): + """Calculate the reference key for this reference + + Currently this is a two-tuple of the `id()`'s of the + target object and the target function respectively. 
+ """ + return id(target.im_self), id(target.im_func) + calculate_key = classmethod(calculate_key) + + def __str__(self): + """Give a friendly representation of the object""" + return """%s( %s.%s )""" % ( + self.__class__.__name__, + self.self_name, + self.func_name, + ) + + __repr__ = __str__ + + def __nonzero__(self): + """Whether we are still a valid reference""" + return self() is not None + + def __cmp__(self, other): + """Compare with another reference""" + if not isinstance(other, self.__class__): + return cmp(self.__class__, type(other)) + return cmp(self.key, other.key) + + def __call__(self): + """Return a strong reference to the bound method + + If the target cannot be retrieved, then will + return None, otherwise returns a bound instance + method for our object and function. + + Note: + You may call this method any number of times, + as it does not invalidate the reference. + """ + target = self.weak_self() + if target is not None: + function = self.weak_func() + if function is not None: + return function.__get__(target) + + +class BoundNonDescriptorMethodWeakref(BoundMethodWeakref): # pragma: no cover + """A specialized :class:`BoundMethodWeakref`, for platforms where + instance methods are not descriptors. + + It assumes that the function name and the target attribute name are the + same, instead of assuming that the function is a descriptor. This approach + is equally fast, but not 100% reliable because functions can be stored on + an attribute named differently from the function's name, such as in:: + + >>> class A(object): + ... pass + + >>> def foo(self): + ... return "foo" + >>> A.bar = foo + + But this shouldn't be a common use case. So, on platforms where methods + aren't descriptors (such as Jython) this implementation has the advantage + of working in most cases. + + """ + def __init__(self, target, on_delete=None): + """Return a weak-reference-like instance for a bound method + + :param target: the instance-method target for the weak + reference, must have `im_self` and `im_func` attributes + and be reconstructable via:: + + target.im_func.__get__(target.im_self) + + which is true of built-in instance methods. + + :keyword on_delete: optional callback which will be called + when this weak reference ceases to be valid + (i.e. either the object or the function is garbage + collected). Should take a single argument, + which will be passed a pointer to this object. + + """ + assert getattr(target.im_self, target.__name__) == target, \ + "method %s isn't available as the attribute %s of %s" % ( + target, target.__name__, target.im_self) + super(BoundNonDescriptorMethodWeakref, self).__init__(target, + on_delete) + + def __call__(self): + """Return a strong reference to the bound method + + If the target cannot be retrieved, then will + return None, otherwise returns a bound instance + method for our object and function. + + Note: + You may call this method any number of times, + as it does not invalidate the reference. + + """ + target = self.weak_self() + if target is not None: + function = self.weak_func() + if function is not None: + # Using curry() would be another option, but it erases the + # "signature" of the function. That is, after a function is + # curried, the inspect module can't be used to determine how + # many arguments the function expects, nor what keyword + # arguments it supports, and pydispatcher needs this + # information.
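# [Editor's note -- descriptive comment, not part of the vendored diff]
# getattr() re-fetches the bound method from the live instance by name,
# sidestepping the descriptor protocol; the assertion in __init__ (that
# the attribute name matches the function name) is what makes this
# lookup valid here.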
+ return getattr(target, function.__name__) + + +def get_bound_method_weakref(target, on_delete): # pragma: no cover + """Instantiates the appropriate :class:`BoundMethodWeakRef`, depending + on the details of the underlying class method implementation.""" + if hasattr(target, '__get__'): + # target method is a descriptor, so the default implementation works: + return BoundMethodWeakref(target=target, on_delete=on_delete) + else: + # no luck, use the alternative implementation: + return BoundNonDescriptorMethodWeakref(target=target, + on_delete=on_delete) diff --git a/awx/lib/site-packages/celery/utils/dispatch/signal.py b/awx/lib/site-packages/celery/utils/dispatch/signal.py new file mode 100644 index 0000000000..1bee956f30 --- /dev/null +++ b/awx/lib/site-packages/celery/utils/dispatch/signal.py @@ -0,0 +1,222 @@ +# -*- coding: utf-8 -*- +"""Signal class.""" +from __future__ import absolute_import + +import weakref +from . import saferef + +WEAKREF_TYPES = (weakref.ReferenceType, saferef.BoundMethodWeakref) + + +def _make_id(target): # pragma: no cover + if hasattr(target, 'im_func'): + return (id(target.im_self), id(target.im_func)) + return id(target) + + +class Signal(object): # pragma: no cover + """Base class for all signals + + + .. attribute:: receivers + Internal attribute, holds a dictionary of + `{receiverkey (id): weakref(receiver)}` mappings. + + """ + + def __init__(self, providing_args=None): + """Create a new signal. + + :param providing_args: A list of the arguments this signal can pass + along in a :meth:`send` call. + + """ + self.receivers = [] + if providing_args is None: + providing_args = [] + self.providing_args = set(providing_args) + + def connect(self, *args, **kwargs): + """Connect receiver to sender for signal. + + :param receiver: A function or an instance method which is to + receive signals. Receivers must be hashable objects. + + If weak is :const:`True`, then receiver must be weak-referenceable + (more precisely :func:`saferef.safe_ref()` must be able to create a + reference to the receiver). + + Receivers must be able to accept keyword arguments. + + If receivers have a `dispatch_uid` attribute, the receiver will + not be added if another receiver already exists with that + `dispatch_uid`. + + :keyword sender: The sender to which the receiver should respond. + Must either be of type :class:`Signal`, or :const:`None` to receive + events from any sender. + + :keyword weak: Whether to use weak references to the receiver. + By default, the module will attempt to use weak references to the + receiver objects. If this parameter is false, then strong + references will be used. + + :keyword dispatch_uid: An identifier used to uniquely identify a + particular instance of a receiver. This will usually be a + string, though it may be anything hashable.
+ + """ + def _handle_options(sender=None, weak=True, dispatch_uid=None): + + def _connect_signal(fun): + receiver = fun + + if dispatch_uid: + lookup_key = (dispatch_uid, _make_id(sender)) + else: + lookup_key = (_make_id(receiver), _make_id(sender)) + + if weak: + receiver = saferef.safe_ref( + receiver, on_delete=self._remove_receiver, + ) + + for r_key, _ in self.receivers: + if r_key == lookup_key: + break + else: + self.receivers.append((lookup_key, receiver)) + + return fun + + return _connect_signal + + if args and callable(args[0]): + return _handle_options(*args[1:], **kwargs)(args[0]) + return _handle_options(*args, **kwargs) + + def disconnect(self, receiver=None, sender=None, weak=True, + dispatch_uid=None): + """Disconnect receiver from sender for signal. + + If weak references are used, disconnect need not be called. The + receiver will be removed from dispatch automatically. + + :keyword receiver: The registered receiver to disconnect. May be + none if `dispatch_uid` is specified. + + :keyword sender: The registered sender to disconnect. + + :keyword weak: The weakref state to disconnect. + + :keyword dispatch_uid: the unique identifier of the receiver + to disconnect + + """ + if dispatch_uid: + lookup_key = (dispatch_uid, _make_id(sender)) + else: + lookup_key = (_make_id(receiver), _make_id(sender)) + + for index in xrange(len(self.receivers)): + (r_key, _) = self.receivers[index] + if r_key == lookup_key: + del self.receivers[index] + break + + def send(self, sender, **named): + """Send signal from sender to all connected receivers. + + If any receiver raises an error, the error propagates back through + send, terminating the dispatch loop, so it is quite possible to not + have all receivers called if a raises an error. + + :param sender: The sender of the signal. Either a specific + object or :const:`None`. + + :keyword \*\*named: Named arguments which will be passed to receivers. + + :returns: a list of tuple pairs: `[(receiver, response), ... ]`. + + """ + responses = [] + if not self.receivers: + return responses + + for receiver in self._live_receivers(_make_id(sender)): + response = receiver(signal=self, sender=sender, **named) + responses.append((receiver, response)) + return responses + + def send_robust(self, sender, **named): + """Send signal from sender to all connected receivers catching errors. + + :param sender: The sender of the signal. Can be any python object + (normally one registered with a connect if you actually want + something to occur). + + :keyword \*\*named: Named arguments which will be passed to receivers. + These arguments must be a subset of the argument names defined in + :attr:`providing_args`. + + :returns: a list of tuple pairs: `[(receiver, response), ... ]`. + + :raises DispatcherKeyError: + + if any receiver raises an error (specifically any subclass of + :exc:`Exception`), the error instance is returned as the result + for that receiver. + + """ + responses = [] + if not self.receivers: + return responses + + # Call each receiver with whatever arguments it can accept. + # Return a list of tuple pairs [(receiver, response), ... ]. + for receiver in self._live_receivers(_make_id(sender)): + try: + response = receiver(signal=self, sender=sender, **named) + except Exception, err: + responses.append((receiver, err)) + else: + responses.append((receiver, response)) + return responses + + def _live_receivers(self, senderkey): + """Filter sequence of receivers to get resolved, live receivers. 
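# A short usage sketch for the Signal API above (receiver name illustrative);
# receivers must accept keyword arguments, because `send` always passes
# `signal` and `sender` along with the named arguments:
#
#     >>> sig = Signal(providing_args=['value'])
#     >>> def receiver(signal=None, sender=None, value=None, **kwargs):
#     ...     return value * 2
#     >>> _ = sig.connect(receiver, weak=False)   # returns the receiver
#     >>> sig.send(None, value=21)
#     [(<function receiver at 0x...>, 42)]
#     >>> sig.disconnect(receiver)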
+
+        This checks for weak references and resolves them, returning only
+        live receivers.
+
+        """
+        none_senderkey = _make_id(None)
+        receivers = []
+
+        for (receiverkey, r_senderkey), receiver in self.receivers:
+            if r_senderkey == none_senderkey or r_senderkey == senderkey:
+                if isinstance(receiver, WEAKREF_TYPES):
+                    # Dereference the weak reference.
+                    receiver = receiver()
+                    if receiver is not None:
+                        receivers.append(receiver)
+                else:
+                    receivers.append(receiver)
+        return receivers
+
+    def _remove_receiver(self, receiver):
+        """Remove dead receivers from connections."""
+
+        to_remove = []
+        for key, connected_receiver in self.receivers:
+            if connected_receiver == receiver:
+                to_remove.append(key)
+        for key in to_remove:
+            for idx, (r_key, _) in enumerate(self.receivers):
+                if r_key == key:
+                    del self.receivers[idx]
+
+    def __repr__(self):
+        return '<Signal: %s>' % (self.__class__.__name__, )
+
+    __str__ = __repr__
diff --git a/awx/lib/site-packages/celery/utils/encoding.py b/awx/lib/site-packages/celery/utils/encoding.py
new file mode 100644
index 0000000000..3ddcd35ebc
--- /dev/null
+++ b/awx/lib/site-packages/celery/utils/encoding.py
@@ -0,0 +1,14 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.utils.encoding
+    ~~~~~~~~~~~~~~~~~~~~~
+
+    This module has moved to :mod:`kombu.utils.encoding`.
+
+"""
+from __future__ import absolute_import
+
+from kombu.utils.encoding import (  # noqa
+    default_encode, default_encoding, bytes_t, bytes_to_str, str_t,
+    str_to_bytes, ensure_bytes, from_utf8, safe_str, safe_repr,
+)
diff --git a/awx/lib/site-packages/celery/utils/functional.py b/awx/lib/site-packages/celery/utils/functional.py
new file mode 100644
index 0000000000..242dda2280
--- /dev/null
+++ b/awx/lib/site-packages/celery/utils/functional.py
@@ -0,0 +1,288 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.utils.functional
+    ~~~~~~~~~~~~~~~~~~~~~~~
+
+    Utilities for functions.
+
+"""
+from __future__ import absolute_import
+from __future__ import with_statement
+
+import threading
+
+from functools import wraps
+from itertools import islice
+
+from kombu.utils import cached_property
+from kombu.utils.functional import promise, maybe_promise
+from kombu.utils.compat import OrderedDict, next
+
+from .compat import UserDict, UserList
+
+KEYWORD_MARK = object()
+
+
+class LRUCache(UserDict):
+    """LRU Cache implementation using a doubly linked list to track access.
+
+    :keyword limit: The maximum number of keys to keep in the cache.
+        When a new key is inserted and the limit has been exceeded,
+        the *Least Recently Used* key will be discarded from the
+        cache.
+
+    """
+
+    def __init__(self, limit=None):
+        self.limit = limit
+        self.mutex = threading.RLock()
+        self.data = OrderedDict()
+
+    def __getitem__(self, key):
+        with self.mutex:
+            value = self[key] = self.data.pop(key)
+            return value
+
+    def keys(self):
+        # userdict.keys in py3k calls __getitem__
+        return self.data.keys()
+
+    def values(self):
+        return list(self._iterate_values())
+
+    def items(self):
+        return list(self._iterate_items())
+
+    def update(self, *args, **kwargs):
+        with self.mutex:
+            data, limit = self.data, self.limit
+            data.update(*args, **kwargs)
+            if limit and len(data) > limit:
+                # pop additional items in case limit exceeded
+                # negative overflow will lead to an empty list
+                for item in islice(iter(data), len(data) - limit):
+                    data.pop(item)
+
+    def __setitem__(self, key, value):
+        # remove least recently used key.
+ with self.mutex: + if self.limit and len(self.data) >= self.limit: + self.data.pop(iter(self.data).next()) + self.data[key] = value + + def __iter__(self): + return iter(self.data) + + def _iterate_items(self): + for k in self: + try: + yield (k, self.data[k]) + except KeyError: # pragma: no cover + pass + iteritems = _iterate_items + + def _iterate_values(self): + for k in self: + try: + yield self.data[k] + except KeyError: # pragma: no cover + pass + itervalues = _iterate_values + + def incr(self, key, delta=1): + with self.mutex: + # this acts as memcached does- store as a string, but return a + # integer as long as it exists and we can cast it + newval = int(self.data.pop(key)) + delta + self[key] = str(newval) + return newval + + def __getstate__(self): + d = dict(vars(self)) + d.pop('mutex') + return d + + def __setstate__(self, state): + self.__dict__ = state + self.mutex = threading.RLock() + + +def is_list(l): + """Returns true if object is list-like, but not a dict or string.""" + return hasattr(l, '__iter__') and not isinstance(l, (dict, basestring)) + + +def maybe_list(l): + """Returns list of one element if ``l`` is a scalar.""" + return l if l is None or is_list(l) else [l] + + +def memoize(maxsize=None, Cache=LRUCache): + + def _memoize(fun): + mutex = threading.Lock() + cache = Cache(limit=maxsize) + + @wraps(fun) + def _M(*args, **kwargs): + key = args + (KEYWORD_MARK, ) + tuple(sorted(kwargs.iteritems())) + try: + with mutex: + value = cache[key] + except KeyError: + value = fun(*args, **kwargs) + _M.misses += 1 + with mutex: + cache[key] = value + else: + _M.hits += 1 + return value + + def clear(): + """Clear the cache and reset cache statistics.""" + cache.clear() + _M.hits = _M.misses = 0 + + _M.hits = _M.misses = 0 + _M.clear = clear + _M.original_func = fun + return _M + + return _memoize + + +class mpromise(promise): + """Memoized promise. + + The function is only evaluated once, every subsequent access + will return the same value. + + .. attribute:: evaluated + + Set to to :const:`True` after the promise has been evaluated. + + """ + evaluated = False + _value = None + + def evaluate(self): + if not self.evaluated: + self._value = super(mpromise, self).evaluate() + self.evaluated = True + return self._value + + +def noop(*args, **kwargs): + """No operation. + + Takes any arguments/keyword arguments and does nothing. + + """ + pass + + +def first(predicate, it): + """Returns the first element in `iterable` that `predicate` returns a + :const:`True` value for. + + If `predicate` is None it will return the first item that is not None. + + """ + return next( + (v for v in it if (predicate(v) if predicate else v is not None)), + None, + ) + + +def firstmethod(method): + """Returns a function that with a list of instances, + finds the first instance that returns a value for the given method. + + The list can also contain promises (:class:`promise`.) + + """ + + def _matcher(it, *args, **kwargs): + for obj in it: + try: + answer = getattr(maybe_promise(obj), method)(*args, **kwargs) + except AttributeError: + pass + else: + if answer is not None: + return answer + + return _matcher + + +def chunks(it, n): + """Split an iterator into chunks with `n` elements each. 
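# A quick sketch of `memoize` in use (function name illustrative): results
# are kept in an LRUCache and the wrapper records hit/miss counters:
#
#     >>> @memoize(maxsize=64)
#     ... def square(x):
#     ...     return x * x
#     >>> square(4); square(4)
#     16
#     16
#     >>> (square.misses, square.hits)
#     (1, 1)
#     >>> square.clear()    # empty the cache and reset the counters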
+ + Examples + + # n == 2 + >>> x = chunks(iter([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), 2) + >>> list(x) + [[0, 1], [2, 3], [4, 5], [6, 7], [8, 9], [10]] + + # n == 3 + >>> x = chunks(iter([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), 3) + >>> list(x) + [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10]] + + """ + # XXX This function is not used anymore, at least not by Celery itself. + for first in it: + yield [first] + list(islice(it, n - 1)) + + +def padlist(container, size, default=None): + """Pad list with default elements. + + Examples: + + >>> first, last, city = padlist(['George', 'Costanza', 'NYC'], 3) + ('George', 'Costanza', 'NYC') + >>> first, last, city = padlist(['George', 'Costanza'], 3) + ('George', 'Costanza', None) + >>> first, last, city, planet = padlist(['George', 'Costanza', + 'NYC'], 4, default='Earth') + ('George', 'Costanza', 'NYC', 'Earth') + + """ + return list(container)[:size] + [default] * (size - len(container)) + + +def mattrgetter(*attrs): + """Like :func:`operator.itemgetter` but returns :const:`None` on missing + attributes instead of raising :exc:`AttributeError`.""" + return lambda obj: dict((attr, getattr(obj, attr, None)) + for attr in attrs) + + +def uniq(it): + """Returns all unique elements in ``it``, preserving order.""" + seen = set() + return (seen.add(obj) or obj for obj in it if obj not in seen) + + +def regen(it): + """Regen takes any iterable, and if the object is an + generator it will cache the evaluated list on first access, + so that the generator can be "consumed" multiple times.""" + if isinstance(it, (list, tuple)): + return it + return _regen(it) + + +class _regen(UserList, list): + # must be subclass of list so that json can encode. + def __init__(self, it): + self.__it = it + + @cached_property + def data(self): + return list(self.__it) + + def __iter__(self): # needed for Python 2.5 + return iter(self.data) diff --git a/awx/lib/site-packages/celery/utils/imports.py b/awx/lib/site-packages/celery/utils/imports.py new file mode 100644 index 0000000000..0ea1d77523 --- /dev/null +++ b/awx/lib/site-packages/celery/utils/imports.py @@ -0,0 +1,108 @@ +# -*- coding: utf-8 -*- +""" + celery.utils.import + ~~~~~~~~~~~~~~~~~~~ + + Utilities related to importing modules and symbols by name. + +""" +from __future__ import absolute_import +from __future__ import with_statement + +import imp as _imp +import importlib +import os +import sys + +from contextlib import contextmanager + +from kombu.utils import symbol_by_name + +from .compat import reload + + +class NotAPackage(Exception): + pass + + +if sys.version_info >= (3, 3): # pragma: no cover + + def qualname(obj): + return obj.__qualname__ + +else: + + def qualname(obj): # noqa + if not hasattr(obj, '__name__') and hasattr(obj, '__class__'): + return qualname(obj.__class__) + + return '%s.%s' % (obj.__module__, obj.__name__) + + +def instantiate(name, *args, **kwargs): + """Instantiate class by name. + + See :func:`symbol_by_name`. + + """ + return symbol_by_name(name)(*args, **kwargs) + + +@contextmanager +def cwd_in_path(): + cwd = os.getcwd() + if cwd in sys.path: + yield + else: + sys.path.insert(0, cwd) + try: + yield cwd + finally: + try: + sys.path.remove(cwd) + except ValueError: # pragma: no cover + pass + + +def find_module(module, path=None, imp=None): + """Version of :func:`imp.find_module` supporting dots.""" + if imp is None: + imp = importlib.import_module + with cwd_in_path(): + if '.' 
in module: + last = None + parts = module.split('.') + for i, part in enumerate(parts[:-1]): + mpart = imp('.'.join(parts[:i + 1])) + try: + path = mpart.__path__ + except AttributeError: + raise NotAPackage(module) + last = _imp.find_module(parts[i + 1], path) + return last + return _imp.find_module(module) + + +def import_from_cwd(module, imp=None, package=None): + """Import module, but make sure it finds modules + located in the current directory. + + Modules located in the current directory has + precedence over modules located in `sys.path`. + """ + if imp is None: + imp = importlib.import_module + with cwd_in_path(): + return imp(module, package=package) + + +def reload_from_cwd(module, reloader=None): + if reloader is None: + reloader = reload + with cwd_in_path(): + return reloader(module) + + +def module_file(module): + name = module.__file__ + return name[:-1] if name.endswith('.pyc') else name diff --git a/awx/lib/site-packages/celery/utils/log.py b/awx/lib/site-packages/celery/utils/log.py new file mode 100644 index 0000000000..efe2239ba3 --- /dev/null +++ b/awx/lib/site-packages/celery/utils/log.py @@ -0,0 +1,250 @@ +# -*- coding: utf-8 -*- +""" + celery.utils.log + ~~~~~~~~~~~~~~~~ + + Logging utilities. + +""" +from __future__ import absolute_import + +import logging +import os +import sys +import threading +import traceback + +from billiard import current_process, util as mputil +from kombu.log import get_logger as _get_logger, LOG_LEVELS + +from .encoding import safe_str, str_t +from .term import colored + +_process_aware = False +is_py3k = sys.version_info[0] == 3 + +MP_LOG = os.environ.get('MP_LOG', False) + + +# Sets up our logging hierarchy. +# +# Every logger in the celery package inherits from the "celery" +# logger, and every task logger inherits from the "celery.task" +# logger. +base_logger = logger = _get_logger('celery') +mp_logger = _get_logger('multiprocessing') + +in_sighandler = False + + +def set_in_sighandler(value): + global in_sighandler + in_sighandler = value + + +def get_logger(name): + l = _get_logger(name) + if logging.root not in (l, l.parent) and l is not base_logger: + l.parent = base_logger + return l +task_logger = get_logger('celery.task') + + +def get_task_logger(name): + logger = get_logger(name) + if logger.parent is logging.root: + logger.parent = task_logger + return logger + + +def mlevel(level): + if level and not isinstance(level, int): + return LOG_LEVELS[level.upper()] + return level + + +class ColorFormatter(logging.Formatter): + #: Loglevel -> Color mapping. + COLORS = colored().names + colors = {'DEBUG': COLORS['blue'], 'WARNING': COLORS['yellow'], + 'ERROR': COLORS['red'], 'CRITICAL': COLORS['magenta']} + + def __init__(self, fmt=None, use_color=True): + logging.Formatter.__init__(self, fmt) + self.use_color = use_color + + def formatException(self, ei): + if ei and not isinstance(ei, tuple): + ei = sys.exc_info() + r = logging.Formatter.formatException(self, ei) + if isinstance(r, str) and not is_py3k: + return safe_str(r) + return r + + def format(self, record): + levelname = record.levelname + color = self.colors.get(levelname) + + if self.use_color and color: + try: + msg = record.msg + # safe_str will repr the color object + # and color will break on non-string objects + # so need to reorder calls based on type. 
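# A sketch of the import helpers completed above (paths illustrative):
# `find_module` accepts dotted module names, and `instantiate` (defined
# earlier in imports.py) builds an object from a fully qualified name:
#
#     >>> find_module('celery.utils.imports')   # -> (file, pathname, desc)
#     >>> instantiate('celery.utils.functional.LRUCache', limit=10).limit
#     10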
+            # Issue #427
+            if isinstance(msg, basestring):
+                record.msg = str_t(color(safe_str(msg)))
+            else:
+                record.msg = safe_str(color(msg))
+        except Exception, exc:
+            record.msg = '<Unrepresentable %r: %r>' % (
+                type(record.msg), exc)
+            record.exc_info = True
+
+        if not is_py3k and 'processName' not in record.__dict__:
+            # Very ugly, but have to make sure processName is supported
+            # by foreign logger instances.
+            # (processName is always supported by Python 2.7)
+            process_name = current_process and current_process()._name or ''
+            record.__dict__['processName'] = process_name
+        return safe_str(logging.Formatter.format(self, record))
+
+
+class LoggingProxy(object):
+    """Forward file object to :class:`logging.Logger` instance.
+
+    :param logger: The :class:`logging.Logger` instance to forward to.
+    :param loglevel: Loglevel to use when writing messages.
+
+    """
+    mode = 'w'
+    name = None
+    closed = False
+    loglevel = logging.ERROR
+    _thread = threading.local()
+
+    def __init__(self, logger, loglevel=None):
+        self.logger = logger
+        self.loglevel = mlevel(loglevel or self.logger.level or self.loglevel)
+        self._safewrap_handlers()
+
+    def _safewrap_handlers(self):
+        """Make the logger handlers dump internal errors to
+        `sys.__stderr__` instead of `sys.stderr` to circumvent
+        infinite loops."""
+
+        def wrap_handler(handler):  # pragma: no cover
+
+            class WithSafeHandleError(logging.Handler):
+
+                def handleError(self, record):
+                    exc_info = sys.exc_info()
+                    try:
+                        try:
+                            traceback.print_exception(exc_info[0],
+                                                      exc_info[1],
+                                                      exc_info[2],
+                                                      None, sys.__stderr__)
+                        except IOError:
+                            pass    # see python issue 5971
+                    finally:
+                        del(exc_info)
+
+            handler.handleError = WithSafeHandleError().handleError
+
+        return [wrap_handler(h) for h in self.logger.handlers]
+
+    def write(self, data):
+        """Write message to logging object."""
+        if in_sighandler:
+            return sys.__stderr__.write(safe_str(data))
+        if getattr(self._thread, 'recurse_protection', False):
+            # Logger is logging back to this file, so stop recursing.
+            return
+        data = data.strip()
+        if data and not self.closed:
+            self._thread.recurse_protection = True
+            try:
+                self.logger.log(self.loglevel, safe_str(data))
+            finally:
+                self._thread.recurse_protection = False
+
+    def writelines(self, sequence):
+        """`writelines(sequence_of_strings) -> None`.
+
+        Write the strings to the file.
+
+        The sequence can be any iterable object producing strings.
+        This is equivalent to calling :meth:`write` for each string.
+
+        """
+        for part in sequence:
+            self.write(part)
+
+    def flush(self):
+        """This object is not buffered so any :meth:`flush` requests
+        are ignored."""
+        pass
+
+    def close(self):
+        """When the object is closed, no write requests are forwarded to
+        the logging object anymore."""
+        self.closed = True
+
+    def isatty(self):
+        """Always returns :const:`False`.
+        Just here for file support."""
+        return False
+
+
+def ensure_process_aware_logger():
+    """Make sure process name is recorded when loggers are used."""
+    global _process_aware
+    if not _process_aware:
+        logging._acquireLock()
+        try:
+            _process_aware = True
+            Logger = logging.getLoggerClass()
+            if getattr(Logger, '_process_aware', False):  # pragma: no cover
+                return
+
+            class ProcessAwareLogger(Logger):
+                _process_aware = True
+
+                def makeRecord(self, *args, **kwds):
+                    record = Logger.makeRecord(self, *args, **kwds)
+                    record.processName = current_process()._name
+                    return record
+            logging.setLoggerClass(ProcessAwareLogger)
+        finally:
+            logging._releaseLock()
+
+
+def get_multiprocessing_logger():
+    return mputil.get_logger() if mputil else None
+
+
+def reset_multiprocessing_logger():
+    if mputil and hasattr(mputil, '_logger'):
+        mputil._logger = None
+
+
+def _patch_logger_class():
+    """Make sure loggers don't log while in a signal handler."""
+
+    logging._acquireLock()
+    try:
+        OldLoggerClass = logging.getLoggerClass()
+        if not getattr(OldLoggerClass, '_signal_safe', False):
+
+            class SigSafeLogger(OldLoggerClass):
+                _signal_safe = True
+
+                def log(self, *args, **kwargs):
+                    if in_sighandler:
+                        sys.__stderr__.write('CANNOT LOG IN SIGHANDLER')
+                        return
+                    return OldLoggerClass.log(self, *args, **kwargs)
+            logging.setLoggerClass(SigSafeLogger)
+    finally:
+        logging._releaseLock()
+_patch_logger_class()
diff --git a/awx/lib/site-packages/celery/utils/mail.py b/awx/lib/site-packages/celery/utils/mail.py
new file mode 100644
index 0000000000..7fd7af6a47
--- /dev/null
+++ b/awx/lib/site-packages/celery/utils/mail.py
@@ -0,0 +1,201 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.utils.mail
+    ~~~~~~~~~~~~~~~~~
+
+    How task error emails are formatted and sent.
+
+"""
+from __future__ import absolute_import
+
+import sys
+import smtplib
+import socket
+import traceback
+import warnings
+
+from email.mime.text import MIMEText
+
+from .functional import maybe_list
+from .imports import symbol_by_name
+
+supports_timeout = sys.version_info >= (2, 6)
+
+_local_hostname = None
+
+
+def get_local_hostname():
+    global _local_hostname
+    if _local_hostname is None:
+        _local_hostname = socket.getfqdn()
+    return _local_hostname
+
+
+class SendmailWarning(UserWarning):
+    """Problem happened while sending the email message."""
+
+
+class Message(object):
+
+    def __init__(self, to=None, sender=None, subject=None,
+                 body=None, charset='us-ascii'):
+        self.to = maybe_list(to)
+        self.sender = sender
+        self.subject = subject
+        self.body = body
+        self.charset = charset
+
+    def __repr__(self):
+        return '<Email: To:%r Subject:%r>' % (self.to, self.subject)
+
+    def __str__(self):
+        msg = MIMEText(self.body, 'plain', self.charset)
+        msg['Subject'] = self.subject
+        msg['From'] = self.sender
+        msg['To'] = ', '.join(self.to)
+        return msg.as_string()
+
+
+class Mailer(object):
+    supports_timeout = supports_timeout
+
+    def __init__(self, host='localhost', port=0, user=None, password=None,
+                 timeout=2, use_ssl=False, use_tls=False):
+        self.host = host
+        self.port = port
+        self.user = user
+        self.password = password
+        self.timeout = timeout
+        self.use_ssl = use_ssl
+        self.use_tls = use_tls
+
+    def send(self, message, fail_silently=False):
+        try:
+            if self.supports_timeout:
+                self._send(message, timeout=self.timeout)
+            else:
+                import socket
+                old_timeout = socket.getdefaulttimeout()
+                socket.setdefaulttimeout(self.timeout)
+                try:
+                    self._send(message)
+                finally:
+                    socket.setdefaulttimeout(old_timeout)
+        except Exception, exc:
+            if not fail_silently:
+                raise
warnings.warn(SendmailWarning( + 'Mail could not be sent: %r %r\n%r' % ( + exc, {'To': ', '.join(message.to), + 'Subject': message.subject}, + traceback.format_stack()))) + + def _send(self, message, **kwargs): + Client = smtplib.SMTP_SSL if self.use_ssl else smtplib.SMTP + client = Client(self.host, self.port, + local_hostname=get_local_hostname(), **kwargs) + + if self.use_tls: + client.ehlo() + client.starttls() + client.ehlo() + + if self.user and self.password: + client.login(self.user, self.password) + + client.sendmail(message.sender, message.to, str(message)) + try: + client.quit() + except socket.sslerror: + client.close() + + +class ErrorMail(object): + """Defines how and when task error e-mails should be sent. + + :param task: The task instance that raised the error. + + :attr:`subject` and :attr:`body` are format strings which + are passed a context containing the following keys: + + * name + + Name of the task. + + * id + + UUID of the task. + + * exc + + String representation of the exception. + + * args + + Positional arguments. + + * kwargs + + Keyword arguments. + + * traceback + + String representation of the traceback. + + * hostname + + Worker hostname. + + """ + + # pep8.py borks on a inline signature separator and + # says "trailing whitespace" ;) + EMAIL_SIGNATURE_SEP = '-- ' + + #: Format string used to generate error email subjects. + subject = """\ + [celery@%(hostname)s] Error: Task %(name)s (%(id)s): %(exc)s + """ + + #: Format string used to generate error email content. + body = """ +Task %%(name)s with id %%(id)s raised exception:\n%%(exc)r + + +Task was called with args: %%(args)s kwargs: %%(kwargs)s. + +The contents of the full traceback was: + +%%(traceback)s + +%(EMAIL_SIGNATURE_SEP)s +Just to let you know, +py-celery at %%(hostname)s. +""" % {'EMAIL_SIGNATURE_SEP': EMAIL_SIGNATURE_SEP} + + error_whitelist = None + + def __init__(self, task, **kwargs): + self.task = task + self.email_subject = kwargs.get('subject', self.subject) + self.email_body = kwargs.get('body', self.body) + self.error_whitelist = getattr(task, 'error_whitelist', None) or () + + def should_send(self, context, exc): + """Returns true or false depending on if a task error mail + should be sent for this type of error.""" + return not self.error_whitelist or isinstance( + exc, tuple(symbol_by_name(n) for n in self.error_whitelist), + ) + + def format_subject(self, context): + return self.subject.strip() % context + + def format_body(self, context): + return self.body.strip() % context + + def send(self, context, exc, fail_silently=True): + if self.should_send(context, exc): + self.task.app.mail_admins(self.format_subject(context), + self.format_body(context), + fail_silently=fail_silently) diff --git a/awx/lib/site-packages/celery/utils/serialization.py b/awx/lib/site-packages/celery/utils/serialization.py new file mode 100644 index 0000000000..0a7cdf25c7 --- /dev/null +++ b/awx/lib/site-packages/celery/utils/serialization.py @@ -0,0 +1,179 @@ +# -*- coding: utf-8 -*- +""" + celery.utils.serialization + ~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Utilities for safely pickling exceptions. + +""" +from __future__ import absolute_import + +import inspect +import sys +import types + +import pickle as pypickle +try: + import cPickle as cpickle +except ImportError: + cpickle = None # noqa + +from .encoding import safe_repr + + +if sys.version_info < (2, 6): # pragma: no cover + # cPickle is broken in Python <= 2.6. 
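# A hedged sketch of the mail API above (host and addresses are
# placeholders; actually running this assumes a reachable SMTP server):
#
#     >>> msg = Message(to=['ops@example.com'], sender='awx@example.com',
#     ...               subject='task error', body='traceback goes here')
#     >>> Mailer(host='localhost', timeout=2).send(msg, fail_silently=True)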
+ # It unsafely and incorrectly uses relative instead of absolute imports, + # so e.g.: + # exceptions.KeyError + # becomes: + # celery.exceptions.KeyError + # + # Your best choice is to upgrade to Python 2.6, + # as while the pure pickle version has worse performance, + # it is the only safe option for older Python versions. + pickle = pypickle +else: + pickle = cpickle or pypickle + +#: List of base classes we probably don't want to reduce to. +unwanted_base_classes = (StandardError, Exception, BaseException, object) + +if sys.version_info < (2, 5): # pragma: no cover + + # Prior to Python 2.5, Exception was an old-style class + def subclass_exception(name, parent, unused): + return types.ClassType(name, (parent,), {}) +else: + + def subclass_exception(name, parent, module): # noqa + return type(name, (parent,), {'__module__': module}) + + +def find_nearest_pickleable_exception(exc): + """With an exception instance, iterate over its super classes (by mro) + and find the first super exception that is pickleable. It does + not go below :exc:`Exception` (i.e. it skips :exc:`Exception`, + :class:`BaseException` and :class:`object`). If that happens + you should use :exc:`UnpickleableException` instead. + + :param exc: An exception instance. + + :returns: the nearest exception if it's not :exc:`Exception` or below, + if it is it returns :const:`None`. + + :rtype :exc:`Exception`: + + """ + cls = exc.__class__ + getmro_ = getattr(cls, 'mro', None) + + # old-style classes doesn't have mro() + if not getmro_: # pragma: no cover + # all Py2.4 exceptions has a baseclass. + if not getattr(cls, '__bases__', ()): + return + # Use inspect.getmro() to traverse bases instead. + getmro_ = lambda: inspect.getmro(cls) + + for supercls in getmro_(): + if supercls in unwanted_base_classes: + # only BaseException and object, from here on down, + # we don't care about these. + return + try: + exc_args = getattr(exc, 'args', []) + superexc = supercls(*exc_args) + pickle.loads(pickle.dumps(superexc)) + except: + pass + else: + return superexc + + +def create_exception_cls(name, module, parent=None): + """Dynamically create an exception class.""" + if not parent: + parent = Exception + return subclass_exception(name, parent, module) + + +class UnpickleableExceptionWrapper(Exception): + """Wraps unpickleable exceptions. + + :param exc_module: see :attr:`exc_module`. + :param exc_cls_name: see :attr:`exc_cls_name`. + :param exc_args: see :attr:`exc_args` + + **Example** + + .. code-block:: python + + >>> try: + ... something_raising_unpickleable_exc() + >>> except Exception, e: + ... exc = UnpickleableException(e.__class__.__module__, + ... e.__class__.__name__, + ... e.args) + ... pickle.dumps(exc) # Works fine. + + """ + + #: The module of the original exception. + exc_module = None + + #: The name of the original exception class. + exc_cls_name = None + + #: The arguments for the original exception. 
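# A small sketch of the helpers above (names illustrative): an exception
# that can't be pickled (here, a class pickle can't locate because it is
# defined interactively) is reduced to its nearest pickleable ancestor:
#
#     >>> class CustomError(KeyError):
#     ...     pass
#     >>> find_nearest_pickleable_exception(CustomError('oops'))
#     KeyError('oops',)
#     >>> create_exception_cls('MyError', 'some.module', KeyError)
#     <class 'some.module.MyError'>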
+ exc_args = None + + def __init__(self, exc_module, exc_cls_name, exc_args, text=None): + safe_exc_args = [] + for arg in exc_args: + try: + pickle.dumps(arg) + safe_exc_args.append(arg) + except Exception: + safe_exc_args.append(safe_repr(arg)) + self.exc_module = exc_module + self.exc_cls_name = exc_cls_name + self.exc_args = safe_exc_args + self.text = text + Exception.__init__(self, exc_module, exc_cls_name, safe_exc_args, text) + + def restore(self): + return create_exception_cls(self.exc_cls_name, + self.exc_module)(*self.exc_args) + + def __str__(self): + return self.text + + @classmethod + def from_exception(cls, exc): + return cls(exc.__class__.__module__, + exc.__class__.__name__, + getattr(exc, 'args', []), + safe_repr(exc)) + + +def get_pickleable_exception(exc): + """Make sure exception is pickleable.""" + try: + pickle.loads(pickle.dumps(exc)) + except Exception: + pass + else: + return exc + nearest = find_nearest_pickleable_exception(exc) + if nearest: + return nearest + return UnpickleableExceptionWrapper.from_exception(exc) + + +def get_pickled_exception(exc): + """Get original exception from exception pickled using + :meth:`get_pickleable_exception`.""" + if isinstance(exc, UnpickleableExceptionWrapper): + return exc.restore() + return exc diff --git a/awx/lib/site-packages/celery/utils/term.py b/awx/lib/site-packages/celery/utils/term.py new file mode 100644 index 0000000000..7207c6aad3 --- /dev/null +++ b/awx/lib/site-packages/celery/utils/term.py @@ -0,0 +1,154 @@ +# -*- coding: utf-8 -*- +""" + celery.utils.term + ~~~~~~~~~~~~~~~~~ + + Terminals and colors. + +""" +from __future__ import absolute_import + +import platform + +from .encoding import safe_str + +BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8) +OP_SEQ = '\033[%dm' +RESET_SEQ = '\033[0m' +COLOR_SEQ = '\033[1;%dm' +fg = lambda s: COLOR_SEQ % s + +SYSTEM = platform.system() +IS_WINDOWS = SYSTEM == 'Windows' + + +class colored(object): + """Terminal colored text. + + Example:: + >>> c = colored(enabled=True) + >>> print(str(c.red('the quick '), c.blue('brown ', c.bold('fox ')), + ... c.magenta(c.underline('jumps over')), + ... c.yellow(' the lazy '), + ... 
c.green('dog '))) + + """ + + def __init__(self, *s, **kwargs): + self.s = s + self.enabled = not IS_WINDOWS and kwargs.get('enabled', True) + self.op = kwargs.get('op', '') + self.names = {'black': self.black, + 'red': self.red, + 'green': self.green, + 'yellow': self.yellow, + 'blue': self.blue, + 'magenta': self.magenta, + 'cyan': self.cyan, + 'white': self.white} + + def _add(self, a, b): + return unicode(a) + unicode(b) + + def _fold_no_color(self, a, b): + try: + A = a.no_color() + except AttributeError: + A = unicode(a) + try: + B = b.no_color() + except AttributeError: + B = unicode(b) + return safe_str(A) + safe_str(B) + + def no_color(self): + if self.s: + return reduce(self._fold_no_color, self.s) + return '' + + def embed(self): + prefix = '' + if self.enabled: + prefix = self.op + return prefix + safe_str(reduce(self._add, self.s)) + + def __unicode__(self): + suffix = '' + if self.enabled: + suffix = RESET_SEQ + return safe_str(self.embed() + suffix) + + def __str__(self): + return safe_str(self.__unicode__()) + + def node(self, s, op): + return self.__class__(enabled=self.enabled, op=op, *s) + + def black(self, *s): + return self.node(s, fg(30 + BLACK)) + + def red(self, *s): + return self.node(s, fg(30 + RED)) + + def green(self, *s): + return self.node(s, fg(30 + GREEN)) + + def yellow(self, *s): + return self.node(s, fg(30 + YELLOW)) + + def blue(self, *s): + return self.node(s, fg(30 + BLUE)) + + def magenta(self, *s): + return self.node(s, fg(30 + MAGENTA)) + + def cyan(self, *s): + return self.node(s, fg(30 + CYAN)) + + def white(self, *s): + return self.node(s, fg(30 + WHITE)) + + def __repr__(self): + return repr(self.no_color()) + + def bold(self, *s): + return self.node(s, OP_SEQ % 1) + + def underline(self, *s): + return self.node(s, OP_SEQ % 4) + + def blink(self, *s): + return self.node(s, OP_SEQ % 5) + + def reverse(self, *s): + return self.node(s, OP_SEQ % 7) + + def bright(self, *s): + return self.node(s, OP_SEQ % 8) + + def ired(self, *s): + return self.node(s, fg(40 + RED)) + + def igreen(self, *s): + return self.node(s, fg(40 + GREEN)) + + def iyellow(self, *s): + return self.node(s, fg(40 + YELLOW)) + + def iblue(self, *s): + return self.node(s, fg(40 + BLUE)) + + def imagenta(self, *s): + return self.node(s, fg(40 + MAGENTA)) + + def icyan(self, *s): + return self.node(s, fg(40 + CYAN)) + + def iwhite(self, *s): + return self.node(s, fg(40 + WHITE)) + + def reset(self, *s): + return self.node(s or [''], RESET_SEQ) + + def __add__(self, other): + return unicode(self) + unicode(other) diff --git a/awx/lib/site-packages/celery/utils/text.py b/awx/lib/site-packages/celery/utils/text.py new file mode 100644 index 0000000000..fe52f914f7 --- /dev/null +++ b/awx/lib/site-packages/celery/utils/text.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +""" + celery.utils.text + ~~~~~~~~~~~~~~~~~ + + Text formatting utilities + +""" +from __future__ import absolute_import + +import textwrap + +from pprint import pformat + + +def dedent_initial(s, n=4): + return s[n:] if s[:n] == ' ' * n else s + + +def dedent(s, n=4, sep='\n'): + return sep.join(dedent_initial(l) for l in s.splitlines()) + + +def fill_paragraphs(s, width, sep='\n'): + return sep.join(textwrap.fill(p, width) for p in s.split(sep)) + + +def join(l, sep='\n'): + return sep.join(v for v in l if v) + + +def ensure_2lines(s, sep='\n'): + if len(s.splitlines()) <= 2: + return s + sep + return s + + +def abbr(S, max, ellipsis='...'): + if S is None: + return '???' 
+ if len(S) > max: + return ellipsis and (S[:max - len(ellipsis)] + ellipsis) or S[:max] + return S + + +def abbrtask(S, max): + if S is None: + return '???' + if len(S) > max: + module, _, cls = S.rpartition('.') + module = abbr(module, max - len(cls) - 3, False) + return module + '[.]' + cls + return S + + +def indent(t, indent=0, sep='\n'): + """Indent text.""" + return sep.join(' ' * indent + p for p in t.split(sep)) + + +def truncate(text, maxlen=128, suffix='...'): + """Truncates text to a maximum number of characters.""" + if len(text) >= maxlen: + return text[:maxlen].rsplit(' ', 1)[0] + suffix + return text + + +def pluralize(n, text, suffix='s'): + if n > 1: + return text + suffix + return text + + +def pretty(value, width=80, nl_width=80, sep='\n', **kw): + if isinstance(value, dict): + return '{%s %s' % (sep, pformat(value, 4, nl_width)[1:]) + elif isinstance(value, tuple): + return '%s%s%s' % (sep, ' ' * 4, pformat(value, width=nl_width, **kw)) + else: + return pformat(value, width=width, **kw) diff --git a/awx/lib/site-packages/celery/utils/threads.py b/awx/lib/site-packages/celery/utils/threads.py new file mode 100644 index 0000000000..8f88aabf84 --- /dev/null +++ b/awx/lib/site-packages/celery/utils/threads.py @@ -0,0 +1,335 @@ +# -*- coding: utf-8 -*- +""" + celery.utils.threads + ~~~~~~~~~~~~~~~~~~~~ + + Threading utilities. + +""" +from __future__ import absolute_import + +import os +import sys +import threading +import traceback + +from celery.local import Proxy +from celery.utils.compat import THREAD_TIMEOUT_MAX + +USE_FAST_LOCALS = os.environ.get('USE_FAST_LOCALS') +PY3 = sys.version_info[0] == 3 + +_Thread = threading.Thread +_Event = threading.Event if PY3 else threading._Event +active_count = (getattr(threading, 'active_count', None) or + threading.activeCount) + + +class Event(_Event): + + if not hasattr(_Event, 'is_set'): # pragma: no cover + is_set = _Event.isSet + + +class Thread(_Thread): + + if not hasattr(_Thread, 'is_alive'): # pragma: no cover + is_alive = _Thread.isAlive + + if not hasattr(_Thread, 'daemon'): # pragma: no cover + daemon = property(_Thread.isDaemon, _Thread.setDaemon) + + if not hasattr(_Thread, 'name'): # pragma: no cover + name = property(_Thread.getName, _Thread.setName) + + +class bgThread(Thread): + + def __init__(self, name=None, **kwargs): + super(bgThread, self).__init__() + self._is_shutdown = Event() + self._is_stopped = Event() + self.daemon = True + self.name = name or self.__class__.__name__ + + def body(self): + raise NotImplementedError('subclass responsibility') + + def on_crash(self, msg, *fmt, **kwargs): + sys.stderr.write((msg + '\n') % fmt) + exc_info = sys.exc_info() + try: + traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], + None, sys.stderr) + finally: + del(exc_info) + + def run(self): + body = self.body + shutdown_set = self._is_shutdown.is_set + try: + while not shutdown_set(): + try: + body() + except Exception, exc: + try: + self.on_crash('%r crashed: %r', self.name, exc) + self._set_stopped() + finally: + os._exit(1) # exiting by normal means won't work + finally: + self._set_stopped() + + def _set_stopped(self): + try: + self._is_stopped.set() + except TypeError: # pragma: no cover + # we lost the race at interpreter shutdown, + # so gc collected built-in modules. 
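# Doctest-style examples for the text helpers defined above (values
# illustrative):
#
#     >>> truncate('The quick brown fox jumped over the lazy dog', 10)
#     'The quick...'
#     >>> pluralize(3, 'worker')
#     'workers'
#     >>> abbr('celery.worker.consumer.Consumer', 12)
#     'celery.wo...'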
+ pass + + def stop(self): + """Graceful shutdown.""" + self._is_shutdown.set() + self._is_stopped.wait() + if self.is_alive(): + self.join(THREAD_TIMEOUT_MAX) + +try: + from greenlet import getcurrent as get_ident +except ImportError: # pragma: no cover + try: + from thread import get_ident # noqa + except ImportError: # pragma: no cover + try: + from dummy_thread import get_ident # noqa + except ImportError: # pragma: no cover + from _thread import get_ident # noqa + + +def release_local(local): + """Releases the contents of the local for the current context. + This makes it possible to use locals without a manager. + + Example:: + + >>> loc = Local() + >>> loc.foo = 42 + >>> release_local(loc) + >>> hasattr(loc, 'foo') + False + + With this function one can release :class:`Local` objects as well + as :class:`StackLocal` objects. However it is not possible to + release data held by proxies that way, one always has to retain + a reference to the underlying local object in order to be able + to release it. + + .. versionadded:: 0.6.1 + """ + local.__release_local__() + + +class Local(object): + __slots__ = ('__storage__', '__ident_func__') + + def __init__(self): + object.__setattr__(self, '__storage__', {}) + object.__setattr__(self, '__ident_func__', get_ident) + + def __iter__(self): + return iter(self.__storage__.items()) + + def __call__(self, proxy): + """Create a proxy for a name.""" + return Proxy(self, proxy) + + def __release_local__(self): + self.__storage__.pop(self.__ident_func__(), None) + + def __getattr__(self, name): + try: + return self.__storage__[self.__ident_func__()][name] + except KeyError: + raise AttributeError(name) + + def __setattr__(self, name, value): + ident = self.__ident_func__() + storage = self.__storage__ + try: + storage[ident][name] = value + except KeyError: + storage[ident] = {name: value} + + def __delattr__(self, name): + try: + del self.__storage__[self.__ident_func__()][name] + except KeyError: + raise AttributeError(name) + + +class _LocalStack(object): + """This class works similar to a :class:`Local` but keeps a stack + of objects instead. This is best explained with an example:: + + >>> ls = LocalStack() + >>> ls.push(42) + >>> ls.top + 42 + >>> ls.push(23) + >>> ls.top + 23 + >>> ls.pop() + 23 + >>> ls.top + 42 + + They can be force released by using a :class:`LocalManager` or with + the :func:`release_local` function but the correct way is to pop the + item from the stack after using. When the stack is empty it will + no longer be bound to the current context (and as such released). + + By calling the stack without arguments it returns a proxy that + resolves to the topmost item on the stack. 
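# A sketch of subclassing bgThread (names illustrative): `body` is invoked
# in a loop until `stop()` sets the shutdown event:
#
#     >>> import time
#     >>> class Tick(bgThread):
#     ...     def body(self):
#     ...         time.sleep(1)          # do periodic work here
#     >>> t = Tick(name='ticker')
#     >>> t.start()
#     >>> t.stop()   # sets _is_shutdown and waits for the loop to finish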
+ + """ + + def __init__(self): + self._local = Local() + + def __release_local__(self): + self._local.__release_local__() + + def _get__ident_func__(self): + return self._local.__ident_func__ + + def _set__ident_func__(self, value): + object.__setattr__(self._local, '__ident_func__', value) + __ident_func__ = property(_get__ident_func__, _set__ident_func__) + del _get__ident_func__, _set__ident_func__ + + def __call__(self): + def _lookup(): + rv = self.top + if rv is None: + raise RuntimeError('object unbound') + return rv + return Proxy(_lookup) + + def push(self, obj): + """Pushes a new item to the stack""" + rv = getattr(self._local, 'stack', None) + if rv is None: + self._local.stack = rv = [] + rv.append(obj) + return rv + + def pop(self): + """Removes the topmost item from the stack, will return the + old value or `None` if the stack was already empty. + """ + stack = getattr(self._local, 'stack', None) + if stack is None: + return None + elif len(stack) == 1: + release_local(self._local) + return stack[-1] + else: + return stack.pop() + + def __len__(self): + stack = getattr(self._local, 'stack', None) + return len(stack) if stack else 0 + + @property + def stack(self): + """get_current_worker_task uses this to find + the original task that was executed by the worker.""" + stack = getattr(self._local, 'stack', None) + if stack is not None: + return stack + return [] + + @property + def top(self): + """The topmost item on the stack. If the stack is empty, + `None` is returned. + """ + try: + return self._local.stack[-1] + except (AttributeError, IndexError): + return None + + +class LocalManager(object): + """Local objects cannot manage themselves. For that you need a local + manager. You can pass a local manager multiple locals or add them + later by appending them to `manager.locals`. Everytime the manager + cleans up it, will clean up all the data left in the locals for this + context. + + The `ident_func` parameter can be added to override the default ident + function for the wrapped locals. + + """ + + def __init__(self, locals=None, ident_func=None): + if locals is None: + self.locals = [] + elif isinstance(locals, Local): + self.locals = [locals] + else: + self.locals = list(locals) + if ident_func is not None: + self.ident_func = ident_func + for local in self.locals: + object.__setattr__(local, '__ident_func__', ident_func) + else: + self.ident_func = get_ident + + def get_ident(self): + """Return the context identifier the local objects use internally + for this context. You cannot override this method to change the + behavior but use it to link other context local objects (such as + SQLAlchemy's scoped sessions) to the Werkzeug locals.""" + return self.ident_func() + + def cleanup(self): + """Manually clean up the data in the locals for this context. + + Call this at the end of the request or use `make_middleware()`. + + """ + for local in self.locals: + release_local(local) + + def __repr__(self): + return '<{0} storages: {1}>'.format( + self.__class__.__name__, len(self.locals)) + + +class _FastLocalStack(threading.local): + + def __init__(self): + self.stack = [] + self.push = self.stack.append + self.pop = self.stack.pop + + @property + def top(self): + try: + return self.stack[-1] + except (AttributeError, IndexError): + return None + + def __len__(self): + return len(self.stack) + +if USE_FAST_LOCALS: + LocalStack = _FastLocalStack +else: + # - See #706 + # since each thread has its own greenlet we can just use those as + # identifiers for the context. 
If greenlets are not available we + # fall back to the current thread ident. + LocalStack = _LocalStack # noqa diff --git a/awx/lib/site-packages/celery/utils/timer2.py b/awx/lib/site-packages/celery/utils/timer2.py new file mode 100644 index 0000000000..37be017fb2 --- /dev/null +++ b/awx/lib/site-packages/celery/utils/timer2.py @@ -0,0 +1,348 @@ +# -*- coding: utf-8 -*- +""" + timer2 + ~~~~~~ + + Scheduler for Python functions. + +""" +from __future__ import absolute_import +from __future__ import with_statement + +import atexit +import heapq +import os +import sys +import threading + +from datetime import datetime +from functools import wraps +from itertools import count +from time import time, sleep +from weakref import proxy as weakrefproxy + +from celery.utils.compat import THREAD_TIMEOUT_MAX +from celery.utils.timeutils import timedelta_seconds, timezone +from kombu.log import get_logger + +VERSION = (1, 0, 0) +__version__ = '.'.join(str(p) for p in VERSION) +__author__ = 'Ask Solem' +__contact__ = 'ask@celeryproject.org' +__homepage__ = 'http://github.com/ask/timer2/' +__docformat__ = 'restructuredtext' + +DEFAULT_MAX_INTERVAL = 2 +TIMER_DEBUG = os.environ.get('TIMER_DEBUG') +EPOCH = datetime.utcfromtimestamp(0).replace(tzinfo=timezone.utc) +IS_PYPY = hasattr(sys, 'pypy_version_info') + +logger = get_logger('timer2') + + +class Entry(object): + if not IS_PYPY: + __slots__ = ( + 'fun', 'args', 'kwargs', 'tref', 'cancelled', + '_last_run', '__weakref__', + ) + + def __init__(self, fun, args=None, kwargs=None): + self.fun = fun + self.args = args or [] + self.kwargs = kwargs or {} + self.tref = weakrefproxy(self) + self._last_run = None + self.cancelled = False + + def __call__(self): + return self.fun(*self.args, **self.kwargs) + + def cancel(self): + try: + self.tref.cancelled = True + except ReferenceError: + pass + + def __repr__(self): + return ' hash(other) + + def __eq__(self, other): + return hash(self) == hash(other) + + +def to_timestamp(d, default_timezone=timezone.utc): + if isinstance(d, datetime): + if d.tzinfo is None: + d = d.replace(tzinfo=default_timezone) + return timedelta_seconds(d - EPOCH) + return d + + +class Schedule(object): + """ETA scheduler.""" + Entry = Entry + + on_error = None + + def __init__(self, max_interval=None, on_error=None, **kwargs): + self.max_interval = float(max_interval or DEFAULT_MAX_INTERVAL) + self.on_error = on_error or self.on_error + self._queue = [] + + def apply_entry(self, entry): + try: + entry() + except Exception, exc: + if not self.handle_error(exc): + logger.error('Error in timer: %r', exc, exc_info=True) + + def handle_error(self, exc_info): + if self.on_error: + self.on_error(exc_info) + return True + + def stop(self): + pass + + def enter(self, entry, eta=None, priority=0): + """Enter function into the scheduler. + + :param entry: Item to enter. + :keyword eta: Scheduled time as a :class:`datetime.datetime` object. + :keyword priority: Unused. 
+ + """ + if eta is None: + eta = time() + if isinstance(eta, datetime): + try: + eta = to_timestamp(eta) + except Exception, exc: + if not self.handle_error(exc): + raise + return + return self._enter(eta, priority, entry) + + def _enter(self, eta, priority, entry): + heapq.heappush(self._queue, (eta, priority, entry)) + return entry + + def apply_at(self, eta, fun, args=(), kwargs={}, priority=0): + return self.enter(self.Entry(fun, args, kwargs), eta, priority) + + def enter_after(self, msecs, entry, priority=0, time=time): + return self.enter(entry, time() + (msecs / 1000.0), priority) + + def apply_after(self, msecs, fun, args=(), kwargs={}, priority=0): + return self.enter_after(msecs, self.Entry(fun, args, kwargs), priority) + + def apply_interval(self, msecs, fun, args=(), kwargs={}, priority=0): + tref = self.Entry(fun, args, kwargs) + secs = msecs * 1000.0 + + @wraps(fun) + def _reschedules(*args, **kwargs): + last, now = tref._last_run, time() + lsince = (now - tref._last_run) * 1000.0 if last else msecs + try: + if lsince and lsince >= msecs: + tref._last_run = now + return fun(*args, **kwargs) + finally: + if not tref.cancelled: + last = tref._last_run + next = secs - (now - last) if last else secs + self.enter_after(next / 1000.0, tref, priority) + + tref.fun = _reschedules + tref._last_run = None + return self.enter_after(msecs, tref, priority) + + @property + def schedule(self): + return self + + def __iter__(self, min=min, nowfun=time, pop=heapq.heappop, + push=heapq.heappush): + """The iterator yields the time to sleep for between runs.""" + max_interval = self.max_interval + queue = self._queue + + while 1: + if queue: + eta, priority, entry = verify = queue[0] + now = nowfun() + + if now < eta: + yield min(eta - now, max_interval), None + else: + event = pop(queue) + + if event is verify: + if not entry.cancelled: + yield None, entry + continue + else: + push(queue, event) + else: + yield None, None + + def empty(self): + """Is the schedule empty?""" + return not self._queue + + def clear(self): + self._queue[:] = [] # used because we can't replace the object + # and the operation is atomic. 
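# A small sketch of driving a Schedule directly (callback illustrative):
# iterating yields either a time to sleep or an entry that has become due:
#
#     >>> s = Schedule(max_interval=1)
#     >>> tref = s.apply_after(100, lambda: 'fired')   # due in 100ms
#     >>> delay, entry = next(iter(s))   # sleep hint first, entry once due
#     >>> if entry is not None:
#     ...     s.apply_entry(entry)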
+ + def info(self): + return ({'eta': eta, 'priority': priority, 'item': item} + for eta, priority, item in self.queue) + + def cancel(self, tref): + tref.cancel() + + @property + def queue(self, _pop=heapq.heappop): + """Snapshot of underlying datastructure.""" + events = list(self._queue) + return [_pop(i) for i in [events] * len(events)] + + +class Timer(threading.Thread): + Entry = Entry + Schedule = Schedule + + running = False + on_tick = None + _timer_count = count(1).next + + if TIMER_DEBUG: # pragma: no cover + def start(self, *args, **kwargs): + import traceback + print('- Timer starting') + traceback.print_stack() + super(Timer, self).start(*args, **kwargs) + + def __init__(self, schedule=None, on_error=None, on_tick=None, + max_interval=None, **kwargs): + self.schedule = schedule or self.Schedule(on_error=on_error, + max_interval=max_interval) + self.on_tick = on_tick or self.on_tick + + threading.Thread.__init__(self) + self._is_shutdown = threading.Event() + self._is_stopped = threading.Event() + self.mutex = threading.Lock() + self.not_empty = threading.Condition(self.mutex) + self.setDaemon(True) + self.setName('Timer-%s' % (self._timer_count(), )) + + def _next_entry(self): + with self.not_empty: + delay, entry = self.scheduler.next() + if entry is None: + if delay is None: + self.not_empty.wait(1.0) + return delay + return self.schedule.apply_entry(entry) + __next__ = next = _next_entry # for 2to3 + + def run(self): + try: + self.running = True + self.scheduler = iter(self.schedule) + + while not self._is_shutdown.isSet(): + delay = self._next_entry() + if delay: + if self.on_tick: + self.on_tick(delay) + if sleep is None: # pragma: no cover + break + sleep(delay) + try: + self._is_stopped.set() + except TypeError: # pragma: no cover + # we lost the race at interpreter shutdown, + # so gc collected built-in modules. 
+ pass + except Exception, exc: + logger.error('Thread Timer crashed: %r', exc, exc_info=True) + os._exit(1) + + def stop(self): + if self.running: + self._is_shutdown.set() + self._is_stopped.wait() + self.join(THREAD_TIMEOUT_MAX) + self.running = False + + def ensure_started(self): + if not self.running and not self.isAlive(): + self.start() + + def _do_enter(self, meth, *args, **kwargs): + self.ensure_started() + with self.mutex: + entry = getattr(self.schedule, meth)(*args, **kwargs) + self.not_empty.notify() + return entry + + def enter(self, entry, eta, priority=None): + return self._do_enter('enter', entry, eta, priority=priority) + + def apply_at(self, *args, **kwargs): + return self._do_enter('apply_at', *args, **kwargs) + + def enter_after(self, *args, **kwargs): + return self._do_enter('enter_after', *args, **kwargs) + + def apply_after(self, *args, **kwargs): + return self._do_enter('apply_after', *args, **kwargs) + + def apply_interval(self, *args, **kwargs): + return self._do_enter('apply_interval', *args, **kwargs) + + def exit_after(self, msecs, priority=10): + self.apply_after(msecs, sys.exit, priority) + + def cancel(self, tref): + tref.cancel() + + def clear(self): + self.schedule.clear() + + def empty(self): + return self.schedule.empty() + + @property + def queue(self): + return self.schedule.queue + +default_timer = _default_timer = Timer() +apply_after = _default_timer.apply_after +apply_at = _default_timer.apply_at +apply_interval = _default_timer.apply_interval +enter_after = _default_timer.enter_after +enter = _default_timer.enter +exit_after = _default_timer.exit_after +cancel = _default_timer.cancel +clear = _default_timer.clear + +atexit.register(_default_timer.stop) diff --git a/awx/lib/site-packages/celery/utils/timeutils.py b/awx/lib/site-packages/celery/utils/timeutils.py new file mode 100644 index 0000000000..67f105e996 --- /dev/null +++ b/awx/lib/site-packages/celery/utils/timeutils.py @@ -0,0 +1,321 @@ +# -*- coding: utf-8 -*- +""" + celery.utils.timeutils + ~~~~~~~~~~~~~~~~~~~~~~ + + This module contains various utilities related to dates and times. + +""" +from __future__ import absolute_import + +import os +import time as _time + +from kombu.utils import cached_property + +from datetime import datetime, timedelta, tzinfo +from dateutil import tz +from dateutil.parser import parse as parse_iso8601 + +from celery.exceptions import ImproperlyConfigured + +from .text import pluralize + +try: + import pytz + from pytz import AmbiguousTimeError +except ImportError: # pragma: no cover + pytz = None # noqa + + class AmbiguousTimeError(Exception): # noqa + pass + + +C_REMDEBUG = os.environ.get('C_REMDEBUG', False) + + +DAYNAMES = 'sun', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat' +WEEKDAYS = dict((name, dow) for name, dow in zip(DAYNAMES, range(7))) + +RATE_MODIFIER_MAP = {'s': lambda n: n, + 'm': lambda n: n / 60.0, + 'h': lambda n: n / 60.0 / 60.0} + + +HAVE_TIMEDELTA_TOTAL_SECONDS = hasattr(timedelta, 'total_seconds') + +TIME_UNITS = (('day', 60 * 60 * 24.0, lambda n: '%.2f' % n), + ('hour', 60 * 60.0, lambda n: '%.2f' % n), + ('minute', 60.0, lambda n: '%.2f' % n), + ('second', 1.0, lambda n: '%.2f' % n)) + +ZERO = timedelta(0) + +_local_timezone = None + + +class LocalTimezone(tzinfo): + """ + Local time implementation taken from Python's docs. + + Used only when pytz isn't available, and most likely inaccurate. If you're + having trouble with this class, don't waste your time, just install pytz. 
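# A usage sketch for the module-level convenience timer above (callbacks
# illustrative); the default Timer thread is started lazily on first use
# and stopped via the atexit hook:
#
#     >>> from celery.utils import timer2
#     >>> tref = timer2.apply_after(1000, lambda: None)         # in 1 second
#     >>> interval = timer2.apply_interval(5000, lambda: None)  # every 5s
#     >>> timer2.cancel(interval)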
+    """
+
+    def __init__(self):
+        # This code is moved in __init__ to execute it as late as possible
+        # See get_default_timezone().
+        self.STDOFFSET = timedelta(seconds=-_time.timezone)
+        if _time.daylight:
+            self.DSTOFFSET = timedelta(seconds=-_time.altzone)
+        else:
+            self.DSTOFFSET = self.STDOFFSET
+        self.DSTDIFF = self.DSTOFFSET - self.STDOFFSET
+        tzinfo.__init__(self)
+
+    def __repr__(self):
+        return '<LocalTimezone>'
+
+    def utcoffset(self, dt):
+        if self._isdst(dt):
+            return self.DSTOFFSET
+        else:
+            return self.STDOFFSET
+
+    def dst(self, dt):
+        if self._isdst(dt):
+            return self.DSTDIFF
+        else:
+            return ZERO
+
+    def tzname(self, dt):
+        return _time.tzname[self._isdst(dt)]
+
+    def _isdst(self, dt):
+        tt = (dt.year, dt.month, dt.day,
+              dt.hour, dt.minute, dt.second,
+              dt.weekday(), 0, 0)
+        stamp = _time.mktime(tt)
+        tt = _time.localtime(stamp)
+        return tt.tm_isdst > 0
+
+
+class _Zone(object):
+
+    def tz_or_local(self, tzinfo=None):
+        if tzinfo is None:
+            return self.local
+        return self.get_timezone(tzinfo)
+
+    def to_local(self, dt, local=None, orig=None):
+        if is_naive(dt):
+            dt = make_aware(dt, orig or self.utc)
+        return localize(dt, self.tz_or_local(local))
+
+    def to_system(self, dt):
+        return localize(dt, self.local)
+
+    def to_local_fallback(self, dt, *args, **kwargs):
+        if is_naive(dt):
+            return make_aware(dt, self.local)
+        return localize(dt, self.local)
+
+    def get_timezone(self, zone):
+        if isinstance(zone, basestring):
+            if pytz is None:
+                if zone == 'UTC':
+                    return tz.gettz('UTC')
+                raise ImproperlyConfigured(
+                    'Timezones requires the pytz library')
+            return pytz.timezone(zone)
+        return zone
+
+    @cached_property
+    def local(self):
+        return LocalTimezone()
+
+    @cached_property
+    def utc(self):
+        return self.get_timezone('UTC')
+timezone = _Zone()
+
+
+def maybe_timedelta(delta):
+    """Coerces `delta` to a :class:`~datetime.timedelta` if it is a number."""
+    if isinstance(delta, (int, float)):
+        return timedelta(seconds=delta)
+    return delta
+
+
+if HAVE_TIMEDELTA_TOTAL_SECONDS:  # pragma: no cover
+
+    def timedelta_seconds(delta):
+        """Convert :class:`datetime.timedelta` to seconds.
+
+        Doesn't account for negative values.
+
+        """
+        return max(delta.total_seconds(), 0)
+
+else:  # pragma: no cover
+
+    def timedelta_seconds(delta):  # noqa
+        """Convert :class:`datetime.timedelta` to seconds.
+
+        Doesn't account for negative values.
+
+        """
+        if delta.days < 0:
+            return 0
+        return delta.days * 86400 + delta.seconds + (delta.microseconds / 10e5)
+
+
+def delta_resolution(dt, delta):
+    """Round a datetime to the resolution of a timedelta.
+
+    If the timedelta is in days, the datetime will be rounded to the
+    nearest day; if the timedelta is in hours, the datetime will be
+    rounded to the nearest hour, and so on down to seconds, in which
+    case the original datetime is returned.
+
+    """
+    delta = timedelta_seconds(delta)
+
+    resolutions = ((3, lambda x: x / 86400),
+                   (4, lambda x: x / 3600),
+                   (5, lambda x: x / 60))
+
+    args = dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second
+    for res, predicate in resolutions:
+        if predicate(delta) >= 1.0:
+            return datetime(*args[:res])
+    return dt
+
+
+def remaining(start, ends_in, now=None, relative=False, debug=False):
+    """Calculate the remaining time for a start date and a timedelta.
+
+    e.g. "how many seconds left for 30 seconds after start?"
+
+    :param start: Start :class:`~datetime.datetime`.
+    :param ends_in: The end delta as a :class:`~datetime.timedelta`.
+    :keyword relative: If enabled the end time will be calculated
+        using :func:`delta_resolution` (i.e. rounded to the
+        resolution of `ends_in`).
+    :keyword now: The current time and date as a
+        :class:`~datetime.datetime`, defaults to :func:`datetime.utcnow`.
+
+    """
+    now = now or datetime.utcnow()
+    end_date = start + ends_in
+    if relative:
+        end_date = delta_resolution(end_date, ends_in)
+    ret = end_date - now
+    if C_REMDEBUG:
+        print('rem: NOW:%s START:%s END_DATE:%s REM:%s' % (
+            now, start, end_date, ret))
+    return ret
+
+
+def rate(rate):
+    """Parses rate strings, such as `"100/m"`, `"2/h"` or `"0.5/s"`,
+    and converts them to operations per second."""
+    if rate:
+        if isinstance(rate, basestring):
+            ops, _, modifier = rate.partition('/')
+            return RATE_MODIFIER_MAP[modifier or 's'](float(ops)) or 0
+        return rate or 0
+    return 0
+
+
+def weekday(name):
+    """Return the position of a weekday (0 - 6, where 0 is Sunday).
+
+    Example::
+
+        >>> weekday('sunday'), weekday('sun'), weekday('mon')
+        (0, 0, 1)
+
+    """
+    abbreviation = name[0:3].lower()
+    try:
+        return WEEKDAYS[abbreviation]
+    except KeyError:
+        # Show original day name in exception, instead of abbr.
+        raise KeyError(name)
+
+
+def humanize_seconds(secs, prefix='', sep=''):
+    """Show seconds in human form, e.g. 60 is "1 minute", 7200 is "2
+    hours".
+
+    :keyword prefix: Can be used to add a preposition to the output,
+        e.g. 'in' will give 'in 1 second', but adds nothing to 'now'.
+
+    """
+    secs = float(secs)
+    for unit, divider, formatter in TIME_UNITS:
+        if secs >= divider:
+            w = secs / divider
+            return '%s%s%s %s' % (prefix, sep, formatter(w),
+                                  pluralize(w, unit))
+    return 'now'
+
+
+def maybe_iso8601(dt):
+    """Either `datetime | str -> datetime` or `None -> None`."""
+    if not dt:
+        return
+    if isinstance(dt, datetime):
+        return dt
+    return parse_iso8601(dt)
+
+
+def is_naive(dt):
+    """Returns :const:`True` if the datetime is naive
+    (does not have timezone information)."""
+    return dt.tzinfo is None or dt.tzinfo.utcoffset(dt) is None
+
+
+def make_aware(dt, tz):
+    """Sets the timezone for a datetime object."""
+    try:
+        _localize = tz.localize
+    except AttributeError:
+        return dt.replace(tzinfo=tz)
+    else:
+        # works on pytz timezones
+        try:
+            return _localize(dt, is_dst=None)
+        except AmbiguousTimeError:
+            return min(_localize(dt, is_dst=True),
+                       _localize(dt, is_dst=False))
+
+
+def localize(dt, tz):
+    """Convert an aware datetime to another timezone."""
+    dt = dt.astimezone(tz)
+    try:
+        _normalize = tz.normalize
+    except AttributeError:  # non-pytz tz
+        return dt
+    else:
+        try:
+            return _normalize(dt, is_dst=None)
+        except TypeError:
+            return _normalize(dt)
+        except AmbiguousTimeError:
+            return min(_normalize(dt, is_dst=True),
+                       _normalize(dt, is_dst=False))
+
+
+def to_utc(dt):
+    """Converts a naive datetime to UTC."""
+    return make_aware(dt, timezone.utc)
+
+
+def maybe_make_aware(dt, tz=None):
+    if is_naive(dt):
+        dt = to_utc(dt)
+    return localize(dt,
+                    timezone.utc if tz is None else timezone.tz_or_local(tz))
diff --git a/awx/lib/site-packages/celery/worker/__init__.py b/awx/lib/site-packages/celery/worker/__init__.py
new file mode 100644
index 0000000000..10faa394de
--- /dev/null
+++ b/awx/lib/site-packages/celery/worker/__init__.py
@@ -0,0 +1,474 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.worker
+    ~~~~~~~~~~~~~
+
+    :class:`WorkController` can be used to instantiate in-process workers.
+
+    The worker consists of several components, all managed by boot-steps
+    (:mod:`celery.worker.bootsteps`).
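+
+    A minimal sketch of embedding a worker in-process (assumes a
+    reachable broker; production workers normally go through the
+    ``celery worker`` command line instead)::
+
+        import logging
+        from celery import Celery
+
+        app = Celery('example', broker='amqp://')
+        worker = app.WorkController(concurrency=2, loglevel=logging.INFO)
+        worker.start()   # blocks until shutdown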
+ +""" +from __future__ import absolute_import + +import atexit +import logging +import socket +import sys +import time +import traceback + +from functools import partial + +from billiard.exceptions import WorkerLostError +from billiard.util import Finalize +from kombu.syn import detect_environment + +from celery import concurrency as _concurrency +from celery import platforms +from celery.app import app_or_default +from celery.app.abstract import configurated, from_config +from celery.exceptions import SystemTerminate, TaskRevokedError +from celery.utils.functional import noop +from celery.utils.imports import qualname, reload_from_cwd +from celery.utils.log import get_logger +from celery.utils.threads import Event +from celery.utils.timer2 import Schedule + +from . import bootsteps +from . import state +from .buckets import TaskBucket, AsyncTaskBucket, FastQueue +from .hub import Hub, BoundedSemaphore + +#: Worker states +RUN = 0x1 +CLOSE = 0x2 +TERMINATE = 0x3 + +#: Default socket timeout at shutdown. +SHUTDOWN_SOCKET_TIMEOUT = 5.0 + +logger = get_logger(__name__) + + +class Namespace(bootsteps.Namespace): + """This is the boot-step namespace of the :class:`WorkController`. + + It loads modules from :setting:`CELERYD_BOOT_STEPS`, and its + own set of built-in boot-step modules. + + """ + name = 'worker' + builtin_boot_steps = ('celery.worker.autoscale', + 'celery.worker.autoreload', + 'celery.worker.consumer', + 'celery.worker.mediator') + + def modules(self): + return self.builtin_boot_steps + self.app.conf.CELERYD_BOOT_STEPS + + +class Pool(bootsteps.StartStopComponent): + """The pool component. + + Describes how to initialize the worker pool, and starts and stops + the pool during worker startup/shutdown. + + Adds attributes: + + * autoscale + * pool + * max_concurrency + * min_concurrency + + """ + name = 'worker.pool' + requires = ('queues', 'beat', ) + + def __init__(self, w, + autoscale=None, autoreload=False, no_execv=False, **kwargs): + w.autoscale = autoscale + w.pool = None + w.max_concurrency = None + w.min_concurrency = w.concurrency + w.no_execv = no_execv + if w.autoscale: + w.max_concurrency, w.min_concurrency = w.autoscale + self.autoreload_enabled = autoreload + + def on_poll_init(self, pool, w, hub): + apply_after = hub.timer.apply_after + apply_at = hub.timer.apply_at + on_soft_timeout = pool.on_soft_timeout + on_hard_timeout = pool.on_hard_timeout + maintain_pool = pool.maintain_pool + add_reader = hub.add_reader + remove = hub.remove + now = time.time + + # did_start_ok will verify that pool processes were able to start, + # but this will only work the first time we start, as + # maxtasksperchild will mess up metrics. 
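+        # (restart_count starts at -1 and is bumped on every consumer
+        # (re)connect, so the check below only fires on first startup.)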
+ if not w.consumer.restart_count and not pool.did_start_ok(): + raise WorkerLostError('Could not start worker processes') + + # need to handle pool results before every task + # since multiple tasks can be received in a single poll() + hub.on_task.append(pool.maybe_handle_result) + + hub.update_readers(pool.readers) + for handler, interval in pool.timers.iteritems(): + hub.timer.apply_interval(interval * 1000.0, handler) + + def on_timeout_set(R, soft, hard): + + def _on_soft_timeout(): + if hard: + R._tref = apply_at(now() + (hard - soft), + on_hard_timeout, (R, )) + on_soft_timeout(R) + if soft: + R._tref = apply_after(soft * 1000.0, _on_soft_timeout) + elif hard: + R._tref = apply_after(hard * 1000.0, + on_hard_timeout, (R, )) + + def on_timeout_cancel(result): + try: + result._tref.cancel() + delattr(result, '_tref') + except AttributeError: + pass + + pool.init_callbacks( + on_process_up=lambda w: add_reader(w.sentinel, maintain_pool), + on_process_down=lambda w: remove(w.sentinel), + on_timeout_set=on_timeout_set, + on_timeout_cancel=on_timeout_cancel, + ) + + def create(self, w, semaphore=None, max_restarts=None): + threaded = not w.use_eventloop + procs = w.min_concurrency + forking_enable = w.no_execv or not w.force_execv + if not threaded: + semaphore = w.semaphore = BoundedSemaphore(procs) + w._quick_acquire = w.semaphore.acquire + w._quick_release = w.semaphore.release + max_restarts = 100 + allow_restart = self.autoreload_enabled or w.pool_restarts + pool = w.pool = self.instantiate( + w.pool_cls, w.min_concurrency, + initargs=(w.app, w.hostname), + maxtasksperchild=w.max_tasks_per_child, + timeout=w.task_time_limit, + soft_timeout=w.task_soft_time_limit, + putlocks=w.pool_putlocks and threaded, + lost_worker_timeout=w.worker_lost_wait, + threads=threaded, + max_restarts=max_restarts, + allow_restart=allow_restart, + forking_enable=forking_enable, + semaphore=semaphore, + callbacks_propagate=( + w._conninfo.connection_errors + w._conninfo.channel_errors + ), + ) + if w.hub: + w.hub.on_init.append(partial(self.on_poll_init, pool, w)) + return pool + + +class Beat(bootsteps.StartStopComponent): + """Component used to embed a celerybeat process. + + This will only be enabled if the ``beat`` + argument is set. 
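+
+    A sketch of enabling it when embedding a worker (the keyword is
+    forwarded to this component by the boot-step machinery)::
+
+        worker = app.WorkController(beat=True)   # embeds a beat service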
+ + """ + name = 'worker.beat' + + def __init__(self, w, beat=False, **kwargs): + self.enabled = w.beat = beat + w.beat = None + + def create(self, w): + from celery.beat import EmbeddedService + b = w.beat = EmbeddedService(app=w.app, + schedule_filename=w.schedule_filename, + scheduler_cls=w.scheduler_cls) + return b + + +class Queues(bootsteps.Component): + """This component initializes the internal queues + used by the worker.""" + name = 'worker.queues' + requires = ('ev', ) + + def create(self, w): + BucketType = TaskBucket + w.start_mediator = not w.disable_rate_limits + if not w.pool_cls.rlimit_safe: + w.start_mediator = False + BucketType = AsyncTaskBucket + process_task = w.process_task + if w.use_eventloop: + w.start_mediator = False + BucketType = AsyncTaskBucket + if w.pool_putlocks and w.pool_cls.uses_semaphore: + process_task = w.process_task_sem + if w.disable_rate_limits: + w.ready_queue = FastQueue() + w.ready_queue.put = process_task + else: + w.ready_queue = BucketType( + task_registry=w.app.tasks, callback=process_task, worker=w, + ) + + +class EvLoop(bootsteps.StartStopComponent): + name = 'worker.ev' + + def __init__(self, w, **kwargs): + w.hub = None + + def include_if(self, w): + return w.use_eventloop + + def create(self, w): + w.timer = Schedule(max_interval=10) + hub = w.hub = Hub(w.timer) + return hub + + +class Timers(bootsteps.Component): + """This component initializes the internal timers used by the worker.""" + name = 'worker.timers' + requires = ('pool', ) + + def include_if(self, w): + return not w.use_eventloop + + def create(self, w): + if not w.timer_cls: + # Default Timer is set by the pool, as e.g. eventlet + # needs a custom implementation. + w.timer_cls = w.pool.Timer + w.timer = self.instantiate(w.pool.Timer, + max_interval=w.timer_precision, + on_timer_error=self.on_timer_error, + on_timer_tick=self.on_timer_tick) + + def on_timer_error(self, exc): + logger.error('Timer error: %r', exc, exc_info=True) + + def on_timer_tick(self, delay): + logger.debug('Timer wake-up! 
Next eta %s secs.', delay) + + +class StateDB(bootsteps.Component): + """This component sets up the workers state db if enabled.""" + name = 'worker.state-db' + + def __init__(self, w, **kwargs): + self.enabled = w.state_db + w._persistence = None + + def create(self, w): + w._persistence = state.Persistent(w.state_db) + atexit.register(w._persistence.save) + + +class WorkController(configurated): + """Unmanaged worker instance.""" + RUN = RUN + CLOSE = CLOSE + TERMINATE = TERMINATE + + app = None + concurrency = from_config() + loglevel = logging.ERROR + logfile = from_config('log_file') + send_events = from_config() + pool_cls = from_config('pool') + consumer_cls = from_config('consumer') + mediator_cls = from_config('mediator') + timer_cls = from_config('timer') + timer_precision = from_config('timer_precision') + autoscaler_cls = from_config('autoscaler') + autoreloader_cls = from_config('autoreloader') + schedule_filename = from_config() + scheduler_cls = from_config('celerybeat_scheduler') + task_time_limit = from_config() + task_soft_time_limit = from_config() + max_tasks_per_child = from_config() + pool_putlocks = from_config() + pool_restarts = from_config() + force_execv = from_config() + prefetch_multiplier = from_config() + state_db = from_config() + disable_rate_limits = from_config() + worker_lost_wait = from_config() + + _state = None + _running = 0 + + def __init__(self, loglevel=None, hostname=None, ready_callback=noop, + queues=None, app=None, pidfile=None, use_eventloop=None, + **kwargs): + self.app = app_or_default(app or self.app) + + self._shutdown_complete = Event() + self.setup_defaults(kwargs, namespace='celeryd') + self.app.select_queues(queues) # select queues subset. + + # Options + self.loglevel = loglevel or self.loglevel + self.hostname = hostname or socket.gethostname() + self.ready_callback = ready_callback + self._finalize = Finalize(self, self.stop, exitpriority=1) + self.pidfile = pidfile + self.pidlock = None + # this connection is not established, only used for params + self._conninfo = self.app.connection() + self.use_eventloop = ( + self.should_use_eventloop() if use_eventloop is None + else use_eventloop + ) + + # Update celery_include to have all known task modules, so that we + # ensure all task modules are imported in case an execv happens. + task_modules = set(task.__class__.__module__ + for task in self.app.tasks.itervalues()) + self.app.conf.CELERY_INCLUDE = tuple( + set(self.app.conf.CELERY_INCLUDE) | task_modules, + ) + + # Initialize boot steps + self.pool_cls = _concurrency.get_implementation(self.pool_cls) + self.components = [] + self.namespace = Namespace(app=self.app).apply(self, **kwargs) + + def start(self): + """Starts the workers main loop.""" + self._state = self.RUN + if self.pidfile: + self.pidlock = platforms.create_pidlock(self.pidfile) + try: + for i, component in enumerate(self.components): + logger.debug('Starting %s...', qualname(component)) + self._running = i + 1 + if component: + component.start() + logger.debug('%s OK!', qualname(component)) + except SystemTerminate: + self.terminate() + except Exception, exc: + logger.error('Unrecoverable error: %r', exc, + exc_info=True) + self.stop() + except (KeyboardInterrupt, SystemExit): + self.stop() + + # Will only get here if running green, + # makes sure all greenthreads have exited. 
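+        # (_shutdown() sets _shutdown_complete as its final step, so this
+        # wait also ends once every component has been stopped.)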
+ self._shutdown_complete.wait() + + def process_task_sem(self, req): + return self._quick_acquire(self.process_task, req) + + def process_task(self, req): + """Process task by sending it to the pool of workers.""" + try: + req.execute_using_pool(self.pool) + except TaskRevokedError: + try: + self._quick_release() # Issue 877 + except AttributeError: + pass + except Exception, exc: + logger.critical('Internal error: %r\n%s', + exc, traceback.format_exc(), exc_info=True) + except SystemTerminate: + self.terminate() + raise + except BaseException, exc: + self.stop() + raise exc + + def signal_consumer_close(self): + try: + self.consumer.close() + except AttributeError: + pass + + def should_use_eventloop(self): + return (detect_environment() == 'default' and + self._conninfo.is_evented and not self.app.IS_WINDOWS) + + def stop(self, in_sighandler=False): + """Graceful shutdown of the worker server.""" + self.signal_consumer_close() + if not in_sighandler or self.pool.signal_safe: + self._shutdown(warm=True) + + def terminate(self, in_sighandler=False): + """Not so graceful shutdown of the worker server.""" + self.signal_consumer_close() + if not in_sighandler or self.pool.signal_safe: + self._shutdown(warm=False) + + def _shutdown(self, warm=True): + what = 'Stopping' if warm else 'Terminating' + socket_timeout = socket.getdefaulttimeout() + socket.setdefaulttimeout(SHUTDOWN_SOCKET_TIMEOUT) # Issue 975 + + if self._state in (self.CLOSE, self.TERMINATE): + return + + self.app.loader.shutdown_worker() + + if self.pool: + self.pool.close() + + if self._state != self.RUN or self._running != len(self.components): + # Not fully started, can safely exit. + self._state = self.TERMINATE + self._shutdown_complete.set() + return + self._state = self.CLOSE + + for component in reversed(self.components): + logger.debug('%s %s...', what, qualname(component)) + if component: + stop = component.stop + if not warm: + stop = getattr(component, 'terminate', None) or stop + stop() + + self.timer.stop() + self.consumer.close_connection() + + if self.pidlock: + self.pidlock.release() + self._state = self.TERMINATE + socket.setdefaulttimeout(socket_timeout) + self._shutdown_complete.set() + + def reload(self, modules=None, reload=False, reloader=None): + modules = self.app.loader.task_modules if modules is None else modules + imp = self.app.loader.import_from_cwd + + for module in set(modules or ()): + if module not in sys.modules: + logger.debug('importing module %s', module) + imp(module) + elif reload: + logger.debug('reloading module %s', module) + reload_from_cwd(sys.modules[module], reloader) + self.pool.restart() + + @property + def state(self): + return state diff --git a/awx/lib/site-packages/celery/worker/autoreload.py b/awx/lib/site-packages/celery/worker/autoreload.py new file mode 100644 index 0000000000..6d0881b50c --- /dev/null +++ b/awx/lib/site-packages/celery/worker/autoreload.py @@ -0,0 +1,273 @@ +# -*- coding: utf-8 -*- +""" + celery.worker.autoreload + ~~~~~~~~~~~~~~~~~~~~~~~~ + + This module implements automatic module reloading +""" +from __future__ import absolute_import +from __future__ import with_statement + +import hashlib +import os +import select +import sys +import time + +from collections import defaultdict + +from kombu.utils import eventio + +from celery.platforms import ignore_errno +from celery.utils.imports import module_file +from celery.utils.log import get_logger +from celery.utils.threads import bgThread, Event + +from .bootsteps import StartStopComponent + +try: # pragma: 
no cover + import pyinotify + _ProcessEvent = pyinotify.ProcessEvent +except ImportError: # pragma: no cover + pyinotify = None # noqa + _ProcessEvent = object # noqa + +logger = get_logger(__name__) + + +class WorkerComponent(StartStopComponent): + name = 'worker.autoreloader' + requires = ('pool', ) + + def __init__(self, w, autoreload=None, **kwargs): + self.enabled = w.autoreload = autoreload + w.autoreloader = None + + def create_ev(self, w): + ar = w.autoreloader = self.instantiate(w.autoreloader_cls, w) + w.hub.on_init.append(ar.on_poll_init) + w.hub.on_close.append(ar.on_poll_close) + + def create_threaded(self, w): + w.autoreloader = self.instantiate(w.autoreloader_cls, w) + return w.autoreloader + + def create(self, w): + if hasattr(select, 'kqueue') and w.use_eventloop: + return self.create_ev(w) + return self.create_threaded(w) + + +def file_hash(filename, algorithm='md5'): + hobj = hashlib.new(algorithm) + with open(filename, 'rb') as f: + for chunk in iter(lambda: f.read(1048576), ''): + hobj.update(chunk) + return hobj.digest() + + +class BaseMonitor(object): + + def __init__(self, files, + on_change=None, shutdown_event=None, interval=0.5): + self.files = files + self.interval = interval + self._on_change = on_change + self.modify_times = defaultdict(int) + self.shutdown_event = shutdown_event or Event() + + def start(self): + raise NotImplementedError('Subclass responsibility') + + def stop(self): + pass + + def on_change(self, modified): + if self._on_change: + return self._on_change(modified) + + +class StatMonitor(BaseMonitor): + """File change monitor based on the ``stat`` system call.""" + + def _mtimes(self): + return ((f, self._mtime(f)) for f in self.files) + + def _maybe_modified(self, f, mt): + return mt is not None and self.modify_times[f] != mt + + def start(self): + while not self.shutdown_event.is_set(): + modified = dict((f, mt) for f, mt in self._mtimes() + if self._maybe_modified(f, mt)) + if modified: + self.on_change(modified) + self.modify_times.update(modified) + time.sleep(self.interval) + + @staticmethod + def _mtime(path): + try: + return os.stat(path).st_mtime + except Exception: + pass + + +class KQueueMonitor(BaseMonitor): + """File change monitor based on BSD kernel event notifications""" + + def __init__(self, *args, **kwargs): + super(KQueueMonitor, self).__init__(*args, **kwargs) + self.filemap = dict((f, None) for f in self.files) + self.fdmap = {} + + def on_poll_init(self, hub): + self.add_events(hub.poller) + hub.poller.on_file_change = self.handle_event + + def on_poll_close(self, hub): + self.close(hub.poller) + + def add_events(self, poller): + for f in self.filemap: + self.filemap[f] = fd = os.open(f, os.O_RDONLY) + self.fdmap[fd] = f + poller.watch_file(fd) + + def handle_event(self, events): + self.on_change([self.fdmap[e.ident] for e in events]) + + def start(self): + self.poller = eventio.poll() + self.add_events(self.poller) + self.poller.on_file_change = self.handle_event + while not self.shutdown_event.is_set(): + self.poller.poll(1) + + def close(self, poller): + for f, fd in self.filemap.iteritems(): + if fd is not None: + poller.unregister(fd) + with ignore_errno('EBADF'): # pragma: no cover + os.close(fd) + self.filemap.clear() + self.fdmap.clear() + + def stop(self): + self.close(self.poller) + self.poller.close() + + +class InotifyMonitor(_ProcessEvent): + """File change monitor based on Linux kernel `inotify` subsystem""" + + def __init__(self, modules, on_change=None, **kwargs): + assert pyinotify + self._modules = 
modules + self._on_change = on_change + self._wm = None + self._notifier = None + + def start(self): + try: + self._wm = pyinotify.WatchManager() + self._notifier = pyinotify.Notifier(self._wm, self) + add_watch = self._wm.add_watch + flags = pyinotify.IN_MODIFY | pyinotify.IN_ATTRIB + for m in self._modules: + add_watch(m, flags) + self._notifier.loop() + finally: + if self._wm: + self._wm.close() + # Notifier.close is called at the end of Notifier.loop + self._wm = self._notifier = None + + def stop(self): + pass + + def process_(self, event): + self.on_change([event.path]) + + process_IN_ATTRIB = process_IN_MODIFY = process_ + + def on_change(self, modified): + if self._on_change: + return self._on_change(modified) + + +def default_implementation(): + # kqueue monitor not working properly at this time. + if hasattr(select, 'kqueue'): + return 'kqueue' + if sys.platform.startswith('linux') and pyinotify: + return 'inotify' + else: + return 'stat' + +implementations = {'kqueue': KQueueMonitor, + 'inotify': InotifyMonitor, + 'stat': StatMonitor} +Monitor = implementations[ + os.environ.get('CELERYD_FSNOTIFY') or default_implementation()] + + +class Autoreloader(bgThread): + """Tracks changes in modules and fires reload commands""" + Monitor = Monitor + + def __init__(self, controller, modules=None, monitor_cls=None, **options): + super(Autoreloader, self).__init__() + self.controller = controller + app = self.controller.app + self.modules = app.loader.task_modules if modules is None else modules + self.options = options + self._monitor = None + self._hashes = None + self.file_to_module = {} + + def on_init(self): + files = self.file_to_module + files.update(dict( + (module_file(sys.modules[m]), m) for m in self.modules)) + + self._monitor = self.Monitor( + files, self.on_change, + shutdown_event=self._is_shutdown, **self.options) + self._hashes = dict([(f, file_hash(f)) for f in files]) + + def on_poll_init(self, hub): + if self._monitor is None: + self.on_init() + self._monitor.on_poll_init(hub) + + def on_poll_close(self, hub): + if self._monitor is not None: + self._monitor.on_poll_close(hub) + + def body(self): + self.on_init() + with ignore_errno('EINTR', 'EAGAIN'): + self._monitor.start() + + def _maybe_modified(self, f): + if os.path.exists(f): + digest = file_hash(f) + if digest != self._hashes[f]: + self._hashes[f] = digest + return True + return False + + def on_change(self, files): + modified = [f for f in files if self._maybe_modified(f)] + if modified: + names = [self.file_to_module[module] for module in modified] + logger.info('Detected modified modules: %r', names) + self._reload(names) + + def _reload(self, modules): + self.controller.reload(modules, reload=True) + + def stop(self): + if self._monitor: + self._monitor.stop() diff --git a/awx/lib/site-packages/celery/worker/autoscale.py b/awx/lib/site-packages/celery/worker/autoscale.py new file mode 100644 index 0000000000..702f7b7fd3 --- /dev/null +++ b/awx/lib/site-packages/celery/worker/autoscale.py @@ -0,0 +1,163 @@ +# -*- coding: utf-8 -*- +""" + celery.worker.autoscale + ~~~~~~~~~~~~~~~~~~~~~~~ + + This module implements the internal thread responsible + for growing and shrinking the pool according to the + current autoscale settings. + + The autoscale thread is only enabled if autoscale + has been enabled on the command line. 
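+
+    A rough picture of the policy (numbers illustrative): with
+    ``--autoscale=10,3`` the pool is grown towards the number of
+    currently reserved tasks, capped at 10 processes, and is shrunk
+    back towards 3 at most once per keepalive interval.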
+ +""" +from __future__ import absolute_import +from __future__ import with_statement + +import os +import threading + +from functools import partial +from time import sleep, time + +from celery.utils.log import get_logger +from celery.utils.threads import bgThread + +from . import state +from .bootsteps import StartStopComponent +from .hub import DummyLock + +logger = get_logger(__name__) +debug, info, error = logger.debug, logger.info, logger.error + +AUTOSCALE_KEEPALIVE = int(os.environ.get('AUTOSCALE_KEEPALIVE', 30)) + + +class WorkerComponent(StartStopComponent): + name = 'worker.autoscaler' + requires = ('pool', ) + + def __init__(self, w, **kwargs): + self.enabled = w.autoscale + w.autoscaler = None + + def create_threaded(self, w): + scaler = w.autoscaler = self.instantiate( + w.autoscaler_cls, + w.pool, w.max_concurrency, w.min_concurrency, + ) + return scaler + + def on_poll_init(self, scaler, hub): + hub.on_task.append(scaler.maybe_scale) + hub.timer.apply_interval(scaler.keepalive * 1000.0, scaler.maybe_scale) + + def create_ev(self, w): + scaler = w.autoscaler = self.instantiate( + w.autoscaler_cls, + w.pool, w.max_concurrency, w.min_concurrency, + mutex=DummyLock(), + ) + w.hub.on_init.append(partial(self.on_poll_init, scaler)) + + def create(self, w): + return (self.create_ev if w.use_eventloop + else self.create_threaded)(w) + + +class Autoscaler(bgThread): + + def __init__(self, pool, max_concurrency, + min_concurrency=0, keepalive=AUTOSCALE_KEEPALIVE, mutex=None): + super(Autoscaler, self).__init__() + self.pool = pool + self.mutex = mutex or threading.Lock() + self.max_concurrency = max_concurrency + self.min_concurrency = min_concurrency + self.keepalive = keepalive + self._last_action = None + + assert self.keepalive, 'cannot scale down too fast.' 
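+
+        # Example of driving the scaler directly (a sketch, not public
+        # API; `pool` is anything exposing grow()/shrink()/num_processes):
+        #
+        #     scaler = Autoscaler(pool, max_concurrency=10,
+        #                         min_concurrency=3)
+        #     scaler.update(max=20, min=5)  # raise both bounds at runtime
+        #     scaler.info()  # -> {'max': 20, 'min': 5, ...}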
+ + def body(self): + with self.mutex: + self.maybe_scale() + sleep(1.0) + + def _maybe_scale(self): + procs = self.processes + cur = min(self.qty, self.max_concurrency) + if cur > procs: + self.scale_up(cur - procs) + return True + elif cur < procs: + self.scale_down((procs - cur) - self.min_concurrency) + return True + + def maybe_scale(self): + if self._maybe_scale(): + self.pool.maintain_pool() + + def update(self, max=None, min=None): + with self.mutex: + if max is not None: + if max < self.max_concurrency: + self._shrink(self.processes - max) + self.max_concurrency = max + if min is not None: + if min > self.min_concurrency: + self._grow(min - self.min_concurrency) + self.min_concurrency = min + return self.max_concurrency, self.min_concurrency + + def force_scale_up(self, n): + with self.mutex: + new = self.processes + n + if new > self.max_concurrency: + self.max_concurrency = new + self.min_concurrency += 1 + self._grow(n) + + def force_scale_down(self, n): + with self.mutex: + new = self.processes - n + if new < self.min_concurrency: + self.min_concurrency = max(new, 0) + self._shrink(min(n, self.processes)) + + def scale_up(self, n): + self._last_action = time() + return self._grow(n) + + def scale_down(self, n): + if n and self._last_action and ( + time() - self._last_action > self.keepalive): + self._last_action = time() + return self._shrink(n) + + def _grow(self, n): + info('Scaling up %s processes.', n) + self.pool.grow(n) + + def _shrink(self, n): + info('Scaling down %s processes.', n) + try: + self.pool.shrink(n) + except ValueError: + debug("Autoscaler won't scale down: all processes busy.") + except Exception, exc: + error('Autoscaler: scale_down: %r', exc, exc_info=True) + + def info(self): + return {'max': self.max_concurrency, + 'min': self.min_concurrency, + 'current': self.processes, + 'qty': self.qty} + + @property + def qty(self): + return len(state.reserved_requests) + + @property + def processes(self): + return self.pool.num_processes diff --git a/awx/lib/site-packages/celery/worker/bootsteps.py b/awx/lib/site-packages/celery/worker/bootsteps.py new file mode 100644 index 0000000000..147b9a4a68 --- /dev/null +++ b/awx/lib/site-packages/celery/worker/bootsteps.py @@ -0,0 +1,211 @@ +# -*- coding: utf-8 -*- +""" + celery.worker.bootsteps + ~~~~~~~~~~~~~~~~~~~~~~~ + + The boot-step components. + +""" +from __future__ import absolute_import + +from collections import defaultdict +from importlib import import_module + +from celery.datastructures import DependencyGraph +from celery.utils.imports import instantiate +from celery.utils.log import get_logger + +logger = get_logger(__name__) + + +class Namespace(object): + """A namespace containing components. + + Every component must belong to a namespace. + + When component classes are created they are added to the + mapping of unclaimed components. The components will be + claimed when the namespace they belong to is created. + + :keyword name: Set the name of this namespace. + :keyword app: Set the Celery app for this namespace. 
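+
+    A sketch of what a claimed component looks like (illustrative only;
+    ``worker.example`` and ``example_flag`` are made-up names)::
+
+        class ExampleComponent(Component):
+            name = 'worker.example'
+            requires = ('queues', )
+
+            def create(self, w):
+                w.example_flag = True   # runs as the worker boots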
+
+    """
+    name = None
+    _unclaimed = defaultdict(dict)
+    _started_count = 0
+
+    def __init__(self, name=None, app=None):
+        self.app = app
+        self.name = name or self.name
+        self.services = []
+
+    def modules(self):
+        """Subclasses can override this to return a
+        list of modules to import before components are claimed."""
+        return []
+
+    def load_modules(self):
+        """Will load the component modules this namespace depends on."""
+        for m in self.modules():
+            self.import_module(m)
+
+    def apply(self, parent, **kwargs):
+        """Apply the components in this namespace to an object.
+
+        This will apply the ``__init__`` and ``include`` methods
+        of each component with the object as argument.
+
+        For ``StartStopComponents`` the services created
+        will also be added to the object's ``components`` attribute.
+
+        """
+        self._debug('Loading modules.')
+        self.load_modules()
+        self._debug('Claiming components.')
+        self.components = self._claim()
+        self._debug('Building boot step graph.')
+        self.boot_steps = [self.bind_component(name, parent, **kwargs)
+                           for name in self._finalize_boot_steps()]
+        self._debug(
+            'New boot order: {%s}', ', '.join(c.name for c in self.boot_steps),
+        )
+
+        for component in self.boot_steps:
+            component.include(parent)
+        return self
+
+    def bind_component(self, name, parent, **kwargs):
+        """Bind component to parent object and this namespace."""
+        comp = self[name](parent, **kwargs)
+        comp.namespace = self
+        return comp
+
+    def import_module(self, module):
+        return import_module(module)
+
+    def __getitem__(self, name):
+        return self.components[name]
+
+    def _find_last(self):
+        for C in self.components.itervalues():
+            if C.last:
+                return C
+
+    def _finalize_boot_steps(self):
+        G = self.graph = DependencyGraph(
+            (C.name, C.requires) for C in self.components.itervalues())
+        last = self._find_last()
+        if last:
+            for obj in G:
+                if obj != last.name:
+                    G.add_edge(last.name, obj)
+        return G.topsort()
+
+    def _claim(self):
+        return self._unclaimed[self.name]
+
+    def _debug(self, msg, *args):
+        return logger.debug('[%s] ' + msg,
+                            *(self.name.capitalize(), ) + args)
+
+
+class ComponentType(type):
+    """Metaclass for components."""
+
+    def __new__(cls, name, bases, attrs):
+        abstract = attrs.pop('abstract', False)
+        if not abstract:
+            try:
+                cname = attrs['name']
+            except KeyError:
+                raise NotImplementedError('Components must be named')
+            namespace = attrs.get('namespace', None)
+            if not namespace:
+                attrs['namespace'], _, attrs['name'] = cname.partition('.')
+        cls = super(ComponentType, cls).__new__(cls, name, bases, attrs)
+        if not abstract:
+            Namespace._unclaimed[cls.namespace][cls.name] = cls
+        return cls
+
+
+class Component(object):
+    """A component.
+
+    The :meth:`__init__` method is called when the component
+    is bound to a parent object, and can as such be used
+    to initialize attributes in the parent object at
+    parent instantiation-time.
+
+    """
+    __metaclass__ = ComponentType
+
+    #: The name of the component, or the namespace
+    #: and the name of the component separated by dot.
+    name = None
+
+    #: List of component names this component depends on.
+    #: Note that the dependencies must be in the same namespace.
+    requires = ()
+
+    #: Can be used to specify the namespace,
+    #: if the name does not include it.
+    namespace = None
+
+    #: If set, the component will not be registered,
+    #: but can be used as a component base class.
+    abstract = True
+
+    #: Optional obj created by the :meth:`create` method.
+    #: This is used by StartStopComponents to keep the
+    #: original service object.
+    obj = None
+
+    #: This flag is reserved for the worker's Consumer,
+    #: since it is required to always be started last.
+    #: There can only be one object marked with last
+    #: in every namespace.
+    last = False
+
+    #: This provides the default for :meth:`include_if`.
+    enabled = True
+
+    def __init__(self, parent, **kwargs):
+        pass
+
+    def create(self, parent):
+        """Create the component."""
+        pass
+
+    def include_if(self, parent):
+        """An optional predicate that decides whether this
+        component should be created."""
+        return self.enabled
+
+    def instantiate(self, qualname, *args, **kwargs):
+        return instantiate(qualname, *args, **kwargs)
+
+    def include(self, parent):
+        if self.include_if(parent):
+            self.obj = self.create(parent)
+            return True
+
+
+class StartStopComponent(Component):
+    abstract = True
+    terminable = False
+
+    def start(self):
+        return self.obj.start()
+
+    def stop(self):
+        return self.obj.stop()
+
+    def terminate(self):
+        if self.terminable:
+            return self.obj.terminate()
+        return self.obj.stop()
+
+    def include(self, parent):
+        if super(StartStopComponent, self).include(parent):
+            parent.components.append(self.obj)
diff --git a/awx/lib/site-packages/celery/worker/buckets.py b/awx/lib/site-packages/celery/worker/buckets.py
new file mode 100644
index 0000000000..e975328d49
--- /dev/null
+++ b/awx/lib/site-packages/celery/worker/buckets.py
@@ -0,0 +1,385 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.worker.buckets
+    ~~~~~~~~~~~~~~~~~~~~~
+
+    This module implements the rate limiting of tasks,
+    by having a token bucket queue for each task type.
+    When a task is allowed to be processed it's moved
+    over to the ``ready_queue``.
+
+    The :mod:`celery.worker.mediator` is then responsible
+    for moving tasks from the ``ready_queue`` to the worker pool.
+
+"""
+from __future__ import absolute_import
+from __future__ import with_statement
+
+import threading
+
+from collections import deque
+from time import time, sleep
+from Queue import Queue, Empty
+
+from kombu.utils.limits import TokenBucket
+
+from celery.utils import timeutils
+from celery.utils.compat import zip_longest, chain_from_iterable
+
+
+class RateLimitExceeded(Exception):
+    """The token buckets rate limit has been exceeded."""
+
+
+class AsyncTaskBucket(object):
+
+    def __init__(self, task_registry, callback=None, worker=None):
+        self.task_registry = task_registry
+        self.callback = callback
+        self.worker = worker
+        self.buckets = {}
+        self.refresh()
+
+    def cont(self, request, bucket, tokens):
+        if not bucket.can_consume(tokens):
+            hold = bucket.expected_time(tokens)
+            self.worker.timer.apply_after(
+                hold * 1000.0, self.cont, (request, bucket, tokens),
+            )
+        else:
+            self.callback(request)
+
+    def put(self, request):
+        name = request.name
+        try:
+            bucket = self.buckets[name]
+        except KeyError:
+            bucket = self.add_task_type(name)
+        if not bucket:
+            return self.callback(request)
+        return self.cont(request, bucket, 1)
+
+    def add_task_type(self, name):
+        task_type = self.task_registry[name]
+        limit = getattr(task_type, 'rate_limit', None)
+        limit = timeutils.rate(limit)
+        bucket = self.buckets[name] = (
+            TokenBucket(limit, capacity=1) if limit else None
+        )
+        return bucket
+
+    def clear(self):
+        # Called by the worker when the connection is lost; that also
+        # clears out the timer entries, so there is nothing to do here.
+        pass
+
+    def refresh(self):
+        for name in self.task_registry:
+            self.add_task_type(name)
+
+
+class TaskBucket(object):
+    """This is a collection of token buckets, each task type having
+    its own token bucket. If the task type doesn't have a rate limit,
+    it will have a plain :class:`~Queue.Queue` object instead of a
+    :class:`TokenBucketQueue`.
+
+    The :meth:`put` operation forwards the task to its appropriate bucket,
+    while the :meth:`get` operation iterates over the buckets and retrieves
+    the first available item.
+
+    Say we have three types of tasks in the registry: `twitter.update`,
+    `feed.refresh` and `video.compress`; the TaskBucket will consist
+    of the following items::
+
+        {'twitter.update': TokenBucketQueue(fill_rate=300),
+         'feed.refresh': Queue(),
+         'video.compress': TokenBucketQueue(fill_rate=2)}
+
+    The get operation will iterate over these until one of the buckets
+    is able to return an item. The underlying datastructure is a `dict`,
+    so the order is ignored here.
+
+    :param task_registry: The task registry used to get the task
+        type class for a given task name.
+
+    """
+
+    def __init__(self, task_registry, callback=None, worker=None):
+        self.task_registry = task_registry
+        self.buckets = {}
+        self.init_with_registry()
+        self.immediate = deque()
+        self.mutex = threading.Lock()
+        self.not_empty = threading.Condition(self.mutex)
+        self.callback = callback
+        self.worker = worker
+
+    def put(self, request):
+        """Put a :class:`~celery.worker.job.Request` into
+        the appropriate bucket."""
+        if request.name not in self.buckets:
+            self.add_bucket_for_type(request.name)
+        self.buckets[request.name].put_nowait(request)
+        with self.mutex:
+            self.not_empty.notify()
+    put_nowait = put
+
+    def _get_immediate(self):
+        try:
+            return self.immediate.popleft()
+        except IndexError:
+            raise Empty()
+
+    def _get(self):
+        # If the first bucket is always returning items, we would never
+        # get to fetch items from the other buckets. So we always iterate over
+        # all the buckets and put any ready items into a queue called
+        # "immediate". This queue is always checked for cached items first.
+        try:
+            return 0, self._get_immediate()
+        except Empty:
+            pass
+
+        remaining_times = []
+        for bucket in self.buckets.values():
+            remaining = bucket.expected_time()
+            if not remaining:
+                try:
+                    # Just put any ready items into the immediate queue.
+                    self.immediate.append(bucket.get_nowait())
+                except Empty:
+                    pass
+                except RateLimitExceeded:
+                    remaining_times.append(bucket.expected_time())
+            else:
+                remaining_times.append(remaining)
+
+        # Try the immediate queue again.
+        try:
+            return 0, self._get_immediate()
+        except Empty:
+            if not remaining_times:
+                # No items in any of the buckets.
+                raise
+
+        # There are items, but we have to wait before we can retrieve them;
+        # return the shortest remaining time.
+        return min(remaining_times), None
+
+    def get(self, block=True, timeout=None):
+        """Retrieve the task from the first available bucket.
+
+        Available as in, there is an item in the queue and you can
+        consume tokens from it.
+ + """ + tstart = time() + get = self._get + not_empty = self.not_empty + + with not_empty: + while 1: + try: + remaining_time, item = get() + except Empty: + if not block or (timeout and time() - tstart > timeout): + raise + not_empty.wait(timeout) + continue + if remaining_time: + if not block or (timeout and time() - tstart > timeout): + raise Empty() + sleep(min(remaining_time, timeout or 1)) + else: + return item + + def get_nowait(self): + return self.get(block=False) + + def init_with_registry(self): + """Initialize with buckets for all the task types in the registry.""" + for task in self.task_registry: + self.add_bucket_for_type(task) + + def refresh(self): + """Refresh rate limits for all task types in the registry.""" + for task in self.task_registry: + self.update_bucket_for_type(task) + + def get_bucket_for_type(self, task_name): + """Get the bucket for a particular task type.""" + if task_name not in self.buckets: + return self.add_bucket_for_type(task_name) + return self.buckets[task_name] + + def _get_queue_for_type(self, task_name): + bucket = self.buckets[task_name] + if isinstance(bucket, TokenBucketQueue): + return bucket.queue + return bucket + + def update_bucket_for_type(self, task_name): + task_type = self.task_registry[task_name] + rate_limit = getattr(task_type, 'rate_limit', None) + rate_limit = timeutils.rate(rate_limit) + task_queue = FastQueue() + if task_name in self.buckets: + task_queue = self._get_queue_for_type(task_name) + else: + task_queue = FastQueue() + + if rate_limit: + task_queue = TokenBucketQueue(rate_limit, queue=task_queue) + + self.buckets[task_name] = task_queue + return task_queue + + def add_bucket_for_type(self, task_name): + """Add a bucket for a task type. + + Will read the tasks rate limit and create a :class:`TokenBucketQueue` + if it has one. If the task doesn't have a rate limit + :class:`FastQueue` will be used instead. + + """ + if task_name not in self.buckets: + return self.update_bucket_for_type(task_name) + + def qsize(self): + """Get the total size of all the queues.""" + return sum(bucket.qsize() for bucket in self.buckets.values()) + + def empty(self): + """Returns :const:`True` if all of the buckets are empty.""" + return all(bucket.empty() for bucket in self.buckets.values()) + + def clear(self): + """Delete the data in all of the buckets.""" + for bucket in self.buckets.values(): + bucket.clear() + + @property + def items(self): + """Flattens the data in all of the buckets into a single list.""" + # for queues with contents [(1, 2), (3, 4), (5, 6), (7, 8)] + # zips and flattens to [1, 3, 5, 7, 2, 4, 6, 8] + return filter(None, chain_from_iterable( + zip_longest(*[bucket.items for bucket in self.buckets.values()])), + ) + + +class FastQueue(Queue): + """:class:`Queue.Queue` supporting the interface of + :class:`TokenBucketQueue`.""" + + def clear(self): + return self.queue.clear() + + def expected_time(self, tokens=1): + return 0 + + def wait(self, block=True): + return self.get(block=block) + + @property + def items(self): + return self.queue + + +class TokenBucketQueue(object): + """Queue with rate limited get operations. + + This uses the token bucket algorithm to rate limit the queue on get + operations. + + :param fill_rate: The rate in tokens/second that the bucket will + be refilled. + :keyword capacity: Maximum number of tokens in the bucket. + Default is 1. 
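+
+    Example (a sketch; with a fill rate of 2, one token becomes
+    available roughly every half second)::
+
+        >>> q = TokenBucketQueue(fill_rate=2)
+        >>> q.put('msg')
+        >>> q.wait()   # sleeps until a token can be consumed
+        'msg'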
+ + """ + RateLimitExceeded = RateLimitExceeded + + def __init__(self, fill_rate, queue=None, capacity=1): + self._bucket = TokenBucket(fill_rate, capacity) + self.queue = queue + if not self.queue: + self.queue = Queue() + + def put(self, item, block=True): + """Put an item onto the queue.""" + self.queue.put(item, block=block) + + def put_nowait(self, item): + """Put an item into the queue without blocking. + + :raises Queue.Full: If a free slot is not immediately available. + + """ + return self.put(item, block=False) + + def get(self, block=True): + """Remove and return an item from the queue. + + :raises RateLimitExceeded: If a token could not be consumed from the + token bucket (consuming from the queue + too fast). + :raises Queue.Empty: If an item is not immediately available. + + """ + get = block and self.queue.get or self.queue.get_nowait + + if not block and not self.items: + raise Empty() + + if not self._bucket.can_consume(1): + raise RateLimitExceeded() + + return get() + + def get_nowait(self): + """Remove and return an item from the queue without blocking. + + :raises RateLimitExceeded: If a token could not be consumed from the + token bucket (consuming from the queue + too fast). + :raises Queue.Empty: If an item is not immediately available. + + """ + return self.get(block=False) + + def qsize(self): + """Returns the size of the queue.""" + return self.queue.qsize() + + def empty(self): + """Returns :const:`True` if the queue is empty.""" + return self.queue.empty() + + def clear(self): + """Delete all data in the queue.""" + return self.items.clear() + + def wait(self, block=False): + """Wait until a token can be retrieved from the bucket and return + the next item.""" + get = self.get + expected_time = self.expected_time + while 1: + remaining = expected_time() + if not remaining: + return get(block=block) + sleep(remaining) + + def expected_time(self, tokens=1): + """Returns the expected time in seconds of when a new token should be + available.""" + if not self.items: + return 0 + return self._bucket.expected_time(tokens) + + @property + def items(self): + """Underlying data. Do not modify.""" + return self.queue.queue diff --git a/awx/lib/site-packages/celery/worker/consumer.py b/awx/lib/site-packages/celery/worker/consumer.py new file mode 100644 index 0000000000..aed01f3e02 --- /dev/null +++ b/awx/lib/site-packages/celery/worker/consumer.py @@ -0,0 +1,893 @@ +# -*- coding: utf-8 -*- +""" +celery.worker.consumer +~~~~~~~~~~~~~~~~~~~~~~ + +This module contains the component responsible for consuming messages +from the broker, processing the messages and keeping the broker connections +up and running. + + +* :meth:`~Consumer.start` is an infinite loop, which only iterates + again if the connection is lost. For each iteration (at start, or if the + connection is lost) it calls :meth:`~Consumer.reset_connection`, + and starts the consumer by calling :meth:`~Consumer.consume_messages`. + +* :meth:`~Consumer.reset_connection`, clears the internal queues, + establishes a new connection to the broker, sets up the task + consumer (+ QoS), and the broadcast remote control command consumer. + + Also if events are enabled it configures the event dispatcher and starts + up the heartbeat thread. + +* Finally it can consume messages. :meth:`~Consumer.consume_messages` + is simply an infinite loop waiting for events on the AMQP channels. + + Both the task consumer and the broadcast consumer uses the same + callback: :meth:`~Consumer.receive_message`. 
+ +* So for each message received the :meth:`~Consumer.receive_message` + method is called, this checks the payload of the message for either + a `task` key or a `control` key. + + If the message is a task, it verifies the validity of the message + converts it to a :class:`celery.worker.job.Request`, and sends + it to :meth:`~Consumer.on_task`. + + If the message is a control command the message is passed to + :meth:`~Consumer.on_control`, which in turn dispatches + the control command using the control dispatcher. + + It also tries to handle malformed or invalid messages properly, + so the worker doesn't choke on them and die. Any invalid messages + are acknowledged immediately and logged, so the message is not resent + again, and again. + +* If the task has an ETA/countdown, the task is moved to the `timer` + so the :class:`timer2.Timer` can schedule it at its + deadline. Tasks without an eta are moved immediately to the `ready_queue`, + so they can be picked up by the :class:`~celery.worker.mediator.Mediator` + to be sent to the pool. + +* When a task with an ETA is received the QoS prefetch count is also + incremented, so another message can be reserved. When the ETA is met + the prefetch count is decremented again, though this cannot happen + immediately because most broker clients don't support doing broker + requests across threads. Instead the current prefetch count is kept as a + shared counter, so as soon as :meth:`~Consumer.consume_messages` + detects that the value has changed it will send out the actual + QoS event to the broker. + +* Notice that when the connection is lost all internal queues are cleared + because we can no longer ack the messages reserved in memory. + However, this is not dangerous as the broker will resend them + to another worker when the channel is closed. + +* **WARNING**: :meth:`~Consumer.stop` does not close the connection! + This is because some pre-acked messages may be in processing, + and they need to be finished before the channel is closed. + For celeryd this means the pool must finish the tasks it has acked + early, *then* close the connection. + +""" +from __future__ import absolute_import +from __future__ import with_statement + +import logging +import socket +import threading + +from time import sleep +from Queue import Empty + +from kombu.syn import _detect_environment +from kombu.utils.encoding import safe_repr, safe_str, bytes_t +from kombu.utils.eventio import READ, WRITE, ERR + +from celery.app import app_or_default +from celery.datastructures import AttributeDict +from celery.exceptions import InvalidTaskError, SystemTerminate +from celery.task.trace import build_tracer +from celery.utils import text +from celery.utils import timer2 +from celery.utils.functional import noop +from celery.utils.log import get_logger +from celery.utils.timer2 import to_timestamp +from celery.utils.timeutils import humanize_seconds, timezone + +from . import state +from .bootsteps import StartStopComponent +from .control import Panel +from .heartbeat import Heart + +RUN = 0x1 +CLOSE = 0x2 + +#: Prefetch count can't exceed short. +PREFETCH_COUNT_MAX = 0xFFFF + +UNKNOWN_FORMAT = """\ +Received and deleted unknown message. Wrong destination?!? + +The full contents of the message body was: %s +""" +#: Error message for when an unregistered task is received. +UNKNOWN_TASK_ERROR = """\ +Received unregistered task of type %s. +The message has been ignored and discarded. + +Did you remember to import the module containing this task? 
+Or maybe you are using relative imports? +More: http://docs.celeryq.org/en/latest/userguide/tasks.html#names + +The full contents of the message body was: +%s +""" + +#: Error message for when an invalid task message is received. +INVALID_TASK_ERROR = """\ +Received invalid task message: %s +The message has been ignored and discarded. + +Please ensure your message conforms to the task message format: +http://docs.celeryq.org/en/latest/internals/protocol.html + +The full contents of the message body was: +%s +""" + +MESSAGE_REPORT = """\ +body: %s {content_type:%s content_encoding:%s delivery_info:%s}\ +""" + + +RETRY_CONNECTION = """\ +consumer: Connection to broker lost. \ +Trying to re-establish the connection...\ +""" + +CONNECTION_ERROR = """\ +consumer: Cannot connect to %s: %s. +%s +""" + +CONNECTION_RETRY = """\ +Trying again %(when)s...\ +""" + +CONNECTION_FAILOVER = """\ +Will retry using next failover.\ +""" + +task_reserved = state.task_reserved + +logger = get_logger(__name__) +info, warn, error, crit = (logger.info, logger.warning, + logger.error, logger.critical) + + +def debug(msg, *args, **kwargs): + logger.debug('consumer: %s' % (msg, ), *args, **kwargs) + + +def dump_body(m, body): + if isinstance(body, buffer): + body = bytes_t(body) + return "%s (%sb)" % (text.truncate(safe_repr(body), 1024), len(m.body)) + + +class Component(StartStopComponent): + name = 'worker.consumer' + last = True + + def Consumer(self, w): + return (w.consumer_cls or + Consumer if w.hub else BlockingConsumer) + + def create(self, w): + prefetch_count = w.concurrency * w.prefetch_multiplier + c = w.consumer = self.instantiate( + self.Consumer(w), + w.ready_queue, + hostname=w.hostname, + send_events=w.send_events, + init_callback=w.ready_callback, + initial_prefetch_count=prefetch_count, + pool=w.pool, + timer=w.timer, + app=w.app, + controller=w, + hub=w.hub, + ) + return c + + +class QoS(object): + """Thread safe increment/decrement of a channels prefetch_count. + + :param consumer: A :class:`kombu.messaging.Consumer` instance. + :param initial_value: Initial prefetch count value. + + """ + prev = None + + def __init__(self, consumer, initial_value): + self.consumer = consumer + self._mutex = threading.RLock() + self.value = initial_value or 0 + + def increment_eventually(self, n=1): + """Increment the value, but do not update the channels QoS. + + The MainThread will be responsible for calling :meth:`update` + when necessary. + + """ + with self._mutex: + if self.value: + self.value = self.value + max(n, 0) + return self.value + + def decrement_eventually(self, n=1): + """Decrement the value, but do not update the channels QoS. + + The MainThread will be responsible for calling :meth:`update` + when necessary. + + """ + with self._mutex: + if self.value: + self.value -= n + return self.value + + def set(self, pcount): + """Set channel prefetch_count setting.""" + if pcount != self.prev: + new_value = pcount + if pcount > PREFETCH_COUNT_MAX: + warn('QoS: Disabled: prefetch_count exceeds %r', + PREFETCH_COUNT_MAX) + new_value = 0 + debug('basic.qos: prefetch_count->%s', new_value) + self.consumer.qos(prefetch_count=new_value) + self.prev = pcount + return pcount + + def update(self): + """Update prefetch count with current value.""" + with self._mutex: + return self.set(self.value) + + +class Consumer(object): + """Listen for messages received from the broker and + move them to the ready queue for task processing. + + :param ready_queue: See :attr:`ready_queue`. 
+ :param timer: See :attr:`timer`. + + """ + + #: The queue that holds tasks ready for immediate processing. + ready_queue = None + + #: Enable/disable events. + send_events = False + + #: Optional callback to be called when the connection is established. + #: Will only be called once, even if the connection is lost and + #: re-established. + init_callback = None + + #: The current hostname. Defaults to the system hostname. + hostname = None + + #: Initial QoS prefetch count for the task channel. + initial_prefetch_count = 0 + + #: A :class:`celery.events.EventDispatcher` for sending events. + event_dispatcher = None + + #: The thread that sends event heartbeats at regular intervals. + #: The heartbeats are used by monitors to detect that a worker + #: went offline/disappeared. + heart = None + + #: The broker connection. + connection = None + + #: The consumer used to consume task messages. + task_consumer = None + + #: The consumer used to consume broadcast commands. + broadcast_consumer = None + + #: The process mailbox (kombu pidbox node). + pidbox_node = None + _pidbox_node_shutdown = None # used for greenlets + _pidbox_node_stopped = None # used for greenlets + + #: The current worker pool instance. + pool = None + + #: A timer used for high-priority internal tasks, such + #: as sending heartbeats. + timer = None + + # Consumer state, can be RUN or CLOSE. + _state = None + + restart_count = -1 # first start is the same as a restart + + def __init__(self, ready_queue, + init_callback=noop, send_events=False, hostname=None, + initial_prefetch_count=2, pool=None, app=None, + timer=None, controller=None, hub=None, amqheartbeat=None, + **kwargs): + self.app = app_or_default(app) + self.connection = None + self.task_consumer = None + self.controller = controller + self.broadcast_consumer = None + self.ready_queue = ready_queue + self.send_events = send_events + self.init_callback = init_callback + self.hostname = hostname or socket.gethostname() + self.initial_prefetch_count = initial_prefetch_count + self.event_dispatcher = None + self.heart = None + self.pool = pool + self.timer = timer or timer2.default_timer + pidbox_state = AttributeDict(app=self.app, + hostname=self.hostname, + listener=self, # pre 2.2 + consumer=self) + self.pidbox_node = self.app.control.mailbox.Node( + safe_str(self.hostname), state=pidbox_state, handlers=Panel.data, + ) + conninfo = self.app.connection() + self.connection_errors = conninfo.connection_errors + self.channel_errors = conninfo.channel_errors + + self._does_info = logger.isEnabledFor(logging.INFO) + self.strategies = {} + if hub: + hub.on_init.append(self.on_poll_init) + self.hub = hub + self._quick_put = self.ready_queue.put + self.amqheartbeat = amqheartbeat + if self.amqheartbeat is None: + self.amqheartbeat = self.app.conf.BROKER_HEARTBEAT + if not hub: + self.amqheartbeat = 0 + + if _detect_environment() == 'gevent': + # there's a gevent bug that causes timeouts to not be reset, + # so if the connection timeout is exceeded once, it can NEVER + # connect again. + self.app.conf.BROKER_CONNECTION_TIMEOUT = None + + def update_strategies(self): + S = self.strategies + app = self.app + loader = app.loader + hostname = self.hostname + for name, task in self.app.tasks.iteritems(): + S[name] = task.start_strategy(app, self) + task.__trace__ = build_tracer(name, task, loader, hostname) + + def start(self): + """Start the consumer. 
+ + Automatically survives intermittent connection failure, + and will retry establishing the connection and restart + consuming messages. + + """ + + self.init_callback(self) + + while self._state != CLOSE: + self.restart_count += 1 + self.maybe_shutdown() + try: + self.reset_connection() + self.consume_messages() + except self.connection_errors + self.channel_errors: + error(RETRY_CONNECTION, exc_info=True) + + def on_poll_init(self, hub): + hub.update_readers(self.connection.eventmap) + self.connection.transport.on_poll_init(hub.poller) + + def consume_messages(self, sleep=sleep, min=min, Empty=Empty): + """Consume messages forever (or until an exception is raised).""" + hbrate = self.app.conf.BROKER_HEARTBEAT_CHECKRATE + + with self.hub as hub: + qos = self.qos + update_qos = qos.update + update_readers = hub.update_readers + readers, writers = hub.readers, hub.writers + poll = hub.poller.poll + fire_timers = hub.fire_timers + scheduled = hub.timer._queue + connection = self.connection + hb = self.amqheartbeat + hbtick = connection.heartbeat_check + on_poll_start = connection.transport.on_poll_start + on_poll_empty = connection.transport.on_poll_empty + strategies = self.strategies + drain_nowait = connection.drain_nowait + on_task_callbacks = hub.on_task + keep_draining = connection.transport.nb_keep_draining + errors = connection.connection_errors + + if hb and connection.supports_heartbeats: + hub.timer.apply_interval( + hb * 1000.0 / hbrate, hbtick, (hbrate, )) + + def on_task_received(body, message): + if on_task_callbacks: + [callback() for callback in on_task_callbacks] + try: + name = body['task'] + except (KeyError, TypeError): + return self.handle_unknown_message(body, message) + try: + strategies[name](message, body, message.ack_log_error) + except KeyError, exc: + self.handle_unknown_task(body, message, exc) + except InvalidTaskError, exc: + self.handle_invalid_task(body, message, exc) + + self.task_consumer.callbacks = [on_task_received] + self.task_consumer.consume() + + debug('Ready to accept tasks!') + + while self._state != CLOSE and self.connection: + # shutdown if signal handlers told us to. + if state.should_stop: + raise SystemExit() + elif state.should_terminate: + raise SystemTerminate() + + # fire any ready timers, this also returns + # the number of seconds until we need to fire timers again. + poll_timeout = (fire_timers(propagate=errors) if scheduled + else 1) + + # We only update QoS when there is no more messages to read. + # This groups together qos calls, and makes sure that remote + # control commands will be prioritized over task messages. + if qos.prev != qos.value: + update_qos() + + update_readers(on_poll_start()) + if readers or writers: + connection.more_to_read = True + while connection.more_to_read: + try: + events = poll(poll_timeout) + except ValueError: # Issue 882 + return + if not events: + on_poll_empty() + for fileno, event in events or (): + try: + if event & READ: + readers[fileno](fileno, event) + if event & WRITE: + writers[fileno](fileno, event) + if event & ERR: + for handlermap in readers, writers: + try: + handlermap[fileno](fileno, event) + except KeyError: + pass + except (KeyError, Empty): + continue + except socket.error: + if self._state != CLOSE: # pragma: no cover + raise + if keep_draining: + drain_nowait() + poll_timeout = 0 + else: + connection.more_to_read = False + else: + # no sockets yet, startup is probably not done. 
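+                    # (the 0.1s cap keeps startup responsive: a socket
+                    # appearing late is picked up on the next iteration.)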
+                    sleep(min(poll_timeout, 0.1))
+
+    def on_task(self, task, task_reserved=task_reserved,
+            to_system_tz=timezone.to_system):
+        """Handle received task.
+
+        If the task has an `eta` we enter it into the ETA schedule,
+        otherwise we move it to the ready queue for immediate processing.
+
+        """
+        if task.revoked():
+            return
+
+        if self._does_info:
+            info('Got task from broker: %s', task)
+
+        if self.event_dispatcher.enabled:
+            self.event_dispatcher.send(
+                'task-received',
+                uuid=task.id, name=task.name,
+                args=safe_repr(task.args), kwargs=safe_repr(task.kwargs),
+                retries=task.request_dict.get('retries', 0),
+                eta=task.eta and task.eta.isoformat(),
+                expires=task.expires and task.expires.isoformat(),
+            )
+
+        if task.eta:
+            try:
+                if task.utc:
+                    eta = to_timestamp(to_system_tz(task.eta))
+                else:
+                    eta = to_timestamp(task.eta, timezone.local)
+            except OverflowError, exc:
+                error("Couldn't convert eta %s to timestamp: %r. Task: %r",
+                      task.eta, exc, task.info(safe=True), exc_info=True)
+                task.acknowledge()
+            else:
+                self.qos.increment_eventually()
+                self.timer.apply_at(
+                    eta, self.apply_eta_task, (task, ), priority=6,
+                )
+        else:
+            task_reserved(task)
+            self._quick_put(task)
+
+    def on_control(self, body, message):
+        """Process remote control command message."""
+        try:
+            self.pidbox_node.handle_message(body, message)
+        except KeyError, exc:
+            error('No such control command: %s', exc)
+        except Exception, exc:
+            error('Control command error: %r', exc, exc_info=True)
+            self.reset_pidbox_node()
+
+    def apply_eta_task(self, task):
+        """Method called by the timer to apply a task with an
+        ETA/countdown."""
+        task_reserved(task)
+        self._quick_put(task)
+        self.qos.decrement_eventually()
+
+    def _message_report(self, body, message):
+        return MESSAGE_REPORT % (dump_body(message, body),
+                                 safe_repr(message.content_type),
+                                 safe_repr(message.content_encoding),
+                                 safe_repr(message.delivery_info))
+
+    def handle_unknown_message(self, body, message):
+        warn(UNKNOWN_FORMAT, self._message_report(body, message))
+        message.reject_log_error(logger, self.connection_errors)
+
+    def handle_unknown_task(self, body, message, exc):
+        error(UNKNOWN_TASK_ERROR, exc, dump_body(message, body),
+              exc_info=True)
+        message.reject_log_error(logger, self.connection_errors)
+
+    def handle_invalid_task(self, body, message, exc):
+        error(INVALID_TASK_ERROR, exc, dump_body(message, body),
+              exc_info=True)
+        message.reject_log_error(logger, self.connection_errors)
+
+    def receive_message(self, body, message):
+        """Handles incoming messages.
+
+        :param body: The message body.
+        :param message: The kombu message object.
+
+        """
+        try:
+            name = body['task']
+        except (KeyError, TypeError):
+            return self.handle_unknown_message(body, message)
+
+        try:
+            self.strategies[name](message, body, message.ack_log_error)
+        except KeyError, exc:
+            self.handle_unknown_task(body, message, exc)
+        except InvalidTaskError, exc:
+            self.handle_invalid_task(body, message, exc)
+
+    def maybe_conn_error(self, fun):
+        """Applies function but ignores any connection or channel
+        errors raised."""
+        try:
+            fun()
+        except (AttributeError, ) + \
+                self.connection_errors + \
+                self.channel_errors:
+            pass
+
+    def close_connection(self):
+        """Closes the current broker connection and all open channels."""
+
+        # We must set self.connection to None here, so
+        # that the green pidbox thread exits.
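+        # The swap happens in a single statement so other threads never
+        # observe a half-closed connection attribute.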
+ connection, self.connection = self.connection, None + + if self.task_consumer: + debug('Closing consumer channel...') + self.task_consumer = self.maybe_conn_error( + self.task_consumer.close) + + self.stop_pidbox_node() + + if connection: + debug('Closing broker connection...') + self.maybe_conn_error(connection.close) + + def stop_consumers(self, close_connection=True, join=True): + """Stop consuming tasks and broadcast commands, also stops + the heartbeat thread and event dispatcher. + + :keyword close_connection: Set to False to skip closing the broker + connection. + + """ + if not self._state == RUN: + return + + if self.heart: + # Stop the heartbeat thread if it's running. + debug('Heart: Going into cardiac arrest...') + self.heart = self.heart.stop() + + debug('Cancelling task consumer...') + if join and self.task_consumer: + self.maybe_conn_error(self.task_consumer.cancel) + + if self.event_dispatcher: + debug('Shutting down event dispatcher...') + self.event_dispatcher = self.maybe_conn_error( + self.event_dispatcher.close) + + debug('Cancelling broadcast consumer...') + if join and self.broadcast_consumer: + self.maybe_conn_error(self.broadcast_consumer.cancel) + + if close_connection: + self.close_connection() + + def on_decode_error(self, message, exc): + """Callback called if an error occurs while decoding + a message received. + + Simply logs the error and acknowledges the message so it + doesn't enter a loop. + + :param message: The message with errors. + :param exc: The original exception instance. + + """ + crit("Can't decode message body: %r (type:%r encoding:%r raw:%r')", + exc, message.content_type, message.content_encoding, + dump_body(message, message.body), exc_info=1) + message.ack() + + def reset_pidbox_node(self): + """Sets up the process mailbox.""" + self.stop_pidbox_node() + # close previously opened channel if any. + if self.pidbox_node.channel: + try: + self.pidbox_node.channel.close() + except self.connection_errors + self.channel_errors: + pass + + if self.pool is not None and self.pool.is_green: + return self.pool.spawn_n(self._green_pidbox_node) + self.pidbox_node.channel = self.connection.channel() + self.broadcast_consumer = self.pidbox_node.listen( + callback=self.on_control, + ) + + def stop_pidbox_node(self): + if self._pidbox_node_stopped: + self._pidbox_node_shutdown.set() + debug('Waiting for broadcast thread to shutdown...') + self._pidbox_node_stopped.wait() + self._pidbox_node_stopped = self._pidbox_node_shutdown = None + elif self.broadcast_consumer: + debug('Closing broadcast channel...') + self.broadcast_consumer = \ + self.maybe_conn_error(self.broadcast_consumer.channel.close) + + def _green_pidbox_node(self): + """Sets up the process mailbox when running in a greenlet + environment.""" + # THIS CODE IS TERRIBLE + # Luckily work has already started rewriting the Consumer for 4.0. 
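+        # Two events coordinate with stop_pidbox_node(): setting
+        # _pidbox_node_shutdown asks this green thread to exit, and
+        # _pidbox_node_stopped is set on the way out to confirm it did.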
+ self._pidbox_node_shutdown = threading.Event() + self._pidbox_node_stopped = threading.Event() + try: + with self._open_connection() as conn: + info('pidbox: Connected to %s.', conn.as_uri()) + self.pidbox_node.channel = conn.default_channel + self.broadcast_consumer = self.pidbox_node.listen( + callback=self.on_control, + ) + + with self.broadcast_consumer: + while not self._pidbox_node_shutdown.isSet(): + try: + conn.drain_events(timeout=1.0) + except socket.timeout: + pass + finally: + self._pidbox_node_stopped.set() + + def reset_connection(self): + """Re-establish the broker connection and set up consumers, + heartbeat and the event dispatcher.""" + debug('Re-establishing connection to the broker...') + self.stop_consumers(join=False) + + # Clear internal queues to get rid of old messages. + # They can't be acked anyway, as a delivery tag is specific + # to the current channel. + self.ready_queue.clear() + self.timer.clear() + + # Re-establish the broker connection and setup the task consumer. + self.connection = self._open_connection() + info('consumer: Connected to %s.', self.connection.as_uri()) + self.task_consumer = self.app.amqp.TaskConsumer( + self.connection, on_decode_error=self.on_decode_error, + ) + # QoS: Reset prefetch window. + self.qos = QoS(self.task_consumer, self.initial_prefetch_count) + self.qos.update() + + # Setup the process mailbox. + self.reset_pidbox_node() + + # Flush events sent while connection was down. + prev_event_dispatcher = self.event_dispatcher + self.event_dispatcher = self.app.events.Dispatcher( + self.connection, hostname=self.hostname, enabled=self.send_events, + ) + if prev_event_dispatcher: + self.event_dispatcher.copy_buffer(prev_event_dispatcher) + self.event_dispatcher.flush() + + # Restart heartbeat thread. + self.restart_heartbeat() + + # reload all task's execution strategies. + self.update_strategies() + + # We're back! + self._state = RUN + + def restart_heartbeat(self): + """Restart the heartbeat thread. + + This thread sends heartbeat events at intervals so monitors + can tell if the worker is off-line/missing. + + """ + self.heart = Heart(self.timer, self.event_dispatcher) + self.heart.start() + + def _open_connection(self): + """Establish the broker connection. + + Will retry establishing the connection if the + :setting:`BROKER_CONNECTION_RETRY` setting is enabled + + """ + conn = self.app.connection(heartbeat=self.amqheartbeat) + + # Callback called for each retry while the connection + # can't be established. + def _error_handler(exc, interval, next_step=CONNECTION_RETRY): + if getattr(conn, 'alt', None) and interval == 0: + next_step = CONNECTION_FAILOVER + error(CONNECTION_ERROR, conn.as_uri(), exc, + next_step % {'when': humanize_seconds(interval, 'in', ' ')}) + + # remember that the connection is lazy, it won't establish + # until it's needed. + if not self.app.conf.BROKER_CONNECTION_RETRY: + # retry disabled, just call connect directly. + conn.connect() + return conn + + return conn.ensure_connection( + _error_handler, self.app.conf.BROKER_CONNECTION_MAX_RETRIES, + callback=self.maybe_shutdown, + ) + + def stop(self): + """Stop consuming. + + Does not close the broker connection, so be sure to call + :meth:`close_connection` when you are finished with it. + + """ + # Notifies other threads that this instance can't be used + # anymore. 
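+        # (close() below only flips _state to CLOSE; the broker connection
+        # itself stays open until close_connection() is called.)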
+        self.close()
+        debug('Stopping consumers...')
+        self.stop_consumers(close_connection=False, join=True)
+
+    def close(self):
+        self._state = CLOSE
+
+    def maybe_shutdown(self):
+        if state.should_stop:
+            raise SystemExit()
+        elif state.should_terminate:
+            raise SystemTerminate()
+
+    def add_task_queue(self, queue, exchange=None, exchange_type=None,
+                       routing_key=None, **options):
+        cset = self.task_consumer
+        queues = self.app.amqp.queues
+        # Must use 'in' here, as __missing__ will automatically
+        # create queues when CELERY_CREATE_MISSING_QUEUES is enabled.
+        # (Issue #1079)
+        if queue in queues:
+            q = queues[queue]
+        else:
+            exchange = queue if exchange is None else exchange
+            exchange_type = ('direct' if exchange_type is None
+                             else exchange_type)
+            q = queues.select_add(queue,
+                                  exchange=exchange,
+                                  exchange_type=exchange_type,
+                                  routing_key=routing_key, **options)
+        if not cset.consuming_from(queue):
+            cset.add_queue(q)
+            cset.consume()
+            logger.info('Started consuming from %r', queue)
+
+    def cancel_task_queue(self, queue):
+        self.app.amqp.queues.select_remove(queue)
+        self.task_consumer.cancel_by_queue(queue)
+
+    @property
+    def info(self):
+        """Returns information about this consumer instance
+        as a dict.
+
+        This is also the consumer related info returned by
+        ``celeryctl stats``.
+
+        """
+        conninfo = {}
+        if self.connection:
+            conninfo = self.connection.info()
+            conninfo.pop('password', None)  # don't send password.
+        return {'broker': conninfo, 'prefetch_count': self.qos.value}
+
+
+class BlockingConsumer(Consumer):
+
+    def consume_messages(self):
+        # receive_message handles incoming messages.
+        self.task_consumer.register_callback(self.receive_message)
+        self.task_consumer.consume()
+
+        debug('Ready to accept tasks!')
+
+        while self._state != CLOSE and self.connection:
+            self.maybe_shutdown()
+            if self.qos.prev != self.qos.value:  # pragma: no cover
+                self.qos.update()
+            try:
+                self.connection.drain_events(timeout=10.0)
+            except socket.timeout:
+                pass
+            except socket.error:
+                if self._state != CLOSE:  # pragma: no cover
+                    raise
diff --git a/awx/lib/site-packages/celery/worker/control.py b/awx/lib/site-packages/celery/worker/control.py
new file mode 100644
index 0000000000..4ad87e968b
--- /dev/null
+++ b/awx/lib/site-packages/celery/worker/control.py
@@ -0,0 +1,302 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.worker.control
+    ~~~~~~~~~~~~~~~~~~~~~
+
+    Remote control commands.
+
+"""
+from __future__ import absolute_import
+
+import logging
+import os
+
+from datetime import datetime
+
+from kombu.utils.encoding import safe_repr
+
+from celery.platforms import signals as _signals
+from celery.utils import timeutils
+from celery.utils.compat import UserDict
+from celery.utils.log import get_logger
+from celery.utils import jsonify
+
+from . import state
+from .state import revoked
+
+DEFAULT_TASK_INFO_ITEMS = ('exchange', 'routing_key', 'rate_limit')
+logger = get_logger(__name__)
+
+
+class Panel(UserDict):
+    data = dict()  # Global registry.
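+    # Remote control commands are plain functions registered into this
+    # class-level mapping via the ``register`` classmethod, e.g. the
+    # ``ping`` command defined later in this module:
+    #
+    #     @Panel.register
+    #     def ping(panel, **kwargs):
+    #         return 'pong'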
+ + @classmethod + def register(cls, method, name=None): + cls.data[name or method.__name__] = method + return method + + +@Panel.register +def revoke(panel, task_id, terminate=False, signal=None, **kwargs): + """Revoke task by task id.""" + revoked.add(task_id) + if terminate: + signum = _signals.signum(signal or 'TERM') + for request in state.reserved_requests: + if request.id == task_id: + logger.info('Terminating %s (%s)', task_id, signum) + request.terminate(panel.consumer.pool, signal=signum) + break + else: + return {'ok': 'terminate: task %s not found' % (task_id, )} + return {'ok': 'terminating %s (%s)' % (task_id, signal)} + + logger.info('Revoking task %s', task_id) + return {'ok': 'revoking task %s' % (task_id, )} + + +@Panel.register +def report(panel): + return {'ok': panel.app.bugreport()} + + +@Panel.register +def enable_events(panel): + dispatcher = panel.consumer.event_dispatcher + if not dispatcher.enabled: + dispatcher.enable() + dispatcher.send('worker-online') + logger.info('Events enabled by remote.') + return {'ok': 'events enabled'} + return {'ok': 'events already enabled'} + + +@Panel.register +def disable_events(panel): + dispatcher = panel.consumer.event_dispatcher + if dispatcher.enabled: + dispatcher.send('worker-offline') + dispatcher.disable() + logger.info('Events disabled by remote.') + return {'ok': 'events disabled'} + return {'ok': 'events already disabled'} + + +@Panel.register +def heartbeat(panel): + logger.debug('Heartbeat requested by remote.') + dispatcher = panel.consumer.event_dispatcher + dispatcher.send('worker-heartbeat', freq=5, **state.SOFTWARE_INFO) + + +@Panel.register +def rate_limit(panel, task_name, rate_limit, **kwargs): + """Set new rate limit for a task type. + + See :attr:`celery.task.base.Task.rate_limit`. + + :param task_name: Type of task. + :param rate_limit: New rate limit. + + """ + + try: + timeutils.rate(rate_limit) + except ValueError, exc: + return {'error': 'Invalid rate limit string: %s' % exc} + + try: + panel.app.tasks[task_name].rate_limit = rate_limit + except KeyError: + logger.error('Rate limit attempt for unknown task %s', + task_name, exc_info=True) + return {'error': 'unknown task'} + + if not hasattr(panel.consumer.ready_queue, 'refresh'): + logger.error('Rate limit attempt, but rate limits disabled.') + return {'error': 'rate limits disabled'} + + panel.consumer.ready_queue.refresh() + + if not rate_limit: + logger.info('Rate limits disabled for tasks of type %s', task_name) + return {'ok': 'rate limit disabled successfully'} + + logger.info('New rate limit for tasks of type %s: %s.', + task_name, rate_limit) + return {'ok': 'new rate limit set successfully'} + + +@Panel.register +def time_limit(panel, task_name=None, hard=None, soft=None, **kwargs): + try: + task = panel.app.tasks[task_name] + except KeyError: + logger.error('Change time limit attempt for unknown task %s', + task_name, exc_info=True) + return {'error': 'unknown task'} + + task.soft_time_limit = soft + task.time_limit = hard + + logger.info('New time limits for tasks of type %s: soft=%s hard=%s', + task_name, soft, hard) + return {'ok': 'time limits set successfully'} + + +@Panel.register +def dump_schedule(panel, safe=False, **kwargs): + from celery.worker.job import Request + schedule = panel.consumer.timer.schedule + if not schedule.queue: + logger.debug('--Empty schedule--') + return [] + + formatitem = lambda i, item: '%s. 
%s pri%s %r' % ( + i, datetime.utcfromtimestamp(item['eta']), + item['priority'], item['item'], + ) + if logger.isEnabledFor(logging.DEBUG): + logger.debug('* Dump of current schedule:\n%s', '\n'.join( + formatitem(i, item) for i, item in enumerate(schedule.info()) + )) + scheduled_tasks = [] + for info in schedule.info(): + item = info['item'] + if item.args and isinstance(item.args[0], Request): + scheduled_tasks.append({ + 'eta': info['eta'], + 'priority': info['priority'], + 'request': item.args[0].info(safe=safe), + }) + return scheduled_tasks + + +@Panel.register +def dump_reserved(panel, safe=False, **kwargs): + reserved = state.reserved_requests - state.active_requests + if not reserved: + logger.debug('--Empty queue--') + return [] + if logger.isEnabledFor(logging.DEBUG): + logger.debug('* Dump of currently reserved tasks:\n%s', + '\n'.join(safe_repr(r) for r in reserved)) + return [request.info(safe=safe) for request in reserved] + + +@Panel.register +def dump_active(panel, safe=False, **kwargs): + return [request.info(safe=safe) for request in state.active_requests] + + +@Panel.register +def stats(panel, **kwargs): + asinfo = {} + if panel.consumer.controller.autoscaler: + asinfo = panel.consumer.controller.autoscaler.info() + return {'total': state.total_count, + 'consumer': panel.consumer.info, + 'pool': panel.consumer.pool.info, + 'autoscaler': asinfo, + 'pid': os.getpid()} + + +@Panel.register +def dump_revoked(panel, **kwargs): + return list(state.revoked) + + +@Panel.register +def dump_tasks(panel, taskinfoitems=None, **kwargs): + tasks = panel.app.tasks + taskinfoitems = taskinfoitems or DEFAULT_TASK_INFO_ITEMS + + def _extract_info(task): + fields = dict( + (field, str(getattr(task, field, None))) + for field in taskinfoitems + if getattr(task, field, None) is not None) + info = ['='.join(f) for f in fields.items()] + if not info: + return task.name + return '%s [%s]' % (task.name, ' '.join(info)) + + info = [_extract_info(tasks[task]) for task in sorted(tasks)] + if logger.isEnabledFor(logging.DEBUG): + logger.debug('* Dump of currently registered tasks:\n%s', + '\n'.join(info)) + return info + + +@Panel.register +def ping(panel, **kwargs): + return 'pong' + + +@Panel.register +def pool_grow(panel, n=1, **kwargs): + if panel.consumer.controller.autoscaler: + panel.consumer.controller.autoscaler.force_scale_up(n) + else: + panel.consumer.pool.grow(n) + return {'ok': 'spawned worker processes'} + + +@Panel.register +def pool_shrink(panel, n=1, **kwargs): + if panel.consumer.controller.autoscaler: + panel.consumer.controller.autoscaler.force_scale_down(n) + else: + panel.consumer.pool.shrink(n) + return {'ok': 'terminated worker processes'} + + +@Panel.register +def pool_restart(panel, modules=None, reload=False, reloader=None, **kwargs): + if panel.app.conf.CELERYD_POOL_RESTARTS: + panel.consumer.controller.reload(modules, reload, reloader=reloader) + return {'ok': 'reload started'} + else: + raise ValueError('Pool restarts not enabled') + + +@Panel.register +def autoscale(panel, max=None, min=None): + autoscaler = panel.consumer.controller.autoscaler + if autoscaler: + max_, min_ = autoscaler.update(max, min) + return {'ok': 'autoscale now min=%r max=%r' % (max_, min_)} + raise ValueError('Autoscale not enabled') + + +@Panel.register +def shutdown(panel, msg='Got shutdown from remote', **kwargs): + logger.warning(msg) + raise SystemExit(msg) + + +@Panel.register +def add_consumer(panel, queue, exchange=None, exchange_type=None, + routing_key=None, **options): + 
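+    # Delegates to Consumer.add_task_queue(), which declares the queue if
+    # necessary and starts consuming from it on the existing channel.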
+    panel.consumer.add_task_queue(queue, exchange, exchange_type,
+                                  routing_key, **options)
+    return {'ok': 'add consumer %r' % (queue, )}
+
+
+@Panel.register
+def cancel_consumer(panel, queue=None, **_):
+    panel.consumer.cancel_task_queue(queue)
+    return {'ok': 'no longer consuming from %s' % (queue, )}
+
+
+@Panel.register
+def active_queues(panel):
+    """Returns the queues associated with this worker."""
+    return [dict(queue.as_dict(recurse=True))
+            for queue in panel.consumer.task_consumer.queues]
+
+
+@Panel.register
+def dump_conf(panel, **kwargs):
+    return jsonify(dict(panel.app.conf))
diff --git a/awx/lib/site-packages/celery/worker/heartbeat.py b/awx/lib/site-packages/celery/worker/heartbeat.py
new file mode 100644
index 0000000000..d930f49be7
--- /dev/null
+++ b/awx/lib/site-packages/celery/worker/heartbeat.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.worker.heartbeat
+    ~~~~~~~~~~~~~~~~~~~~~~~
+
+    This is the internal thread that sends heartbeat events
+    at regular intervals.
+
+"""
+from __future__ import absolute_import
+
+from .state import SOFTWARE_INFO, active_requests, total_count
+
+
+class Heart(object):
+    """Timer sending heartbeats at regular intervals.
+
+    :param timer: Timer instance.
+    :param eventer: Event dispatcher used to send the event.
+    :keyword interval: Time in seconds between heartbeats.
+                       Default is 5 seconds.
+
+    """
+
+    def __init__(self, timer, eventer, interval=None):
+        self.timer = timer
+        self.eventer = eventer
+        self.interval = float(interval or 5.0)
+        self.tref = None
+
+        # Make event dispatcher start/stop us when it's
+        # enabled/disabled.
+        self.eventer.on_enabled.add(self.start)
+        self.eventer.on_disabled.add(self.stop)
+
+    def _send(self, event):
+        return self.eventer.send(event, freq=self.interval,
+                                 active=len(active_requests),
+                                 processed=sum(total_count.itervalues()),
+                                 **SOFTWARE_INFO)
+
+    def start(self):
+        if self.eventer.enabled:
+            self._send('worker-online')
+            self.tref = self.timer.apply_interval(
+                self.interval * 1000.0, self._send, ('worker-heartbeat', ),
+            )
+
+    def stop(self):
+        if self.tref is not None:
+            self.timer.cancel(self.tref)
+            self.tref = None
+            if self.eventer.enabled:
+                self._send('worker-offline')
diff --git a/awx/lib/site-packages/celery/worker/hub.py b/awx/lib/site-packages/celery/worker/hub.py
new file mode 100644
index 0000000000..7c1c17a483
--- /dev/null
+++ b/awx/lib/site-packages/celery/worker/hub.py
@@ -0,0 +1,222 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.worker.hub
+    ~~~~~~~~~~~~~~~~~
+
+    Event-loop implementation.
+
+"""
+from __future__ import absolute_import
+
+from kombu.utils import cached_property
+from kombu.utils import eventio
+
+from celery.utils.log import get_logger
+from celery.utils.timer2 import Schedule
+
+logger = get_logger(__name__)
+READ, WRITE, ERR = eventio.READ, eventio.WRITE, eventio.ERR
+
+
+class BoundedSemaphore(object):
+    """Asynchronous Bounded Semaphore.
+
+    Bounded means that the value will stay within the specified
+    range even if it is released more times than it was acquired.
+
+    This type is *not thread safe*.
+
+    Example:
+
+    >>> x = BoundedSemaphore(2)
+
+    >>> def callback(i):
+    ...     print('HELLO %r' % i)
+
+    >>> x.acquire(callback, 1)
+    HELLO 1
+
+    >>> x.acquire(callback, 2)
+    HELLO 2
+
+    >>> x.acquire(callback, 3)
+    >>> x._waiting   # private, do not access directly
+    [(callback, (3, ))]
+
+    >>> x.release()
+    HELLO 3
+
+    """
+
+    def __init__(self, value):
+        self.initial_value = self.value = value
+        self._waiting = []
+
+    def acquire(self, callback, *partial_args):
+        """Acquire semaphore, applying ``callback`` when
+        the semaphore is ready.
+
+        :param callback: The callback to apply.
+        :param \*partial_args: partial arguments to callback.
+
+        """
+        if self.value <= 0:
+            self._waiting.append((callback, partial_args))
+            return False
+        else:
+            self.value = max(self.value - 1, 0)
+            callback(*partial_args)
+            return True
+
+    def release(self):
+        """Release semaphore.
+
+        This will apply any waiting callbacks from previous
+        calls to :meth:`acquire` done when the semaphore was busy.
+
+        """
+        self.value = min(self.value + 1, self.initial_value)
+        if self._waiting:
+            waiter, args = self._waiting.pop()
+            waiter(*args)
+
+    def grow(self, n=1):
+        """Change the size of the semaphore to hold more values."""
+        self.initial_value += n
+        self.value += n
+        [self.release() for _ in xrange(n)]
+
+    def shrink(self, n=1):
+        """Change the size of the semaphore to hold fewer values."""
+        self.initial_value = max(self.initial_value - n, 0)
+        self.value = max(self.value - n, 0)
+
+    def clear(self):
+        """Reset the semaphore, including wiping out any waiting callbacks."""
+        self._waiting[:] = []
+        self.value = self.initial_value
+
+
+class Hub(object):
+    """Event loop object.
+
+    :keyword timer: Specify custom :class:`~celery.utils.timer2.Schedule`.
+
+    """
+    #: Flag set if reading from an fd will not block.
+    READ = READ
+
+    #: Flag set if writing to an fd will not block.
+    WRITE = WRITE
+
+    #: Flag set on error, and the fd should be read from asap.
+    ERR = ERR
+
+    #: List of callbacks to be called when the loop is initialized,
+    #: applied with the hub instance as sole argument.
+    on_init = None
+
+    #: List of callbacks to be called when the loop is exiting,
+    #: applied with the hub instance as sole argument.
+    on_close = None
+
+    #: List of callbacks to be called when a task is received.
+    #: Takes no arguments.
+ on_task = None + + def __init__(self, timer=None): + self.timer = Schedule() if timer is None else timer + + self.readers = {} + self.writers = {} + self.on_init = [] + self.on_close = [] + self.on_task = [] + + def start(self): + """Called by StartStopComponent at worker startup.""" + self.poller = eventio.poll() + + def stop(self): + """Called by StartStopComponent at worker shutdown.""" + self.poller.close() + + def init(self): + for callback in self.on_init: + callback(self) + + def fire_timers(self, min_delay=1, max_delay=10, max_timers=10, + propagate=()): + delay = None + if self.timer._queue: + for i in range(max_timers): + delay, entry = self.scheduler.next() + if entry is None: + break + try: + entry() + except propagate: + raise + except Exception, exc: + logger.error('Error in timer: %r', exc, exc_info=1) + return min(max(delay or 0, min_delay), max_delay) + + def add(self, fd, callback, flags): + self.poller.register(fd, flags) + if not isinstance(fd, int): + fd = fd.fileno() + if flags & READ: + self.readers[fd] = callback + if flags & WRITE: + self.writers[fd] = callback + + def add_reader(self, fd, callback): + return self.add(fd, callback, READ | ERR) + + def add_writer(self, fd, callback): + return self.add(fd, callback, WRITE) + + def update_readers(self, readers): + [self.add_reader(*x) for x in readers.iteritems()] + + def update_writers(self, writers): + [self.add_writer(*x) for x in writers.iteritems()] + + def _unregister(self, fd): + try: + self.poller.unregister(fd) + except (KeyError, OSError): + pass + + def remove(self, fd): + fileno = fd.fileno() if not isinstance(fd, int) else fd + self.readers.pop(fileno, None) + self.writers.pop(fileno, None) + self._unregister(fd) + + def __enter__(self): + self.init() + return self + + def close(self, *args): + [self._unregister(fd) for fd in self.readers] + self.readers.clear() + [self._unregister(fd) for fd in self.writers] + self.writers.clear() + for callback in self.on_close: + callback(self) + __exit__ = close + + @cached_property + def scheduler(self): + return iter(self.timer) + + +class DummyLock(object): + """Pretending to be a lock.""" + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + pass diff --git a/awx/lib/site-packages/celery/worker/job.py b/awx/lib/site-packages/celery/worker/job.py new file mode 100644 index 0000000000..3bfe45e983 --- /dev/null +++ b/awx/lib/site-packages/celery/worker/job.py @@ -0,0 +1,520 @@ +# -*- coding: utf-8 -*- +""" + celery.worker.job + ~~~~~~~~~~~~~~~~~ + + This module defines the :class:`Request` class, + which specifies how tasks are executed. 
+ +""" +from __future__ import absolute_import + +import logging +import time +import socket +import sys + +from datetime import datetime + +from kombu.utils import kwdict, reprcall +from kombu.utils.encoding import safe_repr, safe_str + +from celery import signals +from celery.app import app_or_default +from celery.datastructures import ExceptionInfo +from celery.exceptions import ( + Ignore, TaskRevokedError, InvalidTaskError, + SoftTimeLimitExceeded, TimeLimitExceeded, + WorkerLostError, Terminated, RetryTaskError, +) +from celery.platforms import signals as _signals +from celery.task.trace import ( + trace_task, + trace_task_ret, +) +from celery.utils import fun_takes_kwargs +from celery.utils.functional import noop +from celery.utils.log import get_logger +from celery.utils.serialization import get_pickled_exception +from celery.utils.text import truncate +from celery.utils.timeutils import maybe_iso8601, timezone, maybe_make_aware + +from . import state + +IS_PYPY = hasattr(sys, 'pypy_version_info') + +logger = get_logger(__name__) +debug, info, warn, error = (logger.debug, logger.info, + logger.warning, logger.error) +_does_info = False +_does_debug = False + + +def __optimize__(): + global _does_debug + global _does_info + _does_debug = logger.isEnabledFor(logging.DEBUG) + _does_info = logger.isEnabledFor(logging.INFO) +__optimize__() + +# Localize +tz_utc = timezone.utc +tz_or_local = timezone.tz_or_local +send_revoked = signals.task_revoked.send + +task_accepted = state.task_accepted +task_ready = state.task_ready +revoked_tasks = state.revoked + +NEEDS_KWDICT = sys.version_info <= (2, 6) + + +class Request(object): + """A request for task execution.""" + if not IS_PYPY: + __slots__ = ( + 'app', 'name', 'id', 'args', 'kwargs', 'on_ack', 'delivery_info', + 'hostname', 'eventer', 'connection_errors', 'task', 'eta', + 'expires', 'request_dict', 'acknowledged', + 'utc', 'time_start', 'worker_pid', '_already_revoked', + '_terminate_on_ack', + '_tzlocal', '__weakref__', + ) + + #: Format string used to log task success. + success_msg = """\ + Task %(name)s[%(id)s] succeeded in %(runtime)ss: %(return_value)s + """ + + #: Format string used to log task failure. + error_msg = """\ + Task %(name)s[%(id)s] raised exception: %(exc)s + """ + + #: Format string used to log internal error. + internal_error_msg = """\ + Task %(name)s[%(id)s] INTERNAL ERROR: %(exc)s + """ + + ignored_msg = """\ + Task %(name)s[%(id)s] ignored + """ + + #: Format string used to log task retry. 
+ retry_msg = """Task %(name)s[%(id)s] retry: %(exc)s""" + + def __init__(self, body, on_ack=noop, + hostname=None, eventer=None, app=None, + connection_errors=None, request_dict=None, + delivery_info=None, task=None, **opts): + self.app = app or app_or_default(app) + name = self.name = body['task'] + self.id = body['id'] + self.args = body.get('args', []) + self.kwargs = body.get('kwargs', {}) + try: + self.kwargs.items + except AttributeError: + raise InvalidTaskError( + 'Task keyword arguments is not a mapping') + if NEEDS_KWDICT: + self.kwargs = kwdict(self.kwargs) + eta = body.get('eta') + expires = body.get('expires') + utc = self.utc = body.get('utc', False) + self.on_ack = on_ack + self.hostname = hostname or socket.gethostname() + self.eventer = eventer + self.connection_errors = connection_errors or () + self.task = task or self.app.tasks[name] + self.acknowledged = self._already_revoked = False + self.time_start = self.worker_pid = self._terminate_on_ack = None + self._tzlocal = None + + # timezone means the message is timezone-aware, and the only timezone + # supported at this point is UTC. + if eta is not None: + try: + self.eta = maybe_iso8601(eta) + except (AttributeError, ValueError), exc: + raise InvalidTaskError( + 'invalid eta value %r: %s' % (eta, exc, )) + if utc: + self.eta = maybe_make_aware(self.eta, self.tzlocal) + else: + self.eta = None + if expires is not None: + try: + self.expires = maybe_iso8601(expires) + except (AttributeError, ValueError), exc: + raise InvalidTaskError( + 'invalid expires value %r: %s' % (expires, exc, )) + if utc: + self.expires = maybe_make_aware(self.expires, self.tzlocal) + else: + self.expires = None + + delivery_info = {} if delivery_info is None else delivery_info + self.delivery_info = { + 'exchange': delivery_info.get('exchange'), + 'routing_key': delivery_info.get('routing_key'), + 'priority': delivery_info.get('priority'), + } + + # amqplib transport adds the channel here for some reason, so need + # to remove it. + self.delivery_info.pop('channel', None) + self.request_dict = body + + @classmethod + def from_message(cls, message, body, **kwargs): + # should be deprecated + return Request( + body, + delivery_info=getattr(message, 'delivery_info', None), **kwargs + ) + + def extend_with_default_kwargs(self): + """Extend the tasks keyword arguments with standard task arguments. + + Currently these are `logfile`, `loglevel`, `task_id`, + `task_name`, `task_retries`, and `delivery_info`. + + See :meth:`celery.task.base.Task.run` for more information. + + Magic keyword arguments are deprecated and will be removed + in version 4.0. + + """ + kwargs = dict(self.kwargs) + default_kwargs = {'logfile': None, # deprecated + 'loglevel': None, # deprecated + 'task_id': self.id, + 'task_name': self.name, + 'task_retries': self.request_dict.get('retries', 0), + 'task_is_eager': False, + 'delivery_info': self.delivery_info} + fun = self.task.run + supported_keys = fun_takes_kwargs(fun, default_kwargs) + extend_with = dict((key, val) for key, val in default_kwargs.items() + if key in supported_keys) + kwargs.update(extend_with) + return kwargs + + def execute_using_pool(self, pool, **kwargs): + """Like :meth:`execute`, but using a worker pool. + + :param pool: A :class:`celery.concurrency.base.TaskPool` instance. + + :raises celery.exceptions.TaskRevokedError: if the task was revoked + and ignored. 
+ + """ + task = self.task + if self.revoked(): + raise TaskRevokedError(self.id) + + hostname = self.hostname + kwargs = self.kwargs + if task.accept_magic_kwargs: + kwargs = self.extend_with_default_kwargs() + request = self.request_dict + request.update({'hostname': hostname, 'is_eager': False, + 'delivery_info': self.delivery_info, + 'group': self.request_dict.get('taskset')}) + result = pool.apply_async(trace_task_ret, + args=(self.name, self.id, + self.args, kwargs, request), + accept_callback=self.on_accepted, + timeout_callback=self.on_timeout, + callback=self.on_success, + error_callback=self.on_failure, + soft_timeout=task.soft_time_limit, + timeout=task.time_limit) + return result + + def execute(self, loglevel=None, logfile=None): + """Execute the task in a :func:`~celery.task.trace.trace_task`. + + :keyword loglevel: The loglevel used by the task. + :keyword logfile: The logfile used by the task. + + """ + if self.revoked(): + return + + # acknowledge task as being processed. + if not self.task.acks_late: + self.acknowledge() + + kwargs = self.kwargs + if self.task.accept_magic_kwargs: + kwargs = self.extend_with_default_kwargs() + request = self.request_dict + request.update({'loglevel': loglevel, 'logfile': logfile, + 'hostname': self.hostname, 'is_eager': False, + 'delivery_info': self.delivery_info}) + retval = trace_task(self.task, self.id, self.args, kwargs, request, + **{'hostname': self.hostname, + 'loader': self.app.loader}) + self.acknowledge() + return retval + + def maybe_expire(self): + """If expired, mark the task as revoked.""" + if self.expires: + now = datetime.now(tz_or_local(self.tzlocal) if self.utc else None) + if now > self.expires: + revoked_tasks.add(self.id) + return True + + def terminate(self, pool, signal=None): + if self.time_start: + signal = _signals.signum(signal or 'TERM') + pool.terminate_job(self.worker_pid, signal) + self._announce_revoked('terminated', True, signal, False) + else: + self._terminate_on_ack = pool, signal + + def _announce_revoked(self, reason, terminated, signum, expired): + task_ready(self) + self.send_event('task-revoked', + terminated=terminated, signum=signum, expired=expired) + if self.store_errors: + self.task.backend.mark_as_revoked(self.id, reason) + self.acknowledge() + self._already_revoked = True + send_revoked(self.task, terminated=terminated, + signum=signum, expired=expired) + + def revoked(self): + """If revoked, skip task and mark state.""" + expired = False + if self._already_revoked: + return True + if self.expires: + expired = self.maybe_expire() + if self.id in revoked_tasks: + warn('Skipping revoked task: %s[%s]', self.name, self.id) + self._announce_revoked( + 'expired' if expired else 'revoked', False, None, expired, + ) + return True + return False + + def send_event(self, type, **fields): + if self.eventer and self.eventer.enabled: + self.eventer.send(type, uuid=self.id, **fields) + + def on_accepted(self, pid, time_accepted): + """Handler called when task is accepted by worker pool.""" + self.worker_pid = pid + self.time_start = time_accepted + task_accepted(self) + if not self.task.acks_late: + self.acknowledge() + self.send_event('task-started', pid=pid) + if _does_debug: + debug('Task accepted: %s[%s] pid:%r', self.name, self.id, pid) + if self._terminate_on_ack is not None: + self.terminate(*self._terminate_on_ack) + + def on_timeout(self, soft, timeout): + """Handler called if the task times out.""" + task_ready(self) + if soft: + warn('Soft time limit (%ss) exceeded for %s[%s]', + timeout, 
self.name, self.id) + exc = SoftTimeLimitExceeded(timeout) + else: + error('Hard time limit (%ss) exceeded for %s[%s]', + timeout, self.name, self.id) + exc = TimeLimitExceeded(timeout) + + if self.store_errors: + self.task.backend.mark_as_failure(self.id, exc) + + def on_success(self, ret_value, now=None): + """Handler called if the task was successfully processed.""" + if isinstance(ret_value, ExceptionInfo): + if isinstance(ret_value.exception, ( + SystemExit, KeyboardInterrupt)): + raise ret_value.exception + return self.on_failure(ret_value) + task_ready(self) + + if self.task.acks_late: + self.acknowledge() + + if self.eventer and self.eventer.enabled: + now = time.time() + runtime = self.time_start and (time.time() - self.time_start) or 0 + self.send_event('task-succeeded', + result=safe_repr(ret_value), runtime=runtime) + + if _does_info: + now = now or time.time() + runtime = self.time_start and (time.time() - self.time_start) or 0 + info(self.success_msg.strip(), { + 'id': self.id, 'name': self.name, + 'return_value': self.repr_result(ret_value), + 'runtime': runtime}) + + def on_retry(self, exc_info): + """Handler called if the task should be retried.""" + if self.task.acks_late: + self.acknowledge() + + self.send_event('task-retried', + exception=safe_repr(exc_info.exception.exc), + traceback=safe_str(exc_info.traceback)) + + if _does_info: + info(self.retry_msg.strip(), { + 'id': self.id, 'name': self.name, + 'exc': exc_info.exception}) + + def on_failure(self, exc_info): + """Handler called if the task raised an exception.""" + task_ready(self) + + if not exc_info.internal: + exc = exc_info.exception + + if isinstance(exc, RetryTaskError): + return self.on_retry(exc_info) + + # These are special cases where the process would not have had + # time to write the result. + if self.store_errors: + if isinstance(exc, WorkerLostError): + self.task.backend.mark_as_failure(self.id, exc) + elif isinstance(exc, Terminated): + self._announce_revoked('terminated', True, str(exc), False) + # (acks_late) acknowledge after result stored. 
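+        # With acks_late the message is acknowledged only after the failure
+        # has been recorded, so a crash before this point lets the broker
+        # redeliver the task.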
+ if self.task.acks_late: + self.acknowledge() + self._log_error(exc_info) + + def _log_error(self, einfo): + einfo.exception = get_pickled_exception(einfo.exception) + exception, traceback, exc_info, internal, sargs, skwargs = ( + safe_repr(einfo.exception), + safe_str(einfo.traceback), + einfo.exc_info, + einfo.internal, + safe_repr(self.args), + safe_repr(self.kwargs), + ) + format = self.error_msg + description = 'raised exception' + severity = logging.ERROR + self.send_event( + 'task-failed', exception=exception, traceback=traceback, + ) + + if internal: + if isinstance(einfo.exception, Ignore): + format = self.ignored_msg + description = 'ignored' + severity = logging.INFO + exc_info = None + self.acknowledge() + else: + format = self.internal_error_msg + description = 'INTERNAL ERROR' + severity = logging.CRITICAL + + context = { + 'hostname': self.hostname, + 'id': self.id, + 'name': self.name, + 'exc': exception, + 'traceback': traceback, + 'args': sargs, + 'kwargs': skwargs, + 'description': description, + } + + logger.log(severity, format.strip(), context, + exc_info=exc_info, + extra={'data': {'id': self.id, + 'name': self.name, + 'args': sargs, + 'kwargs': skwargs, + 'hostname': self.hostname, + 'internal': internal}}) + + self.task.send_error_email(context, einfo.exception) + + def acknowledge(self): + """Acknowledge task.""" + if not self.acknowledged: + self.on_ack(logger, self.connection_errors) + self.acknowledged = True + + def repr_result(self, result, maxlen=46): + # 46 is the length needed to fit + # 'the quick brown fox jumps over the lazy dog' :) + return truncate(safe_repr(result), maxlen) + + def info(self, safe=False): + return {'id': self.id, + 'name': self.name, + 'args': self.args if safe else safe_repr(self.args), + 'kwargs': self.kwargs if safe else safe_repr(self.kwargs), + 'hostname': self.hostname, + 'time_start': self.time_start, + 'acknowledged': self.acknowledged, + 'delivery_info': self.delivery_info, + 'worker_pid': self.worker_pid} + + def __str__(self): + return '%s[%s]%s%s' % ( + self.name, self.id, + ' eta:[%s]' % (self.eta, ) if self.eta else '', + ' expires:[%s]' % (self.expires, ) if self.expires else '') + shortinfo = __str__ + + def __repr__(self): + return '<%s %s: %s>' % ( + type(self).__name__, self.id, + reprcall(self.name, self.args, self.kwargs), + ) + + @property + def tzlocal(self): + if self._tzlocal is None: + self._tzlocal = self.app.conf.CELERY_TIMEZONE + return self._tzlocal + + @property + def store_errors(self): + return (not self.task.ignore_result + or self.task.store_errors_even_if_ignored) + + def _compat_get_task_id(self): + return self.id + + def _compat_set_task_id(self, value): + self.id = value + task_id = property(_compat_get_task_id, _compat_set_task_id) + + def _compat_get_task_name(self): + return self.name + + def _compat_set_task_name(self, value): + self.name = value + task_name = property(_compat_get_task_name, _compat_set_task_name) + + +class TaskRequest(Request): + + def __init__(self, name, id, args=(), kwargs={}, + eta=None, expires=None, **options): + """Compatibility class.""" + + super(TaskRequest, self).__init__({ + 'task': name, 'id': id, 'args': args, + 'kwargs': kwargs, 'eta': eta, + 'expires': expires}, **options) diff --git a/awx/lib/site-packages/celery/worker/mediator.py b/awx/lib/site-packages/celery/worker/mediator.py new file mode 100644 index 0000000000..b467b71c74 --- /dev/null +++ b/awx/lib/site-packages/celery/worker/mediator.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +""" + 
celery.worker.mediator + ~~~~~~~~~~~~~~~~~~~~~~ + + The mediator is an internal thread that moves tasks + from an internal :class:`Queue` to the worker pool. + + This is only used if rate limits are enabled, as it moves + messages from the rate limited queue (which holds tasks + that are allowed to be processed) to the pool. Disabling + rate limits will also disable this machinery, + and can improve performance. + +""" +from __future__ import absolute_import + +import logging + +from Queue import Empty + +from celery.app import app_or_default +from celery.utils.threads import bgThread +from celery.utils.log import get_logger + +from .bootsteps import StartStopComponent + +logger = get_logger(__name__) + + +class WorkerComponent(StartStopComponent): + name = 'worker.mediator' + requires = ('pool', 'queues', ) + + def __init__(self, w, **kwargs): + w.mediator = None + + def include_if(self, w): + return w.start_mediator and not w.use_eventloop + + def create(self, w): + m = w.mediator = self.instantiate(w.mediator_cls, w.ready_queue, + app=w.app, callback=w.process_task) + return m + + +class Mediator(bgThread): + """Mediator thread.""" + + #: The task queue, a :class:`~Queue.Queue` instance. + ready_queue = None + + #: Callback called when a task is obtained. + callback = None + + def __init__(self, ready_queue, callback, app=None, **kw): + self.app = app_or_default(app) + self.ready_queue = ready_queue + self.callback = callback + self._does_debug = logger.isEnabledFor(logging.DEBUG) + super(Mediator, self).__init__() + + def body(self): + try: + task = self.ready_queue.get(timeout=1.0) + except Empty: + return + + if self._does_debug: + logger.debug('Mediator: Running callback for task: %s[%s]', + task.name, task.id) + + try: + self.callback(task) + except Exception, exc: + logger.error('Mediator callback raised exception %r', + exc, exc_info=True, + extra={'data': {'id': task.id, + 'name': task.name, + 'hostname': task.hostname}}) diff --git a/awx/lib/site-packages/celery/worker/state.py b/awx/lib/site-packages/celery/worker/state.py new file mode 100644 index 0000000000..10fe53b303 --- /dev/null +++ b/awx/lib/site-packages/celery/worker/state.py @@ -0,0 +1,177 @@ +# -*- coding: utf-8 -*- +""" + celery.worker.state + ~~~~~~~~~~~~~~~~~~~ + + Internal worker state (global) + + This includes the currently active and reserved tasks, + statistics, and revoked tasks. + +""" +from __future__ import absolute_import + +import os +import sys +import platform +import shelve + +from collections import defaultdict + +from kombu.serialization import pickle_protocol +from kombu.utils import cached_property + +from celery import __version__ +from celery.datastructures import LimitedSet + +#: Worker software/platform information. +SOFTWARE_INFO = {'sw_ident': 'py-celery', + 'sw_ver': __version__, + 'sw_sys': platform.system()} + +#: maximum number of revokes to keep in memory. +REVOKES_MAX = 10000 + +#: how many seconds a revoke will be active before +#: being expired when the max limit has been exceeded. +REVOKE_EXPIRES = 3600 + +#: set of all reserved :class:`~celery.worker.job.Request`'s. +reserved_requests = set() + +#: set of currently active :class:`~celery.worker.job.Request`'s. +active_requests = set() + +#: count of tasks executed by the worker, sorted by type. +total_count = defaultdict(int) + +#: the list of currently revoked tasks. Persistent if statedb set. +revoked = LimitedSet(maxlen=REVOKES_MAX, expires=REVOKE_EXPIRES) + +#: Updates global state when a task has been reserved. 
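+#: Note that the ``C_BENCH`` benchmarking block below rebinds this
+#: (and ``task_ready``) with instrumented wrappers when enabled.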
+task_reserved = reserved_requests.add + +should_stop = False +should_terminate = False + + +def task_accepted(request): + """Updates global state when a task has been accepted.""" + active_requests.add(request) + total_count[request.name] += 1 + + +def task_ready(request): + """Updates global state when a task is ready.""" + active_requests.discard(request) + reserved_requests.discard(request) + + +C_BENCH = os.environ.get('C_BENCH') or os.environ.get('CELERY_BENCH') +C_BENCH_EVERY = int(os.environ.get('C_BENCH_EVERY') or + os.environ.get('CELERY_BENCH_EVERY') or 1000) +if C_BENCH: # pragma: no cover + import atexit + + from time import time + from billiard import current_process + from celery.utils.debug import memdump, sample_mem + + all_count = 0 + bench_first = None + bench_start = None + bench_last = None + bench_every = C_BENCH_EVERY + bench_sample = [] + __reserved = task_reserved + __ready = task_ready + + if current_process()._name == 'MainProcess': + @atexit.register + def on_shutdown(): + if bench_first is not None and bench_last is not None: + print('- Time spent in benchmark: %r' % ( + bench_last - bench_first)) + print('- Avg: %s' % (sum(bench_sample) / len(bench_sample))) + memdump() + + def task_reserved(request): # noqa + global bench_start + global bench_first + now = None + if bench_start is None: + bench_start = now = time() + if bench_first is None: + bench_first = now + + return __reserved(request) + + def task_ready(request): # noqa + global all_count + global bench_start + global bench_last + all_count += 1 + if not all_count % bench_every: + now = time() + diff = now - bench_start + print('- Time spent processing %s tasks (since first ' + 'task received): ~%.4fs\n' % (bench_every, diff)) + sys.stdout.flush() + bench_start = bench_last = now + bench_sample.append(diff) + sample_mem() + return __ready(request) + + +class Persistent(object): + """This is the persistent data stored by the worker when + :option:`--statedb` is enabled. + + It currently only stores revoked task id's. + + """ + storage = shelve + protocol = pickle_protocol + _is_open = False + + def __init__(self, filename): + self.filename = filename + self._load() + + def save(self): + self.sync(self.db) + self.db.sync() + self.close() + + def merge(self, d): + saved = d.get('revoked') or LimitedSet() + if isinstance(saved, LimitedSet): + revoked.update(saved) + else: + # (pre 3.0.18) used to be stored as dict + for item in saved: + revoked.add(item) + return d + + def sync(self, d): + revoked.purge() + d['revoked'] = revoked + return d + + def open(self): + return self.storage.open( + self.filename, protocol=self.protocol, writeback=True, + ) + + def close(self): + if self._is_open: + self.db.close() + self._is_open = False + + def _load(self): + self.merge(self.db) + + @cached_property + def db(self): + self._is_open = True + return self.open() diff --git a/awx/lib/site-packages/celery/worker/strategy.py b/awx/lib/site-packages/celery/worker/strategy.py new file mode 100644 index 0000000000..4e4b7cefd3 --- /dev/null +++ b/awx/lib/site-packages/celery/worker/strategy.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +""" + celery.worker.strategy + ~~~~~~~~~~~~~~~~~~~~~~ + + Task execution strategy (optimization). 
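+
+    A strategy pre-binds everything a task type needs (app, hostname,
+    event dispatcher, connection error list) into a single closure, so
+    the per-message hot path avoids repeated attribute lookups.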
+ +""" +from __future__ import absolute_import + +from .job import Request + + +def default(task, app, consumer): + hostname = consumer.hostname + eventer = consumer.event_dispatcher + Req = Request + handle = consumer.on_task + connection_errors = consumer.connection_errors + + def task_message_handler(message, body, ack): + handle(Req(body, on_ack=ack, app=app, hostname=hostname, + eventer=eventer, task=task, + connection_errors=connection_errors, + delivery_info=message.delivery_info)) + return task_message_handler diff --git a/awx/lib/site-packages/dateutil/__init__.py b/awx/lib/site-packages/dateutil/__init__.py new file mode 100644 index 0000000000..0f91a31f6e --- /dev/null +++ b/awx/lib/site-packages/dateutil/__init__.py @@ -0,0 +1,10 @@ +# -*- coding: utf-8 -*- +""" +Copyright (c) 2003-2010 Gustavo Niemeyer + +This module offers extensions to the standard Python +datetime module. +""" +__author__ = "Tomi Pieviläinen " +__license__ = "Simplified BSD" +__version__ = "2.1" diff --git a/awx/lib/site-packages/dateutil/easter.py b/awx/lib/site-packages/dateutil/easter.py new file mode 100644 index 0000000000..d8a38844f9 --- /dev/null +++ b/awx/lib/site-packages/dateutil/easter.py @@ -0,0 +1,91 @@ +""" +Copyright (c) 2003-2007 Gustavo Niemeyer + +This module offers extensions to the standard Python +datetime module. +""" +__license__ = "Simplified BSD" + +import datetime + +__all__ = ["easter", "EASTER_JULIAN", "EASTER_ORTHODOX", "EASTER_WESTERN"] + +EASTER_JULIAN = 1 +EASTER_ORTHODOX = 2 +EASTER_WESTERN = 3 + +def easter(year, method=EASTER_WESTERN): + """ + This method was ported from the work done by GM Arts, + on top of the algorithm by Claus Tondering, which was + based in part on the algorithm of Ouding (1940), as + quoted in "Explanatory Supplement to the Astronomical + Almanac", P. Kenneth Seidelmann, editor. + + This algorithm implements three different easter + calculation methods: + + 1 - Original calculation in Julian calendar, valid in + dates after 326 AD + 2 - Original method, with date converted to Gregorian + calendar, valid in years 1583 to 4099 + 3 - Revised method, in Gregorian calendar, valid in + years 1583 to 4099 as well + + These methods are represented by the constants: + + EASTER_JULIAN = 1 + EASTER_ORTHODOX = 2 + EASTER_WESTERN = 3 + + The default method is method 3. 
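+
+    For example, the default Western method gives 23 April for the
+    year 2000::
+
+        >>> easter(2000)
+        datetime.date(2000, 4, 23)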
+ + More about the algorithm may be found at: + + http://users.chariot.net.au/~gmarts/eastalg.htm + + and + + http://www.tondering.dk/claus/calendar.html + + """ + + if not (1 <= method <= 3): + raise ValueError("invalid method") + + # g - Golden year - 1 + # c - Century + # h - (23 - Epact) mod 30 + # i - Number of days from March 21 to Paschal Full Moon + # j - Weekday for PFM (0=Sunday, etc) + # p - Number of days from March 21 to Sunday on or before PFM + # (-6 to 28 methods 1 & 3, to 56 for method 2) + # e - Extra days to add for method 2 (converting Julian + # date to Gregorian date) + + y = year + g = y % 19 + e = 0 + if method < 3: + # Old method + i = (19*g+15)%30 + j = (y+y//4+i)%7 + if method == 2: + # Extra dates to convert Julian to Gregorian date + e = 10 + if y > 1600: + e = e+y//100-16-(y//100-16)//4 + else: + # New method + c = y//100 + h = (c-c//4-(8*c+13)//25+19*g+15)%30 + i = h-(h//28)*(1-(h//28)*(29//(h+1))*((21-g)//11)) + j = (y+y//4+i+2-c+c//4)%7 + + # p can be from -6 to 56 corresponding to dates 22 March to 23 May + # (later dates apply to method 2, although 23 May never actually occurs) + p = i-j+e + d = 1+(p+27+(p+6)//40)%31 + m = 3+(p+26)//30 + return datetime.date(int(y), int(m), int(d)) + diff --git a/awx/lib/site-packages/dateutil/parser.py b/awx/lib/site-packages/dateutil/parser.py new file mode 100644 index 0000000000..a2604a35ba --- /dev/null +++ b/awx/lib/site-packages/dateutil/parser.py @@ -0,0 +1,909 @@ +# -*- coding:iso-8859-1 -*- +""" +Copyright (c) 2003-2007 Gustavo Niemeyer + +This module offers extensions to the standard Python +datetime module. +""" +from __future__ import unicode_literals +__license__ = "Simplified BSD" + + +import datetime +import string +import time +import sys +import os +import collections + +try: + from io import StringIO +except ImportError: + from io import StringIO + +from six import text_type, binary_type, integer_types + +from . import relativedelta +from . 
import tz
+
+
+__all__ = ["parse", "parserinfo"]
+
+
+# Some pointers:
+#
+# http://www.cl.cam.ac.uk/~mgk25/iso-time.html
+# http://www.iso.ch/iso/en/prods-services/popstds/datesandtime.html
+# http://www.w3.org/TR/NOTE-datetime
+# http://ringmaster.arc.nasa.gov/tools/time_formats.html
+# http://search.cpan.org/author/MUIR/Time-modules-2003.0211/lib/Time/ParseDate.pm
+# http://stein.cshl.org/jade/distrib/docs/java.text.SimpleDateFormat.html
+
+
+class _timelex(object):
+
+    def __init__(self, instream):
+        if isinstance(instream, text_type):
+            instream = StringIO(instream)
+        self.instream = instream
+        self.wordchars = ('abcdfeghijklmnopqrstuvwxyz'
+                          'ABCDEFGHIJKLMNOPQRSTUVWXYZ_'
+                          'ßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿ'
+                          'ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞ')
+        self.numchars = '0123456789'
+        self.whitespace = ' \t\r\n'
+        self.charstack = []
+        self.tokenstack = []
+        self.eof = False
+
+    def get_token(self):
+        if self.tokenstack:
+            return self.tokenstack.pop(0)
+        seenletters = False
+        token = None
+        state = None
+        wordchars = self.wordchars
+        numchars = self.numchars
+        whitespace = self.whitespace
+        while not self.eof:
+            if self.charstack:
+                nextchar = self.charstack.pop(0)
+            else:
+                nextchar = self.instream.read(1)
+                while nextchar == '\x00':
+                    nextchar = self.instream.read(1)
+            if not nextchar:
+                self.eof = True
+                break
+            elif not state:
+                token = nextchar
+                if nextchar in wordchars:
+                    state = 'a'
+                elif nextchar in numchars:
+                    state = '0'
+                elif nextchar in whitespace:
+                    token = ' '
+                    break  # emit token
+                else:
+                    break  # emit token
+            elif state == 'a':
+                seenletters = True
+                if nextchar in wordchars:
+                    token += nextchar
+                elif nextchar == '.':
+                    token += nextchar
+                    state = 'a.'
+                else:
+                    self.charstack.append(nextchar)
+                    break  # emit token
+            elif state == '0':
+                if nextchar in numchars:
+                    token += nextchar
+                elif nextchar == '.':
+                    token += nextchar
+                    state = '0.'
+                else:
+                    self.charstack.append(nextchar)
+                    break  # emit token
+            elif state == 'a.':
+                seenletters = True
+                if nextchar == '.' or nextchar in wordchars:
+                    token += nextchar
+                elif nextchar in numchars and token[-1] == '.':
+                    token += nextchar
+                    state = '0.'
+                else:
+                    self.charstack.append(nextchar)
+                    break  # emit token
+            elif state == '0.':
+                if nextchar == '.' or nextchar in numchars:
+                    token += nextchar
+                elif nextchar in wordchars and token[-1] == '.':
+                    token += nextchar
+                    state = 'a.'
+ else: + self.charstack.append(nextchar) + break # emit token + if (state in ('a.', '0.') and + (seenletters or token.count('.') > 1 or token[-1] == '.')): + l = token.split('.') + token = l[0] + for tok in l[1:]: + self.tokenstack.append('.') + if tok: + self.tokenstack.append(tok) + return token + + def __iter__(self): + return self + + def __next__(self): + token = self.get_token() + if token is None: + raise StopIteration + return token + + def next(self): + return self.__next__() # Python 2.x support + + def split(cls, s): + return list(cls(s)) + split = classmethod(split) + + +class _resultbase(object): + + def __init__(self): + for attr in self.__slots__: + setattr(self, attr, None) + + def _repr(self, classname): + l = [] + for attr in self.__slots__: + value = getattr(self, attr) + if value is not None: + l.append("%s=%s" % (attr, repr(value))) + return "%s(%s)" % (classname, ", ".join(l)) + + def __repr__(self): + return self._repr(self.__class__.__name__) + + +class parserinfo(object): + + # m from a.m/p.m, t from ISO T separator + JUMP = [" ", ".", ",", ";", "-", "/", "'", + "at", "on", "and", "ad", "m", "t", "of", + "st", "nd", "rd", "th"] + + WEEKDAYS = [("Mon", "Monday"), + ("Tue", "Tuesday"), + ("Wed", "Wednesday"), + ("Thu", "Thursday"), + ("Fri", "Friday"), + ("Sat", "Saturday"), + ("Sun", "Sunday")] + MONTHS = [("Jan", "January"), + ("Feb", "February"), + ("Mar", "March"), + ("Apr", "April"), + ("May", "May"), + ("Jun", "June"), + ("Jul", "July"), + ("Aug", "August"), + ("Sep", "Sept", "September"), + ("Oct", "October"), + ("Nov", "November"), + ("Dec", "December")] + HMS = [("h", "hour", "hours"), + ("m", "minute", "minutes"), + ("s", "second", "seconds")] + AMPM = [("am", "a"), + ("pm", "p")] + UTCZONE = ["UTC", "GMT", "Z"] + PERTAIN = ["of"] + TZOFFSET = {} + + def __init__(self, dayfirst=False, yearfirst=False): + self._jump = self._convert(self.JUMP) + self._weekdays = self._convert(self.WEEKDAYS) + self._months = self._convert(self.MONTHS) + self._hms = self._convert(self.HMS) + self._ampm = self._convert(self.AMPM) + self._utczone = self._convert(self.UTCZONE) + self._pertain = self._convert(self.PERTAIN) + + self.dayfirst = dayfirst + self.yearfirst = yearfirst + + self._year = time.localtime().tm_year + self._century = self._year//100*100 + + def _convert(self, lst): + dct = {} + for i in range(len(lst)): + v = lst[i] + if isinstance(v, tuple): + for v in v: + dct[v.lower()] = i + else: + dct[v.lower()] = i + return dct + + def jump(self, name): + return name.lower() in self._jump + + def weekday(self, name): + if len(name) >= 3: + try: + return self._weekdays[name.lower()] + except KeyError: + pass + return None + + def month(self, name): + if len(name) >= 3: + try: + return self._months[name.lower()]+1 + except KeyError: + pass + return None + + def hms(self, name): + try: + return self._hms[name.lower()] + except KeyError: + return None + + def ampm(self, name): + try: + return self._ampm[name.lower()] + except KeyError: + return None + + def pertain(self, name): + return name.lower() in self._pertain + + def utczone(self, name): + return name.lower() in self._utczone + + def tzoffset(self, name): + if name in self._utczone: + return 0 + return self.TZOFFSET.get(name) + + def convertyear(self, year): + if year < 100: + year += self._century + if abs(year-self._year) >= 50: + if year < self._year: + year += 100 + else: + year -= 100 + return year + + def validate(self, res): + # move to info + if res.year is not None: + res.year = self.convertyear(res.year) + 
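+        # Normalize the various spellings of UTC: a zero offset with no
+        # zone name, or the ISO-8601 'Z' designator, become tzname "UTC"
+        # with a zero offset.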
if res.tzoffset == 0 and not res.tzname or res.tzname == 'Z': + res.tzname = "UTC" + res.tzoffset = 0 + elif res.tzoffset != 0 and res.tzname and self.utczone(res.tzname): + res.tzoffset = 0 + return True + + +class parser(object): + + def __init__(self, info=None): + self.info = info or parserinfo() + + def parse(self, timestr, default=None, + ignoretz=False, tzinfos=None, + **kwargs): + if not default: + default = datetime.datetime.now().replace(hour=0, minute=0, + second=0, microsecond=0) + res = self._parse(timestr, **kwargs) + if res is None: + raise ValueError("unknown string format") + repl = {} + for attr in ["year", "month", "day", "hour", + "minute", "second", "microsecond"]: + value = getattr(res, attr) + if value is not None: + repl[attr] = value + ret = default.replace(**repl) + if res.weekday is not None and not res.day: + ret = ret+relativedelta.relativedelta(weekday=res.weekday) + if not ignoretz: + if isinstance(tzinfos, collections.Callable) or tzinfos and res.tzname in tzinfos: + if isinstance(tzinfos, collections.Callable): + tzdata = tzinfos(res.tzname, res.tzoffset) + else: + tzdata = tzinfos.get(res.tzname) + if isinstance(tzdata, datetime.tzinfo): + tzinfo = tzdata + elif isinstance(tzdata, text_type): + tzinfo = tz.tzstr(tzdata) + elif isinstance(tzdata, integer_types): + tzinfo = tz.tzoffset(res.tzname, tzdata) + else: + raise ValueError("offset must be tzinfo subclass, " \ + "tz string, or int offset") + ret = ret.replace(tzinfo=tzinfo) + elif res.tzname and res.tzname in time.tzname: + ret = ret.replace(tzinfo=tz.tzlocal()) + elif res.tzoffset == 0: + ret = ret.replace(tzinfo=tz.tzutc()) + elif res.tzoffset: + ret = ret.replace(tzinfo=tz.tzoffset(res.tzname, res.tzoffset)) + return ret + + class _result(_resultbase): + __slots__ = ["year", "month", "day", "weekday", + "hour", "minute", "second", "microsecond", + "tzname", "tzoffset"] + + def _parse(self, timestr, dayfirst=None, yearfirst=None, fuzzy=False): + info = self.info + if dayfirst is None: + dayfirst = info.dayfirst + if yearfirst is None: + yearfirst = info.yearfirst + res = self._result() + l = _timelex.split(timestr) + try: + + # year/month/day list + ymd = [] + + # Index of the month string in ymd + mstridx = -1 + + len_l = len(l) + i = 0 + while i < len_l: + + # Check if it's a number + try: + value_repr = l[i] + value = float(value_repr) + except ValueError: + value = None + + if value is not None: + # Token is a number + len_li = len(l[i]) + i += 1 + if (len(ymd) == 3 and len_li in (2, 4) + and (i >= len_l or (l[i] != ':' and + info.hms(l[i]) is None))): + # 19990101T23[59] + s = l[i-1] + res.hour = int(s[:2]) + if len_li == 4: + res.minute = int(s[2:]) + elif len_li == 6 or (len_li > 6 and l[i-1].find('.') == 6): + # YYMMDD or HHMMSS[.ss] + s = l[i-1] + if not ymd and l[i-1].find('.') == -1: + ymd.append(info.convertyear(int(s[:2]))) + ymd.append(int(s[2:4])) + ymd.append(int(s[4:])) + else: + # 19990101T235959[.59] + res.hour = int(s[:2]) + res.minute = int(s[2:4]) + res.second, res.microsecond = _parsems(s[4:]) + elif len_li == 8: + # YYYYMMDD + s = l[i-1] + ymd.append(int(s[:4])) + ymd.append(int(s[4:6])) + ymd.append(int(s[6:])) + elif len_li in (12, 14): + # YYYYMMDDhhmm[ss] + s = l[i-1] + ymd.append(int(s[:4])) + ymd.append(int(s[4:6])) + ymd.append(int(s[6:8])) + res.hour = int(s[8:10]) + res.minute = int(s[10:12]) + if len_li == 14: + res.second = int(s[12:]) + elif ((i < len_l and info.hms(l[i]) is not None) or + (i+1 < len_l and l[i] == ' ' and + info.hms(l[i+1]) is not None)): + # HH[ 
]h or MM[ ]m or SS[.ss][ ]s
+                    if l[i] == ' ':
+                        i += 1
+                    idx = info.hms(l[i])
+                    while True:
+                        if idx == 0:
+                            res.hour = int(value)
+                            if value%1:
+                                res.minute = int(60*(value%1))
+                        elif idx == 1:
+                            res.minute = int(value)
+                            if value%1:
+                                res.second = int(60*(value%1))
+                        elif idx == 2:
+                            res.second, res.microsecond = \
+                                _parsems(value_repr)
+                        i += 1
+                        if i >= len_l or idx == 2:
+                            break
+                        # 12h00
+                        try:
+                            value_repr = l[i]
+                            value = float(value_repr)
+                        except ValueError:
+                            break
+                        else:
+                            i += 1
+                            idx += 1
+                            if i < len_l:
+                                newidx = info.hms(l[i])
+                                if newidx is not None:
+                                    idx = newidx
+                elif i == len_l and l[i-2] == ' ' and info.hms(l[i-3]) is not None:
+                    # X h MM or X m SS
+                    idx = info.hms(l[i-3]) + 1
+                    if idx == 1:
+                        res.minute = int(value)
+                        if value%1:
+                            res.second = int(60*(value%1))
+                    elif idx == 2:
+                        res.second, res.microsecond = \
+                            _parsems(value_repr)
+                        i += 1
+                elif i+1 < len_l and l[i] == ':':
+                    # HH:MM[:SS[.ss]]
+                    res.hour = int(value)
+                    i += 1
+                    value = float(l[i])
+                    res.minute = int(value)
+                    if value%1:
+                        res.second = int(60*(value%1))
+                    i += 1
+                    if i < len_l and l[i] == ':':
+                        res.second, res.microsecond = _parsems(l[i+1])
+                        i += 2
+                elif i < len_l and l[i] in ('-', '/', '.'):
+                    sep = l[i]
+                    ymd.append(int(value))
+                    i += 1
+                    if i < len_l and not info.jump(l[i]):
+                        try:
+                            # 01-01[-01]
+                            ymd.append(int(l[i]))
+                        except ValueError:
+                            # 01-Jan[-01]
+                            value = info.month(l[i])
+                            if value is not None:
+                                ymd.append(value)
+                                assert mstridx == -1
+                                mstridx = len(ymd)-1
+                            else:
+                                return None
+                        i += 1
+                        if i < len_l and l[i] == sep:
+                            # We have three members
+                            i += 1
+                            value = info.month(l[i])
+                            if value is not None:
+                                ymd.append(value)
+                                assert mstridx == -1
+                                mstridx = len(ymd)-1
+                            else:
+                                ymd.append(int(l[i]))
+                            i += 1
+                elif i >= len_l or info.jump(l[i]):
+                    if i+1 < len_l and info.ampm(l[i+1]) is not None:
+                        # 12 am
+                        res.hour = int(value)
+                        if res.hour < 12 and info.ampm(l[i+1]) == 1:
+                            res.hour += 12
+                        elif res.hour == 12 and info.ampm(l[i+1]) == 0:
+                            res.hour = 0
+                        i += 1
+                    else:
+                        # Year, month or day
+                        ymd.append(int(value))
+                    i += 1
+                elif info.ampm(l[i]) is not None:
+                    # 12am
+                    res.hour = int(value)
+                    if res.hour < 12 and info.ampm(l[i]) == 1:
+                        res.hour += 12
+                    elif res.hour == 12 and info.ampm(l[i]) == 0:
+                        res.hour = 0
+                    i += 1
+                elif not fuzzy:
+                    return None
+                else:
+                    i += 1
+                continue
+
+            # Check weekday
+            value = info.weekday(l[i])
+            if value is not None:
+                res.weekday = value
+                i += 1
+                continue
+
+            # Check month name
+            value = info.month(l[i])
+            if value is not None:
+                ymd.append(value)
+                assert mstridx == -1
+                mstridx = len(ymd)-1
+                i += 1
+                if i < len_l:
+                    if l[i] in ('-', '/'):
+                        # Jan-01[-99]
+                        sep = l[i]
+                        i += 1
+                        ymd.append(int(l[i]))
+                        i += 1
+                        if i < len_l and l[i] == sep:
+                            # Jan-01-99
+                            i += 1
+                            ymd.append(int(l[i]))
+                            i += 1
+                    elif (i+3 < len_l and l[i] == l[i+2] == ' '
+                          and info.pertain(l[i+1])):
+                        # Jan of 01
+                        # In this case, 01 is clearly year
+                        try:
+                            value = int(l[i+3])
+                        except ValueError:
+                            # Wrong guess
+                            pass
+                        else:
+                            # Convert it here to become unambiguous
+                            ymd.append(info.convertyear(value))
+                        i += 4
+                continue
+
+            # Check am/pm
+            value = info.ampm(l[i])
+            if value is not None:
+                if value == 1 and res.hour < 12:
+                    res.hour += 12
+                elif value == 0 and res.hour == 12:
+                    res.hour = 0
+                i += 1
+                continue
+
+            # Check for a timezone name
+            if (res.hour is not None and len(l[i]) <= 5 and
+                res.tzname is None and res.tzoffset is None and
+                not [x for x in l[i] if x not in string.ascii_uppercase]):
+                res.tzname = l[i]
+                res.tzoffset = info.tzoffset(res.tzname)
+                i += 1
+
+                # Check for something like GMT+3, or BRST+3. Notice
+                # that it doesn't mean "I am 3 hours after GMT", but
+                # "my time +3 is GMT". If found, we reverse the
+                # logic so that timezone parsing code will get it
+                # right.
+                if i < len_l and l[i] in ('+', '-'):
+                    l[i] = ('+', '-')[l[i] == '+']
+                    res.tzoffset = None
+                    if info.utczone(res.tzname):
+                        # With something like GMT+3, the timezone
+                        # is *not* GMT.
+                        res.tzname = None
+
+                continue
+
+            # Check for a numbered timezone
+            if res.hour is not None and l[i] in ('+', '-'):
+                signal = (-1, 1)[l[i] == '+']
+                i += 1
+                len_li = len(l[i])
+                if len_li == 4:
+                    # -0300
+                    res.tzoffset = int(l[i][:2])*3600+int(l[i][2:])*60
+                elif i+1 < len_l and l[i+1] == ':':
+                    # -03:00
+                    res.tzoffset = int(l[i])*3600+int(l[i+2])*60
+                    i += 2
+                elif len_li <= 2:
+                    # -[0]3
+                    res.tzoffset = int(l[i][:2])*3600
+                else:
+                    return None
+                i += 1
+                res.tzoffset *= signal
+
+            # Look for a timezone name between parentheses
+            if (i+3 < len_l and
+                info.jump(l[i]) and l[i+1] == '(' and l[i+3] == ')' and
+                3 <= len(l[i+2]) <= 5 and
+                not [x for x in l[i+2]
+                     if x not in string.ascii_uppercase]):
+                # -0300 (BRST)
+                res.tzname = l[i+2]
+                i += 4
+                continue
+
+            # Check jumps
+            if not (info.jump(l[i]) or fuzzy):
+                return None
+
+            i += 1
+
+            # Process year/month/day
+            len_ymd = len(ymd)
+            if len_ymd > 3:
+                # More than three members!?
+                return None
+            elif len_ymd == 1 or (mstridx != -1 and len_ymd == 2):
+                # One member, or two members with a month string
+                if mstridx != -1:
+                    res.month = ymd[mstridx]
+                    del ymd[mstridx]
+                if len_ymd > 1 or mstridx == -1:
+                    if ymd[0] > 31:
+                        res.year = ymd[0]
+                    else:
+                        res.day = ymd[0]
+            elif len_ymd == 2:
+                # Two members with numbers
+                if ymd[0] > 31:
+                    # 99-01
+                    res.year, res.month = ymd
+                elif ymd[1] > 31:
+                    # 01-99
+                    res.month, res.year = ymd
+                elif dayfirst and ymd[1] <= 12:
+                    # 13-01
+                    res.day, res.month = ymd
+                else:
+                    # 01-13
+                    res.month, res.day = ymd
+            if len_ymd == 3:
+                # Three members
+                if mstridx == 0:
+                    res.month, res.day, res.year = ymd
+                elif mstridx == 1:
+                    if ymd[0] > 31 or (yearfirst and ymd[2] <= 31):
+                        # 99-Jan-01
+                        res.year, res.month, res.day = ymd
+                    else:
+                        # 01-Jan-01
+                        # Give precedence to day-first, since
+                        # two-digit years are usually hand-written.
+                        res.day, res.month, res.year = ymd
+                elif mstridx == 2:
+                    # WTF!?
+                    if ymd[1] > 31:
+                        # 01-99-Jan
+                        res.day, res.year, res.month = ymd
+                    else:
+                        # 99-01-Jan
+                        res.year, res.day, res.month = ymd
+                else:
+                    if ymd[0] > 31 or \
+                       (yearfirst and ymd[1] <= 12 and ymd[2] <= 31):
+                        # 99-01-01
+                        res.year, res.month, res.day = ymd
+                    elif ymd[0] > 12 or (dayfirst and ymd[1] <= 12):
+                        # 13-01-01
+                        res.day, res.month, res.year = ymd
+                    else:
+                        # 01-13-01
+                        res.month, res.day, res.year = ymd
+
+        except (IndexError, ValueError, AssertionError):
+            return None
+
+        if not info.validate(res):
+            return None
+        return res
+
+DEFAULTPARSER = parser()
+def parse(timestr, parserinfo=None, **kwargs):
+    # Python 2.x support: datetimes return their string representation as
+    # bytes in 2.x and unicode in 3.x, so it's reasonable to expect that
+    # the parser will get both kinds. Internally we use unicode only.
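+    # For example (illustrative values, not taken from the diff), both
+    # input kinds parse to the same naive datetime:
+    #   parse(b"2013-01-15") == parse("15.1.2013", dayfirst=True)
+    #   --> datetime.datetime(2013, 1, 15, 0, 0)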
+ if isinstance(timestr, binary_type): + timestr = timestr.decode() + if parserinfo: + return parser(parserinfo).parse(timestr, **kwargs) + else: + return DEFAULTPARSER.parse(timestr, **kwargs) + + +class _tzparser(object): + + class _result(_resultbase): + + __slots__ = ["stdabbr", "stdoffset", "dstabbr", "dstoffset", + "start", "end"] + + class _attr(_resultbase): + __slots__ = ["month", "week", "weekday", + "yday", "jyday", "day", "time"] + + def __repr__(self): + return self._repr("") + + def __init__(self): + _resultbase.__init__(self) + self.start = self._attr() + self.end = self._attr() + + def parse(self, tzstr): + res = self._result() + l = _timelex.split(tzstr) + try: + + len_l = len(l) + + i = 0 + while i < len_l: + # BRST+3[BRDT[+2]] + j = i + while j < len_l and not [x for x in l[j] + if x in "0123456789:,-+"]: + j += 1 + if j != i: + if not res.stdabbr: + offattr = "stdoffset" + res.stdabbr = "".join(l[i:j]) + else: + offattr = "dstoffset" + res.dstabbr = "".join(l[i:j]) + i = j + if (i < len_l and + (l[i] in ('+', '-') or l[i][0] in "0123456789")): + if l[i] in ('+', '-'): + # Yes, that's right. See the TZ variable + # documentation. + signal = (1, -1)[l[i] == '+'] + i += 1 + else: + signal = -1 + len_li = len(l[i]) + if len_li == 4: + # -0300 + setattr(res, offattr, + (int(l[i][:2])*3600+int(l[i][2:])*60)*signal) + elif i+1 < len_l and l[i+1] == ':': + # -03:00 + setattr(res, offattr, + (int(l[i])*3600+int(l[i+2])*60)*signal) + i += 2 + elif len_li <= 2: + # -[0]3 + setattr(res, offattr, + int(l[i][:2])*3600*signal) + else: + return None + i += 1 + if res.dstabbr: + break + else: + break + + if i < len_l: + for j in range(i, len_l): + if l[j] == ';': l[j] = ',' + + assert l[i] == ',' + + i += 1 + + if i >= len_l: + pass + elif (8 <= l.count(',') <= 9 and + not [y for x in l[i:] if x != ',' + for y in x if y not in "0123456789"]): + # GMT0BST,3,0,30,3600,10,0,26,7200[,3600] + for x in (res.start, res.end): + x.month = int(l[i]) + i += 2 + if l[i] == '-': + value = int(l[i+1])*-1 + i += 1 + else: + value = int(l[i]) + i += 2 + if value: + x.week = value + x.weekday = (int(l[i])-1)%7 + else: + x.day = int(l[i]) + i += 2 + x.time = int(l[i]) + i += 2 + if i < len_l: + if l[i] in ('-', '+'): + signal = (-1, 1)[l[i] == "+"] + i += 1 + else: + signal = 1 + res.dstoffset = (res.stdoffset+int(l[i]))*signal + elif (l.count(',') == 2 and l[i:].count('/') <= 2 and + not [y for x in l[i:] if x not in (',', '/', 'J', 'M', + '.', '-', ':') + for y in x if y not in "0123456789"]): + for x in (res.start, res.end): + if l[i] == 'J': + # non-leap year day (1 based) + i += 1 + x.jyday = int(l[i]) + elif l[i] == 'M': + # month[-.]week[-.]weekday + i += 1 + x.month = int(l[i]) + i += 1 + assert l[i] in ('-', '.') + i += 1 + x.week = int(l[i]) + if x.week == 5: + x.week = -1 + i += 1 + assert l[i] in ('-', '.') + i += 1 + x.weekday = (int(l[i])-1)%7 + else: + # year day (zero based) + x.yday = int(l[i])+1 + + i += 1 + + if i < len_l and l[i] == '/': + i += 1 + # start time + len_li = len(l[i]) + if len_li == 4: + # -0300 + x.time = (int(l[i][:2])*3600+int(l[i][2:])*60) + elif i+1 < len_l and l[i+1] == ':': + # -03:00 + x.time = int(l[i])*3600+int(l[i+2])*60 + i += 2 + if i+1 < len_l and l[i+1] == ':': + i += 2 + x.time += int(l[i]) + elif len_li <= 2: + # -[0]3 + x.time = (int(l[i][:2])*3600) + else: + return None + i += 1 + + assert i == len_l or l[i] == ',' + + i += 1 + + assert i >= len_l + + except (IndexError, ValueError, AssertionError): + return None + + return res + + +DEFAULTTZPARSER = 
_tzparser()
+def _parsetz(tzstr):
+    return DEFAULTTZPARSER.parse(tzstr)
+
+
+def _parsems(value):
+    """Parse a I[.F] seconds value into (seconds, microseconds)."""
+    if "." not in value:
+        return int(value), 0
+    else:
+        i, f = value.split(".")
+        return int(i), int(f.ljust(6, "0")[:6])
+
+
+# vim:ts=4:sw=4:et
diff --git a/awx/lib/site-packages/dateutil/relativedelta.py b/awx/lib/site-packages/dateutil/relativedelta.py
new file mode 100644
index 0000000000..4393bcbcde
--- /dev/null
+++ b/awx/lib/site-packages/dateutil/relativedelta.py
@@ -0,0 +1,436 @@
+"""
+Copyright (c) 2003-2010 Gustavo Niemeyer
+
+This module offers extensions to the standard Python
+datetime module.
+"""
+__license__ = "Simplified BSD"
+
+import datetime
+import calendar
+
+from six import integer_types
+
+__all__ = ["relativedelta", "MO", "TU", "WE", "TH", "FR", "SA", "SU"]
+
+class weekday(object):
+    __slots__ = ["weekday", "n"]
+
+    def __init__(self, weekday, n=None):
+        self.weekday = weekday
+        self.n = n
+
+    def __call__(self, n):
+        if n == self.n:
+            return self
+        else:
+            return self.__class__(self.weekday, n)
+
+    def __eq__(self, other):
+        try:
+            if self.weekday != other.weekday or self.n != other.n:
+                return False
+        except AttributeError:
+            return False
+        return True
+
+    def __repr__(self):
+        s = ("MO", "TU", "WE", "TH", "FR", "SA", "SU")[self.weekday]
+        if not self.n:
+            return s
+        else:
+            return "%s(%+d)" % (s, self.n)
+
+MO, TU, WE, TH, FR, SA, SU = weekdays = tuple([weekday(x) for x in range(7)])
+
+class relativedelta(object):
+    """
+The relativedelta type is based on the specification of the excellent
+work done by M.-A. Lemburg in his mx.DateTime extension. However,
+notice that this type does *NOT* implement the same algorithm as
+his work. Do *NOT* expect it to behave like mx.DateTime's counterpart.
+
+There are two different ways to build a relativedelta instance. The
+first one is passing it two date/datetime classes:
+
+    relativedelta(datetime1, datetime2)
+
+And the other way is to use the following keyword arguments:
+
+    year, month, day, hour, minute, second, microsecond:
+        Absolute information.
+
+    years, months, weeks, days, hours, minutes, seconds, microseconds:
+        Relative information, may be negative.
+
+    weekday:
+        One of the weekday instances (MO, TU, etc). These instances may
+        receive a parameter N, specifying the Nth weekday, which could
+        be positive or negative (like MO(+1) or MO(-2)). Not specifying
+        it is the same as specifying +1. You can also use an integer,
+        where 0=MO.
+
+    leapdays:
+        Will add given days to the date found, if year is a leap
+        year, and the date found is after the 28th of February.
+
+    yearday, nlyearday:
+        Set the yearday or the non-leap year day (jump leap days).
+        These are converted to day/month/leapdays information.
+
+Here is the behavior of operations with relativedelta:
+
+1) Calculate the absolute year, using the 'year' argument, or the
+   original datetime year, if the argument is not present.
+
+2) Add the relative 'years' argument to the absolute year.
+
+3) Do steps 1 and 2 for month/months.
+
+4) Calculate the absolute day, using the 'day' argument, or the
+   original datetime day, if the argument is not present. Then,
+   subtract from the day until it fits in the year and month
+   found after their operations.
+
+5) Add the relative 'days' argument to the absolute day. Notice
+   that the 'weeks' argument is multiplied by 7 and added to
+   'days'.
+
+6) Do steps 1 and 2 for hour/hours, minute/minutes, second/seconds,
+   microsecond/microseconds.
+
+7) If the 'weekday' argument is present, calculate the weekday,
+   with the given (wday, nth) tuple. wday is the index of the
+   weekday (0-6, 0=Mon), and nth is the number of weeks to add
+   forward or backward, depending on its sign. Notice that if
+   the calculated date is already Monday, for example, using
+   (0, 1) or (0, -1) won't change the day.
+    """
+
+    def __init__(self, dt1=None, dt2=None,
+                 years=0, months=0, days=0, leapdays=0, weeks=0,
+                 hours=0, minutes=0, seconds=0, microseconds=0,
+                 year=None, month=None, day=None, weekday=None,
+                 yearday=None, nlyearday=None,
+                 hour=None, minute=None, second=None, microsecond=None):
+        if dt1 and dt2:
+            if (not isinstance(dt1, datetime.date)) or (not isinstance(dt2, datetime.date)):
+                raise TypeError("relativedelta only diffs datetime/date")
+            if not type(dt1) == type(dt2): #isinstance(dt1, type(dt2)):
+                if not isinstance(dt1, datetime.datetime):
+                    dt1 = datetime.datetime.fromordinal(dt1.toordinal())
+                elif not isinstance(dt2, datetime.datetime):
+                    dt2 = datetime.datetime.fromordinal(dt2.toordinal())
+            self.years = 0
+            self.months = 0
+            self.days = 0
+            self.leapdays = 0
+            self.hours = 0
+            self.minutes = 0
+            self.seconds = 0
+            self.microseconds = 0
+            self.year = None
+            self.month = None
+            self.day = None
+            self.weekday = None
+            self.hour = None
+            self.minute = None
+            self.second = None
+            self.microsecond = None
+            self._has_time = 0
+
+            months = (dt1.year*12+dt1.month)-(dt2.year*12+dt2.month)
+            self._set_months(months)
+            dtm = self.__radd__(dt2)
+            if dt1 < dt2:
+                while dt1 > dtm:
+                    months += 1
+                    self._set_months(months)
+                    dtm = self.__radd__(dt2)
+            else:
+                while dt1 < dtm:
+                    months -= 1
+                    self._set_months(months)
+                    dtm = self.__radd__(dt2)
+            delta = dt1 - dtm
+            self.seconds = delta.seconds+delta.days*86400
+            self.microseconds = delta.microseconds
+        else:
+            self.years = years
+            self.months = months
+            self.days = days+weeks*7
+            self.leapdays = leapdays
+            self.hours = hours
+            self.minutes = minutes
+            self.seconds = seconds
+            self.microseconds = microseconds
+            self.year = year
+            self.month = month
+            self.day = day
+            self.hour = hour
+            self.minute = minute
+            self.second = second
+            self.microsecond = microsecond
+
+            if isinstance(weekday, integer_types):
+                self.weekday = weekdays[weekday]
+            else:
+                self.weekday = weekday
+
+            yday = 0
+            if nlyearday:
+                yday = nlyearday
+            elif yearday:
+                yday = yearday
+                if yearday > 59:
+                    self.leapdays = -1
+            if yday:
+                ydayidx = [31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 366]
+                for idx, ydays in enumerate(ydayidx):
+                    if yday <= ydays:
+                        self.month = idx+1
+                        if idx == 0:
+                            self.day = yday
+                        else:
+                            self.day = yday-ydayidx[idx-1]
+                        break
+                else:
+                    raise ValueError("invalid year day (%d)" % yday)
+
+        self._fix()
+
+    def _fix(self):
+        if abs(self.microseconds) > 999999:
+            s = self.microseconds//abs(self.microseconds)
+            div, mod = divmod(self.microseconds*s, 1000000)
+            self.microseconds = mod*s
+            self.seconds += div*s
+        if abs(self.seconds) > 59:
+            s = self.seconds//abs(self.seconds)
+            div, mod = divmod(self.seconds*s, 60)
+            self.seconds = mod*s
+            self.minutes += div*s
+        if abs(self.minutes) > 59:
+            s = self.minutes//abs(self.minutes)
+            div, mod = divmod(self.minutes*s, 60)
+            self.minutes = mod*s
+            self.hours += div*s
+        if abs(self.hours) > 23:
+            s = self.hours//abs(self.hours)
+            div, mod = divmod(self.hours*s, 24)
+            self.hours = mod*s
+            self.days += div*s
+        if abs(self.months) > 11:
+            s = self.months//abs(self.months)
+            div, mod = divmod(self.months*s, 12)
+            self.months = mod*s
+            self.years += div*s
+        if 
(self.hours or self.minutes or self.seconds or self.microseconds or + self.hour is not None or self.minute is not None or + self.second is not None or self.microsecond is not None): + self._has_time = 1 + else: + self._has_time = 0 + + def _set_months(self, months): + self.months = months + if abs(self.months) > 11: + s = self.months//abs(self.months) + div, mod = divmod(self.months*s, 12) + self.months = mod*s + self.years = div*s + else: + self.years = 0 + + def __add__(self, other): + if isinstance(other, relativedelta): + return relativedelta(years=other.years+self.years, + months=other.months+self.months, + days=other.days+self.days, + hours=other.hours+self.hours, + minutes=other.minutes+self.minutes, + seconds=other.seconds+self.seconds, + microseconds=other.microseconds+self.microseconds, + leapdays=other.leapdays or self.leapdays, + year=other.year or self.year, + month=other.month or self.month, + day=other.day or self.day, + weekday=other.weekday or self.weekday, + hour=other.hour or self.hour, + minute=other.minute or self.minute, + second=other.second or self.second, + microsecond=other.microsecond or self.microsecond) + if not isinstance(other, datetime.date): + raise TypeError("unsupported type for add operation") + elif self._has_time and not isinstance(other, datetime.datetime): + other = datetime.datetime.fromordinal(other.toordinal()) + year = (self.year or other.year)+self.years + month = self.month or other.month + if self.months: + assert 1 <= abs(self.months) <= 12 + month += self.months + if month > 12: + year += 1 + month -= 12 + elif month < 1: + year -= 1 + month += 12 + day = min(calendar.monthrange(year, month)[1], + self.day or other.day) + repl = {"year": year, "month": month, "day": day} + for attr in ["hour", "minute", "second", "microsecond"]: + value = getattr(self, attr) + if value is not None: + repl[attr] = value + days = self.days + if self.leapdays and month > 2 and calendar.isleap(year): + days += self.leapdays + ret = (other.replace(**repl) + + datetime.timedelta(days=days, + hours=self.hours, + minutes=self.minutes, + seconds=self.seconds, + microseconds=self.microseconds)) + if self.weekday: + weekday, nth = self.weekday.weekday, self.weekday.n or 1 + jumpdays = (abs(nth)-1)*7 + if nth > 0: + jumpdays += (7-ret.weekday()+weekday)%7 + else: + jumpdays += (ret.weekday()-weekday)%7 + jumpdays *= -1 + ret += datetime.timedelta(days=jumpdays) + return ret + + def __radd__(self, other): + return self.__add__(other) + + def __rsub__(self, other): + return self.__neg__().__radd__(other) + + def __sub__(self, other): + if not isinstance(other, relativedelta): + raise TypeError("unsupported type for sub operation") + return relativedelta(years=self.years-other.years, + months=self.months-other.months, + days=self.days-other.days, + hours=self.hours-other.hours, + minutes=self.minutes-other.minutes, + seconds=self.seconds-other.seconds, + microseconds=self.microseconds-other.microseconds, + leapdays=self.leapdays or other.leapdays, + year=self.year or other.year, + month=self.month or other.month, + day=self.day or other.day, + weekday=self.weekday or other.weekday, + hour=self.hour or other.hour, + minute=self.minute or other.minute, + second=self.second or other.second, + microsecond=self.microsecond or other.microsecond) + + def __neg__(self): + return relativedelta(years=-self.years, + months=-self.months, + days=-self.days, + hours=-self.hours, + minutes=-self.minutes, + seconds=-self.seconds, + microseconds=-self.microseconds, + 
leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + + def __bool__(self): + return not (not self.years and + not self.months and + not self.days and + not self.hours and + not self.minutes and + not self.seconds and + not self.microseconds and + not self.leapdays and + self.year is None and + self.month is None and + self.day is None and + self.weekday is None and + self.hour is None and + self.minute is None and + self.second is None and + self.microsecond is None) + + def __mul__(self, other): + f = float(other) + return relativedelta(years=int(self.years*f), + months=int(self.months*f), + days=int(self.days*f), + hours=int(self.hours*f), + minutes=int(self.minutes*f), + seconds=int(self.seconds*f), + microseconds=int(self.microseconds*f), + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + + __rmul__ = __mul__ + + def __eq__(self, other): + if not isinstance(other, relativedelta): + return False + if self.weekday or other.weekday: + if not self.weekday or not other.weekday: + return False + if self.weekday.weekday != other.weekday.weekday: + return False + n1, n2 = self.weekday.n, other.weekday.n + if n1 != n2 and not ((not n1 or n1 == 1) and (not n2 or n2 == 1)): + return False + return (self.years == other.years and + self.months == other.months and + self.days == other.days and + self.hours == other.hours and + self.minutes == other.minutes and + self.seconds == other.seconds and + self.leapdays == other.leapdays and + self.year == other.year and + self.month == other.month and + self.day == other.day and + self.hour == other.hour and + self.minute == other.minute and + self.second == other.second and + self.microsecond == other.microsecond) + + def __ne__(self, other): + return not self.__eq__(other) + + def __div__(self, other): + return self.__mul__(1/float(other)) + + __truediv__ = __div__ + + def __repr__(self): + l = [] + for attr in ["years", "months", "days", "leapdays", + "hours", "minutes", "seconds", "microseconds"]: + value = getattr(self, attr) + if value: + l.append("%s=%+d" % (attr, value)) + for attr in ["year", "month", "day", "weekday", + "hour", "minute", "second", "microsecond"]: + value = getattr(self, attr) + if value is not None: + l.append("%s=%s" % (attr, repr(value))) + return "%s(%s)" % (self.__class__.__name__, ", ".join(l)) + +# vim:ts=4:sw=4:et diff --git a/awx/lib/site-packages/dateutil/rrule.py b/awx/lib/site-packages/dateutil/rrule.py new file mode 100644 index 0000000000..ad4d3ba70c --- /dev/null +++ b/awx/lib/site-packages/dateutil/rrule.py @@ -0,0 +1,1112 @@ +""" +Copyright (c) 2003-2010 Gustavo Niemeyer + +This module offers extensions to the standard Python +datetime module. +""" +__license__ = "Simplified BSD" + +import itertools +import datetime +import calendar +try: + import _thread +except ImportError: + import thread as _thread +import sys + +from six import advance_iterator, integer_types + +__all__ = ["rrule", "rruleset", "rrulestr", + "YEARLY", "MONTHLY", "WEEKLY", "DAILY", + "HOURLY", "MINUTELY", "SECONDLY", + "MO", "TU", "WE", "TH", "FR", "SA", "SU"] + +# Every mask is 7 days longer to handle cross-year weekly periods. 
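+# M366MASK/M365MASK map a day-of-year index to its month number, the
+# MDAY*/NMDAY* masks give the positive/negative day-of-month at the same
+# index, and WDAYMASK repeats 0..6 so a weekday can be read at any offset.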
+M366MASK = tuple([1]*31+[2]*29+[3]*31+[4]*30+[5]*31+[6]*30+
+                 [7]*31+[8]*31+[9]*30+[10]*31+[11]*30+[12]*31+[1]*7)
+M365MASK = list(M366MASK)
+M29, M30, M31 = list(range(1, 30)), list(range(1, 31)), list(range(1, 32))
+MDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7])
+MDAY365MASK = list(MDAY366MASK)
+M29, M30, M31 = list(range(-29, 0)), list(range(-30, 0)), list(range(-31, 0))
+NMDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7])
+NMDAY365MASK = list(NMDAY366MASK)
+M366RANGE = (0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366)
+M365RANGE = (0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365)
+WDAYMASK = [0, 1, 2, 3, 4, 5, 6]*55
+del M29, M30, M31, M365MASK[59], MDAY365MASK[59], NMDAY365MASK[31]
+MDAY365MASK = tuple(MDAY365MASK)
+M365MASK = tuple(M365MASK)
+
+(YEARLY,
+ MONTHLY,
+ WEEKLY,
+ DAILY,
+ HOURLY,
+ MINUTELY,
+ SECONDLY) = list(range(7))
+
+# Imported on demand.
+easter = None
+parser = None
+
+class weekday(object):
+    __slots__ = ["weekday", "n"]
+
+    def __init__(self, weekday, n=None):
+        if n == 0:
+            raise ValueError("Can't create weekday with n == 0")
+        self.weekday = weekday
+        self.n = n
+
+    def __call__(self, n):
+        if n == self.n:
+            return self
+        else:
+            return self.__class__(self.weekday, n)
+
+    def __eq__(self, other):
+        try:
+            if self.weekday != other.weekday or self.n != other.n:
+                return False
+        except AttributeError:
+            return False
+        return True
+
+    def __repr__(self):
+        s = ("MO", "TU", "WE", "TH", "FR", "SA", "SU")[self.weekday]
+        if not self.n:
+            return s
+        else:
+            return "%s(%+d)" % (s, self.n)
+
+MO, TU, WE, TH, FR, SA, SU = weekdays = tuple([weekday(x) for x in range(7)])
+
+class rrulebase(object):
+    def __init__(self, cache=False):
+        if cache:
+            self._cache = []
+            self._cache_lock = _thread.allocate_lock()
+            self._cache_gen = self._iter()
+            self._cache_complete = False
+        else:
+            self._cache = None
+            self._cache_complete = False
+        self._len = None
+
+    def __iter__(self):
+        if self._cache_complete:
+            return iter(self._cache)
+        elif self._cache is None:
+            return self._iter()
+        else:
+            return self._iter_cached()
+
+    def _iter_cached(self):
+        i = 0
+        gen = self._cache_gen
+        cache = self._cache
+        acquire = self._cache_lock.acquire
+        release = self._cache_lock.release
+        while gen:
+            if i == len(cache):
+                acquire()
+                if self._cache_complete:
+                    break
+                try:
+                    for j in range(10):
+                        cache.append(advance_iterator(gen))
+                except StopIteration:
+                    self._cache_gen = gen = None
+                    self._cache_complete = True
+                    break
+                release()
+            yield cache[i]
+            i += 1
+        while i < self._len:
+            yield cache[i]
+            i += 1
+
+    def __getitem__(self, item):
+        if self._cache_complete:
+            return self._cache[item]
+        elif isinstance(item, slice):
+            if item.step and item.step < 0:
+                return list(iter(self))[item]
+            else:
+                return list(itertools.islice(self,
+                                             item.start or 0,
+                                             item.stop or sys.maxsize,
+                                             item.step or 1))
+        elif item >= 0:
+            gen = iter(self)
+            try:
+                for i in range(item+1):
+                    res = advance_iterator(gen)
+            except StopIteration:
+                raise IndexError
+            return res
+        else:
+            return list(iter(self))[item]
+
+    def __contains__(self, item):
+        if self._cache_complete:
+            return item in self._cache
+        else:
+            for i in self:
+                if i == item:
+                    return True
+                elif i > item:
+                    return False
+        return False
+
+    # __len__() introduces a large performance penalty.
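+    # It is therefore exposed as an explicit count() method instead.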
+ def count(self): + if self._len is None: + for x in self: pass + return self._len + + def before(self, dt, inc=False): + if self._cache_complete: + gen = self._cache + else: + gen = self + last = None + if inc: + for i in gen: + if i > dt: + break + last = i + else: + for i in gen: + if i >= dt: + break + last = i + return last + + def after(self, dt, inc=False): + if self._cache_complete: + gen = self._cache + else: + gen = self + if inc: + for i in gen: + if i >= dt: + return i + else: + for i in gen: + if i > dt: + return i + return None + + def between(self, after, before, inc=False): + if self._cache_complete: + gen = self._cache + else: + gen = self + started = False + l = [] + if inc: + for i in gen: + if i > before: + break + elif not started: + if i >= after: + started = True + l.append(i) + else: + l.append(i) + else: + for i in gen: + if i >= before: + break + elif not started: + if i > after: + started = True + l.append(i) + else: + l.append(i) + return l + +class rrule(rrulebase): + def __init__(self, freq, dtstart=None, + interval=1, wkst=None, count=None, until=None, bysetpos=None, + bymonth=None, bymonthday=None, byyearday=None, byeaster=None, + byweekno=None, byweekday=None, + byhour=None, byminute=None, bysecond=None, + cache=False): + super(rrule, self).__init__(cache) + global easter + if not dtstart: + dtstart = datetime.datetime.now().replace(microsecond=0) + elif not isinstance(dtstart, datetime.datetime): + dtstart = datetime.datetime.fromordinal(dtstart.toordinal()) + else: + dtstart = dtstart.replace(microsecond=0) + self._dtstart = dtstart + self._tzinfo = dtstart.tzinfo + self._freq = freq + self._interval = interval + self._count = count + if until and not isinstance(until, datetime.datetime): + until = datetime.datetime.fromordinal(until.toordinal()) + self._until = until + if wkst is None: + self._wkst = calendar.firstweekday() + elif isinstance(wkst, integer_types): + self._wkst = wkst + else: + self._wkst = wkst.weekday + if bysetpos is None: + self._bysetpos = None + elif isinstance(bysetpos, integer_types): + if bysetpos == 0 or not (-366 <= bysetpos <= 366): + raise ValueError("bysetpos must be between 1 and 366, " + "or between -366 and -1") + self._bysetpos = (bysetpos,) + else: + self._bysetpos = tuple(bysetpos) + for pos in self._bysetpos: + if pos == 0 or not (-366 <= pos <= 366): + raise ValueError("bysetpos must be between 1 and 366, " + "or between -366 and -1") + if not (byweekno or byyearday or bymonthday or + byweekday is not None or byeaster is not None): + if freq == YEARLY: + if not bymonth: + bymonth = dtstart.month + bymonthday = dtstart.day + elif freq == MONTHLY: + bymonthday = dtstart.day + elif freq == WEEKLY: + byweekday = dtstart.weekday() + # bymonth + if not bymonth: + self._bymonth = None + elif isinstance(bymonth, integer_types): + self._bymonth = (bymonth,) + else: + self._bymonth = tuple(bymonth) + # byyearday + if not byyearday: + self._byyearday = None + elif isinstance(byyearday, integer_types): + self._byyearday = (byyearday,) + else: + self._byyearday = tuple(byyearday) + # byeaster + if byeaster is not None: + if not easter: + from dateutil import easter + if isinstance(byeaster, integer_types): + self._byeaster = (byeaster,) + else: + self._byeaster = tuple(byeaster) + else: + self._byeaster = None + # bymonthay + if not bymonthday: + self._bymonthday = () + self._bynmonthday = () + elif isinstance(bymonthday, integer_types): + if bymonthday < 0: + self._bynmonthday = (bymonthday,) + self._bymonthday = () + else: + 
self._bymonthday = (bymonthday,) + self._bynmonthday = () + else: + self._bymonthday = tuple([x for x in bymonthday if x > 0]) + self._bynmonthday = tuple([x for x in bymonthday if x < 0]) + # byweekno + if byweekno is None: + self._byweekno = None + elif isinstance(byweekno, integer_types): + self._byweekno = (byweekno,) + else: + self._byweekno = tuple(byweekno) + # byweekday / bynweekday + if byweekday is None: + self._byweekday = None + self._bynweekday = None + elif isinstance(byweekday, integer_types): + self._byweekday = (byweekday,) + self._bynweekday = None + elif hasattr(byweekday, "n"): + if not byweekday.n or freq > MONTHLY: + self._byweekday = (byweekday.weekday,) + self._bynweekday = None + else: + self._bynweekday = ((byweekday.weekday, byweekday.n),) + self._byweekday = None + else: + self._byweekday = [] + self._bynweekday = [] + for wday in byweekday: + if isinstance(wday, integer_types): + self._byweekday.append(wday) + elif not wday.n or freq > MONTHLY: + self._byweekday.append(wday.weekday) + else: + self._bynweekday.append((wday.weekday, wday.n)) + self._byweekday = tuple(self._byweekday) + self._bynweekday = tuple(self._bynweekday) + if not self._byweekday: + self._byweekday = None + elif not self._bynweekday: + self._bynweekday = None + # byhour + if byhour is None: + if freq < HOURLY: + self._byhour = (dtstart.hour,) + else: + self._byhour = None + elif isinstance(byhour, integer_types): + self._byhour = (byhour,) + else: + self._byhour = tuple(byhour) + # byminute + if byminute is None: + if freq < MINUTELY: + self._byminute = (dtstart.minute,) + else: + self._byminute = None + elif isinstance(byminute, integer_types): + self._byminute = (byminute,) + else: + self._byminute = tuple(byminute) + # bysecond + if bysecond is None: + if freq < SECONDLY: + self._bysecond = (dtstart.second,) + else: + self._bysecond = None + elif isinstance(bysecond, integer_types): + self._bysecond = (bysecond,) + else: + self._bysecond = tuple(bysecond) + + if self._freq >= HOURLY: + self._timeset = None + else: + self._timeset = [] + for hour in self._byhour: + for minute in self._byminute: + for second in self._bysecond: + self._timeset.append( + datetime.time(hour, minute, second, + tzinfo=self._tzinfo)) + self._timeset.sort() + self._timeset = tuple(self._timeset) + + def _iter(self): + year, month, day, hour, minute, second, weekday, yearday, _ = \ + self._dtstart.timetuple() + + # Some local variables to speed things up a bit + freq = self._freq + interval = self._interval + wkst = self._wkst + until = self._until + bymonth = self._bymonth + byweekno = self._byweekno + byyearday = self._byyearday + byweekday = self._byweekday + byeaster = self._byeaster + bymonthday = self._bymonthday + bynmonthday = self._bynmonthday + bysetpos = self._bysetpos + byhour = self._byhour + byminute = self._byminute + bysecond = self._bysecond + + ii = _iterinfo(self) + ii.rebuild(year, month) + + getdayset = {YEARLY:ii.ydayset, + MONTHLY:ii.mdayset, + WEEKLY:ii.wdayset, + DAILY:ii.ddayset, + HOURLY:ii.ddayset, + MINUTELY:ii.ddayset, + SECONDLY:ii.ddayset}[freq] + + if freq < HOURLY: + timeset = self._timeset + else: + gettimeset = {HOURLY:ii.htimeset, + MINUTELY:ii.mtimeset, + SECONDLY:ii.stimeset}[freq] + if ((freq >= HOURLY and + self._byhour and hour not in self._byhour) or + (freq >= MINUTELY and + self._byminute and minute not in self._byminute) or + (freq >= SECONDLY and + self._bysecond and second not in self._bysecond)): + timeset = () + else: + timeset = gettimeset(hour, minute, second) + 
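+        # Main loop: materialize the candidate day set for the current
+        # period, drop the days that fail the by* filters, combine the
+        # survivors with timeset, then advance by `interval` units of `freq`.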
+ total = 0 + count = self._count + while True: + # Get dayset with the right frequency + dayset, start, end = getdayset(year, month, day) + + # Do the "hard" work ;-) + filtered = False + for i in dayset[start:end]: + if ((bymonth and ii.mmask[i] not in bymonth) or + (byweekno and not ii.wnomask[i]) or + (byweekday and ii.wdaymask[i] not in byweekday) or + (ii.nwdaymask and not ii.nwdaymask[i]) or + (byeaster and not ii.eastermask[i]) or + ((bymonthday or bynmonthday) and + ii.mdaymask[i] not in bymonthday and + ii.nmdaymask[i] not in bynmonthday) or + (byyearday and + ((i < ii.yearlen and i+1 not in byyearday + and -ii.yearlen+i not in byyearday) or + (i >= ii.yearlen and i+1-ii.yearlen not in byyearday + and -ii.nextyearlen+i-ii.yearlen + not in byyearday)))): + dayset[i] = None + filtered = True + + # Output results + if bysetpos and timeset: + poslist = [] + for pos in bysetpos: + if pos < 0: + daypos, timepos = divmod(pos, len(timeset)) + else: + daypos, timepos = divmod(pos-1, len(timeset)) + try: + i = [x for x in dayset[start:end] + if x is not None][daypos] + time = timeset[timepos] + except IndexError: + pass + else: + date = datetime.date.fromordinal(ii.yearordinal+i) + res = datetime.datetime.combine(date, time) + if res not in poslist: + poslist.append(res) + poslist.sort() + for res in poslist: + if until and res > until: + self._len = total + return + elif res >= self._dtstart: + total += 1 + yield res + if count: + count -= 1 + if not count: + self._len = total + return + else: + for i in dayset[start:end]: + if i is not None: + date = datetime.date.fromordinal(ii.yearordinal+i) + for time in timeset: + res = datetime.datetime.combine(date, time) + if until and res > until: + self._len = total + return + elif res >= self._dtstart: + total += 1 + yield res + if count: + count -= 1 + if not count: + self._len = total + return + + # Handle frequency and interval + fixday = False + if freq == YEARLY: + year += interval + if year > datetime.MAXYEAR: + self._len = total + return + ii.rebuild(year, month) + elif freq == MONTHLY: + month += interval + if month > 12: + div, mod = divmod(month, 12) + month = mod + year += div + if month == 0: + month = 12 + year -= 1 + if year > datetime.MAXYEAR: + self._len = total + return + ii.rebuild(year, month) + elif freq == WEEKLY: + if wkst > weekday: + day += -(weekday+1+(6-wkst))+self._interval*7 + else: + day += -(weekday-wkst)+self._interval*7 + weekday = wkst + fixday = True + elif freq == DAILY: + day += interval + fixday = True + elif freq == HOURLY: + if filtered: + # Jump to one iteration before next day + hour += ((23-hour)//interval)*interval + while True: + hour += interval + div, mod = divmod(hour, 24) + if div: + hour = mod + day += div + fixday = True + if not byhour or hour in byhour: + break + timeset = gettimeset(hour, minute, second) + elif freq == MINUTELY: + if filtered: + # Jump to one iteration before next day + minute += ((1439-(hour*60+minute))//interval)*interval + while True: + minute += interval + div, mod = divmod(minute, 60) + if div: + minute = mod + hour += div + div, mod = divmod(hour, 24) + if div: + hour = mod + day += div + fixday = True + filtered = False + if ((not byhour or hour in byhour) and + (not byminute or minute in byminute)): + break + timeset = gettimeset(hour, minute, second) + elif freq == SECONDLY: + if filtered: + # Jump to one iteration before next day + second += (((86399-(hour*3600+minute*60+second)) + //interval)*interval) + while True: + second += self._interval + div, mod = 
divmod(second, 60) + if div: + second = mod + minute += div + div, mod = divmod(minute, 60) + if div: + minute = mod + hour += div + div, mod = divmod(hour, 24) + if div: + hour = mod + day += div + fixday = True + if ((not byhour or hour in byhour) and + (not byminute or minute in byminute) and + (not bysecond or second in bysecond)): + break + timeset = gettimeset(hour, minute, second) + + if fixday and day > 28: + daysinmonth = calendar.monthrange(year, month)[1] + if day > daysinmonth: + while day > daysinmonth: + day -= daysinmonth + month += 1 + if month == 13: + month = 1 + year += 1 + if year > datetime.MAXYEAR: + self._len = total + return + daysinmonth = calendar.monthrange(year, month)[1] + ii.rebuild(year, month) + +class _iterinfo(object): + __slots__ = ["rrule", "lastyear", "lastmonth", + "yearlen", "nextyearlen", "yearordinal", "yearweekday", + "mmask", "mrange", "mdaymask", "nmdaymask", + "wdaymask", "wnomask", "nwdaymask", "eastermask"] + + def __init__(self, rrule): + for attr in self.__slots__: + setattr(self, attr, None) + self.rrule = rrule + + def rebuild(self, year, month): + # Every mask is 7 days longer to handle cross-year weekly periods. + rr = self.rrule + if year != self.lastyear: + self.yearlen = 365+calendar.isleap(year) + self.nextyearlen = 365+calendar.isleap(year+1) + firstyday = datetime.date(year, 1, 1) + self.yearordinal = firstyday.toordinal() + self.yearweekday = firstyday.weekday() + + wday = datetime.date(year, 1, 1).weekday() + if self.yearlen == 365: + self.mmask = M365MASK + self.mdaymask = MDAY365MASK + self.nmdaymask = NMDAY365MASK + self.wdaymask = WDAYMASK[wday:] + self.mrange = M365RANGE + else: + self.mmask = M366MASK + self.mdaymask = MDAY366MASK + self.nmdaymask = NMDAY366MASK + self.wdaymask = WDAYMASK[wday:] + self.mrange = M366RANGE + + if not rr._byweekno: + self.wnomask = None + else: + self.wnomask = [0]*(self.yearlen+7) + #no1wkst = firstwkst = self.wdaymask.index(rr._wkst) + no1wkst = firstwkst = (7-self.yearweekday+rr._wkst)%7 + if no1wkst >= 4: + no1wkst = 0 + # Number of days in the year, plus the days we got + # from last year. + wyearlen = self.yearlen+(self.yearweekday-rr._wkst)%7 + else: + # Number of days in the year, minus the days we + # left in last year. + wyearlen = self.yearlen-no1wkst + div, mod = divmod(wyearlen, 7) + numweeks = div+mod//4 + for n in rr._byweekno: + if n < 0: + n += numweeks+1 + if not (0 < n <= numweeks): + continue + if n > 1: + i = no1wkst+(n-1)*7 + if no1wkst != firstwkst: + i -= 7-firstwkst + else: + i = no1wkst + for j in range(7): + self.wnomask[i] = 1 + i += 1 + if self.wdaymask[i] == rr._wkst: + break + if 1 in rr._byweekno: + # Check week number 1 of next year as well + # TODO: Check -numweeks for next year. + i = no1wkst+numweeks*7 + if no1wkst != firstwkst: + i -= 7-firstwkst + if i < self.yearlen: + # If week starts in next year, we + # don't care about it. + for j in range(7): + self.wnomask[i] = 1 + i += 1 + if self.wdaymask[i] == rr._wkst: + break + if no1wkst: + # Check last week number of last year as + # well. If no1wkst is 0, either the year + # started on week start, or week number 1 + # got days from last year, so there are no + # days from last year's last week number in + # this year. 
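+                    # The branch below works out last year's final week
+                    # number and, if that number was requested, marks the
+                    # no1wkst days this year inherited from it.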
+ if -1 not in rr._byweekno: + lyearweekday = datetime.date(year-1, 1, 1).weekday() + lno1wkst = (7-lyearweekday+rr._wkst)%7 + lyearlen = 365+calendar.isleap(year-1) + if lno1wkst >= 4: + lno1wkst = 0 + lnumweeks = 52+(lyearlen+ + (lyearweekday-rr._wkst)%7)%7//4 + else: + lnumweeks = 52+(self.yearlen-no1wkst)%7//4 + else: + lnumweeks = -1 + if lnumweeks in rr._byweekno: + for i in range(no1wkst): + self.wnomask[i] = 1 + + if (rr._bynweekday and + (month != self.lastmonth or year != self.lastyear)): + ranges = [] + if rr._freq == YEARLY: + if rr._bymonth: + for month in rr._bymonth: + ranges.append(self.mrange[month-1:month+1]) + else: + ranges = [(0, self.yearlen)] + elif rr._freq == MONTHLY: + ranges = [self.mrange[month-1:month+1]] + if ranges: + # Weekly frequency won't get here, so we may not + # care about cross-year weekly periods. + self.nwdaymask = [0]*self.yearlen + for first, last in ranges: + last -= 1 + for wday, n in rr._bynweekday: + if n < 0: + i = last+(n+1)*7 + i -= (self.wdaymask[i]-wday)%7 + else: + i = first+(n-1)*7 + i += (7-self.wdaymask[i]+wday)%7 + if first <= i <= last: + self.nwdaymask[i] = 1 + + if rr._byeaster: + self.eastermask = [0]*(self.yearlen+7) + eyday = easter.easter(year).toordinal()-self.yearordinal + for offset in rr._byeaster: + self.eastermask[eyday+offset] = 1 + + self.lastyear = year + self.lastmonth = month + + def ydayset(self, year, month, day): + return list(range(self.yearlen)), 0, self.yearlen + + def mdayset(self, year, month, day): + set = [None]*self.yearlen + start, end = self.mrange[month-1:month+1] + for i in range(start, end): + set[i] = i + return set, start, end + + def wdayset(self, year, month, day): + # We need to handle cross-year weeks here. + set = [None]*(self.yearlen+7) + i = datetime.date(year, month, day).toordinal()-self.yearordinal + start = i + for j in range(7): + set[i] = i + i += 1 + #if (not (0 <= i < self.yearlen) or + # self.wdaymask[i] == self.rrule._wkst): + # This will cross the year boundary, if necessary. 
+ if self.wdaymask[i] == self.rrule._wkst: + break + return set, start, i + + def ddayset(self, year, month, day): + set = [None]*self.yearlen + i = datetime.date(year, month, day).toordinal()-self.yearordinal + set[i] = i + return set, i, i+1 + + def htimeset(self, hour, minute, second): + set = [] + rr = self.rrule + for minute in rr._byminute: + for second in rr._bysecond: + set.append(datetime.time(hour, minute, second, + tzinfo=rr._tzinfo)) + set.sort() + return set + + def mtimeset(self, hour, minute, second): + set = [] + rr = self.rrule + for second in rr._bysecond: + set.append(datetime.time(hour, minute, second, tzinfo=rr._tzinfo)) + set.sort() + return set + + def stimeset(self, hour, minute, second): + return (datetime.time(hour, minute, second, + tzinfo=self.rrule._tzinfo),) + + +class rruleset(rrulebase): + + class _genitem(object): + def __init__(self, genlist, gen): + try: + self.dt = advance_iterator(gen) + genlist.append(self) + except StopIteration: + pass + self.genlist = genlist + self.gen = gen + + def __next__(self): + try: + self.dt = advance_iterator(self.gen) + except StopIteration: + self.genlist.remove(self) + + next = __next__ + + def __lt__(self, other): + return self.dt < other.dt + + def __gt__(self, other): + return self.dt > other.dt + + def __eq__(self, other): + return self.dt == other.dt + + def __ne__(self, other): + return self.dt != other.dt + + def __init__(self, cache=False): + super(rruleset, self).__init__(cache) + self._rrule = [] + self._rdate = [] + self._exrule = [] + self._exdate = [] + + def rrule(self, rrule): + self._rrule.append(rrule) + + def rdate(self, rdate): + self._rdate.append(rdate) + + def exrule(self, exrule): + self._exrule.append(exrule) + + def exdate(self, exdate): + self._exdate.append(exdate) + + def _iter(self): + rlist = [] + self._rdate.sort() + self._genitem(rlist, iter(self._rdate)) + for gen in [iter(x) for x in self._rrule]: + self._genitem(rlist, gen) + rlist.sort() + exlist = [] + self._exdate.sort() + self._genitem(exlist, iter(self._exdate)) + for gen in [iter(x) for x in self._exrule]: + self._genitem(exlist, gen) + exlist.sort() + lastdt = None + total = 0 + while rlist: + ritem = rlist[0] + if not lastdt or lastdt != ritem.dt: + while exlist and exlist[0] < ritem: + advance_iterator(exlist[0]) + exlist.sort() + if not exlist or ritem != exlist[0]: + total += 1 + yield ritem.dt + lastdt = ritem.dt + advance_iterator(ritem) + rlist.sort() + self._len = total + +class _rrulestr(object): + + _freq_map = {"YEARLY": YEARLY, + "MONTHLY": MONTHLY, + "WEEKLY": WEEKLY, + "DAILY": DAILY, + "HOURLY": HOURLY, + "MINUTELY": MINUTELY, + "SECONDLY": SECONDLY} + + _weekday_map = {"MO":0,"TU":1,"WE":2,"TH":3,"FR":4,"SA":5,"SU":6} + + def _handle_int(self, rrkwargs, name, value, **kwargs): + rrkwargs[name.lower()] = int(value) + + def _handle_int_list(self, rrkwargs, name, value, **kwargs): + rrkwargs[name.lower()] = [int(x) for x in value.split(',')] + + _handle_INTERVAL = _handle_int + _handle_COUNT = _handle_int + _handle_BYSETPOS = _handle_int_list + _handle_BYMONTH = _handle_int_list + _handle_BYMONTHDAY = _handle_int_list + _handle_BYYEARDAY = _handle_int_list + _handle_BYEASTER = _handle_int_list + _handle_BYWEEKNO = _handle_int_list + _handle_BYHOUR = _handle_int_list + _handle_BYMINUTE = _handle_int_list + _handle_BYSECOND = _handle_int_list + + def _handle_FREQ(self, rrkwargs, name, value, **kwargs): + rrkwargs["freq"] = self._freq_map[value] + + def _handle_UNTIL(self, rrkwargs, name, value, **kwargs): + global 
parser + if not parser: + from dateutil import parser + try: + rrkwargs["until"] = parser.parse(value, + ignoretz=kwargs.get("ignoretz"), + tzinfos=kwargs.get("tzinfos")) + except ValueError: + raise ValueError("invalid until date") + + def _handle_WKST(self, rrkwargs, name, value, **kwargs): + rrkwargs["wkst"] = self._weekday_map[value] + + def _handle_BYWEEKDAY(self, rrkwargs, name, value, **kwarsg): + l = [] + for wday in value.split(','): + for i in range(len(wday)): + if wday[i] not in '+-0123456789': + break + n = wday[:i] or None + w = wday[i:] + if n: n = int(n) + l.append(weekdays[self._weekday_map[w]](n)) + rrkwargs["byweekday"] = l + + _handle_BYDAY = _handle_BYWEEKDAY + + def _parse_rfc_rrule(self, line, + dtstart=None, + cache=False, + ignoretz=False, + tzinfos=None): + if line.find(':') != -1: + name, value = line.split(':') + if name != "RRULE": + raise ValueError("unknown parameter name") + else: + value = line + rrkwargs = {} + for pair in value.split(';'): + name, value = pair.split('=') + name = name.upper() + value = value.upper() + try: + getattr(self, "_handle_"+name)(rrkwargs, name, value, + ignoretz=ignoretz, + tzinfos=tzinfos) + except AttributeError: + raise ValueError("unknown parameter '%s'" % name) + except (KeyError, ValueError): + raise ValueError("invalid '%s': %s" % (name, value)) + return rrule(dtstart=dtstart, cache=cache, **rrkwargs) + + def _parse_rfc(self, s, + dtstart=None, + cache=False, + unfold=False, + forceset=False, + compatible=False, + ignoretz=False, + tzinfos=None): + global parser + if compatible: + forceset = True + unfold = True + s = s.upper() + if not s.strip(): + raise ValueError("empty string") + if unfold: + lines = s.splitlines() + i = 0 + while i < len(lines): + line = lines[i].rstrip() + if not line: + del lines[i] + elif i > 0 and line[0] == " ": + lines[i-1] += line[1:] + del lines[i] + else: + i += 1 + else: + lines = s.split() + if (not forceset and len(lines) == 1 and + (s.find(':') == -1 or s.startswith('RRULE:'))): + return self._parse_rfc_rrule(lines[0], cache=cache, + dtstart=dtstart, ignoretz=ignoretz, + tzinfos=tzinfos) + else: + rrulevals = [] + rdatevals = [] + exrulevals = [] + exdatevals = [] + for line in lines: + if not line: + continue + if line.find(':') == -1: + name = "RRULE" + value = line + else: + name, value = line.split(':', 1) + parms = name.split(';') + if not parms: + raise ValueError("empty property name") + name = parms[0] + parms = parms[1:] + if name == "RRULE": + for parm in parms: + raise ValueError("unsupported RRULE parm: "+parm) + rrulevals.append(value) + elif name == "RDATE": + for parm in parms: + if parm != "VALUE=DATE-TIME": + raise ValueError("unsupported RDATE parm: "+parm) + rdatevals.append(value) + elif name == "EXRULE": + for parm in parms: + raise ValueError("unsupported EXRULE parm: "+parm) + exrulevals.append(value) + elif name == "EXDATE": + for parm in parms: + if parm != "VALUE=DATE-TIME": + raise ValueError("unsupported RDATE parm: "+parm) + exdatevals.append(value) + elif name == "DTSTART": + for parm in parms: + raise ValueError("unsupported DTSTART parm: "+parm) + if not parser: + from dateutil import parser + dtstart = parser.parse(value, ignoretz=ignoretz, + tzinfos=tzinfos) + else: + raise ValueError("unsupported property: "+name) + if (forceset or len(rrulevals) > 1 or + rdatevals or exrulevals or exdatevals): + if not parser and (rdatevals or exdatevals): + from dateutil import parser + set = rruleset(cache=cache) + for value in rrulevals: + 
set.rrule(self._parse_rfc_rrule(value, dtstart=dtstart, + ignoretz=ignoretz, + tzinfos=tzinfos)) + for value in rdatevals: + for datestr in value.split(','): + set.rdate(parser.parse(datestr, + ignoretz=ignoretz, + tzinfos=tzinfos)) + for value in exrulevals: + set.exrule(self._parse_rfc_rrule(value, dtstart=dtstart, + ignoretz=ignoretz, + tzinfos=tzinfos)) + for value in exdatevals: + for datestr in value.split(','): + set.exdate(parser.parse(datestr, + ignoretz=ignoretz, + tzinfos=tzinfos)) + if compatible and dtstart: + set.rdate(dtstart) + return set + else: + return self._parse_rfc_rrule(rrulevals[0], + dtstart=dtstart, + cache=cache, + ignoretz=ignoretz, + tzinfos=tzinfos) + + def __call__(self, s, **kwargs): + return self._parse_rfc(s, **kwargs) + +rrulestr = _rrulestr() + +# vim:ts=4:sw=4:et diff --git a/awx/lib/site-packages/dateutil/tz.py b/awx/lib/site-packages/dateutil/tz.py new file mode 100644 index 0000000000..e849fc24b5 --- /dev/null +++ b/awx/lib/site-packages/dateutil/tz.py @@ -0,0 +1,960 @@ +""" +Copyright (c) 2003-2007 Gustavo Niemeyer + +This module offers extensions to the standard Python +datetime module. +""" +__license__ = "Simplified BSD" + +from six import string_types, PY3 + +import datetime +import struct +import time +import sys +import os + +relativedelta = None +parser = None +rrule = None + +__all__ = ["tzutc", "tzoffset", "tzlocal", "tzfile", "tzrange", + "tzstr", "tzical", "tzwin", "tzwinlocal", "gettz"] + +try: + from dateutil.tzwin import tzwin, tzwinlocal +except (ImportError, OSError): + tzwin, tzwinlocal = None, None + +def tzname_in_python2(myfunc): + """Change unicode output into bytestrings in Python 2 + + tzname() API changed in Python 3. It used to return bytes, but was changed + to unicode strings + """ + def inner_func(*args, **kwargs): + if PY3: + return myfunc(*args, **kwargs) + else: + return myfunc(*args, **kwargs).encode() + return inner_func + +ZERO = datetime.timedelta(0) +EPOCHORDINAL = datetime.datetime.utcfromtimestamp(0).toordinal() + +class tzutc(datetime.tzinfo): + + def utcoffset(self, dt): + return ZERO + + def dst(self, dt): + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return "UTC" + + def __eq__(self, other): + return (isinstance(other, tzutc) or + (isinstance(other, tzoffset) and other._offset == ZERO)) + + def __ne__(self, other): + return not self.__eq__(other) + + def __repr__(self): + return "%s()" % self.__class__.__name__ + + __reduce__ = object.__reduce__ + +class tzoffset(datetime.tzinfo): + + def __init__(self, name, offset): + self._name = name + self._offset = datetime.timedelta(seconds=offset) + + def utcoffset(self, dt): + return self._offset + + def dst(self, dt): + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return self._name + + def __eq__(self, other): + return (isinstance(other, tzoffset) and + self._offset == other._offset) + + def __ne__(self, other): + return not self.__eq__(other) + + def __repr__(self): + return "%s(%s, %s)" % (self.__class__.__name__, + repr(self._name), + self._offset.days*86400+self._offset.seconds) + + __reduce__ = object.__reduce__ + +class tzlocal(datetime.tzinfo): + + _std_offset = datetime.timedelta(seconds=-time.timezone) + if time.daylight: + _dst_offset = datetime.timedelta(seconds=-time.altzone) + else: + _dst_offset = _std_offset + + def utcoffset(self, dt): + if self._isdst(dt): + return self._dst_offset + else: + return self._std_offset + + def dst(self, dt): + if self._isdst(dt): + return self._dst_offset-self._std_offset + else: + 
return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return time.tzname[self._isdst(dt)] + + def _isdst(self, dt): + # We can't use mktime here. It is unstable when deciding if + # the hour near to a change is DST or not. + # + # timestamp = time.mktime((dt.year, dt.month, dt.day, dt.hour, + # dt.minute, dt.second, dt.weekday(), 0, -1)) + # return time.localtime(timestamp).tm_isdst + # + # The code above yields the following result: + # + #>>> import tz, datetime + #>>> t = tz.tzlocal() + #>>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + #'BRDT' + #>>> datetime.datetime(2003,2,16,0,tzinfo=t).tzname() + #'BRST' + #>>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + #'BRST' + #>>> datetime.datetime(2003,2,15,22,tzinfo=t).tzname() + #'BRDT' + #>>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + #'BRDT' + # + # Here is a more stable implementation: + # + timestamp = ((dt.toordinal() - EPOCHORDINAL) * 86400 + + dt.hour * 3600 + + dt.minute * 60 + + dt.second) + return time.localtime(timestamp+time.timezone).tm_isdst + + def __eq__(self, other): + if not isinstance(other, tzlocal): + return False + return (self._std_offset == other._std_offset and + self._dst_offset == other._dst_offset) + return True + + def __ne__(self, other): + return not self.__eq__(other) + + def __repr__(self): + return "%s()" % self.__class__.__name__ + + __reduce__ = object.__reduce__ + +class _ttinfo(object): + __slots__ = ["offset", "delta", "isdst", "abbr", "isstd", "isgmt"] + + def __init__(self): + for attr in self.__slots__: + setattr(self, attr, None) + + def __repr__(self): + l = [] + for attr in self.__slots__: + value = getattr(self, attr) + if value is not None: + l.append("%s=%s" % (attr, repr(value))) + return "%s(%s)" % (self.__class__.__name__, ", ".join(l)) + + def __eq__(self, other): + if not isinstance(other, _ttinfo): + return False + return (self.offset == other.offset and + self.delta == other.delta and + self.isdst == other.isdst and + self.abbr == other.abbr and + self.isstd == other.isstd and + self.isgmt == other.isgmt) + + def __ne__(self, other): + return not self.__eq__(other) + + def __getstate__(self): + state = {} + for name in self.__slots__: + state[name] = getattr(self, name, None) + return state + + def __setstate__(self, state): + for name in self.__slots__: + if name in state: + setattr(self, name, state[name]) + +class tzfile(datetime.tzinfo): + + # http://www.twinsun.com/tz/tz-link.htm + # ftp://ftp.iana.org/tz/tz*.tar.gz + + def __init__(self, fileobj): + if isinstance(fileobj, string_types): + self._filename = fileobj + fileobj = open(fileobj, 'rb') + elif hasattr(fileobj, "name"): + self._filename = fileobj.name + else: + self._filename = repr(fileobj) + + # From tzfile(5): + # + # The time zone information files used by tzset(3) + # begin with the magic characters "TZif" to identify + # them as time zone information files, followed by + # sixteen bytes reserved for future use, followed by + # six four-byte values of type long, written in a + # ``standard'' byte order (the high-order byte + # of the value is written first). + + if fileobj.read(4).decode() != "TZif": + raise ValueError("magic not found") + + fileobj.read(16) + + ( + # The number of UTC/local indicators stored in the file. + ttisgmtcnt, + + # The number of standard/wall indicators stored in the file. + ttisstdcnt, + + # The number of leap seconds for which data is + # stored in the file. + leapcnt, + + # The number of "transition times" for which data + # is stored in the file. 
+ timecnt, + + # The number of "local time types" for which data + # is stored in the file (must not be zero). + typecnt, + + # The number of characters of "time zone + # abbreviation strings" stored in the file. + charcnt, + + ) = struct.unpack(">6l", fileobj.read(24)) + + # The above header is followed by tzh_timecnt four-byte + # values of type long, sorted in ascending order. + # These values are written in ``standard'' byte order. + # Each is used as a transition time (as returned by + # time(2)) at which the rules for computing local time + # change. + + if timecnt: + self._trans_list = struct.unpack(">%dl" % timecnt, + fileobj.read(timecnt*4)) + else: + self._trans_list = [] + + # Next come tzh_timecnt one-byte values of type unsigned + # char; each one tells which of the different types of + # ``local time'' types described in the file is associated + # with the same-indexed transition time. These values + # serve as indices into an array of ttinfo structures that + # appears next in the file. + + if timecnt: + self._trans_idx = struct.unpack(">%dB" % timecnt, + fileobj.read(timecnt)) + else: + self._trans_idx = [] + + # Each ttinfo structure is written as a four-byte value + # for tt_gmtoff of type long, in a standard byte + # order, followed by a one-byte value for tt_isdst + # and a one-byte value for tt_abbrind. In each + # structure, tt_gmtoff gives the number of + # seconds to be added to UTC, tt_isdst tells whether + # tm_isdst should be set by localtime(3), and + # tt_abbrind serves as an index into the array of + # time zone abbreviation characters that follow the + # ttinfo structure(s) in the file. + + ttinfo = [] + + for i in range(typecnt): + ttinfo.append(struct.unpack(">lbb", fileobj.read(6))) + + abbr = fileobj.read(charcnt).decode() + + # Then there are tzh_leapcnt pairs of four-byte + # values, written in standard byte order; the + # first value of each pair gives the time (as + # returned by time(2)) at which a leap second + # occurs; the second gives the total number of + # leap seconds to be applied after the given time. + # The pairs of values are sorted in ascending order + # by time. + + # Not used, for now + if leapcnt: + leap = struct.unpack(">%dl" % (leapcnt*2), + fileobj.read(leapcnt*8)) + + # Then there are tzh_ttisstdcnt standard/wall + # indicators, each stored as a one-byte value; + # they tell whether the transition times associated + # with local time types were specified as standard + # time or wall clock time, and are used when + # a time zone file is used in handling POSIX-style + # time zone environment variables. + + if ttisstdcnt: + isstd = struct.unpack(">%db" % ttisstdcnt, + fileobj.read(ttisstdcnt)) + + # Finally, there are tzh_ttisgmtcnt UTC/local + # indicators, each stored as a one-byte value; + # they tell whether the transition times associated + # with local time types were specified as UTC or + # local time, and are used when a time zone file + # is used in handling POSIX-style time zone envi- + # ronment variables. + + if ttisgmtcnt: + isgmt = struct.unpack(">%db" % ttisgmtcnt, + fileobj.read(ttisgmtcnt)) + + # ** Everything has been read ** + + # Build ttinfo list + self._ttinfo_list = [] + for i in range(typecnt): + gmtoff, isdst, abbrind = ttinfo[i] + # Round to full-minutes if that's not the case. Python's + # datetime doesn't accept sub-minute timezones. Check + # http://python.org/sf/1447945 for some information. 
+ gmtoff = (gmtoff+30)//60*60 + tti = _ttinfo() + tti.offset = gmtoff + tti.delta = datetime.timedelta(seconds=gmtoff) + tti.isdst = isdst + tti.abbr = abbr[abbrind:abbr.find('\x00', abbrind)] + tti.isstd = (ttisstdcnt > i and isstd[i] != 0) + tti.isgmt = (ttisgmtcnt > i and isgmt[i] != 0) + self._ttinfo_list.append(tti) + + # Replace ttinfo indexes for ttinfo objects. + trans_idx = [] + for idx in self._trans_idx: + trans_idx.append(self._ttinfo_list[idx]) + self._trans_idx = tuple(trans_idx) + + # Set standard, dst, and before ttinfos. before will be + # used when a given time is before any transitions, + # and will be set to the first non-dst ttinfo, or to + # the first dst, if all of them are dst. + self._ttinfo_std = None + self._ttinfo_dst = None + self._ttinfo_before = None + if self._ttinfo_list: + if not self._trans_list: + self._ttinfo_std = self._ttinfo_first = self._ttinfo_list[0] + else: + for i in range(timecnt-1, -1, -1): + tti = self._trans_idx[i] + if not self._ttinfo_std and not tti.isdst: + self._ttinfo_std = tti + elif not self._ttinfo_dst and tti.isdst: + self._ttinfo_dst = tti + if self._ttinfo_std and self._ttinfo_dst: + break + else: + if self._ttinfo_dst and not self._ttinfo_std: + self._ttinfo_std = self._ttinfo_dst + + for tti in self._ttinfo_list: + if not tti.isdst: + self._ttinfo_before = tti + break + else: + self._ttinfo_before = self._ttinfo_list[0] + + # Now fix transition times to become relative to wall time. + # + # I'm not sure about this. In my tests, the tz source file + # is setup to wall time, and in the binary file isstd and + # isgmt are off, so it should be in wall time. OTOH, it's + # always in gmt time. Let me know if you have comments + # about this. + laststdoffset = 0 + self._trans_list = list(self._trans_list) + for i in range(len(self._trans_list)): + tti = self._trans_idx[i] + if not tti.isdst: + # This is std time. + self._trans_list[i] += tti.offset + laststdoffset = tti.offset + else: + # This is dst time. Convert to std. + self._trans_list[i] += laststdoffset + self._trans_list = tuple(self._trans_list) + + def _find_ttinfo(self, dt, laststd=0): + timestamp = ((dt.toordinal() - EPOCHORDINAL) * 86400 + + dt.hour * 3600 + + dt.minute * 60 + + dt.second) + idx = 0 + for trans in self._trans_list: + if timestamp < trans: + break + idx += 1 + else: + return self._ttinfo_std + if idx == 0: + return self._ttinfo_before + if laststd: + while idx > 0: + tti = self._trans_idx[idx-1] + if not tti.isdst: + return tti + idx -= 1 + else: + return self._ttinfo_std + else: + return self._trans_idx[idx-1] + + def utcoffset(self, dt): + if not self._ttinfo_std: + return ZERO + return self._find_ttinfo(dt).delta + + def dst(self, dt): + if not self._ttinfo_dst: + return ZERO + tti = self._find_ttinfo(dt) + if not tti.isdst: + return ZERO + + # The documentation says that utcoffset()-dst() must + # be constant for every dt. + return tti.delta-self._find_ttinfo(dt, laststd=1).delta + + # An alternative for that would be: + # + # return self._ttinfo_dst.offset-self._ttinfo_std.offset + # + # However, this class stores historical changes in the + # dst offset, so I belive that this wouldn't be the right + # way to implement this. 
+ + @tzname_in_python2 + def tzname(self, dt): + if not self._ttinfo_std: + return None + return self._find_ttinfo(dt).abbr + + def __eq__(self, other): + if not isinstance(other, tzfile): + return False + return (self._trans_list == other._trans_list and + self._trans_idx == other._trans_idx and + self._ttinfo_list == other._ttinfo_list) + + def __ne__(self, other): + return not self.__eq__(other) + + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, repr(self._filename)) + + def __reduce__(self): + if not os.path.isfile(self._filename): + raise ValueError("Unpickable %s class" % self.__class__.__name__) + return (self.__class__, (self._filename,)) + +class tzrange(datetime.tzinfo): + + def __init__(self, stdabbr, stdoffset=None, + dstabbr=None, dstoffset=None, + start=None, end=None): + global relativedelta + if not relativedelta: + from dateutil import relativedelta + self._std_abbr = stdabbr + self._dst_abbr = dstabbr + if stdoffset is not None: + self._std_offset = datetime.timedelta(seconds=stdoffset) + else: + self._std_offset = ZERO + if dstoffset is not None: + self._dst_offset = datetime.timedelta(seconds=dstoffset) + elif dstabbr and stdoffset is not None: + self._dst_offset = self._std_offset+datetime.timedelta(hours=+1) + else: + self._dst_offset = ZERO + if dstabbr and start is None: + self._start_delta = relativedelta.relativedelta( + hours=+2, month=4, day=1, weekday=relativedelta.SU(+1)) + else: + self._start_delta = start + if dstabbr and end is None: + self._end_delta = relativedelta.relativedelta( + hours=+1, month=10, day=31, weekday=relativedelta.SU(-1)) + else: + self._end_delta = end + + def utcoffset(self, dt): + if self._isdst(dt): + return self._dst_offset + else: + return self._std_offset + + def dst(self, dt): + if self._isdst(dt): + return self._dst_offset-self._std_offset + else: + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + if self._isdst(dt): + return self._dst_abbr + else: + return self._std_abbr + + def _isdst(self, dt): + if not self._start_delta: + return False + year = datetime.datetime(dt.year, 1, 1) + start = year+self._start_delta + end = year+self._end_delta + dt = dt.replace(tzinfo=None) + if start < end: + return dt >= start and dt < end + else: + return dt >= start or dt < end + + def __eq__(self, other): + if not isinstance(other, tzrange): + return False + return (self._std_abbr == other._std_abbr and + self._dst_abbr == other._dst_abbr and + self._std_offset == other._std_offset and + self._dst_offset == other._dst_offset and + self._start_delta == other._start_delta and + self._end_delta == other._end_delta) + + def __ne__(self, other): + return not self.__eq__(other) + + def __repr__(self): + return "%s(...)" % self.__class__.__name__ + + __reduce__ = object.__reduce__ + +class tzstr(tzrange): + + def __init__(self, s): + global parser + if not parser: + from dateutil import parser + self._s = s + + res = parser._parsetz(s) + if res is None: + raise ValueError("unknown string format") + + # Here we break the compatibility with the TZ variable handling. + # GMT-3 actually *means* the timezone -3. + if res.stdabbr in ("GMT", "UTC"): + res.stdoffset *= -1 + + # We must initialize it first, since _delta() needs + # _std_offset and _dst_offset set. Use False in start/end + # to avoid building it two times. 
+ tzrange.__init__(self, res.stdabbr, res.stdoffset, + res.dstabbr, res.dstoffset, + start=False, end=False) + + if not res.dstabbr: + self._start_delta = None + self._end_delta = None + else: + self._start_delta = self._delta(res.start) + if self._start_delta: + self._end_delta = self._delta(res.end, isend=1) + + def _delta(self, x, isend=0): + kwargs = {} + if x.month is not None: + kwargs["month"] = x.month + if x.weekday is not None: + kwargs["weekday"] = relativedelta.weekday(x.weekday, x.week) + if x.week > 0: + kwargs["day"] = 1 + else: + kwargs["day"] = 31 + elif x.day: + kwargs["day"] = x.day + elif x.yday is not None: + kwargs["yearday"] = x.yday + elif x.jyday is not None: + kwargs["nlyearday"] = x.jyday + if not kwargs: + # Default is to start on first sunday of april, and end + # on last sunday of october. + if not isend: + kwargs["month"] = 4 + kwargs["day"] = 1 + kwargs["weekday"] = relativedelta.SU(+1) + else: + kwargs["month"] = 10 + kwargs["day"] = 31 + kwargs["weekday"] = relativedelta.SU(-1) + if x.time is not None: + kwargs["seconds"] = x.time + else: + # Default is 2AM. + kwargs["seconds"] = 7200 + if isend: + # Convert to standard time, to follow the documented way + # of working with the extra hour. See the documentation + # of the tzinfo class. + delta = self._dst_offset-self._std_offset + kwargs["seconds"] -= delta.seconds+delta.days*86400 + return relativedelta.relativedelta(**kwargs) + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, repr(self._s)) + +class _tzicalvtzcomp(object): + def __init__(self, tzoffsetfrom, tzoffsetto, isdst, + tzname=None, rrule=None): + self.tzoffsetfrom = datetime.timedelta(seconds=tzoffsetfrom) + self.tzoffsetto = datetime.timedelta(seconds=tzoffsetto) + self.tzoffsetdiff = self.tzoffsetto-self.tzoffsetfrom + self.isdst = isdst + self.tzname = tzname + self.rrule = rrule + +class _tzicalvtz(datetime.tzinfo): + def __init__(self, tzid, comps=[]): + self._tzid = tzid + self._comps = comps + self._cachedate = [] + self._cachecomp = [] + + def _find_comp(self, dt): + if len(self._comps) == 1: + return self._comps[0] + dt = dt.replace(tzinfo=None) + try: + return self._cachecomp[self._cachedate.index(dt)] + except ValueError: + pass + lastcomp = None + lastcompdt = None + for comp in self._comps: + if not comp.isdst: + # Handle the extra hour in DST -> STD + compdt = comp.rrule.before(dt-comp.tzoffsetdiff, inc=True) + else: + compdt = comp.rrule.before(dt, inc=True) + if compdt and (not lastcompdt or lastcompdt < compdt): + lastcompdt = compdt + lastcomp = comp + if not lastcomp: + # RFC says nothing about what to do when a given + # time is before the first onset date. We'll look for the + # first standard component, or the first component, if + # none is found. 
+ for comp in self._comps: + if not comp.isdst: + lastcomp = comp + break + else: + lastcomp = comp[0] + self._cachedate.insert(0, dt) + self._cachecomp.insert(0, lastcomp) + if len(self._cachedate) > 10: + self._cachedate.pop() + self._cachecomp.pop() + return lastcomp + + def utcoffset(self, dt): + return self._find_comp(dt).tzoffsetto + + def dst(self, dt): + comp = self._find_comp(dt) + if comp.isdst: + return comp.tzoffsetdiff + else: + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return self._find_comp(dt).tzname + + def __repr__(self): + return "" % repr(self._tzid) + + __reduce__ = object.__reduce__ + +class tzical(object): + def __init__(self, fileobj): + global rrule + if not rrule: + from dateutil import rrule + + if isinstance(fileobj, string_types): + self._s = fileobj + fileobj = open(fileobj, 'r') # ical should be encoded in UTF-8 with CRLF + elif hasattr(fileobj, "name"): + self._s = fileobj.name + else: + self._s = repr(fileobj) + + self._vtz = {} + + self._parse_rfc(fileobj.read()) + + def keys(self): + return list(self._vtz.keys()) + + def get(self, tzid=None): + if tzid is None: + keys = list(self._vtz.keys()) + if len(keys) == 0: + raise ValueError("no timezones defined") + elif len(keys) > 1: + raise ValueError("more than one timezone available") + tzid = keys[0] + return self._vtz.get(tzid) + + def _parse_offset(self, s): + s = s.strip() + if not s: + raise ValueError("empty offset") + if s[0] in ('+', '-'): + signal = (-1, +1)[s[0]=='+'] + s = s[1:] + else: + signal = +1 + if len(s) == 4: + return (int(s[:2])*3600+int(s[2:])*60)*signal + elif len(s) == 6: + return (int(s[:2])*3600+int(s[2:4])*60+int(s[4:]))*signal + else: + raise ValueError("invalid offset: "+s) + + def _parse_rfc(self, s): + lines = s.splitlines() + if not lines: + raise ValueError("empty string") + + # Unfold + i = 0 + while i < len(lines): + line = lines[i].rstrip() + if not line: + del lines[i] + elif i > 0 and line[0] == " ": + lines[i-1] += line[1:] + del lines[i] + else: + i += 1 + + tzid = None + comps = [] + invtz = False + comptype = None + for line in lines: + if not line: + continue + name, value = line.split(':', 1) + parms = name.split(';') + if not parms: + raise ValueError("empty property name") + name = parms[0].upper() + parms = parms[1:] + if invtz: + if name == "BEGIN": + if value in ("STANDARD", "DAYLIGHT"): + # Process component + pass + else: + raise ValueError("unknown component: "+value) + comptype = value + founddtstart = False + tzoffsetfrom = None + tzoffsetto = None + rrulelines = [] + tzname = None + elif name == "END": + if value == "VTIMEZONE": + if comptype: + raise ValueError("component not closed: "+comptype) + if not tzid: + raise ValueError("mandatory TZID not found") + if not comps: + raise ValueError("at least one component is needed") + # Process vtimezone + self._vtz[tzid] = _tzicalvtz(tzid, comps) + invtz = False + elif value == comptype: + if not founddtstart: + raise ValueError("mandatory DTSTART not found") + if tzoffsetfrom is None: + raise ValueError("mandatory TZOFFSETFROM not found") + if tzoffsetto is None: + raise ValueError("mandatory TZOFFSETFROM not found") + # Process component + rr = None + if rrulelines: + rr = rrule.rrulestr("\n".join(rrulelines), + compatible=True, + ignoretz=True, + cache=True) + comp = _tzicalvtzcomp(tzoffsetfrom, tzoffsetto, + (comptype == "DAYLIGHT"), + tzname, rr) + comps.append(comp) + comptype = None + else: + raise ValueError("invalid component end: "+value) + elif comptype: + if name == "DTSTART": + 
rrulelines.append(line) + founddtstart = True + elif name in ("RRULE", "RDATE", "EXRULE", "EXDATE"): + rrulelines.append(line) + elif name == "TZOFFSETFROM": + if parms: + raise ValueError("unsupported %s parm: %s "%(name, parms[0])) + tzoffsetfrom = self._parse_offset(value) + elif name == "TZOFFSETTO": + if parms: + raise ValueError("unsupported TZOFFSETTO parm: "+parms[0]) + tzoffsetto = self._parse_offset(value) + elif name == "TZNAME": + if parms: + raise ValueError("unsupported TZNAME parm: "+parms[0]) + tzname = value + elif name == "COMMENT": + pass + else: + raise ValueError("unsupported property: "+name) + else: + if name == "TZID": + if parms: + raise ValueError("unsupported TZID parm: "+parms[0]) + tzid = value + elif name in ("TZURL", "LAST-MODIFIED", "COMMENT"): + pass + else: + raise ValueError("unsupported property: "+name) + elif name == "BEGIN" and value == "VTIMEZONE": + tzid = None + comps = [] + invtz = True + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, repr(self._s)) + +if sys.platform != "win32": + TZFILES = ["/etc/localtime", "localtime"] + TZPATHS = ["/usr/share/zoneinfo", "/usr/lib/zoneinfo", "/etc/zoneinfo"] +else: + TZFILES = [] + TZPATHS = [] + +def gettz(name=None): + tz = None + if not name: + try: + name = os.environ["TZ"] + except KeyError: + pass + if name is None or name == ":": + for filepath in TZFILES: + if not os.path.isabs(filepath): + filename = filepath + for path in TZPATHS: + filepath = os.path.join(path, filename) + if os.path.isfile(filepath): + break + else: + continue + if os.path.isfile(filepath): + try: + tz = tzfile(filepath) + break + except (IOError, OSError, ValueError): + pass + else: + tz = tzlocal() + else: + if name.startswith(":"): + name = name[:-1] + if os.path.isabs(name): + if os.path.isfile(name): + tz = tzfile(name) + else: + tz = None + else: + for path in TZPATHS: + filepath = os.path.join(path, name) + if not os.path.isfile(filepath): + filepath = filepath.replace(' ', '_') + if not os.path.isfile(filepath): + continue + try: + tz = tzfile(filepath) + break + except (IOError, OSError, ValueError): + pass + else: + tz = None + if tzwin: + try: + tz = tzwin(name) + except OSError: + pass + if not tz: + from dateutil.zoneinfo import gettz + tz = gettz(name) + if not tz: + for c in name: + # name must have at least one offset to be a tzstr + if c in "0123456789": + try: + tz = tzstr(name) + except ValueError: + pass + break + else: + if name in ("GMT", "UTC"): + tz = tzutc() + elif name in time.tzname: + tz = tzlocal() + return tz + +# vim:ts=4:sw=4:et diff --git a/awx/lib/site-packages/dateutil/tzwin.py b/awx/lib/site-packages/dateutil/tzwin.py new file mode 100644 index 0000000000..041c6cc3d6 --- /dev/null +++ b/awx/lib/site-packages/dateutil/tzwin.py @@ -0,0 +1,179 @@ +# This code was originally contributed by Jeffrey Harris. 
+import datetime +import struct +import winreg + + +__all__ = ["tzwin", "tzwinlocal"] + +ONEWEEK = datetime.timedelta(7) + +TZKEYNAMENT = r"SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones" +TZKEYNAME9X = r"SOFTWARE\Microsoft\Windows\CurrentVersion\Time Zones" +TZLOCALKEYNAME = r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation" + +def _settzkeyname(): + global TZKEYNAME + handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) + try: + winreg.OpenKey(handle, TZKEYNAMENT).Close() + TZKEYNAME = TZKEYNAMENT + except WindowsError: + TZKEYNAME = TZKEYNAME9X + handle.Close() + +_settzkeyname() + +class tzwinbase(datetime.tzinfo): + """tzinfo class based on win32's timezones available in the registry.""" + + def utcoffset(self, dt): + if self._isdst(dt): + return datetime.timedelta(minutes=self._dstoffset) + else: + return datetime.timedelta(minutes=self._stdoffset) + + def dst(self, dt): + if self._isdst(dt): + minutes = self._dstoffset - self._stdoffset + return datetime.timedelta(minutes=minutes) + else: + return datetime.timedelta(0) + + def tzname(self, dt): + if self._isdst(dt): + return self._dstname + else: + return self._stdname + + def list(): + """Return a list of all time zones known to the system.""" + handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) + tzkey = winreg.OpenKey(handle, TZKEYNAME) + result = [winreg.EnumKey(tzkey, i) + for i in range(winreg.QueryInfoKey(tzkey)[0])] + tzkey.Close() + handle.Close() + return result + list = staticmethod(list) + + def display(self): + return self._display + + def _isdst(self, dt): + dston = picknthweekday(dt.year, self._dstmonth, self._dstdayofweek, + self._dsthour, self._dstminute, + self._dstweeknumber) + dstoff = picknthweekday(dt.year, self._stdmonth, self._stddayofweek, + self._stdhour, self._stdminute, + self._stdweeknumber) + if dston < dstoff: + return dston <= dt.replace(tzinfo=None) < dstoff + else: + return not dstoff <= dt.replace(tzinfo=None) < dston + + +class tzwin(tzwinbase): + + def __init__(self, name): + self._name = name + + handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) + tzkey = winreg.OpenKey(handle, "%s\%s" % (TZKEYNAME, name)) + keydict = valuestodict(tzkey) + tzkey.Close() + handle.Close() + + self._stdname = keydict["Std"].encode("iso-8859-1") + self._dstname = keydict["Dlt"].encode("iso-8859-1") + + self._display = keydict["Display"] + + # See http://ww_winreg.jsiinc.com/SUBA/tip0300/rh0398.htm + tup = struct.unpack("=3l16h", keydict["TZI"]) + self._stdoffset = -tup[0]-tup[1] # Bias + StandardBias * -1 + self._dstoffset = self._stdoffset-tup[2] # + DaylightBias * -1 + + (self._stdmonth, + self._stddayofweek, # Sunday = 0 + self._stdweeknumber, # Last = 5 + self._stdhour, + self._stdminute) = tup[4:9] + + (self._dstmonth, + self._dstdayofweek, # Sunday = 0 + self._dstweeknumber, # Last = 5 + self._dsthour, + self._dstminute) = tup[12:17] + + def __repr__(self): + return "tzwin(%s)" % repr(self._name) + + def __reduce__(self): + return (self.__class__, (self._name,)) + + +class tzwinlocal(tzwinbase): + + def __init__(self): + + handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) + + tzlocalkey = winreg.OpenKey(handle, TZLOCALKEYNAME) + keydict = valuestodict(tzlocalkey) + tzlocalkey.Close() + + self._stdname = keydict["StandardName"].encode("iso-8859-1") + self._dstname = keydict["DaylightName"].encode("iso-8859-1") + + try: + tzkey = winreg.OpenKey(handle, "%s\%s"%(TZKEYNAME, self._stdname)) + _keydict = valuestodict(tzkey) + self._display = 
_keydict["Display"] + tzkey.Close() + except OSError: + self._display = None + + handle.Close() + + self._stdoffset = -keydict["Bias"]-keydict["StandardBias"] + self._dstoffset = self._stdoffset-keydict["DaylightBias"] + + + # See http://ww_winreg.jsiinc.com/SUBA/tip0300/rh0398.htm + tup = struct.unpack("=8h", keydict["StandardStart"]) + + (self._stdmonth, + self._stddayofweek, # Sunday = 0 + self._stdweeknumber, # Last = 5 + self._stdhour, + self._stdminute) = tup[1:6] + + tup = struct.unpack("=8h", keydict["DaylightStart"]) + + (self._dstmonth, + self._dstdayofweek, # Sunday = 0 + self._dstweeknumber, # Last = 5 + self._dsthour, + self._dstminute) = tup[1:6] + + def __reduce__(self): + return (self.__class__, ()) + +def picknthweekday(year, month, dayofweek, hour, minute, whichweek): + """dayofweek == 0 means Sunday, whichweek 5 means last instance""" + first = datetime.datetime(year, month, 1, hour, minute) + weekdayone = first.replace(day=((dayofweek-first.isoweekday())%7+1)) + for n in range(whichweek): + dt = weekdayone+(whichweek-n)*ONEWEEK + if dt.month == month: + return dt + +def valuestodict(key): + """Convert a registry key's values to a dictionary.""" + dict = {} + size = winreg.QueryInfoKey(key)[1] + for i in range(size): + data = winreg.EnumValue(key, i) + dict[data[0]] = data[1] + return dict diff --git a/awx/lib/site-packages/dateutil/zoneinfo/__init__.py b/awx/lib/site-packages/dateutil/zoneinfo/__init__.py new file mode 100644 index 0000000000..a1b34874ba --- /dev/null +++ b/awx/lib/site-packages/dateutil/zoneinfo/__init__.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +""" +Copyright (c) 2003-2005 Gustavo Niemeyer + +This module offers extensions to the standard Python +datetime module. +""" +from dateutil.tz import tzfile +from tarfile import TarFile +import os + +__author__ = "Tomi Pieviläinen " +__license__ = "Simplified BSD" + +__all__ = ["setcachesize", "gettz", "rebuild"] + +CACHE = [] +CACHESIZE = 10 + +class tzfile(tzfile): + def __reduce__(self): + return (gettz, (self._filename,)) + +def getzoneinfofile(): + filenames = sorted(os.listdir(os.path.join(os.path.dirname(__file__)))) + filenames.reverse() + for entry in filenames: + if entry.startswith("zoneinfo") and ".tar." 
in entry: + return os.path.join(os.path.dirname(__file__), entry) + return None + +ZONEINFOFILE = getzoneinfofile() + +del getzoneinfofile + +def setcachesize(size): + global CACHESIZE, CACHE + CACHESIZE = size + del CACHE[size:] + +def gettz(name): + tzinfo = None + if ZONEINFOFILE: + for cachedname, tzinfo in CACHE: + if cachedname == name: + break + else: + tf = TarFile.open(ZONEINFOFILE) + try: + zonefile = tf.extractfile(name) + except KeyError: + tzinfo = None + else: + tzinfo = tzfile(zonefile) + tf.close() + CACHE.insert(0, (name, tzinfo)) + del CACHE[CACHESIZE:] + return tzinfo + +def rebuild(filename, tag=None, format="gz"): + import tempfile, shutil + tmpdir = tempfile.mkdtemp() + zonedir = os.path.join(tmpdir, "zoneinfo") + moduledir = os.path.dirname(__file__) + if tag: tag = "-"+tag + targetname = "zoneinfo%s.tar.%s" % (tag, format) + try: + tf = TarFile.open(filename) + # The "backwards" zone file contains links to other files, so must be + # processed as last + for name in sorted(tf.getnames(), + key=lambda k: k != "backward" and k or "z"): + if not (name.endswith(".sh") or + name.endswith(".tab") or + name == "leapseconds"): + tf.extract(name, tmpdir) + filepath = os.path.join(tmpdir, name) + os.system("zic -d %s %s" % (zonedir, filepath)) + tf.close() + target = os.path.join(moduledir, targetname) + for entry in os.listdir(moduledir): + if entry.startswith("zoneinfo") and ".tar." in entry: + os.unlink(os.path.join(moduledir, entry)) + tf = TarFile.open(target, "w:%s" % format) + for entry in os.listdir(zonedir): + entrypath = os.path.join(zonedir, entry) + tf.add(entrypath, entry) + tf.close() + finally: + shutil.rmtree(tmpdir) diff --git a/awx/lib/site-packages/dateutil/zoneinfo/zoneinfo--latest.tar.gz b/awx/lib/site-packages/dateutil/zoneinfo/zoneinfo--latest.tar.gz new file mode 100644 index 0000000000..12eadffb09 Binary files /dev/null and b/awx/lib/site-packages/dateutil/zoneinfo/zoneinfo--latest.tar.gz differ diff --git a/awx/lib/site-packages/django_extensions/__init__.py b/awx/lib/site-packages/django_extensions/__init__.py new file mode 100644 index 0000000000..3efe475a76 --- /dev/null +++ b/awx/lib/site-packages/django_extensions/__init__.py @@ -0,0 +1,13 @@ + +VERSION = (1, 1, 1) + +# Dynamically calculate the version based on VERSION tuple +if len(VERSION) > 2 and VERSION[2] is not None: + if isinstance(VERSION[2], int): + str_version = "%s.%s.%s" % VERSION[:3] + else: + str_version = "%s.%s_%s" % VERSION[:3] +else: + str_version = "%s.%s" % VERSION[:2] + +__version__ = str_version diff --git a/awx/lib/site-packages/django_extensions/admin/__init__.py b/awx/lib/site-packages/django_extensions/admin/__init__.py new file mode 100644 index 0000000000..4df241460b --- /dev/null +++ b/awx/lib/site-packages/django_extensions/admin/__init__.py @@ -0,0 +1,145 @@ +# +# Autocomplete feature for admin panel +# +# Most of the code has been written by Jannis Leidel and was updated a bit +# for django_extensions. 
+# http://jannisleidel.com/2008/11/autocomplete-form-widget-foreignkey-model-fields/ +# +# to_string_function, Satchmo adaptation and some comments added by emes +# (Michal Salaban) +# + +import operator +from six.moves import reduce +from django.http import HttpResponse, HttpResponseNotFound +from django.db import models +from django.db.models.query import QuerySet +from django.utils.encoding import smart_str +from django.utils.translation import ugettext as _ +from django.utils.text import get_text_list +try: + from functools import update_wrapper + assert update_wrapper +except ImportError: + from django.utils.functional import update_wrapper + +from django_extensions.admin.widgets import ForeignKeySearchInput + +from django.conf import settings + +if 'reversion' in settings.INSTALLED_APPS: + from reversion.admin import VersionAdmin as ModelAdmin + assert ModelAdmin +else: + from django.contrib.admin import ModelAdmin + + +class ForeignKeyAutocompleteAdmin(ModelAdmin): + """Admin class for models using the autocomplete feature. + + There are two additional fields: + - related_search_fields: defines fields of managed model that + have to be represented by autocomplete input, together with + a list of target model fields that are searched for + input string, e.g.: + + related_search_fields = { + 'author': ('first_name', 'email'), + } + + - related_string_functions: contains optional functions which + take target model instance as only argument and return string + representation. By default __unicode__() method of target + object is used. + """ + + related_search_fields = {} + related_string_functions = {} + + def get_urls(self): + try: + from django.conf.urls import patterns, url + except ImportError: # django < 1.4 + from django.conf.urls.defaults import patterns, url + + def wrap(view): + def wrapper(*args, **kwargs): + return self.admin_site.admin_view(view)(*args, **kwargs) + return update_wrapper(wrapper, view) + + info = self.model._meta.app_label, self.model._meta.module_name + + urlpatterns = patterns('', url(r'foreignkey_autocomplete/$', wrap(self.foreignkey_autocomplete), name='%s_%s_autocomplete' % info)) + urlpatterns += super(ForeignKeyAutocompleteAdmin, self).get_urls() + return urlpatterns + + def foreignkey_autocomplete(self, request): + """ + Searches in the fields of the given related model and returns the + result as a simple string to be used by the jQuery Autocomplete plugin + """ + query = request.GET.get('q', None) + app_label = request.GET.get('app_label', None) + model_name = request.GET.get('model_name', None) + search_fields = request.GET.get('search_fields', None) + object_pk = request.GET.get('object_pk', None) + try: + to_string_function = self.related_string_functions[model_name] + except KeyError: + to_string_function = lambda x: x.__unicode__() + if search_fields and app_label and model_name and (query or object_pk): + def construct_search(field_name): + # use different lookup methods depending on the notation + if field_name.startswith('^'): + return "%s__istartswith" % field_name[1:] + elif field_name.startswith('='): + return "%s__iexact" % field_name[1:] + elif field_name.startswith('@'): + return "%s__search" % field_name[1:] + else: + return "%s__icontains" % field_name + model = models.get_model(app_label, model_name) + queryset = model._default_manager.all() + data = '' + if query: + for bit in query.split(): + or_queries = [models.Q(**{construct_search(smart_str(field_name)): smart_str(bit)}) for field_name in search_fields.split(',')] + other_qs 
= QuerySet(model) + other_qs.dup_select_related(queryset) + other_qs = other_qs.filter(reduce(operator.or_, or_queries)) + queryset = queryset & other_qs + data = ''.join([u'%s|%s\n' % ( + to_string_function(f), f.pk) for f in queryset]) + elif object_pk: + try: + obj = queryset.get(pk=object_pk) + except: + pass + else: + data = to_string_function(obj) + return HttpResponse(data) + return HttpResponseNotFound() + + def get_help_text(self, field_name, model_name): + searchable_fields = self.related_search_fields.get(field_name, None) + if searchable_fields: + help_kwargs = { + 'model_name': model_name, + 'field_list': get_text_list(searchable_fields, _('and')), + } + return _('Use the left field to do %(model_name)s lookups in the fields %(field_list)s.') % help_kwargs + return '' + + def formfield_for_dbfield(self, db_field, **kwargs): + """ + Overrides the default widget for Foreignkey fields if they are + specified in the related_search_fields class attribute. + """ + if (isinstance(db_field, models.ForeignKey) and db_field.name in self.related_search_fields): + model_name = db_field.rel.to._meta.object_name + help_text = self.get_help_text(db_field.name, model_name) + if kwargs.get('help_text'): + help_text = u'%s %s' % (kwargs['help_text'], help_text) + kwargs['widget'] = ForeignKeySearchInput(db_field.rel, self.related_search_fields[db_field.name]) + kwargs['help_text'] = help_text + return super(ForeignKeyAutocompleteAdmin, self).formfield_for_dbfield(db_field, **kwargs) diff --git a/awx/lib/site-packages/django_extensions/admin/widgets.py b/awx/lib/site-packages/django_extensions/admin/widgets.py new file mode 100644 index 0000000000..feefd406bc --- /dev/null +++ b/awx/lib/site-packages/django_extensions/admin/widgets.py @@ -0,0 +1,95 @@ +import django +from django import forms +from django.conf import settings +from django.contrib.admin.sites import site +from django.utils.safestring import mark_safe +if django.get_version() >= "1.4": + from django.utils.text import Truncator +else: + from django.utils.text import truncate_words +from django.template.loader import render_to_string +from django.contrib.admin.widgets import ForeignKeyRawIdWidget + + +class ForeignKeySearchInput(ForeignKeyRawIdWidget): + """ + A Widget for displaying ForeignKeys in an autocomplete search input + instead in a " ] || + + !tags.indexOf("", "" ] || + + tags.match(/^<(thead|tbody|tfoot|colg|cap)/) && + [ 1, "", "
" ] || + + !tags.indexOf("", "" ] || + + // matched above + (!tags.indexOf("", "" ] || + + !tags.indexOf("", "" ] || + + // IE can't serialize and diff --git a/awx/lib/site-packages/django_extensions/templatetags/__init__.py b/awx/lib/site-packages/django_extensions/templatetags/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/django_extensions/templatetags/highlighting.py b/awx/lib/site-packages/django_extensions/templatetags/highlighting.py new file mode 100644 index 0000000000..78f6dbcfab --- /dev/null +++ b/awx/lib/site-packages/django_extensions/templatetags/highlighting.py @@ -0,0 +1,91 @@ +""" +Similar to syntax_color.py but this is intended more for being able to +copy+paste actual code into your Django templates without needing to +escape or anything crazy. + +http://lobstertech.com/2008/aug/30/django_syntax_highlight_template_tag/ + +Example: + + {% load highlighting %} + + + +

+    <h2>check out this code</h2>
+
+    {% highlight 'python' 'Excerpt: blah.py' %}
+    def need_food(self):
+        print("Love is colder than &death&")
+    {% endhighlight %}
+
+"""
+
+from pygments import highlight as pyghighlight
+from pygments.lexers import get_lexer_by_name
+from pygments.formatters import HtmlFormatter
+from django import template
+from django.template import Template, Context, Node, Variable, TemplateSyntaxError
+from django.template.defaultfilters import stringfilter
+from django.utils.safestring import mark_safe
+
+register = template.Library()
+
+
+@register.filter
+@stringfilter
+def parse_template(value):
+    return mark_safe(Template(value).render(Context()))
+parse_template.is_safe = True
+
+
+class CodeNode(Node):
+    def __init__(self, language, nodelist, name=''):
+        self.language = Variable(language)
+        self.nodelist = nodelist
+        if name:
+            self.name = Variable(name)
+        else:
+            self.name = None
+
+    def render(self, context):
+        code = self.nodelist.render(context).strip()
+        lexer = get_lexer_by_name(self.language.resolve(context))
+        formatter = HtmlFormatter(linenos=False)
+        html = ""
+        if self.name:
+            name = self.name.resolve(context)
+            html = '<div class="predesc"><span>%s</span></div>' % (name)
+        return html + pyghighlight(code, lexer, formatter)
+
+
+@register.tag
+def highlight(parser, token):
+    """
+    Allows you to put a highlighted source code <pre> block in your code.
+    This takes two arguments, the language and a little explanation message
+    that will be generated before the code.  The second argument is optional.
+
+    Your code will be fed through pygments so you can use any language it
+    supports.
+
+    {% load highlighting %}
+    {% highlight 'python' 'Excerpt: blah.py' %}
+    def need_food(self):
+        print("Love is colder than death")
+    {% endhighlight %}
+    """
+    nodelist = parser.parse(('endhighlight',))
+    parser.delete_first_token()
+    bits = token.split_contents()[1:]
+    if len(bits) < 1:
+        raise TemplateSyntaxError("'highlight' statement requires an argument")
+    return CodeNode(bits[0], nodelist, *bits[1:])
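
Reviewer aid, not part of the patch: a minimal way to exercise the tag above end to end. A sketch assuming a configured Django project of this era with Pygments installed and 'django_extensions' in INSTALLED_APPS so the 'highlighting' tag library is discoverable; the excerpt label and code body are made up for illustration.

    # Sketch: render {% highlight %} through Django's template engine.
    from django.template import Context, Template

    t = Template(
        "{% load highlighting %}"
        "{% highlight 'python' 'Excerpt: demo.py' %}"
        "def need_food(self): pass"
        "{% endhighlight %}"
    )
    # Prints the name <div> (emitted when the second argument is present)
    # followed by the Pygments-generated HTML for the enclosed code.
    print(t.render(Context({})))
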
diff --git a/awx/lib/site-packages/django_extensions/templatetags/syntax_color.py b/awx/lib/site-packages/django_extensions/templatetags/syntax_color.py
new file mode 100644
index 0000000000..4c4c23dca3
--- /dev/null
+++ b/awx/lib/site-packages/django_extensions/templatetags/syntax_color.py
@@ -0,0 +1,97 @@
+r"""
+Template filter for rendering a string with syntax highlighting.
+It relies on Pygments to accomplish this.
+
+Some standard usage examples (from within Django templates).
+Coloring a string with the Python lexer:
+
+    {% load syntax_color %}
+    {{ code_string|colorize:"python" }}
+
+You may use any lexer in Pygments; the complete list
+can be found [on the Pygments website][1].
+
+[1]: http://pygments.org/docs/lexers/
+
+You may also have Pygments attempt to guess the correct lexer for
+a particular string. However, it may not be able to choose a lexer,
+in which case it will simply return the string unmodified. This is
+less efficient compared to specifying the lexer to use.
+
+    {{ code_string|colorize }}
+
+You may also render the syntax-highlighted text with line numbers.
+
+    {% load syntax_color %}
+    {{ some_code|colorize_table:"html+django" }}
+    {{ let_pygments_pick_for_this_code|colorize_table }}
+
+Please note that before you can load the ``syntax_color`` template filters
+you will need to add the ``django_extensions.utils`` application to the
+``INSTALLED_APPS`` setting in your project's ``settings.py`` file.
+"""
+
+__author__ = 'Will Larson <lethain@gmail.com>'
+
+
+from django import template
+from django.template.defaultfilters import stringfilter
+from django.utils.safestring import mark_safe
+from django.core.exceptions import ImproperlyConfigured
+
+try:
+    from pygments import highlight
+    from pygments.formatters import HtmlFormatter
+    from pygments.lexers import get_lexer_by_name, guess_lexer, ClassNotFound
+except ImportError:
+    raise ImproperlyConfigured(
+        "Please install 'pygments' library to use syntax_color.")
+
+register = template.Library()
+
+
+@register.simple_tag
+def pygments_css():
+    return HtmlFormatter().get_style_defs('.highlight')
+
+
+def generate_pygments_css(path=None):
+    if path is None:
+        import os
+        path = os.path.join(os.getcwd(), 'pygments.css')
+    f = open(path, 'w')
+    f.write(pygments_css())
+    f.close()
+
+
+def get_lexer(value, arg):
+    if arg is None:
+        return guess_lexer(value)
+    return get_lexer_by_name(arg)
+
+
+@register.filter(name='colorize')
+@stringfilter
+def colorize(value, arg=None):
+    try:
+        return mark_safe(highlight(value, get_lexer(value, arg), HtmlFormatter()))
+    except ClassNotFound:
+        return value
+
+
+@register.filter(name='colorize_table')
+@stringfilter
+def colorize_table(value, arg=None):
+    try:
+        return mark_safe(highlight(value, get_lexer(value, arg), HtmlFormatter(linenos='table')))
+    except ClassNotFound:
+        return value
+
+
+@register.filter(name='colorize_noclasses')
+@stringfilter
+def colorize_noclasses(value, arg=None):
+    try:
+        return mark_safe(highlight(value, get_lexer(value, arg), HtmlFormatter(noclasses=True)))
+    except ClassNotFound:
+        return value
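
Reviewer aid, not part of the patch: how these filters are typically wired up. A sketch assuming Pygments and a configured Django project; the stylesheet path and sample string are illustrative.

    # Sketch: write the Pygments stylesheet once, then colorize a string.
    from django.template import Context, Template

    from django_extensions.templatetags.syntax_color import generate_pygments_css

    generate_pygments_css('/tmp/pygments.css')  # illustrative output path

    t = Template("{% load syntax_color %}{{ code|colorize:'python' }}")
    print(t.render(Context({'code': "print('hello')"})))  # HTML from HtmlFormatter
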
diff --git a/awx/lib/site-packages/django_extensions/templatetags/truncate_letters.py b/awx/lib/site-packages/django_extensions/templatetags/truncate_letters.py
new file mode 100644
index 0000000000..b77a6b1fc8
--- /dev/null
+++ b/awx/lib/site-packages/django_extensions/templatetags/truncate_letters.py
@@ -0,0 +1,28 @@
+import django
+from django import template
+from django.template.defaultfilters import stringfilter
+
+register = template.Library()
+
+
+def truncateletters(value, arg):
+    """
+    Truncates a string after a certain number of letters
+
+    Argument: Number of letters to truncate after
+    """
+    from django_extensions.utils.text import truncate_letters
+    try:
+        length = int(arg)
+    except ValueError:  # invalid literal for int()
+        return value  # Fail silently
+    return truncate_letters(value, length)
+
+if django.get_version() >= "1.4":
+    truncateletters = stringfilter(truncateletters)
+    register.filter(truncateletters, is_safe=True)
+else:
+    truncateletters.is_safe = True
+    truncateletters = stringfilter(truncateletters)
+    register.filter(truncateletters)
+
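
Reviewer aid, not part of the patch: the filter's expected behavior, assuming the django_extensions.utils.text.truncate_letters helper this module imports is present (it is referenced by the bundled tests later in this patch).

    # Sketch: truncate after N letters; a non-integer argument fails silently.
    from django.template import Context, Template

    t = Template("{% load truncate_letters %}{{ text|truncateletters:5 }}")
    print(t.render(Context({'text': 'hello tests'})))  # -> hello...

    t = Template("{% load truncate_letters %}{{ text|truncateletters:'x' }}")
    print(t.render(Context({'text': 'hello tests'})))  # -> hello tests (unchanged)
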
diff --git a/awx/lib/site-packages/django_extensions/templatetags/widont.py b/awx/lib/site-packages/django_extensions/templatetags/widont.py
new file mode 100644
index 0000000000..687e5114ee
--- /dev/null
+++ b/awx/lib/site-packages/django_extensions/templatetags/widont.py
@@ -0,0 +1,61 @@
+from django.template import Library
+from django.utils.encoding import force_unicode
+import re
+
+register = Library()
+re_widont = re.compile(r'\s+(\S+\s*)$')
+re_widont_html = re.compile(r'([^<>\s])\s+([^<>\s]+\s*)(]*>|$)', re.IGNORECASE)
+
+
+def widont(value, count=1):
+    """
+    Adds an HTML non-breaking space between the final two words of the string to
+    avoid "widowed" words.
+
+    Examples:
+
+    >>> print(widont('Test   me   out'))
+    Test   me&nbsp;out
+
+    >>> widont('It works with trailing spaces too  ')
+    u'It works with trailing spaces&nbsp;too  '
+
+    >>> print(widont('NoEffect'))
+    NoEffect
+    """
+    def replace(matchobj):
+        return u'&nbsp;%s' % matchobj.group(1)
+    for i in range(count):
+        value = re_widont.sub(replace, force_unicode(value))
+    return value
+
+
+def widont_html(value):
+    """
+    Adds an HTML non-breaking space between the final two words at the end of
+    (and in sentences just outside of) block level tags to avoid "widowed"
+    words.
+
+    Examples:
+
+    >>> print(widont_html('

Here is a simple example

Single

')) +

Here is a simple example

Single

+ + >>> print(widont_html('

test me
out

Ok?

Not in a p

and this

')) +

test me
out

Ok?

Not in a p

and this

+ + >>> print(widont_html('leading text

test me out

trailing text')) + leading text

test me out

trailing text + """ + def replace(matchobj): + return u'%s %s%s' % matchobj.groups() + return re_widont_html.sub(replace, force_unicode(value)) + +register.filter(widont) +register.filter(widont_html) + +if __name__ == "__main__": + def _test(): + import doctest + doctest.testmod() + _test() diff --git a/awx/lib/site-packages/django_extensions/tests/__init__.py b/awx/lib/site-packages/django_extensions/tests/__init__.py new file mode 100644 index 0000000000..c251bab6b7 --- /dev/null +++ b/awx/lib/site-packages/django_extensions/tests/__init__.py @@ -0,0 +1,19 @@ +from django.db import models # NOQA +from django_extensions.tests.test_dumpscript import DumpScriptTests +from django_extensions.tests.utils import TruncateLetterTests +from django_extensions.tests.json_field import JsonFieldTest +from django_extensions.tests.uuid_field import UUIDFieldTest +from django_extensions.tests.fields import AutoSlugFieldTest +from django_extensions.tests.management_command import CommandTest, ShowTemplateTagsTests + + +__test_classes__ = [ + DumpScriptTests, JsonFieldTest, UUIDFieldTest, AutoSlugFieldTest, CommandTest, ShowTemplateTagsTests, TruncateLetterTests +] + +try: + from django_extensions.tests.encrypted_fields import EncryptedFieldsTestCase + from django_extensions.tests.models import Secret # NOQA + __test_classes__.append(EncryptedFieldsTestCase) +except ImportError: + pass diff --git a/awx/lib/site-packages/django_extensions/tests/encrypted_fields.py b/awx/lib/site-packages/django_extensions/tests/encrypted_fields.py new file mode 100644 index 0000000000..1451c34e3f --- /dev/null +++ b/awx/lib/site-packages/django_extensions/tests/encrypted_fields.py @@ -0,0 +1,74 @@ +from django.db import connection +from django.conf import settings +from django.core.management import call_command +from django.db.models import loading +from django.utils import unittest + +# Only perform encrypted fields tests if keyczar is present +# Resolves http://github.com/django-extensions/django-extensions/issues/#issue/17 +try: + from keyczar import keyczar, keyczart, keyinfo # NOQA + from django_extensions.tests.models import Secret + from django_extensions.db.fields.encrypted import EncryptedTextField, EncryptedCharField # NOQA + keyczar_active = hasattr(settings, "ENCRYPTED_FIELD_KEYS_DIR") +except ImportError: + keyczar_active = False + + +class EncryptedFieldsTestCase(unittest.TestCase): + + def __init__(self, *args, **kwargs): + if keyczar_active: + self.crypt = keyczar.Crypter.Read(settings.ENCRYPTED_FIELD_KEYS_DIR) + super(EncryptedFieldsTestCase, self).__init__(*args, **kwargs) + + def setUp(self): + self.old_installed_apps = settings.INSTALLED_APPS + settings.INSTALLED_APPS = list(settings.INSTALLED_APPS) + settings.INSTALLED_APPS.append('django_extensions.tests') + loading.cache.loaded = False + call_command('syncdb', verbosity=0) + + def tearDown(self): + settings.INSTALLED_APPS = self.old_installed_apps + + def testCharFieldCreate(self): + if not keyczar_active: + return + test_val = "Test Secret" + secret = Secret.objects.create(name=test_val) + cursor = connection.cursor() + query = "SELECT name FROM %s WHERE id = %d" % (Secret._meta.db_table, secret.id) + cursor.execute(query) + db_val, = cursor.fetchone() + decrypted_val = self.crypt.Decrypt(db_val[len(EncryptedCharField.prefix):]) + self.assertEqual(test_val, decrypted_val) + + def testCharFieldRead(self): + if not keyczar_active: + return + test_val = "Test Secret" + secret = Secret.objects.create(name=test_val) + retrieved_secret = 
Secret.objects.get(id=secret.id) + self.assertEqual(test_val, retrieved_secret.name) + + def testTextFieldCreate(self): + if not keyczar_active: + return + test_val = "Test Secret" + secret = Secret.objects.create(text=test_val) + cursor = connection.cursor() + query = "SELECT text FROM %s WHERE id = %d" % (Secret._meta.db_table, secret.id) + cursor.execute(query) + db_val, = cursor.fetchone() + decrypted_val = self.crypt.Decrypt(db_val[len(EncryptedCharField.prefix):]) + self.assertEqual(test_val, decrypted_val) + + def testTextFieldRead(self): + if not keyczar_active: + return + test_val = "Test Secret" + secret = Secret.objects.create(text=test_val) + retrieved_secret = Secret.objects.get(id=secret.id) + self.assertEqual(test_val, retrieved_secret.text) + diff --git a/awx/lib/site-packages/django_extensions/tests/fields.py b/awx/lib/site-packages/django_extensions/tests/fields.py new file mode 100644 index 0000000000..3edd360e79 --- /dev/null +++ b/awx/lib/site-packages/django_extensions/tests/fields.py @@ -0,0 +1,105 @@ +from django.conf import settings +from django.core.management import call_command +from django.db.models import loading +from django.db import models +from django.utils import unittest + +from django_extensions.db.fields import AutoSlugField + + +class SluggedTestModel(models.Model): + title = models.CharField(max_length=42) + slug = AutoSlugField(populate_from='title') + + +class ChildSluggedTestModel(SluggedTestModel): + pass + + +class AutoSlugFieldTest(unittest.TestCase): + def setUp(self): + self.old_installed_apps = settings.INSTALLED_APPS + settings.INSTALLED_APPS = list(settings.INSTALLED_APPS) + settings.INSTALLED_APPS.append('django_extensions.tests') + loading.cache.loaded = False + call_command('syncdb', verbosity=0) + + def tearDown(self): + SluggedTestModel.objects.all().delete() + settings.INSTALLED_APPS = self.old_installed_apps + + def testAutoCreateSlug(self): + m = SluggedTestModel(title='foo') + m.save() + self.assertEqual(m.slug, 'foo') + + def testAutoCreateNextSlug(self): + m = SluggedTestModel(title='foo') + m.save() + + m = SluggedTestModel(title='foo') + m.save() + self.assertEqual(m.slug, 'foo-2') + + def testAutoCreateSlugWithNumber(self): + m = SluggedTestModel(title='foo 2012') + m.save() + self.assertEqual(m.slug, 'foo-2012') + + def testAutoUpdateSlugWithNumber(self): + m = SluggedTestModel(title='foo 2012') + m.save() + m.save() + self.assertEqual(m.slug, 'foo-2012') + + def testUpdateSlug(self): + m = SluggedTestModel(title='foo') + m.save() + + # update m instance without using `save' + SluggedTestModel.objects.filter(pk=m.pk).update(slug='foo-2012') + # update m instance with new data from the db + m = SluggedTestModel.objects.get(pk=m.pk) + + self.assertEqual(m.slug, 'foo-2012') + + m.save() + self.assertEqual(m.slug, 'foo-2012') + + def testSimpleSlugSource(self): + m = SluggedTestModel(title='-foo') + m.save() + self.assertEqual(m.slug, 'foo') + + n = SluggedTestModel(title='-foo') + n.save() + self.assertEqual(n.slug, 'foo-2') + + n.save() + self.assertEqual(n.slug, 'foo-2') + + def testEmptySlugSource(self): + # regression test + + m = SluggedTestModel(title='') + m.save() + self.assertEqual(m.slug, '-2') + + n = SluggedTestModel(title='') + n.save() + self.assertEqual(n.slug, '-3') + + n.save() + self.assertEqual(n.slug, '-3') + + def testInheritanceCreatesNextSlug(self): + m = SluggedTestModel(title='foo') + m.save() + + n = ChildSluggedTestModel(title='foo') + n.save() + self.assertEqual(n.slug, 'foo-2') + + o = 
SluggedTestModel(title='foo') + o.save() + self.assertEqual(o.slug, 'foo-3') diff --git a/awx/lib/site-packages/django_extensions/tests/json_field.py b/awx/lib/site-packages/django_extensions/tests/json_field.py new file mode 100644 index 0000000000..a73d1f6706 --- /dev/null +++ b/awx/lib/site-packages/django_extensions/tests/json_field.py @@ -0,0 +1,33 @@ +from django.conf import settings +from django.core.management import call_command +from django.db.models import loading +from django.db import models +from django.utils import unittest + +from django_extensions.db.fields.json import JSONField + + +class TestModel(models.Model): + a = models.IntegerField() + j_field = JSONField() + + +class JsonFieldTest(unittest.TestCase): + def setUp(self): + self.old_installed_apps = settings.INSTALLED_APPS + settings.INSTALLED_APPS = list(settings.INSTALLED_APPS) + settings.INSTALLED_APPS.append('django_extensions.tests') + loading.cache.loaded = False + call_command('syncdb', verbosity=0) + + def tearDown(self): + settings.INSTALLED_APPS = self.old_installed_apps + + def testCharFieldCreate(self): + j = TestModel.objects.create(a=6, j_field=dict(foo='bar')) + self.assertEquals(j.a, 6) + + def testEmptyList(self): + j = TestModel.objects.create(a=6, j_field=[]) + self.assertTrue(isinstance(j.j_field, list)) + self.assertEquals(j.j_field, []) diff --git a/awx/lib/site-packages/django_extensions/tests/management/__init__.py b/awx/lib/site-packages/django_extensions/tests/management/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/django_extensions/tests/management/commands/__init__.py b/awx/lib/site-packages/django_extensions/tests/management/commands/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/django_extensions/tests/management/commands/error_raising_command.py b/awx/lib/site-packages/django_extensions/tests/management/commands/error_raising_command.py new file mode 100644 index 0000000000..3bc646cf53 --- /dev/null +++ b/awx/lib/site-packages/django_extensions/tests/management/commands/error_raising_command.py @@ -0,0 +1,10 @@ + +from django_extensions.management.base import LoggingBaseCommand + + +class Command(LoggingBaseCommand): + help = 'Test error' + + def handle(self, *args, **options): + raise Exception("Test Error") + diff --git a/awx/lib/site-packages/django_extensions/tests/management_command.py b/awx/lib/site-packages/django_extensions/tests/management_command.py new file mode 100644 index 0000000000..86c6d38197 --- /dev/null +++ b/awx/lib/site-packages/django_extensions/tests/management_command.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +import logging + +try: + from cStringIO import StringIO # NOQA +except ImportError: + from io import StringIO # NOQA + +try: + import importlib # NOQA +except ImportError: + from django.utils import importlib # NOQA + +from django.core.management import call_command +from django.test import TestCase + + +class MockLoggingHandler(logging.Handler): + """ Mock logging handler to check for expected logs. 
""" + + def __init__(self, *args, **kwargs): + self.reset() + logging.Handler.__init__(self, *args, **kwargs) + + def emit(self, record): + self.messages[record.levelname.lower()].append(record.getMessage()) + + def reset(self): + self.messages = { + 'debug': [], + 'info': [], + 'warning': [], + 'error': [], + 'critical': [], + } + + +class CommandTest(TestCase): + def test_error_logging(self): + # Ensure command errors are properly logged and reraised + from django_extensions.management.base import logger + logger.addHandler(MockLoggingHandler()) + module_path = "django_extensions.tests.management.commands.error_raising_command" + module = importlib.import_module(module_path) + error_raising_command = module.Command() + self.assertRaises(Exception, error_raising_command.execute) + handler = logger.handlers[0] + self.assertEqual(len(handler.messages['error']), 1) + + +class ShowTemplateTagsTests(TestCase): + def test_some_output(self): + out = StringIO() + call_command('show_templatetags', stdout=out) + output = out.getvalue() + # Once django_extension is installed during tests it should appear with + # its templatetags + self.assertIn('django_extensions', output) + # let's check at least one + self.assertIn('truncate_letters', output) diff --git a/awx/lib/site-packages/django_extensions/tests/models.py b/awx/lib/site-packages/django_extensions/tests/models.py new file mode 100644 index 0000000000..f8deab6a10 --- /dev/null +++ b/awx/lib/site-packages/django_extensions/tests/models.py @@ -0,0 +1,36 @@ +from django.db import models +from django.conf import settings + +try: + from django_extensions.db.fields.encrypted import EncryptedTextField, EncryptedCharField + if not hasattr(settings, 'ENCRYPTED_FIELD_KEYS_DIR'): + raise ImportError +except ImportError: + class EncryptedCharField(object): + def __init__(self, *args, **kwargs): + pass + + class EncryptedTextField(object): + def __init__(self, *args, **kwargs): + pass + + +class Secret(models.Model): + name = EncryptedCharField("Name", blank=True, max_length=255) + text = EncryptedTextField("Text", blank=True) + + +class Name(models.Model): + name = models.CharField(max_length=50) + + +class Note(models.Model): + note = models.TextField() + + +class Person(models.Model): + name = models.ForeignKey(Name) + age = models.PositiveIntegerField() + children = models.ManyToManyField('self') + notes = models.ManyToManyField(Note) + diff --git a/awx/lib/site-packages/django_extensions/tests/test_dumpscript.py b/awx/lib/site-packages/django_extensions/tests/test_dumpscript.py new file mode 100644 index 0000000000..b25dae0537 --- /dev/null +++ b/awx/lib/site-packages/django_extensions/tests/test_dumpscript.py @@ -0,0 +1,95 @@ +import sys + +# conditional imports for python 3 +try: + import compiler # NOQA + from StringIO import StringIO # NOQA +except ImportError: + import ast as compiler # NOQA + from io import StringIO # NOQA +from django.test import TestCase + +from django.core.management import call_command +from django_extensions.tests.models import Name, Note, Person + +from django.conf import settings +from django.db.models import loading + + +class DumpScriptTests(TestCase): + def setUp(self): + self.real_stdout = sys.stdout + self.real_stderr = sys.stderr + sys.stdout = StringIO() + sys.stderr = StringIO() + + self.original_installed_apps = settings.INSTALLED_APPS + settings.INSTALLED_APPS = list(settings.INSTALLED_APPS) + settings.INSTALLED_APPS.append('django_extensions.tests') + loading.cache.loaded = False + call_command('syncdb', 
verbosity=0) + + def tearDown(self): + sys.stdout = self.real_stdout + sys.stderr = self.real_stderr + settings.INSTALLED_APPS.remove('django_extensions.tests') + settings.INSTALLED_APPS = self.original_installed_apps + loading.cache.loaded = False + + def test_runs(self): + # lame test...does it run? + n = Name(name='Gabriel') + n.save() + call_command('dumpscript', 'tests') + self.assertTrue('Gabriel' in sys.stdout.getvalue()) + + #---------------------------------------------------------------------- + def test_replaced_stdout(self): + # check if stdout can be replaced + sys.stdout = StringIO() + n = Name(name='Mike') + n.save() + tmp_out = StringIO() + call_command('dumpscript', 'tests', stdout=tmp_out) + self.assertTrue('Mike' in tmp_out.getvalue()) # script should go to tmp_out + self.assertEquals(0, len(sys.stdout.getvalue())) # there should not be any output to sys.stdout + tmp_out.close() + + #---------------------------------------------------------------------- + def test_replaced_stderr(self): + # check if stderr can be replaced, without changing stdout + n = Name(name='Fred') + n.save() + tmp_err = StringIO() + sys.stderr = StringIO() + call_command('dumpscript', 'tests', stderr=tmp_err) + self.assertTrue('Fred' in sys.stdout.getvalue()) # script should still go to stdout + self.assertTrue('Name' in tmp_err.getvalue()) # error output should go to tmp_err + self.assertEquals(0, len(sys.stderr.getvalue())) # there should not be any output to sys.stderr + tmp_err.close() + + #---------------------------------------------------------------------- + def test_valid_syntax(self): + n1 = Name(name='John') + n1.save() + p1 = Person(name=n1, age=40) + p1.save() + n2 = Name(name='Jane') + n2.save() + p2 = Person(name=n2, age=18) + p2.save() + p2.children.add(p1) + note1 = Note(note="This is the first note.") + note1.save() + note2 = Note(note="This is the second note.") + note2.save() + p2.notes.add(note1, note2) + tmp_out = StringIO() + call_command('dumpscript', 'tests', stdout=tmp_out) + ast_syntax_tree = compiler.parse(tmp_out.getvalue()) + if hasattr(ast_syntax_tree, 'body'): + self.assertTrue(len(ast_syntax_tree.body) > 1) + else: + self.assertTrue(len(ast_syntax_tree.asList()) > 1) + tmp_out.close() + diff --git a/awx/lib/site-packages/django_extensions/tests/urls.py b/awx/lib/site-packages/django_extensions/tests/urls.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/django_extensions/tests/utils.py b/awx/lib/site-packages/django_extensions/tests/utils.py new file mode 100644 index 0000000000..c91989f9f4 --- /dev/null +++ b/awx/lib/site-packages/django_extensions/tests/utils.py @@ -0,0 +1,75 @@ +# -*- coding: utf-8 -*- +import sys + +from django.test import TestCase +from django.utils.unittest import skipIf + +from django_extensions.utils.text import truncate_letters +try: + import uuid + assert uuid +except ImportError: + from django_extensions.utils import uuid + + +class TruncateLetterTests(TestCase): + def test_truncate_more_than_text_length(self): + self.assertEquals(u"hello tests", truncate_letters("hello tests", 100)) + + def test_truncate_text(self): + self.assertEquals(u"hello...", truncate_letters("hello tests", 5)) + + def test_truncate_with_range(self): + for i in range(10, -1, -1): + self.assertEqual( + u'hello tests'[:i] + '...', + truncate_letters("hello tests", i) + ) + + def test_with_non_ascii_characters(self): + self.assertEquals( + u'\u5ce0 (\u3068\u3046\u3052 t\u014dg...', + truncate_letters("峠 (とうげ tōge - mountain 
pass)", 10) + ) + + +class UUIDTests(TestCase): + @skipIf(sys.version_info >= (2, 5, 0), 'uuid already in stdlib') + def test_uuid3(self): + # make a UUID using an MD5 hash of a namespace UUID and a name + self.assertEquals( + uuid.UUID('6fa459ea-ee8a-3ca4-894e-db77e160355e'), + uuid.uuid3(uuid.NAMESPACE_DNS, 'python.org') + ) + + @skipIf(sys.version_info >= (2, 5, 0), 'uuid already in stdlib') + def test_uuid5(self): + # make a UUID using a SHA-1 hash of a namespace UUID and a name + self.assertEquals( + uuid.UUID('886313e1-3b8a-5372-9b90-0c9aee199e5d'), + uuid.uuid5(uuid.NAMESPACE_DNS, 'python.org') + ) + + @skipIf(sys.version_info >= (2, 5, 0), 'uuid already in stdlib') + def test_uuid_str(self): + # make a UUID from a string of hex digits (braces and hyphens ignored) + x = uuid.UUID('{00010203-0405-0607-0809-0a0b0c0d0e0f}') + # convert a UUID to a string of hex digits in standard form + self.assertEquals('00010203-0405-0607-0809-0a0b0c0d0e0f', str(x)) + + @skipIf(sys.version_info >= (2, 5, 0), 'uuid already in stdlib') + def test_uuid_bytes(self): + # make a UUID from a string of hex digits (braces and hyphens ignored) + x = uuid.UUID('{00010203-0405-0607-0809-0a0b0c0d0e0f}') + # get the raw 16 bytes of the UUID + self.assertEquals( + '\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\t\\n\\x0b\\x0c\\r\\x0e\\x0f', + x.bytes + ) + + @skipIf(sys.version_info >= (2, 5, 0), 'uuid already in stdlib') + def test_make_uuid_from_byte_string(self): + self.assertEquals( + uuid.UUID(bytes='\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\t\\n\\x0b\\x0c\\r\\x0e\\x0f'), + uuid.UUID('00010203-0405-0607-0809-0a0b0c0d0e0f') + ) diff --git a/awx/lib/site-packages/django_extensions/tests/uuid_field.py b/awx/lib/site-packages/django_extensions/tests/uuid_field.py new file mode 100644 index 0000000000..43416e5734 --- /dev/null +++ b/awx/lib/site-packages/django_extensions/tests/uuid_field.py @@ -0,0 +1,55 @@ +from django.conf import settings +from django.core.management import call_command +from django.db.models import loading +from django.db import models +from django.utils import unittest + +from django_extensions.db.fields import UUIDField + + +class TestModel_field(models.Model): + a = models.IntegerField() + uuid_field = UUIDField() + + +class TestModel_pk(models.Model): + uuid_field = UUIDField(primary_key=True) + + +class TestAgregateModel(TestModel_pk): + a = models.IntegerField() + + +class TestManyToManyModel(TestModel_pk): + many = models.ManyToManyField(TestModel_field) + + +class UUIDFieldTest(unittest.TestCase): + def setUp(self): + self.old_installed_apps = settings.INSTALLED_APPS + settings.INSTALLED_APPS = list(settings.INSTALLED_APPS) + settings.INSTALLED_APPS.append('django_extensions.tests') + loading.cache.loaded = False + call_command('syncdb', verbosity=0) + + def tearDown(self): + settings.INSTALLED_APPS = self.old_installed_apps + + def testUUIDFieldCreate(self): + j = TestModel_field.objects.create(a=6, uuid_field=u'550e8400-e29b-41d4-a716-446655440000') + self.assertEquals(j.uuid_field, u'550e8400-e29b-41d4-a716-446655440000') + + def testUUIDField_pkCreate(self): + j = TestModel_pk.objects.create(uuid_field=u'550e8400-e29b-41d4-a716-446655440000') + self.assertEquals(j.uuid_field, u'550e8400-e29b-41d4-a716-446655440000') + self.assertEquals(j.pk, u'550e8400-e29b-41d4-a716-446655440000') + + def testUUIDField_pkAgregateCreate(self): + j = TestAgregateModel.objects.create(a=6) + self.assertEquals(j.a, 6) + + def testUUIDFieldManyToManyCreate(self): + j = 
TestManyToManyModel.objects.create(uuid_field=u'550e8400-e29b-41d4-a716-446655440010') + self.assertEquals(j.uuid_field, u'550e8400-e29b-41d4-a716-446655440010') + self.assertEquals(j.pk, u'550e8400-e29b-41d4-a716-446655440010') + diff --git a/awx/lib/site-packages/django_extensions/utils/__init__.py b/awx/lib/site-packages/django_extensions/utils/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/django_extensions/utils/dia2django.py b/awx/lib/site-packages/django_extensions/utils/dia2django.py new file mode 100644 index 0000000000..792529b10a --- /dev/null +++ b/awx/lib/site-packages/django_extensions/utils/dia2django.py @@ -0,0 +1,214 @@ +#!/usr/bin/python +# -*- coding: UTF-8 -*- +##Author Igor Támara igor@tamarapatino.org +##Use this little program as you wish, if you +#include it in your work, let others know you +#are using it preserving this note, you have +#the right to make derivative works, Use it +#at your own risk. +#Tested to work on(etch testing 13-08-2007): +# Python 2.4.4 (#2, Jul 17 2007, 11:56:54) +# [GCC 4.1.3 20070629 (prerelease) (Debian 4.1.2-13)] on linux2 + +dependclasses = ["User", "Group", "Permission", "Message"] + +import codecs +import sys +import gzip +from xml.dom.minidom import * # NOQA +import re + +#Type dictionary translation types SQL -> Django +tsd = { + "text": "TextField", + "date": "DateField", + "varchar": "CharField", + "int": "IntegerField", + "float": "FloatField", + "serial": "AutoField", + "boolean": "BooleanField", + "numeric": "FloatField", + "timestamp": "DateTimeField", + "bigint": "IntegerField", + "datetime": "DateTimeField", + "date": "DateField", + "time": "TimeField", + "bool": "BooleanField", + "int": "IntegerField", +} + +#convert varchar -> CharField +v2c = re.compile('varchar\((\d+)\)') + + +def index(fks, id): + """Looks for the id on fks, fks is an array of arrays, each array has on [1] + the id of the class in a dia diagram. When not present returns None, else + it returns the position of the class with id on fks""" + for i, j in fks.items(): + if fks[i][1] == id: + return i + return None + + +def addparentstofks(rels, fks): + """Gets a list of relations, between parents and sons and a dict of + clases named in dia, and modifies the fks to add the parent as fk to get + order on the output of classes and replaces the base class of the son, to + put the class parent name. 
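+    (Added note: each value in fks is a list of [related class names, dia object id, generated model source, reference count], as built by dia2django below.)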
+ """ + for j in rels: + son = index(fks, j[1]) + parent = index(fks, j[0]) + fks[son][2] = fks[son][2].replace("models.Model", parent) + if parent not in fks[son][0]: + fks[son][0].append(parent) + + +def dia2django(archivo): + models_txt = '' + f = codecs.open(archivo, "rb") + #dia files are gzipped + data = gzip.GzipFile(fileobj=f).read() + ppal = parseString(data) + #diagram -> layer -> object -> UML - Class -> name, (attribs : composite -> name,type) + datos = ppal.getElementsByTagName("dia:diagram")[0].getElementsByTagName("dia:layer")[0].getElementsByTagName("dia:object") + clases = {} + herit = [] + imports = u"" + for i in datos: + #Look for the classes + if i.getAttribute("type") == "UML - Class": + myid = i.getAttribute("id") + for j in i.childNodes: + if j.nodeType == Node.ELEMENT_NODE and j.hasAttributes(): + if j.getAttribute("name") == "name": + actclas = j.getElementsByTagName("dia:string")[0].childNodes[0].data[1:-1] + myname = "\nclass %s(models.Model) :\n" % actclas + clases[actclas] = [[], myid, myname, 0] + if j.getAttribute("name") == "attributes": + for l in j.getElementsByTagName("dia:composite"): + if l.getAttribute("type") == "umlattribute": + #Look for the attribute name and type + for k in l.getElementsByTagName("dia:attribute"): + if k.getAttribute("name") == "name": + nc = k.getElementsByTagName("dia:string")[0].childNodes[0].data[1:-1] + elif k.getAttribute("name") == "type": + tc = k.getElementsByTagName("dia:string")[0].childNodes[0].data[1:-1] + elif k.getAttribute("name") == "value": + val = k.getElementsByTagName("dia:string")[0].childNodes[0].data[1:-1] + if val == '##': + val = '' + elif k.getAttribute("name") == "visibility" and k.getElementsByTagName("dia:enum")[0].getAttribute("val") == "2": + if tc.replace(" ", "").lower().startswith("manytomanyfield("): + #If we find a class not in our model that is marked as being to another model + newc = tc.replace(" ", "")[16:-1] + if dependclasses.count(newc) == 0: + dependclasses.append(newc) + if tc.replace(" ", "").lower().startswith("foreignkey("): + #If we find a class not in our model that is marked as being to another model + newc = tc.replace(" ", "")[11:-1] + if dependclasses.count(newc) == 0: + dependclasses.append(newc) + + #Mapping SQL types to Django + varch = v2c.search(tc) + if tc.replace(" ", "").startswith("ManyToManyField("): + myfor = tc.replace(" ", "")[16:-1] + if actclas == myfor: + #In case of a recursive type, we use 'self' + tc = tc.replace(myfor, "'self'") + elif clases[actclas][0].count(myfor) == 0: + #Adding related class + if myfor not in dependclasses: + #In case we are using Auth classes or external via protected dia visibility + clases[actclas][0].append(myfor) + tc = "models." + tc + if len(val) > 0: + tc = tc.replace(")", "," + val + ")") + elif tc.find("Field") != -1: + if tc.count("()") > 0 and len(val) > 0: + tc = "models.%s" % tc.replace(")", "," + val + ")") + else: + tc = "models.%s(%s)" % (tc, val) + elif tc.replace(" ", "").startswith("ForeignKey("): + myfor = tc.replace(" ", "")[11:-1] + if actclas == myfor: + #In case of a recursive type, we use 'self' + tc = tc.replace(myfor, "'self'") + elif clases[actclas][0].count(myfor) == 0: + #Adding foreign classes + if myfor not in dependclasses: + #In case we are using Auth classes + clases[actclas][0].append(myfor) + tc = "models." + tc + if len(val) > 0: + tc = tc.replace(")", "," + val + ")") + elif varch is None: + tc = "models." 
+ tsd[tc.strip().lower()] + "(" + val + ")" + else: + tc = "models.CharField(max_length=" + varch.group(1) + ")" + if len(val) > 0: + tc = tc.replace(")", ", " + val + " )") + if not (nc == "id" and tc == "AutoField()"): + clases[actclas][2] = clases[actclas][2] + (" %s = %s\n" % (nc, tc)) + elif i.getAttribute("type") == "UML - Generalization": + mycons = ['A', 'A'] + a = i.getElementsByTagName("dia:connection") + for j in a: + if len(j.getAttribute("to")): + mycons[int(j.getAttribute("handle"))] = j.getAttribute("to") + print(mycons) + if not 'A' in mycons: + herit.append(mycons) + elif i.getAttribute("type") == "UML - SmallPackage": + a = i.getElementsByTagName("dia:string") + for j in a: + if len(j.childNodes[0].data[1:-1]): + imports += u"from %s.models import *" % j.childNodes[0].data[1:-1] + + addparentstofks(herit, clases) + #Ordering the appearance of classes + #First we make a list of the classes each classs is related to. + ordered = [] + for j, k in clases.iteritems(): + k[2] = k[2] + "\n def __unicode__(self):\n return u\"\"\n" + for fk in k[0]: + if fk not in dependclasses: + clases[fk][3] += 1 + ordered.append([j] + k) + + i = 0 + while i < len(ordered): + mark = i + j = i + 1 + while j < len(ordered): + if ordered[i][0] in ordered[j][1]: + mark = j + j += 1 + if mark == i: + i += 1 + else: + # swap %s in %s" % ( ordered[i] , ordered[mark]) to make ordered[i] to be at the end + if ordered[i][0] in ordered[mark][1] and ordered[mark][0] in ordered[i][1]: + #Resolving simplistic circular ForeignKeys + print("Not able to resolve circular ForeignKeys between %s and %s" % (ordered[i][1], ordered[mark][0])) + break + a = ordered[i] + ordered[i] = ordered[mark] + ordered[mark] = a + if i == len(ordered) - 1: + break + ordered.reverse() + if imports: + models_txt = str(imports) + for i in ordered: + models_txt += '%s\n' % str(i[3]) + + return models_txt + +if __name__ == '__main__': + if len(sys.argv) == 2: + dia2django(sys.argv[1]) + else: + print(" Use:\n \n " + sys.argv[0] + " diagram.dia\n\n") diff --git a/awx/lib/site-packages/django_extensions/utils/text.py b/awx/lib/site-packages/django_extensions/utils/text.py new file mode 100644 index 0000000000..fd650cf4e5 --- /dev/null +++ b/awx/lib/site-packages/django_extensions/utils/text.py @@ -0,0 +1,23 @@ +import six + +from django.utils.functional import allow_lazy + +# conditional import, force_unicode was renamed in Django 1.5 +try: + from django.utils.encoding import force_unicode # NOQA +except ImportError: + from django.utils.encoding import force_text as force_unicode # NOQA + + +def truncate_letters(s, num): + """ + truncates a string to a number of letters, similar to truncate_words + """ + s = force_unicode(s) + length = int(num) + if len(s) > length: + s = s[:length] + if not s.endswith('...'): + s += '...' + return s +truncate_letters = allow_lazy(truncate_letters, six.text_type) diff --git a/awx/lib/site-packages/django_extensions/utils/uuid.py b/awx/lib/site-packages/django_extensions/utils/uuid.py new file mode 100644 index 0000000000..2684f22019 --- /dev/null +++ b/awx/lib/site-packages/django_extensions/utils/uuid.py @@ -0,0 +1,566 @@ +# flake8:noqa +r"""UUID objects (universally unique identifiers) according to RFC 4122. + +This module provides immutable UUID objects (class UUID) and the functions +uuid1(), uuid3(), uuid4(), uuid5() for generating version 1, 3, 4, and 5 +UUIDs as specified in RFC 4122. + +If all you want is a unique ID, you should probably call uuid1() or uuid4(). 
+Note that uuid1() may compromise privacy since it creates a UUID containing +the computer's network address. uuid4() creates a random UUID. + +Typical usage: + + >>> import uuid + + # make a UUID based on the host ID and current time + >>> uuid.uuid1() + UUID('a8098c1a-f86e-11da-bd1a-00112444be1e') + + # make a UUID using an MD5 hash of a namespace UUID and a name + >>> uuid.uuid3(uuid.NAMESPACE_DNS, 'python.org') + UUID('6fa459ea-ee8a-3ca4-894e-db77e160355e') + + # make a random UUID + >>> uuid.uuid4() + UUID('16fd2706-8baf-433b-82eb-8c7fada847da') + + # make a UUID using a SHA-1 hash of a namespace UUID and a name + >>> uuid.uuid5(uuid.NAMESPACE_DNS, 'python.org') + UUID('886313e1-3b8a-5372-9b90-0c9aee199e5d') + + # make a UUID from a string of hex digits (braces and hyphens ignored) + >>> x = uuid.UUID('{00010203-0405-0607-0809-0a0b0c0d0e0f}') + + # convert a UUID to a string of hex digits in standard form + >>> str(x) + '00010203-0405-0607-0809-0a0b0c0d0e0f' + + # get the raw 16 bytes of the UUID + >>> x.bytes + '\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f' + + # make a UUID from a 16-byte string + >>> uuid.UUID(bytes=x.bytes) + UUID('00010203-0405-0607-0809-0a0b0c0d0e0f') +""" + +__author__ = 'Ka-Ping Yee <ping@zesty.com>' + +RESERVED_NCS, RFC_4122, RESERVED_MICROSOFT, RESERVED_FUTURE = [ + 'reserved for NCS compatibility', 'specified in RFC 4122', + 'reserved for Microsoft compatibility', 'reserved for future definition' +] + + +class UUID(object): + """Instances of the UUID class represent UUIDs as specified in RFC 4122. + UUID objects are immutable, hashable, and usable as dictionary keys. + Converting a UUID to a string with str() yields something in the form + '12345678-1234-1234-1234-123456789abc'. The UUID constructor accepts + five possible forms: a similar string of hexadecimal digits, or a tuple + of six integer fields (with 32-bit, 16-bit, 16-bit, 8-bit, 8-bit, and + 48-bit values respectively) as an argument named 'fields', or a string + of 16 bytes (with all the integer fields in big-endian order) as an + argument named 'bytes', or a string of 16 bytes (with the first three + fields in little-endian order) as an argument named 'bytes_le', or a + single 128-bit integer as an argument named 'int'.
+ + UUIDs have these read-only attributes: + + bytes the UUID as a 16-byte string (containing the six + integer fields in big-endian byte order) + + bytes_le the UUID as a 16-byte string (with time_low, time_mid, + and time_hi_version in little-endian byte order) + + fields a tuple of the six integer fields of the UUID, + which are also available as six individual attributes + and two derived attributes: + + time_low the first 32 bits of the UUID + time_mid the next 16 bits of the UUID + time_hi_version the next 16 bits of the UUID + clock_seq_hi_variant the next 8 bits of the UUID + clock_seq_low the next 8 bits of the UUID + node the last 48 bits of the UUID + + time the 60-bit timestamp + clock_seq the 14-bit sequence number + + hex the UUID as a 32-character hexadecimal string + + int the UUID as a 128-bit integer + + urn the UUID as a URN as specified in RFC 4122 + + variant the UUID variant (one of the constants RESERVED_NCS, + RFC_4122, RESERVED_MICROSOFT, or RESERVED_FUTURE) + + version the UUID version number (1 through 5, meaningful only + when the variant is RFC_4122) + """ + + def __init__(self, hex=None, bytes=None, bytes_le=None, fields=None, int=None, version=None): + r"""Create a UUID from either a string of 32 hexadecimal digits, + a string of 16 bytes as the 'bytes' argument, a string of 16 bytes + in little-endian order as the 'bytes_le' argument, a tuple of six + integers (32-bit time_low, 16-bit time_mid, 16-bit time_hi_version, + 8-bit clock_seq_hi_variant, 8-bit clock_seq_low, 48-bit node) as + the 'fields' argument, or a single 128-bit integer as the 'int' + argument. When a string of hex digits is given, curly braces, + hyphens, and a URN prefix are all optional. For example, these + expressions all yield the same UUID: + + UUID('{12345678-1234-5678-1234-567812345678}') + UUID('12345678123456781234567812345678') + UUID('urn:uuid:12345678-1234-5678-1234-567812345678') + UUID(bytes='\x12\x34\x56\x78'*4) + UUID(bytes_le='\x78\x56\x34\x12\x34\x12\x78\x56' + + '\x12\x34\x56\x78\x12\x34\x56\x78') + UUID(fields=(0x12345678, 0x1234, 0x5678, 0x12, 0x34, 0x567812345678)) + UUID(int=0x12345678123456781234567812345678) + + Exactly one of 'hex', 'bytes', 'bytes_le', 'fields', or 'int' must + be given. The 'version' argument is optional; if given, the resulting + UUID will have its variant and version set according to RFC 4122, + overriding the given 'hex', 'bytes', 'bytes_le', 'fields', or 'int'. 
+ """ + + if [hex, bytes, bytes_le, fields, int].count(None) != 4: + raise TypeError('need one of hex, bytes, bytes_le, fields, or int') + if hex is not None: + hex = hex.replace('urn:', '').replace('uuid:', '') + hex = hex.strip('{}').replace('-', '') + if len(hex) != 32: + raise ValueError('badly formed hexadecimal UUID string') + int = long(hex, 16) + if bytes_le is not None: + if len(bytes_le) != 16: + raise ValueError('bytes_le is not a 16-char string') + bytes = (bytes_le[3] + bytes_le[2] + bytes_le[1] + bytes_le[0] + + bytes_le[5] + bytes_le[4] + bytes_le[7] + bytes_le[6] + + bytes_le[8:]) + if bytes is not None: + if len(bytes) != 16: + raise ValueError('bytes is not a 16-char string') + int = long(('%02x' * 16) % tuple(map(ord, bytes)), 16) + if fields is not None: + if len(fields) != 6: + raise ValueError('fields is not a 6-tuple') + (time_low, time_mid, time_hi_version, + clock_seq_hi_variant, clock_seq_low, node) = fields + if not 0 <= time_low < 1 << 32L: + raise ValueError('field 1 out of range (need a 32-bit value)') + if not 0 <= time_mid < 1 << 16L: + raise ValueError('field 2 out of range (need a 16-bit value)') + if not 0 <= time_hi_version < 1 << 16L: + raise ValueError('field 3 out of range (need a 16-bit value)') + if not 0 <= clock_seq_hi_variant < 1 << 8L: + raise ValueError('field 4 out of range (need an 8-bit value)') + if not 0 <= clock_seq_low < 1 << 8L: + raise ValueError('field 5 out of range (need an 8-bit value)') + if not 0 <= node < 1 << 48L: + raise ValueError('field 6 out of range (need a 48-bit value)') + clock_seq = (clock_seq_hi_variant << 8L) | clock_seq_low + int = ((time_low << 96L) | (time_mid << 80L) | + (time_hi_version << 64L) | (clock_seq << 48L) | node) + if int is not None: + if not 0 <= int < 1 << 128L: + raise ValueError('int is out of range (need a 128-bit value)') + if version is not None: + if not 1 <= version <= 5: + raise ValueError('illegal version number') + # Set the variant to RFC 4122. + int &= ~(0xc000 << 48L) + int |= 0x8000 << 48L + # Set the version number. 
+ int &= ~(0xf000 << 64L) + int |= version << 76L + self.__dict__['int'] = int + + def __cmp__(self, other): + if isinstance(other, UUID): + return cmp(self.int, other.int) + return NotImplemented + + def __hash__(self): + return hash(self.int) + + def __int__(self): + return self.int + + def __repr__(self): + return 'UUID(%r)' % str(self) + + def __setattr__(self, name, value): + raise TypeError('UUID objects are immutable') + + def __str__(self): + hex = '%032x' % self.int + return '%s-%s-%s-%s-%s' % ( + hex[:8], hex[8:12], hex[12:16], hex[16:20], hex[20:]) + + def get_bytes(self): + bytes = '' + for shift in range(0, 128, 8): + bytes = chr((self.int >> shift) & 0xff) + bytes + return bytes + + bytes = property(get_bytes) + + def get_bytes_le(self): + bytes = self.bytes + return (bytes[3] + bytes[2] + bytes[1] + bytes[0] + + bytes[5] + bytes[4] + bytes[7] + bytes[6] + bytes[8:]) + + bytes_le = property(get_bytes_le) + + def get_fields(self): + return (self.time_low, self.time_mid, self.time_hi_version, + self.clock_seq_hi_variant, self.clock_seq_low, self.node) + + fields = property(get_fields) + + def get_time_low(self): + return self.int >> 96L + + time_low = property(get_time_low) + + def get_time_mid(self): + return (self.int >> 80L) & 0xffff + + time_mid = property(get_time_mid) + + def get_time_hi_version(self): + return (self.int >> 64L) & 0xffff + + time_hi_version = property(get_time_hi_version) + + def get_clock_seq_hi_variant(self): + return (self.int >> 56L) & 0xff + + clock_seq_hi_variant = property(get_clock_seq_hi_variant) + + def get_clock_seq_low(self): + return (self.int >> 48L) & 0xff + + clock_seq_low = property(get_clock_seq_low) + + def get_time(self): + return (((self.time_hi_version & 0x0fffL) << 48L) | + (self.time_mid << 32L) | self.time_low) + + time = property(get_time) + + def get_clock_seq(self): + return (((self.clock_seq_hi_variant & 0x3fL) << 8L) | + self.clock_seq_low) + + clock_seq = property(get_clock_seq) + + def get_node(self): + return self.int & 0xffffffffffff + + node = property(get_node) + + def get_hex(self): + return '%032x' % self.int + + hex = property(get_hex) + + def get_urn(self): + return 'urn:uuid:' + str(self) + + urn = property(get_urn) + + def get_variant(self): + if not self.int & (0x8000 << 48L): + return RESERVED_NCS + elif not self.int & (0x4000 << 48L): + return RFC_4122 + elif not self.int & (0x2000 << 48L): + return RESERVED_MICROSOFT + else: + return RESERVED_FUTURE + + variant = property(get_variant) + + def get_version(self): + # The version bits are only meaningful for RFC 4122 UUIDs. + if self.variant == RFC_4122: + return int((self.int >> 76L) & 0xf) + + version = property(get_version) + + +def _find_mac(command, args, hw_identifiers, get_index): + import os + for dir in ['', '/sbin/', '/usr/sbin']: + executable = os.path.join(dir, command) + if not os.path.exists(executable): + continue + + try: + # LC_ALL to get English output, 2>/dev/null to + # prevent output on stderr + cmd = 'LC_ALL=C %s %s 2>/dev/null' % (executable, args) + pipe = os.popen(cmd) + except IOError: + continue + + for line in pipe: + words = line.lower().split() + for i in range(len(words)): + if words[i] in hw_identifiers: + return int(words[get_index(i)].replace(':', ''), 16) + return None + + +def _ifconfig_getnode(): + """Get the hardware address on Unix by running ifconfig.""" + + # This works on Linux ('' or '-a'), Tru64 ('-av'), but not all Unixes. 
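+    # (Added note: _find_mac scans each line of the command output for a 'hwaddr'/'ether' token and parses the word after it as the MAC address.)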
+ for args in ('', '-a', '-av'): + mac = _find_mac('ifconfig', args, ['hwaddr', 'ether'], lambda i: i + 1) + if mac: + return mac + + import socket + ip_addr = socket.gethostbyname(socket.gethostname()) + + # Try getting the MAC addr from arp based on our IP address (Solaris). + mac = _find_mac('arp', '-an', [ip_addr], lambda i: -1) + if mac: + return mac + + # This might work on HP-UX. + mac = _find_mac('lanscan', '-ai', ['lan0'], lambda i: 0) + if mac: + return mac + + return None + + +def _ipconfig_getnode(): + """Get the hardware address on Windows by running ipconfig.exe.""" + import os + import re + dirs = ['', r'c:\windows\system32', r'c:\winnt\system32'] + try: + import ctypes + buffer = ctypes.create_string_buffer(300) + ctypes.windll.kernel32.GetSystemDirectoryA(buffer, 300) + dirs.insert(0, buffer.value.decode('mbcs')) + except: + pass + for dir in dirs: + try: + pipe = os.popen(os.path.join(dir, 'ipconfig') + ' /all') + except IOError: + continue + for line in pipe: + value = line.split(':')[-1].strip().lower() + if re.match('([0-9a-f][0-9a-f]-){5}[0-9a-f][0-9a-f]', value): + return int(value.replace('-', ''), 16) + + +def _netbios_getnode(): + """Get the hardware address on Windows using NetBIOS calls. + See http://support.microsoft.com/kb/118623 for details.""" + import win32wnet + import netbios + ncb = netbios.NCB() + ncb.Command = netbios.NCBENUM + ncb.Buffer = adapters = netbios.LANA_ENUM() + adapters._pack() + if win32wnet.Netbios(ncb) != 0: + return + adapters._unpack() + for i in range(adapters.length): + ncb.Reset() + ncb.Command = netbios.NCBRESET + ncb.Lana_num = ord(adapters.lana[i]) + if win32wnet.Netbios(ncb) != 0: + continue + ncb.Reset() + ncb.Command = netbios.NCBASTAT + ncb.Lana_num = ord(adapters.lana[i]) + ncb.Callname = '*'.ljust(16) + ncb.Buffer = status = netbios.ADAPTER_STATUS() + if win32wnet.Netbios(ncb) != 0: + continue + status._unpack() + bytes = map(ord, status.adapter_address) + return ((bytes[0] << 40L) + (bytes[1] << 32L) + (bytes[2] << 24L) + + (bytes[3] << 16L) + (bytes[4] << 8L) + bytes[5]) + +# Thanks to Thomas Heller for ctypes and for his help with its use here. + +# If ctypes is available, use it to find system routines for UUID generation. +_uuid_generate_random = _uuid_generate_time = _UuidCreate = None +try: + import ctypes + import ctypes.util + _buffer = ctypes.create_string_buffer(16) + + # The uuid_generate_* routines are provided by libuuid on at least + # Linux and FreeBSD, and provided by libc on Mac OS X. + for libname in ['uuid', 'c']: + try: + lib = ctypes.CDLL(ctypes.util.find_library(libname)) + except: + continue + if hasattr(lib, 'uuid_generate_random'): + _uuid_generate_random = lib.uuid_generate_random + if hasattr(lib, 'uuid_generate_time'): + _uuid_generate_time = lib.uuid_generate_time + + # On Windows prior to 2000, UuidCreate gives a UUID containing the + # hardware address. On Windows 2000 and later, UuidCreate makes a + # random UUID and UuidCreateSequential gives a UUID containing the + # hardware address. These routines are provided by the RPC runtime. + # NOTE: at least on Tim's WinXP Pro SP2 desktop box, while the last + # 6 bytes returned by UuidCreateSequential are fixed, they don't appear + # to bear any relationship to the MAC address of any network device + # on the box. 
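+    # (Added note: on non-Windows platforms ctypes.windll does not exist, so the lookup below raises and _UuidCreate simply stays None.)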
+ try: + lib = ctypes.windll.rpcrt4 + except: + lib = None + _UuidCreate = getattr(lib, 'UuidCreateSequential', + getattr(lib, 'UuidCreate', None)) +except: + pass + + +def _unixdll_getnode(): + """Get the hardware address on Unix using ctypes.""" + _uuid_generate_time(_buffer) + return UUID(bytes=_buffer.raw).node + + +def _windll_getnode(): + """Get the hardware address on Windows using ctypes.""" + if _UuidCreate(_buffer) == 0: + return UUID(bytes=_buffer.raw).node + + +def _random_getnode(): + """Get a random node ID, with eighth bit set as suggested by RFC 4122.""" + import random + return random.randrange(0, 1 << 48L) | 0x010000000000L + +_node = None + + +def getnode(): + """Get the hardware address as a 48-bit positive integer. + + The first time this runs, it may launch a separate program, which could + be quite slow. If all attempts to obtain the hardware address fail, we + choose a random 48-bit number with its eighth bit set to 1 as recommended + in RFC 4122. + """ + + global _node + if _node is not None: + return _node + + import sys + if sys.platform == 'win32': + getters = [_windll_getnode, _netbios_getnode, _ipconfig_getnode] + else: + getters = [_unixdll_getnode, _ifconfig_getnode] + + for getter in getters + [_random_getnode]: + try: + _node = getter() + except: + continue + if _node is not None: + return _node + +_last_timestamp = None + + +def uuid1(node=None, clock_seq=None): + """Generate a UUID from a host ID, sequence number, and the current time. + If 'node' is not given, getnode() is used to obtain the hardware + address. If 'clock_seq' is given, it is used as the sequence number; + otherwise a random 14-bit sequence number is chosen.""" + + # When the system provides a version-1 UUID generator, use it (but don't + # use UuidCreate here because its UUIDs don't conform to RFC 4122). + if _uuid_generate_time and node is clock_seq is None: + _uuid_generate_time(_buffer) + return UUID(bytes=_buffer.raw) + + global _last_timestamp + import time + nanoseconds = int(time.time() * 1e9) + # 0x01b21dd213814000 is the number of 100-ns intervals between the + # UUID epoch 1582-10-15 00:00:00 and the Unix epoch 1970-01-01 00:00:00. + timestamp = int(nanoseconds / 100) + 0x01b21dd213814000L + if timestamp <= _last_timestamp: + timestamp = _last_timestamp + 1 + _last_timestamp = timestamp + if clock_seq is None: + import random + clock_seq = random.randrange(1 << 14L) # instead of stable storage + time_low = timestamp & 0xffffffffL + time_mid = (timestamp >> 32L) & 0xffffL + time_hi_version = (timestamp >> 48L) & 0x0fffL + clock_seq_low = clock_seq & 0xffL + clock_seq_hi_variant = (clock_seq >> 8L) & 0x3fL + if node is None: + node = getnode() + return UUID(fields=(time_low, time_mid, time_hi_version, + clock_seq_hi_variant, clock_seq_low, node), version=1) + + +def uuid3(namespace, name): + """Generate a UUID from the MD5 hash of a namespace UUID and a name.""" + try: + import hashlib + md5 = hashlib.md5 + except ImportError: + from md5 import md5 # NOQA + hash = md5(namespace.bytes + name).digest() + return UUID(bytes=hash[:16], version=3) + + +def uuid4(): + """Generate a random UUID.""" + + # When the system provides a version-4 UUID generator, use it. + if _uuid_generate_random: + _uuid_generate_random(_buffer) + return UUID(bytes=_buffer.raw) + + # Otherwise, get randomness from urandom or the 'random' module. 
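+    # (Added note: os.urandom may raise NotImplementedError when no OS-level randomness source is available; the bare except below then falls back to the non-cryptographic random module.)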
+ try: + import os + return UUID(bytes=os.urandom(16), version=4) + except: + import random + bytes = [chr(random.randrange(256)) for i in range(16)] + return UUID(bytes=bytes, version=4) + + +def uuid5(namespace, name): + """Generate a UUID from the SHA-1 hash of a namespace UUID and a name.""" + try: + import hashlib + sha = hashlib.sha1 + except ImportError: + from sha import sha # NOQA + hash = sha(namespace.bytes + name).digest() + return UUID(bytes=hash[:16], version=5) + +# The following standard UUIDs are for use with uuid3() or uuid5(). + +NAMESPACE_DNS = UUID('6ba7b810-9dad-11d1-80b4-00c04fd430c8') +NAMESPACE_URL = UUID('6ba7b811-9dad-11d1-80b4-00c04fd430c8') +NAMESPACE_OID = UUID('6ba7b812-9dad-11d1-80b4-00c04fd430c8') +NAMESPACE_X500 = UUID('6ba7b814-9dad-11d1-80b4-00c04fd430c8') diff --git a/awx/lib/site-packages/django_extensions/utils/validatingtemplatetags.py b/awx/lib/site-packages/django_extensions/utils/validatingtemplatetags.py new file mode 100644 index 0000000000..b36ca7e172 --- /dev/null +++ b/awx/lib/site-packages/django_extensions/utils/validatingtemplatetags.py @@ -0,0 +1,91 @@ +from django.template.base import Library, Node +from django.template import defaulttags +from django.templatetags import future +register = Library() + +error_on_old_style_url_tag = False +new_style_url_tag = False +errors = [] + + +def before_new_template(force_new_urls): + """Reset state ready for new template""" + global new_style_url_tag, error_on_old_style_url_tag, errors + new_style_url_tag = False + error_on_old_style_url_tag = force_new_urls + errors = [] + + +def get_template_errors(): + return errors + + +# Disable extends and include as they are not needed, slow parsing down, and cause duplicate errors +class NoOpNode(Node): + def render(self, context): + return '' + + +@register.tag +def extends(parser, token): + return NoOpNode() + + +@register.tag +def include(parser, token): + return NoOpNode() + + +# We replace load to determine whether new style urls are in use and re-patch url after +# a future version is loaded +@register.tag +def load(parser, token): + global new_style_url_tag + bits = token.contents.split() + + reloaded_url_tag = False + if len(bits) >= 4 and bits[-2] == "from" and bits[-1] == "future": + for name in bits[1:-2]: + if name == "url": + new_style_url_tag = True + reloaded_url_tag = True + + try: + return defaulttags.load(parser, token) + finally: + if reloaded_url_tag: + parser.tags['url'] = new_style_url + + +@register.tag(name='url') +def old_style_url(parser, token): + global error_on_old_style_url_tag + + bits = token.split_contents() + view = bits[1] + + if error_on_old_style_url_tag: + _error("Old style url tag used (only reported once per file): {%% %s %%}" % (" ".join(bits)), token) + error_on_old_style_url_tag = False + + if view[0] in "\"'" and view[0] == view[-1]: + _error("Old style url tag with quotes around view name: {%% %s %%}" % (" ".join(bits)), token) + + return defaulttags.url(parser, token) + + +def new_style_url(parser, token): + bits = token.split_contents() + view = bits[1] + + if view[0] not in "\"'" or view[0] != view[-1]: + _error("New style url tag without quotes around view name: {%% %s %%}" % (" ".join(bits)), token) + + return future.url(parser, token) + + +def _error(message, token): + origin, (start, upto) = token.source + source = origin.reload() + line = source.count("\n", 0, start) + 1 # 1 based line numbering + errors.append((origin, line, message)) diff --git a/awx/lib/site-packages/djcelery/__init__.py 
b/awx/lib/site-packages/djcelery/__init__.py new file mode 100644 index 0000000000..c281cfffdf --- /dev/null +++ b/awx/lib/site-packages/djcelery/__init__.py @@ -0,0 +1,25 @@ +"""Django Celery Integration.""" +# :copyright: (c) 2009 - 2012 by Ask Solem. +# :license: BSD, see LICENSE for more details. +from __future__ import absolute_import + +import os + +VERSION = (3, 0, 17) +__version__ = '.'.join(map(str, VERSION[0:3])) + ''.join(VERSION[3:]) +__author__ = 'Ask Solem' +__contact__ = 'ask@celeryproject.org' +__homepage__ = 'http://celeryproject.org' +__docformat__ = 'restructuredtext' +__license__ = 'BSD (3 clause)' + +# -eof meta- + + +def setup_loader(): + os.environ.setdefault('CELERY_LOADER', 'djcelery.loaders.DjangoLoader') + +# Importing this module enables the Celery Django loader. +setup_loader() + +from celery import current_app as celery # noqa diff --git a/awx/lib/site-packages/djcelery/admin.py b/awx/lib/site-packages/djcelery/admin.py new file mode 100644 index 0000000000..85d9d4aefe --- /dev/null +++ b/awx/lib/site-packages/djcelery/admin.py @@ -0,0 +1,312 @@ +from __future__ import absolute_import +from __future__ import with_statement + +from django import forms +from django.conf import settings +from django.contrib import admin +from django.contrib.admin import helpers +from django.contrib.admin.views import main as main_views +from django.shortcuts import render_to_response +from django.template import RequestContext +from django.utils.encoding import force_unicode +from django.utils.html import escape +from django.utils.translation import ugettext_lazy as _ + +from celery import current_app +from celery import states +from celery.task.control import broadcast, revoke, rate_limit +from celery.utils.text import abbrtask + +from .admin_utils import action, display_field, fixedwidth +from .models import (TaskState, WorkerState, + PeriodicTask, IntervalSchedule, CrontabSchedule) +from .humanize import naturaldate + + +TASK_STATE_COLORS = {states.SUCCESS: 'green', + states.FAILURE: 'red', + states.REVOKED: 'magenta', + states.STARTED: 'yellow', + states.RETRY: 'orange', + 'RECEIVED': 'blue'} +NODE_STATE_COLORS = {'ONLINE': 'green', + 'OFFLINE': 'gray'} + + +class MonitorList(main_views.ChangeList): + + def __init__(self, *args, **kwargs): + super(MonitorList, self).__init__(*args, **kwargs) + self.title = self.model_admin.list_page_title + + +@display_field(_('state'), 'state') +def colored_state(task): + state = escape(task.state) + color = TASK_STATE_COLORS.get(task.state, 'black') + return '<b><span style="color: %s;">%s</span></b>' % (color, state) + + +@display_field(_('state'), 'last_heartbeat') +def node_state(node): + state = node.is_alive() and 'ONLINE' or 'OFFLINE' + color = NODE_STATE_COLORS[state] + return '<b><span style="color: %s;">%s</span></b>' % (color, state) + + +@display_field(_('ETA'), 'eta') +def eta(task): + if not task.eta: + return '<span style="color: gray;">none</span>' + return escape(task.eta) + + +@display_field(_('when'), 'tstamp') +def tstamp(task): + return '
<div title="%s">%s</div>
' % (escape(str(task.tstamp)), + escape(naturaldate(task.tstamp))) + + +@display_field(_('name'), 'name') +def name(task): + short_name = abbrtask(task.name, 16) + return '
<div title="%s"><b>%s</b></div>
' % (escape(task.name), + escape(short_name)) + + +class ModelMonitor(admin.ModelAdmin): + can_add = False + can_delete = False + + def get_changelist(self, request, **kwargs): + return MonitorList + + def change_view(self, request, object_id, extra_context=None): + extra_context = extra_context or {} + extra_context.setdefault('title', self.detail_title) + return super(ModelMonitor, self).change_view(request, object_id, + extra_context) + + def has_delete_permission(self, request, obj=None): + if not self.can_delete: + return False + return super(ModelMonitor, self).has_delete_permission(request, obj) + + def has_add_permission(self, request): + if not self.can_add: + return False + return super(ModelMonitor, self).has_add_permission(request) + + +class TaskMonitor(ModelMonitor): + detail_title = _('Task detail') + list_page_title = _('Tasks') + rate_limit_confirmation_template = 'djcelery/confirm_rate_limit.html' + date_hierarchy = 'tstamp' + fieldsets = ( + (None, { + 'fields': ('state', 'task_id', 'name', 'args', 'kwargs', + 'eta', 'runtime', 'worker', 'tstamp'), + 'classes': ('extrapretty', ), + }), + ('Details', { + 'classes': ('collapse', 'extrapretty'), + 'fields': ('result', 'traceback', 'expires'), + }), + ) + list_display = ( + fixedwidth('task_id', name=_('UUID'), pt=8), + colored_state, + name, + fixedwidth('args', pretty=True), + fixedwidth('kwargs', pretty=True), + eta, + tstamp, + 'worker', + ) + readonly_fields = ( + 'state', 'task_id', 'name', 'args', 'kwargs', + 'eta', 'runtime', 'worker', 'result', 'traceback', + 'expires', 'tstamp', + ) + list_filter = ('state', 'name', 'tstamp', 'eta', 'worker') + search_fields = ('name', 'task_id', 'args', 'kwargs', 'worker__hostname') + actions = [ + 'revoke_tasks', + 'terminate_tasks', + 'kill_tasks', + 'rate_limit_tasks', + ] + + class Media: + css = {'all': ('djcelery/style.css',)} + + @action(_('Revoke selected tasks')) + def revoke_tasks(self, request, queryset): + with current_app.default_connection() as connection: + for state in queryset: + revoke(state.task_id, connection=connection) + + @action(_('Terminate selected tasks')) + def terminate_tasks(self, request, queryset): + with current_app.default_connection() as connection: + for state in queryset: + revoke(state.task_id, connection=connection, terminate=True) + + @action(_('Kill selected tasks')) + def kill_tasks(self, request, queryset): + with current_app.default_connection() as connection: + for state in queryset: + revoke(state.task_id, connection=connection, + terminate=True, signal='KILL') + + @action(_('Rate limit selected tasks')) + def rate_limit_tasks(self, request, queryset): + tasks = set([task.name for task in queryset]) + opts = self.model._meta + app_label = opts.app_label + if request.POST.get('post'): + rate = request.POST['rate_limit'] + with current_app.default_connection() as connection: + for task_name in tasks: + rate_limit(task_name, rate, connection=connection) + return None + + context = { + 'title': _('Rate limit selection'), + 'queryset': queryset, + 'object_name': force_unicode(opts.verbose_name), + 'action_checkbox_name': helpers.ACTION_CHECKBOX_NAME, + 'opts': opts, + 'app_label': app_label, + } + + return render_to_response( + self.rate_limit_confirmation_template, context, + context_instance=RequestContext(request), + ) + + def get_actions(self, request): + actions = super(TaskMonitor, self).get_actions(request) + actions.pop('delete_selected', None) + return actions + + def queryset(self, request): + qs = super(TaskMonitor, 
self).queryset(request) + return qs.select_related('worker') + + +class WorkerMonitor(ModelMonitor): + can_add = True + detail_title = _('Node detail') + list_page_title = _('Worker Nodes') + list_display = ('hostname', node_state) + readonly_fields = ('last_heartbeat', ) + actions = ['shutdown_nodes', + 'enable_events', + 'disable_events'] + + @action(_('Shutdown selected worker nodes')) + def shutdown_nodes(self, request, queryset): + broadcast('shutdown', destination=[n.hostname for n in queryset]) + + @action(_('Enable event mode for selected nodes.')) + def enable_events(self, request, queryset): + broadcast('enable_events', + destination=[n.hostname for n in queryset]) + + @action(_('Disable event mode for selected nodes.')) + def disable_events(self, request, queryset): + broadcast('disable_events', + destination=[n.hostname for n in queryset]) + + def get_actions(self, request): + actions = super(WorkerMonitor, self).get_actions(request) + actions.pop('delete_selected', None) + return actions + +admin.site.register(TaskState, TaskMonitor) +admin.site.register(WorkerState, WorkerMonitor) + + +# ### Periodic Tasks + + +class LaxChoiceField(forms.ChoiceField): + + def valid_value(self, value): + return True + + +def periodic_task_form(): + current_app.loader.import_default_modules() + tasks = list(sorted(name for name in current_app.tasks + if not name.startswith('celery.'))) + choices = (('', ''), ) + tuple(zip(tasks, tasks)) + + class PeriodicTaskForm(forms.ModelForm): + regtask = LaxChoiceField(label=_(u'Task (registered)'), + choices=choices, required=False) + task = forms.CharField(label=_('Task (custom)'), required=False, + max_length=200) + + class Meta: + model = PeriodicTask + + def clean(self): + data = super(PeriodicTaskForm, self).clean() + regtask = data.get('regtask') + if regtask: + data['task'] = regtask + if not data['task']: + exc = forms.ValidationError(_(u'Need name of task')) + self._errors['task'] = self.error_class(exc.messages) + raise exc + return data + + return PeriodicTaskForm + + +class PeriodicTaskAdmin(admin.ModelAdmin): + model = PeriodicTask + form = periodic_task_form() + list_display = ('__unicode__', 'enabled') + fieldsets = ( + (None, { + 'fields': ('name', 'regtask', 'task', 'enabled'), + 'classes': ('extrapretty', 'wide'), + }), + ('Schedule', { + 'fields': ('interval', 'crontab'), + 'classes': ('extrapretty', 'wide', ), + }), + ('Arguments', { + 'fields': ('args', 'kwargs'), + 'classes': ('extrapretty', 'wide', 'collapse'), + }), + ('Execution Options', { + 'fields': ('expires', 'queue', 'exchange', 'routing_key'), + 'classes': ('extrapretty', 'wide', 'collapse'), + }), + ) + + def __init__(self, *args, **kwargs): + super(PeriodicTaskAdmin, self).__init__(*args, **kwargs) + self.form = periodic_task_form() + + def changelist_view(self, request, extra_context=None): + extra_context = extra_context or {} + scheduler = getattr(settings, 'CELERYBEAT_SCHEDULER', None) + if scheduler != 'djcelery.schedulers.DatabaseScheduler': + extra_context['wrong_scheduler'] = True + return super(PeriodicTaskAdmin, self).changelist_view(request, + extra_context) + + def queryset(self, request): + qs = super(PeriodicTaskAdmin, self).queryset(request) + return qs.select_related('interval', 'crontab') + + +admin.site.register(IntervalSchedule) +admin.site.register(CrontabSchedule) +admin.site.register(PeriodicTask, PeriodicTaskAdmin) diff --git a/awx/lib/site-packages/djcelery/admin_utils.py b/awx/lib/site-packages/djcelery/admin_utils.py new file mode 100644 index 
0000000000..40c5fbc885 --- /dev/null +++ b/awx/lib/site-packages/djcelery/admin_utils.py @@ -0,0 +1,49 @@ +from __future__ import absolute_import + +from pprint import pformat + +from django.utils.html import escape + +FIXEDWIDTH_STYLE = '''\ +<span title="%s" style="font-size: %spt; \ +font-family: Menlo, Courier; ">%s</span> \ +''' + + +def attrs(**kwargs): + def _inner(fun): + for attr_name, attr_value in kwargs.items(): + setattr(fun, attr_name, attr_value) + return fun + return _inner + + +def display_field(short_description, admin_order_field, + allow_tags=True, **kwargs): + return attrs(short_description=short_description, + admin_order_field=admin_order_field, + allow_tags=allow_tags, **kwargs) + + +def action(short_description, **kwargs): + return attrs(short_description=short_description, **kwargs) + + +def fixedwidth(field, name=None, pt=6, width=16, maxlen=64, pretty=False): + + @display_field(name or field, field) + def f(task): + val = getattr(task, field) + if pretty: + val = pformat(val, width=width) + if val.startswith("u'") or val.startswith('u"'): + val = val[2:-1] + shortval = val.replace(',', ',\n') + shortval = shortval.replace('\n', '|br/|') + + if len(shortval) > maxlen: + shortval = shortval[:maxlen] + '...' + styled = FIXEDWIDTH_STYLE % (escape(val[:255]), pt, + escape(shortval)) + return styled.replace('|br/|', '<br/>
') + return f diff --git a/awx/lib/site-packages/djcelery/app.py b/awx/lib/site-packages/djcelery/app.py new file mode 100644 index 0000000000..eece9ec570 --- /dev/null +++ b/awx/lib/site-packages/djcelery/app.py @@ -0,0 +1,7 @@ +from __future__ import absolute_import + +from celery import current_app + + +#: The Django-Celery app instance. +app = current_app._get_current_object() diff --git a/awx/lib/site-packages/djcelery/backends/__init__.py b/awx/lib/site-packages/djcelery/backends/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/djcelery/backends/cache.py b/awx/lib/site-packages/djcelery/backends/cache.py new file mode 100644 index 0000000000..2bcb9dcbf7 --- /dev/null +++ b/awx/lib/site-packages/djcelery/backends/cache.py @@ -0,0 +1,68 @@ +"""celery.backends.cache""" +from __future__ import absolute_import + +from datetime import timedelta + +import django +from django.utils.encoding import smart_str +from django.core.cache import cache, get_cache + +from celery import current_app +from celery.utils.timeutils import timedelta_seconds +from celery.backends.base import KeyValueStoreBackend + +# CELERY_CACHE_BACKEND overrides the django-global(tm) backend settings. +if current_app.conf.CELERY_CACHE_BACKEND: + cache = get_cache(current_app.conf.CELERY_CACHE_BACKEND) # noqa + + +class DjangoMemcacheWrapper(object): + """Wrapper class to django's memcache backend class, that overrides the + :meth:`get` method in order to remove the forcing of unicode strings + since it may cause binary or pickled data to break.""" + + def __init__(self, cache): + self.cache = cache + + def get(self, key, default=None): + val = self.cache._cache.get(smart_str(key)) + if val is None: + return default + else: + return val + + def set(self, key, value, timeout=0): + self.cache.set(key, value, timeout) + +# Check if django is using memcache as the cache backend. If so, wrap the +# cache object in a DjangoMemcacheWrapper for Django < 1.2 that fixes a bug +# with retrieving pickled data. +from django.core.cache.backends.base import InvalidCacheBackendError +try: + from django.core.cache.backends.memcached import CacheClass +except InvalidCacheBackendError: + pass +else: + if django.VERSION[0:2] < (1, 2) and isinstance(cache, CacheClass): + cache = DjangoMemcacheWrapper(cache) + + +class CacheBackend(KeyValueStoreBackend): + """Backend using the Django cache framework to store task metadata.""" + + def __init__(self, *args, **kwargs): + super(CacheBackend, self).__init__(*args, **kwargs) + expires = kwargs.get('expires', + current_app.conf.CELERY_TASK_RESULT_EXPIRES) + if isinstance(expires, timedelta): + expires = int(timedelta_seconds(expires)) + self.expires = expires + + def get(self, key): + return cache.get(key) + + def set(self, key, value): + cache.set(key, value, self.expires) + + def delete(self, key): + cache.delete(key) diff --git a/awx/lib/site-packages/djcelery/backends/database.py b/awx/lib/site-packages/djcelery/backends/database.py new file mode 100644 index 0000000000..07d7197558 --- /dev/null +++ b/awx/lib/site-packages/djcelery/backends/database.py @@ -0,0 +1,60 @@ +from __future__ import absolute_import + +from celery import current_app +from celery.backends.base import BaseDictBackend +from celery.utils.timeutils import maybe_timedelta + +from ..models import TaskMeta, TaskSetMeta + + +class DatabaseBackend(BaseDictBackend): + """The database backend. + + Using Django models to store task state. 
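+    (Added note: with django-celery installed, this backend is typically selected by setting CELERY_RESULT_BACKEND = 'database' in the Django settings.)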
+ + """ + TaskModel = TaskMeta + TaskSetModel = TaskSetMeta + + expires = current_app.conf.CELERY_TASK_RESULT_EXPIRES + create_django_tables = True + + subpolling_interval = 0.5 + + def _store_result(self, task_id, result, status, traceback=None): + """Store return value and status of an executed task.""" + self.TaskModel._default_manager.store_result( + task_id, result, status, + traceback=traceback, children=self.current_task_children(), + ) + return result + + def _save_group(self, group_id, result): + """Store the result of an executed group.""" + self.TaskSetModel._default_manager.store_result(group_id, result) + return result + + def _get_task_meta_for(self, task_id): + """Get task metadata for a task by id.""" + return self.TaskModel._default_manager.get_task(task_id).to_dict() + + def _restore_group(self, group_id): + """Get group metadata for a group by id.""" + meta = self.TaskSetModel._default_manager.restore_taskset(group_id) + if meta: + return meta.to_dict() + + def _delete_group(self, group_id): + self.TaskSetModel._default_manager.delete_taskset(group_id) + + def _forget(self, task_id): + try: + self.TaskModel._default_manager.get(task_id=task_id).delete() + except self.TaskModel.DoesNotExist: + pass + + def cleanup(self): + """Delete expired metadata.""" + expires = maybe_timedelta(self.expires) + for model in self.TaskModel, self.TaskSetModel: + model._default_manager.delete_expired(expires) diff --git a/awx/lib/site-packages/djcelery/common.py b/awx/lib/site-packages/djcelery/common.py new file mode 100644 index 0000000000..0c9d02686a --- /dev/null +++ b/awx/lib/site-packages/djcelery/common.py @@ -0,0 +1,74 @@ +from __future__ import absolute_import +from __future__ import with_statement + +from contextlib import contextmanager +from functools import wraps + +from django.utils import translation + + +@contextmanager +def respect_language(language): + """ + Context manager that changes the current translation language for + all code inside the following block. + + Can e.g. be used inside tasks like this:: + + from celery import task + from djcelery.common import respect_language + + @task + def my_task(language=None): + with respect_language(language): + pass + """ + if language: + prev = translation.get_language() + translation.activate(language) + try: + yield + finally: + translation.activate(prev) + else: + yield + + +def respects_language(fun): + """Decorator for tasks with respect to site's current language. + You can use this decorator on your tasks together with default @task + decorator (remember that the task decorator must be applied last). + + See also the with-statement alternative :func:`respect_language`. + + **Example**: + + .. code-block:: python + + @task + @respects_language + def my_task() + # localize something. + + The task will then accept a ``language`` argument that will be + used to set the language in the task, and the task can thus be + called like: + + .. 
code-block:: python + + from django.utils import translation + from myapp.tasks import my_task + + # Pass the current language on to the task + my_task.delay(language=translation.get_language()) + + # or set the language explicitly + my_task.delay(language='no.no') + + """ + + @wraps(fun) + def _inner(*args, **kwargs): + with respect_language(kwargs.pop('language', None)): + return fun(*args, **kwargs) + return _inner diff --git a/awx/lib/site-packages/djcelery/contrib/__init__.py b/awx/lib/site-packages/djcelery/contrib/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/djcelery/contrib/test_runner.py b/awx/lib/site-packages/djcelery/contrib/test_runner.py new file mode 100644 index 0000000000..2d11a8f53d --- /dev/null +++ b/awx/lib/site-packages/djcelery/contrib/test_runner.py @@ -0,0 +1,86 @@ +from __future__ import absolute_import + +from uuid import uuid4 +from datetime import datetime + +from django.conf import settings +from django.test.simple import DjangoTestSuiteRunner + +from celery.task import Task +from djcelery.models import TaskState + + +USAGE = """\ +Custom test runner to allow testing of celery delayed tasks. +""" + + +class CeleryTestSuiteRunner(DjangoTestSuiteRunner): + """Django test runner allowing testing of celery delayed tasks. + + All tasks are run locally, not in a worker. + + To use this runner set ``settings.TEST_RUNNER``:: + + TEST_RUNNER = 'djcelery.contrib.test_runner.CeleryTestSuiteRunner' + + """ + def setup_test_environment(self, **kwargs): + super(CeleryTestSuiteRunner, self).setup_test_environment(**kwargs) + settings.CELERY_ALWAYS_EAGER = True + settings.CELERY_EAGER_PROPAGATES_EXCEPTIONS = True # Issue #75 + + +class CeleryTestSuiteRunnerStoringResult(DjangoTestSuiteRunner): + """This custom test suite runner make some preliminary + monkey-patching allowing storing result of Celery task execution + in ``djcelery.models.TaskState`` model. Tasks run eagerly. + + Exceptions is turned on. If you need to test ``on_failure`` + behavior, you should monkey-patch in your test: + ``settings.CELERY_EAGER_PROPAGATES_EXCEPTIONS = False`` + + USAGE: + In ``settings.py``: + TEST_RUNNER = 'djcelery.contrib.test_runner.' 
+            'CeleryTestSuiteRunnerStoringResult'
+
+    In ``tests.py``:
+        from djcelery.models import TaskState
+        TaskState.objects.filter(state='SUCCESS', args__contains='test')
+
+    """
+    def setup_test_environment(self, **kwargs):
+        """Setting up test environment."""
+
+        # Monkey-patch Task.on_success() method
+        def on_success_patched(self, retval, task_id, args, kwargs):
+            TaskState.objects.create(task_id=uuid4().hex,
+                                     state='SUCCESS',
+                                     name=self.name,
+                                     result=retval,
+                                     args=args,
+                                     kwargs=kwargs,
+                                     tstamp=datetime.now())
+        Task.on_success = classmethod(on_success_patched)
+
+        # Monkey-patch Task.on_failure() method
+        def on_failure_patched(self, exc, task_id, args, kwargs, einfo):
+            TaskState.objects.create(task_id=uuid4().hex,
+                                     state='FAILURE',
+                                     name=self.name,
+                                     result=einfo,
+                                     args=args,
+                                     kwargs=kwargs,
+                                     tstamp=datetime.now())
+        Task.on_failure = classmethod(on_failure_patched)
+
+        # Call parent's version
+        super(CeleryTestSuiteRunnerStoringResult,
+              self).setup_test_environment(**kwargs)
+
+        # Tell celery to run tasks synchronously
+        settings.CELERY_ALWAYS_EAGER = True
+        settings.CELERY_EAGER_PROPAGATES_EXCEPTIONS = True  # Issue #75
diff --git a/awx/lib/site-packages/djcelery/humanize.py b/awx/lib/site-packages/djcelery/humanize.py
new file mode 100644
index 0000000000..8ff59ac74c
--- /dev/null
+++ b/awx/lib/site-packages/djcelery/humanize.py
@@ -0,0 +1,69 @@
+from __future__ import absolute_import
+
+from datetime import datetime
+
+from django.utils.translation import ungettext, ugettext as _
+from .utils import now
+
+JUST_NOW = _('just now')
+SECONDS_AGO = (_('%(seconds)d second ago'), _('%(seconds)d seconds ago'))
+MINUTES_AGO = (_('%(minutes)d minute ago'), _('%(minutes)d minutes ago'))
+HOURS_AGO = (_('%(hours)d hour ago'), _('%(hours)d hours ago'))
+YESTERDAY_AT = _('yesterday at %(time)s')
+OLDER_YEAR = (_('year'), _('years'))
+OLDER_MONTH = (_('month'), _('months'))
+OLDER_WEEK = (_('week'), _('weeks'))
+OLDER_DAY = (_('day'), _('days'))
+OLDER_CHUNKS = (
+    (365.0, OLDER_YEAR),
+    (30.0, OLDER_MONTH),
+    (7.0, OLDER_WEEK),
+    (1.0, OLDER_DAY),
+)
+OLDER_AGO = _('%(number)d %(type)s ago')
+
+
+def _un(singular__plural, n=None):
+    singular, plural = singular__plural
+    return ungettext(singular, plural, n)
+
+
+def naturaldate(date):
+    """Convert datetime into a human natural date string."""
+
+    if not date:
+        return ''
+
+    right_now = now()
+    today = datetime(right_now.year, right_now.month,
+                     right_now.day, tzinfo=right_now.tzinfo)
+    delta = right_now - date
+    delta_midnight = today - date
+
+    days = delta.days
+    hours = round(delta.seconds / 3600, 0)
+    minutes = delta.seconds / 60
+
+    if days < 0:
+        return JUST_NOW
+
+    if days == 0:
+        if hours == 0:
+            if minutes > 0:
+                return _un(MINUTES_AGO, n=minutes) % {'minutes': minutes}
+            else:
+                return JUST_NOW
+        else:
+            return _un(HOURS_AGO, n=hours) % {'hours': hours}
+
+    if delta_midnight.days == 0:
+        return YESTERDAY_AT % {'time': date.strftime('%H:%M')}
+
+    count = 0
+    for chunk, singular_plural in OLDER_CHUNKS:
+        if days >= chunk:
+            count = round((delta_midnight.days + 1) / chunk, 0)
+            type_ = _un(singular_plural, n=count)
+            break
+
+    return OLDER_AGO % {'number': count, 'type': type_}
diff --git a/awx/lib/site-packages/djcelery/loaders.py b/awx/lib/site-packages/djcelery/loaders.py
new file mode 100644
index 0000000000..ea049ef49e
--- /dev/null
+++ b/awx/lib/site-packages/djcelery/loaders.py
@@ -0,0 +1,195 @@
+from __future__ import absolute_import
+
+import os
+import imp
+import importlib
+import warnings
+
+from datetime import 
datetime + +from celery import signals +from celery.loaders.base import BaseLoader +from celery.datastructures import DictAttribute + +import django +from django import db +from django.conf import settings +from django.core import cache +from django.core.mail import mail_admins + +from .utils import DATABASE_ERRORS, now + +_RACE_PROTECTION = False +NO_TZ = django.VERSION < (1, 4) + + +def _maybe_close_fd(fh): + try: + os.close(fh.fileno()) + except (AttributeError, OSError, TypeError): + # TypeError added for celery#962 + pass + + +class DjangoLoader(BaseLoader): + """The Django loader.""" + _db_reuse = 0 + + override_backends = { + 'database': 'djcelery.backends.database.DatabaseBackend', + 'cache': 'djcelery.backends.cache.CacheBackend', + } + + def __init__(self, *args, **kwargs): + super(DjangoLoader, self).__init__(*args, **kwargs) + self._install_signal_handlers() + + def _install_signal_handlers(self): + # Need to close any open database connection after + # any embedded celerybeat process forks. + signals.beat_embedded_init.connect(self.close_database) + signals.worker_ready.connect(self.warn_if_debug) + + def now(self, utc=False): + return datetime.utcnow() if utc else now() + + def read_configuration(self): + """Load configuration from Django settings.""" + self.configured = True + # Default backend needs to be the database backend for backward + # compatibility. + backend = (getattr(settings, 'CELERY_RESULT_BACKEND', None) or + getattr(settings, 'CELERY_BACKEND', None)) + if not backend: + settings.CELERY_RESULT_BACKEND = 'database' + if NO_TZ: + if getattr(settings, 'CELERY_ENABLE_UTC', None): + warnings.warn('CELERY_ENABLE_UTC requires Django 1.4+') + settings.CELERY_ENABLE_UTC = False + return DictAttribute(settings) + + def _close_database(self): + try: + funs = [conn.close for conn in db.connections] + except AttributeError: + funs = [db.close_connection] # pre multidb + + for close in funs: + try: + close() + except DATABASE_ERRORS, exc: + str_exc = str(exc) + if 'closed' not in str_exc and 'not connected' not in str_exc: + raise + + def close_database(self, **kwargs): + db_reuse_max = self.conf.get('CELERY_DB_REUSE_MAX', None) + if not db_reuse_max: + return self._close_database() + if self._db_reuse >= db_reuse_max * 2: + self._db_reuse = 0 + self._close_database() + self._db_reuse += 1 + + def close_cache(self): + try: + cache.cache.close() + except (TypeError, AttributeError): + pass + + def on_process_cleanup(self): + """Does everything necessary for Django to work in a long-living, + multiprocessing environment. + + """ + # See http://groups.google.com/group/django-users/ + # browse_thread/thread/78200863d0c07c6d/ + self.close_database() + self.close_cache() + + def on_task_init(self, task_id, task): + """Called before every task.""" + try: + is_eager = task.request.is_eager + except AttributeError: + is_eager = False + if not is_eager: + self.close_database() + + def on_worker_init(self): + """Called when the worker starts. + + Automatically discovers any ``tasks.py`` files in the applications + listed in ``INSTALLED_APPS``. 
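+
+        For example, given a hypothetical ``myapp`` listed in
+        ``INSTALLED_APPS``, a module ``myapp/tasks.py`` such as the
+        following is imported automatically when the worker starts::
+
+            from celery import task
+
+            @task
+            def add(x, y):
+                return x + y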
+ + """ + self.import_default_modules() + + self.close_database() + self.close_cache() + + def warn_if_debug(self, **kwargs): + if settings.DEBUG: + warnings.warn('Using settings.DEBUG leads to a memory leak, never ' + 'use this setting in production environments!') + + def import_default_modules(self): + super(DjangoLoader, self).import_default_modules() + self.autodiscover() + + def autodiscover(self): + self.task_modules.update(mod.__name__ for mod in autodiscover() or ()) + + def on_worker_process_init(self): + # the parent process may have established these, + # so need to close them. + + # calling db.close() on some DB connections will cause + # the inherited DB conn to also get broken in the parent + # process so we need to remove it without triggering any + # network IO that close() might cause. + try: + for c in db.connections.all(): + if c and c.connection: + _maybe_close_fd(c.connection) + except AttributeError: + if db.connection and db.connection.connection: + _maybe_close_fd(db.connection.connection) + + # use the _ version to avoid DB_REUSE preventing the conn.close() call + self._close_database() + self.close_cache() + + def mail_admins(self, subject, body, fail_silently=False, **kwargs): + return mail_admins(subject, body, fail_silently=fail_silently) + + +def autodiscover(): + """Include tasks for all applications in ``INSTALLED_APPS``.""" + global _RACE_PROTECTION + + if _RACE_PROTECTION: + return + _RACE_PROTECTION = True + try: + return filter(None, [find_related_module(app, 'tasks') + for app in settings.INSTALLED_APPS]) + finally: + _RACE_PROTECTION = False + + +def find_related_module(app, related_name): + """Given an application name and a module name, tries to find that + module in the application.""" + + try: + app_path = importlib.import_module(app).__path__ + except AttributeError: + return + + try: + imp.find_module(related_name, app_path) + except ImportError: + return + + return importlib.import_module('%s.%s' % (app, related_name)) diff --git a/awx/lib/site-packages/djcelery/management/__init__.py b/awx/lib/site-packages/djcelery/management/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/djcelery/management/base.py b/awx/lib/site-packages/djcelery/management/base.py new file mode 100644 index 0000000000..f7181b8743 --- /dev/null +++ b/awx/lib/site-packages/djcelery/management/base.py @@ -0,0 +1,106 @@ +from __future__ import absolute_import + +import os +import sys + +from django.core.management.base import BaseCommand + +import celery +import djcelery + +DB_SHARED_THREAD = """\ +DatabaseWrapper objects created in a thread can only \ +be used in that same thread. The object with alias '%s' \ +was created in thread id %s and this is thread id %s.\ +""" + + +def patch_thread_ident(): + # monkey patch django. + # This patch make sure that we use real threads to get the ident which + # is going to happen if we are using gevent or eventlet. 
+    # -- patch taken from gunicorn
+    if getattr(patch_thread_ident, 'called', False):
+        return
+    try:
+        from django.db.backends import BaseDatabaseWrapper, DatabaseError
+
+        if 'validate_thread_sharing' in BaseDatabaseWrapper.__dict__:
+            import thread
+            _get_ident = thread.get_ident
+
+            __old__init__ = BaseDatabaseWrapper.__init__
+
+            def _init(self, *args, **kwargs):
+                __old__init__(self, *args, **kwargs)
+                self._thread_ident = _get_ident()
+
+            def _validate_thread_sharing(self):
+                if (not self.allow_thread_sharing
+                        and self._thread_ident != _get_ident()):
+                    raise DatabaseError(
+                        DB_SHARED_THREAD % (
+                            self.alias, self._thread_ident, _get_ident()),
+                    )
+
+            BaseDatabaseWrapper.__init__ = _init
+            BaseDatabaseWrapper.validate_thread_sharing = \
+                _validate_thread_sharing
+
+        patch_thread_ident.called = True
+    except ImportError:
+        pass
+patch_thread_ident()
+
+
+class CeleryCommand(BaseCommand):
+    options = BaseCommand.option_list
+    skip_opts = ['--app', '--loader', '--config']
+    keep_base_opts = False
+
+    def get_version(self):
+        return 'celery %s\ndjango-celery %s' % (celery.__version__,
+                                                djcelery.__version__)
+
+    def execute(self, *args, **options):
+        broker = options.get('broker')
+        if broker:
+            self.set_broker(broker)
+        super(CeleryCommand, self).execute(*args, **options)
+
+    def set_broker(self, broker):
+        os.environ['CELERY_BROKER_URL'] = broker
+
+    def run_from_argv(self, argv):
+        self.handle_default_options(argv[2:])
+        return super(CeleryCommand, self).run_from_argv(argv)
+
+    def handle_default_options(self, argv):
+        acc = []
+        broker = None
+        for i, arg in enumerate(argv):
+            if '--settings=' in arg:
+                _, settings_module = arg.split('=')
+                os.environ['DJANGO_SETTINGS_MODULE'] = settings_module
+            elif '--pythonpath=' in arg:
+                _, pythonpath = arg.split('=')
+                sys.path.insert(0, pythonpath)
+            elif '--broker=' in arg:
+                _, broker = arg.split('=')
+            elif arg == '-b':
+                broker = argv[i + 1]
+            else:
+                acc.append(arg)
+        if broker:
+            self.set_broker(broker)
+        return argv if self.keep_base_opts else acc
+
+    def die(self, msg):
+        sys.stderr.write(msg)
+        sys.stderr.write('\n')
+        sys.exit()
+
+    @property
+    def option_list(self):
+        return [x for x in self.options
+                if x._long_opts[0] not in self.skip_opts]
diff --git a/awx/lib/site-packages/djcelery/management/commands/__init__.py b/awx/lib/site-packages/djcelery/management/commands/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/awx/lib/site-packages/djcelery/management/commands/camqadm.py b/awx/lib/site-packages/djcelery/management/commands/camqadm.py
new file mode 100644
index 0000000000..169534ca65
--- /dev/null
+++ b/awx/lib/site-packages/djcelery/management/commands/camqadm.py
@@ -0,0 +1,25 @@
+"""
+
+Celery AMQP Administration Tool using the AMQP API.
+
+"""
+from __future__ import absolute_import
+
+from celery.bin import camqadm
+
+from djcelery.app import app
+from djcelery.management.base import CeleryCommand
+
+command = camqadm.AMQPAdminCommand(app=app)
+
+
+class Command(CeleryCommand):
+    """Run the Celery AMQP administration shell."""
+    options = (CeleryCommand.options +
+               command.get_options() +
+               command.preload_options)
+    help = 'Celery AMQP Administration Tool using the AMQP API.'
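+
+    # Typical invocation through Django's manage.py (the AMQP method and
+    # queue name below are illustrative):
+    #
+    #     $ python manage.py camqadm queue.purge celery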
+
+    def handle(self, *args, **options):
+        """Handle the management command."""
+        command.run(*args, **options)
diff --git a/awx/lib/site-packages/djcelery/management/commands/celery.py b/awx/lib/site-packages/djcelery/management/commands/celery.py
new file mode 100644
index 0000000000..cf6b5a4e9c
--- /dev/null
+++ b/awx/lib/site-packages/djcelery/management/commands/celery.py
@@ -0,0 +1,23 @@
+from __future__ import absolute_import
+
+from celery.bin import celery
+
+from djcelery.app import app
+from djcelery.management.base import CeleryCommand
+
+base = celery.CeleryCommand(app=app)
+
+
+class Command(CeleryCommand):
+    """The celery command."""
+    help = 'celery commands, see celery help'
+    requires_model_validation = True
+    options = (CeleryCommand.options +
+               base.get_options() +
+               base.preload_options)
+
+    def run_from_argv(self, argv):
+        argv = self.handle_default_options(argv)
+        base.execute_from_commandline(
+            ['%s %s' % (argv[0], argv[1])] + argv[2:],
+        )
diff --git a/awx/lib/site-packages/djcelery/management/commands/celerybeat.py b/awx/lib/site-packages/djcelery/management/commands/celerybeat.py
new file mode 100644
index 0000000000..6d1006f115
--- /dev/null
+++ b/awx/lib/site-packages/djcelery/management/commands/celerybeat.py
@@ -0,0 +1,24 @@
+"""
+
+Start the celery clock service from the Django management command.
+
+"""
+from __future__ import absolute_import
+
+from celery.bin import celerybeat
+
+from djcelery.app import app
+from djcelery.management.base import CeleryCommand
+
+beat = celerybeat.BeatCommand(app=app)
+
+
+class Command(CeleryCommand):
+    """Run the celery periodic task scheduler."""
+    options = (CeleryCommand.options +
+               beat.get_options() +
+               beat.preload_options)
+    help = 'Old alias to the "celery beat" command.'
+
+    def handle(self, *args, **options):
+        beat.run(*args, **options)
diff --git a/awx/lib/site-packages/djcelery/management/commands/celerycam.py b/awx/lib/site-packages/djcelery/management/commands/celerycam.py
new file mode 100644
index 0000000000..541e20f327
--- /dev/null
+++ b/awx/lib/site-packages/djcelery/management/commands/celerycam.py
@@ -0,0 +1,26 @@
+"""
+
+Shortcut to the Django snapshot service.
+
+"""
+from __future__ import absolute_import
+
+from celery.bin import celeryev
+
+from djcelery.app import app
+from djcelery.management.base import CeleryCommand
+
+ev = celeryev.EvCommand(app=app)
+
+
+class Command(CeleryCommand):
+    """Take snapshots of the cluster's state to the database."""
+    options = (CeleryCommand.options +
+               ev.get_options() +
+               ev.preload_options)
+    help = "Takes snapshots of the cluster's state to the database."
+
+    def handle(self, *args, **options):
+        """Handle the management command."""
+        options['camera'] = 'djcelery.snapshot.Camera'
+        ev.run(*args, **options)
diff --git a/awx/lib/site-packages/djcelery/management/commands/celeryctl.py b/awx/lib/site-packages/djcelery/management/commands/celeryctl.py
new file mode 100644
index 0000000000..190f0ea99a
--- /dev/null
+++ b/awx/lib/site-packages/djcelery/management/commands/celeryctl.py
@@ -0,0 +1,28 @@
+"""
+
+Celery management and monitoring utility.
+
+"""
+from __future__ import absolute_import
+
+from celery.bin.celeryctl import celeryctl, Command as _Command
+
+from djcelery import __version__
+from djcelery.app import app
+from djcelery.management.base import CeleryCommand
+
+# Django hijacks the version output and prints its version before our
+# version. So display the names of the products so the output is sensible.
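+# The combined string then renders on two lines, e.g. (version numbers
+# illustrative):
+#
+#     celery 3.0.19
+#     django-celery 3.0.17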
+_Command.version = 'celery %s\ndjango-celery %s' % (_Command.version,
+                                                    __version__)
+
+
+class Command(CeleryCommand):
+    """Run the celery control utility."""
+    help = 'Old alias to the "celery" command.'
+    keep_base_opts = False
+
+    def run_from_argv(self, argv):
+        util = celeryctl(app=app)
+
+        util.execute_from_commandline(self.handle_default_options(argv)[1:])
diff --git a/awx/lib/site-packages/djcelery/management/commands/celeryd.py b/awx/lib/site-packages/djcelery/management/commands/celeryd.py
new file mode 100644
index 0000000000..a14ffb546d
--- /dev/null
+++ b/awx/lib/site-packages/djcelery/management/commands/celeryd.py
@@ -0,0 +1,25 @@
+"""
+
+Start the celery daemon from the Django management command.
+
+"""
+from __future__ import absolute_import
+
+from celery.bin import celeryd
+
+from djcelery.app import app
+from djcelery.management.base import CeleryCommand
+
+worker = celeryd.WorkerCommand(app=app)
+
+
+class Command(CeleryCommand):
+    """Run the celery worker daemon."""
+    help = 'Old alias to the "celery worker" command.'
+    requires_model_validation = True
+    options = (CeleryCommand.options +
+               worker.get_options() +
+               worker.preload_options)
+
+    def handle(self, *args, **options):
+        worker.run(*args, **options)
diff --git a/awx/lib/site-packages/djcelery/management/commands/celeryd_detach.py b/awx/lib/site-packages/djcelery/management/commands/celeryd_detach.py
new file mode 100644
index 0000000000..5373ceca1f
--- /dev/null
+++ b/awx/lib/site-packages/djcelery/management/commands/celeryd_detach.py
@@ -0,0 +1,26 @@
+"""
+
+Start detached worker node from the Django management utility.
+
+"""
+from __future__ import absolute_import
+
+import os
+import sys
+
+from celery.bin import celeryd_detach
+
+from djcelery.management.base import CeleryCommand
+
+
+class Command(CeleryCommand):
+    """Run a detached celery worker node."""
+    help = 'Runs a detached Celery worker node.'
+    requires_model_validation = True
+    options = celeryd_detach.OPTION_LIST
+
+    def run_from_argv(self, argv):
+
+        class detached(celeryd_detach.detached_celeryd):
+            execv_argv = [os.path.abspath(sys.argv[0]), 'celery', 'worker']
+        detached().execute_from_commandline(argv)
diff --git a/awx/lib/site-packages/djcelery/management/commands/celeryd_multi.py b/awx/lib/site-packages/djcelery/management/commands/celeryd_multi.py
new file mode 100644
index 0000000000..0b46d21d0e
--- /dev/null
+++ b/awx/lib/site-packages/djcelery/management/commands/celeryd_multi.py
@@ -0,0 +1,26 @@
+"""
+
+Utility to manage multiple :program:`celeryd` instances.
+
+"""
+from __future__ import absolute_import
+
+from celery.bin import celeryd_multi
+
+from djcelery.management.base import CeleryCommand
+
+
+class Command(CeleryCommand):
+    """Manage multiple celery worker nodes."""
+    args = '[name1 [name2 [... nameN]]] [worker options]'
+    help = 'Manage multiple Celery worker nodes.'
+    requires_model_validation = True
+    options = ()
+    keep_base_opts = True
+
+    def run_from_argv(self, argv):
+        argv = self.handle_default_options(argv)
+        argv.append('--cmd=%s celeryd_detach' % (argv[0], ))
+        celeryd_multi.MultiTool().execute_from_commandline(
+            ['%s %s' % (argv[0], argv[1])] + argv[2:],
+        )
diff --git a/awx/lib/site-packages/djcelery/management/commands/celeryev.py b/awx/lib/site-packages/djcelery/management/commands/celeryev.py
new file mode 100644
index 0000000000..0ce9cdcf38
--- /dev/null
+++ b/awx/lib/site-packages/djcelery/management/commands/celeryev.py
@@ -0,0 +1,42 @@
+"""
+
+Curses Celery Event Viewer.
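+
+Typical invocations through Django's manage.py (the camera class is the
+one bundled with django-celery)::
+
+    $ python manage.py celeryev
+    $ python manage.py celeryev --camera=djcelery.snapshot.Camera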
+
+"""
+from __future__ import absolute_import
+
+from celery.bin import celeryev
+
+from djcelery.app import app
+from djcelery.management.base import CeleryCommand
+
+ev = celeryev.EvCommand(app=app)
+
+SS_TRANSPORTS = ['amqplib', 'kombu.transport.pyamqplib',
+                 'redis', 'kombu.transport.pyredis',
+                 'pika', 'kombu.transport.pypika']
+
+SS_COMPAT = """
+ERROR: Snapshots not currently supported by %s transport.
+Please use one of: %s
+"""
+
+
+class Command(CeleryCommand):
+    """Run the celery curses event viewer."""
+    options = (CeleryCommand.options +
+               ev.get_options() +
+               ev.preload_options)
+    help = 'Old alias to the "celery events" command.'
+
+    def handle(self, *args, **options):
+        """Handle the management command."""
+        transport = app.conf.BROKER_TRANSPORT or 'amqplib'
+        if options['camera']:
+            if transport not in SS_TRANSPORTS:
+                self.die(
+                    SS_COMPAT % (
+                        transport,
+                        ', '.join(t for t in SS_TRANSPORTS if '.' not in t)),
+                )
+        ev.run(*args, **options)
diff --git a/awx/lib/site-packages/djcelery/management/commands/celerymon.py b/awx/lib/site-packages/djcelery/management/commands/celerymon.py
new file mode 100644
index 0000000000..a4b13cc87d
--- /dev/null
+++ b/awx/lib/site-packages/djcelery/management/commands/celerymon.py
@@ -0,0 +1,42 @@
+"""
+
+Start the celery monitor from the Django management command.
+
+"""
+from __future__ import absolute_import
+
+import sys
+
+from djcelery.app import app
+from djcelery.management.base import CeleryCommand
+
+try:
+    from celerymon.bin.celerymon import MonitorCommand
+    mon = MonitorCommand(app=app)
+except ImportError:
+    mon = None
+
+MISSING = """
+You don't have celerymon installed; please install it by running the
+following command:
+
+    $ pip install -U celerymon
+
+or, if you're still using easy_install (shame on you!):
+
+    $ easy_install -U celerymon
+"""
+
+
+class Command(CeleryCommand):
+    """Run the celery monitor."""
+    options = (CeleryCommand.options +
+               (mon and mon.get_options() + mon.preload_options or ()))
+    help = 'Run the celery monitor'
+
+    def handle(self, *args, **options):
+        """Handle the management command."""
+        if mon is None:
+            sys.stderr.write(MISSING)
+        else:
+            mon.run(**options)
diff --git a/awx/lib/site-packages/djcelery/management/commands/djcelerymon.py b/awx/lib/site-packages/djcelery/management/commands/djcelerymon.py
new file mode 100644
index 0000000000..97465fbee5
--- /dev/null
+++ b/awx/lib/site-packages/djcelery/management/commands/djcelerymon.py
@@ -0,0 +1,48 @@
+from __future__ import absolute_import
+
+import sys
+import threading
+
+from celery.bin import celeryev
+
+from django.core.management.commands import runserver
+
+from djcelery.app import app
+from djcelery.management.base import CeleryCommand
+
+ev = celeryev.EvCommand(app=app)
+
+
+class WebserverThread(threading.Thread):
+
+    def __init__(self, addrport='', *args, **options):
+        threading.Thread.__init__(self)
+        self.addrport = addrport
+        self.args = args
+        self.options = options
+
+    def run(self):
+        options = dict(self.options, use_reloader=False)
+        command = runserver.Command()
+        # see http://code.djangoproject.com/changeset/13319
+        command.stdout, command.stderr = sys.stdout, sys.stderr
+        command.handle(self.addrport, *self.args, **options)
+
+
+class Command(CeleryCommand):
+    """Start a Django admin instance and celerycam in the same process."""
+    args = '[optional port number, or ipaddr:port]'
+    options = (runserver.Command.option_list +
+               ev.get_options() +
+               ev.preload_options)
+    help = 'Starts Django Admin instance and celerycam in the same process.'
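+
+    # Example invocation (addrport is illustrative and takes the same
+    # forms as Django's runserver):
+    #
+    #     $ python manage.py djcelerymon 127.0.0.1:8000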
+ # see http://code.djangoproject.com/changeset/13319. + stdout, stderr = sys.stdout, sys.stderr + + def handle(self, addrport='', *args, **options): + """Handle the management command.""" + server = WebserverThread(addrport, *args, **options) + server.start() + options['camera'] = 'djcelery.snapshot.Camera' + options['prog_name'] = 'djcelerymon' + ev.run(*args, **options) diff --git a/awx/lib/site-packages/djcelery/managers.py b/awx/lib/site-packages/djcelery/managers.py new file mode 100644 index 0000000000..95ad068319 --- /dev/null +++ b/awx/lib/site-packages/djcelery/managers.py @@ -0,0 +1,246 @@ +from __future__ import absolute_import + +import warnings + +from functools import wraps +from itertools import count + +from django.db import transaction, connection +try: + from django.db import connections, router +except ImportError: # pre-Django 1.2 + connections = router = None # noqa + +from django.db import models +from django.db.models.query import QuerySet +from django.conf import settings + +from celery.utils.timeutils import maybe_timedelta + +from .utils import now + + +class TxIsolationWarning(UserWarning): + pass + + +def transaction_retry(max_retries=1): + """Decorator for methods doing database operations. + + If the database operation fails, it will retry the operation + at most ``max_retries`` times. + + """ + def _outer(fun): + + @wraps(fun) + def _inner(*args, **kwargs): + _max_retries = kwargs.pop('exception_retry_count', max_retries) + for retries in count(0): + try: + return fun(*args, **kwargs) + except Exception: # pragma: no cover + # Depending on the database backend used we can experience + # various exceptions. E.g. psycopg2 raises an exception + # if some operation breaks the transaction, so saving + # the task result won't be possible until we rollback + # the transaction. 
+ if retries >= _max_retries: + raise + transaction.rollback_unless_managed() + + return _inner + + return _outer + + +def update_model_with_dict(obj, fields): + [setattr(obj, attr_name, attr_value) + for attr_name, attr_value in fields.items()] + obj.save() + return obj + + +class ExtendedQuerySet(QuerySet): + + def update_or_create(self, **kwargs): + obj, created = self.get_or_create(**kwargs) + + if not created: + fields = dict(kwargs.pop('defaults', {})) + fields.update(kwargs) + update_model_with_dict(obj, fields) + + return obj + + +class ExtendedManager(models.Manager): + + def get_query_set(self): + return ExtendedQuerySet(self.model) + + def update_or_create(self, **kwargs): + return self.get_query_set().update_or_create(**kwargs) + + def connection_for_write(self): + if connections: + return connections[router.db_for_write(self.model)] + return connection + + def connection_for_read(self): + if connections: + return connections[self.db] + return connection + + def current_engine(self): + try: + return settings.DATABASES[self.db]['ENGINE'] + except AttributeError: + return settings.DATABASE_ENGINE + + +class ResultManager(ExtendedManager): + + def get_all_expired(self, expires): + """Get all expired task results.""" + return self.filter(date_done__lt=now() - maybe_timedelta(expires)) + + @transaction.commit_manually + def delete_expired(self, expires): + """Delete all expired taskset results.""" + try: + self.get_all_expired(expires).update(hidden=True) + cursor = self.connection_for_write().cursor() + cursor.execute( + 'DELETE FROM %s WHERE hidden=%%s' % ( + self.model._meta.db_table, ), + (True, ), + ) + except: + transaction.rollback() + raise + else: + transaction.commit() + + +class PeriodicTaskManager(ExtendedManager): + + def enabled(self): + return self.filter(enabled=True) + + +class TaskManager(ResultManager): + """Manager for :class:`celery.models.Task` models.""" + _last_id = None + + def get_task(self, task_id): + """Get task meta for task by ``task_id``. + + :keyword exception_retry_count: How many times to retry by + transaction rollback on exception. This could theoretically + happen in a race condition if another worker is trying to + create the same task. The default is to retry once. + + """ + try: + return self.get(task_id=task_id) + except self.model.DoesNotExist: + if self._last_id == task_id: + self.warn_if_repeatable_read() + self._last_id = task_id + return self.model(task_id=task_id) + + @transaction_retry(max_retries=2) + def store_result(self, task_id, result, status, + traceback=None, children=None): + """Store the result and status of a task. + + :param task_id: task id + + :param result: The return value of the task, or an exception + instance raised by the task. + + :param status: Task status. See + :meth:`celery.result.AsyncResult.get_status` for a list of + possible status values. + + :keyword traceback: The traceback at the point of exception (if the + task failed). + + :keyword children: List of serialized results of subtasks + of this task. + + :keyword exception_retry_count: How many times to retry by + transaction rollback on exception. This could theoretically + happen in a race condition if another worker is trying to + create the same task. The default is to retry twice. 
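+
+        A minimal sketch (assuming ``task_id`` holds a UUID string)::
+
+            TaskMeta._default_manager.store_result(task_id, 42, 'SUCCESS')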
+ + """ + return self.update_or_create(task_id=task_id, + defaults={'status': status, + 'result': result, + 'traceback': traceback, + 'meta': {'children': children}}) + + def warn_if_repeatable_read(self): + if 'mysql' in self.current_engine().lower(): + cursor = self.connection_for_read().cursor() + if cursor.execute('SELECT @@tx_isolation'): + isolation = cursor.fetchone()[0] + if isolation == 'REPEATABLE-READ': + warnings.warn(TxIsolationWarning( + 'Polling results with transaction isolation level ' + 'repeatable-read within the same transaction ' + 'may give outdated results. Be sure to commit the ' + 'transaction for each poll iteration.')) + + +class TaskSetManager(ResultManager): + """Manager for :class:`celery.models.TaskSet` models.""" + + def restore_taskset(self, taskset_id): + """Get the async result instance by taskset id.""" + try: + return self.get(taskset_id=taskset_id) + except self.model.DoesNotExist: + pass + + def delete_taskset(self, taskset_id): + """Delete a saved taskset result.""" + s = self.restore_taskset(taskset_id) + if s: + s.delete() + + @transaction_retry(max_retries=2) + def store_result(self, taskset_id, result): + """Store the async result instance of a taskset. + + :param taskset_id: task set id + + :param result: The return value of the taskset + + """ + return self.update_or_create(taskset_id=taskset_id, + defaults={'result': result}) + + +class TaskStateManager(ExtendedManager): + + def active(self): + return self.filter(hidden=False) + + def expired(self, states, expires, nowfun=now): + return self.filter(state__in=states, + tstamp__lte=nowfun() - maybe_timedelta(expires)) + + def expire_by_states(self, states, expires): + if expires is not None: + return self.expired(states, expires).update(hidden=True) + + def purge(self): + cursor = self.connection_for_write().cursor() + cursor.execute( + 'DELETE FROM %s WHERE hidden=%%s' % (self.model._meta.db_table, ), + (True, ), + ) + transaction.commit_unless_managed() diff --git a/awx/lib/site-packages/djcelery/migrations/0001_initial.py b/awx/lib/site-packages/djcelery/migrations/0001_initial.py new file mode 100644 index 0000000000..ffe60491ad --- /dev/null +++ b/awx/lib/site-packages/djcelery/migrations/0001_initial.py @@ -0,0 +1,206 @@ +# encoding: utf-8 +from __future__ import absolute_import + +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models +from django.db import DatabaseError + + +class Migration(SchemaMigration): + + def forwards(self, orm): + + # Adding model 'TaskMeta' + db.create_table('celery_taskmeta', ( + ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('task_id', self.gf('django.db.models.fields.CharField')(unique=True, max_length=255)), + ('status', self.gf('django.db.models.fields.CharField')(default='PENDING', max_length=50)), + ('result', self.gf('djcelery.picklefield.PickledObjectField')(default=None, null=True)), + ('date_done', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)), + ('traceback', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),)) + db.send_create_signal('djcelery', ['TaskMeta']) + + # Adding model 'TaskSetMeta' + db.create_table('celery_tasksetmeta', ( + ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('taskset_id', self.gf('django.db.models.fields.CharField')(unique=True, max_length=255)), + ('result', self.gf('djcelery.picklefield.PickledObjectField')()), + ('date_done', 
self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),)) + db.send_create_signal('djcelery', ['TaskSetMeta']) + + # Adding model 'IntervalSchedule' + db.create_table('djcelery_intervalschedule', ( + ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('every', self.gf('django.db.models.fields.IntegerField')()), + ('period', self.gf('django.db.models.fields.CharField')(max_length=24)),)) + db.send_create_signal('djcelery', ['IntervalSchedule']) + + # Adding model 'CrontabSchedule' + db.create_table('djcelery_crontabschedule', ( + ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('minute', self.gf('django.db.models.fields.CharField')(default='*', max_length=64)), + ('hour', self.gf('django.db.models.fields.CharField')(default='*', max_length=64)), + ('day_of_week', self.gf('django.db.models.fields.CharField')(default='*', max_length=64)),)) + db.send_create_signal('djcelery', ['CrontabSchedule']) + + # Adding model 'PeriodicTasks' + db.create_table('djcelery_periodictasks', ( + ('ident', self.gf('django.db.models.fields.SmallIntegerField')(default=1, unique=True, primary_key=True)), + ('last_update', self.gf('django.db.models.fields.DateTimeField')()),)) + db.send_create_signal('djcelery', ['PeriodicTasks']) + + # Adding model 'PeriodicTask' + db.create_table('djcelery_periodictask', ( + ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=200)), + ('task', self.gf('django.db.models.fields.CharField')(max_length=200)), + ('interval', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['djcelery.IntervalSchedule'], null=True, blank=True)), + ('crontab', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['djcelery.CrontabSchedule'], null=True, blank=True)), + ('args', self.gf('django.db.models.fields.TextField')(default='[]', blank=True)), + ('kwargs', self.gf('django.db.models.fields.TextField')(default='{}', blank=True)), + ('queue', self.gf('django.db.models.fields.CharField')(default=None, max_length=200, null=True, blank=True)), + ('exchange', self.gf('django.db.models.fields.CharField')(default=None, max_length=200, null=True, blank=True)), + ('routing_key', self.gf('django.db.models.fields.CharField')(default=None, max_length=200, null=True, blank=True)), + ('expires', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)), + ('enabled', self.gf('django.db.models.fields.BooleanField')(default=True)), + ('last_run_at', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)), + ('total_run_count', self.gf('django.db.models.fields.PositiveIntegerField')(default=0)), + ('date_changed', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),)) + db.send_create_signal('djcelery', ['PeriodicTask']) + + # Adding model 'WorkerState' + db.create_table('djcelery_workerstate', ( + ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('hostname', self.gf('django.db.models.fields.CharField')(unique=True, max_length=255)), + ('last_heartbeat', self.gf('django.db.models.fields.DateTimeField')(null=True, db_index=True)),)) + db.send_create_signal('djcelery', ['WorkerState']) + + # Adding model 'TaskState' + db.create_table('djcelery_taskstate', ( + ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('state', self.gf('django.db.models.fields.CharField')(max_length=64, db_index=True)), + ('task_id', 
self.gf('django.db.models.fields.CharField')(unique=True, max_length=36)), + ('name', self.gf('django.db.models.fields.CharField')(max_length=200, null=True, db_index=True)), + ('tstamp', self.gf('django.db.models.fields.DateTimeField')(db_index=True)), + ('args', self.gf('django.db.models.fields.TextField')(null=True)), + ('kwargs', self.gf('django.db.models.fields.TextField')(null=True)), + ('eta', self.gf('django.db.models.fields.DateTimeField')(null=True)), + ('expires', self.gf('django.db.models.fields.DateTimeField')(null=True)), + ('result', self.gf('django.db.models.fields.TextField')(null=True)), + ('traceback', self.gf('django.db.models.fields.TextField')(null=True)), + ('runtime', self.gf('django.db.models.fields.FloatField')(null=True)), + ('retries', self.gf('django.db.models.fields.IntegerField')(default=0)), + ('worker', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['djcelery.WorkerState'], null=True)), + ('hidden', self.gf('django.db.models.fields.BooleanField')(default=False, db_index=True)),)) + db.send_create_signal('djcelery', ['TaskState']) + + + def backwards(self, orm): + + # Deleting model 'TaskMeta' + db.delete_table('celery_taskmeta') + + # Deleting model 'TaskSetMeta' + db.delete_table('celery_tasksetmeta') + + # Deleting model 'IntervalSchedule' + db.delete_table('djcelery_intervalschedule') + + # Deleting model 'CrontabSchedule' + db.delete_table('djcelery_crontabschedule') + + # Deleting model 'PeriodicTasks' + db.delete_table('djcelery_periodictasks') + + # Deleting model 'PeriodicTask' + db.delete_table('djcelery_periodictask') + + # Deleting model 'WorkerState' + db.delete_table('djcelery_workerstate') + + # Deleting model 'TaskState' + db.delete_table('djcelery_taskstate') + + + models = { + 'djcelery.crontabschedule': { + 'Meta': {'object_name': 'CrontabSchedule'}, + 'day_of_week': ('django.db.models.fields.CharField', [], {'default': "'*'", 'max_length': '64'}), + 'hour': ('django.db.models.fields.CharField', [], {'default': "'*'", 'max_length': '64'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'minute': ('django.db.models.fields.CharField', [], {'default': "'*'", 'max_length': '64'}) + }, + 'djcelery.intervalschedule': { + 'Meta': {'object_name': 'IntervalSchedule'}, + 'every': ('django.db.models.fields.IntegerField', [], {}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'period': ('django.db.models.fields.CharField', [], {'max_length': '24'}) + }, + 'djcelery.periodictask': { + 'Meta': {'object_name': 'PeriodicTask'}, + 'args': ('django.db.models.fields.TextField', [], {'default': "'[]'", 'blank': 'True'}), + 'crontab': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['djcelery.CrontabSchedule']", 'null': 'True', 'blank': 'True'}), + 'date_changed': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), + 'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'exchange': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '200', 'null': 'True', 'blank': 'True'}), + 'expires': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'interval': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['djcelery.IntervalSchedule']", 'null': 'True', 'blank': 'True'}), + 'kwargs': ('django.db.models.fields.TextField', [], {'default': "'{}'", 'blank': 'True'}), + 
'last_run_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}), + 'queue': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '200', 'null': 'True', 'blank': 'True'}), + 'routing_key': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '200', 'null': 'True', 'blank': 'True'}), + 'task': ('django.db.models.fields.CharField', [], {'max_length': '200'}), + 'total_run_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}) + }, + 'djcelery.periodictasks': { + 'Meta': {'object_name': 'PeriodicTasks'}, + 'ident': ('django.db.models.fields.SmallIntegerField', [], {'default': '1', 'unique': 'True', 'primary_key': 'True'}), + 'last_update': ('django.db.models.fields.DateTimeField', [], {}) + }, + 'djcelery.taskmeta': { + 'Meta': {'object_name': 'TaskMeta', 'db_table': "'celery_taskmeta'"}, + 'date_done': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'result': ('djcelery.picklefield.PickledObjectField', [], {'default': 'None', 'null': 'True'}), + 'status': ('django.db.models.fields.CharField', [], {'default': "'PENDING'", 'max_length': '50'}), + 'task_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'traceback': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}) + }, + 'djcelery.tasksetmeta': { + 'Meta': {'object_name': 'TaskSetMeta', 'db_table': "'celery_tasksetmeta'"}, + 'date_done': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'result': ('djcelery.picklefield.PickledObjectField', [], {}), + 'taskset_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}) + }, + 'djcelery.taskstate': { + 'Meta': {'ordering': "['-tstamp']", 'object_name': 'TaskState'}, + 'args': ('django.db.models.fields.TextField', [], {'null': 'True'}), + 'eta': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'expires': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'kwargs': ('django.db.models.fields.TextField', [], {'null': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_index': 'True'}), + 'result': ('django.db.models.fields.TextField', [], {'null': 'True'}), + 'retries': ('django.db.models.fields.IntegerField', [], {'default': '0'}), + 'runtime': ('django.db.models.fields.FloatField', [], {'null': 'True'}), + 'state': ('django.db.models.fields.CharField', [], {'max_length': '64', 'db_index': 'True'}), + 'task_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '36'}), + 'traceback': ('django.db.models.fields.TextField', [], {'null': 'True'}), + 'tstamp': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}), + 'worker': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['djcelery.WorkerState']", 'null': 'True'}) + }, + 'djcelery.workerstate': { + 'Meta': {'ordering': "['-last_heartbeat']", 'object_name': 'WorkerState'}, + 'hostname': ('django.db.models.fields.CharField', [], 
{'unique': 'True', 'max_length': '255'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'last_heartbeat': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}) + } + } + + complete_apps = ['djcelery'] diff --git a/awx/lib/site-packages/djcelery/migrations/0002_v25_changes.py b/awx/lib/site-packages/djcelery/migrations/0002_v25_changes.py new file mode 100644 index 0000000000..d4e81371e9 --- /dev/null +++ b/awx/lib/site-packages/djcelery/migrations/0002_v25_changes.py @@ -0,0 +1,146 @@ +# encoding: utf-8 +from __future__ import absolute_import +from south.db import db +from south.v2 import SchemaMigration +from django.db import connections + + +class Migration(SchemaMigration): + + def forwards(self, orm): + conn = connections[db.db_alias] + table_list = conn.introspection.get_table_list(conn.cursor()) + if 'celery_taskmeta' not in table_list: + self.create_celery_taskmeta() + if 'celery_tasksetmeta' not in table_list: + self.create_celery_tasksetmeta() + self.apply_current_migration() + + def create_celery_taskmeta(self): + # Adding model 'TaskMeta' + db.create_table('celery_taskmeta', ( + ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('task_id', self.gf('django.db.models.fields.CharField')(unique=True, max_length=255)), + ('status', self.gf('django.db.models.fields.CharField')(default='PENDING', max_length=50)), + ('result', self.gf('djcelery.picklefield.PickledObjectField')(default=None, null=True)), + ('date_done', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)), + ('traceback', self.gf('django.db.models.fields.TextField')(null=True, blank=True)), + )) + db.send_create_signal('djcelery', ['TaskMeta']) + + def create_celery_tasksetmeta(self): + # Adding model 'TaskSetMeta' + db.create_table('celery_tasksetmeta', ( + ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('taskset_id', self.gf('django.db.models.fields.CharField')(unique=True, max_length=255)), + ('result', self.gf('djcelery.picklefield.PickledObjectField')()), + ('date_done', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)), + )) + db.send_create_signal('djcelery', ['TaskSetMeta']) + + def apply_current_migration(self): + # Adding field 'PeriodicTask.description' + db.add_column('djcelery_periodictask', 'description', self.gf('django.db.models.fields.TextField')(default='', blank=True), keep_default=False) + + # Adding field 'TaskMeta.hidden' + db.add_column('celery_taskmeta', 'hidden', self.gf('django.db.models.fields.BooleanField')(default=False, db_index=True), keep_default=False) + + # Adding field 'TaskSetMeta.hidden' + db.add_column('celery_tasksetmeta', 'hidden', self.gf('django.db.models.fields.BooleanField')(default=False, db_index=True), keep_default=False) + + + def backwards(self, orm): + + # Deleting field 'PeriodicTask.description' + db.delete_column('djcelery_periodictask', 'description') + + # Deleting field 'TaskMeta.hidden' + db.delete_column('celery_taskmeta', 'hidden') + + # Deleting field 'TaskSetMeta.hidden' + db.delete_column('celery_tasksetmeta', 'hidden') + + + models = { + 'djcelery.crontabschedule': { + 'Meta': {'object_name': 'CrontabSchedule'}, + 'day_of_week': ('django.db.models.fields.CharField', [], {'default': "'*'", 'max_length': '64'}), + 'hour': ('django.db.models.fields.CharField', [], {'default': "'*'", 'max_length': '64'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'minute': 
('django.db.models.fields.CharField', [], {'default': "'*'", 'max_length': '64'}) + }, + 'djcelery.intervalschedule': { + 'Meta': {'object_name': 'IntervalSchedule'}, + 'every': ('django.db.models.fields.IntegerField', [], {}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'period': ('django.db.models.fields.CharField', [], {'max_length': '24'}) + }, + 'djcelery.periodictask': { + 'Meta': {'object_name': 'PeriodicTask'}, + 'args': ('django.db.models.fields.TextField', [], {'default': "'[]'", 'blank': 'True'}), + 'crontab': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['djcelery.CrontabSchedule']", 'null': 'True', 'blank': 'True'}), + 'date_changed': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), + 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}), + 'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'exchange': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '200', 'null': 'True', 'blank': 'True'}), + 'expires': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'interval': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['djcelery.IntervalSchedule']", 'null': 'True', 'blank': 'True'}), + 'kwargs': ('django.db.models.fields.TextField', [], {'default': "'{}'", 'blank': 'True'}), + 'last_run_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}), + 'queue': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '200', 'null': 'True', 'blank': 'True'}), + 'routing_key': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '200', 'null': 'True', 'blank': 'True'}), + 'task': ('django.db.models.fields.CharField', [], {'max_length': '200'}), + 'total_run_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}) + }, + 'djcelery.periodictasks': { + 'Meta': {'object_name': 'PeriodicTasks'}, + 'ident': ('django.db.models.fields.SmallIntegerField', [], {'default': '1', 'unique': 'True', 'primary_key': 'True'}), + 'last_update': ('django.db.models.fields.DateTimeField', [], {}) + }, + 'djcelery.taskmeta': { + 'Meta': {'object_name': 'TaskMeta', 'db_table': "'celery_taskmeta'"}, + 'date_done': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), + 'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'result': ('djcelery.picklefield.PickledObjectField', [], {'default': 'None', 'null': 'True'}), + 'status': ('django.db.models.fields.CharField', [], {'default': "'PENDING'", 'max_length': '50'}), + 'task_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'traceback': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}) + }, + 'djcelery.tasksetmeta': { + 'Meta': {'object_name': 'TaskSetMeta', 'db_table': "'celery_tasksetmeta'"}, + 'date_done': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), + 'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'result': 
('djcelery.picklefield.PickledObjectField', [], {}), + 'taskset_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}) + }, + 'djcelery.taskstate': { + 'Meta': {'ordering': "['-tstamp']", 'object_name': 'TaskState'}, + 'args': ('django.db.models.fields.TextField', [], {'null': 'True'}), + 'eta': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'expires': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'kwargs': ('django.db.models.fields.TextField', [], {'null': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_index': 'True'}), + 'result': ('django.db.models.fields.TextField', [], {'null': 'True'}), + 'retries': ('django.db.models.fields.IntegerField', [], {'default': '0'}), + 'runtime': ('django.db.models.fields.FloatField', [], {'null': 'True'}), + 'state': ('django.db.models.fields.CharField', [], {'max_length': '64', 'db_index': 'True'}), + 'task_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '36'}), + 'traceback': ('django.db.models.fields.TextField', [], {'null': 'True'}), + 'tstamp': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}), + 'worker': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['djcelery.WorkerState']", 'null': 'True'}) + }, + 'djcelery.workerstate': { + 'Meta': {'ordering': "['-last_heartbeat']", 'object_name': 'WorkerState'}, + 'hostname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'last_heartbeat': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}) + } + } + + complete_apps = ['djcelery'] diff --git a/awx/lib/site-packages/djcelery/migrations/0003_v26_changes.py b/awx/lib/site-packages/djcelery/migrations/0003_v26_changes.py new file mode 100644 index 0000000000..09bef1d1af --- /dev/null +++ b/awx/lib/site-packages/djcelery/migrations/0003_v26_changes.py @@ -0,0 +1,112 @@ +# encoding: utf-8 +from __future__ import absolute_import +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + +class Migration(SchemaMigration): + + def forwards(self, orm): + + # Adding field 'CrontabSchedule.day_of_month' + db.add_column('djcelery_crontabschedule', 'day_of_month', self.gf('django.db.models.fields.CharField')(default='*', max_length=64), keep_default=False) + + # Adding field 'CrontabSchedule.month_of_year' + db.add_column('djcelery_crontabschedule', 'month_of_year', self.gf('django.db.models.fields.CharField')(default='*', max_length=64), keep_default=False) + + + def backwards(self, orm): + + # Deleting field 'CrontabSchedule.day_of_month' + db.delete_column('djcelery_crontabschedule', 'day_of_month') + + # Deleting field 'CrontabSchedule.month_of_year' + db.delete_column('djcelery_crontabschedule', 'month_of_year') + + + models = { + 'djcelery.crontabschedule': { + 'Meta': {'object_name': 'CrontabSchedule'}, + 'day_of_month': ('django.db.models.fields.CharField', [], {'default': "'*'", 'max_length': '64'}), + 'day_of_week': ('django.db.models.fields.CharField', [], {'default': "'*'", 'max_length': '64'}), + 'hour': ('django.db.models.fields.CharField', [], {'default': "'*'", 'max_length': '64'}), + 'id': 
('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'minute': ('django.db.models.fields.CharField', [], {'default': "'*'", 'max_length': '64'}), + 'month_of_year': ('django.db.models.fields.CharField', [], {'default': "'*'", 'max_length': '64'}) + }, + 'djcelery.intervalschedule': { + 'Meta': {'object_name': 'IntervalSchedule'}, + 'every': ('django.db.models.fields.IntegerField', [], {}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'period': ('django.db.models.fields.CharField', [], {'max_length': '24'}) + }, + 'djcelery.periodictask': { + 'Meta': {'object_name': 'PeriodicTask'}, + 'args': ('django.db.models.fields.TextField', [], {'default': "'[]'", 'blank': 'True'}), + 'crontab': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['djcelery.CrontabSchedule']", 'null': 'True', 'blank': 'True'}), + 'date_changed': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), + 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}), + 'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'exchange': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '200', 'null': 'True', 'blank': 'True'}), + 'expires': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'interval': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['djcelery.IntervalSchedule']", 'null': 'True', 'blank': 'True'}), + 'kwargs': ('django.db.models.fields.TextField', [], {'default': "'{}'", 'blank': 'True'}), + 'last_run_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}), + 'queue': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '200', 'null': 'True', 'blank': 'True'}), + 'routing_key': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '200', 'null': 'True', 'blank': 'True'}), + 'task': ('django.db.models.fields.CharField', [], {'max_length': '200'}), + 'total_run_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}) + }, + 'djcelery.periodictasks': { + 'Meta': {'object_name': 'PeriodicTasks'}, + 'ident': ('django.db.models.fields.SmallIntegerField', [], {'default': '1', 'unique': 'True', 'primary_key': 'True'}), + 'last_update': ('django.db.models.fields.DateTimeField', [], {}) + }, + 'djcelery.taskmeta': { + 'Meta': {'object_name': 'TaskMeta', 'db_table': "'celery_taskmeta'"}, + 'date_done': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), + 'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'result': ('djcelery.picklefield.PickledObjectField', [], {'default': 'None', 'null': 'True'}), + 'status': ('django.db.models.fields.CharField', [], {'default': "'PENDING'", 'max_length': '50'}), + 'task_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'traceback': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}) + }, + 'djcelery.tasksetmeta': { + 'Meta': {'object_name': 'TaskSetMeta', 'db_table': "'celery_tasksetmeta'"}, + 'date_done': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), + 
'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'result': ('djcelery.picklefield.PickledObjectField', [], {}), + 'taskset_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}) + }, + 'djcelery.taskstate': { + 'Meta': {'ordering': "['-tstamp']", 'object_name': 'TaskState'}, + 'args': ('django.db.models.fields.TextField', [], {'null': 'True'}), + 'eta': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'expires': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'kwargs': ('django.db.models.fields.TextField', [], {'null': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_index': 'True'}), + 'result': ('django.db.models.fields.TextField', [], {'null': 'True'}), + 'retries': ('django.db.models.fields.IntegerField', [], {'default': '0'}), + 'runtime': ('django.db.models.fields.FloatField', [], {'null': 'True'}), + 'state': ('django.db.models.fields.CharField', [], {'max_length': '64', 'db_index': 'True'}), + 'task_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '36'}), + 'traceback': ('django.db.models.fields.TextField', [], {'null': 'True'}), + 'tstamp': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}), + 'worker': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['djcelery.WorkerState']", 'null': 'True'}) + }, + 'djcelery.workerstate': { + 'Meta': {'ordering': "['-last_heartbeat']", 'object_name': 'WorkerState'}, + 'hostname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'last_heartbeat': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}) + } + } + + complete_apps = ['djcelery'] diff --git a/awx/lib/site-packages/djcelery/migrations/0004_v30_changes.py b/awx/lib/site-packages/djcelery/migrations/0004_v30_changes.py new file mode 100644 index 0000000000..c28be7d9ad --- /dev/null +++ b/awx/lib/site-packages/djcelery/migrations/0004_v30_changes.py @@ -0,0 +1,107 @@ +# encoding: utf-8 +from __future__ import absolute_import +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + +class Migration(SchemaMigration): + + def forwards(self, orm): + + # Adding field 'TaskMeta.meta' + db.add_column('celery_taskmeta', 'meta', self.gf('djcelery.picklefield.PickledObjectField')(default=None, null=True), keep_default=False) + + + def backwards(self, orm): + + # Deleting field 'TaskMeta.meta' + db.delete_column('celery_taskmeta', 'meta') + + + models = { + 'djcelery.crontabschedule': { + 'Meta': {'object_name': 'CrontabSchedule'}, + 'day_of_month': ('django.db.models.fields.CharField', [], {'default': "'*'", 'max_length': '64'}), + 'day_of_week': ('django.db.models.fields.CharField', [], {'default': "'*'", 'max_length': '64'}), + 'hour': ('django.db.models.fields.CharField', [], {'default': "'*'", 'max_length': '64'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'minute': ('django.db.models.fields.CharField', [], {'default': "'*'", 'max_length': '64'}), + 'month_of_year': 
('django.db.models.fields.CharField', [], {'default': "'*'", 'max_length': '64'}) + }, + 'djcelery.intervalschedule': { + 'Meta': {'object_name': 'IntervalSchedule'}, + 'every': ('django.db.models.fields.IntegerField', [], {}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'period': ('django.db.models.fields.CharField', [], {'max_length': '24'}) + }, + 'djcelery.periodictask': { + 'Meta': {'object_name': 'PeriodicTask'}, + 'args': ('django.db.models.fields.TextField', [], {'default': "'[]'", 'blank': 'True'}), + 'crontab': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['djcelery.CrontabSchedule']", 'null': 'True', 'blank': 'True'}), + 'date_changed': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), + 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}), + 'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'exchange': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '200', 'null': 'True', 'blank': 'True'}), + 'expires': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'interval': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['djcelery.IntervalSchedule']", 'null': 'True', 'blank': 'True'}), + 'kwargs': ('django.db.models.fields.TextField', [], {'default': "'{}'", 'blank': 'True'}), + 'last_run_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}), + 'queue': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '200', 'null': 'True', 'blank': 'True'}), + 'routing_key': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '200', 'null': 'True', 'blank': 'True'}), + 'task': ('django.db.models.fields.CharField', [], {'max_length': '200'}), + 'total_run_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}) + }, + 'djcelery.periodictasks': { + 'Meta': {'object_name': 'PeriodicTasks'}, + 'ident': ('django.db.models.fields.SmallIntegerField', [], {'default': '1', 'unique': 'True', 'primary_key': 'True'}), + 'last_update': ('django.db.models.fields.DateTimeField', [], {}) + }, + 'djcelery.taskmeta': { + 'Meta': {'object_name': 'TaskMeta', 'db_table': "'celery_taskmeta'"}, + 'date_done': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), + 'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'meta': ('djcelery.picklefield.PickledObjectField', [], {'default': 'None', 'null': 'True'}), + 'result': ('djcelery.picklefield.PickledObjectField', [], {'default': 'None', 'null': 'True'}), + 'status': ('django.db.models.fields.CharField', [], {'default': "'PENDING'", 'max_length': '50'}), + 'task_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'traceback': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}) + }, + 'djcelery.tasksetmeta': { + 'Meta': {'object_name': 'TaskSetMeta', 'db_table': "'celery_tasksetmeta'"}, + 'date_done': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), + 'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 
'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'result': ('djcelery.picklefield.PickledObjectField', [], {}), + 'taskset_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}) + }, + 'djcelery.taskstate': { + 'Meta': {'ordering': "['-tstamp']", 'object_name': 'TaskState'}, + 'args': ('django.db.models.fields.TextField', [], {'null': 'True'}), + 'eta': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'expires': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'kwargs': ('django.db.models.fields.TextField', [], {'null': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_index': 'True'}), + 'result': ('django.db.models.fields.TextField', [], {'null': 'True'}), + 'retries': ('django.db.models.fields.IntegerField', [], {'default': '0'}), + 'runtime': ('django.db.models.fields.FloatField', [], {'null': 'True'}), + 'state': ('django.db.models.fields.CharField', [], {'max_length': '64', 'db_index': 'True'}), + 'task_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '36'}), + 'traceback': ('django.db.models.fields.TextField', [], {'null': 'True'}), + 'tstamp': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}), + 'worker': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['djcelery.WorkerState']", 'null': 'True'}) + }, + 'djcelery.workerstate': { + 'Meta': {'ordering': "['-last_heartbeat']", 'object_name': 'WorkerState'}, + 'hostname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'last_heartbeat': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}) + } + } + + complete_apps = ['djcelery'] diff --git a/awx/lib/site-packages/djcelery/migrations/__init__.py b/awx/lib/site-packages/djcelery/migrations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/djcelery/models.py b/awx/lib/site-packages/djcelery/models.py new file mode 100644 index 0000000000..31403c6980 --- /dev/null +++ b/awx/lib/site-packages/djcelery/models.py @@ -0,0 +1,369 @@ +from __future__ import absolute_import + +from datetime import timedelta +from time import time, mktime + +from django.core.exceptions import MultipleObjectsReturned, ValidationError +from django.db import models +from django.db.models import signals +from django.utils.translation import ugettext_lazy as _ + +from celery import schedules +from celery import states +from celery.events.state import heartbeat_expires +from celery.utils.timeutils import timedelta_seconds + +from . 
import managers
+from .picklefield import PickledObjectField
+from .utils import now
+
+TASK_STATE_CHOICES = zip(states.ALL_STATES, states.ALL_STATES)
+
+
+class TaskMeta(models.Model):
+    """Task result/status."""
+    task_id = models.CharField(_(u'task id'), max_length=255, unique=True)
+    status = models.CharField(
+        _(u'state'),
+        max_length=50, default=states.PENDING, choices=TASK_STATE_CHOICES,
+    )
+    result = PickledObjectField(null=True, default=None, editable=False)
+    date_done = models.DateTimeField(_(u'done at'), auto_now=True)
+    traceback = models.TextField(_(u'traceback'), blank=True, null=True)
+    hidden = models.BooleanField(editable=False, default=False, db_index=True)
+    meta = PickledObjectField(
+        _(u'meta'), null=True, default=None, editable=False,
+    )
+
+    objects = managers.TaskManager()
+
+    class Meta:
+        verbose_name = _(u'task state')
+        verbose_name_plural = _(u'task states')
+        db_table = 'celery_taskmeta'
+
+    def to_dict(self):
+        return {'task_id': self.task_id,
+                'status': self.status,
+                'result': self.result,
+                'date_done': self.date_done,
+                'traceback': self.traceback,
+                'children': (self.meta or {}).get('children')}
+
+    def __unicode__(self):
+        return u'<Task: %s state->%s>' % (self.task_id, self.status)
+
+
+class TaskSetMeta(models.Model):
+    """TaskSet result"""
+    taskset_id = models.CharField(_(u'group id'), max_length=255, unique=True)
+    result = PickledObjectField()
+    date_done = models.DateTimeField(_(u'created at'), auto_now=True)
+    hidden = models.BooleanField(editable=False, default=False, db_index=True)
+
+    objects = managers.TaskSetManager()
+
+    class Meta:
+        """Model meta-data."""
+        verbose_name = _(u'saved group result')
+        verbose_name_plural = _(u'saved group results')
+        db_table = 'celery_tasksetmeta'
+
+    def to_dict(self):
+        return {'taskset_id': self.taskset_id,
+                'result': self.result,
+                'date_done': self.date_done}
+
+    def __unicode__(self):
+        return u'<TaskSet: %s>' % (self.taskset_id)
+
+
+PERIOD_CHOICES = (('days', _(u'Days')),
+                  ('hours', _(u'Hours')),
+                  ('minutes', _(u'Minutes')),
+                  ('seconds', _(u'Seconds')),
+                  ('microseconds', _(u'Microseconds')))
+
+
+class IntervalSchedule(models.Model):
+    every = models.IntegerField(_(u'every'), null=False)
+    period = models.CharField(
+        _(u'period'), max_length=24, choices=PERIOD_CHOICES,
+    )
+
+    class Meta:
+        verbose_name = _(u'interval')
+        verbose_name_plural = _(u'intervals')
+
+    @property
+    def schedule(self):
+        return schedules.schedule(timedelta(**{self.period: self.every}))
+
+    @classmethod
+    def from_schedule(cls, schedule, period='seconds'):
+        every = timedelta_seconds(schedule.run_every)
+        try:
+            return cls.objects.get(every=every, period=period)
+        except cls.DoesNotExist:
+            return cls(every=every, period=period)
+        except MultipleObjectsReturned:
+            cls.objects.filter(every=every, period=period).delete()
+            return cls(every=every, period=period)
+
+    def __unicode__(self):
+        if self.every == 1:
+            return _(u'every %(period)s') % {'period': self.period[:-1]}
+        return _(u'every %(every)s %(period)s') % {'every': self.every,
+                                                   'period': self.period}
+
+
+class CrontabSchedule(models.Model):
+    minute = models.CharField(
+        _(u'minute'), max_length=64, default='*',
+    )
+    hour = models.CharField(
+        _(u'hour'), max_length=64, default='*',
+    )
+    day_of_week = models.CharField(
+        _(u'day of week'), max_length=64, default='*',
+    )
+    day_of_month = models.CharField(
+        _(u'day of month'), max_length=64, default='*',
+    )
+    month_of_year = models.CharField(
+        _(u'month of year'), max_length=64, default='*',
+    )
+
+    class Meta:
+        verbose_name = 
_(u'crontab') + verbose_name_plural = _(u'crontabs') + + def __unicode__(self): + rfield = lambda f: f and str(f).replace(' ', '') or '*' + return u'%s %s %s %s %s (m/h/d/dM/MY)' % (rfield(self.minute), + rfield(self.hour), + rfield(self.day_of_week), + rfield(self.day_of_month), + rfield(self.month_of_year)) + + @property + def schedule(self): + return schedules.crontab(minute=self.minute, + hour=self.hour, + day_of_week=self.day_of_week, + day_of_month=self.day_of_month, + month_of_year=self.month_of_year) + + @classmethod + def from_schedule(cls, schedule): + spec = {'minute': schedule._orig_minute, + 'hour': schedule._orig_hour, + 'day_of_week': schedule._orig_day_of_week, + 'day_of_month': schedule._orig_day_of_month, + 'month_of_year': schedule._orig_month_of_year} + try: + return cls.objects.get(**spec) + except cls.DoesNotExist: + return cls(**spec) + except MultipleObjectsReturned: + cls.objects.filter(**spec).delete() + return cls(**spec) + + +class PeriodicTasks(models.Model): + ident = models.SmallIntegerField(default=1, primary_key=True, unique=True) + last_update = models.DateTimeField(null=False) + + objects = managers.ExtendedManager() + + @classmethod + def changed(cls, instance, **kwargs): + if not instance.no_changes: + cls.objects.update_or_create(ident=1, + defaults={'last_update': now()}) + + @classmethod + def last_change(cls): + try: + return cls.objects.get(ident=1).last_update + except cls.DoesNotExist: + pass + + +class PeriodicTask(models.Model): + name = models.CharField(_(u'name'), max_length=200, unique=True, + help_text=_(u'Useful description')) + task = models.CharField(_(u'task name'), max_length=200) + interval = models.ForeignKey( + IntervalSchedule, + null=True, blank=True, verbose_name=_(u'interval'), + ) + crontab = models.ForeignKey( + CrontabSchedule, + null=True, blank=True, verbose_name=_(u'crontab'), + help_text=_(u'Use one of interval/crontab'), + ) + args = models.TextField( + _(u'Arguments'), blank=True, default='[]', + help_text=_(u'JSON encoded positional arguments'), + ) + kwargs = models.TextField( + _(u'Keyword arguments'), blank=True, default='{}', + help_text=_('JSON encoded keyword arguments'), + ) + queue = models.CharField( + _('queue'), max_length=200, blank=True, null=True, default=None, + help_text=_(u'Queue defined in CELERY_QUEUES'), + ) + exchange = models.CharField( + _(u'exchange'), max_length=200, blank=True, null=True, default=None, + ) + routing_key = models.CharField( + _(u'routing key'), max_length=200, blank=True, null=True, default=None, + ) + expires = models.DateTimeField( + _(u'expires'), blank=True, null=True, + ) + enabled = models.BooleanField( + _(u'enabled'), default=True, + ) + last_run_at = models.DateTimeField( + auto_now=False, auto_now_add=False, + editable=False, blank=True, null=True, + ) + total_run_count = models.PositiveIntegerField( + default=0, editable=False, + ) + date_changed = models.DateTimeField(auto_now=True) + description = models.TextField(_('description'), blank=True) + + objects = managers.PeriodicTaskManager() + no_changes = False + + class Meta: + verbose_name = _(u'periodic task') + verbose_name_plural = _(u'periodic tasks') + + def validate_unique(self, *args, **kwargs): + super(PeriodicTask, self).validate_unique(*args, **kwargs) + if not self.interval and not self.crontab: + raise ValidationError( + {'interval': ['One of interval or crontab must be set.']}) + if self.interval and self.crontab: + raise ValidationError( + {'crontab': ['Only one of interval or crontab must be set']}) + 
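+    # Illustrative usage, not part of the vendored module (the model and
+    # field names are real, the task path is hypothetical):
+    #
+    #     every_30s = IntervalSchedule.objects.create(every=30,
+    #                                                 period='seconds')
+    #     PeriodicTask.objects.create(name='poll-feeds',
+    #                                 task='myapp.tasks.poll_feeds',
+    #                                 interval=every_30s)
+    #
+    # Supplying both interval and crontab (or neither) fails the
+    # validate_unique() check above.
+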
+    def save(self, *args, **kwargs):
+        self.exchange = self.exchange or None
+        self.routing_key = self.routing_key or None
+        self.queue = self.queue or None
+        if not self.enabled:
+            self.last_run_at = None
+        super(PeriodicTask, self).save(*args, **kwargs)
+
+    def __unicode__(self):
+        if self.interval:
+            return u'%s: %s' % (self.name, unicode(self.interval))
+        if self.crontab:
+            return u'%s: %s' % (self.name, unicode(self.crontab))
+        return u'%s: {no schedule}' % (self.name, )
+
+    @property
+    def schedule(self):
+        if self.interval:
+            return self.interval.schedule
+        if self.crontab:
+            return self.crontab.schedule
+
+signals.pre_delete.connect(PeriodicTasks.changed, sender=PeriodicTask)
+signals.pre_save.connect(PeriodicTasks.changed, sender=PeriodicTask)
+
+
+class WorkerState(models.Model):
+    hostname = models.CharField(_(u'hostname'), max_length=255, unique=True)
+    last_heartbeat = models.DateTimeField(_(u'last heartbeat'), null=True,
+                                          db_index=True)
+
+    objects = managers.ExtendedManager()
+
+    class Meta:
+        """Model meta-data."""
+        verbose_name = _(u'worker')
+        verbose_name_plural = _(u'workers')
+        get_latest_by = 'last_heartbeat'
+        ordering = ['-last_heartbeat']
+
+    def __unicode__(self):
+        return self.hostname
+
+    def __repr__(self):
+        return '<WorkerState: %s>' % (self.hostname, )
+
+    def is_alive(self):
+        if self.last_heartbeat:
+            return time() < heartbeat_expires(self.heartbeat_timestamp)
+        return False
+
+    @property
+    def heartbeat_timestamp(self):
+        return mktime(self.last_heartbeat.timetuple())
+
+
+class TaskState(models.Model):
+    state = models.CharField(
+        _(u'state'), max_length=64, choices=TASK_STATE_CHOICES, db_index=True,
+    )
+    task_id = models.CharField(
+        _(u'UUID'), max_length=36, unique=True,
+    )
+    name = models.CharField(
+        _(u'name'), max_length=200, null=True, db_index=True,
+    )
+    tstamp = models.DateTimeField(
+        _(u'event received at'), db_index=True,
+    )
+    args = models.TextField(_(u'Arguments'), null=True)
+    kwargs = models.TextField(_(u'Keyword arguments'), null=True)
+    eta = models.DateTimeField(
+        _(u'ETA'), null=True,
+        help_text=u'date to execute',
+    )
+    expires = models.DateTimeField(_(u'expires'), null=True)
+    result = models.TextField(_(u'result'), null=True)
+    traceback = models.TextField(_(u'traceback'), null=True)
+    runtime = models.FloatField(
+        _(u'execution time'), null=True,
+        help_text=_(u'in seconds if task successful'),
+    )
+    retries = models.IntegerField(_(u'number of retries'), default=0)
+    worker = models.ForeignKey(
+        WorkerState, null=True, verbose_name=_('worker'),
+    )
+    hidden = models.BooleanField(editable=False, default=False, db_index=True)
+
+    objects = managers.TaskStateManager()
+
+    class Meta:
+        """Model meta-data."""
+        verbose_name = _(u'task')
+        verbose_name_plural = _(u'tasks')
+        get_latest_by = 'tstamp'
+        ordering = ['-tstamp']
+
+    def save(self, *args, **kwargs):
+        super(TaskState, self).save(*args, **kwargs)
+
+    def __unicode__(self):
+        name = self.name or 'UNKNOWN'
+        s = u'%s %s %s' % (self.state.ljust(10),
+                           self.task_id.ljust(36),
+                           name)
+        if self.eta:
+            s += u' eta:%s' % (self.eta, )
+        return s
+
+    def __repr__(self):
+        return '<TaskState: %s %s(%s) ts:%s>' % (self.state,
+                                                 self.name or 'UNKNOWN',
+                                                 self.task_id,
+                                                 self.tstamp)
diff --git a/awx/lib/site-packages/djcelery/mon.py b/awx/lib/site-packages/djcelery/mon.py
new file mode 100644
index 0000000000..62e61e78d9
--- /dev/null
+++ b/awx/lib/site-packages/djcelery/mon.py
@@ -0,0 +1,72 @@
+from __future__ import absolute_import
+
+import os
+import sys
+import types
+
+from celery.app.defaults import strtobool
+from 
celery.utils import import_from_cwd + +DEFAULT_APPS = ('django.contrib.auth', + 'django.contrib.contenttypes', + 'django.contrib.sessions', + 'django.contrib.admin', + 'django.contrib.admindocs', + 'djcelery') + +DEFAULTS = {'ROOT_URLCONF': 'djcelery.monproj.urls', + 'DATABASE_ENGINE': 'sqlite3', + 'DATABASE_NAME': 'djcelerymon.db', + 'DATABASES': {'default': { + 'ENGINE': 'django.db.backends.sqlite3', + 'NAME': 'djcelerymon.db'}}, + 'BROKER_URL': 'amqp://', + 'SITE_ID': 1, + 'INSTALLED_APPS': DEFAULT_APPS, + 'DEBUG': strtobool(os.environ.get('DJCELERYMON_DEBUG', '0'))} + + +def default_settings(name='__default_settings__'): + c = type(name, (types.ModuleType, ), DEFAULTS)(name) + c.__dict__.update({'__file__': __file__}) + sys.modules[name] = c + return name + + +def configure(): + from celery import current_app + from celery.loaders.default import DEFAULT_CONFIG_MODULE + from django.conf import settings + + app = current_app + conf = {} + + if not settings.configured: + if 'loader' in app.__dict__ and app.loader.configured: + conf = current_app.loader.conf + else: + os.environ.pop('CELERY_LOADER', None) + settings_module = os.environ.get('CELERY_CONFIG_MODULE', + DEFAULT_CONFIG_MODULE) + try: + import_from_cwd(settings_module) + except ImportError: + settings_module = default_settings() + settings.configure(SETTINGS_MODULE=settings_module, + **dict(DEFAULTS, **conf)) + + +def run_monitor(argv): + from .management.commands import djcelerymon + djcelerymon.Command().run_from_argv([argv[0], 'djcelerymon'] + argv[1:]) + + +def main(argv=sys.argv): + from django.core import management + os.environ['CELERY_LOADER'] = 'default' + configure() + management.call_command('syncdb') + run_monitor(argv) + +if __name__ == '__main__': + main() diff --git a/awx/lib/site-packages/djcelery/monproj/__init__.py b/awx/lib/site-packages/djcelery/monproj/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/djcelery/monproj/urls.py b/awx/lib/site-packages/djcelery/monproj/urls.py new file mode 100644 index 0000000000..829de064ca --- /dev/null +++ b/awx/lib/site-packages/djcelery/monproj/urls.py @@ -0,0 +1,20 @@ +from __future__ import absolute_import + +try: + from django.conf.urls import (patterns, include, url, + handler500, handler404) +except ImportError: + from django.conf.urls.defaults import (patterns, include, url, # noqa + handler500, handler404) +from django.contrib import admin + +admin.autodiscover() + +urlpatterns = patterns( + '', + # Uncomment the admin/doc line below and add 'django.contrib.admindocs' + # to INSTALLED_APPS to enable admin documentation: + (r'^doc/', include('django.contrib.admindocs.urls')), + + (r'', include(admin.site.urls)), +) diff --git a/awx/lib/site-packages/djcelery/picklefield.py b/awx/lib/site-packages/djcelery/picklefield.py new file mode 100644 index 0000000000..1f2845df1f --- /dev/null +++ b/awx/lib/site-packages/djcelery/picklefield.py @@ -0,0 +1,102 @@ +""" + Based on django-picklefield which is + Copyright (c) 2009-2010 Gintautas Miliauskas + but some improvements including not deepcopying values. + + Provides an implementation of a pickled object field. + Such fields can contain any picklable objects. + + The implementation is taken and adopted from Django snippet #1694 + by Taavi Taijala, + which is in turn based on Django snippet #513 + by Oliver Beattie. 
+ +""" +from __future__ import absolute_import + +from base64 import b64encode, b64decode +from zlib import compress, decompress + +from celery.utils.serialization import pickle + +from django.db import models +from django.utils.encoding import force_unicode + +DEFAULT_PROTOCOL = 2 + + +class PickledObject(str): + pass + + +def maybe_compress(value, do_compress=False): + if do_compress: + return compress(value) + return value + + +def maybe_decompress(value, do_decompress=False): + if do_decompress: + return decompress(value) + return value + + +def encode(value, compress_object=False, pickle_protocol=DEFAULT_PROTOCOL): + return b64encode(maybe_compress( + pickle.dumps(value, pickle_protocol), compress_object), + ) + + +def decode(value, compress_object=False): + return pickle.loads(maybe_decompress(b64decode(value), compress_object)) + + +class PickledObjectField(models.Field): + __metaclass__ = models.SubfieldBase + + def __init__(self, compress=False, protocol=DEFAULT_PROTOCOL, + *args, **kwargs): + self.compress = compress + self.protocol = protocol + kwargs.setdefault('editable', False) + super(PickledObjectField, self).__init__(*args, **kwargs) + + def get_default(self): + if self.has_default(): + return self.default() if callable(self.default) else self.default + return super(PickledObjectField, self).get_default() + + def to_python(self, value): + if value is not None: + try: + return decode(value, self.compress) + except Exception: + if isinstance(value, PickledObject): + raise + return value + + def get_db_prep_value(self, value, **kwargs): + if value is not None and not isinstance(value, PickledObject): + return force_unicode(encode(value, self.compress, self.protocol)) + return value + + def value_to_string(self, obj): + return self.get_db_prep_value(self._get_val_from_obj(obj)) + + def get_internal_type(self): + return 'TextField' + + def get_db_prep_lookup(self, lookup_type, value, *args, **kwargs): + if lookup_type not in ['exact', 'in', 'isnull']: + raise TypeError('Lookup type %s is not supported.' % lookup_type) + return super(PickledObjectField, self) \ + .get_db_prep_lookup(*args, **kwargs) + +try: + from south.modelsinspector import add_introspection_rules +except ImportError: + pass +else: + add_introspection_rules( + [], [r'^djcelery\.picklefield\.PickledObjectField'], + ) diff --git a/awx/lib/site-packages/djcelery/schedulers.py b/awx/lib/site-packages/djcelery/schedulers.py new file mode 100644 index 0000000000..135fb5a7b5 --- /dev/null +++ b/awx/lib/site-packages/djcelery/schedulers.py @@ -0,0 +1,248 @@ +from __future__ import absolute_import + +import logging + +from warnings import warn + +from anyjson import deserialize, serialize +from celery import current_app +from celery import schedules +from celery.beat import Scheduler, ScheduleEntry +from celery.utils.encoding import safe_str, safe_repr +from celery.utils.timeutils import is_naive +from kombu.utils.finalize import Finalize + +from django.db import transaction +from django.core.exceptions import ObjectDoesNotExist + +from .models import (PeriodicTask, PeriodicTasks, + CrontabSchedule, IntervalSchedule) +from .utils import DATABASE_ERRORS, make_aware + +# This scheduler must wake up more frequently than the +# regular of 5 minutes because it needs to take external +# changes to the schedule into account. 
+DEFAULT_MAX_INTERVAL = 5  # seconds
+
+
+class ModelEntry(ScheduleEntry):
+    model_schedules = ((schedules.crontab, CrontabSchedule, 'crontab'),
+                       (schedules.schedule, IntervalSchedule, 'interval'))
+    save_fields = ['last_run_at', 'total_run_count', 'no_changes']
+
+    def __init__(self, model):
+        self.app = current_app._get_current_object()
+        self.name = model.name
+        self.task = model.task
+        self.schedule = model.schedule
+        try:
+            self.args = deserialize(model.args or u'[]')
+            self.kwargs = deserialize(model.kwargs or u'{}')
+        except ValueError:
+            logging.error('Failed to deserialize arguments for %s.',
+                          self.name, exc_info=1)
+            logging.warning('Disabling %s', self.name)
+            model.no_changes = True
+            model.enabled = False
+            model.save()
+
+        self.options = {'queue': model.queue,
+                        'exchange': model.exchange,
+                        'routing_key': model.routing_key,
+                        'expires': model.expires}
+        self.total_run_count = model.total_run_count
+        self.model = model
+
+        if not model.last_run_at:
+            model.last_run_at = self._default_now()
+        orig = self.last_run_at = model.last_run_at
+        if not is_naive(self.last_run_at):
+            self.last_run_at = self.last_run_at.replace(tzinfo=None)
+        assert orig.hour == self.last_run_at.hour  # timezone sanity
+
+    def is_due(self):
+        if not self.model.enabled:
+            return False, 5.0  # 5 second delay for re-enable.
+        return self.schedule.is_due(self.last_run_at)
+
+    def _default_now(self):
+        return self.app.now()
+
+    def next(self):
+        self.model.last_run_at = self.app.now()
+        self.model.total_run_count += 1
+        self.model.no_changes = True
+        return self.__class__(self.model)
+    __next__ = next  # for 2to3
+
+    def save(self):
+        # Object may not be synchronized, so only
+        # change the fields we care about.
+        obj = self.model._default_manager.get(pk=self.model.pk)
+        for field in self.save_fields:
+            setattr(obj, field, getattr(self.model, field))
+        obj.last_run_at = make_aware(obj.last_run_at)
+        obj.save()
+
+    @classmethod
+    def to_model_schedule(cls, schedule):
+        for schedule_type, model_type, model_field in cls.model_schedules:
+            schedule = schedules.maybe_schedule(schedule)
+            if isinstance(schedule, schedule_type):
+                model_schedule = model_type.from_schedule(schedule)
+                model_schedule.save()
+                return model_schedule, model_field
+        raise ValueError('Cannot convert schedule type %r to model' % schedule)
+
+    @classmethod
+    def from_entry(cls, name, skip_fields=('relative', 'options'), **entry):
+        options = entry.get('options') or {}
+        fields = dict(entry)
+        for skip_field in skip_fields:
+            fields.pop(skip_field, None)
+        schedule = fields.pop('schedule')
+        model_schedule, model_field = cls.to_model_schedule(schedule)
+        fields[model_field] = model_schedule
+        fields['args'] = serialize(fields.get('args') or [])
+        fields['kwargs'] = serialize(fields.get('kwargs') or {})
+        fields['queue'] = options.get('queue')
+        fields['exchange'] = options.get('exchange')
+        fields['routing_key'] = options.get('routing_key')
+        return cls(PeriodicTask._default_manager.update_or_create(
+            name=name, defaults=fields,
+        ))
+
+    def __repr__(self):
+        return '<ModelEntry: %s %s(*%s, **%s) {%s}>' % (safe_str(self.name),
+                                                        self.task,
+                                                        safe_repr(self.args),
+                                                        safe_repr(self.kwargs),
+                                                        self.schedule)
+
+
+class DatabaseScheduler(Scheduler):
+    Entry = ModelEntry
+    Model = PeriodicTask
+    Changes = PeriodicTasks
+    _schedule = None
+    _last_timestamp = None
+    _initial_read = False
+
+    def __init__(self, *args, **kwargs):
+        self._dirty = set()
+        self._finalize = Finalize(self, self.sync, exitpriority=5)
+        Scheduler.__init__(self, *args, **kwargs)
+        self.max_interval = (
kwargs.get('max_interval') or + self.app.conf.CELERYBEAT_MAX_LOOP_INTERVAL or + DEFAULT_MAX_INTERVAL) + + def setup_schedule(self): + self.install_default_entries(self.schedule) + self.update_from_dict(self.app.conf.CELERYBEAT_SCHEDULE) + + def all_as_schedule(self): + self.logger.debug('DatabaseScheduler: Fetching database schedule') + s = {} + for model in self.Model.objects.enabled(): + try: + s[model.name] = self.Entry(model) + except ValueError: + pass + return s + + def schedule_changed(self): + try: + # If MySQL is running with transaction isolation level + # REPEATABLE-READ (default), then we won't see changes done by + # other transactions until the current transaction is + # committed (Issue #41). + try: + transaction.commit() + except transaction.TransactionManagementError: + pass # not in transaction management. + + last, ts = self._last_timestamp, self.Changes.last_change() + except DATABASE_ERRORS, exc: + warn(RuntimeWarning('Database gave error: %r' % (exc, ))) + return False + try: + if ts and ts > (last if last else ts): + return True + finally: + self._last_timestamp = ts + return False + + def reserve(self, entry): + new_entry = Scheduler.reserve(self, entry) + # Need to store entry by name, because the entry may change + # in the mean time. + self._dirty.add(new_entry.name) + return new_entry + + @transaction.commit_manually + def sync(self): + self.logger.info('Writing entries...') + _tried = set() + try: + try: + while self._dirty: + try: + name = self._dirty.pop() + _tried.add(name) + self.schedule[name].save() + except (KeyError, ObjectDoesNotExist): + pass + except: + transaction.rollback() + raise + else: + transaction.commit() + except DATABASE_ERRORS, exc: + # retry later + self._dirty |= _tried + warn(RuntimeWarning('Database error while sync: %r' % (exc, ))) + + def update_from_dict(self, dict_): + s = {} + for name, entry in dict_.items(): + try: + s[name] = self.Entry.from_entry(name, **entry) + except Exception, exc: + self.logger.error( + 'Could not add entry %r to database schedule: %r. 
'
+                'Contents: %r' % (name, exc, entry))
+        self.schedule.update(s)
+
+    def install_default_entries(self, data):
+        entries = {}
+        if self.app.conf.CELERY_TASK_RESULT_EXPIRES:
+            entries.setdefault(
+                'celery.backend_cleanup', {
+                    'task': 'celery.backend_cleanup',
+                    'schedule': schedules.crontab('0', '4', '*'),
+                    'options': {'expires': 12 * 3600},
+                },
+            )
+        self.update_from_dict(entries)
+
+    @property
+    def schedule(self):
+        update = False
+        if not self._initial_read:
+            self.logger.debug('DatabaseScheduler: initial read')
+            update = True
+            self._initial_read = True
+        elif self.schedule_changed():
+            self.logger.info('DatabaseScheduler: Schedule changed.')
+            update = True
+
+        if update:
+            self.sync()
+            self._schedule = self.all_as_schedule()
+            if self.logger.isEnabledFor(logging.DEBUG):
+                self.logger.debug(
+                    'Current schedule:\n' +
+                    '\n'.join(repr(entry)
+                              for entry in self._schedule.values())
+                )
+        return self._schedule
diff --git a/awx/lib/site-packages/djcelery/snapshot.py b/awx/lib/site-packages/djcelery/snapshot.py
new file mode 100644
index 0000000000..c0a112f4ab
--- /dev/null
+++ b/awx/lib/site-packages/djcelery/snapshot.py
@@ -0,0 +1,161 @@
+from __future__ import absolute_import
+
+from collections import defaultdict
+from datetime import datetime, timedelta
+from time import time
+
+from django.db import transaction
+from django.conf import settings
+
+from celery import states
+from celery.events.state import Task
+from celery.events.snapshot import Polaroid
+from celery.utils.timeutils import maybe_iso8601, timezone
+
+from .models import WorkerState, TaskState
+from .utils import maybe_make_aware
+
+
+WORKER_UPDATE_FREQ = 60  # limit worker timestamp write freq.
+SUCCESS_STATES = frozenset([states.SUCCESS])
+
+# Expiry can be timedelta or None for never expire.
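+# The getattr() defaults below can be overridden from the Django settings
+# module; illustrative values, not defaults of this package:
+#
+#     CELERYCAM_EXPIRE_SUCCESS = timedelta(hours=6)
+#     CELERYCAM_EXPIRE_ERROR = timedelta(days=7)
+#     CELERYCAM_EXPIRE_PENDING = None    # keep pending task rows forever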
+EXPIRE_SUCCESS = getattr(settings, 'CELERYCAM_EXPIRE_SUCCESS', + timedelta(days=1)) +EXPIRE_ERROR = getattr(settings, 'CELERYCAM_EXPIRE_ERROR', + timedelta(days=3)) +EXPIRE_PENDING = getattr(settings, 'CELERYCAM_EXPIRE_PENDING', + timedelta(days=5)) +NOT_SAVED_ATTRIBUTES = frozenset(['name', 'args', 'kwargs', 'eta']) + + +def aware_tstamp(secs): + """Event timestamps uses the local timezone.""" + return timezone.to_local_fallback(datetime.fromtimestamp(secs)) + + +class Camera(Polaroid): + TaskState = TaskState + WorkerState = WorkerState + + clear_after = True + worker_update_freq = WORKER_UPDATE_FREQ + expire_states = { + SUCCESS_STATES: EXPIRE_SUCCESS, + states.EXCEPTION_STATES: EXPIRE_ERROR, + states.UNREADY_STATES: EXPIRE_PENDING, + } + + def __init__(self, *args, **kwargs): + super(Camera, self).__init__(*args, **kwargs) + self._last_worker_write = defaultdict(lambda: (None, None)) + + def get_heartbeat(self, worker): + try: + heartbeat = worker.heartbeats[-1] + except IndexError: + return + return aware_tstamp(heartbeat) + + def handle_worker(self, (hostname, worker)): + last_write, obj = self._last_worker_write[hostname] + if not last_write or time() - last_write > self.worker_update_freq: + obj = self.WorkerState.objects.update_or_create( + hostname=hostname, + defaults={'last_heartbeat': self.get_heartbeat(worker)}, + ) + self._last_worker_write[hostname] = (time(), obj) + return obj + + def handle_task(self, (uuid, task), worker=None): + """Handle snapshotted event.""" + if task.worker and task.worker.hostname: + worker = self.handle_worker( + (task.worker.hostname, task.worker), + ) + + defaults = { + 'name': task.name, + 'args': task.args, + 'kwargs': task.kwargs, + 'eta': maybe_make_aware(maybe_iso8601(task.eta)), + 'expires': maybe_make_aware(maybe_iso8601(task.expires)), + 'state': task.state, + 'tstamp': aware_tstamp(task.timestamp), + 'result': task.result or task.exception, + 'traceback': task.traceback, + 'runtime': task.runtime, + 'worker': worker + } + # Some fields are only stored in the RECEIVED event, + # so we should remove these from default values, + # so that they are not overwritten by subsequent states. 
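+        # (e.g. a later SUCCESS event carries no name/args/kwargs/eta, so
+        # those keys arrive here as None and must not clobber the values
+        # captured from the earlier RECEIVED event)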
+ [defaults.pop(attr, None) for attr in NOT_SAVED_ATTRIBUTES + if defaults[attr] is None] + return self.update_task(task.state, + task_id=uuid, defaults=defaults) + + def update_task(self, state, **kwargs): + objects = self.TaskState.objects + defaults = kwargs.pop('defaults', None) or {} + if not defaults.get('name'): + return + obj, created = objects.get_or_create(defaults=defaults, **kwargs) + if created: + return obj + else: + if states.state(state) < states.state(obj.state): + keep = Task.merge_rules[states.RECEIVED] + defaults = dict( + (k, v) for k, v in defaults.items() + if k not in keep + ) + + for k, v in defaults.items(): + setattr(obj, k, v) + for datefield in ('eta', 'expires', 'tstamp'): + # Brute force trying to fix #183 + setattr(obj, datefield, maybe_make_aware(getattr(obj, datefield))) + obj.save() + + return obj + + def _autocommit(self, fun): + try: + fun() + except (KeyboardInterrupt, SystemExit): + transaction.commit() + raise + except Exception: + transaction.rollback() + raise + else: + transaction.commit() + + @transaction.commit_manually + def on_shutter(self, state, commit_every=100): + if not state.event_count: + transaction.commit() + return + + def _handle_tasks(): + for i, task in enumerate(state.tasks.items()): + self.handle_task(task) + if not i % commit_every: + transaction.commit() + + self._autocommit(lambda: map(self.handle_worker, + state.workers.items())) + self._autocommit(_handle_tasks) + + def on_cleanup(self): + dirty = sum(self.TaskState.objects.expire_by_states(states, expires) + for states, expires in self.expire_states.items()) + if dirty: + self.logger.debug( + 'Cleanup: Marked %s objects as dirty.' % (dirty, ), + ) + self.TaskState.objects.purge() + self.logger.debug('Cleanup: %s objects purged.' % (dirty, )) + return dirty + return 0 diff --git a/awx/lib/site-packages/djcelery/templates/admin/djcelery/change_list.html b/awx/lib/site-packages/djcelery/templates/admin/djcelery/change_list.html new file mode 100644 index 0000000000..f35a801d50 --- /dev/null +++ b/awx/lib/site-packages/djcelery/templates/admin/djcelery/change_list.html @@ -0,0 +1,26 @@ +{% extends "admin/change_list.html" %} +{% load i18n %} + +{% block breadcrumbs %} + + {% if wrong_scheduler %} +
+    <ul class="messagelist">
+      <li class="warning">
+        Periodic tasks won't be dispatched unless you set the
+        CELERYBEAT_SCHEDULER setting to
+        djcelery.schedulers.DatabaseScheduler,
+        or specify it using the -S option to celerybeat
+      </li>
+    </ul>
+  {% endif %}
+{% endblock %} diff --git a/awx/lib/site-packages/djcelery/templates/djcelery/confirm_rate_limit.html b/awx/lib/site-packages/djcelery/templates/djcelery/confirm_rate_limit.html
new file mode 100644
index 0000000000..6152b76f20
--- /dev/null
+++ b/awx/lib/site-packages/djcelery/templates/djcelery/confirm_rate_limit.html
@@ -0,0 +1,25 @@
+{% extends "admin/base_site.html" %}
+{% load i18n %}
+
+{% block breadcrumbs %}
+
+{% endblock %}
+
+{% block content %}
+<form action="" method="post">{% csrf_token %}
+  <p>{% trans "Set a new rate limit for the selected tasks:" %}</p>
+  {% for obj in queryset %}
+  <input type="hidden" name="_selected_action" value="{{ obj.pk }}" />
+  {% endfor %}
+  <input type="hidden" name="action" value="rate_limit" />
+  <input type="text" name="rate_limit" />
+  <input type="submit" value="{% trans "Apply" %}" />
+</form>
+{% endblock %} diff --git a/awx/lib/site-packages/djcelery/tests/__init__.py b/awx/lib/site-packages/djcelery/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/djcelery/tests/req.py b/awx/lib/site-packages/djcelery/tests/req.py new file mode 100644 index 0000000000..1c2107c321 --- /dev/null +++ b/awx/lib/site-packages/djcelery/tests/req.py @@ -0,0 +1,74 @@ +from __future__ import absolute_import + +from django.test import Client +from django.core.handlers.wsgi import WSGIRequest +from django.core.handlers.base import BaseHandler + +from celery.utils.compat import WhateverIO + + +class RequestFactory(Client): + """Class that lets you create mock Request objects for use in testing. + + Usage: + + rf = RequestFactory() + get_request = rf.get('/hello/') + post_request = rf.post('/submit/', {'foo': 'bar'}) + + This class re-uses the django.test.client.Client interface, docs here: + http://www.djangoproject.com/documentation/testing/#the-test-client + + Once you have a request object you can pass it to any view function, + just as if that view had been hooked up using a URLconf. + + """ + + def request(self, **request): + """Similar to parent class, but returns the request object as + soon as it has created it.""" + environ = { + 'HTTP_COOKIE': self.cookies, + 'HTTP_USER_AGENT': 'Django UnitTest Client 1.0', + 'REMOTE_ADDR': '127.0.0.1', + 'PATH_INFO': '/', + 'QUERY_STRING': '', + 'REQUEST_METHOD': 'GET', + 'SCRIPT_NAME': '', + 'SERVER_NAME': 'testserver', + 'SERVER_PORT': 80, + 'SERVER_PROTOCOL': 'HTTP/1.1', + 'wsgi.input': WhateverIO(), + } + + environ.update(self.defaults) + environ.update(request) + return WSGIRequest(environ) + + +class MockRequest(object): + + def __init__(self): + handler = BaseHandler() + handler.load_middleware() + self.request_factory = RequestFactory() + self.middleware = handler._request_middleware + + def _make_request(self, request_method, *args, **kwargs): + request_method_handler = getattr(self.request_factory, request_method) + request = request_method_handler(*args, **kwargs) + [middleware_processor(request) + for middleware_processor in self.middleware] + return request + + def get(self, *args, **kwargs): + return self._make_request("get", *args, **kwargs) + + def post(self, *args, **kwargs): + return self._make_request("post", *args, **kwargs) + + def put(self, *args, **kwargs): + return self._make_request("put", *args, **kwargs) + + def delete(self, *args, **kwargs): + return self._make_request("delete", *args, **kwargs) diff --git a/awx/lib/site-packages/djcelery/tests/test_backends/__init__.py b/awx/lib/site-packages/djcelery/tests/test_backends/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/djcelery/tests/test_backends/test_cache.py b/awx/lib/site-packages/djcelery/tests/test_backends/test_cache.py new file mode 100644 index 0000000000..1662ed8768 --- /dev/null +++ b/awx/lib/site-packages/djcelery/tests/test_backends/test_cache.py @@ -0,0 +1,143 @@ +from __future__ import absolute_import + +import sys + +from datetime import timedelta + +from django.core.cache.backends.base import InvalidCacheBackendError + +from celery import result +from celery import states +from celery.utils import gen_unique_id +from celery.datastructures import ExceptionInfo + +from djcelery.backends.cache import CacheBackend +from djcelery.tests.utils import unittest + + +class SomeClass(object): + + def __init__(self, data): + self.data = data + + +class 
test_CacheBackend(unittest.TestCase): + + def test_mark_as_done(self): + cb = CacheBackend() + + tid = gen_unique_id() + + self.assertEqual(cb.get_status(tid), states.PENDING) + self.assertIsNone(cb.get_result(tid)) + + cb.mark_as_done(tid, 42) + self.assertEqual(cb.get_status(tid), states.SUCCESS) + self.assertEqual(cb.get_result(tid), 42) + self.assertTrue(cb.get_result(tid), 42) + + def test_forget(self): + b = CacheBackend() + tid = gen_unique_id() + b.mark_as_done(tid, {'foo': 'bar'}) + self.assertEqual(b.get_result(tid).get('foo'), 'bar') + b.forget(tid) + self.assertNotIn(tid, b._cache) + self.assertIsNone(b.get_result(tid)) + + def test_save_restore_delete_group(self): + backend = CacheBackend() + group_id = gen_unique_id() + subtask_ids = [gen_unique_id() for i in range(10)] + subtasks = map(result.AsyncResult, subtask_ids) + res = result.GroupResult(group_id, subtasks) + res.save(backend=backend) + saved = result.GroupResult.restore(group_id, backend=backend) + self.assertListEqual(saved.subtasks, subtasks) + self.assertEqual(saved.id, group_id) + saved.delete(backend=backend) + self.assertIsNone(result.GroupResult.restore(group_id, + backend=backend)) + + def test_is_pickled(self): + cb = CacheBackend() + + tid2 = gen_unique_id() + result = {'foo': 'baz', 'bar': SomeClass(12345)} + cb.mark_as_done(tid2, result) + # is serialized properly. + rindb = cb.get_result(tid2) + self.assertEqual(rindb.get('foo'), 'baz') + self.assertEqual(rindb.get('bar').data, 12345) + + def test_mark_as_failure(self): + cb = CacheBackend() + + einfo = None + tid3 = gen_unique_id() + try: + raise KeyError('foo') + except KeyError, exception: + einfo = ExceptionInfo(sys.exc_info()) + pass + cb.mark_as_failure(tid3, exception, traceback=einfo.traceback) + self.assertEqual(cb.get_status(tid3), states.FAILURE) + self.assertIsInstance(cb.get_result(tid3), KeyError) + self.assertEqual(cb.get_traceback(tid3), einfo.traceback) + + def test_process_cleanup(self): + cb = CacheBackend() + cb.process_cleanup() + + def test_set_expires(self): + cb1 = CacheBackend(expires=timedelta(seconds=16)) + self.assertEqual(cb1.expires, 16) + cb2 = CacheBackend(expires=32) + self.assertEqual(cb2.expires, 32) + + +class test_custom_CacheBackend(unittest.TestCase): + + def test_custom_cache_backend(self): + from celery import current_app + prev_backend = current_app.conf.CELERY_CACHE_BACKEND + prev_module = sys.modules['djcelery.backends.cache'] + current_app.conf.CELERY_CACHE_BACKEND = 'dummy://' + sys.modules.pop('djcelery.backends.cache') + try: + from djcelery.backends.cache import cache + from django.core.cache import cache as django_cache + self.assertEqual(cache.__class__.__module__, + 'django.core.cache.backends.dummy') + self.assertIsNot(cache, django_cache) + finally: + current_app.conf.CELERY_CACHE_BACKEND = prev_backend + sys.modules['djcelery.backends.cache'] = prev_module + + +class test_MemcacheWrapper(unittest.TestCase): + + def test_memcache_wrapper(self): + + try: + from django.core.cache.backends import memcached + from django.core.cache.backends import locmem + except InvalidCacheBackendError: + sys.stderr.write( + '\n* Memcache library is not installed. Skipping test.\n') + return + prev_cache_cls = memcached.CacheClass + memcached.CacheClass = locmem.CacheClass + prev_backend_module = sys.modules.pop('djcelery.backends.cache') + try: + from djcelery.backends.cache import cache + key = 'cu.test_memcache_wrapper' + val = 'The quick brown fox.' + default = 'The lazy dog.' 
+ + self.assertEqual(cache.get(key, default=default), default) + cache.set(key, val) + self.assertEqual(cache.get(key, default=default), val) + finally: + memcached.CacheClass = prev_cache_cls + sys.modules['djcelery.backends.cache'] = prev_backend_module diff --git a/awx/lib/site-packages/djcelery/tests/test_backends/test_database.py b/awx/lib/site-packages/djcelery/tests/test_backends/test_database.py new file mode 100644 index 0000000000..55c1d61118 --- /dev/null +++ b/awx/lib/site-packages/djcelery/tests/test_backends/test_database.py @@ -0,0 +1,99 @@ +from __future__ import absolute_import + +from datetime import timedelta + +from celery import current_app +from celery import states +from celery.result import AsyncResult +from celery.task import PeriodicTask +from celery.utils import gen_unique_id + +from djcelery.backends.database import DatabaseBackend +from djcelery.utils import now +from djcelery.tests.utils import unittest + + +class SomeClass(object): + + def __init__(self, data): + self.data = data + + +class MyPeriodicTask(PeriodicTask): + name = 'c.u.my-periodic-task-244' + run_every = timedelta(seconds=1) + + def run(self, **kwargs): + return 42 + + +class TestDatabaseBackend(unittest.TestCase): + + def test_backend(self): + b = DatabaseBackend() + tid = gen_unique_id() + + self.assertEqual(b.get_status(tid), states.PENDING) + self.assertIsNone(b.get_result(tid)) + + b.mark_as_done(tid, 42) + self.assertEqual(b.get_status(tid), states.SUCCESS) + self.assertEqual(b.get_result(tid), 42) + + tid2 = gen_unique_id() + result = {'foo': 'baz', 'bar': SomeClass(12345)} + b.mark_as_done(tid2, result) + # is serialized properly. + rindb = b.get_result(tid2) + self.assertEqual(rindb.get('foo'), 'baz') + self.assertEqual(rindb.get('bar').data, 12345) + + tid3 = gen_unique_id() + try: + raise KeyError('foo') + except KeyError, exception: + pass + b.mark_as_failure(tid3, exception) + self.assertEqual(b.get_status(tid3), states.FAILURE) + self.assertIsInstance(b.get_result(tid3), KeyError) + + def test_forget(self): + b = DatabaseBackend() + tid = gen_unique_id() + b.mark_as_done(tid, {'foo': 'bar'}) + x = AsyncResult(tid) + self.assertEqual(x.result.get('foo'), 'bar') + x.forget() + self.assertIsNone(x.result) + + def test_group_store(self): + b = DatabaseBackend() + tid = gen_unique_id() + + self.assertIsNone(b.restore_group(tid)) + + result = {'foo': 'baz', 'bar': SomeClass(12345)} + b.save_group(tid, result) + rindb = b.restore_group(tid) + self.assertIsNotNone(rindb) + self.assertEqual(rindb.get('foo'), 'baz') + self.assertEqual(rindb.get('bar').data, 12345) + b.delete_group(tid) + self.assertIsNone(b.restore_group(tid)) + + def test_cleanup(self): + b = DatabaseBackend() + b.TaskModel._default_manager.all().delete() + ids = [gen_unique_id() for _ in xrange(3)] + for i, res in enumerate((16, 32, 64)): + b.mark_as_done(ids[i], res) + + self.assertEqual(b.TaskModel._default_manager.count(), 3) + + then = now() - current_app.conf.CELERY_TASK_RESULT_EXPIRES * 2 + # Have to avoid save() because it applies the auto_now=True. 
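+        # A bulk queryset .update() issues a single UPDATE statement and
+        # bypasses Model.save(), so the backdated date_done value is kept.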
+ b.TaskModel._default_manager.filter(task_id__in=ids[:-1]) \ + .update(date_done=then) + + b.cleanup() + self.assertEqual(b.TaskModel._default_manager.count(), 1) diff --git a/awx/lib/site-packages/djcelery/tests/test_discovery.py b/awx/lib/site-packages/djcelery/tests/test_discovery.py new file mode 100644 index 0000000000..4c9aeb1697 --- /dev/null +++ b/awx/lib/site-packages/djcelery/tests/test_discovery.py @@ -0,0 +1,30 @@ +from __future__ import absolute_import + +from django.conf import settings + +from celery.registry import tasks + +from djcelery.loaders import autodiscover +from djcelery.tests.utils import unittest + + +class TestDiscovery(unittest.TestCase): + + def assertDiscovery(self): + apps = autodiscover() + self.assertTrue(apps) + self.assertIn('c.unittest.SomeAppTask', tasks) + self.assertEqual(tasks['c.unittest.SomeAppTask'].run(), 42) + + def test_discovery(self): + if 'someapp' in settings.INSTALLED_APPS: + self.assertDiscovery() + + def test_discovery_with_broken(self): + if 'someapp' in settings.INSTALLED_APPS: + installed_apps = list(settings.INSTALLED_APPS) + settings.INSTALLED_APPS = installed_apps + ['xxxnot.aexist'] + try: + self.assertRaises(ImportError, autodiscover) + finally: + settings.INSTALLED_APPS = installed_apps diff --git a/awx/lib/site-packages/djcelery/tests/test_loaders.py b/awx/lib/site-packages/djcelery/tests/test_loaders.py new file mode 100644 index 0000000000..e507c4aa82 --- /dev/null +++ b/awx/lib/site-packages/djcelery/tests/test_loaders.py @@ -0,0 +1,44 @@ +from __future__ import absolute_import + +from celery import loaders + +from djcelery import loaders as djloaders +from djcelery.tests.utils import unittest + + +class TestDjangoLoader(unittest.TestCase): + + def setUp(self): + self.loader = djloaders.DjangoLoader() + + def test_get_loader_cls(self): + + self.assertEqual(loaders.get_loader_cls('django'), + self.loader.__class__) + # Execute cached branch. 
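+        # (the first lookup above already populated the loader-class cache,
+        # so this identical call exercises the cached path)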
+ self.assertEqual(loaders.get_loader_cls('django'), + self.loader.__class__) + + def test_on_worker_init(self): + from django.conf import settings + old_imports = getattr(settings, 'CELERY_IMPORTS', ()) + settings.CELERY_IMPORTS = ('xxx.does.not.exist', ) + try: + self.assertRaises(ImportError, self.loader.import_default_modules) + finally: + settings.CELERY_IMPORTS = old_imports + + def test_race_protection(self): + djloaders._RACE_PROTECTION = True + try: + self.assertFalse(self.loader.on_worker_init()) + finally: + djloaders._RACE_PROTECTION = False + + def test_find_related_module_no_path(self): + self.assertFalse(djloaders.find_related_module('sys', 'tasks')) + + def test_find_related_module_no_related(self): + self.assertFalse( + djloaders.find_related_module('someapp', 'frobulators'), + ) diff --git a/awx/lib/site-packages/djcelery/tests/test_models.py b/awx/lib/site-packages/djcelery/tests/test_models.py new file mode 100644 index 0000000000..94bd46af93 --- /dev/null +++ b/awx/lib/site-packages/djcelery/tests/test_models.py @@ -0,0 +1,101 @@ +from __future__ import absolute_import + +from datetime import datetime, timedelta + +from celery import states +from celery.utils import gen_unique_id + +from djcelery import celery +from djcelery.models import TaskMeta, TaskSetMeta +from djcelery.utils import now +from djcelery.tests.utils import unittest + + +class TestModels(unittest.TestCase): + + def createTaskMeta(self): + id = gen_unique_id() + taskmeta, created = TaskMeta.objects.get_or_create(task_id=id) + return taskmeta + + def createTaskSetMeta(self): + id = gen_unique_id() + tasksetmeta, created = TaskSetMeta.objects.get_or_create(taskset_id=id) + return tasksetmeta + + def test_taskmeta(self): + m1 = self.createTaskMeta() + m2 = self.createTaskMeta() + m3 = self.createTaskMeta() + self.assertTrue(unicode(m1).startswith('' % (self.__class__.__name__, self.args) + + +class MyRetryTaskError(Exception): + + def __repr__(self): + return '<%s: %r>' % (self.__class__.__name__, self.args) + + +task_is_successful = partial(reversestar, 'celery-is_task_successful') +task_status = partial(reversestar, 'celery-task_status') +task_apply = partial(reverse, 'celery-apply') +registered_tasks = partial(reverse, 'celery-tasks') +scratch = {} + + +@task() +def mytask(x, y): + ret = scratch['result'] = int(x) * int(y) + return ret + + +def create_exception(name, base=Exception): + return type(name, (base, ), {}) + + +def catch_exception(exception): + try: + raise exception + except exception.__class__, exc: + exc = current_app.backend.prepare_exception(exc) + return exc, ExceptionInfo(sys.exc_info()).traceback + + +class ViewTestCase(DjangoTestCase): + + def assertJSONEqual(self, json, py): + json = isinstance(json, HttpResponse) and json.content or json + try: + self.assertEqual(deserialize(json), py) + except TypeError, exc: + raise TypeError('%s: %s' % (exc, json)) + + def assertIn(self, expected, source, *args): + try: + DjangoTestCase.assertIn(self, expected, source, *args) + except AttributeError: + self.assertTrue(expected in source) + + def assertDictContainsSubset(self, a, b, *args): + try: + DjangoTestCase.assertDictContainsSubset(self, a, b, *args) + except AttributeError: + for key, value in a.items(): + self.assertTrue(key in b) + self.assertEqual(b[key], value) + + +class test_task_apply(ViewTestCase): + + def test_apply(self): + current_app.conf.CELERY_ALWAYS_EAGER = True + try: + self.client.get( + task_apply(kwargs={'task_name': mytask.name}) + '?x=4&y=4', + ) + 
self.assertEqual(scratch['result'], 16)
+        finally:
+            current_app.conf.CELERY_ALWAYS_EAGER = False
+
+    def test_apply_raises_404_on_unregistered_task(self):
+        current_app.conf.CELERY_ALWAYS_EAGER = True
+        try:
+            name = 'xxx.does.not.exist'
+            action = partial(
+                self.client.get,
+                task_apply(kwargs={'task_name': name}) + '?x=4&y=4',
+            )
+            try:
+                res = action()
+            except TemplateDoesNotExist:
+                pass  # pre Django 1.5
+            else:
+                self.assertEqual(res.status_code, 404)
+        finally:
+            current_app.conf.CELERY_ALWAYS_EAGER = False
+
+
+class test_registered_tasks(ViewTestCase):
+
+    def test_list_registered_tasks(self):
+        json = self.client.get(registered_tasks())
+        tasks = deserialize(json.content)
+        self.assertIn('celery.backend_cleanup', tasks['regular'])
+
+
+class test_webhook_task(ViewTestCase):
+
+    def test_successful_request(self):
+
+        @task_webhook
+        def add_webhook(request):
+            x = int(request.GET['x'])
+            y = int(request.GET['y'])
+            return x + y
+
+        request = MockRequest().get('/tasks/add', dict(x=10, y=10))
+        response = add_webhook(request)
+        self.assertDictContainsSubset({'status': 'success', 'retval': 20},
+                                      deserialize(response.content))
+
+    def test_failed_request(self):
+
+        @task_webhook
+        def error_webhook(request):
+            x = int(request.GET['x'])
+            y = int(request.GET['y'])
+            raise MyError(x + y)
+
+        request = MockRequest().get('/tasks/error', dict(x=10, y=10))
+        response = error_webhook(request)
+        self.assertDictContainsSubset({'status': 'failure',
+                                       'reason': '<MyError: (20,)>'},
+                                      deserialize(response.content))
+
+
+class test_task_status(ViewTestCase):
+
+    def assertStatusForIs(self, status, res, traceback=None):
+        uuid = gen_unique_id()
+        current_app.backend.store_result(uuid, res, status,
+                                         traceback=traceback)
+        json = self.client.get(task_status(task_id=uuid))
+        expect = dict(id=uuid, status=status, result=res)
+        if status in current_app.backend.EXCEPTION_STATES:
+            instore = current_app.backend.get_result(uuid)
+            self.assertEqual(str(instore.args[0]), str(res.args[0]))
+            expect['result'] = repr(res)
+            expect['exc'] = get_full_cls_name(res.__class__)
+            expect['traceback'] = traceback
+
+        self.assertJSONEqual(json, dict(task=expect))
+
+    def test_success(self):
+        self.assertStatusForIs(states.SUCCESS, 'The quick brown fox')
+
+    def test_failure(self):
+        exc, tb = catch_exception(MyError('foo'))
+        self.assertStatusForIs(states.FAILURE, exc, tb)
+
+    def test_retry(self):
+        oexc, _ = catch_exception(MyError('Resource not available'))
+        exc, tb = catch_exception(MyRetryTaskError(str(oexc), oexc))
+        self.assertStatusForIs(states.RETRY, exc, tb)
+
+
+class test_task_is_successful(ViewTestCase):
+
+    def assertStatusForIs(self, status, outcome):
+        uuid = gen_unique_id()
+        result = gen_unique_id()
+        current_app.backend.store_result(uuid, result, status)
+        json = self.client.get(task_is_successful(task_id=uuid))
+        self.assertJSONEqual(json, {'task': {'id': uuid,
+                                             'executed': outcome}})
+
+    def test_success(self):
+        self.assertStatusForIs(states.SUCCESS, True)
+
+    def test_pending(self):
+        self.assertStatusForIs(states.PENDING, False)
+
+    def test_failure(self):
+        self.assertStatusForIs(states.FAILURE, False)
+
+    def test_retry(self):
+        self.assertStatusForIs(states.RETRY, False)
diff --git a/awx/lib/site-packages/djcelery/tests/test_worker_job.py b/awx/lib/site-packages/djcelery/tests/test_worker_job.py
new file mode 100644
index 0000000000..8692e2d05d
--- /dev/null
+++ b/awx/lib/site-packages/djcelery/tests/test_worker_job.py
@@ -0,0 +1,79 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
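+# These tests monkeypatch connection.close() and cache.close() to verify
+# that running a task through jail() closes Django's database and cache
+# connections when the task finishes.
+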
+from django.core import cache + +from celery.utils import gen_unique_id +from celery.task import task as task_dec + +from celery.tests.worker.test_request import jail + +from djcelery.tests.utils import unittest + + +@task_dec() +def mytask(i): + return i ** i + + +@task_dec() +def get_db_connection(i): + from django.db import connection + return id(connection) +get_db_connection.ignore_result = True + + +class TestJail(unittest.TestCase): + + def test_django_db_connection_is_closed(self): + from django.db import connection + connection._was_closed = False + old_connection_close = connection.close + + def monkeypatched_connection_close(*args, **kwargs): + connection._was_closed = True + return old_connection_close(*args, **kwargs) + + connection.close = monkeypatched_connection_close + try: + jail(gen_unique_id(), get_db_connection.name, [2], {}) + self.assertTrue(connection._was_closed) + finally: + connection.close = old_connection_close + + def test_django_cache_connection_is_closed(self): + old_cache_close = getattr(cache.cache, 'close', None) + cache._was_closed = False + old_cache_parse_backend = getattr(cache, 'parse_backend_uri', None) + if old_cache_parse_backend: # checks to make sure attr exists + delattr(cache, 'parse_backend_uri') + + def monkeypatched_cache_close(*args, **kwargs): + cache._was_closed = True + + cache.cache.close = monkeypatched_cache_close + + jail(gen_unique_id(), mytask.name, [4], {}) + self.assertTrue(cache._was_closed) + cache.cache.close = old_cache_close + if old_cache_parse_backend: + cache.parse_backend_uri = old_cache_parse_backend + + def test_django_cache_connection_is_closed_django_1_1(self): + old_cache_close = getattr(cache.cache, 'close', None) + cache._was_closed = False + old_cache_parse_backend = getattr(cache, 'parse_backend_uri', None) + cache.parse_backend_uri = lambda uri: ['libmemcached', '1', '2'] + + def monkeypatched_cache_close(*args, **kwargs): + cache._was_closed = True + + cache.cache.close = monkeypatched_cache_close + + jail(gen_unique_id(), mytask.name, [4], {}) + self.assertTrue(cache._was_closed) + cache.cache.close = old_cache_close + if old_cache_parse_backend: + cache.parse_backend_uri = old_cache_parse_backend + else: + del(cache.parse_backend_uri) diff --git a/awx/lib/site-packages/djcelery/tests/utils.py b/awx/lib/site-packages/djcelery/tests/utils.py new file mode 100644 index 0000000000..d911e22bf4 --- /dev/null +++ b/awx/lib/site-packages/djcelery/tests/utils.py @@ -0,0 +1,7 @@ +from __future__ import absolute_import + +try: + import unittest + unittest.skip +except AttributeError: + import unittest2 as unittest # noqa diff --git a/awx/lib/site-packages/djcelery/transport/__init__.py b/awx/lib/site-packages/djcelery/transport/__init__.py new file mode 100644 index 0000000000..3f5be67096 --- /dev/null +++ b/awx/lib/site-packages/djcelery/transport/__init__.py @@ -0,0 +1,10 @@ +""" + +This module is an alias to :mod:`kombu.transport.django` + +""" +from __future__ import absolute_import + +import kombu.transport.django as transport + +__path__.extend(transport.__path__) diff --git a/awx/lib/site-packages/djcelery/urls.py b/awx/lib/site-packages/djcelery/urls.py new file mode 100644 index 0000000000..084f4ec927 --- /dev/null +++ b/awx/lib/site-packages/djcelery/urls.py @@ -0,0 +1,32 @@ +""" + +URLs defined for celery. + +* ``/$task_id/done/`` + + URL to :func:`~celery.views.is_successful`. + +* ``/$task_id/status/`` + + URL to :func:`~celery.views.task_status`. 
+ +""" +from __future__ import absolute_import + +try: + from django.conf.urls import patterns, url +except ImportError: # deprecated since Django 1.4 + from django.conf.urls.defaults import patterns, url # noqa + +from . import views + +task_pattern = r'(?P[\w\d\-\.]+)' + +urlpatterns = patterns( + '', + url(r'^%s/done/?$' % task_pattern, views.is_task_successful, + name='celery-is_task_successful'), + url(r'^%s/status/?$' % task_pattern, views.task_status, + name='celery-task_status'), + url(r'^tasks/?$', views.registered_tasks, name='celery-tasks'), +) diff --git a/awx/lib/site-packages/djcelery/utils.py b/awx/lib/site-packages/djcelery/utils.py new file mode 100644 index 0000000000..02702ef390 --- /dev/null +++ b/awx/lib/site-packages/djcelery/utils.py @@ -0,0 +1,86 @@ +# -- XXX This module must not use translation as that causes +# -- a recursive loader import! +from __future__ import absolute_import + +from datetime import datetime + +from django.conf import settings + +# Database-related exceptions. +from django.db import DatabaseError +try: + import MySQLdb as mysql + _my_database_errors = (mysql.DatabaseError, + mysql.InterfaceError, + mysql.OperationalError) +except ImportError: + _my_database_errors = () # noqa +try: + import psycopg2 as pg + _pg_database_errors = (pg.DatabaseError, + pg.InterfaceError, + pg.OperationalError) +except ImportError: + _pg_database_errors = () # noqa +try: + import sqlite3 + _lite_database_errors = (sqlite3.DatabaseError, + sqlite3.InterfaceError, + sqlite3.OperationalError) +except ImportError: + _lite_database_errors = () # noqa +try: + import cx_Oracle as oracle + _oracle_database_errors = (oracle.DatabaseError, + oracle.InterfaceError, + oracle.OperationalError) +except ImportError: + _oracle_database_errors = () # noqa + +DATABASE_ERRORS = ((DatabaseError, ) + + _my_database_errors + + _pg_database_errors + + _lite_database_errors + + _oracle_database_errors) + + +try: + from django.utils import timezone + is_aware = timezone.is_aware + + # see Issue #222 + now_localtime = getattr(timezone, 'template_localtime', timezone.localtime) + + def make_aware(value): + if getattr(settings, 'USE_TZ', False): + # naive datetimes are assumed to be in UTC. + value = timezone.make_aware(value, timezone.utc) + # then convert to the Django configured timezone. 
+ default_tz = timezone.get_default_timezone() + value = timezone.localtime(value, default_tz) + return value + + def make_naive(value): + if getattr(settings, 'USE_TZ', False): + default_tz = timezone.get_default_timezone() + value = timezone.make_naive(value, default_tz) + return value + + def now(): + if getattr(settings, 'USE_TZ', False): + return now_localtime(timezone.now()) + else: + return timezone.now() + +except ImportError: + now = datetime.now + make_aware = make_naive = lambda x: x + is_aware = lambda x: False + + +def maybe_make_aware(value): + if isinstance(value, datetime) and is_aware(value): + return value + if value: + return make_aware(value) + return value diff --git a/awx/lib/site-packages/djcelery/views.py b/awx/lib/site-packages/djcelery/views.py new file mode 100644 index 0000000000..bf7d89c947 --- /dev/null +++ b/awx/lib/site-packages/djcelery/views.py @@ -0,0 +1,119 @@ +from __future__ import absolute_import + +from functools import wraps + +from django.http import HttpResponse, Http404 + +from anyjson import serialize + +from celery import states +from celery.registry import tasks +from celery.result import AsyncResult +from celery.utils import get_full_cls_name, kwdict +from celery.utils.encoding import safe_repr + +# Ensure built-in tasks are loaded for task_list view +import celery.task # noqa + + +def JsonResponse(response): + return HttpResponse(serialize(response), mimetype='application/json') + + +def task_view(task): + """Decorator turning any task into a view that applies the task + asynchronously. Keyword arguments (via URLconf, etc.) will + supersede GET or POST parameters when there are conflicts. + + Returns a JSON dictionary containing the keys ``ok`` and + ``task_id``. + + """ + + def _applier(request, **options): + kwargs = kwdict(request.method == 'POST' and + request.POST or request.GET) + # no multivalue + kwargs = dict(((k, v) for k, v in kwargs.iteritems()), **options) + result = task.apply_async(kwargs=kwargs) + return JsonResponse({'ok': 'true', 'task_id': result.task_id}) + + return _applier + + +def apply(request, task_name): + """View applying a task. + + **Note:** Please use this with caution. Preferably you shouldn't make this + publicly accessible without ensuring your code is safe! + + """ + try: + task = tasks[task_name] + except KeyError: + raise Http404('apply: no such task') + return task_view(task)(request) + + +def is_task_successful(request, task_id): + """Returns task execution status in JSON format.""" + response_data = {'task': {'id': task_id, + 'executed': AsyncResult(task_id).successful()}} + return HttpResponse(serialize(response_data), mimetype='application/json') + + +def task_status(request, task_id): + """Returns task status and result in JSON format.""" + result = AsyncResult(task_id) + state, retval = result.state, result.result + response_data = dict(id=task_id, status=state, result=retval) + if state in states.EXCEPTION_STATES: + traceback = result.traceback + response_data.update({'result': safe_repr(retval), + 'exc': get_full_cls_name(retval.__class__), + 'traceback': traceback}) + return JsonResponse({'task': response_data}) + + +def registered_tasks(request): + """A view returning all defined tasks as a JSON object.""" + return JsonResponse({'regular': tasks.regular().keys(), + 'periodic': tasks.periodic().keys()}) + + +def task_webhook(fun): + """Decorator turning a function into a task webhook.
+ + If an exception is raised within the function, the decorated + function catches this and returns an error JSON response, otherwise + it returns the result as a JSON response. + + + Example: + + .. code-block:: python + + @task_webhook + def add(request): + x = int(request.GET['x']) + y = int(request.GET['y']) + return x + y + + >>> response = add(request) + >>> response.content + "{'status': 'success', 'retval': 100}" + + """ + + @wraps(fun) + def _inner(*args, **kwargs): + try: + retval = fun(*args, **kwargs) + except Exception, exc: + response = {'status': 'failure', 'reason': safe_repr(exc)} + else: + response = {'status': 'success', 'retval': retval} + + return JsonResponse(response) + + return _inner diff --git a/awx/lib/site-packages/fdpexpect.py b/awx/lib/site-packages/fdpexpect.py new file mode 100644 index 0000000000..0ece98e6b9 --- /dev/null +++ b/awx/lib/site-packages/fdpexpect.py @@ -0,0 +1,82 @@ +"""This is like pexpect, but will work on any file descriptor that you pass it. +So you are responsible for opening and closing the file descriptor. + +$Id: fdpexpect.py 505 2007-12-26 21:33:50Z noah $ +""" + +from pexpect import * +import os + +__all__ = ['fdspawn'] + +class fdspawn (spawn): + + """This is like pexpect.spawn but allows you to supply your own open file + descriptor. For example, you could use it to read through a file looking + for patterns, or to control a modem or serial device. """ + + def __init__ (self, fd, args=[], timeout=30, maxread=2000, searchwindowsize=None, logfile=None): + + """This takes a file descriptor (an int) or an object that supports the + fileno() method (returning an int). All Python file-like objects + support fileno(). """ + + ### TODO: Add better handling of trying to use fdspawn in place of spawn + ### TODO: (overload to allow fdspawn to also handle commands as spawn does.) + + if type(fd) != type(0) and hasattr(fd, 'fileno'): + fd = fd.fileno() + + if type(fd) != type(0): + raise ExceptionPexpect ('The fd argument is not an int. If this is a command string then maybe you want to use pexpect.spawn.') + + try: # make sure fd is a valid file descriptor + os.fstat(fd) + except OSError: + raise ExceptionPexpect ('The fd argument is not a valid file descriptor.') + + self.args = None + self.command = None + spawn.__init__(self, None, args, timeout, maxread, searchwindowsize, logfile) + self.child_fd = fd + self.own_fd = False + self.closed = False + self.name = '<file descriptor %d>' % fd + + def __del__ (self): + + return + + def close (self): + + if self.child_fd == -1: + return + if self.own_fd: + # delegate to spawn's close(), which flushes and closes child_fd + spawn.close(self) + else: + self.flush() + os.close(self.child_fd) + self.child_fd = -1 + self.closed = True + + def isalive (self): + + """This checks if the file descriptor is still valid. If os.fstat() + does not raise an exception then we assume it is alive.
""" + + if self.child_fd == -1: + return False + try: + os.fstat(self.child_fd) + return True + except: + return False + + def terminate (self, force=False): + + raise ExceptionPexpect ('This method is not valid for file descriptors.') + + def kill (self, sig): + + return + diff --git a/awx/lib/site-packages/funtests/__init__.py b/awx/lib/site-packages/funtests/__init__.py new file mode 100644 index 0000000000..1bea48807d --- /dev/null +++ b/awx/lib/site-packages/funtests/__init__.py @@ -0,0 +1,5 @@ +import os +import sys + +sys.path.insert(0, os.pardir) +sys.path.insert(0, os.getcwd()) diff --git a/awx/lib/site-packages/funtests/setup.py b/awx/lib/site-packages/funtests/setup.py new file mode 100644 index 0000000000..8159fc4e8b --- /dev/null +++ b/awx/lib/site-packages/funtests/setup.py @@ -0,0 +1,59 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +try: + from setuptools import setup + from setuptools.command.install import install +except ImportError: + from ez_setup import use_setuptools + use_setuptools() + from setuptools import setup # noqa + from setuptools.command.install import install # noqa + + +class no_install(install): + + def run(self, *args, **kwargs): + import sys + sys.stderr.write(""" +------------------------------------------------------- +The billiard functional test suite cannot be installed. +------------------------------------------------------- + + +But you can execute the tests by running the command: + + $ python setup.py test + + +""") + + +setup( + name='billiard-funtests', + version='DEV', + description='Functional test suite for billiard', + author='Ask Solem', + author_email='ask@celeryproject.org', + url='http://github.com/celery/billiard', + platforms=['any'], + packages=[], + data_files=[], + zip_safe=False, + cmdclass={'install': no_install}, + test_suite='nose.collector', + build_requires=[ + 'nose', + 'nose-cover3', + 'unittest2', + 'coverage>=3.0', + ], + classifiers=[ + 'Operating System :: OS Independent', + 'Programming Language :: Python', + 'Programming Language :: C' + 'License :: OSI Approved :: BSD License', + 'Intended Audience :: Developers', + ], + long_description='Do not install this package', +) diff --git a/awx/lib/site-packages/funtests/tests/__init__.py b/awx/lib/site-packages/funtests/tests/__init__.py new file mode 100644 index 0000000000..41cbef6791 --- /dev/null +++ b/awx/lib/site-packages/funtests/tests/__init__.py @@ -0,0 +1,7 @@ +import os +import sys + +sys.path.insert(0, os.path.join(os.getcwd(), os.pardir)) +print(sys.path[0]) +sys.path.insert(0, os.getcwd()) +print(sys.path[0]) diff --git a/awx/lib/site-packages/funtests/tests/test_multiprocessing.py b/awx/lib/site-packages/funtests/tests/test_multiprocessing.py new file mode 100644 index 0000000000..e387681c7d --- /dev/null +++ b/awx/lib/site-packages/funtests/tests/test_multiprocessing.py @@ -0,0 +1,2037 @@ +#!/usr/bin/env python + +from __future__ import absolute_import +from __future__ import with_statement + +# +# Unit tests for the multiprocessing package +# + +import unittest +import Queue +import time +import sys +import os +import gc +import array +import random +import logging +from nose import SkipTest +from test import test_support +from StringIO import StringIO +try: + from billiard._ext import _billiard +except ImportError, exc: + raise SkipTest(exc) +# import threading after _billiard to raise a more revelant error +# message: "No module named _billiard". _billiard is not compiled +# without thread support. 
+import threading + +# Work around broken sem_open implementations +try: + import billiard.synchronize +except ImportError, exc: + raise SkipTest(exc) + +import billiard.dummy +import billiard.connection +import billiard.managers +import billiard.heap +import billiard.pool + +from billiard import util +from billiard.compat import bytes + +latin = str + +# Constants +LOG_LEVEL = util.SUBWARNING + +DELTA = 0.1 +CHECK_TIMINGS = False # making true makes tests take a lot longer + # and can sometimes cause some non-serious + # failures because some calls block a bit + # longer than expected +if CHECK_TIMINGS: + TIMEOUT1, TIMEOUT2, TIMEOUT3 = 0.82, 0.35, 1.4 +else: + TIMEOUT1, TIMEOUT2, TIMEOUT3 = 0.1, 0.1, 0.1 + +HAVE_GETVALUE = not getattr(_billiard, + 'HAVE_BROKEN_SEM_GETVALUE', False) + +WIN32 = (sys.platform == "win32") + +# Some tests require ctypes +try: + from ctypes import Structure, c_int, c_double +except ImportError: + Structure = object + c_int = c_double = None + +try: + from ctypes import Value +except ImportError: + Value = None + +try: + from ctypes import copy as ctypes_copy +except ImportError: + ctypes_copy = None + + +class TimingWrapper(object): + """Creates a wrapper for a function which records the + time it takes to finish""" + + def __init__(self, func): + self.func = func + self.elapsed = None + + def __call__(self, *args, **kwds): + t = time.time() + try: + return self.func(*args, **kwds) + finally: + self.elapsed = time.time() - t + + +class BaseTestCase(object): + """Base class for test cases""" + ALLOWED_TYPES = ('processes', 'manager', 'threads') + + def assertTimingAlmostEqual(self, a, b): + if CHECK_TIMINGS: + self.assertAlmostEqual(a, b, 1) + + def assertReturnsIfImplemented(self, value, func, *args): + try: + res = func(*args) + except NotImplementedError: + pass + else: + return self.assertEqual(value, res) + + +def get_value(self): + """Return the value of a semaphore""" + try: + return self.get_value() + except AttributeError: + try: + return self._Semaphore__value + except AttributeError: + try: + return self._value + except AttributeError: + raise NotImplementedError + + +class _TestProcesses(BaseTestCase): + + ALLOWED_TYPES = ('processes', 'threads') + + def test_current(self): + if self.TYPE == 'threads': + return + + current = self.current_process() + authkey = current.authkey + + self.assertTrue(current.is_alive()) + self.assertTrue(not current.daemon) + self.assertIsInstance(authkey, bytes) + self.assertTrue(len(authkey) > 0) + self.assertEqual(current.ident, os.getpid()) + self.assertEqual(current.exitcode, None) + + def _test(self, q, *args, **kwds): + current = self.current_process() + q.put(args) + q.put(kwds) + q.put(current.name) + if self.TYPE != 'threads': + q.put(bytes(current.authkey, 'ascii')) + q.put(current.pid) + + def test_process(self): + q = self.Queue(1) + e = self.Event() # noqa + args = (q, 1, 2) + kwargs = {'hello': 23, 'bye': 2.54} + name = 'SomeProcess' + p = self.Process( + target=self._test, args=args, kwargs=kwargs, name=name + ) + p.daemon = True + current = self.current_process() + + if self.TYPE != 'threads': + self.assertEquals(p.authkey, current.authkey) + self.assertEquals(p.is_alive(), False) + self.assertEquals(p.daemon, True) + self.assertNotIn(p, self.active_children()) + self.assertTrue(type(self.active_children()) is list) + self.assertEqual(p.exitcode, None) + + p.start() + + self.assertEquals(p.exitcode, None) + self.assertEquals(p.is_alive(), True) + self.assertIn(p, self.active_children()) + + 
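+ # _test() enqueued args, kwargs and the process name (plus authkey + # and pid for real processes); args[1:] skips the queue object itself.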
self.assertEquals(q.get(), args[1:]) + self.assertEquals(q.get(), kwargs) + self.assertEquals(q.get(), p.name) + if self.TYPE != 'threads': + self.assertEquals(q.get(), current.authkey) + self.assertEquals(q.get(), p.pid) + + p.join() + + self.assertEquals(p.exitcode, 0) + self.assertEquals(p.is_alive(), False) + self.assertNotIn(p, self.active_children()) + + def _test_terminate(self): + time.sleep(1000) + + def test_terminate(self): + if self.TYPE == 'threads': + return + + p = self.Process(target=self._test_terminate) + p.daemon = True + p.start() + + self.assertEqual(p.is_alive(), True) + self.assertIn(p, self.active_children()) + self.assertEqual(p.exitcode, None) + + p.terminate() + + join = TimingWrapper(p.join) + self.assertEqual(join(), None) + self.assertTimingAlmostEqual(join.elapsed, 0.0) + + self.assertEqual(p.is_alive(), False) + self.assertNotIn(p, self.active_children()) + + p.join() + + # XXX sometimes get p.exitcode == 0 on Windows ... + #self.assertEqual(p.exitcode, -signal.SIGTERM) + + def test_cpu_count(self): + try: + cpus = billiard.cpu_count() + except NotImplementedError: + cpus = 1 + self.assertTrue(type(cpus) is int) + self.assertTrue(cpus >= 1) + + def test_active_children(self): + self.assertEqual(type(self.active_children()), list) + + p = self.Process(target=time.sleep, args=(DELTA,)) + self.assertNotIn(p, self.active_children()) + + p.start() + self.assertIn(p, self.active_children()) + + p.join() + self.assertNotIn(p, self.active_children()) + + def _test_recursion(self, wconn, id): + __import__('billiard.forking') + wconn.send(id) + if len(id) < 2: + for i in range(2): + p = self.Process( + target=self._test_recursion, args=(wconn, id + [i]) + ) + p.start() + p.join() + + def test_recursion(self): + rconn, wconn = self.Pipe(duplex=False) + self._test_recursion(wconn, []) + + time.sleep(DELTA) + result = [] + while rconn.poll(): + result.append(rconn.recv()) + + expected = [ + [], + [0], + [0, 0], + [0, 1], + [1], + [1, 0], + [1, 1] + ] + self.assertEqual(result, expected) + + +class _UpperCaser(billiard.Process): + + def __init__(self): + billiard.Process.__init__(self) + self.child_conn, self.parent_conn = billiard.Pipe() + + def run(self): + self.parent_conn.close() + for s in iter(self.child_conn.recv, None): + self.child_conn.send(s.upper()) + self.child_conn.close() + + def submit(self, s): + assert type(s) is str + self.parent_conn.send(s) + return self.parent_conn.recv() + + def stop(self): + self.parent_conn.send(None) + self.parent_conn.close() + self.child_conn.close() + + +class _TestSubclassingProcess(BaseTestCase): + + ALLOWED_TYPES = ('processes',) + + def test_subclassing(self): + uppercaser = _UpperCaser() + uppercaser.start() + self.assertEqual(uppercaser.submit('hello'), 'HELLO') + self.assertEqual(uppercaser.submit('world'), 'WORLD') + uppercaser.stop() + uppercaser.join() + + +def queue_empty(q): + if hasattr(q, 'empty'): + return q.empty() + else: + return q.qsize() == 0 + + +def queue_full(q, maxsize): + if hasattr(q, 'full'): + return q.full() + else: + return q.qsize() == maxsize + + +class _TestQueue(BaseTestCase): + + def _test_put(self, queue, child_can_start, parent_can_continue): + child_can_start.wait() + for i in range(6): + queue.get() + parent_can_continue.set() + + def test_put(self): + MAXSIZE = 6 + queue = self.Queue(maxsize=MAXSIZE) + child_can_start = self.Event() + parent_can_continue = self.Event() + + proc = self.Process( + target=self._test_put, + args=(queue, child_can_start, parent_can_continue) + ) + proc.daemon 
= True + proc.start() + + self.assertEqual(queue_empty(queue), True) + self.assertEqual(queue_full(queue, MAXSIZE), False) + + queue.put(1) + queue.put(2, True) + queue.put(3, True, None) + queue.put(4, False) + queue.put(5, False, None) + queue.put_nowait(6) + + # the values may be in buffer but not yet in pipe so sleep a bit + time.sleep(DELTA) + + self.assertEqual(queue_empty(queue), False) + self.assertEqual(queue_full(queue, MAXSIZE), True) + + put = TimingWrapper(queue.put) + put_nowait = TimingWrapper(queue.put_nowait) + + self.assertRaises(Queue.Full, put, 7, False) + self.assertTimingAlmostEqual(put.elapsed, 0) + + self.assertRaises(Queue.Full, put, 7, False, None) + self.assertTimingAlmostEqual(put.elapsed, 0) + + self.assertRaises(Queue.Full, put_nowait, 7) + self.assertTimingAlmostEqual(put_nowait.elapsed, 0) + + self.assertRaises(Queue.Full, put, 7, True, TIMEOUT1) + self.assertTimingAlmostEqual(put.elapsed, TIMEOUT1) + + self.assertRaises(Queue.Full, put, 7, False, TIMEOUT2) + self.assertTimingAlmostEqual(put.elapsed, 0) + + self.assertRaises(Queue.Full, put, 7, True, timeout=TIMEOUT3) + self.assertTimingAlmostEqual(put.elapsed, TIMEOUT3) + + child_can_start.set() + parent_can_continue.wait() + + self.assertEqual(queue_empty(queue), True) + self.assertEqual(queue_full(queue, MAXSIZE), False) + + proc.join() + + def _test_get(self, queue, child_can_start, parent_can_continue): + child_can_start.wait() + #queue.put(1) + queue.put(2) + queue.put(3) + queue.put(4) + queue.put(5) + parent_can_continue.set() + + def test_get(self): + queue = self.Queue() + child_can_start = self.Event() + parent_can_continue = self.Event() + + proc = self.Process( + target=self._test_get, + args=(queue, child_can_start, parent_can_continue) + ) + proc.daemon = True + proc.start() + + self.assertEqual(queue_empty(queue), True) + + child_can_start.set() + parent_can_continue.wait() + + time.sleep(DELTA) + self.assertEqual(queue_empty(queue), False) + + # Hangs unexpectedly, remove for now + #self.assertEqual(queue.get(), 1) + self.assertEqual(queue.get(True, None), 2) + self.assertEqual(queue.get(True), 3) + self.assertEqual(queue.get(timeout=1), 4) + self.assertEqual(queue.get_nowait(), 5) + + self.assertEqual(queue_empty(queue), True) + + get = TimingWrapper(queue.get) + get_nowait = TimingWrapper(queue.get_nowait) + + self.assertRaises(Queue.Empty, get, False) + self.assertTimingAlmostEqual(get.elapsed, 0) + + self.assertRaises(Queue.Empty, get, False, None) + self.assertTimingAlmostEqual(get.elapsed, 0) + + self.assertRaises(Queue.Empty, get_nowait) + self.assertTimingAlmostEqual(get_nowait.elapsed, 0) + + self.assertRaises(Queue.Empty, get, True, TIMEOUT1) + self.assertTimingAlmostEqual(get.elapsed, TIMEOUT1) + + self.assertRaises(Queue.Empty, get, False, TIMEOUT2) + self.assertTimingAlmostEqual(get.elapsed, 0) + + self.assertRaises(Queue.Empty, get, timeout=TIMEOUT3) + self.assertTimingAlmostEqual(get.elapsed, TIMEOUT3) + + proc.join() + + def _test_fork(self, queue): + for i in range(10, 20): + queue.put(i) + # note that at this point the items may only be buffered, so the + # process cannot shutdown until the feeder thread has finished + # pushing items onto the pipe. + + def test_fork(self): + # Old versions of Queue would fail to create a new feeder + # thread for a forked process if the original process had its + # own feeder thread. This test checks that this no longer + # happens. 
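+ # Put items from the parent first so its feeder thread exists before + # the fork, then let the child put the rest.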
+ + queue = self.Queue() + + # put items on queue so that main process starts a feeder thread + for i in range(10): + queue.put(i) + + # wait to make sure thread starts before we fork a new process + time.sleep(DELTA) + + # fork process + p = self.Process(target=self._test_fork, args=(queue,)) + p.start() + + # check that all expected items are in the queue + for i in range(20): + self.assertEqual(queue.get(), i) + self.assertRaises(Queue.Empty, queue.get, False) + + p.join() + + def test_qsize(self): + q = self.Queue() + try: + self.assertEqual(q.qsize(), 0) + except NotImplementedError: + return + q.put(1) + self.assertEqual(q.qsize(), 1) + q.put(5) + self.assertEqual(q.qsize(), 2) + q.get() + self.assertEqual(q.qsize(), 1) + q.get() + self.assertEqual(q.qsize(), 0) + + def _test_task_done(self, q): + for obj in iter(q.get, None): + time.sleep(DELTA) + q.task_done() + + def test_task_done(self): + queue = self.JoinableQueue() + + if sys.version_info < (2, 5) and not hasattr(queue, 'task_done'): + self.skipTest("requires 'queue.task_done()' method") + + workers = [self.Process(target=self._test_task_done, args=(queue,)) + for i in xrange(4)] + + for p in workers: + p.start() + + for i in xrange(10): + queue.put(i) + + queue.join() + + for p in workers: + queue.put(None) + + for p in workers: + p.join() + + +class _TestLock(BaseTestCase): + + def test_lock(self): + lock = self.Lock() + self.assertEqual(lock.acquire(), True) + self.assertEqual(lock.acquire(False), False) + self.assertEqual(lock.release(), None) + self.assertRaises((ValueError, threading.ThreadError), lock.release) + + def test_rlock(self): + lock = self.RLock() + self.assertEqual(lock.acquire(), True) + self.assertEqual(lock.acquire(), True) + self.assertEqual(lock.acquire(), True) + self.assertEqual(lock.release(), None) + self.assertEqual(lock.release(), None) + self.assertEqual(lock.release(), None) + self.assertRaises((AssertionError, RuntimeError), lock.release) + + def test_lock_context(self): + with self.Lock(): + pass + + +class _TestSemaphore(BaseTestCase): + + def _test_semaphore(self, sem): + self.assertReturnsIfImplemented(2, get_value, sem) + self.assertEqual(sem.acquire(), True) + self.assertReturnsIfImplemented(1, get_value, sem) + self.assertEqual(sem.acquire(), True) + self.assertReturnsIfImplemented(0, get_value, sem) + self.assertEqual(sem.acquire(False), False) + self.assertReturnsIfImplemented(0, get_value, sem) + self.assertEqual(sem.release(), None) + self.assertReturnsIfImplemented(1, get_value, sem) + self.assertEqual(sem.release(), None) + self.assertReturnsIfImplemented(2, get_value, sem) + + def test_semaphore(self): + sem = self.Semaphore(2) + self._test_semaphore(sem) + self.assertEqual(sem.release(), None) + self.assertReturnsIfImplemented(3, get_value, sem) + self.assertEqual(sem.release(), None) + self.assertReturnsIfImplemented(4, get_value, sem) + + def test_bounded_semaphore(self): + sem = self.BoundedSemaphore(2) + self._test_semaphore(sem) + # Currently fails on OS/X + #if HAVE_GETVALUE: + # self.assertRaises(ValueError, sem.release) + # self.assertReturnsIfImplemented(2, get_value, sem) + + def test_timeout(self): + if self.TYPE != 'processes': + return + + sem = self.Semaphore(0) + acquire = TimingWrapper(sem.acquire) + + self.assertEqual(acquire(False), False) + self.assertTimingAlmostEqual(acquire.elapsed, 0.0) + + self.assertEqual(acquire(False, None), False) + self.assertTimingAlmostEqual(acquire.elapsed, 0.0) + + self.assertEqual(acquire(False, TIMEOUT1), False) + 
self.assertTimingAlmostEqual(acquire.elapsed, 0) + + self.assertEqual(acquire(True, TIMEOUT2), False) + self.assertTimingAlmostEqual(acquire.elapsed, TIMEOUT2) + + self.assertEqual(acquire(timeout=TIMEOUT3), False) + self.assertTimingAlmostEqual(acquire.elapsed, TIMEOUT3) + + +class _TestCondition(BaseTestCase): + + def f(self, cond, sleeping, woken, timeout=None): + cond.acquire() + sleeping.release() + cond.wait(timeout) + woken.release() + cond.release() + + def check_invariant(self, cond): + # this is only supposed to succeed when there are no sleepers + if self.TYPE == 'processes': + try: + sleepers = (cond._sleeping_count.get_value() - + cond._woken_count.get_value()) + self.assertEqual(sleepers, 0) + self.assertEqual(cond._wait_semaphore.get_value(), 0) + except NotImplementedError: + pass + + def test_notify(self): + cond = self.Condition() + sleeping = self.Semaphore(0) + woken = self.Semaphore(0) + + p = self.Process(target=self.f, args=(cond, sleeping, woken)) + p.daemon = True + p.start() + + p = threading.Thread(target=self.f, args=(cond, sleeping, woken)) + p.daemon = True + p.start() + + # wait for both children to start sleeping + sleeping.acquire() + sleeping.acquire() + + # check no process/thread has woken up + time.sleep(DELTA) + self.assertReturnsIfImplemented(0, get_value, woken) + + # wake up one process/thread + cond.acquire() + cond.notify() + cond.release() + + # check one process/thread has woken up + time.sleep(DELTA) + self.assertReturnsIfImplemented(1, get_value, woken) + + # wake up another + cond.acquire() + cond.notify() + cond.release() + + # check other has woken up + time.sleep(DELTA) + self.assertReturnsIfImplemented(2, get_value, woken) + + # check state is not mucked up + self.check_invariant(cond) + p.join() + + def test_notify_all(self): + cond = self.Condition() + sleeping = self.Semaphore(0) + woken = self.Semaphore(0) + + # start some threads/processes which will timeout + for i in range(3): + p = self.Process(target=self.f, + args=(cond, sleeping, woken, TIMEOUT1)) + p.daemon = True + p.start() + + t = threading.Thread(target=self.f, + args=(cond, sleeping, woken, TIMEOUT1)) + t.daemon = True + t.start() + + # wait for them all to sleep + for i in xrange(6): + sleeping.acquire() + + # check they have all timed out + for i in xrange(6): + woken.acquire() + self.assertReturnsIfImplemented(0, get_value, woken) + + # check state is not mucked up + self.check_invariant(cond) + + # start some more threads/processes + for i in range(3): + p = self.Process(target=self.f, args=(cond, sleeping, woken)) + p.daemon = True + p.start() + + t = threading.Thread(target=self.f, args=(cond, sleeping, woken)) + t.daemon = True + t.start() + + # wait for them to all sleep + for i in xrange(6): + sleeping.acquire() + + # check no process/thread has woken up + time.sleep(DELTA) + self.assertReturnsIfImplemented(0, get_value, woken) + + # wake them all up + cond.acquire() + cond.notify_all() + cond.release() + + # check they have all woken + time.sleep(DELTA) + self.assertReturnsIfImplemented(6, get_value, woken) + + # check state is not mucked up + self.check_invariant(cond) + + def test_timeout(self): + cond = self.Condition() + wait = TimingWrapper(cond.wait) + cond.acquire() + res = wait(TIMEOUT1) + cond.release() + self.assertEqual(res, None) + self.assertTimingAlmostEqual(wait.elapsed, TIMEOUT1) + + +class _TestEvent(BaseTestCase): + + def _test_event(self, event): + time.sleep(TIMEOUT2) + event.set() + + def test_event(self): + event = self.Event() + wait = 
TimingWrapper(event.wait) + + # Removed temporarily, due to API shear, this does not + # work with threading._Event objects. is_set == isSet + self.assertEqual(event.is_set(), False) + + # Removed, threading.Event.wait() will return the value of the __flag + # instead of None. API Shear with the semaphore backed mp.Event + self.assertEqual(wait(0.0), False) + self.assertTimingAlmostEqual(wait.elapsed, 0.0) + self.assertEqual(wait(TIMEOUT1), False) + self.assertTimingAlmostEqual(wait.elapsed, TIMEOUT1) + + event.set() + + # See note above on the API differences + self.assertEqual(event.is_set(), True) + self.assertEqual(wait(), True) + self.assertTimingAlmostEqual(wait.elapsed, 0.0) + self.assertEqual(wait(TIMEOUT1), True) + self.assertTimingAlmostEqual(wait.elapsed, 0.0) + # self.assertEqual(event.is_set(), True) + + event.clear() + + #self.assertEqual(event.is_set(), False) + + self.Process(target=self._test_event, args=(event,)).start() + self.assertEqual(wait(), True) + + +class _TestValue(BaseTestCase): + + ALLOWED_TYPES = ('processes',) + + codes_values = [ + ('i', 4343, 24234), + ('d', 3.625, -4.25), + ('h', -232, 234), + ('c', latin('x'), latin('y')) + ] + + def _test(self, values): + for sv, cv in zip(values, self.codes_values): + sv.value = cv[2] + + @unittest.skipIf(c_int is None, "requires _ctypes") + def test_value(self, raw=False): + if raw: + values = [self.RawValue(code, value) + for code, value, _ in self.codes_values] + else: + values = [self.Value(code, value) + for code, value, _ in self.codes_values] + + for sv, cv in zip(values, self.codes_values): + self.assertEqual(sv.value, cv[1]) + + proc = self.Process(target=self._test, args=(values,)) + proc.start() + proc.join() + + for sv, cv in zip(values, self.codes_values): + self.assertEqual(sv.value, cv[2]) + + @unittest.skipIf(c_int is None, "requires _ctypes") + def test_rawvalue(self): + self.test_value(raw=True) + + @unittest.skipIf(c_int is None, "requires _ctypes") + def test_getobj_getlock(self): + val1 = self.Value('i', 5) + lock1 = val1.get_lock() # noqa + obj1 = val1.get_obj() # noqa + + val2 = self.Value('i', 5, lock=None) + lock2 = val2.get_lock() # noqa + obj2 = val2.get_obj() # noqa + + lock = self.Lock() + val3 = self.Value('i', 5, lock=lock) + lock3 = val3.get_lock() # noqa + obj3 = val3.get_obj() # noqa + self.assertEqual(lock, lock3) + + arr4 = self.Value('i', 5, lock=False) + self.assertFalse(hasattr(arr4, 'get_lock')) + self.assertFalse(hasattr(arr4, 'get_obj')) + + self.assertRaises(AttributeError, self.Value, 'i', 5, lock='navalue') + + arr5 = self.RawValue('i', 5) + self.assertFalse(hasattr(arr5, 'get_lock')) + self.assertFalse(hasattr(arr5, 'get_obj')) + + +class _TestArray(BaseTestCase): + + ALLOWED_TYPES = ('processes',) + + def f(self, seq): + for i in range(1, len(seq)): + seq[i] += seq[i - 1] + + @unittest.skipIf(c_int is None, "requires _ctypes") + def test_array(self, raw=False): + seq = [680, 626, 934, 821, 150, 233, 548, 982, 714, 831] + if raw: + arr = self.RawArray('i', seq) + else: + arr = self.Array('i', seq) + + self.assertEqual(len(arr), len(seq)) + self.assertEqual(arr[3], seq[3]) + self.assertEqual(list(arr[2:7]), list(seq[2:7])) + + arr[4:8] = seq[4:8] = array.array('i', [1, 2, 3, 4]) + + self.assertEqual(list(arr[:]), seq) + + self.f(seq) + + p = self.Process(target=self.f, args=(arr,)) + p.start() + p.join() + + self.assertEqual(list(arr[:]), seq) + + @unittest.skipIf(c_int is None, "requires _ctypes") + def test_rawarray(self): + self.test_array(raw=True) + + 
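+ # get_lock()/get_obj() exist only on synchronized wrappers; with + # lock=False or the Raw* variants a bare ctypes object is returned.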
@unittest.skipIf(c_int is None, "requires _ctypes") + def test_getobj_getlock_obj(self): + arr1 = self.Array('i', range(10)) + lock1 = arr1.get_lock() # noqa + obj1 = arr1.get_obj() # noqa + + arr2 = self.Array('i', range(10), lock=None) + lock2 = arr2.get_lock() # noqa + obj2 = arr2.get_obj() # noqa + + lock = self.Lock() + arr3 = self.Array('i', range(10), lock=lock) + lock3 = arr3.get_lock() + obj3 = arr3.get_obj() # noqa + self.assertEqual(lock, lock3) + + arr4 = self.Array('i', range(10), lock=False) + self.assertFalse(hasattr(arr4, 'get_lock')) + self.assertFalse(hasattr(arr4, 'get_obj')) + self.assertRaises(AttributeError, + self.Array, 'i', range(10), lock='notalock') + + arr5 = self.RawArray('i', range(10)) + self.assertFalse(hasattr(arr5, 'get_lock')) + self.assertFalse(hasattr(arr5, 'get_obj')) + + +class _TestContainers(BaseTestCase): + + ALLOWED_TYPES = ('manager',) + + def test_list(self): + a = self.list(range(10)) + self.assertEqual(a[:], range(10)) + + b = self.list() + self.assertEqual(b[:], []) + + b.extend(range(5)) + self.assertEqual(b[:], range(5)) + + self.assertEqual(b[2], 2) + self.assertEqual(b[2:10], [2, 3, 4]) + + b *= 2 + self.assertEqual(b[:], [0, 1, 2, 3, 4, 0, 1, 2, 3, 4]) + + self.assertEqual(b + [5, 6], [0, 1, 2, 3, 4, 0, 1, 2, 3, 4, 5, 6]) + + self.assertEqual(a[:], range(10)) + + d = [a, b] + e = self.list(d) + self.assertEqual( + e[:], + [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [0, 1, 2, 3, 4, 0, 1, 2, 3, 4]] + ) + + f = self.list([a]) + a.append('hello') + self.assertEqual(f[:], [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 'hello']]) + + def test_dict(self): + d = self.dict() + indices = range(65, 70) + for i in indices: + d[i] = chr(i) + self.assertEqual(d.copy(), dict((i, chr(i)) for i in indices)) + self.assertEqual(sorted(d.keys()), indices) + self.assertEqual(sorted(d.values()), [chr(j) for j in indices]) + self.assertEqual(sorted(d.items()), [(x, chr(x)) for x in indices]) + + def test_namespace(self): + n = self.Namespace() + n.name = 'Bob' + n.job = 'Builder' + n._hidden = 'hidden' + self.assertEqual((n.name, n.job), ('Bob', 'Builder')) + del n.job + self.assertEqual(str(n), "Namespace(name='Bob')") + self.assertTrue(hasattr(n, 'name')) + self.assertTrue(not hasattr(n, 'job')) + + +def sqr(x, wait=0.0): + time.sleep(wait) + return x * x + + +class _TestPool(BaseTestCase): + + def test_apply(self): + papply = self.pool.apply + self.assertEqual(papply(sqr, (5,)), sqr(5)) + self.assertEqual(papply(sqr, (), {'x': 3}), sqr(x=3)) + + def test_map(self): + pmap = self.pool.map + self.assertEqual(pmap(sqr, range(10)), map(sqr, range(10))) + self.assertEqual(pmap(sqr, range(100), chunksize=20), + map(sqr, range(100))) + + def test_map_chunksize(self): + try: + self.pool.map_async(sqr, [], chunksize=1).get(timeout=TIMEOUT1) + except billiard.TimeoutError: + self.fail("pool.map_async with chunksize stalled on null list") + + def test_async(self): + res = self.pool.apply_async(sqr, (7, TIMEOUT1,)) + get = TimingWrapper(res.get) + self.assertEqual(get(), 49) + self.assertTimingAlmostEqual(get.elapsed, TIMEOUT1) + + def test_async_timeout(self): + res = self.pool.apply_async(sqr, (6, TIMEOUT2 + 0.2)) + get = TimingWrapper(res.get) + self.assertRaises(billiard.TimeoutError, get, timeout=TIMEOUT2) + self.assertTimingAlmostEqual(get.elapsed, TIMEOUT2) + + def test_imap(self): + it = self.pool.imap(sqr, range(10)) + self.assertEqual(list(it), map(sqr, range(10))) + + it = self.pool.imap(sqr, range(10)) + for i in range(10): + self.assertEqual(it.next(), i * i) + 
self.assertRaises(StopIteration, it.next) + + it = self.pool.imap(sqr, range(1000), chunksize=100) + for i in range(1000): + self.assertEqual(it.next(), i * i) + self.assertRaises(StopIteration, it.next) + + def test_imap_unordered(self): + it = self.pool.imap_unordered(sqr, range(1000)) + self.assertEqual(sorted(it), map(sqr, range(1000))) + + it = self.pool.imap_unordered(sqr, range(1000), chunksize=53) + self.assertEqual(sorted(it), map(sqr, range(1000))) + + def test_make_pool(self): + p = billiard.Pool(3) + self.assertEqual(3, len(p._pool)) + p.close() + p.join() + + def test_terminate(self): + if self.TYPE == 'manager': + # On Unix a forked process increfs each shared object to + # which its parent process held a reference. If the + # forked process gets terminated then there is likely to + # be a reference leak. So to prevent + # _TestZZZNumberOfObjects from failing we skip this test + # when using a manager. + return + + self.pool.map_async( + time.sleep, [0.1 for i in range(10000)], chunksize=1 + ) + self.pool.terminate() + join = TimingWrapper(self.pool.join) + join() + self.assertTrue(join.elapsed < 0.2) + + +class _TestPoolWorkerLifetime(BaseTestCase): + ALLOWED_TYPES = ('processes', ) + + def test_pool_worker_lifetime(self): + p = billiard.Pool(3, maxtasksperchild=10) + self.assertEqual(3, len(p._pool)) + origworkerpids = [w.pid for w in p._pool] + # Run many tasks so each worker gets replaced (hopefully) + results = [] + for i in range(100): + results.append(p.apply_async(sqr, (i, ))) + # Fetch the results and verify we got the right answers, + # also ensuring all the tasks have completed. + for (j, res) in enumerate(results): + self.assertEqual(res.get(), sqr(j)) + # Refill the pool + p._repopulate_pool() + # Wait until all workers are alive + countdown = 5 + while countdown and not all(w.is_alive() for w in p._pool): + countdown -= 1 + time.sleep(DELTA) + finalworkerpids = [worker.pid for worker in p._pool] + # All pids should be assigned. See issue #7805. + self.assertNotIn(None, origworkerpids) + self.assertNotIn(None, finalworkerpids) + # Finally, check that the worker pids have changed + self.assertNotEqual(sorted(origworkerpids), sorted(finalworkerpids)) + p.close() + p.join() + + +class _TestZZZNumberOfObjects(BaseTestCase): + # Test that manager has expected number of shared objects left + + # Because test cases are sorted alphabetically, this one will get + # run after all the other tests for the manager. It tests that + # there have been no "reference leaks" for the manager's shared + # objects. Note the comment in _TestPool.test_terminate(). 
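+ # (The ZZZ prefix is what forces this class to sort last.)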
+ ALLOWED_TYPES = ('manager',) + + def test_number_of_objects(self): + EXPECTED_NUMBER = 1 # the pool object is still alive + billiard.active_children() # discard dead process objs + gc.collect() # do garbage collection + refs = self.manager._number_of_objects() + debug_info = self.manager._debug_info() + if refs != EXPECTED_NUMBER: + print self.manager._debug_info() + print debug_info + + self.assertEqual(refs, EXPECTED_NUMBER) + +# Test of creating a customized manager class +from billiard.managers import BaseManager, BaseProxy, RemoteError + + +class FooBar(object): + + def f(self): + return 'f()' + + def g(self): + raise ValueError + + def _h(self): + return '_h()' + + +def baz(): + for i in xrange(10): + yield i * i + + +class IteratorProxy(BaseProxy): + _exposed_ = ('next', '__next__') + + def __iter__(self): + return self + + def next(self): + return self._callmethod('next') + + def __next__(self): + return self._callmethod('__next__') + + +class MyManager(BaseManager): + pass + +MyManager.register('Foo', callable=FooBar) +MyManager.register('Bar', callable=FooBar, exposed=('f', '_h')) +MyManager.register('baz', callable=baz, proxytype=IteratorProxy) + + +class _TestMyManager(BaseTestCase): + + ALLOWED_TYPES = ('manager',) + + def test_mymanager(self): + manager = MyManager() + manager.start() + + foo = manager.Foo() + bar = manager.Bar() + baz = manager.baz() + + foo_methods = [name for name in ('f', 'g', '_h') if hasattr(foo, name)] + bar_methods = [name for name in ('f', 'g', '_h') if hasattr(bar, name)] + + self.assertEqual(foo_methods, ['f', 'g']) + self.assertEqual(bar_methods, ['f', '_h']) + + self.assertEqual(foo.f(), 'f()') + self.assertRaises(ValueError, foo.g) + self.assertEqual(foo._callmethod('f'), 'f()') + self.assertRaises(RemoteError, foo._callmethod, '_h') + + self.assertEqual(bar.f(), 'f()') + self.assertEqual(bar._h(), '_h()') + self.assertEqual(bar._callmethod('f'), 'f()') + self.assertEqual(bar._callmethod('_h'), '_h()') + + self.assertEqual(list(baz), [i * i for i in range(10)]) + + manager.shutdown() + +_queue = Queue.Queue() + + +# Test of connecting to a remote server and using xmlrpclib for serialization +def get_queue(): + return _queue + + +class QueueManager(BaseManager): + '''manager class used by server process''' +QueueManager.register('get_queue', callable=get_queue) + + +class QueueManager2(BaseManager): + '''manager class which specifies the same interface as QueueManager''' +QueueManager2.register('get_queue') + + +SERIALIZER = 'xmlrpclib' + + +class _TestRemoteManager(BaseTestCase): + + ALLOWED_TYPES = ('manager',) + + def _putter(self, address, authkey): + manager = QueueManager2( + address=address, authkey=authkey, serializer=SERIALIZER + ) + manager.connect() + queue = manager.get_queue() + queue.put(('hello world', None, True, 2.25)) + + def test_remote(self): + authkey = os.urandom(32) + + manager = QueueManager( + address=('localhost', 0), authkey=authkey, serializer=SERIALIZER + ) + manager.start() + + p = self.Process(target=self._putter, args=(manager.address, authkey)) + p.start() + + manager2 = QueueManager2( + address=manager.address, authkey=authkey, serializer=SERIALIZER + ) + manager2.connect() + queue = manager2.get_queue() + + # Note that xmlrpclib will deserialize object as a list not a tuple + self.assertEqual(queue.get(), ['hello world', None, True, 2.25]) + + # Because we are using xmlrpclib for serialization instead of + # pickle this will cause a serialization error. 
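+ # xmlrpclib has no marshalling rule for callables such as time.sleep.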
+ self.assertRaises(Exception, queue.put, time.sleep) + + # Make queue finalizer run before the server is stopped + del queue + manager.shutdown() + + +class _TestManagerRestart(BaseTestCase): + + def _putter(self, address, authkey): + manager = QueueManager( + address=address, authkey=authkey, serializer=SERIALIZER) + manager.connect() + queue = manager.get_queue() + queue.put('hello world') + + def test_rapid_restart(self): + authkey = os.urandom(32) + manager = QueueManager( + address=('localhost', 0), authkey=authkey, serializer=SERIALIZER) + addr = manager.get_server().address + manager.start() + + p = self.Process(target=self._putter, args=(manager.address, authkey)) + p.start() + queue = manager.get_queue() + self.assertEqual(queue.get(), 'hello world') + del queue + manager.shutdown() + manager = QueueManager( + address=addr, authkey=authkey, serializer=SERIALIZER) + manager.start() + manager.shutdown() + +SENTINEL = latin('') + + +class _TestConnection(BaseTestCase): + + ALLOWED_TYPES = ('processes', 'threads') + + def _echo(self, conn): + for msg in iter(conn.recv_bytes, SENTINEL): + conn.send_bytes(msg) + conn.close() + + def test_connection(self): + conn, child_conn = self.Pipe() + + p = self.Process(target=self._echo, args=(child_conn,)) + p.daemon = True + p.start() + + seq = [1, 2.25, None] + msg = latin('hello world') + longmsg = msg * 10 + arr = array.array('i', range(4)) + + if self.TYPE == 'processes': + self.assertEqual(type(conn.fileno()), int) + + self.assertEqual(conn.send(seq), None) + self.assertEqual(conn.recv(), seq) + + self.assertEqual(conn.send_bytes(msg), None) + self.assertEqual(conn.recv_bytes(), msg) + + if self.TYPE == 'processes': + buffer = array.array('i', [0] * 10) + expected = list(arr) + [0] * (10 - len(arr)) + self.assertEqual(conn.send_bytes(arr), None) + self.assertEqual(conn.recv_bytes_into(buffer), + len(arr) * buffer.itemsize) + self.assertEqual(list(buffer), expected) + + buffer = array.array('i', [0] * 10) + expected = [0] * 3 + list(arr) + [0] * (10 - 3 - len(arr)) + self.assertEqual(conn.send_bytes(arr), None) + self.assertEqual(conn.recv_bytes_into(buffer, 3 * buffer.itemsize), + len(arr) * buffer.itemsize) + self.assertEqual(list(buffer), expected) + + buffer = bytearray(latin(' ' * 40)) + self.assertEqual(conn.send_bytes(longmsg), None) + try: + res = conn.recv_bytes_into(buffer) + except billiard.BufferTooShort, e: + self.assertEqual(e.args, (longmsg,)) + else: + self.fail('expected BufferTooShort, got %s' % res) + + poll = TimingWrapper(conn.poll) + + self.assertEqual(poll(), False) + self.assertTimingAlmostEqual(poll.elapsed, 0) + + self.assertEqual(poll(TIMEOUT1), False) + self.assertTimingAlmostEqual(poll.elapsed, TIMEOUT1) + + conn.send(None) + + self.assertEqual(poll(TIMEOUT1), True) + self.assertTimingAlmostEqual(poll.elapsed, 0) + + self.assertEqual(conn.recv(), None) + + really_big_msg = latin('X') * (1024 * 1024 * 16) # 16Mb + conn.send_bytes(really_big_msg) + self.assertEqual(conn.recv_bytes(), really_big_msg) + + conn.send_bytes(SENTINEL) # tell child to quit + child_conn.close() + + if self.TYPE == 'processes': + self.assertEqual(conn.readable, True) + self.assertEqual(conn.writable, True) + self.assertRaises(EOFError, conn.recv) + self.assertRaises(EOFError, conn.recv_bytes) + + p.join() + + def test_duplex_false(self): + reader, writer = self.Pipe(duplex=False) + self.assertEqual(writer.send(1), None) + self.assertEqual(reader.recv(), 1) + if self.TYPE == 'processes': + self.assertEqual(reader.readable, True) + 
self.assertEqual(reader.writable, False) + self.assertEqual(writer.readable, False) + self.assertEqual(writer.writable, True) + self.assertRaises(IOError, reader.send, 2) + self.assertRaises(IOError, writer.recv) + self.assertRaises(IOError, writer.poll) + + def test_spawn_close(self): + # We test that a pipe connection can be closed by parent + # process immediately after child is spawned. On Windows this + # would have sometimes failed on old versions because + # child_conn would be closed before the child got a chance to + # duplicate it. + conn, child_conn = self.Pipe() + + p = self.Process(target=self._echo, args=(child_conn,)) + p.start() + child_conn.close() # this might complete before child initializes + + msg = latin('hello') + conn.send_bytes(msg) + self.assertEqual(conn.recv_bytes(), msg) + + conn.send_bytes(SENTINEL) + conn.close() + p.join() + + def test_sendbytes(self): + if self.TYPE != 'processes': + return + + msg = latin('abcdefghijklmnopqrstuvwxyz') + a, b = self.Pipe() + + a.send_bytes(msg) + self.assertEqual(b.recv_bytes(), msg) + + a.send_bytes(msg, 5) + self.assertEqual(b.recv_bytes(), msg[5:]) + + a.send_bytes(msg, 7, 8) + self.assertEqual(b.recv_bytes(), msg[7:7 + 8]) + + a.send_bytes(msg, 26) + self.assertEqual(b.recv_bytes(), latin('')) + + a.send_bytes(msg, 26, 0) + self.assertEqual(b.recv_bytes(), latin('')) + + self.assertRaises(ValueError, a.send_bytes, msg, 27) + self.assertRaises(ValueError, a.send_bytes, msg, 22, 5) + self.assertRaises(ValueError, a.send_bytes, msg, 26, 1) + self.assertRaises(ValueError, a.send_bytes, msg, -1) + self.assertRaises(ValueError, a.send_bytes, msg, 4, -1) + + +class _TestListenerClient(BaseTestCase): + + ALLOWED_TYPES = ('processes', 'threads') + + def _test(self, address): + conn = self.connection.Client(address) + conn.send('hello') + conn.close() + + def test_listener_client(self): + for family in self.connection.families: + l = self.connection.Listener(family=family) + p = self.Process(target=self._test, args=(l.address,)) + p.daemon = True + p.start() + conn = l.accept() + self.assertEqual(conn.recv(), 'hello') + p.join() + l.close() +''' +class _TestPicklingConnections(BaseTestCase): + """Test of sending connection and socket objects between processes""" + + ALLOWED_TYPES = ('processes',) + + def _listener(self, conn, families): + for fam in families: + l = self.connection.Listener(family=fam) + conn.send(l.address) + new_conn = l.accept() + conn.send(new_conn) + + if self.TYPE == 'processes': + l = socket.socket() + l.bind(('localhost', 0)) + conn.send(l.getsockname()) + l.listen(1) + new_conn, addr = l.accept() + conn.send(new_conn) + + conn.recv() + + def _remote(self, conn): + for (address, msg) in iter(conn.recv, None): + client = self.connection.Client(address) + client.send(msg.upper()) + client.close() + + if self.TYPE == 'processes': + address, msg = conn.recv() + client = socket.socket() + client.connect(address) + client.sendall(msg.upper()) + client.close() + + conn.close() + + def test_pickling(self): + try: + billiard.allow_connection_pickling() + except ImportError: + return + + families = self.connection.families + + lconn, lconn0 = self.Pipe() + lp = self.Process(target=self._listener, args=(lconn0, families)) + lp.start() + lconn0.close() + + rconn, rconn0 = self.Pipe() + rp = self.Process(target=self._remote, args=(rconn0,)) + rp.start() + rconn0.close() + + for fam in families: + msg = ('This connection uses family %s' % fam).encode('ascii') + address = lconn.recv() + rconn.send((address, msg)) + 
new_conn = lconn.recv() + self.assertEqual(new_conn.recv(), msg.upper()) + + rconn.send(None) + + if self.TYPE == 'processes': + msg = latin('This connection uses a normal socket') + address = lconn.recv() + rconn.send((address, msg)) + if hasattr(socket, 'fromfd'): + new_conn = lconn.recv() + self.assertEqual(new_conn.recv(100), msg.upper()) + else: + # XXX On Windows with Py2.6 need to backport fromfd() + discard = lconn.recv_bytes() + + lconn.send(None) + + rconn.close() + lconn.close() + + lp.join() + rp.join() + +''' + + +class _TestHeap(BaseTestCase): + + ALLOWED_TYPES = ('processes',) + + def test_heap(self): + iterations = 5000 + maxblocks = 50 + blocks = [] + + # create and destroy lots of blocks of different sizes + for i in xrange(iterations): + size = int(random.lognormvariate(0, 1) * 1000) + b = billiard.heap.BufferWrapper(size) + blocks.append(b) + if len(blocks) > maxblocks: + i = random.randrange(maxblocks) + del blocks[i] + + # get the heap object + heap = billiard.heap.BufferWrapper._heap + + # verify the state of the heap + all = [] + occupied = 0 + for L in heap._len_to_seq.values(): + for arena, start, stop in L: + all.append((heap._arenas.index(arena), start, stop, + stop - start, 'free')) + for arena, start, stop in heap._allocated_blocks: + all.append((heap._arenas.index(arena), start, stop, + stop - start, 'occupied')) + occupied += stop - start + + all.sort() + + for i in range(len(all) - 1): + (arena, start, stop) = all[i][:3] + (narena, nstart, nstop) = all[i + 1][:3] + self.assertTrue((arena != narena and nstart == 0) or + (stop == nstart)) + + +class _Foo(Structure): + _fields_ = [ + ('x', c_int), + ('y', c_double) + ] + + +class _TestSharedCTypes(BaseTestCase): + + ALLOWED_TYPES = ('processes', ) + + def _double(self, x, y, foo, arr, string): + x.value *= 2 + y.value *= 2 + foo.x *= 2 + foo.y *= 2 + string.value *= 2 + for i in range(len(arr)): + arr[i] *= 2 + + @unittest.skipIf(Value is None, "requires ctypes.Value") + def test_sharedctypes(self, lock=False): + x = Value('i', 7, lock=lock) + y = Value(c_double, 1.0 / 3.0, lock=lock) + foo = Value(_Foo, 3, 2, lock=lock) + arr = self.Array('d', range(10), lock=lock) + string = self.Array('c', 20, lock=lock) + string.value = 'hello' + + p = self.Process(target=self._double, args=(x, y, foo, arr, string)) + p.start() + p.join() + + self.assertEqual(x.value, 14) + self.assertAlmostEqual(y.value, 2.0 / 3.0) + self.assertEqual(foo.x, 6) + self.assertAlmostEqual(foo.y, 4.0) + for i in range(10): + self.assertAlmostEqual(arr[i], i * 2) + self.assertEqual(string.value, latin('hellohello')) + + @unittest.skipIf(Value is None, "requires ctypes.Value") + def test_synchronize(self): + self.test_sharedctypes(lock=True) + + @unittest.skipIf(ctypes_copy is None, "requires ctypes.copy") + def test_copy(self): + foo = _Foo(2, 5.0) + bar = ctypes_copy(foo) + foo.x = 0 + foo.y = 0 + self.assertEqual(bar.x, 2) + self.assertAlmostEqual(bar.y, 5.0) + + +class _TestFinalize(BaseTestCase): + + ALLOWED_TYPES = ('processes',) + + def _test_finalize(self, conn): + class Foo(object): + pass + + a = Foo() + util.Finalize(a, conn.send, args=('a',)) + del a # triggers callback for a + + b = Foo() + close_b = util.Finalize(b, conn.send, args=('b',)) + close_b() # triggers callback for b + close_b() # does nothing because callback has already been called + del b # does nothing because callback has already been called + + c = Foo() + util.Finalize(c, conn.send, args=('c',)) + + d10 = Foo() + util.Finalize(d10, conn.send, args=('d10',), 
exitpriority=1) + + d01 = Foo() + util.Finalize(d01, conn.send, args=('d01',), exitpriority=0) + d02 = Foo() + util.Finalize(d02, conn.send, args=('d02',), exitpriority=0) + d03 = Foo() + util.Finalize(d03, conn.send, args=('d03',), exitpriority=0) + + util.Finalize(None, conn.send, args=('e',), exitpriority=-10) + + util.Finalize(None, conn.send, args=('STOP',), exitpriority=-100) + + # call multiprocessing's cleanup function then exit process without + # garbage collecting locals + util._exit_function() + conn.close() + os._exit(0) + + def test_finalize(self): + conn, child_conn = self.Pipe() + + p = self.Process(target=self._test_finalize, args=(child_conn,)) + p.start() + p.join() + + result = [obj for obj in iter(conn.recv, 'STOP')] + self.assertEqual(result, ['a', 'b', 'd10', 'd03', 'd02', 'd01', 'e']) + + +class _TestImportStar(BaseTestCase): + """Test that from ... import * works for each module""" + ALLOWED_TYPES = ('processes',) + + def test_import(self): + modules = [ + 'billiard', 'billiard.connection', + 'billiard.heap', 'billiard.managers', + 'billiard.pool', 'billiard.process', + 'billiard.reduction', + 'billiard.synchronize', 'billiard.util' + ] + + if c_int is not None: + # This module requires _ctypes + modules.append('billiard.sharedctypes') + + for name in modules: + __import__(name) + mod = sys.modules[name] + + for attr in getattr(mod, '__all__', ()): + self.assertTrue( + hasattr(mod, attr), + '%r does not have attribute %r' % (mod, attr) + ) + + +class _TestLogging(BaseTestCase): + """Quick test that logging works -- does not test logging output""" + ALLOWED_TYPES = ('processes',) + + def test_enable_logging(self): + logger = billiard.get_logger() + logger.setLevel(util.SUBWARNING) + self.assertTrue(logger is not None) + logger.debug('this will not be printed') + logger.info('nor will this') + logger.setLevel(LOG_LEVEL) + + def _test_level(self, conn): + logger = billiard.get_logger() + conn.send(logger.getEffectiveLevel()) + + def test_level(self): + LEVEL1 = 32 + LEVEL2 = 37 + + logger = billiard.get_logger() + root_logger = logging.getLogger() + root_level = root_logger.level + + reader, writer = billiard.Pipe(duplex=False) + + logger.setLevel(LEVEL1) + self.Process(target=self._test_level, args=(writer,)).start() + self.assertEqual(LEVEL1, reader.recv()) + + logger.setLevel(logging.NOTSET) + root_logger.setLevel(LEVEL2) + self.Process(target=self._test_level, args=(writer,)).start() + self.assertEqual(LEVEL2, reader.recv()) + + root_logger.setLevel(root_level) + logger.setLevel(level=LOG_LEVEL) + + +# class _TestLoggingProcessName(BaseTestCase): +# +# def handle(self, record): +# assert record.processName == billiard.current_process().name +# self.__handled = True +# +# def test_logging(self): +# handler = logging.Handler() +# handler.handle = self.handle +# self.__handled = False +# # Bypass getLogger() and side-effects +# logger = logging.getLoggerClass()( +# 'billiard.test.TestLoggingProcessName') +# logger.addHandler(handler) +# logger.propagate = False +# +# logger.warn('foo') +# assert self.__handled + +# +# Test to verify handle verification, see issue 3321 +# + + +class TestInvalidHandle(unittest.TestCase): + + @unittest.skipIf(WIN32, "skipped on Windows") + def test_invalid_handles(self): + conn = _billiard.Connection(44977608) + self.assertRaises(IOError, conn.poll) + self.assertRaises(IOError, _billiard.Connection, -1) + + +def get_attributes(Source, names): + d = {} + for name in names: + obj = getattr(Source, name) + if type(obj) == 
type(get_attributes): + obj = staticmethod(obj) + d[name] = obj + return d + + +def create_test_cases(Mixin, type): + result = {} + glob = globals() + Type = type.capitalize() + + for name in glob.keys(): + if name.startswith('_Test'): + base = glob[name] + if type in base.ALLOWED_TYPES: + newname = 'With' + Type + name[1:] + + class Temp(base, unittest.TestCase, Mixin): + pass + + result[newname] = Temp + Temp.__name__ = newname + Temp.__module__ = Mixin.__module__ + return result + + +class ProcessesMixin(object): + TYPE = 'processes' + Process = billiard.Process + locals().update(get_attributes(billiard, ( + 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', + 'Condition', 'Event', 'Value', 'Array', 'RawValue', + 'RawArray', 'current_process', 'active_children', 'Pipe', + 'connection', 'JoinableQueue' + ))) + +testcases_processes = create_test_cases(ProcessesMixin, type='processes') +globals().update(testcases_processes) + + +class ManagerMixin(object): + TYPE = 'manager' + Process = billiard.Process + manager = object.__new__(billiard.managers.SyncManager) + locals().update(get_attributes(manager, ( + 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', + 'Condition', 'Event', 'Value', 'Array', 'list', 'dict', + 'Namespace', 'JoinableQueue' + ))) + +testcases_manager = create_test_cases(ManagerMixin, type='manager') +globals().update(testcases_manager) + + +class ThreadsMixin(object): + TYPE = 'threads' + Process = billiard.dummy.Process + locals().update(get_attributes(billiard.dummy, ( + 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', + 'Condition', 'Event', 'Value', 'Array', 'current_process', + 'active_children', 'Pipe', 'connection', 'dict', 'list', + 'Namespace', 'JoinableQueue' + ))) + +testcases_threads = create_test_cases(ThreadsMixin, type='threads') +globals().update(testcases_threads) + + +class OtherTest(unittest.TestCase): + # TODO: add more tests for deliver/answer challenge. 
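+    # (Editor's note, illustrative; based on the CPython multiprocessing
+    # protocol that billiard inherits): deliver_challenge() sends CHALLENGE
+    # plus random bytes and expects hmac.new(authkey, message).digest() in
+    # reply, while answer_challenge() is the client half.  The fake
+    # connections below feed bogus bytes into each side, so both calls are
+    # expected to raise AuthenticationError.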
+ def test_deliver_challenge_auth_failure(self): + + class _FakeConnection(object): + + def recv_bytes(self, size): + return bytes('something bogus') + + def send_bytes(self, data): + pass + self.assertRaises(billiard.AuthenticationError, + billiard.connection.deliver_challenge, + _FakeConnection(), bytes('abc')) + + def test_answer_challenge_auth_failure(self): + + class _FakeConnection(object): + + def __init__(self): + self.count = 0 + + def recv_bytes(self, size): + self.count += 1 + if self.count == 1: + return billiard.connection.CHALLENGE + elif self.count == 2: + return bytes('something bogus') + return bytes('') + + def send_bytes(self, data): + pass + self.assertRaises(billiard.AuthenticationError, + billiard.connection.answer_challenge, + _FakeConnection(), bytes('abc')) + + +def initializer(ns): + ns.test += 1 + + +class TestInitializers(unittest.TestCase): + """Test Manager.start()/Pool.__init__() initializer feature + + - see issue 5585 + + """ + def setUp(self): + self.mgr = billiard.Manager() + self.ns = self.mgr.Namespace() + self.ns.test = 0 + + def tearDown(self): + self.mgr.shutdown() + + def test_manager_initializer(self): + m = billiard.managers.SyncManager() + self.assertRaises(TypeError, m.start, 1) + m.start(initializer, (self.ns,)) + self.assertEqual(self.ns.test, 1) + m.shutdown() + + def test_pool_initializer(self): + self.assertRaises(TypeError, billiard.Pool, initializer=1) + p = billiard.Pool(1, initializer, (self.ns,)) + p.close() + p.join() + self.assertEqual(self.ns.test, 1) + + +def _ThisSubProcess(q): + try: + q.get(block=False) + except Queue.Empty: + pass + + +def _TestProcess(q): + """Issue 5155, 5313, 5331: Test process in processes + + Verifies os.close(sys.stdin.fileno) vs. sys.stdin.close() behavior + + """ + queue = billiard.Queue() + subProc = billiard.Process(target=_ThisSubProcess, args=(queue,)) + subProc.start() + subProc.join() + + +def _afunc(x): + return x * x + + +def pool_in_process(): + pool = billiard.Pool(processes=4) + pool.map(_afunc, [1, 2, 3, 4, 5, 6, 7]) + + +class _file_like(object): + def __init__(self, delegate): + self._delegate = delegate + self._pid = None + + @property + def cache(self): + pid = os.getpid() + # There are no race conditions since fork keeps only the running thread + if pid != self._pid: + self._pid = pid + self._cache = [] + return self._cache + + def write(self, data): + self.cache.append(data) + + def flush(self): + self._delegate.write(''.join(self.cache)) + self._cache = [] + + +class TestStdinBadfiledescriptor(unittest.TestCase): + + def test_queue_in_process(self): + queue = billiard.Queue() + proc = billiard.Process(target=_TestProcess, args=(queue,)) + proc.start() + proc.join() + + def test_pool_in_process(self): + p = billiard.Process(target=pool_in_process) + p.start() + p.join() + + def test_flushing(self): + sio = StringIO() + flike = _file_like(sio) + flike.write('foo') + proc = billiard.Process(target=lambda: flike.flush()) + self.assertTrue(proc) + flike.flush() + assert sio.getvalue() == 'foo' + +testcases_other = [OtherTest, TestInvalidHandle, TestInitializers, + TestStdinBadfiledescriptor] + + +def test_main(run=None): + if sys.platform.startswith("linux"): + try: + billiard.RLock() + except OSError: + raise SkipTest("OSError raises on RLock creation, see issue 3111!") + + if run is None: + from test.test_support import run_unittest as run + + util.get_temp_dir() # creates temp directory for use by all processes + + billiard.get_logger().setLevel(LOG_LEVEL) + + ProcessesMixin.pool = 
billiard.Pool(4) + ThreadsMixin.pool = billiard.dummy.Pool(4) + ManagerMixin.manager.__init__() + ManagerMixin.manager.start() + ManagerMixin.pool = ManagerMixin.manager.Pool(4) + + testcases = ( + sorted(testcases_processes.values(), key=lambda tc: tc.__name__) + + sorted(testcases_threads.values(), key=lambda tc: tc.__name__) + + sorted(testcases_manager.values(), key=lambda tc: tc.__name__) + + testcases_other + ) + + loadTestsFromTestCase = unittest.defaultTestLoader.loadTestsFromTestCase + suite = unittest.TestSuite(loadTestsFromTestCase(tc) for tc in testcases) + # (ncoghlan): Whether or not sys.exc_clear is executed by the threading + # module during these tests is at least platform dependent and possibly + # non-deterministic on any given platform. So we don't mind if the listed + # warnings aren't actually raised. + with test_support.check_py3k_warnings( + (".+__(get|set)slice__ has been removed", DeprecationWarning), + (r"sys.exc_clear\(\) not supported", DeprecationWarning), + quiet=True): + run(suite) + + ThreadsMixin.pool.terminate() + ProcessesMixin.pool.terminate() + ManagerMixin.pool.terminate() + ManagerMixin.manager.shutdown() + + del ProcessesMixin.pool, ThreadsMixin.pool, ManagerMixin.pool + + +def main(): + test_main(unittest.TextTestRunner(verbosity=2).run) + +if __name__ == '__main__': + main() diff --git a/awx/lib/site-packages/importlib/__init__.py b/awx/lib/site-packages/importlib/__init__.py new file mode 100644 index 0000000000..ad31a1ac47 --- /dev/null +++ b/awx/lib/site-packages/importlib/__init__.py @@ -0,0 +1,38 @@ +"""Backport of importlib.import_module from 3.x.""" +# While not critical (and in no way guaranteed!), it would be nice to keep this +# code compatible with Python 2.3. +import sys + +def _resolve_name(name, package, level): + """Return the absolute name of the module to be imported.""" + if not hasattr(package, 'rindex'): + raise ValueError("'package' not set to a string") + dot = len(package) + for x in xrange(level, 1, -1): + try: + dot = package.rindex('.', 0, dot) + except ValueError: + raise ValueError("attempted relative import beyond top-level " + "package") + return "%s.%s" % (package[:dot], name) + + +def import_module(name, package=None): + """Import a module. + + The 'package' argument is required when performing a relative import. It + specifies the package to use as the anchor point from which to resolve the + relative import to an absolute import. 
+ + """ + if name.startswith('.'): + if not package: + raise TypeError("relative imports require the 'package' argument") + level = 0 + for character in name: + if character != '.': + break + level += 1 + name = _resolve_name(name[level:], package, level) + __import__(name) + return sys.modules[name] diff --git a/awx/lib/site-packages/jsonfield/VERSION b/awx/lib/site-packages/jsonfield/VERSION new file mode 100644 index 0000000000..56f3151140 --- /dev/null +++ b/awx/lib/site-packages/jsonfield/VERSION @@ -0,0 +1 @@ +0.9.10 diff --git a/awx/lib/site-packages/jsonfield/__init__.py b/awx/lib/site-packages/jsonfield/__init__.py new file mode 100644 index 0000000000..0bf34dcac5 --- /dev/null +++ b/awx/lib/site-packages/jsonfield/__init__.py @@ -0,0 +1,7 @@ +import os +__version__ = open(os.path.join(os.path.dirname(__file__),'VERSION')).read().strip() + +try: + from .fields import JSONField +except ImportError: + pass \ No newline at end of file diff --git a/awx/lib/site-packages/jsonfield/fields.py b/awx/lib/site-packages/jsonfield/fields.py new file mode 100644 index 0000000000..c1915cd989 --- /dev/null +++ b/awx/lib/site-packages/jsonfield/fields.py @@ -0,0 +1,166 @@ +from __future__ import unicode_literals + +from django.core.exceptions import ValidationError +from django.conf import settings +from django.db import models, DatabaseError, transaction +from django.utils import simplejson as json +from django.utils.translation import ugettext_lazy as _ +from django.utils import six + +from decimal import Decimal +import datetime + +from .utils import default +from .widgets import JSONWidget +from .forms import JSONFormField + +class JSONField(six.with_metaclass(models.SubfieldBase, models.Field)): + """ + A field that will ensure the data entered into it is valid JSON. + """ + default_error_messages = { + 'invalid': _("'%s' is not a valid JSON string.") + } + description = "JSON object" + + def __init__(self, *args, **kwargs): + if not kwargs.get('null', False): + kwargs['default'] = kwargs.get('default', dict) + self.encoder_kwargs = { + 'indent': kwargs.get('indent', getattr(settings, 'JSONFIELD_INDENT', None)) + } + super(JSONField, self).__init__(*args, **kwargs) + self.validate(self.get_default(), None) + + def formfield(self, **kwargs): + defaults = { + 'form_class': JSONFormField, + 'widget': JSONWidget + } + defaults.update(**kwargs) + return super(JSONField, self).formfield(**defaults) + + def validate(self, value, model_instance): + if not self.null and value is None: + raise ValidationError(self.error_messages['null']) + try: + self.get_prep_value(value) + except: + raise ValidationError(self.error_messages['invalid'] % value) + + def get_default(self): + if self.has_default(): + default = self.default + if callable(default): + default = default() + if isinstance(default, six.string_types): + return json.loads(default) + return json.loads(json.dumps(default)) + return super(JSONField, self).get_default() + + def get_internal_type(self): + return 'TextField' + + def db_type(self, connection): + # Test to see if we support JSON querying. + # (Protip: nothing does, at this stage). 
+        cursor = connection.cursor()
+        try:
+            sid = transaction.savepoint()
+            cursor.execute('SELECT \'{}\'::json = \'{}\'::json;')
+        except DatabaseError:
+            transaction.savepoint_rollback(sid)
+            return 'text'
+        else:
+            return 'json'
+
+    def to_python(self, value):
+        if isinstance(value, six.string_types):
+            if value == "":
+                if self.null:
+                    return None
+                if self.blank:
+                    return ""
+            try:
+                value = json.loads(value)
+            except ValueError:
+                msg = self.error_messages['invalid'] % value
+                raise ValidationError(msg)
+        # TODO: Look for date/time/datetime objects within the structure?
+        return value
+
+    def get_db_prep_value(self, value, connection=None, prepared=None):
+        return self.get_prep_value(value)
+
+    def get_prep_value(self, value):
+        if value is None:
+            if not self.null and self.blank:
+                return ""
+            return None
+        return json.dumps(value, default=default, **self.encoder_kwargs)
+
+    def get_prep_lookup(self, lookup_type, value):
+        if lookup_type in ["exact", "iexact"]:
+            return self.to_python(self.get_prep_value(value))
+        if lookup_type == "in":
+            return [self.to_python(self.get_prep_value(v)) for v in value]
+        if lookup_type == "isnull":
+            return value
+        if lookup_type in ["contains", "icontains"]:
+            if isinstance(value, (list, tuple)):
+                raise TypeError("Lookup type %r not supported with argument of %s" % (
+                    lookup_type, type(value).__name__
+                ))
+                # Need a way to combine the values with '%', but don't escape that.
+                return self.get_prep_value(value)[1:-1].replace(', ', r'%')
+            if isinstance(value, dict):
+                return self.get_prep_value(value)[1:-1]
+            return self.to_python(self.get_prep_value(value))
+        raise TypeError('Lookup type %r not supported' % lookup_type)
+
+    def value_to_string(self, obj):
+        return self._get_val_from_obj(obj)
+
+class TypedJSONField(JSONField):
+    """
+
+    """
+    def __init__(self, *args, **kwargs):
+        self.json_required_fields = kwargs.pop('required_fields', {})
+        self.json_validators = kwargs.pop('validators', [])
+
+        super(TypedJSONField, self).__init__(*args, **kwargs)
+
+    def cast_required_fields(self, obj):
+        if not obj:
+            return
+        for field_name, field_type in self.json_required_fields.items():
+            obj[field_name] = field_type.to_python(obj[field_name])
+
+    def to_python(self, value):
+        value = super(TypedJSONField, self).to_python(value)
+
+        if isinstance(value, list):
+            for item in value:
+                self.cast_required_fields(item)
+        else:
+            self.cast_required_fields(value)
+
+        return value
+
+    def validate(self, value, model_instance):
+        super(TypedJSONField, self).validate(value, model_instance)
+
+        for v in self.json_validators:
+            if isinstance(value, list):
+                for item in value:
+                    v(item)
+            else:
+                v(value)
+
+try:
+    from south.modelsinspector import add_introspection_rules
+    add_introspection_rules([], ['^jsonfield\.fields\.JSONField'])
+    add_introspection_rules([], ['^jsonfield\.fields\.TypedJSONField'])
+except ImportError:
+    pass
\ No newline at end of file
diff --git a/awx/lib/site-packages/jsonfield/forms.py b/awx/lib/site-packages/jsonfield/forms.py
new file mode 100644
index 0000000000..de6347f22b
--- /dev/null
+++ b/awx/lib/site-packages/jsonfield/forms.py
@@ -0,0 +1,35 @@
+from django import forms
+from django.utils import simplejson as json
+from django.utils import six
+
+from .widgets import JSONWidget
+
+
+class JSONFormField(forms.CharField):
+    def __init__(self, *args, **kwargs):
+        if 'widget' not in kwargs:
+            kwargs['widget'] = JSONWidget
+        super(JSONFormField, self).__init__(*args, **kwargs)
+
+    def clean(self, value):
+        """
+        The default is to have a
TextField, and we will decode the string + that comes back from this. However, another use of this field is + to store a list of values, and use these in a MultipleSelect + widget. So, if we have an object that isn't a string, then for now + we will assume that is where it has come from. + """ + value = super(JSONFormField, self).clean(value) + + if not value: + return value + + if isinstance(value, six.string_types): + try: + return json.loads(value) + except Exception as exc: + raise forms.ValidationError( + 'JSON decode error: %s' % (unicode(exc),) + ) + else: + return value diff --git a/awx/lib/site-packages/jsonfield/models.py b/awx/lib/site-packages/jsonfield/models.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/jsonfield/templatetags/__init__.py b/awx/lib/site-packages/jsonfield/templatetags/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/jsonfield/templatetags/jsonify.py b/awx/lib/site-packages/jsonfield/templatetags/jsonify.py new file mode 100644 index 0000000000..cc99da8710 --- /dev/null +++ b/awx/lib/site-packages/jsonfield/templatetags/jsonify.py @@ -0,0 +1,12 @@ +from django import template +from django.utils import simplejson as json +from django.utils.safestring import mark_safe +from jsonfield.utils import TZAwareJSONEncoder + +register = template.Library() + +@register.filter +def jsonify(value): + if getattr(value, 'all', False): + value = list(value) + return mark_safe(json.dumps(value, cls=TZAwareJSONEncoder)) diff --git a/awx/lib/site-packages/jsonfield/tests/__init__.py b/awx/lib/site-packages/jsonfield/tests/__init__.py new file mode 100644 index 0000000000..d017229a9e --- /dev/null +++ b/awx/lib/site-packages/jsonfield/tests/__init__.py @@ -0,0 +1,2 @@ +from .test_fields import * +from .test_forms import * \ No newline at end of file diff --git a/awx/lib/site-packages/jsonfield/tests/jsonfield_test_app/__init__.py b/awx/lib/site-packages/jsonfield/tests/jsonfield_test_app/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/jsonfield/tests/jsonfield_test_app/forms.py b/awx/lib/site-packages/jsonfield/tests/jsonfield_test_app/forms.py new file mode 100644 index 0000000000..ec546d6965 --- /dev/null +++ b/awx/lib/site-packages/jsonfield/tests/jsonfield_test_app/forms.py @@ -0,0 +1,12 @@ +from django import forms + +from jsonfield.forms import JSONFormField +from .models import JSONFieldTestModel + +class JSONTestForm(forms.Form): + json_data = JSONFormField() + optional_json_data = JSONFormField(required=False) + +class JSONTestModelForm(forms.ModelForm): + class Meta: + model = JSONFieldTestModel diff --git a/awx/lib/site-packages/jsonfield/tests/jsonfield_test_app/models.py b/awx/lib/site-packages/jsonfield/tests/jsonfield_test_app/models.py new file mode 100644 index 0000000000..f4545e1dff --- /dev/null +++ b/awx/lib/site-packages/jsonfield/tests/jsonfield_test_app/models.py @@ -0,0 +1,25 @@ +from django.db import models +from jsonfield.fields import JSONField + +class JSONFieldTestModel(models.Model): + json = JSONField("test", null=True, blank=True) + class Meta: + app_label = 'jsonfield' + +class JSONFieldWithDefaultTestModel(models.Model): + json = JSONField(default={"sukasuka": "YAAAAAZ"}) + class Meta: + app_label = 'jsonfield' + + +class BlankJSONFieldTestModel(models.Model): + null_json = JSONField(null=True) + blank_json = JSONField(blank=True) + class Meta: + app_label = 'jsonfield' + +class 
CallableDefaultModel(models.Model): + json = JSONField(default=lambda:{'x':2}) + + class Meta: + app_label = 'jsonfield' \ No newline at end of file diff --git a/awx/lib/site-packages/jsonfield/tests/test_fields.py b/awx/lib/site-packages/jsonfield/tests/test_fields.py new file mode 100644 index 0000000000..8d80c4ac00 --- /dev/null +++ b/awx/lib/site-packages/jsonfield/tests/test_fields.py @@ -0,0 +1,145 @@ +#:coding=utf-8: +from django.test import TestCase as DjangoTestCase +from django.utils import unittest +from django.utils.encoding import force_text +from django import forms + +from jsonfield.tests.jsonfield_test_app.models import * +from jsonfield.fields import JSONField + +class JSONFieldTest(DjangoTestCase): + def test_json_field(self): + obj = JSONFieldTestModel(json='''{ + "spam": "eggs" + }''') + self.assertEquals(obj.json, {'spam':'eggs'}) + + def test_json_field_empty(self): + obj = JSONFieldTestModel(json='') + self.assertEquals(obj.json, None) + + def test_json_field_save(self): + JSONFieldTestModel.objects.create( + id=10, + json='''{ + "spam": "eggs" + }''', + ) + obj2 = JSONFieldTestModel.objects.get(id=10) + self.assertEquals(obj2.json, {'spam':'eggs'}) + + def test_json_field_save_empty(self): + JSONFieldTestModel.objects.create(id=10, json='') + obj2 = JSONFieldTestModel.objects.get(id=10) + self.assertEquals(obj2.json, None) + + def test_db_prep_save(self): + field = JSONField("test") + field.set_attributes_from_name("json") + self.assertEquals(None, field.get_db_prep_save(None, connection=None)) + self.assertEquals('{"spam": "eggs"}', field.get_db_prep_save({"spam": "eggs"}, connection=None)) + + def test_formfield(self): + from jsonfield.forms import JSONFormField + from jsonfield.widgets import JSONWidget + field = JSONField("test") + field.set_attributes_from_name("json") + formfield = field.formfield() + self.assertEquals(type(formfield), JSONFormField) + self.assertEquals(type(formfield.widget), JSONWidget) + + def test_formfield_clean_blank(self): + field = JSONField("test") + formfield = field.formfield() + self.assertRaisesMessage(forms.ValidationError, force_text(formfield.error_messages['required']), formfield.clean, value='') + + def test_formfield_clean_none(self): + field = JSONField("test") + formfield = field.formfield() + self.assertRaisesMessage(forms.ValidationError, force_text(formfield.error_messages['required']), formfield.clean, value=None) + + def test_formfield_null_and_blank_clean_blank(self): + field = JSONField("test", null=True, blank=True) + formfield = field.formfield() + self.assertEquals(formfield.clean(value=''), '') + + def test_formfield_null_and_blank_clean_none(self): + field = JSONField("test", null=True, blank=True) + formfield = field.formfield() + self.assertEquals(formfield.clean(value=None), '') + + def test_formfield_blank_clean_blank(self): + field = JSONField("test", null=False, blank=True) + formfield = field.formfield() + self.assertEquals(formfield.clean(value=''), '') + + def test_formfield_blank_clean_none(self): + field = JSONField("test", null=False, blank=True) + formfield = field.formfield() + self.assertEquals(formfield.clean(value=None), '') + + def test_default_value(self): + obj = JSONFieldWithDefaultTestModel.objects.create() + obj = JSONFieldWithDefaultTestModel.objects.get(id=obj.id) + self.assertEquals(obj.json, {'sukasuka': 'YAAAAAZ'}) + + def test_query_object(self): + JSONFieldTestModel.objects.create(json={}) + JSONFieldTestModel.objects.create(json={'foo':'bar'}) + self.assertEquals(2, 
JSONFieldTestModel.objects.all().count()) + self.assertEquals(1, JSONFieldTestModel.objects.exclude(json={}).count()) + self.assertEquals(1, JSONFieldTestModel.objects.filter(json={}).count()) + self.assertEquals(1, JSONFieldTestModel.objects.filter(json={'foo':'bar'}).count()) + self.assertEquals(1, JSONFieldTestModel.objects.filter(json__contains={'foo':'bar'}).count()) + JSONFieldTestModel.objects.create(json={'foo':'bar', 'baz':'bing'}) + self.assertEquals(2, JSONFieldTestModel.objects.filter(json__contains={'foo':'bar'}).count()) + self.assertEquals(1, JSONFieldTestModel.objects.filter(json__contains={'baz':'bing', 'foo':'bar'}).count()) + self.assertEquals(2, JSONFieldTestModel.objects.filter(json__contains='foo').count()) + # This code needs to be implemented! + self.assertRaises(TypeError, lambda:JSONFieldTestModel.objects.filter(json__contains=['baz', 'foo'])) + + def test_query_isnull(self): + JSONFieldTestModel.objects.create(json=None) + JSONFieldTestModel.objects.create(json={}) + JSONFieldTestModel.objects.create(json={'foo':'bar'}) + + self.assertEquals(1, JSONFieldTestModel.objects.filter(json=None).count()) + self.assertEquals(None, JSONFieldTestModel.objects.get(json=None).json) + + def test_jsonfield_blank(self): + BlankJSONFieldTestModel.objects.create(blank_json='', null_json=None) + obj = BlankJSONFieldTestModel.objects.get() + self.assertEquals(None, obj.null_json) + self.assertEquals("", obj.blank_json) + obj.save() + obj = BlankJSONFieldTestModel.objects.get() + self.assertEquals(None, obj.null_json) + self.assertEquals("", obj.blank_json) + + def test_callable_default(self): + CallableDefaultModel.objects.create() + obj = CallableDefaultModel.objects.get() + self.assertEquals({'x':2}, obj.json) + + def test_callable_default_overridden(self): + CallableDefaultModel.objects.create(json={'x':3}) + obj = CallableDefaultModel.objects.get() + self.assertEquals({'x':3}, obj.json) + + def test_mutable_default_checking(self): + obj1 = JSONFieldWithDefaultTestModel() + obj2 = JSONFieldWithDefaultTestModel() + + obj1.json['foo'] = 'bar' + self.assertNotIn('foo', obj2.json) + + def test_invalid_json(self): + obj = JSONFieldTestModel() + obj.json = '{"foo": 2}' + self.assertIn('foo', obj.json) + with self.assertRaises(forms.ValidationError): + obj.json = '{"foo"}' + + def test_invalid_json_default(self): + with self.assertRaises(ValueError): + field = JSONField('test', default='{"foo"}') \ No newline at end of file diff --git a/awx/lib/site-packages/jsonfield/tests/test_forms.py b/awx/lib/site-packages/jsonfield/tests/test_forms.py new file mode 100644 index 0000000000..3e057bda86 --- /dev/null +++ b/awx/lib/site-packages/jsonfield/tests/test_forms.py @@ -0,0 +1,22 @@ +from django.test import TestCase as DjangoTestCase +from django.utils import unittest + +from jsonfield.forms import JSONFormField +from jsonfield.tests.jsonfield_test_app.forms import JSONTestForm, JSONTestModelForm + +class JSONFormFieldTest(DjangoTestCase): + def test_form_field_clean(self): + field = JSONFormField(required=False) + self.assertEquals({}, field.clean('{}')) + + self.assertEquals( + {'foo':'bar', 'baz':2}, + field.clean('{"foo":"bar","baz":2}') + ) + + self.assertEquals([],field.clean('[]')) + +class JSONFormTest(DjangoTestCase): + def test_form_clean(self): + form = JSONTestForm({}) + self.assertFalse(form.is_valid()) \ No newline at end of file diff --git a/awx/lib/site-packages/jsonfield/utils.py b/awx/lib/site-packages/jsonfield/utils.py new file mode 100644 index 0000000000..3ffca827e3 --- 
/dev/null +++ b/awx/lib/site-packages/jsonfield/utils.py @@ -0,0 +1,28 @@ +import datetime +from decimal import Decimal + +from django.core.serializers.json import DjangoJSONEncoder + +class TZAwareJSONEncoder(DjangoJSONEncoder): + def default(self, obj): + if isinstance(obj, datetime.datetime): + return obj.strftime("%Y-%m-%d %H:%M:%S%z") + return super(TZAwareJSONEncoder, self).default(obj) + +def default(o): + if hasattr(o, 'to_json'): + return o.to_json() + if isinstance(o, Decimal): + return str(o) + if isinstance(o, datetime.datetime): + if o.tzinfo: + return o.strftime('%Y-%m-%dT%H:%M:%S%z') + return o.strftime("%Y-%m-%dT%H:%M:%S") + if isinstance(o, datetime.date): + return o.strftime("%Y-%m-%d") + if isinstance(o, datetime.time): + if o.tzinfo: + return o.strftime('%H:%M:%S%z') + return o.strftime("%H:%M:%S") + + raise TypeError(repr(o) + " is not JSON serializable") diff --git a/awx/lib/site-packages/jsonfield/widgets.py b/awx/lib/site-packages/jsonfield/widgets.py new file mode 100644 index 0000000000..72953810b8 --- /dev/null +++ b/awx/lib/site-packages/jsonfield/widgets.py @@ -0,0 +1,18 @@ +from django import forms +from django.utils import simplejson as json +from django.conf import settings + +from .utils import default + +class JSONWidget(forms.Textarea): + def render(self, name, value, attrs=None): + if value is None: + value = "" + if not isinstance(value, basestring): + value = json.dumps(value, indent=2, default=default) + return super(JSONWidget, self).render(name, value, attrs) + + +class JSONSelectWidget(forms.SelectMultiple): + pass + diff --git a/awx/lib/site-packages/kombu/__init__.py b/awx/lib/site-packages/kombu/__init__.py new file mode 100644 index 0000000000..962eae85d0 --- /dev/null +++ b/awx/lib/site-packages/kombu/__init__.py @@ -0,0 +1,106 @@ +"""Messaging Framework for Python""" +from __future__ import absolute_import + +VERSION = (2, 5, 10) +__version__ = '.'.join(map(str, VERSION[0:3])) + ''.join(VERSION[3:]) +__author__ = 'Ask Solem' +__contact__ = 'ask@celeryproject.org' +__homepage__ = 'http://kombu.readthedocs.org' +__docformat__ = 'restructuredtext en' + +# -eof meta- + +import os +import sys + +if sys.version_info < (2, 5): # pragma: no cover + if sys.version_info >= (2, 4): + raise Exception( + 'Python 2.4 is not supported by this version. ' + 'Please use Kombu versions 1.x.') + else: + raise Exception('Kombu requires Python versions 2.5 or later.') + +STATICA_HACK = True +globals()['kcah_acitats'[::-1].upper()] = False +if STATICA_HACK: # pragma: no cover + # This is never executed, but tricks static analyzers (PyDev, PyCharm, + # pylint, etc.) into knowing the types of these symbols, and what + # they contain. + from kombu.connection import Connection, BrokerConnection # noqa + from kombu.entity import Exchange, Queue, binding # noqa + from kombu.messaging import Consumer, Producer # noqa + from kombu.pools import connections, producers # noqa + from kombu.utils.url import parse_url # noqa + from kombu.common import eventloop, uuid # noqa + from kombu.serialization import ( # noqa + enable_insecure_serializers, + disable_insecure_serializers, + ) + +# Lazy loading. +# - See werkzeug/__init__.py for the rationale behind this. 
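+# (Editor's note, illustrative): once the module subclass below replaces
+# sys.modules['kombu'], attribute access imports submodules on demand, e.g.:
+#
+#     import kombu
+#     kombu.Connection    # first access runs __import__('kombu.connection')
+#
+# and caches the submodule's public names on the package object, so
+# "import kombu" itself stays cheap.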
+from types import ModuleType
+
+all_by_module = {
+    'kombu.connection': ['Connection', 'BrokerConnection'],
+    'kombu.entity': ['Exchange', 'Queue', 'binding'],
+    'kombu.messaging': ['Consumer', 'Producer'],
+    'kombu.pools': ['connections', 'producers'],
+    'kombu.utils.url': ['parse_url'],
+    'kombu.common': ['eventloop', 'uuid'],
+    'kombu.serialization': ['enable_insecure_serializers',
+                            'disable_insecure_serializers'],
+}
+
+object_origins = {}
+for module, items in all_by_module.iteritems():
+    for item in items:
+        object_origins[item] = module
+
+
+class module(ModuleType):
+
+    def __getattr__(self, name):
+        if name in object_origins:
+            module = __import__(object_origins[name], None, None, [name])
+            for extra_name in all_by_module[module.__name__]:
+                setattr(self, extra_name, getattr(module, extra_name))
+            return getattr(module, name)
+        return ModuleType.__getattribute__(self, name)
+
+    def __dir__(self):
+        result = list(new_module.__all__)
+        result.extend(('__file__', '__path__', '__doc__', '__all__',
+                       '__docformat__', '__name__', '__path__', 'VERSION',
+                       '__package__', '__version__', '__author__',
+                       '__contact__', '__homepage__', '__docformat__'))
+        return result
+
+# 2.5 does not define __package__
+try:
+    package = __package__
+except NameError:  # pragma: no cover
+    package = 'kombu'
+
+# keep a reference to this module so that it's not garbage collected
+old_module = sys.modules[__name__]
+
+new_module = sys.modules[__name__] = module(__name__)
+new_module.__dict__.update({
+    '__file__': __file__,
+    '__path__': __path__,
+    '__doc__': __doc__,
+    '__all__': tuple(object_origins),
+    '__version__': __version__,
+    '__author__': __author__,
+    '__contact__': __contact__,
+    '__homepage__': __homepage__,
+    '__docformat__': __docformat__,
+    '__package__': package,
+    'VERSION': VERSION})
+
+if os.environ.get('KOMBU_LOG_DEBUG'):  # pragma: no cover
+    os.environ.update(KOMBU_LOG_CHANNEL='1', KOMBU_LOG_CONNECTION='1')
+    from .utils import debug
+    debug.setup_logging()
diff --git a/awx/lib/site-packages/kombu/abstract.py b/awx/lib/site-packages/kombu/abstract.py
new file mode 100644
index 0000000000..f4218f02b4
--- /dev/null
+++ b/awx/lib/site-packages/kombu/abstract.py
@@ -0,0 +1,115 @@
+"""
+kombu.abstract
+==============
+
+Object utilities.
+ +""" +from __future__ import absolute_import + +from copy import copy + +from .connection import maybe_channel +from .exceptions import NotBoundError +from .utils import ChannelPromise + +__all__ = ['Object', 'MaybeChannelBound'] + + +def unpickle_dict(cls, kwargs): + return cls(**kwargs) + + +class Object(object): + """Common base class supporting automatic kwargs->attributes handling, + and cloning.""" + attrs = () + + def __init__(self, *args, **kwargs): + any = lambda v: v + for name, type_ in self.attrs: + value = kwargs.get(name) + if value is not None: + setattr(self, name, (type_ or any)(value)) + else: + try: + getattr(self, name) + except AttributeError: + setattr(self, name, None) + + def as_dict(self, recurse=False): + def f(obj, type): + if recurse and isinstance(obj, Object): + return obj.as_dict(recurse=True) + return type(obj) if type else obj + return dict( + (attr, f(getattr(self, attr), type)) for attr, type in self.attrs + ) + + def __reduce__(self): + return unpickle_dict, (self.__class__, self.as_dict()) + + def __copy__(self): + return self.__class__(**self.as_dict()) + + +class MaybeChannelBound(Object): + """Mixin for classes that can be bound to an AMQP channel.""" + _channel = None + _is_bound = False + + #: Defines whether maybe_declare can skip declaring this entity twice. + can_cache_declaration = False + + def __call__(self, channel): + """`self(channel) -> self.bind(channel)`""" + return self.bind(channel) + + def bind(self, channel): + """Create copy of the instance that is bound to a channel.""" + return copy(self).maybe_bind(channel) + + def maybe_bind(self, channel): + """Bind instance to channel if not already bound.""" + if not self.is_bound and channel: + self._channel = maybe_channel(channel) + self.when_bound() + self._is_bound = True + return self + + def revive(self, channel): + """Revive channel after the connection has been re-established. + + Used by :meth:`~kombu.Connection.ensure`. + + """ + if self.is_bound: + self._channel = channel + self.when_bound() + + def when_bound(self): + """Callback called when the class is bound.""" + pass + + def __repr__(self, item=''): + if self.is_bound: + return '<%s bound to chan:%s>' % ( + item or type(self).__name__, self.channel.channel_id) + return '' % (item, ) + + @property + def is_bound(self): + """Flag set if the channel is bound.""" + return self._is_bound and self._channel is not None + + @property + def channel(self): + """Current channel if the object is bound.""" + channel = self._channel + if channel is None: + raise NotBoundError( + "Can't call method on %s not bound to a channel" % ( + self.__class__.__name__)) + if isinstance(channel, ChannelPromise): + channel = self._channel = channel() + return channel diff --git a/awx/lib/site-packages/kombu/clocks.py b/awx/lib/site-packages/kombu/clocks.py new file mode 100644 index 0000000000..e9419cde15 --- /dev/null +++ b/awx/lib/site-packages/kombu/clocks.py @@ -0,0 +1,99 @@ +""" +kombu.clocks +============ + +Logical Clocks and Synchronization. + +""" +from __future__ import absolute_import +from __future__ import with_statement + +import threading + +from itertools import islice, izip + +__all__ = ['LamportClock'] + + +class LamportClock(object): + """Lamport's logical clock. + + From Wikipedia: + + A Lamport logical clock is a monotonically incrementing software counter + maintained in each process. 
It follows some simple rules: + + * A process increments its counter before each event in that process; + * When a process sends a message, it includes its counter value with + the message; + * On receiving a message, the receiver process sets its counter to be + greater than the maximum of its own value and the received value + before it considers the message received. + + Conceptually, this logical clock can be thought of as a clock that only + has meaning in relation to messages moving between processes. When a + process receives a message, it resynchronizes its logical clock with + the sender. + + .. seealso:: + + * `Lamport timestamps`_ + + * `Lamports distributed mutex`_ + + .. _`Lamport Timestamps`: http://en.wikipedia.org/wiki/Lamport_timestamps + .. _`Lamports distributed mutex`: http://bit.ly/p99ybE + + *Usage* + + When sending a message use :meth:`forward` to increment the clock, + when receiving a message use :meth:`adjust` to sync with + the time stamp of the incoming message. + + """ + #: The clocks current value. + value = 0 + + def __init__(self, initial_value=0): + self.value = initial_value + self.mutex = threading.Lock() + + def adjust(self, other): + with self.mutex: + self.value = max(self.value, other) + 1 + return self.value + + def forward(self): + with self.mutex: + self.value += 1 + return self.value + + def sort_heap(self, h): + """List of tuples containing at least two elements, representing + an event, where the first element is the event's scalar clock value, + and the second element is the id of the process (usually + ``"hostname:pid"``): ``sh([(clock, processid, ...?), (...)])`` + + The list must already be sorted, which is why we refer to it as a + heap. + + The tuple will not be unpacked, so more than two elements can be + present. Returns the latest event. + + """ + if h[0][0] == h[1][0]: + same = [] + for PN in izip(h, islice(h, 1, None)): + if PN[0][0] != PN[1][0]: + break # Prev and Next's clocks differ + same.append(PN[0]) + # return first item sorted by process id + return sorted(same, key=lambda event: event[1])[0] + # clock values unique, return first item + return h[0] + + def __str__(self): + return str(self.value) + + def __repr__(self): + return '' % (self.value, ) diff --git a/awx/lib/site-packages/kombu/common.py b/awx/lib/site-packages/kombu/common.py new file mode 100644 index 0000000000..27b3f32207 --- /dev/null +++ b/awx/lib/site-packages/kombu/common.py @@ -0,0 +1,379 @@ +""" +kombu.common +============ + +Common Utilities. + +""" +from __future__ import absolute_import +from __future__ import with_statement + +import os +import socket +import threading +import uuid as _uuid + +from collections import deque +from contextlib import contextmanager +from functools import partial +from itertools import count + +from . import serialization +from .entity import Exchange, Queue +from .exceptions import StdChannelError +from .log import get_logger +from .messaging import Consumer as _Consumer +from .utils import uuid + +try: + from thread import get_ident # noqa +except ImportError: # pragma: no cover + try: + from dummy_thread import get_ident # noqa + except ImportError: # pragma: no cover + from _thread import get_ident # noqa + +__all__ = ['Broadcast', 'maybe_declare', 'uuid', + 'itermessages', 'send_reply', 'isend_reply', + 'collect_replies', 'insured', 'ipublish', 'drain_consumer', + 'eventloop'] + +#: Prefetch count can't exceed short. 
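+# (Editor's note): AMQP encodes prefetch_count as an unsigned 16-bit short,
+# hence the 0xFFFF ceiling below; QoS.set() later in this module logs a
+# warning and disables prefetch entirely when asked to exceed it.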
+PREFETCH_COUNT_MAX = 0xFFFF
+
+logger = get_logger(__name__)
+_nodeid = _uuid.getnode()
+
+
+def generate_oid(node_id, process_id, thread_id, instance):
+    ent = '%x-%x-%x-%x' % (node_id, process_id, thread_id, id(instance))
+    return str(_uuid.uuid3(_uuid.NAMESPACE_OID, ent))
+
+
+def oid_from(instance):
+    return generate_oid(_nodeid, os.getpid(), get_ident(), instance)
+
+
+class Broadcast(Queue):
+    """Convenience class used to define broadcast queues.
+
+    Every queue instance will have a unique name,
+    and both the queue and exchange are configured with auto deletion.
+
+    :keyword name: This is used as the name of the exchange.
+    :keyword queue: By default a unique id is used for the queue
+        name for every consumer.  You can specify a custom queue
+        name here.
+    :keyword \*\*kwargs: See :class:`~kombu.Queue` for a list
+        of additional keyword arguments supported.
+
+    """
+
+    def __init__(self, name=None, queue=None, **kwargs):
+        return super(Broadcast, self).__init__(
+            name=queue or 'bcast.%s' % (uuid(), ),
+            **dict({'alias': name,
+                    'auto_delete': True,
+                    'exchange': Exchange(name, type='fanout')}, **kwargs))
+
+
+def declaration_cached(entity, channel):
+    return entity in channel.connection.client.declared_entities
+
+
+def maybe_declare(entity, channel=None, retry=False, **retry_policy):
+    if not entity.is_bound:
+        assert channel
+        entity = entity.bind(channel)
+    if retry:
+        return _imaybe_declare(entity, **retry_policy)
+    return _maybe_declare(entity)
+
+
+def _maybe_declare(entity):
+    channel = entity.channel
+    if not channel.connection:
+        raise StdChannelError("channel disconnected")
+    declared = channel.connection.client.declared_entities
+    if entity not in declared or getattr(entity, 'auto_delete', None):
+        entity.declare()
+        declared.add(entity)
+        return True
+    return False
+
+
+def _imaybe_declare(entity, **retry_policy):
+    return entity.channel.connection.client.ensure(
+        entity, _maybe_declare, **retry_policy)(entity)
+
+
+def drain_consumer(consumer, limit=1, timeout=None, callbacks=None):
+    acc = deque()
+
+    def on_message(body, message):
+        acc.append((body, message))
+
+    consumer.callbacks = [on_message] + (callbacks or [])
+
+    with consumer:
+        for _ in eventloop(consumer.channel.connection.client,
+                           limit=limit, timeout=timeout,
+                           ignore_timeouts=True):
+            try:
+                yield acc.popleft()
+            except IndexError:
+                pass
+
+
+def itermessages(conn, channel, queue, limit=1, timeout=None,
+                 Consumer=_Consumer, callbacks=None, **kwargs):
+    return drain_consumer(Consumer(channel, queues=[queue], **kwargs),
+                          limit=limit, timeout=timeout, callbacks=callbacks)
+
+
+def eventloop(conn, limit=None, timeout=None, ignore_timeouts=False):
+    """Best practice generator wrapper around ``Connection.drain_events``.
+
+    Able to drain events forever, with a limit, and optionally ignoring
+    timeout errors (a timeout of 1 is often used in environments where
+    the socket can get "stuck", and is a best practice for Kombu consumers).
+
+    **Examples**
+
+    ``eventloop`` is a generator::
+
+        >>> from kombu.common import eventloop
+
+        >>> it = eventloop(connection, timeout=1, ignore_timeouts=True)
+        >>> it.next()   # one event consumed, or timed out.
+
+        >>> for _ in eventloop(connection, timeout=1, ignore_timeouts=True):
+        ...     pass  # loop forever.
+
+    It also takes an optional limit parameter, and timeout errors
+    are propagated by default::
+
+        for _ in eventloop(connection, limit=1, timeout=1):
+            pass
+
+    .. seealso::
+
+        :func:`itermessages`, which is an event loop bound to one or more
+        consumers, that yields any messages received.
+
+    """
+    for i in limit and xrange(limit) or count():
+        try:
+            yield conn.drain_events(timeout=timeout)
+        except socket.timeout:
+            if timeout and not ignore_timeouts:  # pragma: no cover
+                raise
+        except socket.error:  # pragma: no cover
+            pass
+
+
+def send_reply(exchange, req, msg, producer=None, **props):
+    content_type = req.content_type
+    serializer = serialization.registry.type_to_name[content_type]
+    maybe_declare(exchange, producer.channel)
+    producer.publish(
+        msg, exchange=exchange,
+        **dict({'routing_key': req.properties['reply_to'],
+                'correlation_id': req.properties.get('correlation_id'),
+                'serializer': serializer}, **props))
+
+
+def isend_reply(pool, exchange, req, msg, props, **retry_policy):
+    return ipublish(pool, send_reply,
+                    (exchange, req, msg), props, **retry_policy)
+
+
+def collect_replies(conn, channel, queue, *args, **kwargs):
+    no_ack = kwargs.setdefault('no_ack', True)
+    received = False
+    try:
+        for body, message in itermessages(conn, channel, queue,
+                                          *args, **kwargs):
+            if not no_ack:
+                message.ack()
+            received = True
+            yield body
+    finally:
+        if received:
+            channel.after_reply_message_received(queue.name)
+
+
+def _ensure_errback(exc, interval):
+    logger.error(
+        'Connection error: %r. Retry in %ss\n', exc, interval,
+        exc_info=True,
+    )
+
+
+@contextmanager
+def _ignore_errors(conn):
+    try:
+        yield
+    except conn.connection_errors + conn.channel_errors:
+        pass
+
+
+def ignore_errors(conn, fun=None, *args, **kwargs):
+    """Ignore connection and channel errors.
+
+    The first argument must be a connection object, or any other object
+    with ``connection_errors`` and ``channel_errors`` attributes.
+
+    Can be used as a function::
+
+        >>> ignore_errors(conn, consumer.channel.close)
+
+    or as a context manager::
+
+        >>> with ignore_errors(conn):
+        ...     consumer.channel.close()
+
+
+    .. note::
+
+        Connection and channel errors should be properly handled,
+        and not ignored.  Using this function is only acceptable in a
+        cleanup phase, like when a connection is lost or at shutdown.
+
+    """
+    if fun:
+        with _ignore_errors(conn):
+            return fun(*args, **kwargs)
+    return _ignore_errors(conn)
+
+
+def revive_connection(connection, channel, on_revive=None):
+    if on_revive:
+        on_revive(channel)
+
+
+def revive_producer(producer, channel, on_revive=None):
+    revive_connection(producer.connection, channel)
+    if on_revive:
+        on_revive(channel)
+
+
+def insured(pool, fun, args, kwargs, errback=None, on_revive=None, **opts):
+    """Ensures function performing broker commands completes
+    despite intermittent connection failures."""
+    errback = errback or _ensure_errback
+
+    with pool.acquire(block=True) as conn:
+        conn.ensure_connection(errback=errback)
+        # we cache the channel for subsequent calls, this has to be
+        # reset on revival.
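+        # (editor's note): revive_connection() below is passed as autoretry's
+        # on_revive callback, giving callers a hook to rebuild any state tied
+        # to the cached default_channel after a reconnect.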
+ channel = conn.default_channel + revive = partial(revive_connection, conn, on_revive=on_revive) + insured = conn.autoretry(fun, channel, errback=errback, + on_revive=revive, **opts) + retval, _ = insured(*args, **dict(kwargs, connection=conn)) + return retval + + +def ipublish(pool, fun, args=(), kwargs={}, + errback=None, on_revive=None, **retry_policy): + with pool.acquire(block=True) as producer: + errback = errback or _ensure_errback + revive = partial(revive_producer, producer, on_revive=on_revive) + f = producer.connection.ensure(producer, fun, on_revive=revive, + errback=errback, **retry_policy) + return f(*args, **dict(kwargs, producer=producer)) + + +def entry_to_queue(queue, **options): + return Queue.from_dict(queue, **options) + + +class QoS(object): + """Thread safe increment/decrement of a channels prefetch_count. + + :param callback: Function used to set new prefetch count, + e.g. ``consumer.qos`` or ``channel.basic_qos``. Will be called + with a single ``prefetch_count`` keyword argument. + :param initial_value: Initial prefetch count value. + + **Example usage** + + .. code-block:: python + + >>> consumer = Consumer(connection) + >>> qos = QoS(consumer.qos, initial_prefetch_count=2) + >>> qos.update() # set initial + + >>> qos.value + 2 + + >>> def in_some_thread(): + ... qos.increment_eventually() + + >>> def in_some_other_thread(): + ... qos.decrement_eventually() + + >>> while some_loop: + ... if qos.prev != qos.value: + ... qos.update() # prefetch changed so update. + + It can be used with any function supporting a ``prefetch_count`` keyword + argument:: + + >>> channel = connection.channel() + >>> QoS(channel.basic_qos, 10) + + + >>> def set_qos(prefetch_count): + ... some_object.change(prefetch=prefetch_count) + >>> QoS(set_qos, 10) + + """ + prev = None + + def __init__(self, callback, initial_value): + self.callback = callback + self._mutex = threading.RLock() + self.value = initial_value or 0 + + def increment_eventually(self, n=1): + """Increment the value, but do not update the channels QoS. + + The MainThread will be responsible for calling :meth:`update` + when necessary. + + """ + with self._mutex: + if self.value: + self.value = self.value + max(n, 0) + return self.value + + def decrement_eventually(self, n=1): + """Decrement the value, but do not update the channels QoS. + + The MainThread will be responsible for calling :meth:`update` + when necessary. + + """ + with self._mutex: + if self.value: + self.value -= n + return self.value + + def set(self, pcount): + """Set channel prefetch_count setting.""" + if pcount != self.prev: + new_value = pcount + if pcount > PREFETCH_COUNT_MAX: + logger.warn('QoS: Disabled: prefetch_count exceeds %r', + PREFETCH_COUNT_MAX) + new_value = 0 + logger.debug('basic.qos: prefetch_count->%s', new_value) + self.callback(prefetch_count=new_value) + self.prev = pcount + return pcount + + def update(self): + """Update prefetch count with current value.""" + with self._mutex: + return self.set(self.value) diff --git a/awx/lib/site-packages/kombu/compat.py b/awx/lib/site-packages/kombu/compat.py new file mode 100644 index 0000000000..94bd2cdc68 --- /dev/null +++ b/awx/lib/site-packages/kombu/compat.py @@ -0,0 +1,214 @@ +""" +kombu.compat +============ + +Carrot compatible interface for :class:`Publisher` and :class:`Producer`. + +See http://packages.python.org/pypi/carrot for documentation. + +""" +from __future__ import absolute_import + +from itertools import count + +from . 
import messaging +from .entity import Exchange, Queue + +__all__ = ['Publisher', 'Consumer'] + +# XXX compat attribute +entry_to_queue = Queue.from_dict + + +def _iterconsume(connection, consumer, no_ack=False, limit=None): + consumer.consume(no_ack=no_ack) + for iteration in count(0): # for infinity + if limit and iteration >= limit: + raise StopIteration + yield connection.drain_events() + + +class Publisher(messaging.Producer): + exchange = '' + exchange_type = 'direct' + routing_key = '' + durable = True + auto_delete = False + _closed = False + + def __init__(self, connection, exchange=None, routing_key=None, + exchange_type=None, durable=None, auto_delete=None, + channel=None, **kwargs): + if channel: + connection = channel + + self.exchange = exchange or self.exchange + self.exchange_type = exchange_type or self.exchange_type + self.routing_key = routing_key or self.routing_key + + if auto_delete is not None: + self.auto_delete = auto_delete + if durable is not None: + self.durable = durable + + if not isinstance(self.exchange, Exchange): + self.exchange = Exchange(name=self.exchange, + type=self.exchange_type, + routing_key=self.routing_key, + auto_delete=self.auto_delete, + durable=self.durable) + super(Publisher, self).__init__(connection, self.exchange, **kwargs) + + def send(self, *args, **kwargs): + return self.publish(*args, **kwargs) + + def close(self): + super(Publisher, self).close() + self._closed = True + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.close() + + @property + def backend(self): + return self.channel + + +class Consumer(messaging.Consumer): + queue = '' + exchange = '' + routing_key = '' + exchange_type = 'direct' + durable = True + exclusive = False + auto_delete = False + exchange_type = 'direct' + _closed = False + + def __init__(self, connection, queue=None, exchange=None, + routing_key=None, exchange_type=None, durable=None, + exclusive=None, auto_delete=None, **kwargs): + self.backend = connection.channel() + + if durable is not None: + self.durable = durable + if exclusive is not None: + self.exclusive = exclusive + if auto_delete is not None: + self.auto_delete = auto_delete + + self.queue = queue or self.queue + self.exchange = exchange or self.exchange + self.exchange_type = exchange_type or self.exchange_type + self.routing_key = routing_key or self.routing_key + + exchange = Exchange(self.exchange, + type=self.exchange_type, + routing_key=self.routing_key, + auto_delete=self.auto_delete, + durable=self.durable) + queue = Queue(self.queue, + exchange=exchange, + routing_key=self.routing_key, + durable=self.durable, + exclusive=self.exclusive, + auto_delete=self.auto_delete) + super(Consumer, self).__init__(self.backend, queue, **kwargs) + + def revive(self, channel): + self.backend = channel + super(Consumer, self).revive(channel) + + def close(self): + self.cancel() + self.backend.close() + self._closed = True + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.close() + + def __iter__(self): + return self.iterqueue(infinite=True) + + def fetch(self, no_ack=None, enable_callbacks=False): + if no_ack is None: + no_ack = self.no_ack + message = self.queues[0].get(no_ack) + if message: + if enable_callbacks: + self.receive(message.payload, message) + return message + + def process_next(self): + raise NotImplementedError('Use fetch(enable_callbacks=True)') + + def discard_all(self, filterfunc=None): + if filterfunc is not None: + raise NotImplementedError( + 'discard_all does not 
implement filters') + return self.purge() + + def iterconsume(self, limit=None, no_ack=None): + return _iterconsume(self.connection, self, no_ack, limit) + + def wait(self, limit=None): + it = self.iterconsume(limit) + return list(it) + + def iterqueue(self, limit=None, infinite=False): + for items_since_start in count(): # for infinity + item = self.fetch() + if (not infinite and item is None) or \ + (limit and items_since_start >= limit): + raise StopIteration + yield item + + +class ConsumerSet(messaging.Consumer): + + def __init__(self, connection, from_dict=None, consumers=None, + channel=None, **kwargs): + if channel: + self._provided_channel = True + self.backend = channel + else: + self._provided_channel = False + self.backend = connection.channel() + + queues = [] + if consumers: + for consumer in consumers: + queues.extend(consumer.queues) + if from_dict: + for queue_name, queue_options in from_dict.items(): + queues.append(Queue.from_dict(queue_name, **queue_options)) + + super(ConsumerSet, self).__init__(self.backend, queues, **kwargs) + + def iterconsume(self, limit=None, no_ack=False): + return _iterconsume(self.connection, self, no_ack, limit) + + def discard_all(self): + return self.purge() + + def add_consumer_from_dict(self, queue, **options): + return self.add_queue_from_dict(queue, **options) + + def add_consumer(self, consumer): + for queue in consumer.queues: + self.add_queue(queue) + + def revive(self, channel): + self.backend = channel + super(ConsumerSet, self).revive(channel) + + def close(self): + self.cancel() + if not self._provided_channel: + self.channel.close() diff --git a/awx/lib/site-packages/kombu/compression.py b/awx/lib/site-packages/kombu/compression.py new file mode 100644 index 0000000000..34daa7a202 --- /dev/null +++ b/awx/lib/site-packages/kombu/compression.py @@ -0,0 +1,83 @@ +""" +kombu.compression +================= + +Compression utilities. + +""" +from __future__ import absolute_import + +from kombu.utils.encoding import ensure_bytes, bytes_to_str + +import zlib + +_aliases = {} +_encoders = {} +_decoders = {} + +__all__ = ['register', 'encoders', 'get_encoder', + 'get_decoder', 'compress', 'decompress'] + + +def register(encoder, decoder, content_type, aliases=[]): + """Register new compression method. + + :param encoder: Function used to compress text. + :param decoder: Function used to decompress previously compressed text. + :param content_type: The mime type this compression method identifies as. + :param aliases: A list of names to associate with this compression method. + + """ + _encoders[content_type] = encoder + _decoders[content_type] = decoder + _aliases.update((alias, content_type) for alias in aliases) + + +def encoders(): + """Returns a list of available compression methods.""" + return list(_encoders) + + +def get_encoder(t): + """Get encoder by alias name.""" + t = _aliases.get(t, t) + return _encoders[t], t + + +def get_decoder(t): + """Get decoder by alias name.""" + return _decoders[_aliases.get(t, t)] + + +def compress(body, content_type): + """Compress text. + + :param body: The text to compress. + :param content_type: mime-type of compression method to use. + + """ + encoder, content_type = get_encoder(content_type) + return encoder(ensure_bytes(body)), content_type + + +def decompress(body, content_type): + """Decompress compressed text. + + :param body: Previously compressed text to uncompress. + :param content_type: mime-type of compression method used. 
+ + """ + return bytes_to_str(get_decoder(content_type)(body)) + + +register(zlib.compress, + zlib.decompress, + 'application/x-gzip', aliases=['gzip', 'zlib']) +try: + import bz2 +except ImportError: + pass # Jython? +else: + register(bz2.compress, + bz2.decompress, + 'application/x-bz2', aliases=['bzip2', 'bzip']) diff --git a/awx/lib/site-packages/kombu/connection.py b/awx/lib/site-packages/kombu/connection.py new file mode 100644 index 0000000000..9a6146dc9b --- /dev/null +++ b/awx/lib/site-packages/kombu/connection.py @@ -0,0 +1,1051 @@ +""" +kombu.connection +================ + +Broker connection and pools. + +""" +from __future__ import absolute_import +from __future__ import with_statement + +import errno +import os +import socket + +from contextlib import contextmanager +from functools import partial +from itertools import count, cycle +from urllib import quote +from Queue import Empty + +# jython breaks on relative import for .exceptions for some reason +# (Issue #112) +from kombu import exceptions +from .log import get_logger +from .transport import get_transport_cls, supports_librabbitmq +from .utils import cached_property, retry_over_time, shufflecycle +from .utils.compat import ( + OrderedDict, LifoQueue as _LifoQueue, next, get_errno, +) +from .utils.functional import promise +from .utils.url import parse_url + +__all__ = ['Connection', 'ConnectionPool', 'ChannelPool'] + +RESOLVE_ALIASES = {'pyamqp': 'amqp', + 'librabbitmq': 'amqp'} + +_LOG_CONNECTION = os.environ.get('KOMBU_LOG_CONNECTION', False) +_LOG_CHANNEL = os.environ.get('KOMBU_LOG_CHANNEL', False) + +#: List of URI schemes that should not be parsed, but sent +#: directly to the transport instead. +URI_PASSTHROUGH = frozenset(['sqla', 'sqlalchemy', 'zeromq', 'zmq']) + +logger = get_logger(__name__) +roundrobin_failover = cycle + +failover_strategies = { + 'round-robin': roundrobin_failover, + 'shuffle': shufflecycle, +} + + +class Connection(object): + """A connection to the broker. + + :param URL: Broker URL, or a list of URLs, e.g. + + .. code-block:: python + + Connection('amqp://guest:guest@localhost:5672//') + Connection('amqp://foo;amqp://bar', failover_strategy='round-robin') + Connection('redis://', transport_options={ + 'visibility_timeout': 3000, + }) + + import ssl + Connection('amqp://', login_method='EXTERNAL', ssl={ + 'ca_certs': '/etc/pki/tls/certs/something.crt', + 'keyfile': '/etc/something/system.key', + 'certfile': '/etc/something/system.cert', + 'cert_reqs': ssl.CERT_REQUIRED, + }) + + .. admonition:: SSL compatibility + + SSL currently only works with the py-amqp & amqplib transports. + For other transports you can use stunnel. + + :keyword hostname: Default host name/address if not provided in the URL. + :keyword userid: Default user name if not provided in the URL. + :keyword password: Default password if not provided in the URL. + :keyword virtual_host: Default virtual host if not provided in the URL. + :keyword port: Default port if not provided in the URL. + :keyword ssl: Use SSL to connect to the server. Default is ``False``. + May not be supported by the specified transport. + :keyword transport: Default transport if not specified in the URL. + :keyword connect_timeout: Timeout in seconds for connecting to the + server. May not be supported by the specified transport. + :keyword transport_options: A dict of additional connection arguments to + pass to alternate kombu channel implementations. Consult the transport + documentation for available options. 
+ :keyword heartbeat: Heartbeat interval in int/float seconds. + Note that if heartbeats are enabled then the :meth:`heartbeat_check` + method must be called at an interval twice the frequency of the + heartbeat: e.g. if the heartbeat is 10, then the heartbeats must be + checked every 5 seconds (the rate can also be controlled by + the ``rate`` argument to :meth:`heartbeat_check`). + + .. note:: + + The connection is established lazily when needed. If you need the + connection to be established, then force it by calling + :meth:`connect`:: + + >>> conn.connect() + + and always remember to close the connection:: + + >>> conn.release() + + """ + port = None + virtual_host = '/' + connect_timeout = 5 + + _closed = None + _connection = None + _default_channel = None + _transport = None + _logger = False + uri_prefix = None + + #: The cache of declared entities is per connection, + #: in case the server loses data. + declared_entities = None + + #: This is set to True if there is still more data to read + #: after a call to :meth:`drain_nowait`. + more_to_read = False + + #: Iterator returning the next broker URL to try in the event + #: of connection failure (initialized by :attr:`failover_strategy`). + cycle = None + + #: Additional transport specific options, + #: passed on to the transport instance. + transport_options = None + + #: Strategy used to select new hosts when reconnecting after connection + #: failure. One of "round-robin", "shuffle" or any custom iterator + #: constantly yielding new URLs to try. + failover_strategy = 'round-robin' + + #: Heartbeat value, currently only supported by the py-amqp transport. + heartbeat = None + + hostname = userid = password = ssl = login_method = None + + def __init__(self, hostname='localhost', userid=None, + password=None, virtual_host=None, port=None, insist=False, + ssl=False, transport=None, connect_timeout=5, + transport_options=None, login_method=None, uri_prefix=None, + heartbeat=0, failover_strategy='round-robin', **kwargs): + alt = [] + # have to spell the args out, just to get nice docstrings :( + params = self._initial_params = { + 'hostname': hostname, 'userid': userid, + 'password': password, 'virtual_host': virtual_host, + 'port': port, 'insist': insist, 'ssl': ssl, + 'transport': transport, 'connect_timeout': connect_timeout, + 'login_method': login_method, 'heartbeat': heartbeat + } + + if hostname and not isinstance(hostname, basestring): + alt.extend(hostname) + hostname = alt[0] + if hostname and '://' in hostname: + if ';' in hostname: + alt.extend(hostname.split(';')) + hostname = alt[0] + if '+' in hostname[:hostname.index('://')]: + # e.g. sqla+mysql://root:masterkey@localhost/ + params['transport'], params['hostname'] = \ + hostname.split('+', 1) + self.uri_prefix = params['transport'] + else: + if transport not in URI_PASSTHROUGH: + params.update(parse_url(hostname)) + self._init_params(**params) + + # fallback hosts + self.alt = alt + self.failover_strategy = failover_strategies.get( + failover_strategy or 'round-robin') or failover_strategy + if self.alt: + self.cycle = self.failover_strategy(self.alt) + next(self.cycle) # skip first entry + + # backend_cls argument will be removed shortly.
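The heartbeat contract documented above (check at twice the heartbeat frequency) is easiest to honour with a small timer loop; a sketch, assuming a broker that supports heartbeats and the py-amqp transport::

    import time
    from kombu import Connection

    conn = Connection('pyamqp://guest:guest@localhost//', heartbeat=10)
    conn.connect()
    while True:
        conn.heartbeat_check(rate=2)  # heartbeat=10, so tick every 5 seconds
        time.sleep(5)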
+ self.transport_cls = self.transport_cls or kwargs.get('backend_cls') + + if transport_options is None: + transport_options = {} + self.transport_options = transport_options + + if _LOG_CONNECTION: # pragma: no cover + self._logger = True + + if uri_prefix: + self.uri_prefix = uri_prefix + + self.declared_entities = set() + + def switch(self, url): + """Switch connection parameters to use a new URL (does not + reconnect)""" + self.close() + self._closed = False + self._init_params(**dict(self._initial_params, **parse_url(url))) + + def maybe_switch_next(self): + """Switch to next URL given by the current failover strategy (if + any).""" + if self.cycle: + self.switch(next(self.cycle)) + + def _init_params(self, hostname, userid, password, virtual_host, port, + insist, ssl, transport, connect_timeout, + login_method, heartbeat): + transport = transport or 'amqp' + if transport == 'amqp' and supports_librabbitmq(): + transport = 'librabbitmq' + self.hostname = hostname + self.userid = userid + self.password = password + self.login_method = login_method + self.virtual_host = virtual_host or self.virtual_host + self.port = port or self.port + self.insist = insist + self.connect_timeout = connect_timeout + self.ssl = ssl + self.transport_cls = transport + self.heartbeat = heartbeat and float(heartbeat) + + def _debug(self, msg, *args, **kwargs): + fmt = '[Kombu connection:0x%(id)x] %(msg)s' + if self._logger: # pragma: no cover + logger.debug(fmt % {'id': id(self), 'msg': unicode(msg)}, + *args, **kwargs) + + def connect(self): + """Establish connection to server immediately.""" + self._closed = False + return self.connection + + def channel(self): + """Create and return a new channel.""" + self._debug('create channel') + chan = self.transport.create_channel(self.connection) + if _LOG_CHANNEL: # pragma: no cover + from .utils.debug import Logwrapped + return Logwrapped(chan, 'kombu.channel', + '[Kombu channel:%(channel_id)s] ') + return chan + + def heartbeat_check(self, rate=2): + """Verify that heartbeats are sent and received. + + If the current transport does not support heartbeats then + this is a noop operation. + + :keyword rate: Rate is how often the tick is called + compared to the actual heartbeat value. E.g. if + the heartbeat is set to 3 seconds, and the tick + is called every 3 / 2 seconds, then the rate is 2. + + """ + return self.transport.heartbeat_check(self.connection, rate=rate) + + def drain_events(self, **kwargs): + """Wait for a single event from the server. + + :keyword timeout: Timeout in seconds before we give up. + Raises :exc:`socket.timeout` if the timeout is exceeded. + + Usually used from an event loop. + + """ + return self.transport.drain_events(self.connection, **kwargs) + + def drain_nowait(self, *args, **kwargs): + """Non-blocking version of :meth:`drain_events`. + + Sets :attr:`more_to_read` if there is more data to read. + The application MUST call this method until this is unset, and before + calling select/epoll/kqueue's poll() again.
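Read as a loop, the MUST-call-until-unset rule above looks like this; a sketch continuing with the ``conn`` from the previous example::

    # drain everything that is ready before returning to select()/epoll()
    while conn.drain_nowait():
        pass  # more_to_read was set; call again
    # more_to_read is now False: safe to poll the socket again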
+ + """ + try: + self.drain_events(timeout=0) + except socket.timeout: + self.more_to_read = False + return False + except socket.error, exc: + if get_errno(exc) in (errno.EAGAIN, errno.EINTR): + self.more_to_read = False + return False + raise + self.more_to_read = True + return True + + def maybe_close_channel(self, channel): + """Close given channel, but ignore connection and channel errors.""" + try: + channel.close() + except (self.connection_errors + self.channel_errors): + pass + + def _do_close_self(self): + # Closes only the connection and channel(s) not transport. + self.declared_entities.clear() + if self._default_channel: + self.maybe_close_channel(self._default_channel) + if self._connection: + try: + self.transport.close_connection(self._connection) + except self.connection_errors + (AttributeError, socket.error): + pass + self._connection = None + + def _close(self): + """Really close connection, even if part of a connection pool.""" + self._do_close_self() + if self._transport: + self._transport.client = None + self._transport = None + self._debug('closed') + self._closed = True + + def release(self): + """Close the connection (if open).""" + self._close() + close = release + + def ensure_connection(self, errback=None, max_retries=None, + interval_start=2, interval_step=2, interval_max=30, + callback=None): + """Ensure we have a connection to the server. + + If not retry establishing the connection with the settings + specified. + + :keyword errback: Optional callback called each time the connection + can't be established. Arguments provided are the exception + raised and the interval that will be slept ``(exc, interval)``. + + :keyword max_retries: Maximum number of times to retry. + If this limit is exceeded the connection error will be re-raised. + + :keyword interval_start: The number of seconds we start sleeping for. + :keyword interval_step: How many seconds added to the interval + for each retry. + :keyword interval_max: Maximum number of seconds to sleep between + each retry. + :keyword callback: Optional callback that is called for every + internal iteration (1 s) + + """ + def on_error(exc, intervals, retries, interval=0): + round = self.completes_cycle(retries) + if round: + interval = next(intervals) + if errback: + errback(exc, interval) + self.maybe_switch_next() # select next host + + return interval if round else 0 + + retry_over_time(self.connect, self.recoverable_connection_errors, + (), {}, on_error, max_retries, + interval_start, interval_step, interval_max, callback) + return self + + def completes_cycle(self, retries): + """Returns true if the cycle is complete after number of `retries`.""" + return not (retries + 1) % len(self.alt) if self.alt else True + + def revive(self, new_channel): + """Revive connection after connection re-established.""" + if self._default_channel: + self.maybe_close_channel(self._default_channel) + self._default_channel = None + + def _default_ensure_callback(self, exc, interval): + logger.error("Ensure: Operation error: %r. Retry in %ss", + exc, interval, exc_info=True) + + def ensure(self, obj, fun, errback=None, max_retries=None, + interval_start=1, interval_step=1, interval_max=1, + on_revive=None): + """Ensure operation completes, regardless of any channel/connection + errors occurring. + + Will retry by establishing the connection, and reapplying + the function. + + :param fun: Method to apply. + + :keyword errback: Optional callback called each time the connection + can't be established. 
Arguments provided are the exception + raised and the interval that will be slept ``(exc, interval)``. + + :keyword max_retries: Maximum number of times to retry. + If this limit is exceeded the connection error will be re-raised. + + :keyword interval_start: The number of seconds we start sleeping for. + :keyword interval_step: How many seconds added to the interval + for each retry. + :keyword interval_max: Maximum number of seconds to sleep between + each retry. + + **Example** + + This is an example ensuring a publish operation:: + + >>> def errback(exc, interval): + ... print("Couldn't publish message: %r. Retry in %ds" % ( + ... exc, interval)) + >>> publish = conn.ensure(producer, producer.publish, + ... errback=errback, max_retries=3) + >>> publish(message, routing_key) + + """ + def _ensured(*args, **kwargs): + got_connection = 0 + for retries in count(0): # for infinity + try: + return fun(*args, **kwargs) + except self.recoverable_connection_errors, exc: + if got_connection: + raise + if max_retries is not None and retries > max_retries: + raise + self._debug('ensure connection error: %r', exc, exc_info=1) + self._connection = None + self._do_close_self() + errback and errback(exc, 0) + remaining_retries = None + if max_retries is not None: + remaining_retries = max(max_retries - retries, 1) + self.ensure_connection(errback, + remaining_retries, + interval_start, + interval_step, + interval_max) + new_channel = self.channel() + self.revive(new_channel) + obj.revive(new_channel) + if on_revive: + on_revive(new_channel) + got_connection += 1 + except self.recoverable_channel_errors, exc: + if max_retries is not None and retries > max_retries: + raise + self._debug('ensure channel error: %r', exc, exc_info=1) + errback and errback(exc, 0) + _ensured.__name__ = "%s(ensured)" % fun.__name__ + _ensured.__doc__ = fun.__doc__ + _ensured.__module__ = fun.__module__ + return _ensured + + def autoretry(self, fun, channel=None, **ensure_options): + """Decorator for functions supporting a ``channel`` keyword argument. + + The resulting callable will retry calling the function if + it raises connection or channel related errors. + The return value will be a tuple of ``(retval, last_created_channel)``. + + If a ``channel`` is not provided, then one will be automatically + acquired (remember to close it afterwards). + + See :meth:`ensure` for the full list of supported keyword arguments. 
+ + Example usage:: + + channel = connection.channel() + try: + ret, channel = connection.autoretry(publish_messages, channel) + finally: + channel.close() + """ + channels = [channel] + create_channel = self.channel + + class Revival(object): + __name__ = fun.__name__ + __module__ = fun.__module__ + __doc__ = fun.__doc__ + + def revive(self, channel): + channels[0] = channel + + def __call__(self, *args, **kwargs): + if channels[0] is None: + self.revive(create_channel()) + kwargs['channel'] = channels[0] + return fun(*args, **kwargs), channels[0] + + revive = Revival() + return self.ensure(revive, revive, **ensure_options) + + def create_transport(self): + return self.get_transport_cls()(client=self) + create_backend = create_transport # FIXME + + def get_transport_cls(self): + """Get the currently used transport class.""" + transport_cls = self.transport_cls + if not transport_cls or isinstance(transport_cls, basestring): + transport_cls = get_transport_cls(transport_cls) + return transport_cls + + def clone(self, **kwargs): + """Create a copy of the connection with the same connection + settings.""" + return self.__class__(**dict(self._info(resolve=False), **kwargs)) + + def _info(self, resolve=True): + transport_cls = self.transport_cls + if resolve: + transport_cls = RESOLVE_ALIASES.get(transport_cls, transport_cls) + D = self.transport.default_connection_params + + if self.alt: + hostname = ";".join(self.alt) + else: + hostname = self.hostname or D.get('hostname') + if self.uri_prefix: + hostname = '%s+%s' % (self.uri_prefix, hostname) + + info = (('hostname', hostname), + ('userid', self.userid or D.get('userid')), + ('password', self.password or D.get('password')), + ('virtual_host', self.virtual_host or D.get('virtual_host')), + ('port', self.port or D.get('port')), + ('insist', self.insist), + ('ssl', self.ssl), + ('transport', transport_cls), + ('connect_timeout', self.connect_timeout), + ('transport_options', self.transport_options), + ('login_method', self.login_method or D.get('login_method')), + ('uri_prefix', self.uri_prefix), + ('heartbeat', self.heartbeat)) + return info + + def info(self): + """Get connection info.""" + return OrderedDict(self._info()) + + def __eqhash__(self): + return hash('%s|%s|%s|%s|%s|%s' % ( + self.transport_cls, self.hostname, self.userid, + self.password, self.virtual_host, self.port)) + + def as_uri(self, include_password=False): + """Convert connection parameters to URL form.""" + if self.transport_cls in URI_PASSTHROUGH: + return self.transport_cls + '+' + (self.hostname or 'localhost') + quoteS = partial(quote, safe='') # strict quote + fields = self.info() + port = fields['port'] + userid = fields['userid'] + password = fields['password'] + transport = fields['transport'] + url = '%s://' % transport + if userid: + url += quoteS(userid) + if include_password and password: + url += ':' + quoteS(password) + url += '@' + url += quoteS(fields['hostname']) + + # If the transport equals 'mongodb' the + # hostname contains a full mongodb connection + # URI. Let pymongo retrieve the port from there. + if port and transport != 'mongodb': + url += ':' + str(port) + + url += '/' + quote(fields['virtual_host']) + if self.uri_prefix: + return '%s+%s' % (self.uri_prefix, url) + return url + + def Pool(self, limit=None, preload=None): + """Pool of connections. + + See :class:`ConnectionPool`. + + :keyword limit: Maximum number of active connections. + Default is no limit. + :keyword preload: Number of connections to preload + when the pool is created.
Default is 0. + + *Example usage*:: + + >>> pool = connection.Pool(2) + >>> c1 = pool.acquire() + >>> c2 = pool.acquire() + >>> c3 = pool.acquire() + Traceback (most recent call last): + File "<stdin>", line 1, in <module> + File "kombu/connection.py", line 354, in acquire + raise ConnectionLimitExceeded(self.limit) + kombu.exceptions.ConnectionLimitExceeded: 2 + >>> c1.release() + >>> c3 = pool.acquire() + + """ + return ConnectionPool(self, limit, preload) + + def ChannelPool(self, limit=None, preload=None): + """Pool of channels. + + See :class:`ChannelPool`. + + :keyword limit: Maximum number of active channels. + Default is no limit. + :keyword preload: Number of channels to preload + when the pool is created. Default is 0. + + *Example usage*:: + + >>> pool = connection.ChannelPool(2) + >>> c1 = pool.acquire() + >>> c2 = pool.acquire() + >>> c3 = pool.acquire() + Traceback (most recent call last): + File "<stdin>", line 1, in <module> + File "kombu/connection.py", line 354, in acquire + raise ChannelLimitExceeded(self.limit) + kombu.connection.ChannelLimitExceeded: 2 + >>> c1.release() + >>> c3 = pool.acquire() + + """ + return ChannelPool(self, limit, preload) + + def Producer(self, channel=None, *args, **kwargs): + """Create new :class:`kombu.Producer` instance using this + connection.""" + from .messaging import Producer + return Producer(channel or self, *args, **kwargs) + + def Consumer(self, queues=None, channel=None, *args, **kwargs): + """Create new :class:`kombu.Consumer` instance using this + connection.""" + from .messaging import Consumer + return Consumer(channel or self, queues, *args, **kwargs) + + def SimpleQueue(self, name, no_ack=None, queue_opts=None, + exchange_opts=None, channel=None, **kwargs): + """Create new :class:`~kombu.simple.SimpleQueue`, using a channel + from this connection. + + If ``name`` is a string, a queue and exchange will be automatically + created using that name as the name of the queue and exchange, + also it will be used as the default routing key. + + :param name: Name of the queue/or a :class:`~kombu.Queue`. + :keyword no_ack: Disable acknowledgements. Default is false. + :keyword queue_opts: Additional keyword arguments passed to the + constructor of the automatically created + :class:`~kombu.Queue`. + :keyword exchange_opts: Additional keyword arguments passed to the + constructor of the automatically created + :class:`~kombu.Exchange`. + :keyword channel: Channel to use. If not specified a new channel + from the current connection will be used. Remember to call + :meth:`~kombu.simple.SimpleQueue.close` when done with the + object. + + """ + from .simple import SimpleQueue + return SimpleQueue(channel or self, name, no_ack, queue_opts, + exchange_opts, **kwargs) + + def SimpleBuffer(self, name, no_ack=None, queue_opts=None, + exchange_opts=None, channel=None, **kwargs): + """Create new :class:`~kombu.simple.SimpleBuffer` using a channel + from this connection. + + Same as :meth:`SimpleQueue`, but configured with buffering + semantics. The resulting queue and exchange will not be durable, also + auto delete is enabled. Messages will be transient (not persistent), + and acknowledgements are disabled (``no_ack``).
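The SimpleQueue helper described above wraps declaration, publishing and polling in one object; a minimal sketch against a local broker::

    from kombu import Connection

    conn = Connection('pyamqp://guest:guest@localhost//')
    queue = conn.SimpleQueue('hipri')
    queue.put({'hello': 'world'})
    message = queue.get(timeout=1)
    message.ack()
    queue.close()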
+ + """ + from .simple import SimpleBuffer + return SimpleBuffer(channel or self, name, no_ack, queue_opts, + exchange_opts, **kwargs) + + def _establish_connection(self): + self._debug('establishing connection...') + conn = self.transport.establish_connection() + self._debug('connection established: %r' % (conn, )) + return conn + + def __repr__(self): + """``x.__repr__() <==> repr(x)``""" + return '' % (self.as_uri(), id(self)) + + def __copy__(self): + """``x.__copy__() <==> copy(x)``""" + return self.clone() + + def __reduce__(self): + return (self.__class__, tuple(self.info().values()), None) + + def __enter__(self): + return self + + def __exit__(self, *args): + self.release() + + @property + def connected(self): + """Returns true if the connection has been established.""" + return (not self._closed and + self._connection is not None and + self.transport.verify_connection(self._connection)) + + @property + def connection(self): + """The underlying connection object. + + .. warning:: + This instance is transport specific, so do not + depend on the interface of this object. + + """ + if not self._closed: + if not self.connected: + self.declared_entities.clear() + self._default_channel = None + self._connection = self._establish_connection() + self._closed = False + return self._connection + + @property + def default_channel(self): + """Default channel, created upon access and closed when the connection + is closed. + + Can be used for automatic channel handling when you only need one + channel, and also it is the channel implicitly used if a connection + is passed instead of a channel, to functions that require a channel. + + """ + # make sure we're still connected, and if not refresh. + self.connection + if self._default_channel is None: + self._default_channel = self.channel() + return self._default_channel + + @property + def host(self): + """The host as a host name/port pair separated by colon.""" + return ':'.join([self.hostname, str(self.port)]) + + @property + def transport(self): + if self._transport is None: + self._transport = self.create_transport() + return self._transport + + @cached_property + def manager(self): + """Experimental manager that can be used to manage/monitor the broker + instance. Not available for all transports.""" + return self.transport.manager + + def get_manager(self, *args, **kwargs): + return self.transport.get_manager(*args, **kwargs) + + @cached_property + def recoverable_connection_errors(self): + try: + return self.transport.recoverable_connection_errors + except AttributeError: + # There were no such classification before, + # and all errors were assumed to be recoverable, + # so this is a fallback for transports that do + # not support the new recoverable/irrecoverable classes. 
+ return self.connection_errors + self.channel_errors + + @cached_property + def recoverable_channel_errors(self): + try: + return self.transport.recoverable_channel_errors + except AttributeError: + return () + + @cached_property + def connection_errors(self): + """List of exceptions that may be raised by the connection.""" + return self.transport.connection_errors + + @cached_property + def channel_errors(self): + """List of exceptions that may be raised by the channel.""" + return self.transport.channel_errors + + @property + def eventmap(self): + """Map of events to be registered in an eventloop for this connection + to be used in non-blocking fashion.""" + return self.transport.eventmap(self.connection) + + @property + def supports_heartbeats(self): + return self.transport.supports_heartbeats + + @property + def is_evented(self): + return self.transport.supports_ev +BrokerConnection = Connection + + +class Resource(object): + LimitExceeded = exceptions.LimitExceeded + + def __init__(self, limit=None, preload=None): + self.limit = limit + self.preload = preload or 0 + + self._resource = _LifoQueue() + self._dirty = set() + self.setup() + + def setup(self): + raise NotImplementedError('subclass responsibility') + + def _add_when_empty(self): + if self.limit and len(self._dirty) >= self.limit: + raise self.LimitExceeded(self.limit) + # All taken, put new on the queue and + # try get again, this way the first in line + # will get the resource. + self._resource.put_nowait(self.new()) + + def acquire(self, block=False, timeout=None): + """Acquire resource. + + :keyword block: If the limit is exceeded, + block until there is an available item. + :keyword timeout: Timeout to wait + if ``block`` is true. Default is :const:`None` (forever). + + :raises LimitExceeded: if block is false + and the limit has been exceeded. + + """ + if self.limit: + while 1: + try: + R = self._resource.get(block=block, timeout=timeout) + except Empty: + self._add_when_empty() + else: + try: + R = self.prepare(R) + except BaseException: + if isinstance(R, promise): + # not evaluated yet, just put it back + self._resource.put_nowait(R) + else: + # evaluated so must try to release/close first. + self.release(R) + raise + self._dirty.add(R) + break + else: + R = self.prepare(self.new()) + + def release(): + """Release resource so it can be used by another thread. + + The caller is responsible for discarding the object, + and to never use the resource again. A new resource must + be acquired if so needed. + + """ + self.release(R) + R.release = release + + return R + + def prepare(self, resource): + return resource + + def close_resource(self, resource): + resource.close() + + def release_resource(self, resource): + pass + + def replace(self, resource): + """Replace resource with a new instance. This can be used in case + of defective resources.""" + if self.limit: + self._dirty.discard(resource) + self.close_resource(resource) + + def release(self, resource): + if self.limit: + self._dirty.discard(resource) + self._resource.put_nowait(resource) + self.release_resource(resource) + else: + self.close_resource(resource) + + def force_close_all(self): + """Closes and removes all resources in the pool (also those in use). + + Can be used to close resources from parent processes + after fork (e.g. sockets/connections).
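The acquire/release contract above is the whole life cycle of a pooled resource; a sketch using the ConnectionPool returned by :meth:`Pool`::

    from kombu import Connection

    conn = Connection('pyamqp://guest:guest@localhost//')
    pool = conn.Pool(limit=2)
    pooled = pool.acquire(block=True)
    try:
        pooled.connect()  # use the pooled connection
    finally:
        pooled.release()  # hand it back to the pool instead of closing it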
+ + """ + dirty = self._dirty + resource = self._resource + while 1: + try: + dres = dirty.pop() + except KeyError: + break + try: + self.close_resource(dres) + except AttributeError: # Issue #78 + pass + + mutex = getattr(resource, 'mutex', None) + if mutex: + mutex.acquire() + try: + while 1: + try: + res = resource.queue.pop() + except IndexError: + break + try: + self.close_resource(res) + except AttributeError: + pass # Issue #78 + finally: + if mutex: # pragma: no cover + mutex.release() + + if os.environ.get('KOMBU_DEBUG_POOL'): # pragma: no cover + _orig_acquire = acquire + _orig_release = release + + _next_resource_id = 0 + + def acquire(self, *args, **kwargs): # noqa + import traceback + id = self._next_resource_id = self._next_resource_id + 1 + print('+%s ACQUIRE %s' % (id, self.__class__.__name__, )) + r = self._orig_acquire(*args, **kwargs) + r._resource_id = id + print('-%s ACQUIRE %s' % (id, self.__class__.__name__, )) + if not hasattr(r, 'acquired_by'): + r.acquired_by = [] + r.acquired_by.append(traceback.format_stack()) + return r + + def release(self, resource): # noqa + id = resource._resource_id + print('+%s RELEASE %s' % (id, self.__class__.__name__, )) + r = self._orig_release(resource) + print('-%s RELEASE %s' % (id, self.__class__.__name__, )) + self._next_resource_id -= 1 + return r + + +class ConnectionPool(Resource): + LimitExceeded = exceptions.ConnectionLimitExceeded + + def __init__(self, connection, limit=None, preload=None): + self.connection = connection + super(ConnectionPool, self).__init__(limit=limit, + preload=preload) + + def new(self): + return self.connection.clone() + + def release_resource(self, resource): + try: + resource._debug('released') + except AttributeError: + pass + + def close_resource(self, resource): + resource._close() + + @contextmanager + def acquire_channel(self, block=False): + with self.acquire(block=block) as connection: + yield connection, connection.default_channel + + def setup(self): + if self.limit: + for i in xrange(self.limit): + if i < self.preload: + conn = self.new() + conn.connect() + else: + conn = promise(self.new) + self._resource.put_nowait(conn) + + def prepare(self, resource): + if callable(resource): + resource = resource() + resource._debug('acquired') + return resource + + +class ChannelPool(Resource): + LimitExceeded = exceptions.ChannelLimitExceeded + + def __init__(self, connection, limit=None, preload=None): + self.connection = connection + super(ChannelPool, self).__init__(limit=limit, + preload=preload) + + def new(self): + return promise(self.connection.channel) + + def setup(self): + channel = self.new() + if self.limit: + for i in xrange(self.limit): + self._resource.put_nowait( + i < self.preload and channel() or promise(channel)) + + def prepare(self, channel): + if callable(channel): + channel = channel() + return channel + + +def maybe_channel(channel): + """Returns channel, or returns the default_channel if it's a + connection.""" + if isinstance(channel, Connection): + return channel.default_channel + return channel + + +def is_connection(obj): + return isinstance(obj, Connection) diff --git a/awx/lib/site-packages/kombu/entity.py b/awx/lib/site-packages/kombu/entity.py new file mode 100644 index 0000000000..32b199c377 --- /dev/null +++ b/awx/lib/site-packages/kombu/entity.py @@ -0,0 +1,699 @@ +""" +kombu.entity +================ + +Exchange and Queue declarations. 
+ +""" +from __future__ import absolute_import + +from .abstract import MaybeChannelBound + +TRANSIENT_DELIVERY_MODE = 1 +PERSISTENT_DELIVERY_MODE = 2 +DELIVERY_MODES = {'transient': TRANSIENT_DELIVERY_MODE, + 'persistent': PERSISTENT_DELIVERY_MODE} + +__all__ = ['Exchange', 'Queue'] + + +def pretty_bindings(bindings): + return '[%s]' % (', '.join(map(str, bindings))) + + +class Exchange(MaybeChannelBound): + """An Exchange declaration. + + :keyword name: See :attr:`name`. + :keyword type: See :attr:`type`. + :keyword channel: See :attr:`channel`. + :keyword durable: See :attr:`durable`. + :keyword auto_delete: See :attr:`auto_delete`. + :keyword delivery_mode: See :attr:`delivery_mode`. + :keyword arguments: See :attr:`arguments`. + + .. attribute:: name + + Name of the exchange. Default is no name (the default exchange). + + .. attribute:: type + + AMQP defines four default exchange types (routing algorithms) that + covers most of the common messaging use cases. An AMQP broker can + also define additional exchange types, so see your broker + manual for more information about available exchange types. + + * `direct` (*default*) + + Direct match between the routing key in the message, and the + routing criteria used when a queue is bound to this exchange. + + * `topic` + + Wildcard match between the routing key and the routing pattern + specified in the exchange/queue binding. The routing key is + treated as zero or more words delimited by `"."` and + supports special wildcard characters. `"*"` matches a + single word and `"#"` matches zero or more words. + + * `fanout` + + Queues are bound to this exchange with no arguments. Hence any + message sent to this exchange will be forwarded to all queues + bound to this exchange. + + * `headers` + + Queues are bound to this exchange with a table of arguments + containing headers and values (optional). A special argument + named "x-match" determines the matching algorithm, where + `"all"` implies an `AND` (all pairs must match) and + `"any"` implies `OR` (at least one pair must match). + + :attr:`arguments` is used to specify the arguments. + + This description of AMQP exchange types was shamelessly stolen + from the blog post `AMQP in 10 minutes: Part 4`_ by + Rajith Attapattu. This article is recommended reading. + + .. _`AMQP in 10 minutes: Part 4`: + http://bit.ly/amqp-exchange-types + + .. attribute:: channel + + The channel the exchange is bound to (if bound). + + .. attribute:: durable + + Durable exchanges remain active when a server restarts. Non-durable + exchanges (transient exchanges) are purged when a server restarts. + Default is :const:`True`. + + .. attribute:: auto_delete + + If set, the exchange is deleted when all queues have finished + using it. Default is :const:`False`. + + .. attribute:: delivery_mode + + The default delivery mode used for messages. The value is an integer, + or alias string. + + * 1 or `"transient"` + + The message is transient. Which means it is stored in + memory only, and is lost if the server dies or restarts. + + * 2 or "persistent" (*default*) + The message is persistent. Which means the message is + stored both in-memory, and on disk, and therefore + preserved if the server dies or restarts. + + The default value is 2 (persistent). + + .. attribute:: arguments + + Additional arguments to specify when the exchange is declared. 
+ + """ + TRANSIENT_DELIVERY_MODE = TRANSIENT_DELIVERY_MODE + PERSISTENT_DELIVERY_MODE = PERSISTENT_DELIVERY_MODE + + name = '' + type = 'direct' + durable = True + auto_delete = False + delivery_mode = PERSISTENT_DELIVERY_MODE + + attrs = ( + ('name', None), + ('type', None), + ('arguments', None), + ('durable', bool), + ('auto_delete', bool), + ('delivery_mode', lambda m: DELIVERY_MODES.get(m) or m), + ) + + def __init__(self, name='', type='', channel=None, **kwargs): + super(Exchange, self).__init__(**kwargs) + self.name = name or self.name + self.type = type or self.type + self.maybe_bind(channel) + + def __hash__(self): + return hash('E|%s' % (self.name, )) + + def declare(self, nowait=False, passive=False): + """Declare the exchange. + + Creates the exchange on the broker. + + :keyword nowait: If set the server will not respond, and a + response will not be waited for. Default is :const:`False`. + + """ + if self.name: + return self.channel.exchange_declare( + exchange=self.name, type=self.type, durable=self.durable, + auto_delete=self.auto_delete, arguments=self.arguments, + nowait=nowait, passive=passive, + ) + + def bind_to(self, exchange='', routing_key='', + arguments=None, nowait=False, **kwargs): + """Binds the exchange to another exchange. + + :keyword nowait: If set the server will not respond, and the call + will not block waiting for a response. Default is :const:`False`. + + """ + if isinstance(exchange, Exchange): + exchange = exchange.name + return self.channel.exchange_bind(destination=self.name, + source=exchange, + routing_key=routing_key, + nowait=nowait, + arguments=arguments) + + def unbind_from(self, source='', routing_key='', + nowait=False, arguments=None): + """Delete previously created exchange binding from the server.""" + if isinstance(source, Exchange): + source = source.name + return self.channel.exchange_unbind(destination=self.name, + source=source, + routing_key=routing_key, + nowait=nowait, + arguments=arguments) + + def Message(self, body, delivery_mode=None, priority=None, + content_type=None, content_encoding=None, + properties=None, headers=None): + """Create message instance to be sent with :meth:`publish`. + + :param body: Message body. + + :keyword delivery_mode: Set custom delivery mode. Defaults + to :attr:`delivery_mode`. + + :keyword priority: Message priority, 0 to 9. (currently not + supported by RabbitMQ). + + :keyword content_type: The messages content_type. If content_type + is set, no serialization occurs as it is assumed this is either + a binary object, or you've done your own serialization. + Leave blank if using built-in serialization as our library + properly sets content_type. + + :keyword content_encoding: The character set in which this object + is encoded. Use "binary" if sending in raw binary objects. + Leave blank if using built-in serialization as our library + properly sets content_encoding. + + :keyword properties: Message properties. + + :keyword headers: Message headers. + + """ + properties = {} if properties is None else properties + dm = delivery_mode or self.delivery_mode + properties['delivery_mode'] = \ + DELIVERY_MODES[dm] if (dm != 2 and dm != 1) else dm + return self.channel.prepare_message(body, + properties=properties, + priority=priority, + content_type=content_type, + content_encoding=content_encoding, + headers=headers) + + def publish(self, message, routing_key=None, mandatory=False, + immediate=False, exchange=None): + """Publish message. + + :param message: :meth:`Message` instance to publish. 
+ :param routing_key: Routing key. + :param mandatory: Currently not supported. + :param immediate: Currently not supported. + + """ + exchange = exchange or self.name + return self.channel.basic_publish(message, + exchange=exchange, + routing_key=routing_key, + mandatory=mandatory, + immediate=immediate) + + def delete(self, if_unused=False, nowait=False): + """Delete the exchange declaration on server. + + :keyword if_unused: Delete only if the exchange has no bindings. + Default is :const:`False`. + + :keyword nowait: If set the server will not respond, and a + response will not be waited for. Default is :const:`False`. + + """ + return self.channel.exchange_delete(exchange=self.name, + if_unused=if_unused, + nowait=nowait) + + def __eq__(self, other): + if isinstance(other, Exchange): + return (self.name == other.name and + self.type == other.type and + self.arguments == other.arguments and + self.durable == other.durable and + self.auto_delete == other.auto_delete and + self.delivery_mode == other.delivery_mode) + return False + + def __repr__(self): + return super(Exchange, self).__repr__(str(self)) + + def __str__(self): + return 'Exchange %s(%s)' % (self.name or repr(''), self.type) + + @property + def can_cache_declaration(self): + return self.durable + + +class binding(object): + """Represents a queue or exchange binding. + + :keyword exchange: Exchange to bind to. + :keyword routing_key: Routing key used as binding key. + :keyword arguments: Arguments for bind operation. + :keyword unbind_arguments: Arguments for unbind operation. + + """ + + def __init__(self, exchange=None, routing_key='', + arguments=None, unbind_arguments=None): + self.exchange = exchange + self.routing_key = routing_key + self.arguments = arguments + self.unbind_arguments = unbind_arguments + + def declare(self, channel, nowait=False): + """Declare destination exchange.""" + if self.exchange and self.exchange.name: + ex = self.exchange(channel) + ex.declare(nowait=nowait) + + def bind(self, entity, nowait=False): + """Bind entity to this binding.""" + entity.bind_to(exchange=self.exchange, + routing_key=self.routing_key, + arguments=self.arguments, + nowait=nowait) + + def unbind(self, entity, nowait=False): + """Unbind entity from this binding.""" + entity.unbind_from(self.exchange, + routing_key=self.routing_key, + arguments=self.unbind_arguments, + nowait=nowait) + + def __repr__(self): + return '<binding: %s>' % (self, ) + + def __str__(self): + return '%s->%s' % (self.exchange.name, self.routing_key) + + +class Queue(MaybeChannelBound): + """A Queue declaration. + + :keyword name: See :attr:`name`. + :keyword exchange: See :attr:`exchange`. + :keyword routing_key: See :attr:`routing_key`. + :keyword channel: See :attr:`channel`. + :keyword durable: See :attr:`durable`. + :keyword exclusive: See :attr:`exclusive`. + :keyword auto_delete: See :attr:`auto_delete`. + :keyword queue_arguments: See :attr:`queue_arguments`. + :keyword binding_arguments: See :attr:`binding_arguments`. + :keyword on_declared: See :attr:`on_declared` + + .. attribute:: name + + Name of the queue. Default is no name (default queue destination). + + .. attribute:: exchange + + The :class:`Exchange` the queue binds to. + + .. attribute:: routing_key + + The routing key (if any), also called *binding key*. + + The interpretation of the routing key depends on + the :attr:`Exchange.type`. + + * direct exchange + + Matches if the routing key property of the message and + the :attr:`routing_key` attribute are identical.
+ + * fanout exchange + + Always matches, even if the binding does not have a key. + + * topic exchange + + Matches the routing key property of the message by a primitive + pattern matching scheme. The message routing key then consists + of words separated by dots (`"."`, like domain names), and + two special characters are available; star (`"*"`) and hash + (`"#"`). The star matches any word, and the hash matches + zero or more words. For example `"*.stock.#"` matches the + routing keys `"usd.stock"` and `"eur.stock.db"` but not + `"stock.nasdaq"`. + + .. attribute:: channel + + The channel the Queue is bound to (if bound). + + .. attribute:: durable + + Durable queues remain active when a server restarts. + Non-durable queues (transient queues) are purged if/when + a server restarts. + Note that durable queues do not necessarily hold persistent + messages, although it does not make sense to send + persistent messages to a transient queue. + + Default is :const:`True`. + + .. attribute:: exclusive + + Exclusive queues may only be consumed from by the + current connection. Setting the 'exclusive' flag + always implies 'auto-delete'. + + Default is :const:`False`. + + .. attribute:: auto_delete + + If set, the queue is deleted when all consumers have + finished using it. Last consumer can be cancelled + either explicitly or because its channel is closed. If + there was no consumer ever on the queue, it won't be + deleted. + + .. attribute:: queue_arguments + + Additional arguments used when declaring the queue. + + .. attribute:: binding_arguments + + Additional arguments used when binding the queue. + + .. attribute:: alias + + Unused in Kombu, but applications can take advantage of this. + For example to give alternate names to queues with automatically + generated queue names. + + .. attribute:: on_declared + + Optional callback to be applied when the queue has been + declared (the ``queue_declare`` method returns). + This must be a function with a signature that accepts at least 3 + positional arguments: ``(name, messages, consumers)``. + + """ + name = '' + exchange = Exchange('') + routing_key = '' + + durable = True + exclusive = False + auto_delete = False + no_ack = False + + attrs = ( + ('name', None), + ('exchange', None), + ('routing_key', None), + ('queue_arguments', None), + ('binding_arguments', None), + ('durable', bool), + ('exclusive', bool), + ('auto_delete', bool), + ('no_ack', None), + ('alias', None), + ('bindings', list), + ) + + def __init__(self, name='', exchange=None, routing_key='', + channel=None, bindings=None, on_declared=None, + **kwargs): + super(Queue, self).__init__(**kwargs) + self.name = name or self.name + self.exchange = exchange or self.exchange + self.routing_key = routing_key or self.routing_key + self.bindings = set(bindings or []) + self.on_declared = on_declared + + # allows Queue('name', [binding(...), binding(...), ...]) + if isinstance(exchange, (list, tuple, set)): + self.bindings |= set(exchange) + if self.bindings: + self.exchange = None + + # exclusive implies auto-delete.
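The routing-key semantics above in practice; a sketch of a durable queue fed by a topic exchange, reusing the wildcard pattern from the docstring::

    from kombu import Exchange, Queue

    stock = Exchange('stock', type='topic')
    stock_queue = Queue('stock_reports', exchange=stock,
                        routing_key='*.stock.#')  # matches usd.stock, eur.stock.db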
+ if self.exclusive: + self.auto_delete = True + self.maybe_bind(channel) + + def bind(self, channel): + on_declared = self.on_declared + bound = super(Queue, self).bind(channel) + bound.on_declared = on_declared + return bound + + def __hash__(self): + return hash('Q|%s' % (self.name, )) + + def when_bound(self): + if self.exchange: + self.exchange = self.exchange(self.channel) + + def declare(self, nowait=False): + """Declares the queue, the exchange and binds the queue to + the exchange.""" + # - declare main binding. + if self.exchange: + self.exchange.declare(nowait) + self.queue_declare(nowait, passive=False) + + if self.exchange is not None: + self.queue_bind(nowait) + + # - declare extra/multi-bindings. + for B in self.bindings: + B.declare(self.channel) + B.bind(self, nowait=nowait) + return self.name + + def queue_declare(self, nowait=False, passive=False): + """Declare queue on the server. + + :keyword nowait: Do not wait for a reply. + :keyword passive: If set, the server will not create the queue. + The client can use this to check whether a queue exists + without modifying the server state. + + """ + ret = self.channel.queue_declare(queue=self.name, + passive=passive, + durable=self.durable, + exclusive=self.exclusive, + auto_delete=self.auto_delete, + arguments=self.queue_arguments, + nowait=nowait) + if not self.name: + self.name = ret[0] + if self.on_declared: + self.on_declared(*ret) + return ret + + def queue_bind(self, nowait=False): + """Create the queue binding on the server.""" + return self.bind_to(self.exchange, self.routing_key, + self.binding_arguments, nowait=nowait) + + def bind_to(self, exchange='', routing_key='', + arguments=None, nowait=False): + if isinstance(exchange, Exchange): + exchange = exchange.name + return self.channel.queue_bind(queue=self.name, + exchange=exchange, + routing_key=routing_key, + arguments=arguments, + nowait=nowait) + + def get(self, no_ack=None): + """Poll the server for a new message. + + Returns the message instance if a message was available, + or :const:`None` otherwise. + + :keyword no_ack: If set, messages received do not have to + be acknowledged. + + This method provides direct access to the messages in a + queue using a synchronous dialogue, designed for + specific types of applications where synchronous functionality + is more important than performance. + + """ + no_ack = self.no_ack if no_ack is None else no_ack + message = self.channel.basic_get(queue=self.name, no_ack=no_ack) + if message is not None: + m2p = getattr(self.channel, 'message_to_python', None) + if m2p: + message = m2p(message) + return message + + def purge(self, nowait=False): + """Remove all ready messages from the queue.""" + return self.channel.queue_purge(queue=self.name, + nowait=nowait) or 0 + + def consume(self, consumer_tag='', callback=None, + no_ack=None, nowait=False): + """Start a queue consumer. + + Consumers last as long as the channel they were created on, or + until the client cancels them. + + :keyword consumer_tag: Unique identifier for the consumer. The + consumer tag is local to a connection, so two clients + can use the same consumer tags. If this field is empty + the server will generate a unique tag. + + :keyword no_ack: If set, messages received do not have to + be acknowledged. + + :keyword nowait: Do not wait for a reply.
+ + :keyword callback: callback called for each delivered message + + """ + if no_ack is None: + no_ack = self.no_ack + return self.channel.basic_consume(queue=self.name, + no_ack=no_ack, + consumer_tag=consumer_tag or '', + callback=callback, + nowait=nowait) + + def cancel(self, consumer_tag): + """Cancel a consumer by consumer tag.""" + return self.channel.basic_cancel(consumer_tag) + + def delete(self, if_unused=False, if_empty=False, nowait=False): + """Delete the queue. + + :keyword if_unused: If set, the server will only delete the queue + if it has no consumers. A channel error will be raised + if the queue has consumers. + + :keyword if_empty: If set, the server will only delete the queue + if it is empty. If it is not empty a channel error will be raised. + + :keyword nowait: Do not wait for a reply. + + """ + return self.channel.queue_delete(queue=self.name, + if_unused=if_unused, + if_empty=if_empty, + nowait=nowait) + + def queue_unbind(self, arguments=None, nowait=False): + return self.unbind_from(self.exchange, self.routing_key, + arguments, nowait) + + def unbind_from(self, exchange='', routing_key='', + arguments=None, nowait=False): + """Unbind queue by deleting the binding from the server.""" + return self.channel.queue_unbind(queue=self.name, + exchange=exchange.name, + routing_key=routing_key, + arguments=arguments, + nowait=nowait) + + def __eq__(self, other): + if isinstance(other, Queue): + return (self.name == other.name and + self.exchange == other.exchange and + self.routing_key == other.routing_key and + self.queue_arguments == other.queue_arguments and + self.binding_arguments == other.binding_arguments and + self.durable == other.durable and + self.exclusive == other.exclusive and + self.auto_delete == other.auto_delete) + return False + + def __repr__(self): + s = super(Queue, self).__repr__ + if self.bindings: + return s('Queue %r -> %s' % ( + self.name, + pretty_bindings(self.bindings), + )) + return s('Queue %r -> %s -> %r' % ( + self.name, + self.exchange, + self.routing_key or '', + )) + + @property + def can_cache_declaration(self): + return self.durable + + @classmethod + def from_dict(self, queue, **options): + binding_key = options.get('binding_key') or options.get('routing_key') + + e_durable = options.get('exchange_durable') + if e_durable is None: + e_durable = options.get('durable') + + e_auto_delete = options.get('exchange_auto_delete') + if e_auto_delete is None: + e_auto_delete = options.get('auto_delete') + + q_durable = options.get('queue_durable') + if q_durable is None: + q_durable = options.get('durable') + + q_auto_delete = options.get('queue_auto_delete') + if q_auto_delete is None: + q_auto_delete = options.get('auto_delete') + + e_arguments = options.get('exchange_arguments') + q_arguments = options.get('queue_arguments') + b_arguments = options.get('binding_arguments') + bindings = options.get('bindings') + + exchange = Exchange(options.get('exchange'), + type=options.get('exchange_type'), + delivery_mode=options.get('delivery_mode'), + routing_key=options.get('routing_key'), + durable=e_durable, + auto_delete=e_auto_delete, + arguments=e_arguments) + return Queue(queue, + exchange=exchange, + routing_key=binding_key, + durable=q_durable, + exclusive=options.get('exclusive'), + auto_delete=q_auto_delete, + no_ack=options.get('no_ack'), + queue_arguments=q_arguments, + binding_arguments=b_arguments, + bindings=bindings) diff --git a/awx/lib/site-packages/kombu/exceptions.py b/awx/lib/site-packages/kombu/exceptions.py new file mode 
100644 index 0000000000..32261506bf --- /dev/null +++ b/awx/lib/site-packages/kombu/exceptions.py @@ -0,0 +1,74 @@ +""" +kombu.exceptions +================ + +Exceptions. + +""" +from __future__ import absolute_import + +import socket + +__all__ = ['NotBoundError', 'MessageStateError', 'TimeoutError', + 'LimitExceeded', 'ConnectionLimitExceeded', + 'ChannelLimitExceeded', 'StdConnectionError', + 'StdChannelError', 'VersionMismatch', 'SerializerNotInstalled'] + +TimeoutError = socket.timeout + + +class KombuError(Exception): + """Common subclass for all Kombu exceptions.""" + + +class NotBoundError(KombuError): + """Trying to call channel dependent method on unbound entity.""" + pass + + +class MessageStateError(KombuError): + """The message has already been acknowledged.""" + pass + + +class LimitExceeded(KombuError): + """Limit exceeded.""" + pass + + +class ConnectionLimitExceeded(LimitExceeded): + """Maximum number of simultaneous connections exceeded.""" + pass + + +class ChannelLimitExceeded(LimitExceeded): + """Maximum number of simultaneous channels exceeded.""" + pass + + +class StdConnectionError(KombuError): + pass + + +class StdChannelError(KombuError): + pass + + +class VersionMismatch(KombuError): + pass + + +class SerializerNotInstalled(KombuError): + """Support for the requested serialization type is not installed""" + pass + + +class ContentDisallowed(SerializerNotInstalled): + """Consumer does not allow this content-type.""" + pass + + +class InconsistencyError(StdConnectionError): + """Data or environment has been found to be inconsistent, + depending on the cause it may be possible to retry the operation.""" + pass diff --git a/awx/lib/site-packages/kombu/log.py b/awx/lib/site-packages/kombu/log.py new file mode 100644 index 0000000000..c3a7a912e7 --- /dev/null +++ b/awx/lib/site-packages/kombu/log.py @@ -0,0 +1,151 @@ +from __future__ import absolute_import + +import os +import logging +import sys + +from .utils import cached_property +from .utils.compat import WatchedFileHandler +from .utils.encoding import safe_repr, safe_str +from .utils.functional import maybe_promise + +__all__ = ['LogMixin', 'LOG_LEVELS', 'get_loglevel', 'setup_logging'] + +LOG_LEVELS = dict(logging._levelNames) +LOG_LEVELS['FATAL'] = logging.FATAL +LOG_LEVELS[logging.FATAL] = 'FATAL' +DISABLE_TRACEBACKS = os.environ.get('DISABLE_TRACEBACKS') + + +class NullHandler(logging.Handler): + + def emit(self, record): + pass + + +def get_logger(logger): + if isinstance(logger, basestring): + logger = logging.getLogger(logger) + if not logger.handlers: + logger.addHandler(NullHandler()) + return logger + + +def anon_logger(name): + logger = logging.getLogger(name) + logger.addHandler(NullHandler()) + return logger + + +def get_loglevel(level): + if isinstance(level, basestring): + return LOG_LEVELS[level] + return level + + +def naive_format_parts(fmt): + l = fmt.split('%') + for i, e in enumerate(l[1:]): + if not e or not l[i - 1]: + yield + elif e[0] in ['r', 's']: + yield e[0] + + +def safeify_format(fmt, *args): + for index, type in enumerate(naive_format_parts(fmt)): + if not type: + yield args[index] + elif type == 'r': + yield safe_repr(args[index]) + elif type == 's': + yield safe_str(args[index]) + + +class LogMixin(object): + + def debug(self, *args, **kwargs): + return self.log(logging.DEBUG, *args, **kwargs) + + def info(self, *args, **kwargs): + return self.log(logging.INFO, *args, **kwargs) + + def warn(self, *args, **kwargs): + return self.log(logging.WARN, *args, **kwargs) + + def 
error(self, *args, **kwargs): + return self._error(logging.ERROR, *args, **kwargs) + + def critical(self, *args, **kwargs): + return self._error(logging.CRITICAL, *args, **kwargs) + + def _error(self, severity, *args, **kwargs): + kwargs.setdefault('exc_info', True) + if DISABLE_TRACEBACKS: + kwargs.pop('exc_info', None) + return self.log(severity, *args, **kwargs) + + def annotate(self, text): + return '%s - %s' % (self.logger_name, text) + + def log(self, severity, *args, **kwargs): + if self.logger.isEnabledFor(severity): + log = self.logger.log + if len(args) > 1 and isinstance(args[0], basestring): + expand = [maybe_promise(arg) for arg in args[1:]] + return log(severity, + self.annotate(args[0].replace('%r', '%s')), + *list(safeify_format(args[0], *expand)), **kwargs) + else: + return self.logger.log( + severity, self.annotate(' '.join(map(safe_str, args))), + **kwargs) + + def get_logger(self): + return get_logger(self.logger_name) + + def is_enabled_for(self, level): + return self.logger.isEnabledFor(self.get_loglevel(level)) + + def get_loglevel(self, level): + if not isinstance(level, int): + return LOG_LEVELS[level] + return level + + @cached_property + def logger(self): + return self.get_logger() + + @property + def logger_name(self): + return self.__class__.__name__ + + +class Log(LogMixin): + + def __init__(self, name, logger=None): + self._logger_name = name + self._logger = logger + + def get_logger(self): + if self._logger: + return self._logger + return LogMixin.get_logger(self) + + @property + def logger_name(self): + return self._logger_name + + +def setup_logging(loglevel=None, logfile=None): + logger = logging.getLogger() + loglevel = get_loglevel(loglevel or 'ERROR') + logfile = logfile if logfile else sys.__stderr__ + if not logger.handlers: + if hasattr(logfile, 'write'): + handler = logging.StreamHandler(logfile) + else: + handler = WatchedFileHandler(logfile) + logger.addHandler(handler) + logger.setLevel(loglevel) + return logger diff --git a/awx/lib/site-packages/kombu/messaging.py b/awx/lib/site-packages/kombu/messaging.py new file mode 100644 index 0000000000..3067fc7b34 --- /dev/null +++ b/awx/lib/site-packages/kombu/messaging.py @@ -0,0 +1,572 @@ +""" +kombu.messaging +=============== + +Sending and receiving messages. + +""" +from __future__ import absolute_import + +from itertools import count + +from .connection import maybe_channel, is_connection +from .entity import Exchange, Queue, DELIVERY_MODES +from .compression import compress +from .serialization import encode, registry +from .utils import ChannelPromise, maybe_list + +__all__ = ['Exchange', 'Queue', 'Producer', 'Consumer'] + +# XXX compat attribute +entry_to_queue = Queue.from_dict + + +class Producer(object): + """Message Producer. + + :param channel: Connection or channel. + :keyword exchange: Optional default exchange. + :keyword routing_key: Optional default routing key. + :keyword serializer: Default serializer. Default is `"json"`. + :keyword compression: Default compression method. Default is no + compression. + :keyword auto_declare: Automatically declare the default exchange + at instantiation. Default is :const:`True`. + :keyword on_return: Callback to call for undeliverable messages, + when the `mandatory` or `immediate` arguments to + :meth:`publish` are used. This callback needs the following + signature: `(exception, exchange, routing_key, message)`. + Note that the producer needs to drain events to use this feature.
+ + """ + + #: Default exchange + exchange = None + + #: Default routing key. + routing_key = '' + + #: Default serializer to use. Default is JSON. + serializer = None + + #: Default compression method. Disabled by default. + compression = None + + #: By default the exchange is declared at instantiation. + #: If you want to declare manually then you can set this + #: to :const:`False`. + auto_declare = True + + #: Basic return callback. + on_return = None + + #: Set if channel argument was a Connection instance (using + #: default_channel). + __connection__ = None + + def __init__(self, channel, exchange=None, routing_key=None, + serializer=None, auto_declare=None, compression=None, + on_return=None): + self._channel = channel + self.exchange = exchange + self.routing_key = routing_key or self.routing_key + self.serializer = serializer or self.serializer + self.compression = compression or self.compression + self.on_return = on_return or self.on_return + self._channel_promise = None + if self.exchange is None: + self.exchange = Exchange('') + if auto_declare is not None: + self.auto_declare = auto_declare + + if self._channel: + self.revive(self._channel) + + def __reduce__(self): + return self.__class__, self.__reduce_args__() + + def __reduce_args__(self): + return (None, self.exchange, self.routing_key, self.serializer, + self.auto_declare, self.compression) + + def declare(self): + """Declare the exchange. + + This happens automatically at instantiation if + :attr:`auto_declare` is enabled. + + """ + if self.exchange.name: + self.exchange.declare() + + def maybe_declare(self, entity, retry=False, **retry_policy): + """Declare the exchange if it hasn't already been declared + during this session.""" + if entity: + from .common import maybe_declare + return maybe_declare(entity, self.channel, retry, **retry_policy) + + def publish(self, body, routing_key=None, delivery_mode=None, + mandatory=False, immediate=False, priority=0, + content_type=None, content_encoding=None, serializer=None, + headers=None, compression=None, exchange=None, retry=False, + retry_policy=None, declare=[], **properties): + """Publish message to the specified exchange. + + :param body: Message body. + :keyword routing_key: Message routing key. + :keyword delivery_mode: See :attr:`delivery_mode`. + :keyword mandatory: Currently not supported. + :keyword immediate: Currently not supported. + :keyword priority: Message priority. A number between 0 and 9. + :keyword content_type: Content type. Default is auto-detect. + :keyword content_encoding: Content encoding. Default is auto-detect. + :keyword serializer: Serializer to use. Default is auto-detect. + :keyword compression: Compression method to use. Default is none. + :keyword headers: Mapping of arbitrary headers to pass along + with the message body. + :keyword exchange: Override the exchange. Note that this exchange + must have been declared. + :keyword declare: Optional list of required entities that must + have been declared before publishing the message. The entities + will be declared using :func:`~kombu.common.maybe_declare`. + :keyword retry: Retry publishing, or declaring entities if the + connection is lost. + :keyword retry_policy: Retry configuration, this is the keywords + supported by :meth:`~kombu.Connection.ensure`. + :keyword \*\*properties: Additional message properties, see AMQP spec. 
+ + """ + headers = {} if headers is None else headers + retry_policy = {} if retry_policy is None else retry_policy + routing_key = self.routing_key if routing_key is None else routing_key + compression = self.compression if compression is None else compression + exchange = exchange or self.exchange + + if isinstance(exchange, Exchange): + delivery_mode = delivery_mode or exchange.delivery_mode + exchange = exchange.name + else: + delivery_mode = delivery_mode or self.exchange.delivery_mode + if not isinstance(delivery_mode, (int, long)): + delivery_mode = DELIVERY_MODES[delivery_mode] + properties['delivery_mode'] = delivery_mode + + body, content_type, content_encoding = self._prepare( + body, serializer, content_type, content_encoding, + compression, headers) + + publish = self._publish + if retry: + publish = self.connection.ensure(self, publish, **retry_policy) + return publish(body, priority, content_type, + content_encoding, headers, properties, + routing_key, mandatory, immediate, exchange, declare) + + def _publish(self, body, priority, content_type, content_encoding, + headers, properties, routing_key, mandatory, + immediate, exchange, declare): + channel = self.channel + message = channel.prepare_message( + body, priority, content_type, + content_encoding, headers, properties, + ) + if declare: + maybe_declare = self.maybe_declare + [maybe_declare(entity) for entity in declare] + return channel.basic_publish( + message, + exchange=exchange, routing_key=routing_key, + mandatory=mandatory, immediate=immediate, + ) + + def _get_channel(self): + channel = self._channel + if isinstance(channel, ChannelPromise): + channel = self._channel = channel() + self.exchange.revive(channel) + if self.on_return: + channel.events['basic_return'].add(self.on_return) + return channel + + def _set_channel(self, channel): + self._channel = channel + channel = property(_get_channel, _set_channel) + + def revive(self, channel): + """Revive the producer after connection loss.""" + if is_connection(channel): + connection = channel + self.__connection__ = connection + channel = ChannelPromise(lambda: connection.default_channel) + if isinstance(channel, ChannelPromise): + self._channel = channel + self.exchange = self.exchange(channel) + else: + # Channel already concrete + self._channel = channel + if self.on_return: + self._channel.events['basic_return'].add(self.on_return) + self.exchange = self.exchange(channel) + if self.auto_declare: + # auto_decare is not recommended as this will force + # evaluation of the channel. + self.declare() + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.release() + + def release(self): + pass + close = release + + def _prepare(self, body, serializer=None, content_type=None, + content_encoding=None, compression=None, headers=None): + + # No content_type? Then we're serializing the data internally. + if not content_type: + serializer = serializer or self.serializer + (content_type, content_encoding, + body) = encode(body, serializer=serializer) + else: + # If the programmer doesn't want us to serialize, + # make sure content_encoding is set. + if isinstance(body, unicode): + if not content_encoding: + content_encoding = 'utf-8' + body = body.encode(content_encoding) + + # If they passed in a string, we can't know anything + # about it. So assume it's binary data. 
+            elif not content_encoding:
+                content_encoding = 'binary'
+
+        if compression:
+            body, headers['compression'] = compress(body, compression)
+
+        return body, content_type, content_encoding
+
+    @property
+    def connection(self):
+        try:
+            return self.__connection__ or self.channel.connection.client
+        except AttributeError:
+            pass
+
+
+class Consumer(object):
+    """Message consumer.
+
+    :param channel: see :attr:`channel`.
+    :param queues: see :attr:`queues`.
+    :keyword no_ack: see :attr:`no_ack`.
+    :keyword auto_declare: see :attr:`auto_declare`
+    :keyword callbacks: see :attr:`callbacks`.
+    :keyword on_message: See :attr:`on_message`
+    :keyword on_decode_error: see :attr:`on_decode_error`.
+
+    """
+    #: The connection/channel to use for this consumer.
+    channel = None
+
+    #: A single :class:`~kombu.Queue`, or a list of queues to
+    #: consume from.
+    queues = None
+
+    #: Flag for message acknowledgment disabled/enabled.
+    #: Enabled by default.
+    no_ack = None
+
+    #: By default all entities will be declared at instantiation, if you
+    #: want to handle this manually you can set this to :const:`False`.
+    auto_declare = True
+
+    #: List of callbacks called in order when a message is received.
+    #:
+    #: The signature of the callbacks must take two arguments:
+    #: `(body, message)`, which is the decoded message body and
+    #: the `Message` instance (a subclass of
+    #: :class:`~kombu.transport.base.Message`).
+    callbacks = None
+
+    #: Optional function called whenever a message is received.
+    #:
+    #: When defined this function will be called instead of the
+    #: :meth:`receive` method, and :attr:`callbacks` will be disabled.
+    #:
+    #: So this can be used as an alternative to :attr:`callbacks` when
+    #: you don't want the body to be automatically decoded.
+    #: Note that the message will still be decompressed if the message
+    #: has the ``compression`` header set.
+    #:
+    #: The signature of the callback must take a single argument,
+    #: which is the raw message object (a subclass of
+    #: :class:`~kombu.transport.base.Message`).
+    #:
+    #: Also note that the ``message.body`` attribute, which is the raw
+    #: contents of the message body, may in some cases be a read-only
+    #: :class:`buffer` object.
+    on_message = None
+
+    #: Callback called when a message can't be decoded.
+    #:
+    #: The signature of the callback must take two arguments: `(message,
+    #: exc)`, which is the message that can't be decoded and the exception
+    #: that occurred while trying to decode it.
+    on_decode_error = None
+
+    #: List of accepted content-types.
+    #:
+    #: An exception will be raised if the consumer receives
+    #: a message with an untrusted content type.
+    #: By default all content-types are accepted, but not if
+    #: :func:`kombu.disable_insecure_serializers` was called,
+    #: in which case only json is allowed.
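+    #:
+    #: Example, restricting a consumer to JSON only (an illustrative
+    #: call, with ``channel`` and ``queues`` assumed to exist)::
+    #:
+    #:     consumer = Consumer(channel, queues, accept=['json'])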
+    accept = None
+
+    _next_tag = count(1).next  # global
+
+    def __init__(self, channel, queues=None, no_ack=None, auto_declare=None,
+                 callbacks=None, on_decode_error=None, on_message=None,
+                 accept=None):
+        self.channel = channel
+        self.queues = self.queues or [] if queues is None else queues
+        self.no_ack = self.no_ack if no_ack is None else no_ack
+        self.callbacks = (self.callbacks or [] if callbacks is None
+                          else callbacks)
+        self.on_message = on_message
+        self._active_tags = {}
+        if auto_declare is not None:
+            self.auto_declare = auto_declare
+        if on_decode_error is not None:
+            self.on_decode_error = on_decode_error
+        self.accept = accept
+
+        if self.accept is not None:
+            self.accept = set(
+                n if '/' in n else registry.name_to_type[n]
+                for n in self.accept
+            )
+
+        if self.channel:
+            self.revive(self.channel)
+
+    def revive(self, channel):
+        """Revive consumer after connection loss."""
+        self._active_tags.clear()
+        channel = self.channel = maybe_channel(channel)
+        self.queues = [queue(self.channel)
+                       for queue in maybe_list(self.queues)]
+        for queue in self.queues:
+            queue.revive(channel)
+
+        if self.auto_declare:
+            self.declare()
+
+    def declare(self):
+        """Declare queues, exchanges and bindings.
+
+        This is done automatically at instantiation if :attr:`auto_declare`
+        is set.
+
+        """
+        for queue in self.queues:
+            queue.declare()
+
+    def register_callback(self, callback):
+        """Register a new callback to be called when a message
+        is received.
+
+        The signature of the callback needs to accept two arguments:
+        `(body, message)`, which is the decoded message body
+        and the `Message` instance (a subclass of
+        :class:`~kombu.transport.base.Message`).
+
+        """
+        self.callbacks.append(callback)
+
+    def __enter__(self):
+        self.consume()
+        return self
+
+    def __exit__(self, *exc_info):
+        try:
+            self.cancel()
+        except Exception:
+            pass
+
+    def add_queue(self, queue):
+        queue = queue(self.channel)
+        if self.auto_declare:
+            queue.declare()
+        self.queues.append(queue)
+        return queue
+
+    def add_queue_from_dict(self, queue, **options):
+        return self.add_queue(Queue.from_dict(queue, **options))
+
+    def consume(self, no_ack=None):
+        if self.queues:
+            no_ack = self.no_ack if no_ack is None else no_ack
+
+            H, T = self.queues[:-1], self.queues[-1]
+            for queue in H:
+                self._basic_consume(queue, no_ack=no_ack, nowait=True)
+            self._basic_consume(T, no_ack=no_ack, nowait=False)
+
+    def cancel(self):
+        """End all active queue consumers.
+
+        This does not affect already delivered messages, but it does
+        mean the server will not send any more messages for this consumer.
+
+        """
+        cancel = self.channel.basic_cancel
+        for tag in self._active_tags.itervalues():
+            cancel(tag)
+        self._active_tags.clear()
+    close = cancel
+
+    def cancel_by_queue(self, queue):
+        """Cancel consumer by queue name."""
+        try:
+            tag = self._active_tags.pop(queue)
+        except KeyError:
+            pass
+        else:
+            self.queues[:] = [q for q in self.queues if q.name != queue]
+            self.channel.basic_cancel(tag)
+
+    def consuming_from(self, queue):
+        name = queue
+        if isinstance(queue, Queue):
+            name = queue.name
+        return any(q.name == name for q in self.queues)
+
+    def purge(self):
+        """Purge messages from all queues.
+
+        .. warning::
+            This will *delete all ready messages*, there is no
+            undo operation.
+
+        """
+        return sum(queue.purge() for queue in self.queues)
+
+    def flow(self, active):
+        """Enable/disable flow from peer.
+
+        This is a simple flow-control mechanism that a peer can use
+        to avoid overflowing its queues or otherwise finding itself
+        receiving more messages than it can process.
+
+        The peer that receives a request to stop sending content
+        will finish sending the current content (if any), and then wait
+        until flow is reactivated.
+
+        """
+        self.channel.flow(active)
+
+    def qos(self, prefetch_size=0, prefetch_count=0, apply_global=False):
+        """Specify quality of service.
+
+        The client can request that messages should be sent in
+        advance so that when the client finishes processing a message,
+        the following message is already held locally, rather than needing
+        to be sent down the channel. Prefetching gives a performance
+        improvement.
+
+        The prefetch window is ignored if the :attr:`no_ack` option is set.
+
+        :param prefetch_size: Specify the prefetch window in octets.
+            The server will send a message in advance if it is equal to
+            or smaller in size than the available prefetch size (and
+            also falls within other prefetch limits). May be set to zero,
+            meaning "no specific limit", although other prefetch limits
+            may still apply.
+
+        :param prefetch_count: Specify the prefetch window in terms of
+            whole messages.
+
+        :param apply_global: Apply new settings globally on all channels.
+            Currently not supported by RabbitMQ.
+
+        """
+        return self.channel.basic_qos(prefetch_size,
+                                      prefetch_count,
+                                      apply_global)
+
+    def recover(self, requeue=False):
+        """Redeliver unacknowledged messages.
+
+        Asks the broker to redeliver all unacknowledged messages
+        on the specified channel.
+
+        :keyword requeue: By default the messages will be redelivered
+            to the original recipient. With `requeue` set to true, the
+            server will attempt to requeue the message, potentially then
+            delivering it to an alternative subscriber.
+
+        """
+        return self.channel.basic_recover(requeue=requeue)
+
+    def receive(self, body, message):
+        """Method called when a message is received.
+
+        This dispatches to the registered :attr:`callbacks`.
+
+        :param body: The decoded message body.
+        :param message: The `Message` instance.
+
+        :raises NotImplementedError: If no consumer callbacks have been
+            registered.
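+
+        A callback with the expected `(body, message)` signature might
+        look like this (an illustrative sketch)::
+
+            def on_task(body, message):
+                print('Got: %r' % (body, ))
+                message.ack()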
+ + """ + callbacks = self.callbacks + if not callbacks: + raise NotImplementedError('Consumer does not have any callbacks') + [callback(body, message) for callback in callbacks] + + def _basic_consume(self, queue, consumer_tag=None, + no_ack=no_ack, nowait=True): + tag = self._active_tags.get(queue.name) + if tag is None: + tag = self._add_tag(queue, consumer_tag) + queue.consume(tag, self._receive_callback, + no_ack=no_ack, nowait=nowait) + return tag + + def _add_tag(self, queue, consumer_tag=None): + tag = consumer_tag or str(self._next_tag()) + self._active_tags[queue.name] = tag + return tag + + def _receive_callback(self, message): + accept = self.accept + if accept is not None: + message.accept = accept + on_m, channel, decoded = self.on_message, self.channel, None + try: + m2p = getattr(channel, 'message_to_python', None) + if m2p: + message = m2p(message) + decoded = None if on_m else message.decode() + except Exception, exc: + if not self.on_decode_error: + raise + self.on_decode_error(message, exc) + else: + return on_m(message) if on_m else self.receive(decoded, message) + + def __repr__(self): + return '' % (self.queues, ) + + @property + def connection(self): + try: + return self.channel.connection.client + except AttributeError: + pass diff --git a/awx/lib/site-packages/kombu/mixins.py b/awx/lib/site-packages/kombu/mixins.py new file mode 100644 index 0000000000..82b9733e92 --- /dev/null +++ b/awx/lib/site-packages/kombu/mixins.py @@ -0,0 +1,235 @@ +""" +kombu.mixins +============ + +Useful mixin classes. + +""" +from __future__ import absolute_import +from __future__ import with_statement + +import socket + +from contextlib import contextmanager +from functools import partial +from itertools import count + +from .common import ignore_errors +from .messaging import Consumer +from .log import get_logger +from .utils import cached_property, nested +from .utils.encoding import safe_repr +from .utils.limits import TokenBucket + +__all__ = ['ConsumerMixin'] + +logger = get_logger(__name__) +debug, info, warn, error = logger.debug, logger.info, logger.warn, logger.error + + +class ConsumerMixin(object): + """Convenience mixin for implementing consumer threads. + + It can be used outside of threads, with threads, or greenthreads + (eventlet/gevent) too. + + The basic class would need a :attr:`connection` attribute + which must be a :class:`~kombu.Connection` instance, + and define a :meth:`get_consumers` method that returns a list + of :class:`kombu.Consumer` instances to use. + Supporting multiple consumers is important so that multiple + channels can be used for different QoS requirements. + + **Example**: + + .. code-block:: python + + + class Worker(ConsumerMixin): + task_queue = Queue('tasks', Exchange('tasks'), 'tasks')) + + def __init__(self, connection): + self.connection = None + + def get_consumers(self, Consumer, channel): + return [Consumer(queues=[self.task_queue], + callback=[self.on_task])] + + def on_task(self, body, message): + print('Got task: %r' % (body, )) + message.ack() + + **Additional handler methods**: + + * :meth:`extra_context` + + Optional extra context manager that will be entered + after the connection and consumers have been set up. + + Takes arguments ``(connection, channel)``. + + * :meth:`on_connection_error` + + Handler called if the connection is lost/ or + is unavailable. + + Takes arguments ``(exc, interval)``, where interval + is the time in seconds when the connection will be retried. + + The default handler will log the exception. 
+
+    * :meth:`on_connection_revived`
+
+        Handler called when the connection is re-established
+        after connection failure.
+
+        Takes no arguments.
+
+    * :meth:`on_consume_ready`
+
+        Handler called when the consumer is ready to accept
+        messages.
+
+        Takes arguments ``(connection, channel, consumers)``.
+        Also keyword arguments to ``consume`` are forwarded
+        to this handler.
+
+    * :meth:`on_consume_end`
+
+        Handler called after the consumers are cancelled.
+        Takes arguments ``(connection, channel)``.
+
+    * :meth:`on_iteration`
+
+        Handler called for every iteration while draining
+        events.
+
+        Takes no arguments.
+
+    * :meth:`on_decode_error`
+
+        Handler called if a consumer was unable to decode
+        the body of a message.
+
+        Takes arguments ``(message, exc)`` where message is the
+        original message object.
+
+        The default handler will log the error and
+        acknowledge the message, so if you override make
+        sure to call super, or perform these steps yourself.
+
+    """
+
+    #: maximum number of retries trying to re-establish the connection,
+    #: if the connection is lost/unavailable.
+    connect_max_retries = None
+
+    #: When this is set to true the consumer should stop consuming
+    #: and return, so that it can be joined if it is the implementation
+    #: of a thread.
+    should_stop = False
+
+    def get_consumers(self, Consumer, channel):
+        raise NotImplementedError('Subclass responsibility')
+
+    def on_connection_revived(self):
+        pass
+
+    def on_consume_ready(self, connection, channel, consumers, **kwargs):
+        pass
+
+    def on_consume_end(self, connection, channel):
+        pass
+
+    def on_iteration(self):
+        pass
+
+    def on_decode_error(self, message, exc):
+        error("Can't decode message body: %r (type:%r encoding:%r raw:%r)",
+              exc, message.content_type, message.content_encoding,
+              safe_repr(message.body))
+        message.ack()
+
+    def on_connection_error(self, exc, interval):
+        warn('Broker connection error: %r. '
+             'Trying again in %s seconds.', exc, interval)
+
+    @contextmanager
+    def extra_context(self, connection, channel):
+        yield
+
+    def run(self):
+        while not self.should_stop:
+            try:
+                if self.restart_limit.can_consume(1):
+                    for _ in self.consume(limit=None):
+                        pass
+            except self.connection.connection_errors:
+                warn('Connection to broker lost. '
+                     'Trying to re-establish the connection...')
+
+    def consume(self, limit=None, timeout=None, safety_interval=1, **kwargs):
+        elapsed = 0
+        with self.Consumer() as (connection, channel, consumers):
+            with self.extra_context(connection, channel):
+                self.on_consume_ready(connection, channel, consumers, **kwargs)
+                for i in limit and xrange(limit) or count():
+                    if self.should_stop:
+                        break
+                    self.on_iteration()
+                    try:
+                        connection.drain_events(timeout=safety_interval)
+                    except socket.timeout:
+                        elapsed += safety_interval
+                        if timeout and elapsed >= timeout:
+                            raise socket.timeout()
+                    except socket.error:
+                        if not self.should_stop:
+                            raise
+                    else:
+                        yield
+                        elapsed = 0
+        debug('consume exiting')
+
+    def maybe_conn_error(self, fun):
+        """Use :func:`kombu.common.ignore_errors` instead."""
+        return ignore_errors(self, fun)
+
+    @contextmanager
+    def establish_connection(self):
+        with self.connection.clone() as conn:
+            conn.ensure_connection(self.on_connection_error,
+                                   self.connect_max_retries)
+            yield conn
+
+    @contextmanager
+    def Consumer(self):
+        with self.establish_connection() as conn:
+            self.on_connection_revived()
+            info('Connected to %s', conn.as_uri())
+            channel = conn.default_channel
+            cls = partial(Consumer, channel,
+                          on_decode_error=self.on_decode_error)
+            with self._consume_from(*self.get_consumers(cls, channel)) as c:
+                yield conn, channel, c
+            debug('Consumers cancelled')
+        self.on_consume_end(conn, channel)
+        debug('Connection closed')
+
+    def _consume_from(self, *consumers):
+        return nested(*consumers)
+
+    @cached_property
+    def restart_limit(self):
+        # the AttributeError that can be caught from amqplib
+        # poses problems for the too often restarts protection
+        # in Connection.ensure_connection
+        return TokenBucket(1)
+
+    @cached_property
+    def connection_errors(self):
+        return self.connection.connection_errors
+
+    @cached_property
+    def channel_errors(self):
+        return self.connection.channel_errors
diff --git a/awx/lib/site-packages/kombu/pidbox.py b/awx/lib/site-packages/kombu/pidbox.py
new file mode 100644
index 0000000000..cc29ce1a99
--- /dev/null
+++ b/awx/lib/site-packages/kombu/pidbox.py
@@ -0,0 +1,331 @@
+"""
+kombu.pidbox
+===============
+
+Generic process mailbox.
+
+"""
+from __future__ import absolute_import
+from __future__ import with_statement
+
+import socket
+import warnings
+
+from collections import defaultdict, deque
+from copy import copy
+from itertools import count
+from threading import local
+from time import time
+
+from . import Exchange, Queue, Consumer, Producer
+from .clocks import LamportClock
+from .common import maybe_declare, oid_from
+from .utils import cached_property, kwdict, uuid
+
+REPLY_QUEUE_EXPIRES = 10
+
+W_PIDBOX_IN_USE = """\
+A node named %(hostname)r is already using this process mailbox!
+
+Maybe you forgot to shutdown the other node or did not do so properly?
+Or if you meant to start multiple nodes on the same host please make sure
+you give each node a unique node name!
+"""
+
+__all__ = ['Node', 'Mailbox']
+
+
+class Node(object):
+
+    #: hostname of the node.
+    hostname = None
+
+    #: the :class:`Mailbox` this is a node for.
+    mailbox = None
+
+    #: map of method name/handlers.
+    handlers = None
+
+    #: current context (passed on to handlers)
+    state = None
+
+    #: current channel.
+    channel = None
+
+    def __init__(self, hostname, state=None, channel=None,
+                 handlers=None, mailbox=None):
+        self.channel = channel
+        self.mailbox = mailbox
+        self.hostname = hostname
+        self.state = state
+        self.adjust_clock = self.mailbox.clock.adjust
+        if handlers is None:
+            handlers = {}
+        self.handlers = handlers
+
+    def Consumer(self, channel=None, **options):
+        options.setdefault('no_ack', True)
+        options.setdefault('accept', self.mailbox.accept)
+        queue = self.mailbox.get_queue(self.hostname)
+
+        def verify_exclusive(name, messages, consumers):
+            if consumers:
+                warnings.warn(W_PIDBOX_IN_USE % {'hostname': self.hostname})
+        queue.on_declared = verify_exclusive
+
+        return Consumer(channel or self.channel, [queue], **options)
+
+    def handler(self, fun):
+        self.handlers[fun.__name__] = fun
+        return fun
+
+    def listen(self, channel=None, callback=None):
+        callback = callback or self.handle_message
+        consumer = self.Consumer(channel=channel,
+                                 callbacks=[callback or self.handle_message])
+        consumer.consume()
+        return consumer
+
+    def dispatch(self, method, arguments=None,
+                 reply_to=None, ticket=None, **kwargs):
+        arguments = arguments or {}
+        handle = reply_to and self.handle_call or self.handle_cast
+        try:
+            reply = handle(method, kwdict(arguments))
+        except SystemExit:
+            raise
+        except Exception, exc:
+            reply = {'error': repr(exc)}
+
+        if reply_to:
+            self.reply({self.hostname: reply},
+                       exchange=reply_to['exchange'],
+                       routing_key=reply_to['routing_key'],
+                       ticket=ticket)
+        return reply
+
+    def handle(self, method, arguments={}):
+        return self.handlers[method](self.state, **arguments)
+
+    def handle_call(self, method, arguments):
+        return self.handle(method, arguments)
+
+    def handle_cast(self, method, arguments):
+        return self.handle(method, arguments)
+
+    def handle_message(self, body, message=None):
+        destination = body.get('destination')
+        if message:
+            self.adjust_clock(message.headers.get('clock') or 0)
+        if not destination or self.hostname in destination:
+            return self.dispatch(**kwdict(body))
+    dispatch_from_message = handle_message
+
+    def reply(self, data, exchange, routing_key, ticket, **kwargs):
+        self.mailbox._publish_reply(data, exchange, routing_key, ticket,
+                                    channel=self.channel)
+
+
+class Mailbox(object):
+    node_cls = Node
+    exchange_fmt = '%s.pidbox'
+    reply_exchange_fmt = 'reply.%s.pidbox'
+
+    #: Name of application.
+    namespace = None
+
+    #: Connection (if bound).
+    connection = None
+
+    #: Exchange type (usually direct, or fanout for broadcast).
+    type = 'direct'
+
+    #: mailbox exchange (init by constructor).
+    exchange = None
+
+    #: exchange to send replies to.
+    reply_exchange = None
+
+    def __init__(self, namespace,
+                 type='direct', connection=None, clock=None, accept=None):
+        self.namespace = namespace
+        self.connection = connection
+        self.type = type
+        self.clock = LamportClock() if clock is None else clock
+        self.exchange = self._get_exchange(self.namespace, self.type)
+        self.reply_exchange = self._get_reply_exchange(self.namespace)
+        self._tls = local()
+        self.unclaimed = defaultdict(deque)
+        self.accept = accept
+
+    def __call__(self, connection):
+        bound = copy(self)
+        bound.connection = connection
+        return bound
+
+    def Node(self, hostname=None, state=None, channel=None, handlers=None):
+        hostname = hostname or socket.gethostname()
+        return self.node_cls(hostname, state, channel, handlers, mailbox=self)
+
+    def call(self, destination, command, kwargs={},
+             timeout=None, callback=None, channel=None):
+        return self._broadcast(command, kwargs, destination,
+                               reply=True, timeout=timeout,
+                               callback=callback,
+                               channel=channel)
+
+    def cast(self, destination, command, kwargs={}):
+        return self._broadcast(command, kwargs, destination, reply=False)
+
+    def abcast(self, command, kwargs={}):
+        return self._broadcast(command, kwargs, reply=False)
+
+    def multi_call(self, command, kwargs={}, timeout=1,
+                   limit=None, callback=None, channel=None):
+        return self._broadcast(command, kwargs, reply=True,
+                               timeout=timeout, limit=limit,
+                               callback=callback,
+                               channel=channel)
+
+    def get_reply_queue(self):
+        oid = self.oid
+        return Queue('%s.%s' % (oid, self.reply_exchange.name),
+                     exchange=self.reply_exchange,
+                     routing_key=oid,
+                     durable=False,
+                     auto_delete=True,
+                     queue_arguments={
+                         'x-expires': int(REPLY_QUEUE_EXPIRES * 1000),
+                     })
+
+    @cached_property
+    def reply_queue(self):
+        return self.get_reply_queue()
+
+    def get_queue(self, hostname):
+        return Queue('%s.%s.pidbox' % (hostname, self.namespace),
+                     exchange=self.exchange,
+                     durable=False,
+                     auto_delete=True)
+
+    def _publish_reply(self, reply, exchange, routing_key, ticket,
+                       channel=None):
+        chan = channel or self.connection.default_channel
+        exchange = Exchange(exchange, exchange_type='direct',
+                            delivery_mode='transient',
+                            durable=False)
+        producer = Producer(chan, auto_declare=False)
+        producer.publish(reply, exchange=exchange, routing_key=routing_key,
+                         declare=[exchange], headers={
+                             'ticket': ticket, 'clock': self.clock.forward()})
+
+    def _publish(self, type, arguments, destination=None,
+                 reply_ticket=None, channel=None, timeout=None):
+        message = {'method': type,
+                   'arguments': arguments,
+                   'destination': destination}
+        chan = channel or self.connection.default_channel
+        exchange = self.exchange
+        if reply_ticket:
+            maybe_declare(self.reply_queue(channel))
+            message.update(ticket=reply_ticket,
+                           reply_to={'exchange': self.reply_exchange.name,
+                                     'routing_key': self.oid})
+        producer = Producer(chan, auto_declare=False)
+        producer.publish(
+            message, exchange=exchange.name, declare=[exchange],
+            headers={'clock': self.clock.forward(),
+                     'expires': time() + timeout if timeout else None},
+        )
+
+    def _broadcast(self, command, arguments=None, destination=None,
+                   reply=False, timeout=1, limit=None,
+                   callback=None, channel=None):
+        if destination is not None and \
+                not isinstance(destination, (list, tuple)):
+            raise ValueError(
+                'destination must be a list/tuple not %s' % (
+                    type(destination), ))
+
+        arguments = arguments or {}
+        reply_ticket = reply and uuid() or None
+        chan = channel or self.connection.default_channel
+
+        # Set reply limit to number of destinations (if specified)
+        if limit is None and destination:
+            limit = destination and len(destination) or None
+
+        self._publish(command, arguments, destination=destination,
+                      reply_ticket=reply_ticket,
+                      channel=chan,
+                      timeout=timeout)
+
+        if reply_ticket:
+            return self._collect(reply_ticket, limit=limit,
+                                 timeout=timeout,
+                                 callback=callback,
+                                 channel=chan)
+
+    def _collect(self, ticket,
+                 limit=None, timeout=1, callback=None,
+                 channel=None, accept=None):
+        if accept is None:
+            accept = self.accept
+        chan = channel or self.connection.default_channel
+        queue = self.reply_queue
+        consumer = Consumer(chan, [queue], accept=accept, no_ack=True)
+        responses = []
+        unclaimed = self.unclaimed
+        adjust_clock = self.clock.adjust
+
+        try:
+            return unclaimed.pop(ticket)
+        except KeyError:
+            pass
+
+        def on_message(body, message):
+            # ticket header added in kombu 2.5
+            header = message.headers.get
+            adjust_clock(header('clock') or 0)
+            expires = header('expires')
+            if expires and time() > expires:
+                return
+            this_id = header('ticket', ticket)
+            if this_id == ticket:
+                if callback:
+                    callback(body)
+                responses.append(body)
+            else:
+                unclaimed[this_id].append(body)
+
+        consumer.register_callback(on_message)
+        try:
+            with consumer:
+                for i in limit and range(limit) or count():
+                    try:
+                        self.connection.drain_events(timeout=timeout)
+                    except socket.timeout:
+                        break
+                return responses
+        finally:
+            chan.after_reply_message_received(queue.name)
+
+    def _get_exchange(self, namespace, type):
+        return Exchange(self.exchange_fmt % namespace,
+                        type=type,
+                        durable=False,
+                        delivery_mode='transient')
+
+    def _get_reply_exchange(self, namespace):
+        return Exchange(self.reply_exchange_fmt % namespace,
+                        type='direct',
+                        durable=False,
+                        delivery_mode='transient')
+
+    @cached_property
+    def oid(self):
+        try:
+            return self._tls.OID
+        except AttributeError:
+            oid = self._tls.OID = oid_from(self)
+            return oid
diff --git a/awx/lib/site-packages/kombu/pools.py b/awx/lib/site-packages/kombu/pools.py
new file mode 100644
index 0000000000..7d0ed519ca
--- /dev/null
+++ b/awx/lib/site-packages/kombu/pools.py
@@ -0,0 +1,152 @@
+"""
+kombu.pools
+===========
+
+Public resource pools.
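+
+Example, assuming an existing :class:`~kombu.Connection`; the broker
+URL and message below are illustrative::
+
+    from kombu import Connection
+    from kombu.pools import producers
+
+    connection = Connection('amqp://guest:guest@localhost//')
+    with producers[connection].acquire(block=True) as producer:
+        # Publishes via the default exchange using a pooled producer.
+        producer.publish({'hello': 'world'}, routing_key='hello')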
+
+"""
+from __future__ import absolute_import
+
+import os
+
+from itertools import chain
+
+from .connection import Resource
+from .messaging import Producer
+from .utils import EqualityDict
+from .utils.functional import promise
+
+__all__ = ['ProducerPool', 'PoolGroup', 'register_group',
+           'connections', 'producers', 'get_limit', 'set_limit', 'reset']
+_limit = [200]
+_used = [False]
+_groups = []
+use_global_limit = object()
+disable_limit_protection = os.environ.get('KOMBU_DISABLE_LIMIT_PROTECTION')
+
+
+class ProducerPool(Resource):
+    Producer = Producer
+
+    def __init__(self, connections, *args, **kwargs):
+        self.connections = connections
+        self.Producer = kwargs.pop('Producer', None) or self.Producer
+        super(ProducerPool, self).__init__(*args, **kwargs)
+
+    def _acquire_connection(self):
+        return self.connections.acquire(block=True)
+
+    def create_producer(self):
+        conn = self._acquire_connection()
+        try:
+            return self.Producer(conn)
+        except BaseException:
+            conn.release()
+            raise
+
+    def new(self):
+        return promise(self.create_producer)
+
+    def setup(self):
+        if self.limit:
+            for _ in xrange(self.limit):
+                self._resource.put_nowait(self.new())
+
+    def close_resource(self, resource):
+        pass
+
+    def prepare(self, p):
+        if callable(p):
+            p = p()
+        if p._channel is None:
+            conn = self._acquire_connection()
+            try:
+                p.revive(conn)
+            except BaseException:
+                conn.release()
+                raise
+        return p
+
+    def release(self, resource):
+        if resource.__connection__:
+            resource.__connection__.release()
+        resource.channel = None
+        super(ProducerPool, self).release(resource)
+
+
+class PoolGroup(EqualityDict):
+
+    def __init__(self, limit=None):
+        self.limit = limit
+
+    def create(self, resource, limit):
+        raise NotImplementedError('PoolGroups must define ``create``')
+
+    def __missing__(self, resource):
+        limit = self.limit
+        if limit is use_global_limit:
+            limit = get_limit()
+        if not _used[0]:
+            _used[0] = True
+        k = self[resource] = self.create(resource, limit)
+        return k
+
+
+def register_group(group):
+    _groups.append(group)
+    return group
+
+
+class Connections(PoolGroup):
+
+    def create(self, connection, limit):
+        return connection.Pool(limit=limit)
+connections = register_group(Connections(limit=use_global_limit))
+
+
+class Producers(PoolGroup):
+
+    def create(self, connection, limit):
+        return ProducerPool(connections[connection], limit=limit)
+producers = register_group(Producers(limit=use_global_limit))
+
+
+def _all_pools():
+    return chain(*[(g.itervalues() if g else iter([])) for g in _groups])
+
+
+def get_limit():
+    return _limit[0]
+
+
+def set_limit(limit, force=False, reset_after=False):
+    limit = limit or 0
+    glimit = _limit[0] or 0
+    if limit < glimit:
+        if not disable_limit_protection and (_used[0] and not force):
+            raise RuntimeError("Can't lower limit after pool in use.")
+        reset_after = True
+    if limit != glimit:
+        _limit[0] = limit
+        for pool in _all_pools():
+            pool.limit = limit
+        if reset_after:
+            reset()
+    return limit
+
+
+def reset(*args, **kwargs):
+    for pool in _all_pools():
+        try:
+            pool.force_close_all()
+        except Exception:
+            pass
+    for group in _groups:
+        group.clear()
+    _used[0] = False
+
+try:
+    from multiprocessing.util import register_after_fork
+    register_after_fork(connections, reset)
+except ImportError:  # pragma: no cover
+    pass
diff --git a/awx/lib/site-packages/kombu/serialization.py b/awx/lib/site-packages/kombu/serialization.py
new file mode 100644
index 0000000000..aeac047703
--- /dev/null
+++ b/awx/lib/site-packages/kombu/serialization.py
@@ -0,0 +1,446 @@
+"""
+kombu.serialization
+===================
+
+Serialization utilities.
+
+"""
+from __future__ import absolute_import
+
+import codecs
+import os
+import sys
+
+import pickle as pypickle
+try:
+    import cPickle as cpickle
+except ImportError:  # pragma: no cover
+    cpickle = None  # noqa
+
+from .exceptions import SerializerNotInstalled, ContentDisallowed
+from .utils import entrypoints
+from .utils.encoding import str_to_bytes, bytes_t
+
+__all__ = ['pickle', 'encode', 'decode',
+           'register', 'unregister']
+SKIP_DECODE = frozenset(['binary', 'ascii-8bit'])
+
+if sys.platform.startswith('java'):  # pragma: no cover
+
+    def _decode(t, coding):
+        return codecs.getdecoder(coding)(t)[0]
+else:
+    _decode = codecs.decode
+
+if sys.version_info < (2, 6):  # pragma: no cover
+    # cPickle is broken in Python <= 2.5.
+    # It unsafely and incorrectly uses relative instead of absolute
+    # imports,
+    # so e.g.:
+    #     exceptions.KeyError
+    # becomes:
+    #     kombu.exceptions.KeyError
+    #
+    # Your best choice is to upgrade to Python 2.6,
+    # as while the pure pickle version has worse performance,
+    # it is the only safe option for older Python versions.
+    pickle = pypickle
+    pickle_load = pypickle.load
+    pickle_loads = pypickle.loads
+else:
+    pickle = cpickle or pypickle
+    pickle_load = pickle.load
+    pickle_loads = pickle.loads
+
+
+# cPickle.loads does not support buffer() objects,
+# but we can just create a StringIO and use load.
+if sys.version_info[0] == 3:
+    from io import BytesIO
+else:
+    try:
+        from cStringIO import StringIO as BytesIO  # noqa
+    except ImportError:
+        from StringIO import StringIO as BytesIO  # noqa
+
+#: Kombu requires Python 2.5 or later so we use protocol 2 by default.
+#: There's a new protocol (3) but this is only supported by Python 3.
+pickle_protocol = int(os.environ.get('PICKLE_PROTOCOL', 2))
+
+
+def pickle_loads(s, load=pickle_load):
+    # used to support buffer objects
+    return load(BytesIO(s))
+
+
+def parenthesize_alias(first, second):
+    return '%s (%s)' % (first, second) if first else second
+
+
+class SerializerRegistry(object):
+    """The registry keeps track of serialization methods."""
+
+    def __init__(self):
+        self._encoders = {}
+        self._decoders = {}
+        self._default_encode = None
+        self._default_content_type = None
+        self._default_content_encoding = None
+        self._disabled_content_types = set()
+        self.type_to_name = {}
+        self.name_to_type = {}
+
+    def register(self, name, encoder, decoder, content_type,
+                 content_encoding='utf-8'):
+        if encoder:
+            self._encoders[name] = (content_type, content_encoding, encoder)
+        if decoder:
+            self._decoders[content_type] = decoder
+        self.type_to_name[content_type] = name
+        self.name_to_type[name] = content_type
+
+    def enable(self, name):
+        if '/' not in name:
+            name = self.name_to_type[name]
+        self._disabled_content_types.remove(name)
+
+    def disable(self, name):
+        if '/' not in name:
+            name = self.name_to_type[name]
+        self._disabled_content_types.add(name)
+
+    def unregister(self, name):
+        try:
+            content_type = self.name_to_type[name]
+            self._decoders.pop(content_type, None)
+            self._encoders.pop(name, None)
+            self.type_to_name.pop(content_type, None)
+            self.name_to_type.pop(name, None)
+        except KeyError:
+            raise SerializerNotInstalled(
+                'No encoder/decoder installed for %s' % name)
+
+    def _set_default_serializer(self, name):
+        """
+        Set the default serialization method used by this library.
+
+        :param name: The name of the registered serialization method.
+            For example, `json` (default), `pickle`, `yaml`, `msgpack`,
+            or any custom methods registered using :meth:`register`.
+
+        :raises SerializerNotInstalled: If the serialization method
+            requested is not available.
+        """
+        try:
+            (self._default_content_type, self._default_content_encoding,
+             self._default_encode) = self._encoders[name]
+        except KeyError:
+            raise SerializerNotInstalled(
+                'No encoder installed for %s' % name)
+
+    def encode(self, data, serializer=None):
+        if serializer == 'raw':
+            return raw_encode(data)
+        if serializer and not self._encoders.get(serializer):
+            raise SerializerNotInstalled(
+                'No encoder installed for %s' % serializer)
+
+        # If a raw string was sent, assume binary encoding
+        # (it's likely either ASCII or a raw binary file, and a character
+        # set of 'binary' will encompass both, even if not ideal.)
+        if not serializer and isinstance(data, bytes_t):
+            # In Python 3+, this would be "bytes"; allow binary data to be
+            # sent as a message without getting encoder errors
+            return 'application/data', 'binary', data
+
+        # For Unicode objects, force it into a string
+        if not serializer and isinstance(data, unicode):
+            payload = data.encode('utf-8')
+            return 'text/plain', 'utf-8', payload
+
+        if serializer:
+            content_type, content_encoding, encoder = \
+                self._encoders[serializer]
+        else:
+            encoder = self._default_encode
+            content_type = self._default_content_type
+            content_encoding = self._default_content_encoding
+
+        payload = encoder(data)
+        return content_type, content_encoding, payload
+
+    def decode(self, data, content_type, content_encoding,
+               accept=None, force=False):
+        if accept is not None:
+            if content_type not in accept:
+                raise self._for_untrusted_content(content_type, 'untrusted')
+        else:
+            if content_type in self._disabled_content_types and not force:
+                raise self._for_untrusted_content(content_type, 'disabled')
+        content_type = content_type or 'application/data'
+        content_encoding = (content_encoding or 'utf-8').lower()
+
+        if data:
+            decode = self._decoders.get(content_type)
+            if decode:
+                return decode(data)
+            if content_encoding not in SKIP_DECODE and \
+                    not isinstance(data, unicode):
+                return _decode(data, content_encoding)
+        return data
+
+    def _for_untrusted_content(self, ctype, why):
+        return ContentDisallowed(
+            'Refusing to decode %(why)s content of type %(type)s' % {
+                'why': why,
+                'type': parenthesize_alias(self.type_to_name[ctype], ctype),
+            },
+        )
+
+
+#: Global registry of serializers/deserializers.
+registry = SerializerRegistry()
+
+
+"""
+.. function:: encode(data, serializer=default_serializer)
+
+    Serialize a data structure into a string suitable for sending
+    as an AMQP message body.
+
+    :param data: The message data to send. Can be a list,
+        dictionary or a string.
+
+    :keyword serializer: An optional string representing
+        the serialization method you want the data marshalled
+        into. (For example, `json`, `raw`, or `pickle`).
+
+        If :const:`None` (default), then json will be used, unless
+        `data` is a :class:`str` or :class:`unicode` object. In this
+        latter case, no serialization occurs as it would be
+        unnecessary.
+
+        Note that if `serializer` is specified, then that
+        serialization method will be used even if a :class:`str`
+        or :class:`unicode` object is passed in.
+
+    :returns: A three-item tuple containing the content type
+        (e.g., `application/json`), content encoding, (e.g.,
+        `utf-8`) and a string containing the serialized
+        data.
+
+    :raises SerializerNotInstalled: If the serialization method
+        requested is not available.
+"""
+encode = registry.encode
+
+"""
+.. function:: decode(data, content_type, content_encoding):
+
+    Deserialize a data stream as serialized using `encode`
+    based on `content_type`.
+
+    :param data: The message data to deserialize.
+
+    :param content_type: The content-type of the data.
+        (e.g., `application/json`).
+
+    :param content_encoding: The content-encoding of the data.
+        (e.g., `utf-8`, `binary`, or `us-ascii`).
+
+    :returns: The unserialized data.
+
+"""
+decode = registry.decode
+
+
+"""
+.. function:: register(name, encoder, decoder, content_type,
+                       content_encoding='utf-8'):
+    Register a new encoder/decoder.
+
+    :param name: A convenience name for the serialization method.
+
+    :param encoder: A method that will be passed a python data structure
+        and should return a string representing the serialized data.
+        If :const:`None`, then only a decoder will be registered. Encoding
+        will not be possible.
+
+    :param decoder: A method that will be passed a string representing
+        serialized data and should return a python data structure.
+        If :const:`None`, then only an encoder will be registered.
+        Decoding will not be possible.
+
+    :param content_type: The mime-type describing the serialized
+        structure.
+
+    :param content_encoding: The content encoding (character set) that
+        the `decoder` method will be returning. Will usually be
+        `utf-8`, `us-ascii`, or `binary`.
+
+"""
+register = registry.register
+
+
+"""
+.. function:: unregister(name):
+    Unregister registered encoder/decoder.
+
+    :param name: Registered serialization method name.
+
+"""
+unregister = registry.unregister
+
+
+def raw_encode(data):
+    """Special case serializer."""
+    content_type = 'application/data'
+    payload = data
+    if isinstance(payload, unicode):
+        content_encoding = 'utf-8'
+        payload = payload.encode(content_encoding)
+    else:
+        content_encoding = 'binary'
+    return content_type, content_encoding, payload
+
+
+def register_json():
+    """Register an encoder/decoder for JSON serialization."""
+    from anyjson import loads, dumps
+
+    def _loads(obj):
+        if isinstance(obj, bytes_t):
+            obj = obj.decode()
+        return loads(obj)
+
+    registry.register('json', dumps, _loads,
+                      content_type='application/json',
+                      content_encoding='utf-8')
+
+
+def register_yaml():
+    """Register an encoder/decoder for YAML serialization.
+
+    It is slower than JSON, but allows for more data types
+    to be serialized. Useful if you need to send data such as dates."""
+    try:
+        import yaml
+        registry.register('yaml', yaml.safe_dump, yaml.safe_load,
+                          content_type='application/x-yaml',
+                          content_encoding='utf-8')
+    except ImportError:
+
+        def not_available(*args, **kwargs):
+            """In case a client receives a yaml message, but yaml
+            isn't installed."""
+            raise SerializerNotInstalled(
+                'No decoder installed for YAML. Install the PyYAML library')
+        registry.register('yaml', None, not_available, 'application/x-yaml')
+
+
+if sys.version_info[0] == 3:
+
+    def unpickle(s):
+        return pickle_loads(str_to_bytes(s))
+
+else:
+    unpickle = pickle_loads  # noqa
+
+
+def register_pickle():
+    """The fastest serialization method, but restricts
+    you to python clients."""
+
+    def dumps(obj, dumper=pickle.dumps):
+        return dumper(obj, protocol=pickle_protocol)
+
+    registry.register('pickle', dumps, unpickle,
+                      content_type='application/x-python-serialize',
+                      content_encoding='binary')
+
+
+def register_msgpack():
+    """See http://msgpack.sourceforge.net/"""
+    try:
+        try:
+            from msgpack import packb as dumps, unpackb
+            loads = lambda s: unpackb(s, encoding='utf-8')
+        except ImportError:
+            # msgpack < 0.2.0 and Python 2.5
+            from msgpack import packs as dumps, unpacks as loads  # noqa
+        registry.register(
+            'msgpack', dumps, loads,
+            content_type='application/x-msgpack',
+            content_encoding='binary')
+    except ImportError:
+
+        def not_available(*args, **kwargs):
+            """In case a client receives a msgpack message, but msgpack
+            isn't installed."""
+            raise SerializerNotInstalled(
+                'No decoder installed for msgpack. '
+                'Please install the msgpack library')
+        registry.register('msgpack', None, not_available,
+                          'application/x-msgpack')
+
+# Register the base serialization methods.
+register_json()
+register_pickle()
+register_yaml()
+register_msgpack()
+
+# JSON is assumed to always be available, so is the default.
+# (this matches the historical use of kombu.)
+registry._set_default_serializer('json')
+
+
+_setupfuns = {
+    'json': register_json,
+    'pickle': register_pickle,
+    'yaml': register_yaml,
+    'msgpack': register_msgpack,
+    'application/json': register_json,
+    'application/x-yaml': register_yaml,
+    'application/x-python-serialize': register_pickle,
+    'application/x-msgpack': register_msgpack,
+}
+
+
+def enable_insecure_serializers(choices=['pickle', 'yaml', 'msgpack']):
+    """Enable serializers that are considered to be unsafe.
+
+    Will enable ``pickle``, ``yaml`` and ``msgpack`` by default,
+    but you can also specify a list of serializers (by name or content type)
+    to enable.
+
+    """
+    for choice in choices:
+        try:
+            registry.enable(choice)
+        except KeyError:
+            pass
+
+
+def disable_insecure_serializers(allowed=['json']):
+    """Disable untrusted serializers.
+
+    Will disable all serializers except ``json``
+    or you can specify a list of deserializers to allow.
+
+    .. note::
+
+        Producers will still be able to serialize data
+        in these formats, but consumers will not accept
+        incoming data using the untrusted content types.
+
+    """
+    for name in registry._decoders:
+        registry.disable(name)
+    if allowed is not None:
+        for name in allowed:
+            registry.enable(name)
+
+# Load entrypoints from installed extensions
+for ep, args in entrypoints('kombu.serializers'):
+    register(ep.name, *args)
diff --git a/awx/lib/site-packages/kombu/simple.py b/awx/lib/site-packages/kombu/simple.py
new file mode 100644
index 0000000000..e4ca5056bd
--- /dev/null
+++ b/awx/lib/site-packages/kombu/simple.py
@@ -0,0 +1,134 @@
+"""
+kombu.simple
+============
+
+Simple interface.
+
+"""
+from __future__ import absolute_import
+
+import socket
+
+from collections import deque
+from time import time
+from Queue import Empty
+
+from . import entity
+from . import messaging
+from .connection import maybe_channel
+
+__all__ = ['SimpleQueue', 'SimpleBuffer']
+
+
+class SimpleBase(object):
+    _consuming = False
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *exc_info):
+        self.close()
+
+    def __init__(self, channel, producer, consumer, no_ack=False):
+        self.channel = maybe_channel(channel)
+        self.producer = producer
+        self.consumer = consumer
+        self.no_ack = no_ack
+        self.queue = self.consumer.queues[0]
+        self.buffer = deque()
+        self.consumer.register_callback(self._receive)
+
+    def get(self, block=True, timeout=None):
+        if not block:
+            return self.get_nowait()
+        self._consume()
+        elapsed = 0.0
+        remaining = timeout
+        while True:
+            time_start = time()
+            if self.buffer:
+                return self.buffer.pop()
+            try:
+                self.channel.connection.client.drain_events(
+                    timeout=timeout and remaining)
+            except socket.timeout:
+                raise Empty()
+            elapsed += time() - time_start
+            remaining = timeout and timeout - elapsed or None
+
+    def get_nowait(self):
+        m = self.queue.get(no_ack=self.no_ack)
+        if not m:
+            raise Empty()
+        return m
+
+    def put(self, message, serializer=None, headers=None, compression=None,
+            routing_key=None, **kwargs):
+        self.producer.publish(message,
+                              serializer=serializer,
+                              routing_key=routing_key,
+                              headers=headers,
+                              compression=compression,
+                              **kwargs)
+
+    def clear(self):
+        return self.consumer.purge()
+
+    def qsize(self):
+        _, size, _ = self.queue.queue_declare(passive=True)
+        return size
+
+    def close(self):
+        self.consumer.cancel()
+
+    def _receive(self, message_data, message):
+        self.buffer.append(message)
+
+    def _consume(self):
+        if not self._consuming:
+            self.consumer.consume(no_ack=self.no_ack)
+            self._consuming = True
+
+    def __len__(self):
+        """`len(self) -> self.qsize()`"""
+        return self.qsize()
+
+    def __nonzero__(self):
+        return True
+
+
+class SimpleQueue(SimpleBase):
+    no_ack = False
+    queue_opts = {}
+    exchange_opts = {}
+
+    def __init__(self, channel, name, no_ack=None, queue_opts=None,
+                 exchange_opts=None, serializer=None,
+                 compression=None, **kwargs):
+        queue = name
+        queue_opts = dict(self.queue_opts, **queue_opts or {})
+        exchange_opts = dict(self.exchange_opts, **exchange_opts or {})
+        if no_ack is None:
+            no_ack = self.no_ack
+        if not isinstance(queue, entity.Queue):
+            exchange = entity.Exchange(name, 'direct', **exchange_opts)
+            queue = entity.Queue(name, exchange, name, **queue_opts)
+        else:
+            name = queue.name
+            exchange = queue.exchange
+        producer = messaging.Producer(channel, exchange,
+                                      serializer=serializer,
+                                      routing_key=name,
+                                      compression=compression)
+        consumer = messaging.Consumer(channel, queue)
+        super(SimpleQueue, self).__init__(channel, producer,
+                                          consumer, no_ack, **kwargs)
+
+
+class SimpleBuffer(SimpleQueue):
+    no_ack = True
+    queue_opts = dict(durable=False,
+                      auto_delete=True)
+    exchange_opts = dict(durable=False,
+                         delivery_mode='transient',
+                         auto_delete=True)
diff --git a/awx/lib/site-packages/kombu/syn.py b/awx/lib/site-packages/kombu/syn.py
new file mode 100644
index 0000000000..7f6e809960
--- /dev/null
+++ b/awx/lib/site-packages/kombu/syn.py
@@ -0,0 +1,53 @@
+"""
+kombu.syn
+=========
+
+"""
+from __future__ import absolute_import
+
+import sys
+
+__all__ = ['detect_environment']
+
+_environment = None
+
+
+def blocking(fun, *args, **kwargs):
+    return fun(*args, **kwargs)
+
+
+def select_blocking_method(type):
+    pass
+
+
+def _detect_environment():
+    # -eventlet-
+    if 'eventlet' in sys.modules:
+        try:
+            from eventlet.patcher import is_monkey_patched as is_eventlet
+            import socket
+
+            if is_eventlet(socket):
+                return 'eventlet'
+        except ImportError:
+            pass
+
+    # -gevent-
+    if 'gevent' in sys.modules:
+        try:
+            from gevent import socket as _gsocket
+            import socket
+
+            if socket.socket is _gsocket.socket:
+                return 'gevent'
+        except ImportError:
+            pass
+
+    return 'default'
+
+
+def detect_environment():
+    global _environment
+    if _environment is None:
+        _environment = _detect_environment()
+    return _environment
diff --git a/awx/lib/site-packages/kombu/tests/__init__.py b/awx/lib/site-packages/kombu/tests/__init__.py
new file mode 100644
index 0000000000..6a13a75074
--- /dev/null
+++ b/awx/lib/site-packages/kombu/tests/__init__.py
@@ -0,0 +1,72 @@
+from __future__ import absolute_import
+
+import anyjson
+import os
+import sys
+
+from kombu.exceptions import VersionMismatch
+
+# avoid json implementation inconsistencies.
+try:
+    import json  # noqa
+    anyjson.force_implementation('json')
+except ImportError:
+    anyjson.force_implementation('simplejson')
+
+
+def find_distribution_modules(name=__name__, file=__file__):
+    current_dist_depth = len(name.split('.')) - 1
+    current_dist = os.path.join(os.path.dirname(file),
+                                *([os.pardir] * current_dist_depth))
+    abs = os.path.abspath(current_dist)
+    dist_name = os.path.basename(abs)
+
+    for dirpath, dirnames, filenames in os.walk(abs):
+        package = (dist_name + dirpath[len(abs):]).replace('/', '.')
+        if '__init__.py' in filenames:
+            yield package
+            for filename in filenames:
+                if filename.endswith('.py') and filename != '__init__.py':
+                    yield '.'.join([package, filename])[:-3]
+
+
+def import_all_modules(name=__name__, file=__file__, skip=[]):
+    for module in find_distribution_modules(name, file):
+        if module not in skip:
+            print('preimporting %r for coverage...' % (module, ))
+            try:
+                __import__(module)
+            except (ImportError, VersionMismatch, AttributeError):
+                pass
+
+
+def is_in_coverage():
+    return (os.environ.get('COVER_ALL_MODULES') or
+            '--with-coverage3' in sys.argv)
+
+
+def setup_django_env():
+    try:
+        from django.conf import settings
+    except ImportError:
+        return
+
+    if not settings.configured:
+        settings.configure(
+            DATABASES={
+                'default': {
+                    'ENGINE': 'django.db.backends.sqlite3',
+                    'NAME': ':memory:',
+                },
+            },
+            DATABASE_ENGINE='sqlite3',
+            DATABASE_NAME=':memory:',
+            INSTALLED_APPS=('kombu.transport.django', ),
+        )
+
+
+def setup():
+    # so coverage sees all our modules.
+    setup_django_env()
+    if is_in_coverage():
+        import_all_modules()
diff --git a/awx/lib/site-packages/kombu/tests/compat.py b/awx/lib/site-packages/kombu/tests/compat.py
new file mode 100644
index 0000000000..e750552bc2
--- /dev/null
+++ b/awx/lib/site-packages/kombu/tests/compat.py
@@ -0,0 +1,87 @@
+from __future__ import absolute_import
+
+import sys
+
+
+class WarningMessage(object):
+
+    """Holds the result of a single showwarning() call."""
+
+    _WARNING_DETAILS = ('message', 'category', 'filename', 'lineno', 'file',
+                        'line')
+
+    def __init__(self, message, category, filename, lineno,
+                 file=None, line=None):
+        local_values = locals()
+        for attr in self._WARNING_DETAILS:
+            setattr(self, attr, local_values[attr])
+
+        self._category_name = category and category.__name__ or None
+
+    def __str__(self):
+        return ('{message : %r, category : %r, filename : %r, lineno : %s, '
+                'line : %r}' % (self.message, self._category_name,
+                                self.filename, self.lineno, self.line))
+
+
+class catch_warnings(object):
+
+    """A context manager that copies and restores the warnings filter upon
+    exiting the context.
+
+    The 'record' argument specifies whether warnings should be captured by a
+    custom implementation of warnings.showwarning() and be appended to a list
+    returned by the context manager. Otherwise None is returned by the context
+    manager. The objects appended to the list are arguments whose attributes
+    mirror the arguments to showwarning().
+
+    The 'module' argument is to specify an alternative module to the module
+    named 'warnings' and imported under that name. This argument is only
+    useful when testing the warnings module itself.
+
+    """
+
+    def __init__(self, record=False, module=None):
+        """Specify whether to record warnings and if an alternative module
+        should be used other than sys.modules['warnings'].
+
+        For compatibility with Python 3.0, please consider all arguments to be
+        keyword-only.
+
+        """
+        self._record = record
+        self._module = module is None and sys.modules['warnings'] or module
+        self._entered = False
+
+    def __repr__(self):
+        args = []
+        if self._record:
+            args.append('record=True')
+        if self._module is not sys.modules['warnings']:
+            args.append('module=%r' % self._module)
+        name = type(self).__name__
+        return '%s(%s)' % (name, ', '.join(args))
+
+    def __enter__(self):
+        if self._entered:
+            raise RuntimeError('Cannot enter %r twice' % self)
+        self._entered = True
+        self._filters = self._module.filters
+        self._module.filters = self._filters[:]
+        self._showwarning = self._module.showwarning
+        if self._record:
+            log = []
+
+            def showwarning(*args, **kwargs):
+                log.append(WarningMessage(*args, **kwargs))
+
+            self._module.showwarning = showwarning
+            return log
+        else:
+            return None
+
+    def __exit__(self, *exc_info):
+        if not self._entered:
+            raise RuntimeError('Cannot exit %r without entering first' % self)
+        self._module.filters = self._filters
+        self._module.showwarning = self._showwarning
diff --git a/awx/lib/site-packages/kombu/tests/mocks.py b/awx/lib/site-packages/kombu/tests/mocks.py
new file mode 100644
index 0000000000..4d38da5641
--- /dev/null
+++ b/awx/lib/site-packages/kombu/tests/mocks.py
@@ -0,0 +1,148 @@
+from __future__ import absolute_import
+
+from itertools import count
+
+import anyjson
+
+from kombu.transport import base
+
+
+class Message(base.Message):
+
+    def __init__(self, *args, **kwargs):
+        self.throw_decode_error = kwargs.get('throw_decode_error', False)
+        super(Message, self).__init__(*args, **kwargs)
+
+    def decode(self):
+        if self.throw_decode_error:
+            raise ValueError("can't decode message")
+        return super(Message, self).decode()
+
+
+class Channel(base.StdChannel):
+    open = True
+    throw_decode_error = False
+    _ids = count(1).next
+
+    def __init__(self, connection):
+        self.connection = connection
+        self.called = []
+        self.deliveries = count(1).next
+        self.to_deliver = []
+        self.events = {'basic_return': set()}
+        self.channel_id = self._ids()
+
+    def _called(self, name):
+        self.called.append(name)
+
+    def __contains__(self, key):
+        return key in self.called
+
+    def exchange_declare(self, *args, **kwargs):
+        self._called('exchange_declare')
+
+    def prepare_message(self, body, priority=0, content_type=None,
+                        content_encoding=None, headers=None, properties={}):
+        self._called('prepare_message')
+        return dict(body=body,
+                    headers=headers,
+                    properties=properties,
+                    priority=priority,
+                    content_type=content_type,
+                    content_encoding=content_encoding)
+
+    def basic_publish(self, message, exchange='', routing_key='',
+                      mandatory=False, immediate=False, **kwargs):
+        self._called('basic_publish')
+        return message, exchange, routing_key
+
+    def exchange_delete(self, *args, **kwargs):
+        self._called('exchange_delete')
+
+    def queue_declare(self, *args, **kwargs):
+        self._called('queue_declare')
+
+    def queue_bind(self, *args, **kwargs):
+        self._called('queue_bind')
+
+    def queue_unbind(self, *args, **kwargs):
+        self._called('queue_unbind')
+
+    def queue_delete(self, queue, if_unused=False, if_empty=False, **kwargs):
+        self._called('queue_delete')
+
+    def basic_get(self, *args, **kwargs):
+        self._called('basic_get')
+        try:
+            return self.to_deliver.pop()
+        except IndexError:
+            pass
+
+    def queue_purge(self, *args, **kwargs):
+        self._called('queue_purge')
+
+    def basic_consume(self, *args, **kwargs):
+        self._called('basic_consume')
+
+    def basic_cancel(self, *args, **kwargs):
+        self._called('basic_cancel')
+
+    def basic_ack(self, *args, **kwargs):
+        self._called('basic_ack')
+
+    def basic_recover(self, requeue=False):
+        self._called('basic_recover')
+
+    def exchange_bind(self, *args, **kwargs):
+        self._called('exchange_bind')
+
+    def exchange_unbind(self, *args, **kwargs):
+        self._called('exchange_unbind')
+
+    def close(self):
+        self._called('close')
+
+    def message_to_python(self, message, *args, **kwargs):
+        self._called('message_to_python')
+        return Message(self, body=anyjson.dumps(message),
+                       delivery_tag=self.deliveries(),
+                       throw_decode_error=self.throw_decode_error,
+                       content_type='application/json',
+                       content_encoding='utf-8')
+
+    def flow(self, active):
+        self._called('flow')
+
+    def basic_reject(self, delivery_tag, requeue=False):
+        if requeue:
+            return self._called('basic_reject:requeue')
+        return self._called('basic_reject')
+
+    def basic_qos(self, prefetch_size=0, prefetch_count=0,
+                  apply_global=False):
+        self._called('basic_qos')
+
+
+class Connection(object):
+    connected = True
+
+    def __init__(self, client):
+        self.client = client
+
+    def channel(self):
+        return Channel(self)
+
+
+class Transport(base.Transport):
+
+    def establish_connection(self):
+        return Connection(self.client)
+
+    def create_channel(self, connection):
+        return connection.channel()
+
+    def drain_events(self, connection, **kwargs):
+        return 'event'
+
+    def close_connection(self, connection):
+        connection.connected = False
diff --git a/awx/lib/site-packages/kombu/tests/test_clocks.py b/awx/lib/site-packages/kombu/tests/test_clocks.py
new file mode 100644
index 0000000000..ed8c9fa8e6
--- /dev/null
+++ b/awx/lib/site-packages/kombu/tests/test_clocks.py
@@ -0,0 +1,55 @@
+from __future__ import absolute_import
+
+from heapq import heappush
+
+from kombu.clocks import LamportClock
+
+from .utils import TestCase
+
+
+class test_LamportClock(TestCase):
+
+    def test_clocks(self):
+        c1 = LamportClock()
+        c2 = LamportClock()
+
+        c1.forward()
+        c2.forward()
+        c1.forward()
+        c1.forward()
+        c2.adjust(c1.value)
+        self.assertEqual(c2.value, c1.value + 1)
+        self.assertTrue(repr(c1))
+
+        c2_val = c2.value
+        c2.forward()
+        c2.forward()
+        c2.adjust(c1.value)
+        self.assertEqual(c2.value, c2_val + 2 + 1)
+
+        c1.adjust(c2.value)
+        self.assertEqual(c1.value, c2.value + 1)
+
+    def test_sort(self):
+        c = LamportClock()
+        pid1 = 'a.example.com:312'
+        pid2 = 'b.example.com:311'
+
+        events = []
+
+        m1 = (c.forward(), pid1)
+        heappush(events, m1)
+        m2 = (c.forward(), pid2)
+        heappush(events, m2)
+        m3 = (c.forward(), pid1)
+        heappush(events, m3)
+        m4 = (30, pid1)
+        heappush(events, m4)
+        m5 = (30, pid2)
+        heappush(events, m5)
+
+        self.assertEqual(str(c), str(c.value))
+
+        self.assertEqual(c.sort_heap(events), m1)
+        self.assertEqual(c.sort_heap([m4, m5]), m4)
self.assertEqual(c.sort_heap([m4, m5, m1]), m4) diff --git a/awx/lib/site-packages/kombu/tests/test_common.py b/awx/lib/site-packages/kombu/tests/test_common.py new file mode 100644 index 0000000000..d315fdbb3b --- /dev/null +++ b/awx/lib/site-packages/kombu/tests/test_common.py @@ -0,0 +1,472 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import socket + +from mock import patch + +from kombu import common +from kombu.common import ( + Broadcast, maybe_declare, + send_reply, isend_reply, collect_replies, + declaration_cached, ignore_errors, + QoS, PREFETCH_COUNT_MAX, + entry_to_queue, +) +from kombu.exceptions import StdChannelError + +from .utils import TestCase +from .utils import ContextMock, Mock, MockPool + + +class test_ignore_errors(TestCase): + + def test_ignored(self): + connection = Mock() + connection.channel_errors = (KeyError, ) + connection.connection_errors = (KeyError, ) + + with ignore_errors(connection): + raise KeyError() + + def raising(): + raise KeyError() + + ignore_errors(connection, raising) + + connection.channel_errors = connection.connection_errors = \ + () + + with self.assertRaises(KeyError): + with ignore_errors(connection): + raise KeyError() + + +class test_declaration_cached(TestCase): + + def test_when_cached(self): + chan = Mock() + chan.connection.client.declared_entities = ['foo'] + self.assertTrue(declaration_cached('foo', chan)) + + def test_when_not_cached(self): + chan = Mock() + chan.connection.client.declared_entities = ['bar'] + self.assertFalse(declaration_cached('foo', chan)) + + +class test_Broadcast(TestCase): + + def test_arguments(self): + q = Broadcast(name='test_Broadcast') + self.assertTrue(q.name.startswith('bcast.')) + self.assertEqual(q.alias, 'test_Broadcast') + self.assertTrue(q.auto_delete) + self.assertEqual(q.exchange.name, 'test_Broadcast') + self.assertEqual(q.exchange.type, 'fanout') + + q = Broadcast('test_Broadcast', 'explicit_queue_name') + self.assertEqual(q.name, 'explicit_queue_name') + self.assertEqual(q.exchange.name, 'test_Broadcast') + + +class test_maybe_declare(TestCase): + + def test_cacheable(self): + channel = Mock() + client = channel.connection.client = Mock() + client.declared_entities = set() + entity = Mock() + entity.can_cache_declaration = True + entity.auto_delete = False + entity.is_bound = True + entity.channel = channel + + maybe_declare(entity, channel) + self.assertEqual(entity.declare.call_count, 1) + self.assertIn(entity, channel.connection.client.declared_entities) + + maybe_declare(entity, channel) + self.assertEqual(entity.declare.call_count, 1) + + entity.channel.connection = None + with self.assertRaises(StdChannelError): + maybe_declare(entity) + + def test_binds_entities(self): + channel = Mock() + channel.connection.client.declared_entities = set() + entity = Mock() + entity.can_cache_declaration = True + entity.is_bound = False + entity.bind.return_value = entity + entity.bind.return_value.channel = channel + + maybe_declare(entity, channel) + entity.bind.assert_called_with(channel) + + def test_with_retry(self): + channel = Mock() + entity = Mock() + entity.can_cache_declaration = True + entity.is_bound = True + entity.channel = channel + + maybe_declare(entity, channel, retry=True) + self.assertTrue(channel.connection.client.ensure.call_count) + + +class test_replies(TestCase): + + def test_send_reply(self): + req = Mock() + req.content_type = 'application/json' + req.properties = {'reply_to': 'hello', + 'correlation_id': 'world'} + channel = 
Mock() + exchange = Mock() + exchange.is_bound = True + exchange.channel = channel + producer = Mock() + producer.channel = channel + producer.channel.connection.client.declared_entities = set() + send_reply(exchange, req, {'hello': 'world'}, producer) + + self.assertTrue(producer.publish.call_count) + args = producer.publish.call_args + self.assertDictEqual(args[0][0], {'hello': 'world'}) + self.assertDictEqual(args[1], {'exchange': exchange, + 'routing_key': 'hello', + 'correlation_id': 'world', + 'serializer': 'json'}) + + exchange.declare.assert_called_with() + + @patch('kombu.common.ipublish') + def test_isend_reply(self, ipublish): + pool, exchange, req, msg, props = (Mock(), Mock(), Mock(), + Mock(), Mock()) + + isend_reply(pool, exchange, req, msg, props) + ipublish.assert_called_with(pool, send_reply, + (exchange, req, msg), props) + + @patch('kombu.common.itermessages') + def test_collect_replies_with_ack(self, itermessages): + conn, channel, queue = Mock(), Mock(), Mock() + body, message = Mock(), Mock() + itermessages.return_value = [(body, message)] + it = collect_replies(conn, channel, queue, no_ack=False) + m = it.next() + self.assertIs(m, body) + itermessages.assert_called_with(conn, channel, queue, no_ack=False) + message.ack.assert_called_with() + + with self.assertRaises(StopIteration): + it.next() + + channel.after_reply_message_received.assert_called_with(queue.name) + + @patch('kombu.common.itermessages') + def test_collect_replies_no_ack(self, itermessages): + conn, channel, queue = Mock(), Mock(), Mock() + body, message = Mock(), Mock() + itermessages.return_value = [(body, message)] + it = collect_replies(conn, channel, queue) + m = it.next() + self.assertIs(m, body) + itermessages.assert_called_with(conn, channel, queue, no_ack=True) + self.assertFalse(message.ack.called) + + @patch('kombu.common.itermessages') + def test_collect_replies_no_replies(self, itermessages): + conn, channel, queue = Mock(), Mock(), Mock() + itermessages.return_value = [] + it = collect_replies(conn, channel, queue) + with self.assertRaises(StopIteration): + it.next() + + self.assertFalse(channel.after_reply_message_received.called) + + +class test_insured(TestCase): + + @patch('kombu.common.logger') + def test_ensure_errback(self, logger): + common._ensure_errback('foo', 30) + self.assertTrue(logger.error.called) + + def test_revive_connection(self): + on_revive = Mock() + channel = Mock() + common.revive_connection(Mock(), channel, on_revive) + on_revive.assert_called_with(channel) + + common.revive_connection(Mock(), channel, None) + + def test_revive_producer(self): + on_revive = Mock() + channel = Mock() + common.revive_producer(Mock(), channel, on_revive) + on_revive.assert_called_with(channel) + + common.revive_producer(Mock(), channel, None) + + def get_insured_mocks(self, insured_returns=('works', 'ignored')): + conn = ContextMock() + pool = MockPool(conn) + fun = Mock() + insured = conn.autoretry.return_value = Mock() + insured.return_value = insured_returns + return conn, pool, fun, insured + + def test_insured(self): + conn, pool, fun, insured = self.get_insured_mocks() + + ret = common.insured(pool, fun, (2, 2), {'foo': 'bar'}) + self.assertEqual(ret, 'works') + conn.ensure_connection.assert_called_with( + errback=common._ensure_errback, + ) + + self.assertTrue(insured.called) + i_args, i_kwargs = insured.call_args + self.assertTupleEqual(i_args, (2, 2)) + self.assertDictEqual(i_kwargs, {'foo': 'bar', + 'connection': conn}) + + self.assertTrue(conn.autoretry.called) + 
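For context, maybe_declare() is the memoizing wrapper these tests exercise: the first call declares the entity (binding it to the channel if needed) and records it in the connection's declared_entities cache, and later calls for the same entity are no-ops. A short usage sketch against the in-memory transport; the queue name is illustrative:

    from kombu import Connection, Queue
    from kombu.common import maybe_declare

    with Connection('memory://') as conn:
        channel = conn.channel()
        queue = Queue('example')
        maybe_declare(queue, channel)   # declares and caches the declaration
        maybe_declare(queue, channel)   # cached: declare() is not called again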
ar_args, ar_kwargs = conn.autoretry.call_args + self.assertTupleEqual(ar_args, (fun, conn.default_channel)) + self.assertTrue(ar_kwargs.get('on_revive')) + self.assertTrue(ar_kwargs.get('errback')) + + def test_insured_custom_errback(self): + conn, pool, fun, insured = self.get_insured_mocks() + + custom_errback = Mock() + common.insured(pool, fun, (2, 2), {'foo': 'bar'}, + errback=custom_errback) + conn.ensure_connection.assert_called_with(errback=custom_errback) + + def get_ipublish_args(self, ensure_returns=None): + producer = ContextMock() + pool = MockPool(producer) + fun = Mock() + ensure_returns = ensure_returns or Mock() + + producer.connection.ensure.return_value = ensure_returns + + return producer, pool, fun, ensure_returns + + def test_ipublish(self): + producer, pool, fun, ensure_returns = self.get_ipublish_args() + ensure_returns.return_value = 'works' + + ret = common.ipublish(pool, fun, (2, 2), {'foo': 'bar'}) + self.assertEqual(ret, 'works') + + self.assertTrue(producer.connection.ensure.called) + e_args, e_kwargs = producer.connection.ensure.call_args + self.assertTupleEqual(e_args, (producer, fun)) + self.assertTrue(e_kwargs.get('on_revive')) + self.assertEqual(e_kwargs.get('errback'), common._ensure_errback) + + ensure_returns.assert_called_with(2, 2, foo='bar', producer=producer) + + def test_ipublish_with_custom_errback(self): + producer, pool, fun, _ = self.get_ipublish_args() + + errback = Mock() + common.ipublish(pool, fun, (2, 2), {'foo': 'bar'}, errback=errback) + _, e_kwargs = producer.connection.ensure.call_args + self.assertEqual(e_kwargs.get('errback'), errback) + + +class MockConsumer(object): + consumers = set() + + def __init__(self, channel, queues=None, callbacks=None, **kwargs): + self.channel = channel + self.queues = queues + self.callbacks = callbacks + + def __enter__(self): + self.consumers.add(self) + return self + + def __exit__(self, *exc_info): + self.consumers.discard(self) + + +class test_itermessages(TestCase): + + class MockConnection(object): + should_raise_timeout = False + + def drain_events(self, **kwargs): + if self.should_raise_timeout: + raise socket.timeout() + for consumer in MockConsumer.consumers: + for callback in consumer.callbacks: + callback('body', 'message') + + def test_default(self): + conn = self.MockConnection() + channel = Mock() + channel.connection.client = conn + it = common.itermessages(conn, channel, 'q', limit=1, + Consumer=MockConsumer) + + ret = it.next() + self.assertTupleEqual(ret, ('body', 'message')) + + with self.assertRaises(StopIteration): + it.next() + + def test_when_raises_socket_timeout(self): + conn = self.MockConnection() + conn.should_raise_timeout = True + channel = Mock() + channel.connection.client = conn + it = common.itermessages(conn, channel, 'q', limit=1, + Consumer=MockConsumer) + + with self.assertRaises(StopIteration): + it.next() + + @patch('kombu.common.deque') + def test_when_raises_IndexError(self, deque): + deque_instance = deque.return_value = Mock() + deque_instance.popleft.side_effect = IndexError() + conn = self.MockConnection() + channel = Mock() + it = common.itermessages(conn, channel, 'q', limit=1, + Consumer=MockConsumer) + + with self.assertRaises(StopIteration): + it.next() + + +class test_entry_to_queue(TestCase): + + def test_calls_Queue_from_dict(self): + with patch('kombu.common.Queue') as Queue: + entry_to_queue('name', exchange='bar') + Queue.from_dict.assert_called_with('name', exchange='bar') + + +class test_QoS(TestCase): + + class _QoS(QoS): + def 
__init__(self, value): + self.value = value + QoS.__init__(self, None, value) + + def set(self, value): + return value + + def test_qos_exceeds_16bit(self): + with patch('kombu.common.logger') as logger: + callback = Mock() + qos = QoS(callback, 10) + qos.prev = 100 + # cannot use 2 ** 32 because of a bug on OSX Py2.5: + # https://jira.mongodb.org/browse/PYTHON-389 + qos.set(4294967296) + self.assertTrue(logger.warn.called) + callback.assert_called_with(prefetch_count=0) + + def test_qos_increment_decrement(self): + qos = self._QoS(10) + self.assertEqual(qos.increment_eventually(), 11) + self.assertEqual(qos.increment_eventually(3), 14) + self.assertEqual(qos.increment_eventually(-30), 14) + self.assertEqual(qos.decrement_eventually(7), 7) + self.assertEqual(qos.decrement_eventually(), 6) + + def test_qos_disabled_increment_decrement(self): + qos = self._QoS(0) + self.assertEqual(qos.increment_eventually(), 0) + self.assertEqual(qos.increment_eventually(3), 0) + self.assertEqual(qos.increment_eventually(-30), 0) + self.assertEqual(qos.decrement_eventually(7), 0) + self.assertEqual(qos.decrement_eventually(), 0) + self.assertEqual(qos.decrement_eventually(10), 0) + + def test_qos_thread_safe(self): + qos = self._QoS(10) + + def add(): + for i in range(1000): + qos.increment_eventually() + + def sub(): + for i in range(1000): + qos.decrement_eventually() + + def threaded(funs): + from threading import Thread + threads = [Thread(target=fun) for fun in funs] + for thread in threads: + thread.start() + for thread in threads: + thread.join() + + threaded([add, add]) + self.assertEqual(qos.value, 2010) + + qos.value = 1000 + threaded([add, sub]) # n = 2 + self.assertEqual(qos.value, 1000) + + def test_exceeds_short(self): + qos = QoS(Mock(), PREFETCH_COUNT_MAX - 1) + qos.update() + self.assertEqual(qos.value, PREFETCH_COUNT_MAX - 1) + qos.increment_eventually() + self.assertEqual(qos.value, PREFETCH_COUNT_MAX) + qos.increment_eventually() + self.assertEqual(qos.value, PREFETCH_COUNT_MAX + 1) + qos.decrement_eventually() + self.assertEqual(qos.value, PREFETCH_COUNT_MAX) + qos.decrement_eventually() + self.assertEqual(qos.value, PREFETCH_COUNT_MAX - 1) + + def test_consumer_increment_decrement(self): + mconsumer = Mock() + qos = QoS(mconsumer.qos, 10) + qos.update() + self.assertEqual(qos.value, 10) + mconsumer.qos.assert_called_with(prefetch_count=10) + qos.decrement_eventually() + qos.update() + self.assertEqual(qos.value, 9) + mconsumer.qos.assert_called_with(prefetch_count=9) + qos.decrement_eventually() + self.assertEqual(qos.value, 8) + mconsumer.qos.assert_called_with(prefetch_count=9) + self.assertIn({'prefetch_count': 9}, mconsumer.qos.call_args) + + # Does not decrement 0 value + qos.value = 0 + qos.decrement_eventually() + self.assertEqual(qos.value, 0) + qos.increment_eventually() + self.assertEqual(qos.value, 0) + + def test_consumer_decrement_eventually(self): + mconsumer = Mock() + qos = QoS(mconsumer.qos, 10) + qos.decrement_eventually() + self.assertEqual(qos.value, 9) + qos.value = 0 + qos.decrement_eventually() + self.assertEqual(qos.value, 0) + + def test_set(self): + mconsumer = Mock() + qos = QoS(mconsumer.qos, 10) + qos.set(12) + self.assertEqual(qos.prev, 12) + qos.set(qos.prev) diff --git a/awx/lib/site-packages/kombu/tests/test_compat.py b/awx/lib/site-packages/kombu/tests/test_compat.py new file mode 100644 index 0000000000..7e80e21fbf --- /dev/null +++ b/awx/lib/site-packages/kombu/tests/test_compat.py @@ -0,0 +1,324 @@ +from __future__ import absolute_import +from 
__future__ import with_statement + +from mock import patch + +from kombu import Connection, Exchange, Queue +from kombu import compat + +from .mocks import Transport, Channel +from .utils import TestCase +from .utils import Mock + + +class test_misc(TestCase): + + def test_iterconsume(self): + + class MyConnection(object): + drained = 0 + + def drain_events(self, *args, **kwargs): + self.drained += 1 + return self.drained + + class Consumer(object): + active = False + + def consume(self, *args, **kwargs): + self.active = True + + conn = MyConnection() + consumer = Consumer() + it = compat._iterconsume(conn, consumer) + self.assertEqual(it.next(), 1) + self.assertTrue(consumer.active) + + it2 = compat._iterconsume(conn, consumer, limit=10) + self.assertEqual(list(it2), [2, 3, 4, 5, 6, 7, 8, 9, 10, 11]) + + def test_Queue_from_dict(self): + defs = {'binding_key': 'foo.#', + 'exchange': 'fooex', + 'exchange_type': 'topic', + 'durable': True, + 'auto_delete': False} + + q1 = Queue.from_dict('foo', **dict(defs)) + self.assertEqual(q1.name, 'foo') + self.assertEqual(q1.routing_key, 'foo.#') + self.assertEqual(q1.exchange.name, 'fooex') + self.assertEqual(q1.exchange.type, 'topic') + self.assertTrue(q1.durable) + self.assertTrue(q1.exchange.durable) + self.assertFalse(q1.auto_delete) + self.assertFalse(q1.exchange.auto_delete) + + q2 = Queue.from_dict('foo', **dict(defs, + exchange_durable=False)) + self.assertTrue(q2.durable) + self.assertFalse(q2.exchange.durable) + + q3 = Queue.from_dict('foo', **dict(defs, + exchange_auto_delete=True)) + self.assertFalse(q3.auto_delete) + self.assertTrue(q3.exchange.auto_delete) + + q4 = Queue.from_dict('foo', **dict(defs, + queue_durable=False)) + self.assertFalse(q4.durable) + self.assertTrue(q4.exchange.durable) + + q5 = Queue.from_dict('foo', **dict(defs, + queue_auto_delete=True)) + self.assertTrue(q5.auto_delete) + self.assertFalse(q5.exchange.auto_delete) + + self.assertEqual(Queue.from_dict('foo', **dict(defs)), + Queue.from_dict('foo', **dict(defs))) + + +class test_Publisher(TestCase): + + def setUp(self): + self.connection = Connection(transport=Transport) + + def test_constructor(self): + pub = compat.Publisher(self.connection, + exchange='test_Publisher_constructor', + routing_key='rkey') + self.assertIsInstance(pub.backend, Channel) + self.assertEqual(pub.exchange.name, 'test_Publisher_constructor') + self.assertTrue(pub.exchange.durable) + self.assertFalse(pub.exchange.auto_delete) + self.assertEqual(pub.exchange.type, 'direct') + + pub2 = compat.Publisher(self.connection, + exchange='test_Publisher_constructor2', + routing_key='rkey', + auto_delete=True, + durable=False) + self.assertTrue(pub2.exchange.auto_delete) + self.assertFalse(pub2.exchange.durable) + + explicit = Exchange('test_Publisher_constructor_explicit', + type='topic') + pub3 = compat.Publisher(self.connection, + exchange=explicit) + self.assertEqual(pub3.exchange, explicit) + + compat.Publisher(self.connection, + exchange='test_Publisher_constructor3', + channel=self.connection.default_channel) + + def test_send(self): + pub = compat.Publisher(self.connection, + exchange='test_Publisher_send', + routing_key='rkey') + pub.send({'foo': 'bar'}) + self.assertIn('basic_publish', pub.backend) + pub.close() + + def test__enter__exit__(self): + pub = compat.Publisher(self.connection, + exchange='test_Publisher_send', + routing_key='rkey') + x = pub.__enter__() + self.assertIs(x, pub) + x.__exit__() + self.assertTrue(pub._closed) + + +class test_Consumer(TestCase): + + def setUp(self): 
+ self.connection = Connection(transport=Transport) + + @patch('kombu.compat._iterconsume') + def test_iterconsume_calls__iterconsume(self, it, n='test_iterconsume'): + c = compat.Consumer(self.connection, queue=n, exchange=n) + c.iterconsume(limit=10, no_ack=True) + it.assert_called_with(c.connection, c, True, 10) + + def test_constructor(self, n='test_Consumer_constructor'): + c = compat.Consumer(self.connection, queue=n, exchange=n, + routing_key='rkey') + self.assertIsInstance(c.backend, Channel) + q = c.queues[0] + self.assertTrue(q.durable) + self.assertTrue(q.exchange.durable) + self.assertFalse(q.auto_delete) + self.assertFalse(q.exchange.auto_delete) + self.assertEqual(q.name, n) + self.assertEqual(q.exchange.name, n) + + c2 = compat.Consumer(self.connection, queue=n + '2', + exchange=n + '2', + routing_key='rkey', durable=False, + auto_delete=True, exclusive=True) + q2 = c2.queues[0] + self.assertFalse(q2.durable) + self.assertFalse(q2.exchange.durable) + self.assertTrue(q2.auto_delete) + self.assertTrue(q2.exchange.auto_delete) + + def test__enter__exit__(self, n='test__enter__exit__'): + c = compat.Consumer(self.connection, queue=n, exchange=n, + routing_key='rkey') + x = c.__enter__() + self.assertIs(x, c) + x.__exit__() + self.assertTrue(c._closed) + + def test_revive(self, n='test_revive'): + c = compat.Consumer(self.connection, queue=n, exchange=n) + + with self.connection.channel() as c2: + c.revive(c2) + self.assertIs(c.backend, c2) + + def test__iter__(self, n='test__iter__'): + c = compat.Consumer(self.connection, queue=n, exchange=n) + c.iterqueue = Mock() + + c.__iter__() + c.iterqueue.assert_called_with(infinite=True) + + def test_iter(self, n='test_iterqueue'): + c = compat.Consumer(self.connection, queue=n, exchange=n, + routing_key='rkey') + c.close() + + def test_process_next(self, n='test_process_next'): + c = compat.Consumer(self.connection, queue=n, exchange=n, + routing_key='rkey') + with self.assertRaises(NotImplementedError): + c.process_next() + c.close() + + def test_iterconsume(self, n='test_iterconsume'): + c = compat.Consumer(self.connection, queue=n, exchange=n, + routing_key='rkey') + c.close() + + def test_discard_all(self, n='test_discard_all'): + c = compat.Consumer(self.connection, queue=n, exchange=n, + routing_key='rkey') + c.discard_all() + self.assertIn('queue_purge', c.backend) + + def test_fetch(self, n='test_fetch'): + c = compat.Consumer(self.connection, queue=n, exchange=n, + routing_key='rkey') + self.assertIsNone(c.fetch()) + self.assertIsNone(c.fetch(no_ack=True)) + self.assertIn('basic_get', c.backend) + + callback_called = [False] + + def receive(payload, message): + callback_called[0] = True + + c.backend.to_deliver.append('42') + self.assertEqual(c.fetch().payload, '42') + c.backend.to_deliver.append('46') + c.register_callback(receive) + self.assertEqual(c.fetch(enable_callbacks=True).payload, '46') + self.assertTrue(callback_called[0]) + + def test_discard_all_filterfunc_not_supported(self, n='xjf21j21'): + c = compat.Consumer(self.connection, queue=n, exchange=n, + routing_key='rkey') + with self.assertRaises(NotImplementedError): + c.discard_all(filterfunc=lambda x: x) + c.close() + + def test_wait(self, n='test_wait'): + + class C(compat.Consumer): + + def iterconsume(self, limit=None): + for i in range(limit): + yield i + + c = C(self.connection, + queue=n, exchange=n, routing_key='rkey') + self.assertEqual(c.wait(10), range(10)) + c.close() + + def test_iterqueue(self, n='test_iterqueue'): + i = [0] + + class 
C(compat.Consumer): + + def fetch(self, limit=None): + z = i[0] + i[0] += 1 + return z + + c = C(self.connection, + queue=n, exchange=n, routing_key='rkey') + self.assertEqual(list(c.iterqueue(limit=10)), range(10)) + c.close() + + +class test_ConsumerSet(TestCase): + + def setUp(self): + self.connection = Connection(transport=Transport) + + @patch('kombu.compat._iterconsume') + def test_iterconsume(self, _iterconsume, n='test_iterconsume'): + c = compat.Consumer(self.connection, queue=n, exchange=n) + cs = compat.ConsumerSet(self.connection, consumers=[c]) + cs.iterconsume(limit=10, no_ack=True) + _iterconsume.assert_called_with(c.connection, cs, True, 10) + + def test_revive(self, n='test_revive'): + c = compat.Consumer(self.connection, queue=n, exchange=n) + cs = compat.ConsumerSet(self.connection, consumers=[c]) + + with self.connection.channel() as c2: + cs.revive(c2) + self.assertIs(cs.backend, c2) + + def test_constructor(self, prefix='0daf8h21'): + dcon = {'%s.xyx' % prefix: {'exchange': '%s.xyx' % prefix, + 'routing_key': 'xyx'}, + '%s.xyz' % prefix: {'exchange': '%s.xyz' % prefix, + 'routing_key': 'xyz'}} + consumers = [compat.Consumer(self.connection, queue=prefix + str(i), + exchange=prefix + str(i)) + for i in range(3)] + c = compat.ConsumerSet(self.connection, consumers=consumers) + c2 = compat.ConsumerSet(self.connection, from_dict=dcon) + + self.assertEqual(len(c.queues), 3) + self.assertEqual(len(c2.queues), 2) + + c.add_consumer(compat.Consumer(self.connection, + queue=prefix + 'xaxxxa', + exchange=prefix + 'xaxxxa')) + self.assertEqual(len(c.queues), 4) + for cq in c.queues: + self.assertIs(cq.channel, c.channel) + + c2.add_consumer_from_dict({ + '%s.xxx' % prefix: { + 'exchange': '%s.xxx' % prefix, + 'routing_key': 'xxx', + }, + }) + self.assertEqual(len(c2.queues), 3) + for c2q in c2.queues: + self.assertIs(c2q.channel, c2.channel) + + c.discard_all() + self.assertEqual(c.channel.called.count('queue_purge'), 4) + c.consume() + + c.close() + c2.close() + self.assertIn('basic_cancel', c.channel) + self.assertIn('close', c.channel) + self.assertIn('close', c2.channel) diff --git a/awx/lib/site-packages/kombu/tests/test_compression.py b/awx/lib/site-packages/kombu/tests/test_compression.py new file mode 100644 index 0000000000..2df3b38854 --- /dev/null +++ b/awx/lib/site-packages/kombu/tests/test_compression.py @@ -0,0 +1,53 @@ +from __future__ import absolute_import + +import sys + +from nose import SkipTest + +from kombu import compression + +from .utils import TestCase +from .utils import mask_modules + + +class test_compression(TestCase): + + def setUp(self): + try: + import bz2 # noqa + except ImportError: + self.has_bzip2 = False + else: + self.has_bzip2 = True + + @mask_modules('bz2') + def test_no_bz2(self): + c = sys.modules.pop('kombu.compression') + try: + import kombu.compression + self.assertFalse(hasattr(kombu.compression, 'bz2')) + finally: + if c is not None: + sys.modules['kombu.compression'] = c + + def test_encoders(self): + encoders = compression.encoders() + self.assertIn('application/x-gzip', encoders) + if self.has_bzip2: + self.assertIn('application/x-bz2', encoders) + + def test_compress__decompress__zlib(self): + text = 'The Quick Brown Fox Jumps Over The Lazy Dog' + c, ctype = compression.compress(text, 'zlib') + self.assertNotEqual(text, c) + d = compression.decompress(c, ctype) + self.assertEqual(d, text) + + def test_compress__decompress__bzip2(self): + if not self.has_bzip2: + raise SkipTest('bzip2 not available') + text = 'The Brown Quick 
Fox Over The Lazy Dog Jumps' + c, ctype = compression.compress(text, 'bzip2') + self.assertNotEqual(text, c) + d = compression.decompress(c, ctype) + self.assertEqual(d, text) diff --git a/awx/lib/site-packages/kombu/tests/test_connection.py b/awx/lib/site-packages/kombu/tests/test_connection.py new file mode 100644 index 0000000000..f3c4120a81 --- /dev/null +++ b/awx/lib/site-packages/kombu/tests/test_connection.py @@ -0,0 +1,658 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import errno +import pickle +import socket + +from copy import copy +from mock import patch +from nose import SkipTest + +from kombu import Connection, Consumer, Producer, parse_url +from kombu.connection import Resource + +from .mocks import Transport +from .utils import TestCase + +from .utils import Mock, skip_if_not_module + + +class test_connection_utils(TestCase): + + def setUp(self): + self.url = 'amqp://user:pass@localhost:5672/my/vhost' + self.nopass = 'amqp://user@localhost:5672/my/vhost' + self.expected = { + 'transport': 'amqp', + 'userid': 'user', + 'password': 'pass', + 'hostname': 'localhost', + 'port': 5672, + 'virtual_host': 'my/vhost', + } + + def test_parse_url(self): + result = parse_url(self.url) + self.assertDictEqual(result, self.expected) + + def test_parse_url_mongodb(self): + result = parse_url('mongodb://example.com/') + self.assertEqual(result['hostname'], 'example.com/') + + def test_parse_generated_as_uri(self): + conn = Connection(self.url) + info = conn.info() + for k, v in self.expected.items(): + self.assertEqual(info[k], v) + # by default almost the same- no password + self.assertEqual(conn.as_uri(), self.nopass) + self.assertEqual(conn.as_uri(include_password=True), self.url) + + @skip_if_not_module('pymongo') + def test_as_uri_when_mongodb(self): + x = Connection('mongodb://localhost') + self.assertTrue(x.as_uri()) + + def test_bogus_scheme(self): + with self.assertRaises(KeyError): + Connection('bogus://localhost:7421').transport + + def assert_info(self, conn, **fields): + info = conn.info() + for field, expected in fields.iteritems(): + self.assertEqual(info[field], expected) + + def test_rabbitmq_example_urls(self): + # see Appendix A of http://www.rabbitmq.com/uri-spec.html + + self.assert_info( + Connection('amqp://user:pass@host:10000/vhost'), + userid='user', password='pass', hostname='host', + port=10000, virtual_host='vhost', + ) + + self.assert_info( + Connection('amqp://user%61:%61pass@ho%61st:10000/v%2fhost'), + userid='usera', password='apass', hostname='hoast', + port=10000, virtual_host='v/host', + ) + + self.assert_info( + Connection('amqp://'), + userid='guest', password='guest', hostname='localhost', + port=5672, virtual_host='/', + ) + + self.assert_info( + Connection('amqp://:@/'), + userid='guest', password='guest', hostname='localhost', + port=5672, virtual_host='/', + ) + + self.assert_info( + Connection('amqp://user@/'), + userid='user', password='guest', hostname='localhost', + port=5672, virtual_host='/', + ) + + self.assert_info( + Connection('amqp://user:pass@/'), + userid='user', password='pass', hostname='localhost', + port=5672, virtual_host='/', + ) + + self.assert_info( + Connection('amqp://host'), + userid='guest', password='guest', hostname='host', + port=5672, virtual_host='/', + ) + + self.assert_info( + Connection('amqp://:10000'), + userid='guest', password='guest', hostname='localhost', + port=10000, virtual_host='/', + ) + + self.assert_info( + Connection('amqp:///vhost'), + userid='guest', 
password='guest', hostname='localhost', + port=5672, virtual_host='vhost', + ) + + self.assert_info( + Connection('amqp://host/'), + userid='guest', password='guest', hostname='host', + port=5672, virtual_host='/', + ) + + self.assert_info( + Connection('amqp://host/%2f'), + userid='guest', password='guest', hostname='host', + port=5672, virtual_host='/', + ) + + def test_url_IPV6(self): + raise SkipTest("urllib can't parse ipv6 urls") + + self.assert_info( + Connection('amqp://[::1]'), + userid='guest', password='guest', hostname='[::1]', + port=5672, virtual_host='/', + ) + + +class test_Connection(TestCase): + + def setUp(self): + self.conn = Connection(port=5672, transport=Transport) + + def test_establish_connection(self): + conn = self.conn + conn.connect() + self.assertTrue(conn.connection.connected) + self.assertEqual(conn.host, 'localhost:5672') + channel = conn.channel() + self.assertTrue(channel.open) + self.assertEqual(conn.drain_events(), 'event') + _connection = conn.connection + conn.close() + self.assertFalse(_connection.connected) + self.assertIsInstance(conn.transport, Transport) + + def test_multiple_urls(self): + conn1 = Connection('amqp://foo;amqp://bar') + self.assertEqual(conn1.hostname, 'foo') + self.assertListEqual(conn1.alt, ['amqp://foo', 'amqp://bar']) + + conn2 = Connection(['amqp://foo', 'amqp://bar']) + self.assertEqual(conn2.hostname, 'foo') + self.assertListEqual(conn2.alt, ['amqp://foo', 'amqp://bar']) + + def test_uri_passthrough(self): + from kombu import connection as mod + prev, mod.URI_PASSTHROUGH = mod.URI_PASSTHROUGH, set(['foo']) + try: + with patch('kombu.connection.parse_url') as parse_url: + c = Connection('foo+mysql://some_host') + self.assertEqual(c.transport_cls, 'foo') + self.assertFalse(parse_url.called) + self.assertEqual(c.hostname, 'mysql://some_host') + self.assertTrue(c.as_uri().startswith('foo+')) + with patch('kombu.connection.parse_url') as parse_url: + c = Connection('mysql://some_host', transport='foo') + self.assertEqual(c.transport_cls, 'foo') + self.assertFalse(parse_url.called) + self.assertEqual(c.hostname, 'mysql://some_host') + finally: + mod.URI_PASSTHROUGH = prev + c = Connection('amqp+sqlite://some_host') + self.assertTrue(c.as_uri().startswith('amqp+')) + + def test_default_ensure_callback(self): + with patch('kombu.connection.logger') as logger: + c = Connection(transport=Mock) + c._default_ensure_callback(KeyError(), 3) + self.assertTrue(logger.error.called) + + def test_ensure_connection_on_error(self): + c = Connection('amqp://A;amqp://B') + with patch('kombu.connection.retry_over_time') as rot: + c.ensure_connection() + self.assertTrue(rot.called) + + args = rot.call_args[0] + cb = args[4] + intervals = iter([1, 2, 3, 4, 5]) + self.assertEqual(cb(KeyError(), intervals, 0), 0) + self.assertEqual(cb(KeyError(), intervals, 1), 1) + self.assertEqual(cb(KeyError(), intervals, 2), 0) + self.assertEqual(cb(KeyError(), intervals, 3), 2) + self.assertEqual(cb(KeyError(), intervals, 4), 0) + self.assertEqual(cb(KeyError(), intervals, 5), 3) + self.assertEqual(cb(KeyError(), intervals, 6), 0) + self.assertEqual(cb(KeyError(), intervals, 7), 4) + + errback = Mock() + c.ensure_connection(errback=errback) + args = rot.call_args[0] + cb = args[4] + self.assertEqual(cb(KeyError(), intervals, 0), 0) + self.assertTrue(errback.called) + + def test_drain_nowait(self): + c = Connection(transport=Mock) + c.drain_events = Mock() + c.drain_events.side_effect = socket.timeout() + + c.more_to_read = True + self.assertFalse(c.drain_nowait()) 
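The URL forms asserted above boil down to two client-side behaviours: parse_url() splits a broker URL into its components, and a semicolon-separated URL (or a list of URLs) provides failover alternates. A sketch mirroring the assertions:

    from kombu import Connection, parse_url

    parse_url('amqp://user:pass@localhost:5672/my/vhost')
    # -> {'transport': 'amqp', 'userid': 'user', 'password': 'pass',
    #     'hostname': 'localhost', 'port': 5672, 'virtual_host': 'my/vhost'}

    conn = Connection('amqp://foo;amqp://bar')   # first URL wins, rest are alternates
    assert conn.hostname == 'foo'
    assert conn.alt == ['amqp://foo', 'amqp://bar']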
+ self.assertFalse(c.more_to_read) + + c.drain_events.side_effect = socket.error() + c.drain_events.side_effect.errno = errno.EAGAIN + c.more_to_read = True + self.assertFalse(c.drain_nowait()) + self.assertFalse(c.more_to_read) + + c.drain_events.side_effect = socket.error() + c.drain_events.side_effect.errno = errno.EPERM + with self.assertRaises(socket.error): + c.drain_nowait() + + c.more_to_read = False + c.drain_events = Mock() + self.assertTrue(c.drain_nowait()) + c.drain_events.assert_called_with(timeout=0) + self.assertTrue(c.more_to_read) + + def test_supports_heartbeats(self): + c = Connection(transport=Mock) + c.transport.supports_heartbeats = False + self.assertFalse(c.supports_heartbeats) + + def test_is_evented(self): + c = Connection(transport=Mock) + c.transport.supports_ev = False + self.assertFalse(c.is_evented) + + def test_eventmap(self): + c = Connection(transport=Mock) + c.transport.eventmap.return_value = {1: 1, 2: 2} + self.assertDictEqual(c.eventmap, {1: 1, 2: 2}) + c.transport.eventmap.assert_called_with(c.connection) + + def test_manager(self): + c = Connection(transport=Mock) + self.assertIs(c.manager, c.transport.manager) + + def test_copy(self): + c = Connection('amqp://example.com') + self.assertEqual(copy(c).info(), c.info()) + + def test_switch(self): + c = Connection('amqp://foo') + c._closed = True + c.switch('redis://example.com//3') + self.assertFalse(c._closed) + self.assertEqual(c.hostname, 'example.com') + self.assertEqual(c.transport_cls, 'redis') + self.assertEqual(c.virtual_host, '/3') + + def test_maybe_switch_next(self): + c = Connection('amqp://foo;redis://example.com//3') + c.maybe_switch_next() + self.assertFalse(c._closed) + self.assertEqual(c.hostname, 'example.com') + self.assertEqual(c.transport_cls, 'redis') + self.assertEqual(c.virtual_host, '/3') + + def test_maybe_switch_next_no_cycle(self): + c = Connection('amqp://foo') + c.maybe_switch_next() + self.assertFalse(c._closed) + self.assertEqual(c.hostname, 'foo') + self.assertIn(c.transport_cls, ('librabbitmq', 'pyamqp', 'amqp')) + + def test_heartbeat_check(self): + c = Connection(transport=Transport) + c.transport.heartbeat_check = Mock() + c.heartbeat_check(3) + c.transport.heartbeat_check.assert_called_with(c.connection, rate=3) + + def test_completes_cycle_no_cycle(self): + c = Connection('amqp://') + self.assertTrue(c.completes_cycle(0)) + self.assertTrue(c.completes_cycle(1)) + + def test_completes_cycle(self): + c = Connection('amqp://a;amqp://b;amqp://c') + self.assertFalse(c.completes_cycle(0)) + self.assertFalse(c.completes_cycle(1)) + self.assertTrue(c.completes_cycle(2)) + + def test__enter____exit__(self): + conn = self.conn + context = conn.__enter__() + self.assertIs(context, conn) + conn.connect() + self.assertTrue(conn.connection.connected) + conn.__exit__() + self.assertIsNone(conn.connection) + conn.close() # again + + def test_close_survives_connerror(self): + + class _CustomError(Exception): + pass + + class MyTransport(Transport): + connection_errors = (_CustomError, ) + + def close_connection(self, connection): + raise _CustomError('foo') + + conn = Connection(transport=MyTransport) + conn.connect() + conn.close() + self.assertTrue(conn._closed) + + def test_close_when_default_channel(self): + conn = self.conn + conn._default_channel = Mock() + conn._close() + conn._default_channel.close.assert_called_with() + + def test_close_when_default_channel_close_raises(self): + + class Conn(Connection): + + @property + def connection_errors(self): + return (KeyError, ) 
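switch() and maybe_switch_next(), tested above, re-point an existing Connection at a different broker URL (clearing its closed flag) rather than constructing a new object. A sketch mirroring test_switch:

    from kombu import Connection

    conn = Connection('amqp://foo')
    conn.switch('redis://example.com//3')
    assert conn.transport_cls == 'redis'
    assert conn.hostname == 'example.com'
    assert conn.virtual_host == '/3'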
+ + conn = Conn('memory://') + conn._default_channel = Mock() + conn._default_channel.close.side_effect = KeyError() + + conn._close() + conn._default_channel.close.assert_called_with() + + def test_revive_when_default_channel(self): + conn = self.conn + defchan = conn._default_channel = Mock() + conn.revive(Mock()) + + defchan.close.assert_called_with() + self.assertIsNone(conn._default_channel) + + def test_ensure_connection(self): + self.assertTrue(self.conn.ensure_connection()) + + def test_ensure_success(self): + def publish(): + return 'foobar' + + ensured = self.conn.ensure(None, publish) + self.assertEqual(ensured(), 'foobar') + + def test_ensure_failure(self): + class _CustomError(Exception): + pass + + def publish(): + raise _CustomError('bar') + + ensured = self.conn.ensure(None, publish) + with self.assertRaises(_CustomError): + ensured() + + def test_ensure_connection_failure(self): + class _ConnectionError(Exception): + pass + + def publish(): + raise _ConnectionError('failed connection') + + self.conn.transport.connection_errors = (_ConnectionError,) + ensured = self.conn.ensure(self.conn, publish) + with self.assertRaises(_ConnectionError): + ensured() + + def test_autoretry(self): + myfun = Mock() + myfun.__name__ = 'test_autoretry' + + self.conn.transport.connection_errors = (KeyError, ) + + def on_call(*args, **kwargs): + myfun.side_effect = None + raise KeyError('foo') + + myfun.side_effect = on_call + insured = self.conn.autoretry(myfun) + insured() + + self.assertTrue(myfun.called) + + def test_SimpleQueue(self): + conn = self.conn + q = conn.SimpleQueue('foo') + self.assertIs(q.channel, conn.default_channel) + chan = conn.channel() + q2 = conn.SimpleQueue('foo', channel=chan) + self.assertIs(q2.channel, chan) + + def test_SimpleBuffer(self): + conn = self.conn + q = conn.SimpleBuffer('foo') + self.assertIs(q.channel, conn.default_channel) + chan = conn.channel() + q2 = conn.SimpleBuffer('foo', channel=chan) + self.assertIs(q2.channel, chan) + + def test_Producer(self): + conn = self.conn + self.assertIsInstance(conn.Producer(), Producer) + self.assertIsInstance(conn.Producer(conn.default_channel), Producer) + + def test_Consumer(self): + conn = self.conn + self.assertIsInstance(conn.Consumer(queues=[]), Consumer) + self.assertIsInstance(conn.Consumer(queues=[], + channel=conn.default_channel), Consumer) + + def test__repr__(self): + self.assertTrue(repr(self.conn)) + + def test__reduce__(self): + x = pickle.loads(pickle.dumps(self.conn)) + self.assertDictEqual(x.info(), self.conn.info()) + + def test_channel_errors(self): + + class MyTransport(Transport): + channel_errors = (KeyError, ValueError) + + conn = Connection(transport=MyTransport) + self.assertTupleEqual(conn.channel_errors, (KeyError, ValueError)) + + def test_connection_errors(self): + + class MyTransport(Transport): + connection_errors = (KeyError, ValueError) + + conn = Connection(transport=MyTransport) + self.assertTupleEqual(conn.connection_errors, (KeyError, ValueError)) + + +class test_Connection_with_transport_options(TestCase): + + transport_options = {'pool_recycler': 3600, 'echo': True} + + def setUp(self): + self.conn = Connection(port=5672, transport=Transport, + transport_options=self.transport_options) + + def test_establish_connection(self): + conn = self.conn + self.assertEqual(conn.transport_options, self.transport_options) + + +class xResource(Resource): + + def setup(self): + pass + + +class ResourceCase(TestCase): + abstract = True + + def create_resource(self, limit, preload): + raise 
NotImplementedError('subclass responsibility') + + def assertState(self, P, avail, dirty): + self.assertEqual(P._resource.qsize(), avail) + self.assertEqual(len(P._dirty), dirty) + + def test_setup(self): + if self.abstract: + with self.assertRaises(NotImplementedError): + Resource() + + def test_acquire__release(self): + if self.abstract: + return + P = self.create_resource(10, 0) + self.assertState(P, 10, 0) + chans = [P.acquire() for _ in xrange(10)] + self.assertState(P, 0, 10) + with self.assertRaises(P.LimitExceeded): + P.acquire() + chans.pop().release() + self.assertState(P, 1, 9) + [chan.release() for chan in chans] + self.assertState(P, 10, 0) + + def test_acquire_prepare_raises(self): + if self.abstract: + return + P = self.create_resource(10, 0) + + self.assertEqual(len(P._resource.queue), 10) + P.prepare = Mock() + P.prepare.side_effect = IOError() + with self.assertRaises(IOError): + P.acquire(block=True) + self.assertEqual(len(P._resource.queue), 10) + + def test_acquire_no_limit(self): + if self.abstract: + return + P = self.create_resource(None, 0) + P.acquire().release() + + def test_replace_when_limit(self): + if self.abstract: + return + P = self.create_resource(10, 0) + r = P.acquire() + P._dirty = Mock() + P.close_resource = Mock() + + P.replace(r) + P._dirty.discard.assert_called_with(r) + P.close_resource.assert_called_with(r) + + def test_replace_no_limit(self): + if self.abstract: + return + P = self.create_resource(None, 0) + r = P.acquire() + P._dirty = Mock() + P.close_resource = Mock() + + P.replace(r) + self.assertFalse(P._dirty.discard.called) + P.close_resource.assert_called_with(r) + + def test_interface_prepare(self): + if not self.abstract: + return + x = xResource() + self.assertEqual(x.prepare(10), 10) + + def test_force_close_all_handles_AttributeError(self): + if self.abstract: + return + P = self.create_resource(10, 10) + cr = P.close_resource = Mock() + cr.side_effect = AttributeError('x') + + P.acquire() + self.assertTrue(P._dirty) + + P.force_close_all() + + def test_force_close_all_no_mutex(self): + if self.abstract: + return + P = self.create_resource(10, 10) + P.close_resource = Mock() + + m = P._resource = Mock() + m.mutex = None + m.queue.pop.side_effect = IndexError + + P.force_close_all() + + def test_add_when_empty(self): + if self.abstract: + return + P = self.create_resource(None, None) + P._resource.queue[:] = [] + self.assertFalse(P._resource.queue) + P._add_when_empty() + self.assertTrue(P._resource.queue) + + +class test_ConnectionPool(ResourceCase): + abstract = False + + def create_resource(self, limit, preload): + return Connection(port=5672, transport=Transport).Pool(limit, preload) + + def test_setup(self): + P = self.create_resource(10, 2) + q = P._resource.queue + self.assertIsNotNone(q[0]._connection) + self.assertIsNotNone(q[1]._connection) + self.assertIsNone(q[2]()._connection) + + def test_release_no__debug(self): + P = self.create_resource(10, 2) + R = Mock() + R._debug.side_effect = AttributeError() + P.release_resource(R) + + def test_setup_no_limit(self): + P = self.create_resource(None, None) + self.assertFalse(P._resource.queue) + self.assertIsNone(P.limit) + + def test_prepare_not_callable(self): + P = self.create_resource(None, None) + conn = Connection('memory://') + self.assertIs(P.prepare(conn), conn) + + def test_acquire_channel(self): + P = self.create_resource(10, 0) + with P.acquire_channel() as (conn, channel): + self.assertIs(channel, conn.default_channel) + + +class test_ChannelPool(ResourceCase): + 
abstract = False + + def create_resource(self, limit, preload): + return Connection(port=5672, transport=Transport) \ + .ChannelPool(limit, preload) + + def test_setup(self): + P = self.create_resource(10, 2) + q = P._resource.queue + self.assertTrue(q[0].basic_consume) + self.assertTrue(q[1].basic_consume) + with self.assertRaises(AttributeError): + getattr(q[2], 'basic_consume') + + def test_setup_no_limit(self): + P = self.create_resource(None, None) + self.assertFalse(P._resource.queue) + self.assertIsNone(P.limit) + + def test_prepare_not_callable(self): + P = self.create_resource(10, 0) + conn = Connection('memory://') + chan = conn.default_channel + self.assertIs(P.prepare(chan), chan) diff --git a/awx/lib/site-packages/kombu/tests/test_entities.py b/awx/lib/site-packages/kombu/tests/test_entities.py new file mode 100644 index 0000000000..8e89f84ea8 --- /dev/null +++ b/awx/lib/site-packages/kombu/tests/test_entities.py @@ -0,0 +1,331 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import pickle + +from mock import call + +from kombu import Connection, Exchange, Queue, binding +from kombu.exceptions import NotBoundError + +from .mocks import Transport +from .utils import TestCase +from .utils import Mock + + +def get_conn(): + return Connection(transport=Transport) + + +class test_binding(TestCase): + + def test_constructor(self): + x = binding( + Exchange('foo'), 'rkey', + arguments={'barg': 'bval'}, + unbind_arguments={'uarg': 'uval'}, + ) + self.assertEqual(x.exchange, Exchange('foo')) + self.assertEqual(x.routing_key, 'rkey') + self.assertDictEqual(x.arguments, {'barg': 'bval'}) + self.assertDictEqual(x.unbind_arguments, {'uarg': 'uval'}) + + def test_declare(self): + chan = get_conn().channel() + x = binding(Exchange('foo'), 'rkey') + x.declare(chan) + self.assertIn('exchange_declare', chan) + + def test_declare_no_exchange(self): + chan = get_conn().channel() + x = binding() + x.declare(chan) + self.assertNotIn('exchange_declare', chan) + + def test_bind(self): + chan = get_conn().channel() + x = binding(Exchange('foo')) + x.bind(Exchange('bar')(chan)) + self.assertIn('exchange_bind', chan) + + def test_unbind(self): + chan = get_conn().channel() + x = binding(Exchange('foo')) + x.unbind(Exchange('bar')(chan)) + self.assertIn('exchange_unbind', chan) + + def test_repr(self): + b = binding(Exchange('foo'), 'rkey') + self.assertIn('foo', repr(b)) + self.assertIn('rkey', repr(b)) + + +class test_Exchange(TestCase): + + def test_bound(self): + exchange = Exchange('foo', 'direct') + self.assertFalse(exchange.is_bound) + self.assertIn('= (3, 0): + from io import StringIO, BytesIO +else: + from StringIO import StringIO, StringIO as BytesIO # noqa + +from kombu import utils +from kombu.utils.compat import next + +from .utils import ( + TestCase, + redirect_stdouts, mask_modules, module_exists, skip_if_module, +) + + +class OldString(object): + + def __init__(self, value): + self.value = value + + def __str__(self): + return self.value + + def split(self, *args, **kwargs): + return self.value.split(*args, **kwargs) + + def rsplit(self, *args, **kwargs): + return self.value.rsplit(*args, **kwargs) + + +class test_kombu_module(TestCase): + + def test_dir(self): + import kombu + self.assertTrue(dir(kombu)) + + +class test_utils(TestCase): + + def test_maybe_list(self): + self.assertEqual(utils.maybe_list(None), []) + self.assertEqual(utils.maybe_list(1), [1]) + self.assertEqual(utils.maybe_list([1, 2, 3]), [1, 2, 3]) + + def 
test_fxrange_no_repeatlast(self): + self.assertEqual(list(utils.fxrange(1.0, 3.0, 1.0)), + [1.0, 2.0, 3.0]) + + def test_fxrangemax(self): + self.assertEqual(list(utils.fxrangemax(1.0, 3.0, 1.0, 30.0)), + [1.0, 2.0, 3.0, 3.0, 3.0, 3.0, + 3.0, 3.0, 3.0, 3.0, 3.0]) + self.assertEqual(list(utils.fxrangemax(1.0, None, 1.0, 30.0)), + [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0]) + + def test_reprkwargs(self): + self.assertTrue(utils.reprkwargs({'foo': 'bar', 1: 2, u'k': 'v'})) + + def test_reprcall(self): + self.assertTrue( + utils.reprcall('add', (2, 2), {'copy': True}), + ) + + +class test_UUID(TestCase): + + def test_uuid4(self): + self.assertNotEqual(utils.uuid4(), + utils.uuid4()) + + def test_uuid(self): + i1 = utils.uuid() + i2 = utils.uuid() + self.assertIsInstance(i1, str) + self.assertNotEqual(i1, i2) + + @skip_if_module('__pypy__') + def test_uuid_without_ctypes(self): + old_utils = sys.modules.pop('kombu.utils') + + @mask_modules('ctypes') + def with_ctypes_masked(): + from kombu.utils import ctypes, uuid + + self.assertIsNone(ctypes) + tid = uuid() + self.assertTrue(tid) + self.assertIsInstance(tid, basestring) + + try: + with_ctypes_masked() + finally: + sys.modules['celery.utils'] = old_utils + + +class test_Misc(TestCase): + + def test_kwdict(self): + + def f(**kwargs): + return kwargs + + kw = {u'foo': 'foo', + u'bar': 'bar'} + self.assertTrue(f(**utils.kwdict(kw))) + + +class MyStringIO(StringIO): + + def close(self): + pass + + +class MyBytesIO(BytesIO): + + def close(self): + pass + + +class test_emergency_dump_state(TestCase): + + @redirect_stdouts + def test_dump(self, stdout, stderr): + fh = MyBytesIO() + + utils.emergency_dump_state({'foo': 'bar'}, open_file=lambda n, m: fh) + self.assertDictEqual(pickle.loads(fh.getvalue()), {'foo': 'bar'}) + self.assertTrue(stderr.getvalue()) + self.assertFalse(stdout.getvalue()) + + @redirect_stdouts + def test_dump_second_strategy(self, stdout, stderr): + fh = MyStringIO() + + def raise_something(*args, **kwargs): + raise KeyError('foo') + + utils.emergency_dump_state( + {'foo': 'bar'}, + open_file=lambda n, m: fh, dump=raise_something, + ) + self.assertIn("'foo': 'bar'", fh.getvalue()) + self.assertTrue(stderr.getvalue()) + self.assertFalse(stdout.getvalue()) + + +def insomnia(fun): + + @wraps(fun) + def _inner(*args, **kwargs): + def mysleep(i): + pass + + prev_sleep = utils.sleep + utils.sleep = mysleep + try: + return fun(*args, **kwargs) + finally: + utils.sleep = prev_sleep + + return _inner + + +class test_retry_over_time(TestCase): + + def setUp(self): + self.index = 0 + + class Predicate(Exception): + pass + + def myfun(self): + if self.index < 9: + raise self.Predicate() + return 42 + + def errback(self, exc, intervals, retries): + interval = next(intervals) + sleepvals = (None, 2.0, 4.0, 6.0, 8.0, 10.0, 12.0, 14.0, 16.0, 16.0) + self.index += 1 + self.assertEqual(interval, sleepvals[self.index]) + return interval + + @insomnia + def test_simple(self): + prev_count, utils.count = utils.count, Mock() + try: + utils.count.return_value = range(1) + x = utils.retry_over_time(self.myfun, self.Predicate, + errback=None, interval_max=14) + self.assertIsNone(x) + utils.count.return_value = range(10) + cb = Mock() + x = utils.retry_over_time(self.myfun, self.Predicate, + errback=self.errback, callback=cb, + interval_max=14) + self.assertEqual(x, 42) + self.assertEqual(self.index, 9) + cb.assert_called_with() + finally: + utils.count = prev_count + + @insomnia + def test_retry_once(self): + self.assertRaises( + self.Predicate, 
utils.retry_over_time, + self.myfun, self.Predicate, + max_retries=1, errback=self.errback, interval_max=14, + ) + self.assertEqual(self.index, 2) + # no errback + self.assertRaises( + self.Predicate, utils.retry_over_time, + self.myfun, self.Predicate, + max_retries=1, errback=None, interval_max=14, + ) + + @insomnia + def test_retry_never(self): + self.assertRaises( + self.Predicate, utils.retry_over_time, + self.myfun, self.Predicate, + max_retries=0, errback=self.errback, interval_max=14, + ) + self.assertEqual(self.index, 1) + + +class test_cached_property(TestCase): + + def test_deleting(self): + + class X(object): + xx = False + + @utils.cached_property + def foo(self): + return 42 + + @foo.deleter # noqa + def foo(self, value): + self.xx = value + + x = X() + del(x.foo) + self.assertFalse(x.xx) + x.__dict__['foo'] = 'here' + del(x.foo) + self.assertEqual(x.xx, 'here') + + def test_when_access_from_class(self): + + class X(object): + xx = None + + @utils.cached_property + def foo(self): + return 42 + + @foo.setter # noqa + def foo(self, value): + self.xx = 10 + + desc = X.__dict__['foo'] + self.assertIs(X.foo, desc) + + self.assertIs(desc.__get__(None), desc) + self.assertIs(desc.__set__(None, 1), desc) + self.assertIs(desc.__delete__(None), desc) + self.assertTrue(desc.setter(1)) + + x = X() + x.foo = 30 + self.assertEqual(x.xx, 10) + + del(x.foo) + + +class test_symbol_by_name(TestCase): + + def test_instance_returns_instance(self): + instance = object() + self.assertIs(utils.symbol_by_name(instance), instance) + + def test_returns_default(self): + default = object() + self.assertIs( + utils.symbol_by_name('xyz.ryx.qedoa.weq:foz', default=default), + default, + ) + + def test_no_default(self): + with self.assertRaises(ImportError): + utils.symbol_by_name('xyz.ryx.qedoa.weq:foz') + + def test_imp_reraises_ValueError(self): + imp = Mock() + imp.side_effect = ValueError() + with self.assertRaises(ValueError): + utils.symbol_by_name('kombu.Connection', imp=imp) + + def test_package(self): + from kombu.entity import Exchange + self.assertIs( + utils.symbol_by_name('.entity:Exchange', package='kombu'), + Exchange, + ) + self.assertTrue(utils.symbol_by_name(':Consumer', package='kombu')) + + +class test_ChannelPromise(TestCase): + + def test_repr(self): + self.assertEqual( + repr(utils.ChannelPromise(lambda: 'foo')), + "", + ) + + +class test_entrypoints(TestCase): + + @mask_modules('pkg_resources') + def test_without_pkg_resources(self): + self.assertListEqual(list(utils.entrypoints('kombu.test')), []) + + @module_exists('pkg_resources') + def test_with_pkg_resources(self): + with patch('pkg_resources.iter_entry_points', create=True) as iterep: + eps = iterep.return_value = [Mock(), Mock()] + + self.assertTrue(list(utils.entrypoints('kombu.test'))) + iterep.assert_called_with('kombu.test') + eps[0].load.assert_called_with() + eps[1].load.assert_called_with() + + +class test_shufflecycle(TestCase): + + def test_shuffles(self): + prev_repeat, utils.repeat = utils.repeat, Mock() + try: + utils.repeat.return_value = range(10) + values = set(['A', 'B', 'C']) + cycle = utils.shufflecycle(values) + seen = set() + for i in xrange(10): + cycle.next() + utils.repeat.assert_called_with(None) + self.assertTrue(seen.issubset(values)) + with self.assertRaises(StopIteration): + cycle.next() + cycle.next() + finally: + utils.repeat = prev_repeat diff --git a/awx/lib/site-packages/kombu/tests/transport/__init__.py b/awx/lib/site-packages/kombu/tests/transport/__init__.py new file mode 100644 index 
0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/kombu/tests/transport/test_amqplib.py b/awx/lib/site-packages/kombu/tests/transport/test_amqplib.py new file mode 100644 index 0000000000..185cc3490c --- /dev/null +++ b/awx/lib/site-packages/kombu/tests/transport/test_amqplib.py @@ -0,0 +1,165 @@ +from __future__ import absolute_import + +import sys + +from nose import SkipTest + +from kombu import Connection + +from kombu.tests.utils import TestCase +from kombu.tests.utils import mask_modules, Mock + + +class MockConnection(dict): + + def __setattr__(self, key, value): + self[key] = value + +try: + __import__('amqplib') +except ImportError: + amqplib = Channel = None +else: + from kombu.transport import amqplib + + class Channel(amqplib.Channel): + wait_returns = [] + + def _x_open(self, *args, **kwargs): + pass + + def wait(self, *args, **kwargs): + return self.wait_returns + + def _send_method(self, *args, **kwargs): + pass + + +class amqplibCase(TestCase): + + def setUp(self): + if amqplib is None: + raise SkipTest('amqplib not installed') + self.setup() + + def setup(self): + pass + + +class test_Channel(amqplibCase): + + def setup(self): + self.conn = Mock() + self.conn.channels = {} + self.channel = Channel(self.conn, 0) + + def test_init(self): + self.assertFalse(self.channel.no_ack_consumers) + + def test_prepare_message(self): + self.assertTrue(self.channel.prepare_message( + 'foobar', 10, 'application/data', 'utf-8', + properties={}, + )) + + def test_message_to_python(self): + message = Mock() + message.headers = {} + message.properties = {} + self.assertTrue(self.channel.message_to_python(message)) + + def test_close_resolves_connection_cycle(self): + self.assertIsNotNone(self.channel.connection) + self.channel.close() + self.assertIsNone(self.channel.connection) + + def test_basic_consume_registers_ack_status(self): + self.channel.wait_returns = 'my-consumer-tag' + self.channel.basic_consume('foo', no_ack=True) + self.assertIn('my-consumer-tag', self.channel.no_ack_consumers) + + self.channel.wait_returns = 'other-consumer-tag' + self.channel.basic_consume('bar', no_ack=False) + self.assertNotIn('other-consumer-tag', self.channel.no_ack_consumers) + + self.channel.basic_cancel('my-consumer-tag') + self.assertNotIn('my-consumer-tag', self.channel.no_ack_consumers) + + +class test_Transport(amqplibCase): + + def setup(self): + self.connection = Connection('amqplib://') + self.transport = self.connection.transport + + def test_create_channel(self): + connection = Mock() + self.transport.create_channel(connection) + connection.channel.assert_called_with() + + def test_drain_events(self): + connection = Mock() + self.transport.drain_events(connection, timeout=10.0) + connection.drain_events.assert_called_with(timeout=10.0) + + def test_dnspython_localhost_resolve_bug(self): + + class Conn(object): + + def __init__(self, **kwargs): + vars(self).update(kwargs) + + self.transport.Connection = Conn + self.transport.client.hostname = 'localhost' + conn1 = self.transport.establish_connection() + self.assertEqual(conn1.host, '127.0.0.1:5672') + + self.transport.client.hostname = 'example.com' + conn2 = self.transport.establish_connection() + self.assertEqual(conn2.host, 'example.com:5672') + + def test_close_connection(self): + connection = Mock() + connection.client = Mock() + self.transport.close_connection(connection) + + self.assertIsNone(connection.client) + connection.close.assert_called_with() + + def test_verify_connection(self): + connection = Mock() + 
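One detail worth calling out from test_dnspython_localhost_resolve_bug above: when establishing a connection, the amqplib transport rewrites the hostname 'localhost' to '127.0.0.1' to sidestep a dnspython resolution bug, while any other hostname passes through unchanged. Illustrative only; this is not runnable without a broker and the amqplib package:

    from kombu import Connection

    conn = Connection('amqplib://localhost')
    # conn.connect() would yield a connection whose host is '127.0.0.1:5672';
    # 'amqplib://example.com' would connect to 'example.com:5672' unchanged.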
connection.channels = None + self.assertFalse(self.transport.verify_connection(connection)) + + connection.channels = {1: 1, 2: 2} + self.assertTrue(self.transport.verify_connection(connection)) + + @mask_modules('ssl') + def test_import_no_ssl(self): + pm = sys.modules.pop('kombu.transport.amqplib') + try: + from kombu.transport.amqplib import SSLError + self.assertEqual(SSLError.__module__, 'kombu.transport.amqplib') + finally: + if pm is not None: + sys.modules['kombu.transport.amqplib'] = pm + + +class test_amqplib(amqplibCase): + + def test_default_port(self): + + class Transport(amqplib.Transport): + Connection = MockConnection + + c = Connection(port=None, transport=Transport).connect() + self.assertEqual(c['host'], + '127.0.0.1:%s' % (Transport.default_port, )) + + def test_custom_port(self): + + class Transport(amqplib.Transport): + Connection = MockConnection + + c = Connection(port=1337, transport=Transport).connect() + self.assertEqual(c['host'], '127.0.0.1:1337') diff --git a/awx/lib/site-packages/kombu/tests/transport/test_base.py b/awx/lib/site-packages/kombu/tests/transport/test_base.py new file mode 100644 index 0000000000..0c4e52b7a8 --- /dev/null +++ b/awx/lib/site-packages/kombu/tests/transport/test_base.py @@ -0,0 +1,108 @@ +from __future__ import absolute_import +from __future__ import with_statement + +from kombu import Connection, Consumer, Exchange, Producer, Queue +from kombu.transport.base import Message, StdChannel, Transport + +from kombu.tests.utils import TestCase +from kombu.tests.utils import Mock + + +class test_StdChannel(TestCase): + + def setUp(self): + self.conn = Connection('memory://') + self.channel = self.conn.channel() + self.channel.queues.clear() + self.conn.connection.state.clear() + + def test_Consumer(self): + q = Queue('foo', Exchange('foo')) + print(self.channel.queues) + cons = self.channel.Consumer(q) + self.assertIsInstance(cons, Consumer) + self.assertIs(cons.channel, self.channel) + + def test_Producer(self): + prod = self.channel.Producer() + self.assertIsInstance(prod, Producer) + self.assertIs(prod.channel, self.channel) + + def test_interface_get_bindings(self): + with self.assertRaises(NotImplementedError): + StdChannel().get_bindings() + + def test_interface_after_reply_message_received(self): + self.assertIsNone( + StdChannel().after_reply_message_received(Queue('foo')), + ) + + +class test_Message(TestCase): + + def setUp(self): + self.conn = Connection('memory://') + self.channel = self.conn.channel() + self.message = Message(self.channel, delivery_tag=313) + + def test_ack_respects_no_ack_consumers(self): + self.channel.no_ack_consumers = set(['abc']) + self.message.delivery_info['consumer_tag'] = 'abc' + ack = self.channel.basic_ack = Mock() + + self.message.ack() + self.assertNotEqual(self.message._state, 'ACK') + self.assertFalse(ack.called) + + def test_ack_missing_consumer_tag(self): + self.channel.no_ack_consumers = set(['abc']) + self.message.delivery_info = {} + ack = self.channel.basic_ack = Mock() + + self.message.ack() + ack.assert_called_with(self.message.delivery_tag) + + def test_ack_not_no_ack(self): + self.channel.no_ack_consumers = set() + self.message.delivery_info['consumer_tag'] = 'abc' + ack = self.channel.basic_ack = Mock() + + self.message.ack() + ack.assert_called_with(self.message.delivery_tag) + + def test_ack_log_error_when_no_error(self): + ack = self.message.ack = Mock() + self.message.ack_log_error(Mock(), KeyError) + ack.assert_called_with() + + def test_ack_log_error_when_error(self): + ack = 
self.message.ack = Mock() + ack.side_effect = KeyError('foo') + logger = Mock() + self.message.ack_log_error(logger, KeyError) + ack.assert_called_with() + self.assertTrue(logger.critical.called) + self.assertIn("Couldn't ack", logger.critical.call_args[0][0]) + + +class test_interface(TestCase): + + def test_establish_connection(self): + with self.assertRaises(NotImplementedError): + Transport(None).establish_connection() + + def test_close_connection(self): + with self.assertRaises(NotImplementedError): + Transport(None).close_connection(None) + + def test_create_channel(self): + with self.assertRaises(NotImplementedError): + Transport(None).create_channel(None) + + def test_close_channel(self): + with self.assertRaises(NotImplementedError): + Transport(None).close_channel(None) + + def test_drain_events(self): + with self.assertRaises(NotImplementedError): + Transport(None).drain_events(None) diff --git a/awx/lib/site-packages/kombu/tests/transport/test_filesystem.py b/awx/lib/site-packages/kombu/tests/transport/test_filesystem.py new file mode 100644 index 0000000000..b1d7c0cdda --- /dev/null +++ b/awx/lib/site-packages/kombu/tests/transport/test_filesystem.py @@ -0,0 +1,123 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import tempfile + +from nose import SkipTest + +from kombu import Connection, Exchange, Queue, Consumer, Producer + +from kombu.tests.utils import TestCase + + +class test_FilesystemTransport(TestCase): + + def setUp(self): + try: + data_folder_in = tempfile.mkdtemp() + data_folder_out = tempfile.mkdtemp() + except Exception: + raise SkipTest('filesystem transport: cannot create tempfiles') + self.c = Connection(transport='filesystem', + transport_options={ + 'data_folder_in': data_folder_in, + 'data_folder_out': data_folder_out, + }) + self.p = Connection(transport='filesystem', + transport_options={ + 'data_folder_in': data_folder_out, + 'data_folder_out': data_folder_in, + }) + self.e = Exchange('test_transport_filesystem') + self.q = Queue('test_transport_filesystem', + exchange=self.e, + routing_key='test_transport_filesystem') + self.q2 = Queue('test_transport_filesystem2', + exchange=self.e, + routing_key='test_transport_filesystem2') + + def test_produce_consume_noack(self): + producer = Producer(self.p.channel(), self.e) + consumer = Consumer(self.c.channel(), self.q, no_ack=True) + + for i in range(10): + producer.publish({'foo': i}, + routing_key='test_transport_filesystem') + + _received = [] + + def callback(message_data, message): + _received.append(message) + + consumer.register_callback(callback) + consumer.consume() + + while 1: + if len(_received) == 10: + break + self.c.drain_events() + + self.assertEqual(len(_received), 10) + + def test_produce_consume(self): + producer_channel = self.p.channel() + consumer_channel = self.c.channel() + producer = Producer(producer_channel, self.e) + consumer1 = Consumer(consumer_channel, self.q) + consumer2 = Consumer(consumer_channel, self.q2) + self.q2(consumer_channel).declare() + + for i in range(10): + producer.publish({'foo': i}, + routing_key='test_transport_filesystem') + for i in range(10): + producer.publish({'foo': i}, + routing_key='test_transport_filesystem2') + + _received1 = [] + _received2 = [] + + def callback1(message_data, message): + _received1.append(message) + message.ack() + + def callback2(message_data, message): + _received2.append(message) + message.ack() + + consumer1.register_callback(callback1) + consumer2.register_callback(callback2) + + 
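A note on the pattern used throughout these transport tests: register callbacks, start the consumers, then call drain_events() in a loop until the expected number of messages has arrived, exactly as the loop just below does. A minimal sketch of that pattern as a reusable helper (the helper and its names are illustrative, not kombu API; it assumes a kombu Connection and a list the callbacks append to):

def drain_until(connection, received, expected, max_iterations=1000):
    """Drain events until `received` has collected `expected` messages."""
    # Bound the loop so a broken transport fails fast instead of hanging.
    for _ in range(max_iterations):
        if len(received) >= expected:
            return received
        connection.drain_events()
    raise AssertionError('gave up after %d iterations' % max_iterations)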
consumer1.consume() + consumer2.consume() + + while 1: + if len(_received1) + len(_received2) == 20: + break + self.c.drain_events() + + self.assertEqual(len(_received1) + len(_received2), 20) + + # compression + producer.publish({'compressed': True}, + routing_key='test_transport_filesystem', + compression='zlib') + m = self.q(consumer_channel).get() + self.assertDictEqual(m.payload, {'compressed': True}) + + # queue.delete + for i in range(10): + producer.publish({'foo': i}, + routing_key='test_transport_filesystem') + self.assertTrue(self.q(consumer_channel).get()) + self.q(consumer_channel).delete() + self.q(consumer_channel).declare() + self.assertIsNone(self.q(consumer_channel).get()) + + # queue.purge + for i in range(10): + producer.publish({'foo': i}, + routing_key='test_transport_filesystem2') + self.assertTrue(self.q2(consumer_channel).get()) + self.q2(consumer_channel).purge() + self.assertIsNone(self.q2(consumer_channel).get()) diff --git a/awx/lib/site-packages/kombu/tests/transport/test_memory.py b/awx/lib/site-packages/kombu/tests/transport/test_memory.py new file mode 100644 index 0000000000..d138e49429 --- /dev/null +++ b/awx/lib/site-packages/kombu/tests/transport/test_memory.py @@ -0,0 +1,135 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import socket + +from kombu import Connection, Exchange, Queue, Consumer, Producer + +from kombu.tests.utils import TestCase + + +class test_MemoryTransport(TestCase): + + def setUp(self): + self.c = Connection(transport='memory') + self.e = Exchange('test_transport_memory') + self.q = Queue('test_transport_memory', + exchange=self.e, + routing_key='test_transport_memory') + self.q2 = Queue('test_transport_memory2', + exchange=self.e, + routing_key='test_transport_memory2') + + def test_produce_consume_noack(self): + channel = self.c.channel() + producer = Producer(channel, self.e) + consumer = Consumer(channel, self.q, no_ack=True) + + for i in range(10): + producer.publish({'foo': i}, routing_key='test_transport_memory') + + _received = [] + + def callback(message_data, message): + _received.append(message) + + consumer.register_callback(callback) + consumer.consume() + + while 1: + if len(_received) == 10: + break + self.c.drain_events() + + self.assertEqual(len(_received), 10) + + def test_produce_consume(self): + channel = self.c.channel() + producer = Producer(channel, self.e) + consumer1 = Consumer(channel, self.q) + consumer2 = Consumer(channel, self.q2) + self.q2(channel).declare() + + for i in range(10): + producer.publish({'foo': i}, routing_key='test_transport_memory') + for i in range(10): + producer.publish({'foo': i}, routing_key='test_transport_memory2') + + _received1 = [] + _received2 = [] + + def callback1(message_data, message): + _received1.append(message) + message.ack() + + def callback2(message_data, message): + _received2.append(message) + message.ack() + + consumer1.register_callback(callback1) + consumer2.register_callback(callback2) + + consumer1.consume() + consumer2.consume() + + while 1: + if len(_received1) + len(_received2) == 20: + break + self.c.drain_events() + + self.assertEqual(len(_received1) + len(_received2), 20) + + # compression + producer.publish({'compressed': True}, + routing_key='test_transport_memory', + compression='zlib') + m = self.q(channel).get() + self.assertDictEqual(m.payload, {'compressed': True}) + + # queue.delete + for i in range(10): + producer.publish({'foo': i}, routing_key='test_transport_memory') + 
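The assertions that follow pin down the delete/purge distinction these transports must honour: queue.delete drops the queue together with its messages (a fresh declare then yields nothing), while queue.purge only empties the message list. A toy model of that bookkeeping, assuming a plain dict of in-memory queues (simplified; not kombu's actual channel code):

class ToyQueues(object):
    """Dict-backed stand-in for a virtual channel's queue storage."""

    def __init__(self):
        self.queues = {}

    def declare(self, name):
        self.queues.setdefault(name, [])

    def delete(self, name):
        self.queues.pop(name, None)   # queue and messages both disappear

    def purge(self, name):
        count = len(self.queues[name])
        del self.queues[name][:]      # messages go, the queue itself stays
        return count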
+        self.assertTrue(self.q(channel).get())
+        self.q(channel).delete()
+        self.q(channel).declare()
+        self.assertIsNone(self.q(channel).get())
+
+        # queue.purge
+        for i in range(10):
+            producer.publish({'foo': i}, routing_key='test_transport_memory2')
+        self.assertTrue(self.q2(channel).get())
+        self.q2(channel).purge()
+        self.assertIsNone(self.q2(channel).get())
+
+    def test_drain_events(self):
+        with self.assertRaises(socket.timeout):
+            self.c.drain_events(timeout=0.1)
+
+        c1 = self.c.channel()
+        c2 = self.c.channel()
+
+        with self.assertRaises(socket.timeout):
+            self.c.drain_events(timeout=0.1)
+
+        del(c1)  # so pyflakes doesn't complain.
+        del(c2)
+
+    def test_drain_events_unregistered_queue(self):
+        c1 = self.c.channel()
+
+        class Cycle(object):
+
+            def get(self, timeout=None):
+                return ('foo', 'foo'), c1
+
+        self.c.transport.cycle = Cycle()
+        with self.assertRaises(KeyError):
+            self.c.drain_events()
+
+    def test_queue_for(self):
+        chan = self.c.channel()
+        chan.queues.clear()
+
+        x = chan._queue_for('foo')
+        self.assertTrue(x)
+        self.assertIs(chan._queue_for('foo'), x)
diff --git a/awx/lib/site-packages/kombu/tests/transport/test_mongodb.py b/awx/lib/site-packages/kombu/tests/transport/test_mongodb.py
new file mode 100644
index 0000000000..522df04aef
--- /dev/null
+++ b/awx/lib/site-packages/kombu/tests/transport/test_mongodb.py
@@ -0,0 +1,92 @@
+from __future__ import absolute_import
+
+from nose import SkipTest
+
+from kombu import Connection
+
+from kombu.tests.utils import TestCase, skip_if_not_module
+
+
+class MockConnection(dict):
+
+    def __setattr__(self, key, value):
+        self[key] = value
+
+
+class test_mongodb(TestCase):
+
+    @skip_if_not_module('pymongo')
+    def test_url_parser(self):
+        from kombu.transport import mongodb
+        from pymongo.errors import ConfigurationError
+
+        raise SkipTest(
+            'Test is functional: it actually connects to mongod')
+
+        class Transport(mongodb.Transport):
+            Connection = MockConnection
+
+        url = 'mongodb://'
+        c = Connection(url, transport=Transport).connect()
+        client = c.channels[0].client
+        self.assertEquals(client.name, 'kombu_default')
+        self.assertEquals(client.connection.host, '127.0.0.1')
+
+        url = 'mongodb://localhost'
+        c = Connection(url, transport=Transport).connect()
+        client = c.channels[0].client
+        self.assertEquals(client.name, 'kombu_default')
+
+        url = 'mongodb://localhost/dbname'
+        c = Connection(url, transport=Transport).connect()
+        client = c.channels[0].client
+        self.assertEquals(client.name, 'dbname')
+
+        url = 'mongodb://localhost,localhost:29017/dbname'
+        c = Connection(url, transport=Transport).connect()
+        client = c.channels[0].client
+
+        nodes = client.connection.nodes
+        # If there's just 1 node it is because we're connecting to a single
+        # server instead of a replica set / mongos.
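Hedged aside: how many nodes the client reports for 'localhost,localhost:29017' depends on pymongo's own resolution logic, which is why the test tolerates both outcomes. For illustration only, a standalone sketch of extracting the host list from a MongoDB URL with nothing but string operations (a real parser must also handle options, escaping and IPv6 literals):

def mongo_nodes(url):
    # Strip the scheme, credentials and database name, then split the
    # remaining host list on commas; 27017 is MongoDB's default port.
    hosts = url[len('mongodb://'):].split('/', 1)[0]
    hosts = hosts.rsplit('@', 1)[-1]   # drop any user:password@ prefix
    nodes = []
    for host in hosts.split(','):
        name, _, port = host.partition(':')
        nodes.append((name, int(port) if port else 27017))
    return nodes

# mongo_nodes('mongodb://localhost,localhost:29017/dbname')
# -> [('localhost', 27017), ('localhost', 29017)]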
+        if len(nodes) == 2:
+            self.assertTrue(('localhost', 29017) in nodes)
+            self.assertEquals(client.name, 'dbname')
+
+        # Passing options breaks kombu's _init_params method
+        # url = 'mongodb://localhost,localhost2:29017/dbname?safe=true'
+        # c = Connection(url, transport=Transport).connect()
+        # client = c.channels[0].client
+
+        url = 'mongodb://localhost:27017,localhost2:29017/dbname'
+        c = Connection(url, transport=Transport).connect()
+        client = c.channels[0].client
+
+        # Log in to the admin db since there's no db specified
+        url = "mongodb://adminusername:adminpassword@localhost"
+        c = Connection(url, transport=Transport).connect()
+        client = c.channels[0].client
+        self.assertEquals(client.name, "kombu_default")
+
+        # Let's make sure that using the admin db doesn't break anything
+        # when no user is specified
+        url = "mongodb://localhost"
+        c = Connection(url, transport=Transport).connect()
+        client = c.channels[0].client
+
+        # Assuming there's a user 'username' with password 'password'
+        # configured in mongodb
+        url = "mongodb://username:password@localhost/dbname"
+        c = Connection(url, transport=Transport).connect()
+        client = c.channels[0].client
+
+        # Assuming there's no user 'nousername' with password 'nopassword'
+        # configured in mongodb
+        url = "mongodb://nousername:nopassword@localhost/dbname"
+        c = Connection(url, transport=Transport).connect()
+
+        # Needed, otherwise the error would be raised before
+        # assertRaises is called
+        def get_client():
+            c.channels[0].client
+        self.assertRaises(ConfigurationError, get_client)
diff --git a/awx/lib/site-packages/kombu/tests/transport/test_pyamqp.py b/awx/lib/site-packages/kombu/tests/transport/test_pyamqp.py
new file mode 100644
index 0000000000..745c014254
--- /dev/null
+++ b/awx/lib/site-packages/kombu/tests/transport/test_pyamqp.py
@@ -0,0 +1,181 @@
+from __future__ import absolute_import
+from __future__ import with_statement
+
+import sys
+
+from mock import patch
+from nose import SkipTest
+
+try:
+    import amqp  # noqa
+except ImportError:
+    pyamqp = None  # noqa
+else:
+    from kombu.transport import pyamqp
+from kombu import Connection
+
+from kombu.tests.utils import TestCase
+from kombu.tests.utils import mask_modules, Mock
+
+
+class MockConnection(dict):
+
+    def __setattr__(self, key, value):
+        self[key] = value
+
+
+class test_Channel(TestCase):
+
+    def setUp(self):
+        if pyamqp is None:
+            raise SkipTest('py-amqp not installed')
+
+        class Channel(pyamqp.Channel):
+            wait_returns = []
+
+            def _x_open(self, *args, **kwargs):
+                pass
+
+            def wait(self, *args, **kwargs):
+                return self.wait_returns
+
+            def _send_method(self, *args, **kwargs):
+                pass
+
+        self.conn = Mock()
+        self.conn.channels = {}
+        self.channel = Channel(self.conn, 0)
+
+    def test_init(self):
+        self.assertFalse(self.channel.no_ack_consumers)
+
+    def test_prepare_message(self):
+        self.assertTrue(self.channel.prepare_message(
+            'foobar', 10, 'application/data', 'utf-8',
+            properties={},
+        ))
+
+    def test_message_to_python(self):
+        message = Mock()
+        message.headers = {}
+        message.properties = {}
+        self.assertTrue(self.channel.message_to_python(message))
+
+    def test_close_resolves_connection_cycle(self):
+        self.assertIsNotNone(self.channel.connection)
+        self.channel.close()
+        self.assertIsNone(self.channel.connection)
+
+    def test_basic_consume_registers_ack_status(self):
+        self.channel.wait_returns = 'my-consumer-tag'
+        self.channel.basic_consume('foo', no_ack=True)
+        self.assertIn('my-consumer-tag', self.channel.no_ack_consumers)
+
+        self.channel.wait_returns =
'other-consumer-tag' + self.channel.basic_consume('bar', no_ack=False) + self.assertNotIn('other-consumer-tag', self.channel.no_ack_consumers) + + self.channel.basic_cancel('my-consumer-tag') + self.assertNotIn('my-consumer-tag', self.channel.no_ack_consumers) + + +class test_Transport(TestCase): + + def setUp(self): + if pyamqp is None: + raise SkipTest('py-amqp not installed') + self.connection = Connection('pyamqp://') + self.transport = self.connection.transport + + def test_create_channel(self): + connection = Mock() + self.transport.create_channel(connection) + connection.channel.assert_called_with() + + def test_drain_events(self): + connection = Mock() + self.transport.drain_events(connection, timeout=10.0) + connection.drain_events.assert_called_with(timeout=10.0) + + def test_dnspython_localhost_resolve_bug(self): + + class Conn(object): + + def __init__(self, **kwargs): + vars(self).update(kwargs) + + self.transport.Connection = Conn + self.transport.client.hostname = 'localhost' + conn1 = self.transport.establish_connection() + self.assertEqual(conn1.host, '127.0.0.1:5672') + + self.transport.client.hostname = 'example.com' + conn2 = self.transport.establish_connection() + self.assertEqual(conn2.host, 'example.com:5672') + + def test_close_connection(self): + connection = Mock() + connection.client = Mock() + self.transport.close_connection(connection) + + self.assertIsNone(connection.client) + connection.close.assert_called_with() + + @mask_modules('ssl') + def test_import_no_ssl(self): + pm = sys.modules.pop('amqp.connection') + try: + from amqp.connection import SSLError + self.assertEqual(SSLError.__module__, 'amqp.connection') + finally: + if pm is not None: + sys.modules['amqp.connection'] = pm + + +class test_pyamqp(TestCase): + + def setUp(self): + if pyamqp is None: + raise SkipTest('py-amqp not installed') + + def test_default_port(self): + + class Transport(pyamqp.Transport): + Connection = MockConnection + + c = Connection(port=None, transport=Transport).connect() + self.assertEqual(c['host'], + '127.0.0.1:%s' % (Transport.default_port, )) + + def test_custom_port(self): + + class Transport(pyamqp.Transport): + Connection = MockConnection + + c = Connection(port=1337, transport=Transport).connect() + self.assertEqual(c['host'], '127.0.0.1:1337') + + def test_eventmap(self): + t = pyamqp.Transport(Mock()) + conn = Mock() + self.assertDictEqual( + t.eventmap(conn), + {conn.sock: t.client.drain_nowait}, + ) + + def test_event_interface(self): + t = pyamqp.Transport(Mock()) + t.on_poll_init(Mock()) + t.on_poll_start() + + def test_heartbeat_check(self): + t = pyamqp.Transport(Mock()) + conn = Mock() + t.heartbeat_check(conn, rate=4.331) + conn.heartbeat_tick.assert_called_with(rate=4.331) + + def test_get_manager(self): + with patch('kombu.transport.pyamqp.get_manager') as get_manager: + t = pyamqp.Transport(Mock()) + t.get_manager(1, kw=2) + get_manager.assert_called_with(t.client, 1, kw=2) diff --git a/awx/lib/site-packages/kombu/tests/transport/test_redis.py b/awx/lib/site-packages/kombu/tests/transport/test_redis.py new file mode 100644 index 0000000000..90e3a318a9 --- /dev/null +++ b/awx/lib/site-packages/kombu/tests/transport/test_redis.py @@ -0,0 +1,805 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import socket +import types + +from anyjson import dumps +from collections import defaultdict +from itertools import count +from Queue import Empty, Queue as _Queue + +from kombu import Connection, Exchange, Queue, Consumer, 
Producer +from kombu.exceptions import InconsistencyError, VersionMismatch +from kombu.utils import eventio # patch poll + +from kombu.tests.utils import TestCase +from kombu.tests.utils import Mock, module_exists, skip_if_not_module + + +class _poll(eventio._select): + + def poll(self, timeout): + events = [] + for fd in self._rfd: + if fd.data: + events.append((fd.fileno(), eventio.READ)) + return events + + +eventio.poll = _poll +from kombu.transport import redis # must import after poller patch + + +class ResponseError(Exception): + pass + + +class Client(object): + queues = {} + sets = defaultdict(set) + hashes = defaultdict(dict) + shard_hint = None + + def __init__(self, db=None, port=None, connection_pool=None, **kwargs): + self._called = [] + self._connection = None + self.bgsave_raises_ResponseError = False + self.connection = self._sconnection(self) + + def bgsave(self): + self._called.append('BGSAVE') + if self.bgsave_raises_ResponseError: + raise ResponseError() + + def delete(self, key): + self.queues.pop(key, None) + + def exists(self, key): + return key in self.queues or key in self.sets + + def hset(self, key, k, v): + self.hashes[key][k] = v + + def hget(self, key, k): + return self.hashes[key].get(k) + + def hdel(self, key, k): + self.hashes[key].pop(k, None) + + def sadd(self, key, member, *args): + self.sets[key].add(member) + zadd = sadd + + def smembers(self, key): + return self.sets.get(key, set()) + + def srem(self, key, *args): + self.sets.pop(key, None) + zrem = srem + + def llen(self, key): + try: + return self.queues[key].qsize() + except KeyError: + return 0 + + def lpush(self, key, value): + self.queues[key].put_nowait(value) + + def parse_response(self, connection, type, **options): + cmd, queues = self.connection._sock.data.pop() + assert cmd == type + self.connection._sock.data = [] + if type == 'BRPOP': + item = self.brpop(queues, 0.001) + if item: + return item + raise Empty() + + def brpop(self, keys, timeout=None): + key = keys[0] + try: + item = self.queues[key].get(timeout=timeout) + except Empty: + pass + else: + return key, item + + def rpop(self, key): + try: + return self.queues[key].get_nowait() + except KeyError: + pass + + def __contains__(self, k): + return k in self._called + + def pipeline(self): + return Pipeline(self) + + def encode(self, value): + return str(value) + + def _new_queue(self, key): + self.queues[key] = _Queue() + + class _sconnection(object): + disconnected = False + + class _socket(object): + blocking = True + next_fileno = count(30).next + + def __init__(self, *args): + self._fileno = self.next_fileno() + self.data = [] + + def fileno(self): + return self._fileno + + def setblocking(self, blocking): + self.blocking = blocking + + def __init__(self, client): + self.client = client + self._sock = self._socket() + + def disconnect(self): + self.disconnected = True + + def send_command(self, cmd, *args): + self._sock.data.append((cmd, args)) + + def info(self): + return {'foo': 1} + + def pubsub(self, *args, **kwargs): + connection = self.connection + + class ConnectionPool(object): + + def get_connection(self, *args, **kwargs): + return connection + self.connection_pool = ConnectionPool() + + return self + + +class Pipeline(object): + + def __init__(self, client): + self.client = client + self.stack = [] + + def __getattr__(self, key): + if key not in self.__dict__: + + def _add(*args, **kwargs): + self.stack.append((getattr(self.client, key), args, kwargs)) + return self + + return _add + return self.__dict__[key] + + def 
execute(self): + stack = list(self.stack) + self.stack[:] = [] + return [fun(*args, **kwargs) for fun, args, kwargs in stack] + + +class Channel(redis.Channel): + + def _get_client(self): + return Client + + def _get_pool(self): + return Mock() + + def _get_response_error(self): + return ResponseError + + def _new_queue(self, queue, **kwargs): + self.client._new_queue(queue) + + def pipeline(self): + return Pipeline(Client()) + + +class Transport(redis.Transport): + Channel = Channel + + def _get_errors(self): + return ((KeyError, ), (IndexError, )) + + +class test_Channel(TestCase): + + def setUp(self): + self.connection = Connection(transport=Transport) + self.channel = self.connection.channel() + + def test_basic_consume_when_fanout_queue(self): + self.channel.exchange_declare(exchange='txconfan', type='fanout') + self.channel.queue_declare(queue='txconfanq') + self.channel.queue_bind(queue='txconfanq', exchange='txconfan') + + self.assertIn('txconfanq', self.channel._fanout_queues) + self.channel.basic_consume('txconfanq', False, None, 1) + self.assertIn('txconfanq', self.channel.active_fanout_queues) + self.assertEqual(self.channel._fanout_to_queue.get('txconfan'), + 'txconfanq') + + def test_basic_cancel_unknown_delivery_tag(self): + self.assertIsNone(self.channel.basic_cancel('txaseqwewq')) + + def test_subscribe_no_queues(self): + self.channel.subclient = Mock() + self.channel.active_fanout_queues.clear() + self.channel._subscribe() + + self.assertFalse(self.channel.subclient.subscribe.called) + + def test_subscribe(self): + self.channel.subclient = Mock() + self.channel.active_fanout_queues.add('a') + self.channel.active_fanout_queues.add('b') + self.channel._fanout_queues.update(a='a', b='b') + + self.channel._subscribe() + self.assertTrue(self.channel.subclient.subscribe.called) + s_args, _ = self.channel.subclient.subscribe.call_args + self.assertItemsEqual(s_args[0], ['a', 'b']) + + self.channel.subclient.connection._sock = None + self.channel._subscribe() + self.channel.subclient.connection.connect.assert_called_with() + + def test_handle_unsubscribe_message(self): + s = self.channel.subclient + s.subscribed = True + self.channel._handle_message(s, ['unsubscribe', 'a', 0]) + self.assertFalse(s.subscribed) + + def test_handle_pmessage_message(self): + self.assertDictEqual( + self.channel._handle_message( + self.channel.subclient, + ['pmessage', 'pattern', 'channel', 'data'], + ), + { + 'type': 'pmessage', + 'pattern': 'pattern', + 'channel': 'channel', + 'data': 'data', + }, + ) + + def test_handle_message(self): + self.assertDictEqual( + self.channel._handle_message( + self.channel.subclient, + ['type', 'channel', 'data'], + ), + { + 'type': 'type', + 'pattern': None, + 'channel': 'channel', + 'data': 'data', + }, + ) + + def test_brpop_start_but_no_queues(self): + self.assertIsNone(self.channel._brpop_start()) + + def test_receive(self): + s = self.channel.subclient = Mock() + self.channel._fanout_to_queue['a'] = 'b' + s.parse_response.return_value = ['message', 'a', + dumps({'hello': 'world'})] + payload, queue = self.channel._receive() + self.assertDictEqual(payload, {'hello': 'world'}) + self.assertEqual(queue, 'b') + + def test_receive_raises(self): + self.channel._in_listen = True + s = self.channel.subclient = Mock() + s.parse_response.side_effect = KeyError('foo') + + with self.assertRaises(redis.Empty): + self.channel._receive() + self.assertFalse(self.channel._in_listen) + + def test_receive_empty(self): + s = self.channel.subclient = Mock() + 
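Worth noting: each of these _receive tests steers the channel down a different branch purely through Mock configuration; side_effect makes the stubbed parse_response raise, return_value makes it answer. The two knobs in isolation (a standalone illustration using the mock library these tests already import):

from mock import Mock

parse = Mock()
parse.side_effect = KeyError('foo')   # calling parse() now raises KeyError
try:
    parse()
except KeyError:
    pass

parse.side_effect = None              # clear the exception behaviour
parse.return_value = None             # calling parse() now returns None
assert parse() is None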
s.parse_response.return_value = None + + with self.assertRaises(redis.Empty): + self.channel._receive() + + def test_receive_different_message_Type(self): + s = self.channel.subclient = Mock() + s.parse_response.return_value = ['pmessage', '/foo/', 0, 'data'] + + with self.assertRaises(redis.Empty): + self.channel._receive() + + def test_brpop_read_raises(self): + c = self.channel.client = Mock() + c.parse_response.side_effect = KeyError('foo') + + with self.assertRaises(redis.Empty): + self.channel._brpop_read() + + c.connection.disconnect.assert_called_with() + + def test_brpop_read_gives_None(self): + c = self.channel.client = Mock() + c.parse_response.return_value = None + + with self.assertRaises(redis.Empty): + self.channel._brpop_read() + + def test_poll_error(self): + c = self.channel.client = Mock() + c.parse_response = Mock() + self.channel._poll_error('BRPOP') + + c.parse_response.assert_called_with('BRPOP') + + c.parse_response.side_effect = KeyError('foo') + self.assertIsNone(self.channel._poll_error('BRPOP')) + + def test_put_fanout(self): + self.channel._in_poll = False + c = self.channel.client = Mock() + + body = {'hello': 'world'} + self.channel._put_fanout('exchange', body) + c.publish.assert_called_with('exchange', dumps(body)) + + def test_delete(self): + x = self.channel + self.channel._in_poll = False + delete = x.client.delete = Mock() + srem = x.client.srem = Mock() + + x._delete('queue', 'exchange', 'routing_key', None) + delete.assert_has_call('queue') + srem.assert_has_call(x.keyprefix_queue % ('exchange', ), + x.sep.join(['routing_key', '', 'queue'])) + + def test_has_queue(self): + self.channel._in_poll = False + exists = self.channel.client.exists = Mock() + exists.return_value = True + self.assertTrue(self.channel._has_queue('foo')) + exists.assert_has_call('foo') + + exists.return_value = False + self.assertFalse(self.channel._has_queue('foo')) + + def test_close_when_closed(self): + self.channel.closed = True + self.channel.close() + + def test_close_client_close_raises(self): + c = self.channel.client = Mock() + c.connection.disconnect.side_effect = self.channel.ResponseError() + + self.channel.close() + c.connection.disconnect.assert_called_with() + + def test_invalid_database_raises_ValueError(self): + + with self.assertRaises(ValueError): + self.channel.connection.client.virtual_host = 'dwqeq' + self.channel._connparams() + + @skip_if_not_module('redis') + def test_get_client(self): + import redis as R + KombuRedis = redis.Channel._get_client(self.channel) + self.assertTrue(KombuRedis) + + Rv = getattr(R, 'VERSION', None) + try: + R.VERSION = (2, 4, 0) + with self.assertRaises(VersionMismatch): + redis.Channel._get_client(self.channel) + finally: + if Rv is not None: + R.VERSION = Rv + + @skip_if_not_module('redis') + def test_get_response_error(self): + from redis.exceptions import ResponseError + self.assertIs(redis.Channel._get_response_error(self.channel), + ResponseError) + + def test_avail_client_when_not_in_poll(self): + self.channel._in_poll = False + c = self.channel.client = Mock() + + with self.channel.conn_or_acquire() as client: + self.assertIs(client, c) + + def test_avail_client_when_in_poll(self): + self.channel._in_poll = True + self.channel._pool = Mock() + cc = self.channel._create_client = Mock() + client = cc.return_value = Mock() + + with self.channel.conn_or_acquire(): + pass + self.channel.pool.release.assert_called_with(client.connection) + cc.assert_called_with() + + @skip_if_not_module('redis') + def 
test_transport_get_errors(self): + self.assertTrue(redis.Transport._get_errors(self.connection.transport)) + + @skip_if_not_module('redis') + def test_transport_get_errors_when_InvalidData_used(self): + from redis import exceptions + + class ID(Exception): + pass + + DataError = getattr(exceptions, 'DataError', None) + InvalidData = getattr(exceptions, 'InvalidData', None) + exceptions.InvalidData = ID + exceptions.DataError = None + try: + errors = redis.Transport._get_errors(self.connection.transport) + self.assertTrue(errors) + self.assertIn(ID, errors[1]) + finally: + if DataError is not None: + exceptions.DataError = DataError + if InvalidData is not None: + exceptions.InvalidData = InvalidData + + def test_empty_queues_key(self): + channel = self.channel + channel._in_poll = False + key = channel.keyprefix_queue % 'celery' + + # Everything is fine, there is a list of queues. + channel.client.sadd(key, 'celery\x06\x16\x06\x16celery') + self.assertListEqual(channel.get_table('celery'), + [('celery', '', 'celery')]) + + # ... then for some reason, the _kombu.binding.celery key gets lost + channel.client.srem(key) + + # which raises a channel error so that the consumer/publisher + # can recover by redeclaring the required entities. + with self.assertRaises(InconsistencyError): + self.channel.get_table('celery') + + +class test_Redis(TestCase): + + def setUp(self): + self.connection = Connection(transport=Transport) + self.exchange = Exchange('test_Redis', type='direct') + self.queue = Queue('test_Redis', self.exchange, 'test_Redis') + + def tearDown(self): + self.connection.close() + + def test_publish__get(self): + channel = self.connection.channel() + producer = Producer(channel, self.exchange, routing_key='test_Redis') + self.queue(channel).declare() + + producer.publish({'hello': 'world'}) + + self.assertDictEqual(self.queue(channel).get().payload, + {'hello': 'world'}) + self.assertIsNone(self.queue(channel).get()) + self.assertIsNone(self.queue(channel).get()) + self.assertIsNone(self.queue(channel).get()) + + def test_publish__consume(self): + connection = Connection(transport=Transport) + channel = connection.channel() + producer = Producer(channel, self.exchange, routing_key='test_Redis') + consumer = Consumer(channel, self.queue) + + producer.publish({'hello2': 'world2'}) + _received = [] + + def callback(message_data, message): + _received.append(message_data) + message.ack() + + consumer.register_callback(callback) + consumer.consume() + + self.assertIn(channel, channel.connection.cycle._channels) + try: + connection.drain_events(timeout=1) + self.assertTrue(_received) + with self.assertRaises(socket.timeout): + connection.drain_events(timeout=0.01) + finally: + channel.close() + + def test_purge(self): + channel = self.connection.channel() + producer = Producer(channel, self.exchange, routing_key='test_Redis') + self.queue(channel).declare() + + for i in range(10): + producer.publish({'hello': 'world-%s' % (i, )}) + + self.assertEqual(channel._size('test_Redis'), 10) + self.assertEqual(self.queue(channel).purge(), 10) + channel.close() + + def test_db_values(self): + Connection(virtual_host=1, + transport=Transport).channel() + + Connection(virtual_host='1', + transport=Transport).channel() + + Connection(virtual_host='/1', + transport=Transport).channel() + + with self.assertRaises(Exception): + Connection('redis:///foo').channel() + + def test_db_port(self): + c1 = Connection(port=None, transport=Transport).channel() + c1.close() + + c2 = Connection(port=9999, 
transport=Transport).channel() + c2.close() + + def test_close_poller_not_active(self): + c = Connection(transport=Transport).channel() + cycle = c.connection.cycle + c.client.connection + c.close() + self.assertNotIn(c, cycle._channels) + + def test_close_ResponseError(self): + c = Connection(transport=Transport).channel() + c.client.bgsave_raises_ResponseError = True + c.close() + + def test_close_disconnects(self): + c = Connection(transport=Transport).channel() + conn1 = c.client.connection + conn2 = c.subclient.connection + c.close() + self.assertTrue(conn1.disconnected) + self.assertTrue(conn2.disconnected) + + def test_get__Empty(self): + channel = self.connection.channel() + with self.assertRaises(Empty): + channel._get('does-not-exist') + channel.close() + + def test_get_client(self): + + myredis, exceptions = _redis_modules() + + @module_exists(myredis, exceptions) + def _do_test(): + conn = Connection(transport=Transport) + chan = conn.channel() + self.assertTrue(chan.Client) + self.assertTrue(chan.ResponseError) + self.assertTrue(conn.transport.connection_errors) + self.assertTrue(conn.transport.channel_errors) + + _do_test() + + +def _redis_modules(): + + class ConnectionError(Exception): + pass + + class AuthenticationError(Exception): + pass + + class InvalidData(Exception): + pass + + class InvalidResponse(Exception): + pass + + class ResponseError(Exception): + pass + + exceptions = types.ModuleType('redis.exceptions') + exceptions.ConnectionError = ConnectionError + exceptions.AuthenticationError = AuthenticationError + exceptions.InvalidData = InvalidData + exceptions.InvalidResponse = InvalidResponse + exceptions.ResponseError = ResponseError + + class Redis(object): + pass + + myredis = types.ModuleType('redis') + myredis.exceptions = exceptions + myredis.Redis = Redis + + return myredis, exceptions + + +class test_MultiChannelPoller(TestCase): + Poller = redis.MultiChannelPoller + + def test_close_unregisters_fds(self): + p = self.Poller() + poller = p.poller = Mock() + p._chan_to_sock.update({1: 1, 2: 2, 3: 3}) + + p.close() + + self.assertEqual(poller.unregister.call_count, 3) + u_args = poller.unregister.call_args_list + + self.assertItemsEqual(u_args, [((1, ), {}), + ((2, ), {}), + ((3, ), {})]) + + def test_close_when_unregister_raises_KeyError(self): + p = self.Poller() + p.poller = Mock() + p._chan_to_sock.update({1: 1}) + p.poller.unregister.side_effect = KeyError(1) + p.close() + + def test_close_resets_state(self): + p = self.Poller() + p.poller = Mock() + p._channels = Mock() + p._fd_to_chan = Mock() + p._chan_to_sock = Mock() + + p._chan_to_sock.itervalues.return_value = [] + p._chan_to_sock.values.return_value = [] # py3k + + p.close() + p._channels.clear.assert_called_with() + p._fd_to_chan.clear.assert_called_with() + p._chan_to_sock.clear.assert_called_with() + self.assertIsNone(p.poller) + + def test_register_when_registered_reregisters(self): + p = self.Poller() + p.poller = Mock() + channel, client, type = Mock(), Mock(), Mock() + sock = client.connection._sock = Mock() + sock.fileno.return_value = 10 + + p._chan_to_sock = {(channel, client, type): 6} + p._register(channel, client, type) + p.poller.unregister.assert_called_with(6) + self.assertTupleEqual(p._fd_to_chan[10], (channel, type)) + self.assertEqual(p._chan_to_sock[(channel, client, type)], sock) + p.poller.register.assert_called_with(sock, p.eventflags) + + # when client not connected yet + client.connection._sock = None + + def after_connected(): + client.connection._sock = Mock() + 
client.connection.connect.side_effect = after_connected + + p._register(channel, client, type) + client.connection.connect.assert_called_with() + + def test_register_BRPOP(self): + p = self.Poller() + channel = Mock() + channel.client.connection._sock = None + p._register = Mock() + + channel._in_poll = False + p._register_BRPOP(channel) + self.assertEqual(channel._brpop_start.call_count, 1) + self.assertEqual(p._register.call_count, 1) + + channel.client.connection._sock = Mock() + p._chan_to_sock[(channel, channel.client, 'BRPOP')] = True + channel._in_poll = True + p._register_BRPOP(channel) + self.assertEqual(channel._brpop_start.call_count, 1) + self.assertEqual(p._register.call_count, 1) + + def test_register_LISTEN(self): + p = self.Poller() + channel = Mock() + channel.subclient.connection._sock = None + channel._in_listen = False + p._register = Mock() + + p._register_LISTEN(channel) + p._register.assert_called_with(channel, channel.subclient, 'LISTEN') + self.assertEqual(p._register.call_count, 1) + self.assertEqual(channel._subscribe.call_count, 1) + + channel._in_listen = True + channel.subclient.connection._sock = Mock() + p._register_LISTEN(channel) + self.assertEqual(p._register.call_count, 1) + self.assertEqual(channel._subscribe.call_count, 1) + + def create_get(self, events=None, queues=None, fanouts=None): + _pr = [] if events is None else events + _aq = [] if queues is None else queues + _af = [] if fanouts is None else fanouts + p = self.Poller() + p.poller = Mock() + p.poller.poll.return_value = _pr + + p._register_BRPOP = Mock() + p._register_LISTEN = Mock() + + channel = Mock() + p._channels = [channel] + channel.active_queues = _aq + channel.active_fanout_queues = _af + + return p, channel + + def test_get_no_actions(self): + p, channel = self.create_get() + + with self.assertRaises(redis.Empty): + p.get() + + def test_get_brpop_qos_allow(self): + p, channel = self.create_get(queues=['a_queue']) + channel.qos.can_consume.return_value = True + + with self.assertRaises(redis.Empty): + p.get() + + p._register_BRPOP.assert_called_with(channel) + + def test_get_brpop_qos_disallow(self): + p, channel = self.create_get(queues=['a_queue']) + channel.qos.can_consume.return_value = False + + with self.assertRaises(redis.Empty): + p.get() + + self.assertFalse(p._register_BRPOP.called) + + def test_get_listen(self): + p, channel = self.create_get(fanouts=['f_queue']) + + with self.assertRaises(redis.Empty): + p.get() + + p._register_LISTEN.assert_called_with(channel) + + def test_get_receives_ERR(self): + p, channel = self.create_get(events=[(1, eventio.ERR)]) + p._fd_to_chan[1] = (channel, 'BRPOP') + + with self.assertRaises(redis.Empty): + p.get() + + channel._poll_error.assert_called_with('BRPOP') + + def test_get_receives_multiple(self): + p, channel = self.create_get(events=[(1, eventio.ERR), + (1, eventio.ERR)]) + p._fd_to_chan[1] = (channel, 'BRPOP') + + with self.assertRaises(redis.Empty): + p.get() + + channel._poll_error.assert_called_with('BRPOP') diff --git a/awx/lib/site-packages/kombu/tests/transport/test_sqlalchemy.py b/awx/lib/site-packages/kombu/tests/transport/test_sqlalchemy.py new file mode 100644 index 0000000000..27cc43232e --- /dev/null +++ b/awx/lib/site-packages/kombu/tests/transport/test_sqlalchemy.py @@ -0,0 +1,39 @@ +from __future__ import absolute_import +from __future__ import with_statement + +from mock import patch +from nose import SkipTest + +from kombu import Connection +from kombu.tests.utils import TestCase + + +class 
test_sqlalchemy(TestCase): + + def setUp(self): + try: + import sqlalchemy # noqa + except ImportError: + raise SkipTest('sqlalchemy not installed') + + def test_url_parser(self): + with patch('kombu.transport.sqlalchemy.Channel._open'): + url = 'sqlalchemy+sqlite:///celerydb.sqlite' + Connection(url).connect() + + url = 'sqla+sqlite:///celerydb.sqlite' + Connection(url).connect() + + # Should prevent regression fixed by f187ccd + url = 'sqlb+sqlite:///celerydb.sqlite' + with self.assertRaises(KeyError): + Connection(url).connect() + + def test_clone(self): + hostname = 'sqlite:///celerydb.sqlite' + x = Connection('+'.join(['sqla', hostname])) + self.assertEqual(x.uri_prefix, 'sqla') + self.assertEqual(x.hostname, hostname) + clone = x.clone() + self.assertEqual(clone.hostname, hostname) + self.assertEqual(clone.uri_prefix, 'sqla') diff --git a/awx/lib/site-packages/kombu/tests/transport/test_transport.py b/awx/lib/site-packages/kombu/tests/transport/test_transport.py new file mode 100644 index 0000000000..11cdcbe3b6 --- /dev/null +++ b/awx/lib/site-packages/kombu/tests/transport/test_transport.py @@ -0,0 +1,27 @@ +from __future__ import absolute_import +from __future__ import with_statement + +from mock import patch + +from kombu import transport + +from kombu.tests.utils import TestCase + + +class test_transport(TestCase): + + def test_resolve_transport_when_callable(self): + from kombu.transport.memory import Transport + self.assertIs(transport.resolve_transport( + 'kombu.transport.memory:Transport'), + Transport) + + +class test_transport_gettoq(TestCase): + + @patch('warnings.warn') + def test_compat(self, warn): + x = transport._ghettoq('Redis', 'redis', 'redis') + + self.assertEqual(x(), 'kombu.transport.redis.Transport') + self.assertTrue(warn.called) diff --git a/awx/lib/site-packages/kombu/tests/transport/virtual/__init__.py b/awx/lib/site-packages/kombu/tests/transport/virtual/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/kombu/tests/transport/virtual/test_base.py b/awx/lib/site-packages/kombu/tests/transport/virtual/test_base.py new file mode 100644 index 0000000000..28714cc4c9 --- /dev/null +++ b/awx/lib/site-packages/kombu/tests/transport/virtual/test_base.py @@ -0,0 +1,513 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import warnings + +from mock import patch + +from kombu import Connection +from kombu.exceptions import StdChannelError +from kombu.transport import virtual +from kombu.utils import uuid +from kombu.compression import compress + +from kombu.tests.compat import catch_warnings +from kombu.tests.utils import TestCase +from kombu.tests.utils import Mock, redirect_stdouts + + +def client(**kwargs): + return Connection(transport='kombu.transport.virtual.Transport', **kwargs) + + +def memory_client(): + return Connection(transport='memory') + + +class test_BrokerState(TestCase): + + def test_constructor(self): + s = virtual.BrokerState() + self.assertTrue(hasattr(s, 'exchanges')) + self.assertTrue(hasattr(s, 'bindings')) + + t = virtual.BrokerState(exchanges=16, bindings=32) + self.assertEqual(t.exchanges, 16) + self.assertEqual(t.bindings, 32) + + +class test_QoS(TestCase): + + def setUp(self): + self.q = virtual.QoS(client().channel(), prefetch_count=10) + + def tearDown(self): + self.q._on_collect.cancel() + + def test_constructor(self): + self.assertTrue(self.q.channel) + self.assertTrue(self.q.prefetch_count) + self.assertFalse(self.q._delivered.restored) + 
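test_can_consume below exercises the prefetch window: unacknowledged deliveries count against prefetch_count, and acking or rejecting one frees a slot. A toy model of just that invariant (an assumption-laden sketch, not kombu's QoS implementation):

class ToyQoS(object):
    def __init__(self, prefetch_count):
        self.prefetch_count = prefetch_count
        self.delivered = {}

    def can_consume(self):
        # More messages may be fetched only while the number of
        # outstanding (unacked) deliveries is below the window.
        return len(self.delivered) < self.prefetch_count

    def append(self, message, tag):
        self.delivered[tag] = message

    def ack(self, tag):
        self.delivered.pop(tag, None)

    reject = ack   # in this toy model rejecting frees the slot the same way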
self.assertTrue(self.q._on_collect) + + @redirect_stdouts + def test_can_consume(self, stdout, stderr): + _restored = [] + + class RestoreChannel(virtual.Channel): + do_restore = True + + def _restore(self, message): + _restored.append(message) + + self.assertTrue(self.q.can_consume()) + for i in range(self.q.prefetch_count - 1): + self.q.append(i, uuid()) + self.assertTrue(self.q.can_consume()) + self.q.append(i + 1, uuid()) + self.assertFalse(self.q.can_consume()) + + tag1 = iter(self.q._delivered).next() + self.q.ack(tag1) + self.assertTrue(self.q.can_consume()) + + tag2 = uuid() + self.q.append(i + 2, tag2) + self.assertFalse(self.q.can_consume()) + self.q.reject(tag2) + self.assertTrue(self.q.can_consume()) + + self.q.channel = RestoreChannel(self.q.channel.connection) + tag3 = uuid() + self.q.append(i + 3, tag3) + self.q.reject(tag3, requeue=True) + self.q._flush() + self.q.restore_unacked_once() + self.assertListEqual(_restored, [11, 9, 8, 7, 6, 5, 4, 3, 2, 1]) + self.assertTrue(self.q._delivered.restored) + self.assertFalse(self.q._delivered) + + self.q.restore_unacked_once() + self.q._delivered.restored = False + self.q.restore_unacked_once() + + self.assertTrue(stderr.getvalue()) + self.assertFalse(stdout.getvalue()) + + self.q.restore_at_shutdown = False + self.q.restore_unacked_once() + + def test_get(self): + self.q._delivered['foo'] = 1 + self.assertEqual(self.q.get('foo'), 1) + + +class test_Message(TestCase): + + def test_create(self): + c = client().channel() + data = c.prepare_message('the quick brown fox...') + tag = data['properties']['delivery_tag'] = uuid() + message = c.message_to_python(data) + self.assertIsInstance(message, virtual.Message) + self.assertIs(message, c.message_to_python(message)) + + self.assertEqual(message.body, + 'the quick brown fox...'.encode('utf-8')) + self.assertTrue(message.delivery_tag, tag) + + def test_create_no_body(self): + virtual.Message(Mock(), { + 'body': None, + 'properties': {'delivery_tag': 1}}) + + def test_serializable(self): + c = client().channel() + body, content_type = compress('the quick brown fox...', 'gzip') + data = c.prepare_message(body, headers={'compression': content_type}) + tag = data['properties']['delivery_tag'] = uuid() + message = c.message_to_python(data) + dict_ = message.serializable() + self.assertEqual(dict_['body'], + 'the quick brown fox...'.encode('utf-8')) + self.assertEqual(dict_['properties']['delivery_tag'], tag) + self.assertFalse('compression' in dict_['headers']) + + +class test_AbstractChannel(TestCase): + + def test_get(self): + with self.assertRaises(NotImplementedError): + virtual.AbstractChannel()._get('queue') + + def test_put(self): + with self.assertRaises(NotImplementedError): + virtual.AbstractChannel()._put('queue', 'm') + + def test_size(self): + self.assertEqual(virtual.AbstractChannel()._size('queue'), 0) + + def test_purge(self): + with self.assertRaises(NotImplementedError): + virtual.AbstractChannel()._purge('queue') + + def test_delete(self): + with self.assertRaises(NotImplementedError): + virtual.AbstractChannel()._delete('queue') + + def test_new_queue(self): + self.assertIsNone(virtual.AbstractChannel()._new_queue('queue')) + + def test_has_queue(self): + self.assertTrue(virtual.AbstractChannel()._has_queue('queue')) + + def test_poll(self): + + class Cycle(object): + called = False + + def get(self): + self.called = True + return True + + cycle = Cycle() + self.assertTrue(virtual.AbstractChannel()._poll(cycle)) + self.assertTrue(cycle.called) + + +class 
test_Channel(TestCase): + + def setUp(self): + self.channel = client().channel() + + def tearDown(self): + if self.channel._qos is not None: + self.channel._qos._on_collect.cancel() + + def test_exchange_bind_interface(self): + with self.assertRaises(NotImplementedError): + self.channel.exchange_bind('dest', 'src', 'key') + + def test_exchange_unbind_interface(self): + with self.assertRaises(NotImplementedError): + self.channel.exchange_unbind('dest', 'src', 'key') + + def test_queue_unbind_interface(self): + with self.assertRaises(NotImplementedError): + self.channel.queue_unbind('dest', 'ex', 'key') + + def test_management(self): + m = self.channel.connection.client.get_manager() + self.assertTrue(m) + m.get_bindings() + m.close() + + def test_exchange_declare(self): + c = self.channel + + with self.assertRaises(StdChannelError): + c.exchange_declare('test_exchange_declare', 'direct', + durable=True, auto_delete=True, passive=True) + c.exchange_declare('test_exchange_declare', 'direct', + durable=True, auto_delete=True) + c.exchange_declare('test_exchange_declare', 'direct', + durable=True, auto_delete=True, passive=True) + self.assertIn('test_exchange_declare', c.state.exchanges) + # can declare again with same values + c.exchange_declare('test_exchange_declare', 'direct', + durable=True, auto_delete=True) + self.assertIn('test_exchange_declare', c.state.exchanges) + + # using different values raises NotEquivalentError + with self.assertRaises(virtual.NotEquivalentError): + c.exchange_declare('test_exchange_declare', 'direct', + durable=False, auto_delete=True) + + def test_exchange_delete(self, ex='test_exchange_delete'): + + class PurgeChannel(virtual.Channel): + purged = [] + + def _purge(self, queue): + self.purged.append(queue) + + c = PurgeChannel(self.channel.connection) + + c.exchange_declare(ex, 'direct', durable=True, auto_delete=True) + self.assertIn(ex, c.state.exchanges) + self.assertNotIn(ex, c.state.bindings) # no bindings yet + c.exchange_delete(ex) + self.assertNotIn(ex, c.state.exchanges) + + c.exchange_declare(ex, 'direct', durable=True, auto_delete=True) + c.queue_declare(ex) + c.queue_bind(ex, ex, ex) + self.assertTrue(c.state.bindings[ex]) + c.exchange_delete(ex) + self.assertNotIn(ex, c.state.bindings) + self.assertIn(ex, c.purged) + + def test_queue_delete__if_empty(self, n='test_queue_delete__if_empty'): + class PurgeChannel(virtual.Channel): + purged = [] + size = 30 + + def _purge(self, queue): + self.purged.append(queue) + + def _size(self, queue): + return self.size + + c = PurgeChannel(self.channel.connection) + c.exchange_declare(n) + c.queue_declare(n) + c.queue_bind(n, n, n) + c.queue_bind(n, n, n) # tests code path that returns + # if queue already bound. 
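The duplicated queue_bind above targets the early-return path: binding the same queue to the same exchange with the same routing key twice must be a no-op. A hedged sketch of such a guard (simplified; kombu's virtual channel keeps richer binding state than a flat set):

def queue_bind(bindings, queue, exchange, routing_key):
    """Record a binding, returning early if it already exists."""
    binding = (routing_key, queue)
    bound = bindings.setdefault(exchange, set())
    if binding in bound:
        return          # already bound: nothing to do
    bound.add(binding)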
+ + c.queue_delete(n, if_empty=True) + self.assertIn(n, c.state.bindings) + + c.size = 0 + c.queue_delete(n, if_empty=True) + self.assertNotIn(n, c.state.bindings) + self.assertIn(n, c.purged) + + def test_queue_purge(self, n='test_queue_purge'): + + class PurgeChannel(virtual.Channel): + purged = [] + + def _purge(self, queue): + self.purged.append(queue) + + c = PurgeChannel(self.channel.connection) + c.exchange_declare(n) + c.queue_declare(n) + c.queue_bind(n, n, n) + c.queue_purge(n) + self.assertIn(n, c.purged) + + def test_basic_publish__get__consume__restore(self, + n='test_basic_publish'): + c = memory_client().channel() + + c.exchange_declare(n) + c.queue_declare(n) + c.queue_bind(n, n, n) + c.queue_declare(n + '2') + c.queue_bind(n + '2', n, n) + + m = c.prepare_message('nthex quick brown fox...') + c.basic_publish(m, n, n) + + r1 = c.message_to_python(c.basic_get(n)) + self.assertTrue(r1) + self.assertEqual(r1.body, + 'nthex quick brown fox...'.encode('utf-8')) + self.assertIsNone(c.basic_get(n)) + + consumer_tag = uuid() + + c.basic_consume(n + '2', False, + consumer_tag=consumer_tag, callback=lambda *a: None) + self.assertIn(n + '2', c._active_queues) + r2, _ = c.drain_events() + r2 = c.message_to_python(r2) + self.assertEqual(r2.body, + 'nthex quick brown fox...'.encode('utf-8')) + self.assertEqual(r2.delivery_info['exchange'], n) + self.assertEqual(r2.delivery_info['routing_key'], n) + with self.assertRaises(virtual.Empty): + c.drain_events() + c.basic_cancel(consumer_tag) + + c._restore(r2) + r3 = c.message_to_python(c.basic_get(n)) + self.assertTrue(r3) + self.assertEqual(r3.body, 'nthex quick brown fox...'.encode('utf-8')) + self.assertIsNone(c.basic_get(n)) + + def test_basic_ack(self): + + class MockQoS(virtual.QoS): + was_acked = False + + def ack(self, delivery_tag): + self.was_acked = True + + self.channel._qos = MockQoS(self.channel) + self.channel.basic_ack('foo') + self.assertTrue(self.channel._qos.was_acked) + + def test_basic_recover__requeue(self): + + class MockQoS(virtual.QoS): + was_restored = False + + def restore_unacked(self): + self.was_restored = True + + self.channel._qos = MockQoS(self.channel) + self.channel.basic_recover(requeue=True) + self.assertTrue(self.channel._qos.was_restored) + + def test_restore_unacked_raises_BaseException(self): + q = self.channel.qos + q._flush = Mock() + q._delivered = {1: 1} + + q.channel._restore = Mock() + q.channel._restore.side_effect = SystemExit + + errors = q.restore_unacked() + self.assertIsInstance(errors[0][0], SystemExit) + self.assertEqual(errors[0][1], 1) + self.assertFalse(q._delivered) + + @patch('kombu.transport.virtual.emergency_dump_state') + @patch('kombu.transport.virtual.say') + def test_restore_unacked_once_when_unrestored(self, say, + emergency_dump_state): + q = self.channel.qos + q._flush = Mock() + + class State(dict): + restored = False + + q._delivered = State({1: 1}) + ru = q.restore_unacked = Mock() + exc = None + try: + raise KeyError() + except KeyError, exc_: + exc = exc_ + ru.return_value = [(exc, 1)] + + self.channel.do_restore = True + q.restore_unacked_once() + self.assertTrue(say.called) + self.assertTrue(emergency_dump_state.called) + + def test_basic_recover(self): + with self.assertRaises(NotImplementedError): + self.channel.basic_recover(requeue=False) + + def test_basic_reject(self): + + class MockQoS(virtual.QoS): + was_rejected = False + + def reject(self, delivery_tag, requeue=False): + self.was_rejected = True + + self.channel._qos = MockQoS(self.channel) + 
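basic_ack, basic_recover and basic_reject are all verified the same way here: swap channel._qos for a QoS subclass that records when the delegated method runs. The pattern in isolation (an illustrative stand-in, not tied to kombu's QoS constructor signature):

class RecordingQoS(object):
    """Records which QoS method a channel delegated to."""

    def __init__(self):
        self.calls = []

    def ack(self, delivery_tag):
        self.calls.append(('ack', delivery_tag))

    def reject(self, delivery_tag, requeue=False):
        self.calls.append(('reject', delivery_tag, requeue))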
self.channel.basic_reject('foo') + self.assertTrue(self.channel._qos.was_rejected) + + def test_basic_qos(self): + self.channel.basic_qos(prefetch_count=128) + self.assertEqual(self.channel._qos.prefetch_count, 128) + + def test_lookup__undeliverable(self, n='test_lookup__undeliverable'): + warnings.resetwarnings() + with catch_warnings(record=True) as log: + self.assertListEqual( + self.channel._lookup(n, n, 'ae.undeliver'), + ['ae.undeliver'], + ) + self.assertTrue(log) + self.assertIn('could not be delivered', log[0].message.args[0]) + + def test_context(self): + x = self.channel.__enter__() + self.assertIs(x, self.channel) + x.__exit__() + self.assertTrue(x.closed) + + def test_cycle_property(self): + self.assertTrue(self.channel.cycle) + + def test_flow(self): + with self.assertRaises(NotImplementedError): + self.channel.flow(False) + + def test_close_when_no_connection(self): + self.channel.connection = None + self.channel.close() + self.assertTrue(self.channel.closed) + + def test_drain_events_has_get_many(self): + c = self.channel + c._get_many = Mock() + c._poll = Mock() + c._consumers = [1] + c._qos = Mock() + c._qos.can_consume.return_value = True + + c.drain_events(timeout=10.0) + c._get_many.assert_called_with(c._active_queues, timeout=10.0) + + def test_get_exchanges(self): + self.channel.exchange_declare(exchange='foo') + self.assertTrue(self.channel.get_exchanges()) + + def test_basic_cancel_not_in_active_queues(self): + c = self.channel + c._consumers.add('x') + c._tag_to_queue['x'] = 'foo' + c._active_queues = Mock() + c._active_queues.remove.side_effect = ValueError() + + c.basic_cancel('x') + c._active_queues.remove.assert_called_with('foo') + + def test_basic_cancel_unknown_ctag(self): + self.assertIsNone(self.channel.basic_cancel('unknown-tag')) + + def test_list_bindings(self): + c = self.channel + c.exchange_declare(exchange='foo') + c.queue_declare(queue='q') + c.queue_bind(queue='q', exchange='foo', routing_key='rk') + + self.assertIn(('q', 'foo', 'rk'), list(c.list_bindings())) + + def test_after_reply_message_received(self): + c = self.channel + c.queue_delete = Mock() + c.after_reply_message_received('foo') + c.queue_delete.assert_called_with('foo') + + def test_queue_delete_unknown_queue(self): + self.assertIsNone(self.channel.queue_delete('xiwjqjwel')) + + def test_queue_declare_passive(self): + has_queue = self.channel._has_queue = Mock() + has_queue.return_value = False + with self.assertRaises(StdChannelError): + self.channel.queue_declare(queue='21wisdjwqe', passive=True) + + +class test_Transport(TestCase): + + def setUp(self): + self.transport = client().transport + + def test_custom_polling_interval(self): + x = client(transport_options=dict(polling_interval=32.3)) + self.assertEqual(x.transport.polling_interval, 32.3) + + def test_close_connection(self): + c1 = self.transport.create_channel(self.transport) + c2 = self.transport.create_channel(self.transport) + self.assertEqual(len(self.transport.channels), 2) + self.transport.close_connection(self.transport) + self.assertFalse(self.transport.channels) + del(c1) # so pyflakes doesn't complain + del(c2) + + def test_drain_channel(self): + channel = self.transport.create_channel(self.transport) + with self.assertRaises(virtual.Empty): + self.transport._drain_channel(channel) diff --git a/awx/lib/site-packages/kombu/tests/transport/virtual/test_exchange.py b/awx/lib/site-packages/kombu/tests/transport/virtual/test_exchange.py new file mode 100644 index 0000000000..90127ba074 --- /dev/null +++ 
b/awx/lib/site-packages/kombu/tests/transport/virtual/test_exchange.py @@ -0,0 +1,161 @@ +from __future__ import absolute_import +from __future__ import with_statement + +from kombu import Connection +from kombu.transport.virtual import exchange + +from kombu.tests.mocks import Transport +from kombu.tests.utils import TestCase +from kombu.tests.utils import Mock + + +class ExchangeCase(TestCase): + type = None + + def setUp(self): + if self.type: + self.e = self.type(Connection(transport=Transport).channel()) + + +class test_Direct(ExchangeCase): + type = exchange.DirectExchange + table = [('rFoo', None, 'qFoo'), + ('rFoo', None, 'qFox'), + ('rBar', None, 'qBar'), + ('rBaz', None, 'qBaz')] + + def test_lookup(self): + self.assertListEqual( + self.e.lookup(self.table, 'eFoo', 'rFoo', None), + ['qFoo', 'qFox'], + ) + self.assertListEqual( + self.e.lookup(self.table, 'eMoz', 'rMoz', 'DEFAULT'), + [], + ) + self.assertListEqual( + self.e.lookup(self.table, 'eBar', 'rBar', None), + ['qBar'], + ) + + +class test_Fanout(ExchangeCase): + type = exchange.FanoutExchange + table = [(None, None, 'qFoo'), + (None, None, 'qFox'), + (None, None, 'qBar')] + + def test_lookup(self): + self.assertListEqual( + self.e.lookup(self.table, 'eFoo', 'rFoo', None), + ['qFoo', 'qFox', 'qBar'], + ) + + def test_deliver_when_fanout_supported(self): + self.e.channel = Mock() + self.e.channel.supports_fanout = True + message = Mock() + + self.e.deliver(message, 'exchange', None) + self.e.channel._put_fanout.assert_called_with('exchange', message) + + def test_deliver_when_fanout_unsupported(self): + self.e.channel = Mock() + self.e.channel.supports_fanout = False + + self.e.deliver(Mock(), 'exchange', None) + self.assertFalse(self.e.channel._put_fanout.called) + + +class test_Topic(ExchangeCase): + type = exchange.TopicExchange + table = [ + ('stock.#', None, 'rFoo'), + ('stock.us.*', None, 'rBar'), + ] + + def setUp(self): + super(test_Topic, self).setUp() + self.table = [(rkey, self.e.key_to_pattern(rkey), queue) + for rkey, _, queue in self.table] + + def test_prepare_bind(self): + x = self.e.prepare_bind('qFoo', 'eFoo', 'stock.#', {}) + self.assertTupleEqual(x, ('stock.#', r'^stock\..*?$', 'qFoo')) + + def test_lookup(self): + self.assertListEqual( + self.e.lookup(self.table, 'eFoo', 'stock.us.nasdaq', None), + ['rFoo', 'rBar'], + ) + self.assertTrue(self.e._compiled) + self.assertListEqual( + self.e.lookup(self.table, 'eFoo', 'stock.europe.OSE', None), + ['rFoo'], + ) + self.assertListEqual( + self.e.lookup(self.table, 'eFoo', 'stockxeuropexOSE', None), + [], + ) + self.assertListEqual( + self.e.lookup(self.table, 'eFoo', + 'candy.schleckpulver.snap_crackle', None), + [], + ) + + def test_deliver(self): + self.e.channel = Mock() + self.e.channel._lookup.return_value = ('a', 'b') + message = Mock() + self.e.deliver(message, 'exchange', 'rkey') + + expected = [(('a', message), {}), + (('b', message), {})] + self.assertListEqual(self.e.channel._put.call_args_list, expected) + + +class test_ExchangeType(ExchangeCase): + type = exchange.ExchangeType + + def test_lookup(self): + with self.assertRaises(NotImplementedError): + self.e.lookup([], 'eFoo', 'rFoo', None) + + def test_prepare_bind(self): + self.assertTupleEqual( + self.e.prepare_bind('qFoo', 'eFoo', 'rFoo', {}), + ('rFoo', None, 'qFoo'), + ) + + def test_equivalent(self): + e1 = dict( + type='direct', + durable=True, + auto_delete=True, + arguments={}, + ) + self.assertTrue( + self.e.equivalent(e1, 'eFoo', 'direct', True, True, {}), + ) + self.assertFalse( + 
self.e.equivalent(e1, 'eFoo', 'topic', True, True, {}), + ) + self.assertFalse( + self.e.equivalent(e1, 'eFoo', 'direct', False, True, {}), + ) + self.assertFalse( + self.e.equivalent(e1, 'eFoo', 'direct', True, False, {}), + ) + self.assertFalse( + self.e.equivalent(e1, 'eFoo', 'direct', True, True, + {'expires': 3000}), + ) + e2 = dict(e1, arguments={'expires': 3000}) + self.assertTrue( + self.e.equivalent(e2, 'eFoo', 'direct', True, True, + {'expires': 3000}), + ) + self.assertFalse( + self.e.equivalent(e2, 'eFoo', 'direct', True, True, + {'expires': 6000}), + ) diff --git a/awx/lib/site-packages/kombu/tests/transport/virtual/test_scheduling.py b/awx/lib/site-packages/kombu/tests/transport/virtual/test_scheduling.py new file mode 100644 index 0000000000..6f2d24f15a --- /dev/null +++ b/awx/lib/site-packages/kombu/tests/transport/virtual/test_scheduling.py @@ -0,0 +1,68 @@ +from __future__ import absolute_import +from __future__ import with_statement + +from kombu.transport.virtual.scheduling import FairCycle + +from kombu.tests.utils import TestCase + + +class MyEmpty(Exception): + pass + + +def consume(fun, n): + r = [] + for i in range(n): + r.append(fun()) + return r + + +class test_FairCycle(TestCase): + + def test_cycle(self): + resources = ['a', 'b', 'c', 'd', 'e'] + + def echo(r, timeout=None): + return r + + # cycle should be ['a', 'b', 'c', 'd', 'e', ... repeat] + cycle = FairCycle(echo, resources, MyEmpty) + for i in range(len(resources)): + self.assertEqual(cycle.get(), (resources[i], + resources[i])) + for i in range(len(resources)): + self.assertEqual(cycle.get(), (resources[i], + resources[i])) + + def test_cycle_breaks(self): + resources = ['a', 'b', 'c', 'd', 'e'] + + def echo(r): + if r == 'c': + raise MyEmpty(r) + return r + + cycle = FairCycle(echo, resources, MyEmpty) + self.assertEqual( + consume(cycle.get, len(resources)), + [('a', 'a'), ('b', 'b'), ('d', 'd'), + ('e', 'e'), ('a', 'a')], + ) + self.assertEqual( + consume(cycle.get, len(resources)), + [('b', 'b'), ('d', 'd'), ('e', 'e'), + ('a', 'a'), ('b', 'b')], + ) + cycle2 = FairCycle(echo, ['c', 'c'], MyEmpty) + with self.assertRaises(MyEmpty): + consume(cycle2.get, 3) + + def test_cycle_no_resources(self): + cycle = FairCycle(None, [], MyEmpty) + cycle.pos = 10 + + with self.assertRaises(MyEmpty): + cycle._next() + + def test__repr__(self): + self.assertTrue(repr(FairCycle(lambda x: x, [1, 2, 3], MyEmpty))) diff --git a/awx/lib/site-packages/kombu/tests/utilities/__init__.py b/awx/lib/site-packages/kombu/tests/utilities/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/kombu/tests/utilities/test_amq_manager.py b/awx/lib/site-packages/kombu/tests/utilities/test_amq_manager.py new file mode 100644 index 0000000000..ccf4ec088a --- /dev/null +++ b/awx/lib/site-packages/kombu/tests/utilities/test_amq_manager.py @@ -0,0 +1,38 @@ +from __future__ import absolute_import +from __future__ import with_statement + +from mock import patch + +from kombu import Connection +from kombu.tests.utils import TestCase, mask_modules, module_exists + + +class test_get_manager(TestCase): + + @mask_modules('pyrabbit') + def test_without_pyrabbit(self): + with self.assertRaises(ImportError): + Connection('amqp://').get_manager() + + @module_exists('pyrabbit') + def test_with_pyrabbit(self): + with patch('pyrabbit.Client', create=True) as Client: + manager = Connection('amqp://').get_manager() + self.assertIsNotNone(manager) + Client.assert_called_with( + 'localhost:55672', 'guest', 'guest', 
+ ) + + @module_exists('pyrabbit') + def test_transport_options(self): + with patch('pyrabbit.Client', create=True) as Client: + manager = Connection('amqp://', transport_options={ + 'manager_hostname': 'admin.mq.vandelay.com', + 'manager_port': 808, + 'manager_userid': 'george', + 'manager_password': 'bosco', + }).get_manager() + self.assertIsNotNone(manager) + Client.assert_called_with( + 'admin.mq.vandelay.com:808', 'george', 'bosco', + ) diff --git a/awx/lib/site-packages/kombu/tests/utilities/test_debug.py b/awx/lib/site-packages/kombu/tests/utilities/test_debug.py new file mode 100644 index 0000000000..b36f9fcfbb --- /dev/null +++ b/awx/lib/site-packages/kombu/tests/utilities/test_debug.py @@ -0,0 +1,57 @@ +from __future__ import absolute_import +from __future__ import with_statement + +import logging + +from mock import Mock, patch + +from kombu.utils.debug import ( + setup_logging, + Logwrapped, +) +from kombu.tests.utils import TestCase + + +class test_setup_logging(TestCase): + + def test_adds_handlers_sets_level(self): + with patch('kombu.utils.debug.get_logger') as get_logger: + logger = get_logger.return_value = Mock() + setup_logging(loggers=['kombu.test']) + + get_logger.assert_called_with('kombu.test') + + self.assertTrue(logger.addHandler.called) + logger.setLevel.assert_called_with(logging.DEBUG) + + +class test_Logwrapped(TestCase): + + def test_wraps(self): + with patch('kombu.utils.debug.get_logger') as get_logger: + logger = get_logger.return_value = Mock() + + W = Logwrapped(Mock(), 'kombu.test') + get_logger.assert_called_with('kombu.test') + self.assertIsNotNone(W.instance) + self.assertIs(W.logger, logger) + + W.instance.__repr__ = lambda s: 'foo' + self.assertEqual(repr(W), 'foo') + W.instance.some_attr = 303 + self.assertEqual(W.some_attr, 303) + + W.instance.some_method.__name__ = 'some_method' + W.some_method(1, 2, kw=1) + W.instance.some_method.assert_called_with(1, 2, kw=1) + + W.some_method() + W.instance.some_method.assert_called_with() + + W.some_method(kw=1) + W.instance.some_method.assert_called_with(kw=1) + + W.ident = 'ident' + W.some_method(kw=1) + self.assertTrue(logger.debug.called) + self.assertIn('ident', logger.debug.call_args[0][0]) diff --git a/awx/lib/site-packages/kombu/tests/utilities/test_encoding.py b/awx/lib/site-packages/kombu/tests/utilities/test_encoding.py new file mode 100644 index 0000000000..b2942b671d --- /dev/null +++ b/awx/lib/site-packages/kombu/tests/utilities/test_encoding.py @@ -0,0 +1,85 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +from __future__ import with_statement + +import sys + +from contextlib import contextmanager +from mock import patch +from nose import SkipTest + +from kombu.utils.encoding import safe_str + +from kombu.tests.utils import TestCase + + +@contextmanager +def clean_encoding(): + old_encoding = sys.modules.pop('kombu.utils.encoding', None) + import kombu.utils.encoding + try: + yield kombu.utils.encoding + finally: + if old_encoding: + sys.modules['kombu.utils.encoding'] = old_encoding + + +class test_default_encoding(TestCase): + + @patch('sys.getfilesystemencoding') + def test_default(self, getfilesystemencoding): + getfilesystemencoding.return_value = 'ascii' + with clean_encoding() as encoding: + enc = encoding.default_encoding() + if sys.platform.startswith('java'): + self.assertEqual(enc, 'utf-8') + else: + self.assertEqual(enc, 'ascii') + getfilesystemencoding.assert_called_with() + + +class test_encoding_utils(TestCase): + + def setUp(self): + if sys.version_info >= 
(3, 0):
+            raise SkipTest('not relevant on py3k')
+
+    def test_str_to_bytes(self):
+        with clean_encoding() as e:
+            self.assertIsInstance(e.str_to_bytes(u'foobar'), str)
+            self.assertIsInstance(e.str_to_bytes('foobar'), str)
+
+    def test_from_utf8(self):
+        with clean_encoding() as e:
+            self.assertIsInstance(e.from_utf8(u'foobar'), str)
+
+    def test_default_encode(self):
+        with clean_encoding() as e:
+            self.assertTrue(e.default_encode('foo'))
+
+
+class test_safe_str(TestCase):
+
+    def test_when_str(self):
+        self.assertEqual(safe_str('foo'), 'foo')
+
+    def test_when_unicode(self):
+        self.assertIsInstance(safe_str(u'foo'), str)
+
+    def test_when_containing_high_chars(self):
+        s = u'The quiæk fåx jømps øver the lazy dåg'
+        res = safe_str(s)
+        self.assertIsInstance(res, str)
+
+    def test_when_not_string(self):
+        o = object()
+        self.assertEqual(safe_str(o), repr(o))
+
+    def test_when_unrepresentable(self):
+
+        class O(object):
+
+            def __repr__(self):
+                raise KeyError('foo')
+
+        self.assertIn('<Unrepresentable', safe_str(O())) diff --git a/awx/lib/site-packages/kombu/transport/SQS.py b/awx/lib/site-packages/kombu/transport/SQS.py new file mode 100644 --- /dev/null +++ b/awx/lib/site-packages/kombu/transport/SQS.py +"""
+kombu.transport.SQS
+===================
+
+Amazon SQS transport.
+
+"""
+from __future__ import absolute_import
+
+import socket
+import string
+
+from Queue import Empty
+
+from anyjson import loads, dumps
+
+import boto
+from boto import exception
+from boto import sdb as _sdb
+from boto import sqs as _sqs
+from boto.sdb.domain import Domain
+from boto.sdb.connection import SDBConnection
+from boto.sqs.connection import SQSConnection
+from boto.sqs.message import Message
+
+from kombu.exceptions import StdConnectionError, StdChannelError
+from kombu.utils import cached_property, uuid
+from kombu.utils.encoding import safe_str
+
+from . import virtual
+
+# dots are replaced by dash, all other punctuation replaced by underscore.
+CHARS_REPLACE_TABLE = dict((ord(c), 0x5f)
+                           for c in string.punctuation if c not in '-_.')
+CHARS_REPLACE_TABLE[0x2e] = 0x2d  # '.' -> '-'
+
+
+def maybe_int(x):
+    try:
+        return int(x)
+    except ValueError:
+        return x
+BOTO_VERSION = tuple(maybe_int(part) for part in boto.__version__.split('.'))
+W_LONG_POLLING = BOTO_VERSION >= (2, 8)
+
+
+class Table(Domain):
+    """Amazon SimpleDB domain describing the message routing table."""
+    # caches queues already bound, so we don't have to declare them again.
+    _already_bound = set()
+
+    def routes_for(self, exchange):
+        """Iterator giving all routes for an exchange."""
+        return self.select("""WHERE exchange = '%s'""" % exchange)
+
+    def get_queue(self, queue):
+        """Get binding for queue."""
+        qid = self._get_queue_id(queue)
+        if qid:
+            return self.get_item(qid)
+
+    def create_binding(self, queue):
+        """Get binding item for queue.
+
+        Creates the item if it doesn't exist.
+
+        """
+        item = self.get_queue(queue)
+        if item:
+            return item, item['id']
+        id = uuid()
+        return self.new_item(id), id
+
+    def queue_bind(self, exchange, routing_key, pattern, queue):
+        if queue not in self._already_bound:
+            binding, id = self.create_binding(queue)
+            binding.update(exchange=exchange,
+                           routing_key=routing_key or '',
+                           pattern=pattern or '',
+                           queue=queue or '',
+                           id=id)
+            binding.save()
+            self._already_bound.add(queue)
+
+    def queue_delete(self, queue):
+        """delete queue by name."""
+        self._already_bound.discard(queue)
+        item = self._get_queue_item(queue)
+        if item:
+            self.delete_item(item)
+
+    def exchange_delete(self, exchange):
+        """Delete all routes for `exchange`."""
+        for item in self.routes_for(exchange):
+            self.delete_item(item['id'])
+
+    def get_item(self, item_name):
+        """Uses `consistent_read` by default."""
+        # Domain is an old-style class, can't use super().
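+        # Try the cheap eventually-consistent read first, then fall back
+        # to a consistent read if the item is not visible yet.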
+ for consistent_read in (False, True): + item = Domain.get_item(self, item_name, consistent_read) + if item: + return item + + def select(self, query='', next_token=None, + consistent_read=True, max_items=None): + """Uses `consistent_read` by default.""" + query = """SELECT * FROM `%s` %s""" % (self.name, query) + return Domain.select(self, query, next_token, + consistent_read, max_items) + + def _try_first(self, query='', **kwargs): + for c in (False, True): + for item in self.select(query, consistent_read=c, **kwargs): + return item + + def get_exchanges(self): + return list(set(i['exchange'] for i in self.select())) + + def _get_queue_item(self, queue): + return self._try_first("""WHERE queue = '%s' limit 1""" % queue) + + def _get_queue_id(self, queue): + item = self._get_queue_item(queue) + if item: + return item['id'] + + +class Channel(virtual.Channel): + Table = Table + + default_region = 'us-east-1' + default_visibility_timeout = 1800 # 30 minutes. + # 20 seconds is the max value currently supported by SQS. + default_wait_time_seconds = 1 # disabled: see Issue #198 + domain_format = 'kombu%(vhost)s' + _sdb = None + _sqs = None + _queue_cache = {} + _noack_queues = set() + + def __init__(self, *args, **kwargs): + super(Channel, self).__init__(*args, **kwargs) + + # SQS blows up when you try to create a new queue if one already + # exists with a different visibility_timeout, so this prepopulates + # the queue_cache to protect us from recreating + # queues that are known to already exist. + queues = self.sqs.get_all_queues(prefix=self.queue_name_prefix) + for queue in queues: + self._queue_cache[queue.name] = queue + + def basic_consume(self, queue, no_ack, *args, **kwargs): + if no_ack: + self._noack_queues.add(queue) + return super(Channel, self).basic_consume(queue, no_ack, + *args, **kwargs) + + def basic_cancel(self, consumer_tag): + if consumer_tag in self._consumers: + queue = self._tag_to_queue[consumer_tag] + self._noack_queues.discard(queue) + return super(Channel, self).basic_cancel(consumer_tag) + + def entity_name(self, name, table=CHARS_REPLACE_TABLE): + """Format AMQP queue name into a legal SQS queue name.""" + return unicode(safe_str(name)).translate(table) + + def _new_queue(self, queue, **kwargs): + """Ensures a queue exists in SQS.""" + # Translate to SQS name for consistency with initial + # _queue_cache population. + queue = self.entity_name(self.queue_name_prefix + queue) + try: + return self._queue_cache[queue] + except KeyError: + q = self._queue_cache[queue] = self.sqs.create_queue( + queue, self.visibility_timeout, + ) + return q + + def _queue_bind(self, *args): + """Bind ``queue`` to ``exchange`` with routing key. + + Route will be stored in SDB if so enabled. + + """ + if self.supports_fanout: + self.table.queue_bind(*args) + + def get_table(self, exchange): + """Get routing table. + + Retrieved from SDB if :attr:`supports_fanout`. 
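+        Otherwise falls back to the transport's default in-memory table.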
+ + """ + if self.supports_fanout: + return [(r['routing_key'], r['pattern'], r['queue']) + for r in self.table.routes_for(exchange)] + return super(Channel, self).get_table(exchange) + + def get_exchanges(self): + if self.supports_fanout: + return self.table.get_exchanges() + return super(Channel, self).get_exchanges() + + def _delete(self, queue, *args): + """delete queue by name.""" + self._queue_cache.pop(queue, None) + if self.supports_fanout: + self.table.queue_delete(queue) + super(Channel, self)._delete(queue) + + def exchange_delete(self, exchange, **kwargs): + """Delete exchange by name.""" + if self.supports_fanout: + self.table.exchange_delete(exchange) + super(Channel, self).exchange_delete(exchange, **kwargs) + + def _has_queue(self, queue, **kwargs): + """Returns True if ``queue`` has been previously declared.""" + if self.supports_fanout: + return bool(self.table.get_queue(queue)) + return super(Channel, self)._has_queue(queue) + + def _put(self, queue, message, **kwargs): + """Put message onto queue.""" + q = self._new_queue(queue) + m = Message() + m.set_body(dumps(message)) + q.write(m) + + def _put_fanout(self, exchange, message, **kwargs): + """Deliver fanout message to all queues in ``exchange``.""" + for route in self.table.routes_for(exchange): + self._put(route['queue'], message, **kwargs) + + def _get(self, queue): + """Try to retrieve a single message off ``queue``.""" + q = self._new_queue(queue) + if W_LONG_POLLING: + rs = q.get_messages(1, wait_time_seconds=self.wait_time_seconds) + else: # boto < 2.8 + rs = q.get_messages(1) + if rs: + m = rs[0] + payload = loads(rs[0].get_body()) + if queue in self._noack_queues: + q.delete_message(m) + else: + payload['properties']['delivery_info'].update({ + 'sqs_message': m, 'sqs_queue': q, }) + return payload + raise Empty() + + def _restore(self, message, + unwanted_delivery_info=('sqs_message', 'sqs_queue')): + for unwanted_key in unwanted_delivery_info: + # Remove objects that aren't JSON serializable (Issue #1108). + message.delivery_info.pop(unwanted_key, None) + return super(Channel, self)._restore(message) + + def basic_ack(self, delivery_tag): + delivery_info = self.qos.get(delivery_tag).delivery_info + try: + queue = delivery_info['sqs_queue'] + except KeyError: + pass + else: + queue.delete_message(delivery_info['sqs_message']) + super(Channel, self).basic_ack(delivery_tag) + + def _size(self, queue): + """Returns the number of messages in a queue.""" + return self._new_queue(queue).count() + + def _purge(self, queue): + """Deletes all current messages in a queue.""" + q = self._new_queue(queue) + # SQS is slow at registering messages, so run for a few + # iterations to ensure messages are deleted. + size = 0 + for i in xrange(10): + size += q.count() + if not size: + break + q.clear() + return size + + def close(self): + super(Channel, self).close() + for conn in (self._sqs, self._sdb): + if conn: + try: + conn.close() + except AttributeError, exc: # FIXME ??? 
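+                    # boto's close() may raise "can't set attribute";
+                    # ignore only that error and re-raise anything else.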
+ if "can't set attribute" not in str(exc): + raise + + def _get_regioninfo(self, regions): + if self.region: + for _r in regions: + if _r.name == self.region: + return _r + + def _aws_connect_to(self, fun, regions): + conninfo = self.conninfo + region = self._get_regioninfo(regions) + return fun(region=region, + aws_access_key_id=conninfo.userid, + aws_secret_access_key=conninfo.password, + port=conninfo.port) + + def _next_delivery_tag(self): + return uuid() # See #73 + + @property + def sqs(self): + if self._sqs is None: + self._sqs = self._aws_connect_to(SQSConnection, _sqs.regions()) + return self._sqs + + @property + def sdb(self): + if self._sdb is None: + self._sdb = self._aws_connect_to(SDBConnection, _sdb.regions()) + return self._sdb + + @property + def table(self): + name = self.entity_name( + self.domain_format % {'vhost': self.conninfo.virtual_host}) + d = self.sdb.get_object( + 'CreateDomain', {'DomainName': name}, self.Table) + d.name = name + return d + + @property + def conninfo(self): + return self.connection.client + + @property + def transport_options(self): + return self.connection.client.transport_options + + @cached_property + def visibility_timeout(self): + return (self.transport_options.get('visibility_timeout') or + self.default_visibility_timeout) + + @cached_property + def queue_name_prefix(self): + return self.transport_options.get('queue_name_prefix', '') + + @cached_property + def supports_fanout(self): + return self.transport_options.get('sdb_persistence', False) + + @cached_property + def region(self): + return self.transport_options.get('region') or self.default_region + + @cached_property + def wait_time_seconds(self): + return (self.transport_options.get('wait_time_seconds') or + self.default_wait_time_seconds) + + +class Transport(virtual.Transport): + Channel = Channel + + polling_interval = 0 + wait_time_seconds = 20 + default_port = None + connection_errors = (StdConnectionError, exception.SQSError, socket.error) + channel_errors = (exception.SQSDecodeError, StdChannelError) + driver_type = 'sqs' + driver_name = 'sqs' diff --git a/awx/lib/site-packages/kombu/transport/__init__.py b/awx/lib/site-packages/kombu/transport/__init__.py new file mode 100644 index 0000000000..212ef3f6b0 --- /dev/null +++ b/awx/lib/site-packages/kombu/transport/__init__.py @@ -0,0 +1,106 @@ +""" +kombu.transport +=============== + +Built-in transports. + +""" +from __future__ import absolute_import + +from kombu.syn import _detect_environment +from kombu.utils import symbol_by_name + + +def supports_librabbitmq(): + if _detect_environment() == 'default': + try: + import librabbitmq # noqa + return True + except ImportError: + pass + return False + + +def _ghettoq(name, new, alias=None): + xxx = new # stupid enclosing + + def __inner(): + import warnings + _new = callable(xxx) and xxx() or xxx + gtransport = 'ghettoq.taproot.%s' % name + ktransport = 'kombu.transport.%s.Transport' % _new + this = alias or name + warnings.warn(""" + Ghettoq does not work with Kombu, but there is now a built-in version + of the %s transport. 
+ + You should replace %r with simply: %r + """ % (name, gtransport, this)) + return ktransport + + return __inner + + +TRANSPORT_ALIASES = { + 'amqp': 'kombu.transport.pyamqp:Transport', + 'pyamqp': 'kombu.transport.pyamqp:Transport', + 'librabbitmq': 'kombu.transport.librabbitmq:Transport', + 'memory': 'kombu.transport.memory:Transport', + 'redis': 'kombu.transport.redis:Transport', + 'SQS': 'kombu.transport.SQS:Transport', + 'sqs': 'kombu.transport.SQS:Transport', + 'beanstalk': 'kombu.transport.beanstalk:Transport', + 'mongodb': 'kombu.transport.mongodb:Transport', + 'couchdb': 'kombu.transport.couchdb:Transport', + 'zookeeper': 'kombu.transport.zookeeper:Transport', + 'django': 'kombu.transport.django:Transport', + 'sqlalchemy': 'kombu.transport.sqlalchemy:Transport', + 'sqla': 'kombu.transport.sqlalchemy:Transport', + 'ghettoq.taproot.Redis': _ghettoq('Redis', 'redis', 'redis'), + 'ghettoq.taproot.Database': _ghettoq('Database', 'django', 'django'), + 'ghettoq.taproot.MongoDB': _ghettoq('MongoDB', 'mongodb'), + 'ghettoq.taproot.Beanstalk': _ghettoq('Beanstalk', 'beanstalk'), + 'ghettoq.taproot.CouchDB': _ghettoq('CouchDB', 'couchdb'), + 'filesystem': 'kombu.transport.filesystem:Transport', + 'zeromq': 'kombu.transport.zmq:Transport', + 'zmq': 'kombu.transport.zmq:Transport', + 'amqplib': 'kombu.transport.amqplib:Transport', +} + +_transport_cache = {} + + +def resolve_transport(transport=None): + if isinstance(transport, basestring): + try: + transport = TRANSPORT_ALIASES[transport] + except KeyError: + if '.' not in transport and ':' not in transport: + from kombu.utils.text import fmatch_best + alt = fmatch_best(transport, TRANSPORT_ALIASES) + if alt: + raise KeyError( + 'No such transport: %s. Did you mean %s?' % ( + transport, alt)) + raise KeyError('No such transport: %s' % transport) + else: + if callable(transport): + transport = transport() + return symbol_by_name(transport) + return transport + + +def get_transport_cls(transport=None): + """Get transport class by name. + + The transport string is the full path to a transport class, e.g.:: + + "kombu.transport.pyamqp:Transport" + + If the name does not include `"."` (is not fully qualified), + the alias table will be consulted. + + """ + if transport not in _transport_cache: + _transport_cache[transport] = resolve_transport(transport) + return _transport_cache[transport] diff --git a/awx/lib/site-packages/kombu/transport/amqplib.py b/awx/lib/site-packages/kombu/transport/amqplib.py new file mode 100644 index 0000000000..bcdc57d057 --- /dev/null +++ b/awx/lib/site-packages/kombu/transport/amqplib.py @@ -0,0 +1,388 @@ +""" +kombu.transport.amqplib +======================= + +amqplib transport. + +""" +from __future__ import absolute_import + +import errno +import socket + +try: + from ssl import SSLError +except ImportError: + class SSLError(Exception): # noqa + pass +from struct import unpack + +from amqplib import client_0_8 as amqp +from amqplib.client_0_8 import transport +from amqplib.client_0_8.channel import Channel as _Channel +from amqplib.client_0_8.exceptions import AMQPConnectionException +from amqplib.client_0_8.exceptions import AMQPChannelException + +from kombu.exceptions import StdConnectionError, StdChannelError +from kombu.utils.encoding import str_to_bytes +from kombu.utils.amq_manager import get_manager + +from . 
import base + +DEFAULT_PORT = 5672 +HAS_MSG_PEEK = hasattr(socket, 'MSG_PEEK') + +# amqplib's handshake mistakenly identifies as protocol version 1191, +# this breaks in RabbitMQ tip, which no longer falls back to +# 0-8 for unknown ids. +transport.AMQP_PROTOCOL_HEADER = str_to_bytes('AMQP\x01\x01\x08\x00') + + +# - fixes warnings when socket is not connected. +class TCPTransport(transport.TCPTransport): + + def read_frame(self): + frame_type, channel, size = unpack('>BHI', self._read(7, True)) + payload = self._read(size) + ch = ord(self._read(1)) + if ch == 206: # '\xce' + return frame_type, channel, payload + else: + raise Exception( + 'Framing Error, received 0x%02x while expecting 0xce' % ch) + + def _read(self, n, initial=False): + while len(self._read_buffer) < n: + try: + s = self.sock.recv(65536) + except socket.error, exc: + if not initial and exc.errno in (errno.EAGAIN, errno.EINTR): + continue + raise + if not s: + raise IOError('Socket closed') + self._read_buffer += s + + result = self._read_buffer[:n] + self._read_buffer = self._read_buffer[n:] + + return result + + def __del__(self): + try: + self.close() + except Exception: + pass + finally: + self.sock = None + +transport.TCPTransport = TCPTransport + + +class SSLTransport(transport.SSLTransport): + + def __init__(self, host, connect_timeout, ssl): + if isinstance(ssl, dict): + self.sslopts = ssl + self.sslobj = None + + transport._AbstractTransport.__init__(self, host, connect_timeout) + + def read_frame(self): + frame_type, channel, size = unpack('>BHI', self._read(7, True)) + payload = self._read(size) + ch = ord(self._read(1)) + if ch == 206: # '\xce' + return frame_type, channel, payload + else: + raise Exception( + 'Framing Error, received 0x%02x while expecting 0xce' % ch) + + def _read(self, n, initial=False): + result = '' + + while len(result) < n: + try: + s = self.sslobj.read(n - len(result)) + except socket.error, exc: + if not initial and exc.errno in (errno.EAGAIN, errno.EINTR): + continue + raise + if not s: + raise IOError('Socket closed') + result += s + + return result + + def __del__(self): + try: + self.close() + except Exception: + pass + finally: + self.sock = None +transport.SSLTransport = SSLTransport + + +class Connection(amqp.Connection): # pragma: no cover + + def _do_close(self, *args, **kwargs): + # amqplib does not ignore socket errors when connection + # is closed on the remote end. 
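+        # Shield the close handshake so a dead socket counts as closed.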
+ try: + super(Connection, self)._do_close(*args, **kwargs) + except socket.error: + pass + + def _dispatch_basic_return(self, channel, args, msg): + reply_code = args.read_short() + reply_text = args.read_shortstr() + exchange = args.read_shortstr() + routing_key = args.read_shortstr() + + exc = AMQPChannelException(reply_code, reply_text, (50, 60)) + if channel.events['basic_return']: + for callback in channel.events['basic_return']: + callback(exc, exchange, routing_key, msg) + else: + raise exc + + def __init__(self, *args, **kwargs): + super(Connection, self).__init__(*args, **kwargs) + self._method_override = {(60, 50): self._dispatch_basic_return} + + def drain_events(self, timeout=None): + """Wait for an event on a channel.""" + chanmap = self.channels + chanid, method_sig, args, content = self._wait_multiple( + chanmap, None, timeout=timeout) + + channel = chanmap[chanid] + + if (content + and channel.auto_decode + and hasattr(content, 'content_encoding')): + try: + content.body = content.body.decode(content.content_encoding) + except Exception: + pass + + amqp_method = self._method_override.get(method_sig) or \ + channel._METHOD_MAP.get(method_sig, None) + + if amqp_method is None: + raise Exception('Unknown AMQP method (%d, %d)' % method_sig) + + if content is None: + return amqp_method(channel, args) + else: + return amqp_method(channel, args, content) + + def read_timeout(self, timeout=None): + if timeout is None: + return self.method_reader.read_method() + sock = self.transport.sock + prev = sock.gettimeout() + if prev != timeout: + sock.settimeout(timeout) + try: + try: + return self.method_reader.read_method() + except SSLError, exc: + # http://bugs.python.org/issue10272 + if 'timed out' in str(exc): + raise socket.timeout() + # Non-blocking SSL sockets can throw SSLError + if 'The operation did not complete' in str(exc): + raise socket.timeout() + raise + finally: + if prev != timeout: + sock.settimeout(prev) + + def _wait_multiple(self, channels, allowed_methods, timeout=None): + for channel_id, channel in channels.iteritems(): + method_queue = channel.method_queue + for queued_method in method_queue: + method_sig = queued_method[0] + if (allowed_methods is None + or method_sig in allowed_methods + or method_sig == (20, 40)): + method_queue.remove(queued_method) + method_sig, args, content = queued_method + return channel_id, method_sig, args, content + + # Nothing queued, need to wait for a method from the peer + read_timeout = self.read_timeout + wait = self.wait + while 1: + channel, method_sig, args, content = read_timeout(timeout) + + if (channel in channels + and allowed_methods is None + or method_sig in allowed_methods + or method_sig == (20, 40)): + return channel, method_sig, args, content + + # Not the channel and/or method we were looking for. Queue + # this method for later + channels[channel].method_queue.append((method_sig, args, content)) + + # + # If we just queued up a method for channel 0 (the Connection + # itself) it's probably a close method in reaction to some + # error, so deal with it right away. 
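+            # Calling wait() dispatches the queued channel 0 method,
+            # typically raising the pending connection error.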
+ # + if channel == 0: + wait() + + def channel(self, channel_id=None): + try: + return self.channels[channel_id] + except KeyError: + return Channel(self, channel_id) + + +class Message(base.Message): + + def __init__(self, channel, msg, **kwargs): + props = msg.properties + super(Message, self).__init__( + channel, + body=msg.body, + delivery_tag=msg.delivery_tag, + content_type=props.get('content_type'), + content_encoding=props.get('content_encoding'), + delivery_info=msg.delivery_info, + properties=msg.properties, + headers=props.get('application_headers') or {}, + **kwargs) + + +class Channel(_Channel, base.StdChannel): + Message = Message + events = {'basic_return': set()} + + def __init__(self, *args, **kwargs): + self.no_ack_consumers = set() + super(Channel, self).__init__(*args, **kwargs) + + def prepare_message(self, body, priority=None, content_type=None, + content_encoding=None, headers=None, properties=None): + """Encapsulate data into a AMQP message.""" + return amqp.Message(body, priority=priority, + content_type=content_type, + content_encoding=content_encoding, + application_headers=headers, + **properties) + + def message_to_python(self, raw_message): + """Convert encoded message body back to a Python value.""" + return self.Message(self, raw_message) + + def close(self): + try: + super(Channel, self).close() + finally: + self.connection = None + + def basic_consume(self, *args, **kwargs): + consumer_tag = super(Channel, self).basic_consume(*args, **kwargs) + if kwargs['no_ack']: + self.no_ack_consumers.add(consumer_tag) + return consumer_tag + + def basic_cancel(self, consumer_tag, **kwargs): + self.no_ack_consumers.discard(consumer_tag) + return super(Channel, self).basic_cancel(consumer_tag, **kwargs) + + +class Transport(base.Transport): + Connection = Connection + + default_port = DEFAULT_PORT + + # it's very annoying that amqplib sometimes raises AttributeError + # if the connection is lost, but nothing we can do about that here. 
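+    # Listing AttributeError here makes it handled like any other
+    # connection failure.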
+ connection_errors = (StdConnectionError, + AMQPConnectionException, + socket.error, + IOError, + OSError, + AttributeError) + channel_errors = (StdChannelError, AMQPChannelException, ) + + nb_keep_draining = True + driver_name = "amqplib" + driver_type = "amqp" + supports_ev = True + + def __init__(self, client, **kwargs): + self.client = client + self.default_port = kwargs.get("default_port") or self.default_port + + def create_channel(self, connection): + return connection.channel() + + def drain_events(self, connection, **kwargs): + return connection.drain_events(**kwargs) + + def establish_connection(self): + """Establish connection to the AMQP broker.""" + conninfo = self.client + for name, default_value in self.default_connection_params.items(): + if not getattr(conninfo, name, None): + setattr(conninfo, name, default_value) + if conninfo.hostname == 'localhost': + conninfo.hostname = '127.0.0.1' + conn = self.Connection(host=conninfo.host, + userid=conninfo.userid, + password=conninfo.password, + login_method=conninfo.login_method, + virtual_host=conninfo.virtual_host, + insist=conninfo.insist, + ssl=conninfo.ssl, + connect_timeout=conninfo.connect_timeout) + conn.client = self.client + return conn + + def close_connection(self, connection): + """Close the AMQP broker connection.""" + connection.client = None + connection.close() + + def is_alive(self, connection): + if HAS_MSG_PEEK: + sock = connection.transport.sock + prev = sock.gettimeout() + sock.settimeout(0.0001) + try: + sock.recv(1, socket.MSG_PEEK) + except socket.timeout: + pass + except socket.error: + return False + finally: + sock.settimeout(prev) + return True + + def verify_connection(self, connection): + return connection.channels is not None and self.is_alive(connection) + + def eventmap(self, connection): + return {connection.method_reader.source.sock: self.client.drain_nowait} + + def on_poll_init(self, poller): + pass + + def on_poll_start(self): + return {} + + @property + def default_connection_params(self): + return {'userid': 'guest', 'password': 'guest', + 'port': self.default_port, + 'hostname': 'localhost', 'login_method': 'AMQPLAIN'} + + def get_manager(self, *args, **kwargs): + return get_manager(self.client, *args, **kwargs) diff --git a/awx/lib/site-packages/kombu/transport/base.py b/awx/lib/site-packages/kombu/transport/base.py new file mode 100644 index 0000000000..7a2a6e9180 --- /dev/null +++ b/awx/lib/site-packages/kombu/transport/base.py @@ -0,0 +1,257 @@ +""" +kombu.transport.base +==================== + +Base transport interface. 
+ +""" +from __future__ import absolute_import + +from kombu.compression import decompress +from kombu.exceptions import MessageStateError +from kombu.serialization import decode +from kombu.utils import cached_property + +ACKNOWLEDGED_STATES = frozenset(['ACK', 'REJECTED', 'REQUEUED']) + + +class StdChannel(object): + no_ack_consumers = None + + def Consumer(self, *args, **kwargs): + from kombu.messaging import Consumer + return Consumer(self, *args, **kwargs) + + def Producer(self, *args, **kwargs): + from kombu.messaging import Producer + return Producer(self, *args, **kwargs) + + def get_bindings(self): + raise NotImplementedError('%r does not implement list_bindings' % ( + self.__class__, )) + + def after_reply_message_received(self, queue): + """reply queue semantics: can be used to delete the queue + after transient reply message received.""" + pass + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.close() + + +class Message(object): + """Base class for received messages.""" + __slots__ = ('_state', 'channel', 'delivery_tag', + 'content_type', 'content_encoding', + 'delivery_info', 'headers', 'properties', + 'body', '_decoded_cache', 'accept', '__dict__') + MessageStateError = MessageStateError + + def __init__(self, channel, body=None, delivery_tag=None, + content_type=None, content_encoding=None, delivery_info={}, + properties=None, headers=None, postencode=None, + accept=None, **kwargs): + self.channel = channel + self.delivery_tag = delivery_tag + self.content_type = content_type + self.content_encoding = content_encoding + self.delivery_info = delivery_info + self.headers = headers or {} + self.properties = properties or {} + self._decoded_cache = None + self._state = 'RECEIVED' + self.accept = accept + + try: + body = decompress(body, self.headers['compression']) + except KeyError: + pass + if postencode and isinstance(body, unicode): + body = body.encode(postencode) + self.body = body + + def ack(self): + """Acknowledge this message as being processed., + This will remove the message from the queue. + + :raises MessageStateError: If the message has already been + acknowledged/requeued/rejected. + + """ + if self.channel.no_ack_consumers is not None: + try: + consumer_tag = self.delivery_info['consumer_tag'] + except KeyError: + pass + else: + if consumer_tag in self.channel.no_ack_consumers: + return + if self.acknowledged: + raise self.MessageStateError( + 'Message already acknowledged with state: %s' % self._state) + self.channel.basic_ack(self.delivery_tag) + self._state = 'ACK' + + def ack_log_error(self, logger, errors): + try: + self.ack() + except errors, exc: + logger.critical("Couldn't ack %r, reason:%r", + self.delivery_tag, exc, exc_info=True) + + def reject_log_error(self, logger, errors): + try: + self.reject() + except errors, exc: + logger.critical("Couldn't ack %r, reason: %r", + self.delivery_tag, exc, exc_info=True) + + def reject(self): + """Reject this message. + + The message will be discarded by the server. + + :raises MessageStateError: If the message has already been + acknowledged/requeued/rejected. + + """ + if self.acknowledged: + raise self.MessageStateError( + 'Message already acknowledged with state: %s' % self._state) + self.channel.basic_reject(self.delivery_tag, requeue=False) + self._state = 'REJECTED' + + def requeue(self): + """Reject this message and put it back on the queue. + + You must not use this method as a means of selecting messages + to process. 
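+        A requeued message may be redelivered to any consumer.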
+ + :raises MessageStateError: If the message has already been + acknowledged/requeued/rejected. + + """ + if self.acknowledged: + raise self.MessageStateError( + 'Message already acknowledged with state: %s' % self._state) + self.channel.basic_reject(self.delivery_tag, requeue=True) + self._state = 'REQUEUED' + + def decode(self): + """Deserialize the message body, returning the original + python structure sent by the publisher.""" + return decode(self.body, self.content_type, + self.content_encoding, accept=self.accept) + + @property + def acknowledged(self): + """Set to true if the message has been acknowledged.""" + return self._state in ACKNOWLEDGED_STATES + + @property + def payload(self): + """The decoded message body.""" + if not self._decoded_cache: + self._decoded_cache = self.decode() + return self._decoded_cache + + +class Management(object): + + def __init__(self, transport): + self.transport = transport + + def get_bindings(self): + raise NotImplementedError( + 'Your transport does not implement list_bindings') + + +class Transport(object): + """Base class for transports.""" + Management = Management + + #: The :class:`~kombu.Connection` owning this instance. + client = None + + #: Default port used when no port has been specified. + default_port = None + + #: Tuple of errors that can happen due to connection failure. + connection_errors = () + + #: Tuple of errors that can happen due to channel/method failure. + channel_errors = () + + #: For non-blocking use, an eventloop should keep + #: draining events as long as ``connection.more_to_read`` is True. + nb_keep_draining = False + + #: Type of driver, can be used to separate transports + #: using the AMQP protocol (driver_type: 'amqp'), + #: Redis (driver_type: 'redis'), etc... + driver_type = 'N/A' + + #: Name of driver library (e.g. 'py-amqp', 'redis', 'beanstalkc'). + driver_name = 'N/A' + + #: Whether this transports support heartbeats, + #: and that the :meth:`heartbeat_check` method has any effect. + supports_heartbeats = False + + #: Set to true if the transport supports the AIO interface. + supports_ev = False + + def __init__(self, client, **kwargs): + self.client = client + + def establish_connection(self): + raise NotImplementedError('Subclass responsibility') + + def close_connection(self, connection): + raise NotImplementedError('Subclass responsibility') + + def create_channel(self, connection): + raise NotImplementedError('Subclass responsibility') + + def close_channel(self, connection): + raise NotImplementedError('Subclass responsibility') + + def drain_events(self, connection, **kwargs): + raise NotImplementedError('Subclass responsibility') + + def heartbeat_check(self, connection, rate=2): + pass + + def driver_version(self): + return 'N/A' + + def eventmap(self, connection): + """Map of fd -> event handler for event based use. 
+ Unconvenient to use, and limited transport support.""" + return {} + + def on_poll_init(self, poller): + pass + + def on_poll_start(self): + raise NotImplementedError('transport: no eventloop support') + + def on_poll_empty(self): + pass + + def verify_connection(self, connection): + return True + + @property + def default_connection_params(self): + return {} + + def get_manager(self, *args, **kwargs): + return self.Management(self) + + @cached_property + def manager(self): + return self.get_manager() diff --git a/awx/lib/site-packages/kombu/transport/beanstalk.py b/awx/lib/site-packages/kombu/transport/beanstalk.py new file mode 100644 index 0000000000..c8ed79a916 --- /dev/null +++ b/awx/lib/site-packages/kombu/transport/beanstalk.py @@ -0,0 +1,141 @@ +""" +kombu.transport.beanstalk +========================= + +Beanstalk transport. + +:copyright: (c) 2010 - 2012 by David Ziegler. +:license: BSD, see LICENSE for more details. + +""" +from __future__ import absolute_import + +import beanstalkc +import socket + +from anyjson import loads, dumps +from Queue import Empty + +from kombu.exceptions import StdConnectionError, StdChannelError + +from . import virtual + +DEFAULT_PORT = 11300 + +__author__ = 'David Ziegler ' + + +class Channel(virtual.Channel): + _client = None + + def _parse_job(self, job): + item, dest = None, None + if job: + try: + item = loads(job.body) + dest = job.stats()['tube'] + except Exception: + job.bury() + else: + job.delete() + else: + raise Empty() + return item, dest + + def _put(self, queue, message, **kwargs): + extra = {} + priority = message['properties']['delivery_info']['priority'] + ttr = message['properties'].get('ttr') + if ttr is not None: + extra['ttr'] = ttr + + self.client.use(queue) + self.client.put(dumps(message), priority=priority, **extra) + + def _get(self, queue): + if queue not in self.client.watching(): + self.client.watch(queue) + + [self.client.ignore(active) for active in self.client.watching() + if active != queue] + + job = self.client.reserve(timeout=1) + item, dest = self._parse_job(job) + return item + + def _get_many(self, queues, timeout=1): + # timeout of None will cause beanstalk to timeout waiting + # for a new request + if timeout is None: + timeout = 1 + + watching = self.client.watching() + + [self.client.watch(active) for active in queues + if active not in watching] + + [self.client.ignore(active) for active in watching + if active not in queues] + + job = self.client.reserve(timeout=timeout) + return self._parse_job(job) + + def _purge(self, queue): + if queue not in self.client.watching(): + self.client.watch(queue) + + [self.client.ignore(active) + for active in self.client.watching() + if active != queue] + count = 0 + while 1: + job = self.client.reserve(timeout=1) + if job: + job.delete() + count += 1 + else: + break + return count + + def _size(self, queue): + return 0 + + def _open(self): + conninfo = self.connection.client + host = conninfo.hostname or 'localhost' + port = conninfo.port or DEFAULT_PORT + conn = beanstalkc.Connection(host=host, port=port) + conn.connect() + return conn + + def close(self): + if self._client is not None: + return self._client.close() + super(Channel, self).close() + + @property + def client(self): + if self._client is None: + self._client = self._open() + return self._client + + +class Transport(virtual.Transport): + Channel = Channel + + polling_interval = 1 + default_port = DEFAULT_PORT + connection_errors = (StdConnectionError, + socket.error, + beanstalkc.SocketError, + IOError) 
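+    # BeanstalkcException is beanstalkc's base error class, so any
+    # protocol-level failure surfaces as a channel error.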
+ channel_errors = (StdChannelError, + socket.error, + IOError, + beanstalkc.SocketError, + beanstalkc.BeanstalkcException) + driver_type = 'beanstalk' + driver_name = 'beanstalkc' + + def driver_version(self): + return beanstalkc.__version__ diff --git a/awx/lib/site-packages/kombu/transport/couchdb.py b/awx/lib/site-packages/kombu/transport/couchdb.py new file mode 100644 index 0000000000..b1aeecd203 --- /dev/null +++ b/awx/lib/site-packages/kombu/transport/couchdb.py @@ -0,0 +1,126 @@ +""" +kombu.transport.couchdb +======================= + +CouchDB transport. + +:copyright: (c) 2010 - 2012 by David Clymer. +:license: BSD, see LICENSE for more details. + +""" +from __future__ import absolute_import + +from Queue import Empty + +import socket +import couchdb + +from anyjson import loads, dumps + +from kombu.exceptions import StdConnectionError, StdChannelError +from kombu.utils import uuid4 + +from . import virtual + +DEFAULT_PORT = 5984 +DEFAULT_DATABASE = 'kombu_default' + +__author__ = 'David Clymer ' + + +def create_message_view(db): + from couchdb import design + + view = design.ViewDefinition('kombu', 'messages', """ + function (doc) { + if (doc.queue && doc.payload) + emit(doc.queue, doc); + } + """) + if not view.get_doc(db): + view.sync(db) + + +class Channel(virtual.Channel): + _client = None + + view_created = False + + def _put(self, queue, message, **kwargs): + self.client.save({'_id': uuid4().hex, + 'queue': queue, + 'payload': dumps(message)}) + + def _get(self, queue): + result = self._query(queue, limit=1) + if not result: + raise Empty() + + item = result.rows[0].value + self.client.delete(item) + return loads(item['payload']) + + def _purge(self, queue): + result = self._query(queue) + for item in result: + self.client.delete(item.value) + return len(result) + + def _size(self, queue): + return len(self._query(queue)) + + def _open(self): + conninfo = self.connection.client + dbname = conninfo.virtual_host + proto = conninfo.ssl and 'https' or 'http' + if not dbname or dbname == '/': + dbname = DEFAULT_DATABASE + port = conninfo.port or DEFAULT_PORT + server = couchdb.Server('%s://%s:%s/' % (proto, + conninfo.hostname, + port)) + # Use username and password if avaliable + try: + server.resource.credentials = (conninfo.userid, conninfo.password) + except AttributeError: + pass + try: + return server[dbname] + except couchdb.http.ResourceNotFound: + return server.create(dbname) + + def _query(self, queue, **kwargs): + if not self.view_created: + # if the message view is not yet set up, we'll need it now. 
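+            # this syncs the 'kombu/messages' design document defined in
+            # create_message_view() above.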
+ create_message_view(self.client) + self.view_created = True + return self.client.view('kombu/messages', key=queue, **kwargs) + + @property + def client(self): + if self._client is None: + self._client = self._open() + return self._client + + +class Transport(virtual.Transport): + Channel = Channel + + polling_interval = 1 + default_port = DEFAULT_PORT + connection_errors = (StdConnectionError, + socket.error, + couchdb.HTTPError, + couchdb.ServerError, + couchdb.Unauthorized) + channel_errors = (StdChannelError, + couchdb.HTTPError, + couchdb.ServerError, + couchdb.PreconditionFailed, + couchdb.ResourceConflict, + couchdb.ResourceNotFound) + driver_type = 'couchdb' + driver_name = 'couchdb' + + def driver_version(self): + return couchdb.__version__ diff --git a/awx/lib/site-packages/kombu/transport/django/__init__.py b/awx/lib/site-packages/kombu/transport/django/__init__.py new file mode 100644 index 0000000000..594354f329 --- /dev/null +++ b/awx/lib/site-packages/kombu/transport/django/__init__.py @@ -0,0 +1,70 @@ +"""Kombu transport using the Django database as a message store.""" +from __future__ import absolute_import + +from Queue import Empty + +from anyjson import loads, dumps + +from django.conf import settings +from django.core import exceptions as errors + +from kombu.transport import virtual +from kombu.exceptions import StdConnectionError, StdChannelError + +from .models import Queue + +VERSION = (1, 0, 0) +__version__ = '.'.join(map(str, VERSION)) + +POLLING_INTERVAL = getattr(settings, 'KOMBU_POLLING_INTERVAL', + getattr(settings, 'DJKOMBU_POLLING_INTERVAL', 5.0)) + + +class Channel(virtual.Channel): + + def _new_queue(self, queue, **kwargs): + Queue.objects.get_or_create(name=queue) + + def _put(self, queue, message, **kwargs): + Queue.objects.publish(queue, dumps(message)) + + def basic_consume(self, queue, *args, **kwargs): + qinfo = self.state.bindings[queue] + exchange = qinfo[0] + if self.typeof(exchange).type == 'fanout': + return + super(Channel, self).basic_consume(queue, *args, **kwargs) + + def _get(self, queue): + #self.refresh_connection() + m = Queue.objects.fetch(queue) + if m: + return loads(m) + raise Empty() + + def _size(self, queue): + return Queue.objects.size(queue) + + def _purge(self, queue): + return Queue.objects.purge(queue) + + def refresh_connection(self): + from django import db + db.close_connection() + + +class Transport(virtual.Transport): + Channel = Channel + + default_port = 0 + polling_interval = POLLING_INTERVAL + connection_errors = (StdConnectionError, ) + channel_errors = (StdChannelError, + errors.ObjectDoesNotExist, + errors.MultipleObjectsReturned) + driver_type = 'sql' + driver_name = 'django' + + def driver_version(self): + import django + return '.'.join(map(str, django.VERSION)) diff --git a/awx/lib/site-packages/kombu/transport/django/management/__init__.py b/awx/lib/site-packages/kombu/transport/django/management/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/kombu/transport/django/management/commands/__init__.py b/awx/lib/site-packages/kombu/transport/django/management/commands/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/kombu/transport/django/management/commands/clean_kombu_messages.py b/awx/lib/site-packages/kombu/transport/django/management/commands/clean_kombu_messages.py new file mode 100644 index 0000000000..d9d9475c0f --- /dev/null +++ 
b/awx/lib/site-packages/kombu/transport/django/management/commands/clean_kombu_messages.py @@ -0,0 +1,22 @@ +from __future__ import absolute_import + +from django.core.management.base import BaseCommand + + +def pluralize(desc, value): + if value > 1: + return desc + 's' + return desc + + +class Command(BaseCommand): + requires_model_validation = True + + def handle(self, *args, **options): + from kombu.transport.django.models import Message + + count = Message.objects.filter(visible=False).count() + + print('Removing %s invisible %s... ' % ( + count, pluralize('message', count))) + Message.objects.cleanup() diff --git a/awx/lib/site-packages/kombu/transport/django/managers.py b/awx/lib/site-packages/kombu/transport/django/managers.py new file mode 100644 index 0000000000..6b3f378857 --- /dev/null +++ b/awx/lib/site-packages/kombu/transport/django/managers.py @@ -0,0 +1,86 @@ +from __future__ import absolute_import + +from django.db import transaction, connection, models +try: + from django.db import connections, router +except ImportError: # pre-Django 1.2 + connections = router = None # noqa + + +class QueueManager(models.Manager): + + def publish(self, queue_name, payload): + queue, created = self.get_or_create(name=queue_name) + queue.messages.create(payload=payload) + + def fetch(self, queue_name): + try: + queue = self.get(name=queue_name) + except self.model.DoesNotExist: + return + + return queue.messages.pop() + + def size(self, queue_name): + return self.get(name=queue_name).messages.count() + + def purge(self, queue_name): + try: + queue = self.get(name=queue_name) + except self.model.DoesNotExist: + return + + messages = queue.messages.all() + count = messages.count() + messages.delete() + return count + + +def select_for_update(qs): + try: + return qs.select_for_update() + except AttributeError: + return qs + + +class MessageManager(models.Manager): + _messages_received = [0] + cleanup_every = 10 + + @transaction.commit_manually + def pop(self): + try: + resultset = select_for_update( + self.filter(visible=True).order_by('sent_at', 'id') + ) + result = resultset[0:1].get() + result.visible = False + result.save() + recv = self.__class__._messages_received + recv[0] += 1 + if not recv[0] % self.cleanup_every: + self.cleanup() + transaction.commit() + return result.payload + except self.model.DoesNotExist: + transaction.commit() + except: + transaction.rollback() + + def cleanup(self): + cursor = self.connection_for_write().cursor() + try: + cursor.execute( + 'DELETE FROM %s WHERE visible=%%s' % ( + self.model._meta.db_table, ), + (False, ) + ) + except: + transaction.rollback_unless_managed() + else: + transaction.commit_unless_managed() + + def connection_for_write(self): + if connections: + return connections[router.db_for_write(self.model)] + return connection diff --git a/awx/lib/site-packages/kombu/transport/django/migrations/0001_initial.py b/awx/lib/site-packages/kombu/transport/django/migrations/0001_initial.py new file mode 100644 index 0000000000..a746bf1725 --- /dev/null +++ b/awx/lib/site-packages/kombu/transport/django/migrations/0001_initial.py @@ -0,0 +1,55 @@ +# encoding: utf-8 +# flake8: noqa +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + +class Migration(SchemaMigration): + + def forwards(self, orm): + + # Adding model 'Queue' + db.create_table('djkombu_queue', ( + ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('name', 
self.gf('django.db.models.fields.CharField')(unique=True, max_length=200)), + )) + db.send_create_signal('django', ['Queue']) + + # Adding model 'Message' + db.create_table('djkombu_message', ( + ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('visible', self.gf('django.db.models.fields.BooleanField')(default=True, db_index=True)), + ('sent_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, null=True, db_index=True, blank=True)), + ('payload', self.gf('django.db.models.fields.TextField')()), + ('queue', self.gf('django.db.models.fields.related.ForeignKey')(related_name='messages', to=orm['django.Queue'])), + )) + db.send_create_signal('django', ['Message']) + + + def backwards(self, orm): + + # Deleting model 'Queue' + db.delete_table('djkombu_queue') + + # Deleting model 'Message' + db.delete_table('djkombu_message') + + + models = { + 'django.message': { + 'Meta': {'object_name': 'Message', 'db_table': "'djkombu_message'"}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'payload': ('django.db.models.fields.TextField', [], {}), + 'queue': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'messages'", 'to': "orm['django.Queue']"}), + 'sent_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}), + 'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}) + }, + 'django.queue': { + 'Meta': {'object_name': 'Queue', 'db_table': "'djkombu_queue'"}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}) + } + } + + complete_apps = ['django'] diff --git a/awx/lib/site-packages/kombu/transport/django/migrations/__init__.py b/awx/lib/site-packages/kombu/transport/django/migrations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/kombu/transport/django/models.py b/awx/lib/site-packages/kombu/transport/django/models.py new file mode 100644 index 0000000000..df6a462d20 --- /dev/null +++ b/awx/lib/site-packages/kombu/transport/django/models.py @@ -0,0 +1,32 @@ +from __future__ import absolute_import + +from django.db import models +from django.utils.translation import ugettext_lazy as _ + +from .managers import QueueManager, MessageManager + + +class Queue(models.Model): + name = models.CharField(_('name'), max_length=200, unique=True) + + objects = QueueManager() + + class Meta: + db_table = 'djkombu_queue' + verbose_name = _('queue') + verbose_name_plural = _('queues') + + +class Message(models.Model): + visible = models.BooleanField(default=True, db_index=True) + sent_at = models.DateTimeField(null=True, blank=True, db_index=True, + auto_now_add=True) + payload = models.TextField(_('payload'), null=False) + queue = models.ForeignKey(Queue, related_name='messages') + + objects = MessageManager() + + class Meta: + db_table = 'djkombu_message' + verbose_name = _('message') + verbose_name_plural = _('messages') diff --git a/awx/lib/site-packages/kombu/transport/filesystem.py b/awx/lib/site-packages/kombu/transport/filesystem.py new file mode 100644 index 0000000000..423bbe30ee --- /dev/null +++ b/awx/lib/site-packages/kombu/transport/filesystem.py @@ -0,0 +1,197 @@ +""" +kombu.transport.filesystem +========================== + +Transport using the file system as the message store. 
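+
+Producers write message files to ``data_folder_out`` under an exclusive
+lock; consumers pick them up from ``data_folder_in``.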
+ +""" +from __future__ import absolute_import + +from Queue import Empty + +from anyjson import loads, dumps + +import os +import shutil +import time +import uuid +import tempfile + +from . import virtual +from kombu.exceptions import StdConnectionError, StdChannelError +from kombu.utils import cached_property + +VERSION = (1, 0, 0) +__version__ = ".".join(map(str, VERSION)) + +# needs win32all to work on Windows +if os.name == 'nt': + + import win32con + import win32file + import pywintypes + + LOCK_EX = win32con.LOCKFILE_EXCLUSIVE_LOCK + # 0 is the default + LOCK_SH = 0 # noqa + LOCK_NB = win32con.LOCKFILE_FAIL_IMMEDIATELY # noqa + __overlapped = pywintypes.OVERLAPPED() + + def lock(file, flags): + hfile = win32file._get_osfhandle(file.fileno()) + win32file.LockFileEx(hfile, flags, 0, 0xffff0000, __overlapped) + + def unlock(file): + hfile = win32file._get_osfhandle(file.fileno()) + win32file.UnlockFileEx(hfile, 0, 0xffff0000, __overlapped) + +elif os.name == 'posix': + + import fcntl + from fcntl import LOCK_EX, LOCK_SH, LOCK_NB # noqa + + def lock(file, flags): # noqa + fcntl.flock(file.fileno(), flags) + + def unlock(file): # noqa + fcntl.flock(file.fileno(), fcntl.LOCK_UN) +else: + raise RuntimeError( + 'Filesystem plugin only defined for NT and POSIX platforms') + + +class Channel(virtual.Channel): + + def _put(self, queue, payload, **kwargs): + """Put `message` onto `queue`.""" + + filename = '%s_%s.%s.msg' % (int(round(time.time() * 1000)), + uuid.uuid4(), queue) + filename = os.path.join(self.data_folder_out, filename) + + try: + f = open(filename, 'wb') + lock(f, LOCK_EX) + f.write(dumps(payload)) + except (IOError, OSError): + raise StdChannelError( + 'Filename [%s] could not be placed into folder.' % filename) + finally: + unlock(f) + f.close() + + def _get(self, queue): + """Get next message from `queue`.""" + + queue_find = '.' + queue + '.msg' + folder = os.listdir(self.data_folder_in) + folder = sorted(folder) + while len(folder) > 0: + filename = folder.pop(0) + + # only handle message for the requested queue + if filename.find(queue_find) < 0: + continue + + if self.store_processed: + processed_folder = self.processed_folder + else: + processed_folder = tempfile.gettempdir() + + try: + # move the file to the tmp/processed folder + shutil.move(os.path.join(self.data_folder_in, filename), + processed_folder) + except IOError: + pass # file could be locked, or removed in meantime so ignore + + filename = os.path.join(processed_folder, filename) + try: + f = open(filename, 'rb') + payload = f.read() + f.close() + if not self.store_processed: + os.remove(filename) + except (IOError, OSError): + raise StdChannelError( + 'Filename [%s] could not be read from queue.' % filename) + + return loads(payload) + + raise Empty() + + def _purge(self, queue): + """Remove all messages from `queue`.""" + count = 0 + queue_find = '.' + queue + '.msg' + + folder = os.listdir(self.data_folder_in) + while len(folder) > 0: + filename = folder.pop() + try: + # only purge messages for the requested queue + if filename.find(queue_find) < 0: + continue + + filename = os.path.join(self.data_folder_in, filename) + os.remove(filename) + + count += 1 + + except OSError: + # we simply ignore its existence, as it was probably + # processed by another worker + pass + + return count + + def _size(self, queue): + """Return the number of messages in `queue` as an :class:`int`.""" + count = 0 + + queue_find = "." 
+ queue + '.msg' + folder = os.listdir(self.data_folder_in) + while len(folder) > 0: + filename = folder.pop() + + # only handle message for the requested queue + if filename.find(queue_find) < 0: + continue + + count += 1 + + return count + + @property + def transport_options(self): + return self.connection.client.transport_options + + @cached_property + def data_folder_in(self): + return self.transport_options.get('data_folder_in', 'data_in') + + @cached_property + def data_folder_out(self): + return self.transport_options.get('data_folder_out', 'data_out') + + @cached_property + def store_processed(self): + return self.transport_options.get('store_processed', False) + + @cached_property + def processed_folder(self): + return self.transport_options.get('processed_folder', 'processed') + + +class Transport(virtual.Transport): + Channel = Channel + + default_port = 0 + connection_errors = (StdConnectionError, ) + channel_errors = (StdChannelError, ) + + driver_type = 'filesystem' + driver_name = 'filesystem' + + def driver_version(self): + return 'N/A' diff --git a/awx/lib/site-packages/kombu/transport/librabbitmq.py b/awx/lib/site-packages/kombu/transport/librabbitmq.py new file mode 100644 index 0000000000..acfa33e701 --- /dev/null +++ b/awx/lib/site-packages/kombu/transport/librabbitmq.py @@ -0,0 +1,136 @@ +""" +kombu.transport.librabbitmq +=========================== + +`librabbitmq`_ transport. + +.. _`librabbitmq`: http://pypi.python.org/librabbitmq/ + +""" +from __future__ import absolute_import + +import socket + +try: + import librabbitmq as amqp + from librabbitmq import ChannelError, ConnectionError +except ImportError: + try: + import pylibrabbitmq as amqp # noqa + from pylibrabbitmq import ChannelError, ConnectionError # noqa + except ImportError: + raise ImportError("No module named librabbitmq") + +from kombu.exceptions import StdConnectionError, StdChannelError +from kombu.utils.amq_manager import get_manager + +from . 
import base + +DEFAULT_PORT = 5672 + + +class Message(base.Message): + + def __init__(self, channel, props, info, body): + super(Message, self).__init__( + channel, + body=body, + delivery_info=info, + properties=props, + delivery_tag=info.get('delivery_tag'), + content_type=props.get('content_type'), + content_encoding=props.get('content_encoding'), + headers=props.get('headers')) + + +class Channel(amqp.Channel, base.StdChannel): + Message = Message + + def prepare_message(self, body, priority=None, + content_type=None, content_encoding=None, + headers=None, properties=None): + """Encapsulate data into a AMQP message.""" + properties = properties if properties is not None else {} + properties.update({'content_type': content_type, + 'content_encoding': content_encoding, + 'headers': headers, + 'priority': priority}) + return body, properties + + +class Connection(amqp.Connection): + Channel = Channel + Message = Message + + +class Transport(base.Transport): + Connection = Connection + + default_port = DEFAULT_PORT + connection_errors = (StdConnectionError, + ConnectionError, + socket.error, + IOError, + OSError) + channel_errors = (StdChannelError, ChannelError, ) + driver_type = 'amqp' + driver_name = 'librabbitmq' + + supports_ev = True + nb_keep_draining = True + + def __init__(self, client, **kwargs): + self.client = client + self.default_port = kwargs.get('default_port') or self.default_port + + def driver_version(self): + return amqp.__version__ + + def create_channel(self, connection): + return connection.channel() + + def drain_events(self, connection, **kwargs): + return connection.drain_events(**kwargs) + + def establish_connection(self): + """Establish connection to the AMQP broker.""" + conninfo = self.client + for name, default_value in self.default_connection_params.items(): + if not getattr(conninfo, name, None): + setattr(conninfo, name, default_value) + conn = self.Connection(host=conninfo.host, + userid=conninfo.userid, + password=conninfo.password, + virtual_host=conninfo.virtual_host, + login_method=conninfo.login_method, + insist=conninfo.insist, + ssl=conninfo.ssl, + connect_timeout=conninfo.connect_timeout) + conn.client = self.client + self.client.drain_events = conn.drain_events + return conn + + def close_connection(self, connection): + """Close the AMQP broker connection.""" + connection.close() + + def verify_connection(self, connection): + return connection.connected + + def on_poll_init(self, poller): + pass + + def on_poll_start(self): + return {} + + def eventmap(self, connection): + return {connection.fileno(): self.client.drain_nowait} + + def get_manager(self, *args, **kwargs): + return get_manager(self.client, *args, **kwargs) + + @property + def default_connection_params(self): + return {'userid': 'guest', 'password': 'guest', + 'port': self.default_port, + 'hostname': 'localhost', 'login_method': 'AMQPLAIN'} diff --git a/awx/lib/site-packages/kombu/transport/memory.py b/awx/lib/site-packages/kombu/transport/memory.py new file mode 100644 index 0000000000..e6ae789522 --- /dev/null +++ b/awx/lib/site-packages/kombu/transport/memory.py @@ -0,0 +1,63 @@ +""" +kombu.transport.memory +====================== + +In-memory transport. + +""" +from __future__ import absolute_import + +from Queue import Queue + +from . 
import virtual + + +class Channel(virtual.Channel): + queues = {} + do_restore = False + + def _has_queue(self, queue, **kwargs): + return queue in self.queues + + def _new_queue(self, queue, **kwargs): + if queue not in self.queues: + self.queues[queue] = Queue() + + def _get(self, queue, timeout=None): + return self._queue_for(queue).get(block=False) + + def _queue_for(self, queue): + if queue not in self.queues: + self.queues[queue] = Queue() + return self.queues[queue] + + def _put(self, queue, message, **kwargs): + self._queue_for(queue).put(message) + + def _size(self, queue): + return self._queue_for(queue).qsize() + + def _delete(self, queue, *args): + self.queues.pop(queue, None) + + def _purge(self, queue): + q = self._queue_for(queue) + size = q.qsize() + q.queue.clear() + return size + + def after_reply_message_received(self, queue): + pass + + +class Transport(virtual.Transport): + Channel = Channel + + #: memory backend state is global. + state = virtual.BrokerState() + + driver_type = 'memory' + driver_name = 'memory' + + def driver_version(self): + return 'N/A' diff --git a/awx/lib/site-packages/kombu/transport/mongodb.py b/awx/lib/site-packages/kombu/transport/mongodb.py new file mode 100644 index 0000000000..1a09d1f42b --- /dev/null +++ b/awx/lib/site-packages/kombu/transport/mongodb.py @@ -0,0 +1,225 @@ +""" +kombu.transport.mongodb +======================= + +MongoDB transport. + +:copyright: (c) 2010 - 2012 by Flavio Percoco Premoli. +:license: BSD, see LICENSE for more details. + +""" +from __future__ import absolute_import + +from Queue import Empty + +import pymongo + +from pymongo import errors +from anyjson import loads, dumps +from pymongo.connection import Connection + +from kombu.exceptions import StdConnectionError, StdChannelError + +from . 
import virtual + +DEFAULT_HOST = '127.0.0.1' +DEFAULT_PORT = 27017 + +__author__ = """\ +Flavio [FlaPer87] Percoco Premoli ;\ +Scott Lyons ;\ +""" + + +class Channel(virtual.Channel): + _client = None + supports_fanout = True + _fanout_queues = {} + + def __init__(self, *vargs, **kwargs): + super_ = super(Channel, self) + super_.__init__(*vargs, **kwargs) + + self._queue_cursors = {} + self._queue_readcounts = {} + + def _new_queue(self, queue, **kwargs): + pass + + def _get(self, queue): + try: + if queue in self._fanout_queues: + msg = self._queue_cursors[queue].next() + self._queue_readcounts[queue] += 1 + return loads(msg['payload']) + else: + msg = self.client.command( + 'findandmodify', 'messages', + query={'queue': queue}, + sort={'_id': pymongo.ASCENDING}, remove=True, + ) + except errors.OperationFailure, exc: + if 'No matching object found' in exc.args[0]: + raise Empty() + raise + except StopIteration: + raise Empty() + + # as of mongo 2.0 empty results won't raise an error + if msg['value'] is None: + raise Empty() + return loads(msg['value']['payload']) + + def _size(self, queue): + if queue in self._fanout_queues: + return (self._queue_cursors[queue].count() - + self._queue_readcounts[queue]) + + return self.client.messages.find({'queue': queue}).count() + + def _put(self, queue, message, **kwargs): + self.client.messages.insert({'payload': dumps(message), + 'queue': queue}) + + def _purge(self, queue): + size = self._size(queue) + if queue in self._fanout_queues: + cursor = self._queue_cursors[queue] + cursor.rewind() + self._queue_cursors[queue] = cursor.skip(cursor.count()) + else: + self.client.messages.remove({'queue': queue}) + return size + + def close(self): + super(Channel, self).close() + if self._client: + self._client.connection.end_request() + + def _open(self): + """ + See mongodb uri documentation: + http://www.mongodb.org/display/DOCS/Connections + """ + conninfo = self.connection.client + + dbname = None + hostname = None + + if not conninfo.hostname: + conninfo.hostname = DEFAULT_HOST + + for part in conninfo.hostname.split('/'): + if not hostname: + hostname = 'mongodb://' + part + continue + + dbname = part + if '?' in part: + # In case someone is passing options + # to the mongodb connection. Right now + # it is not permitted by kombu + dbname, options = part.split('?') + hostname += '/?' 
+ options + + hostname = "%s/%s" % ( + hostname, dbname in [None, "/"] and "admin" or dbname, + ) + if not dbname or dbname == "/": + dbname = "kombu_default" + + # At this point we expect the hostname to be something like + # (considering replica set form too): + # + # mongodb://[username:password@]host1[:port1][,host2[:port2], + # ...[,hostN[:portN]]][/[?options]] + mongoconn = Connection(host=hostname, ssl=conninfo.ssl) + version = mongoconn.server_info()['version'] + if tuple(map(int, version.split('.')[:2])) < (1, 3): + raise NotImplementedError( + 'Kombu requires MongoDB version 1.3+, but connected to %s' % ( + version, )) + + database = getattr(mongoconn, dbname) + + # This is done by the connection uri + # if conninfo.userid: + # database.authenticate(conninfo.userid, conninfo.password) + self.db = database + col = database.messages + col.ensure_index([('queue', 1), ('_id', 1)], background=True) + + if 'messages.broadcast' not in database.collection_names(): + capsize = conninfo.transport_options.get( + 'capped_queue_size') or 100000 + database.create_collection('messages.broadcast', + size=capsize, capped=True) + + self.bcast = getattr(database, 'messages.broadcast') + self.bcast.ensure_index([('queue', 1)]) + + self.routing = getattr(database, 'messages.routing') + self.routing.ensure_index([('queue', 1), ('exchange', 1)]) + return database + + #TODO: Store a more complete exchange metatable in the routing collection + def get_table(self, exchange): + """Get table of bindings for ``exchange``.""" + localRoutes = frozenset(self.state.exchanges[exchange]['table']) + brokerRoutes = self.client.messages.routing.find( + {'exchange': exchange} + ) + + return localRoutes | frozenset((r['routing_key'], + r['pattern'], + r['queue']) for r in brokerRoutes) + + def _put_fanout(self, exchange, message, **kwargs): + """Deliver fanout message.""" + self.client.messages.broadcast.insert({'payload': dumps(message), + 'queue': exchange}) + + def _queue_bind(self, exchange, routing_key, pattern, queue): + if self.typeof(exchange).type == 'fanout': + cursor = self.bcast.find(query={'queue': exchange}, + sort=[('$natural', 1)], tailable=True) + # Fast forward the cursor past old events + self._queue_cursors[queue] = cursor.skip(cursor.count()) + self._queue_readcounts[queue] = cursor.count() + self._fanout_queues[queue] = exchange + + meta = {'exchange': exchange, + 'queue': queue, + 'routing_key': routing_key, + 'pattern': pattern} + self.client.messages.routing.update(meta, meta, upsert=True) + + def queue_delete(self, queue, **kwargs): + self.routing.remove({'queue': queue}) + super(Channel, self).queue_delete(queue, **kwargs) + if queue in self._fanout_queues: + self._queue_cursors[queue].close() + self._queue_cursors.pop(queue, None) + self._fanout_queues.pop(queue, None) + + @property + def client(self): + if self._client is None: + self._client = self._open() + return self._client + + +class Transport(virtual.Transport): + Channel = Channel + + polling_interval = 1 + default_port = DEFAULT_PORT + connection_errors = (StdConnectionError, errors.ConnectionFailure) + channel_errors = (StdChannelError, + errors.ConnectionFailure, + errors.OperationFailure) + driver_type = 'mongodb' + driver_name = 'pymongo' + + def driver_version(self): + return pymongo.version diff --git a/awx/lib/site-packages/kombu/transport/pyamqp.py b/awx/lib/site-packages/kombu/transport/pyamqp.py new file mode 100644 index 0000000000..8eb62dcccb --- /dev/null +++ b/awx/lib/site-packages/kombu/transport/pyamqp.py @@ -0,0 
+1,137 @@ +""" +kombu.transport.pyamqp +====================== + +pure python amqp transport. + +""" +from __future__ import absolute_import + +import amqp + +from kombu.exceptions import ( + StdConnectionError, + StdChannelError, + VersionMismatch, +) +from kombu.utils.amq_manager import get_manager + +from . import base + +DEFAULT_PORT = 5672 + +if amqp.VERSION < (0, 9, 3): # pragma: no cover + raise VersionMismatch('Please install amqp version 0.9.3 or higher.') + + +class Message(base.Message): + + def __init__(self, channel, msg, **kwargs): + props = msg.properties + super(Message, self).__init__( + channel, + body=msg.body, + delivery_tag=msg.delivery_tag, + content_type=props.get('content_type'), + content_encoding=props.get('content_encoding'), + delivery_info=msg.delivery_info, + properties=msg.properties, + headers=props.get('application_headers') or {}, + **kwargs) + + +class Channel(amqp.Channel, base.StdChannel): + Message = Message + + def prepare_message(self, body, priority=None, + content_type=None, content_encoding=None, + headers=None, properties=None): + """Encapsulate data into a AMQP message.""" + return amqp.Message(body, priority=priority, + content_type=content_type, + content_encoding=content_encoding, + application_headers=headers, + **properties) + + def message_to_python(self, raw_message): + """Convert encoded message body back to a Python value.""" + return self.Message(self, raw_message) + + +class Connection(amqp.Connection): + Channel = Channel + + +class Transport(base.Transport): + Connection = Connection + + default_port = DEFAULT_PORT + + # it's very annoying that pyamqp sometimes raises AttributeError + # if the connection is lost, but nothing we can do about that here. + connection_errors = ( + (StdConnectionError, ) + amqp.Connection.connection_errors + ) + channel_errors = (StdChannelError, ) + amqp.Connection.channel_errors + + nb_keep_draining = True + driver_name = "py-amqp" + driver_type = "amqp" + supports_heartbeats = True + supports_ev = True + + def __init__(self, client, **kwargs): + self.client = client + self.default_port = kwargs.get("default_port") or self.default_port + + def create_channel(self, connection): + return connection.channel() + + def drain_events(self, connection, **kwargs): + return connection.drain_events(**kwargs) + + def establish_connection(self): + """Establish connection to the AMQP broker.""" + conninfo = self.client + for name, default_value in self.default_connection_params.items(): + if not getattr(conninfo, name, None): + setattr(conninfo, name, default_value) + if conninfo.hostname == 'localhost': + conninfo.hostname = '127.0.0.1' + conn = self.Connection(host=conninfo.host, + userid=conninfo.userid, + password=conninfo.password, + login_method=conninfo.login_method, + virtual_host=conninfo.virtual_host, + insist=conninfo.insist, + ssl=conninfo.ssl, + connect_timeout=conninfo.connect_timeout, + heartbeat=conninfo.heartbeat) + conn.client = self.client + return conn + + def close_connection(self, connection): + """Close the AMQP broker connection.""" + connection.client = None + connection.close() + + def eventmap(self, connection): + return {connection.sock: self.client.drain_nowait} + + def on_poll_init(self, poller): + pass + + def on_poll_start(self): + return {} + + def heartbeat_check(self, connection, rate=2): + return connection.heartbeat_tick(rate=rate) + + @property + def default_connection_params(self): + return {'userid': 'guest', 'password': 'guest', + 'port': self.default_port, + 'hostname': 
'localhost', 'login_method': 'AMQPLAIN'} + + def get_manager(self, *args, **kwargs): + return get_manager(self.client, *args, **kwargs) diff --git a/awx/lib/site-packages/kombu/transport/redis.py b/awx/lib/site-packages/kombu/transport/redis.py new file mode 100644 index 0000000000..bf9e9f49ad --- /dev/null +++ b/awx/lib/site-packages/kombu/transport/redis.py @@ -0,0 +1,788 @@ +""" +kombu.transport.redis +===================== + +Redis transport. + +""" +from __future__ import absolute_import +from __future__ import with_statement + +import socket + +from bisect import bisect +from contextlib import contextmanager +from time import time +from Queue import Empty + +from anyjson import loads, dumps + +from kombu.exceptions import ( + InconsistencyError, + StdConnectionError, + StdChannelError, + VersionMismatch, +) +from kombu.log import get_logger +from kombu.utils import cached_property, uuid +from kombu.utils.eventio import poll, READ, ERR + +NO_ROUTE_ERROR = """ +Cannot route message for exchange %r: Table empty or key no longer exists. +Probably the key (%r) has been removed from the Redis database. +""" + +try: + from billiard.util import register_after_fork +except ImportError: + try: + from multiprocessing.util import register_after_fork # noqa + except ImportError: + def register_after_fork(*args, **kwargs): # noqa + pass + +try: + import redis +except ImportError: + redis = None # noqa + + +from . import virtual + +logger = get_logger('kombu.transport.redis') + +DEFAULT_PORT = 6379 +DEFAULT_DB = 0 + +PRIORITY_STEPS = [0, 3, 6, 9] + +# This implementation may seem overly complex, but I assure you there is +# a good reason for doing it this way. +# +# Consuming from several connections enables us to emulate channels, +# which means we can have different service guarantees for individual +# channels. +# +# So we need to consume messages from multiple connections simultaneously, +# and using epoll means we don't have to do so using multiple threads. +# +# Also it means we can easily use PUBLISH/SUBSCRIBE to do fanout +# exchanges (broadcast), as an alternative to pushing messages to fanout-bound +# queues manually. 
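A minimal sketch of the two delivery paths described above, using the
plain redis-py client directly (assumes a Redis server on localhost and
the ``redis`` package; the queue and exchange names are illustrative):

    import redis
    from anyjson import dumps, loads

    r = redis.Redis(host='localhost', port=6379, db=0)

    # Ordinary queue: _put() LPUSHes the serialized message onto a
    # per-queue list; the poller later issues BRPOP to pop the oldest.
    r.lpush('demo_queue', dumps({'body': 'aGVsbG8=', 'properties': {}}))
    source, raw = r.brpop(['demo_queue'], timeout=1)
    message = loads(raw)

    # Fanout exchange: _put_fanout() simply PUBLISHes to the exchange
    # name; every channel that has SUBSCRIBEd receives its own copy
    # (read back off pubsub.listen()).
    pubsub = r.pubsub()
    pubsub.subscribe('demo_fanout')
    r.publish('demo_fanout', dumps({'body': 'aGVsbG8='}))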
+ + +class MutexHeld(Exception): + pass + + +@contextmanager +def Mutex(client, name, expire): + lock_id = uuid() + i_won = client.setnx(name, lock_id) + try: + if i_won: + client.expire(name, expire) + yield + else: + if not client.ttl(name): + client.expire(name, expire) + raise MutexHeld() + finally: + if i_won: + pipe = client.pipeline(True) + try: + pipe.watch(name) + if pipe.get(name) == lock_id: + pipe.multi() + pipe.delete(name) + pipe.execute() + pipe.unwatch() + except redis.WatchError: + pass + + +class QoS(virtual.QoS): + restore_at_shutdown = True + + def __init__(self, *args, **kwargs): + super(QoS, self).__init__(*args, **kwargs) + self._vrestore_count = 0 + + def append(self, message, delivery_tag): + delivery = message.delivery_info + EX, RK = delivery['exchange'], delivery['routing_key'] + with self.pipe_or_acquire() as pipe: + pipe.zadd(self.unacked_index_key, delivery_tag, time()) \ + .hset(self.unacked_key, delivery_tag, + dumps([message._raw, EX, RK])) \ + .execute() + super(QoS, self).append(message, delivery_tag) + + def restore_unacked(self): + for tag in self._delivered: + self.restore_by_tag(tag) + self._delivered.clear() + + def ack(self, delivery_tag): + self._remove_from_indices(delivery_tag).execute() + super(QoS, self).ack(delivery_tag) + + def reject(self, delivery_tag, requeue=False): + self.ack(delivery_tag) + + @contextmanager + def pipe_or_acquire(self, pipe=None): + if pipe: + yield pipe + else: + with self.channel.conn_or_acquire() as client: + yield client.pipeline() + + def _remove_from_indices(self, delivery_tag, pipe=None): + with self.pipe_or_acquire(pipe) as pipe: + return pipe.zrem(self.unacked_index_key, delivery_tag) \ + .hdel(self.unacked_key, delivery_tag) + + def restore_visible(self, start=0, num=10, interval=10): + self._vrestore_count += 1 + if (self._vrestore_count - 1) % interval: + return + with self.channel.conn_or_acquire() as client: + ceil = time() - self.visibility_timeout + try: + with Mutex(client, self.unacked_mutex_key, + self.unacked_mutex_expire): + visible = client.zrevrangebyscore( + self.unacked_index_key, ceil, 0, + start=num and start, num=num, withscores=True) + for tag, score in visible or []: + self.restore_by_tag(tag, client) + except MutexHeld: + pass + + def restore_by_tag(self, tag, client=None): + with self.channel.conn_or_acquire(client) as client: + p, _, _ = self._remove_from_indices( + tag, client.pipeline().hget(self.unacked_key, tag)).execute() + if p: + M, EX, RK = loads(p) + self.channel._do_restore_message(M, EX, RK, client) + + @cached_property + def unacked_key(self): + return self.channel.unacked_key + + @cached_property + def unacked_index_key(self): + return self.channel.unacked_index_key + + @cached_property + def unacked_mutex_key(self): + return self.channel.unacked_mutex_key + + @cached_property + def unacked_mutex_expire(self): + return self.channel.unacked_mutex_expire + + @cached_property + def visibility_timeout(self): + return self.channel.visibility_timeout + + +class MultiChannelPoller(object): + eventflags = READ | ERR + + def __init__(self): + # active channels + self._channels = set() + # file descriptor -> channel map. 
+ self._fd_to_chan = {} + # channel -> socket map + self._chan_to_sock = {} + # poll implementation (epoll/kqueue/select) + self.poller = poll() + + def close(self): + for fd in self._chan_to_sock.itervalues(): + try: + self.poller.unregister(fd) + except KeyError: + pass + self._channels.clear() + self._fd_to_chan.clear() + self._chan_to_sock.clear() + self.poller = None + + def add(self, channel): + self._channels.add(channel) + + def discard(self, channel): + self._channels.discard(channel) + + def _register(self, channel, client, type): + if (channel, client, type) in self._chan_to_sock: + self._unregister(channel, client, type) + if client.connection._sock is None: # not connected yet. + client.connection.connect() + sock = client.connection._sock + self._fd_to_chan[sock.fileno()] = (channel, type) + self._chan_to_sock[(channel, client, type)] = sock + self.poller.register(sock, self.eventflags) + + def _unregister(self, channel, client, type): + self.poller.unregister(self._chan_to_sock[(channel, client, type)]) + + def _register_BRPOP(self, channel): + """enable BRPOP mode for channel.""" + ident = channel, channel.client, 'BRPOP' + if channel.client.connection._sock is None or \ + ident not in self._chan_to_sock: + channel._in_poll = False + self._register(*ident) + + if not channel._in_poll: # send BRPOP + channel._brpop_start() + + def _register_LISTEN(self, channel): + """enable LISTEN mode for channel.""" + if channel.subclient.connection._sock is None: + channel._in_listen = False + self._register(channel, channel.subclient, 'LISTEN') + if not channel._in_listen: + channel._subscribe() # send SUBSCRIBE + + def on_poll_start(self): + for channel in self._channels: + if channel.active_queues: # BRPOP mode? + if channel.qos.can_consume(): + self._register_BRPOP(channel) + if channel.active_fanout_queues: # LISTEN mode? + self._register_LISTEN(channel) + + def on_poll_init(self, poller): + self.poller = poller + for channel in self._channels: + return channel.qos.restore_visible( + num=channel.unacked_restore_limit, + ) + + def on_poll_empty(self): + for channel in self._channels: + if channel.active_queues: + # only need to do this once, as they are not local to channel. + return channel.qos.restore_visible( + num=channel.unacked_restore_limit, + ) + + def handle_event(self, fileno, event): + if event & READ: + chan, type = self._fd_to_chan[fileno] + if chan.qos.can_consume(): + return chan.handlers[type](), self + elif event & ERR: + chan, type = self._fd_to_chan[fileno] + chan._poll_error(type) + + def get(self, timeout=None): + for channel in self._channels: + if channel.active_queues: # BRPOP mode? + if channel.qos.can_consume(): + self._register_BRPOP(channel) + if channel.active_fanout_queues: # LISTEN mode? + self._register_LISTEN(channel) + + events = self.poller.poll(timeout) + for fileno, event in events or []: + ret = self.handle_event(fileno, event) + if ret: + return ret + + # - no new data, so try to restore messages. + # - reset active redis commands. 
+ self.on_poll_empty() + + raise Empty() + + @property + def fds(self): + return self._fd_to_chan + + +class Channel(virtual.Channel): + QoS = QoS + + _client = None + _subclient = None + supports_fanout = True + keyprefix_queue = '_kombu.binding.%s' + sep = '\x06\x16' + _in_poll = False + _in_listen = False + _fanout_queues = {} + ack_emulation = True + unacked_key = 'unacked' + unacked_index_key = 'unacked_index' + unacked_mutex_key = 'unacked_mutex' + unacked_mutex_expire = 300 # 5 minutes + unacked_restore_limit = None + visibility_timeout = 3600 # 1 hour + priority_steps = PRIORITY_STEPS + socket_timeout = None + max_connections = 10 + _pool = None + + from_transport_options = ( + virtual.Channel.from_transport_options + + ('ack_emulation', + 'unacked_key', + 'unacked_index_key', + 'unacked_mutex_key', + 'unacked_mutex_expire', + 'visibility_timeout', + 'unacked_restore_limit', + 'socket_timeout', + 'max_connections', + 'priority_steps') # <-- do not add comma here! + ) + + def __init__(self, *args, **kwargs): + super_ = super(Channel, self) + super_.__init__(*args, **kwargs) + + if not self.ack_emulation: # disable visibility timeout + self.QoS = virtual.QoS + + self._queue_cycle = [] + self.Client = self._get_client() + self.ResponseError = self._get_response_error() + self.active_fanout_queues = set() + self.auto_delete_queues = set() + self._fanout_to_queue = {} + self.handlers = {'BRPOP': self._brpop_read, 'LISTEN': self._receive} + + # Evaluate connection. + try: + self.client.info() + except Exception: + if self._pool: + self._pool.disconnect() + raise + + self.connection.cycle.add(self) # add to channel poller. + # copy errors, in case channel closed but threads still + # are still waiting for data. + self.connection_errors = self.connection.connection_errors + + register_after_fork(self, self._after_fork) + + def _after_fork(self): + if self._pool is not None: + self._pool.disconnect() + + def _do_restore_message(self, payload, exchange, routing_key, client=None): + with self.conn_or_acquire(client) as client: + try: + try: + payload['headers']['redelivered'] = True + except KeyError: + pass + for queue in self._lookup(exchange, routing_key): + client.lpush(queue, dumps(payload)) + except Exception: + logger.critical('Could not restore message: %r', payload, + exc_info=True) + + def _restore(self, message, payload=None): + tag = message.delivery_tag + with self.conn_or_acquire() as client: + P, _ = client.pipeline() \ + .hget(self.unacked_key, tag) \ + .hdel(self.unacked_key, tag) \ + .execute() + if P: + M, EX, RK = loads(P) + self._do_restore_message(M, EX, RK, client) + + def _next_delivery_tag(self): + return uuid() + + def basic_consume(self, queue, *args, **kwargs): + if queue in self._fanout_queues: + exchange = self._fanout_queues[queue] + self.active_fanout_queues.add(queue) + self._fanout_to_queue[exchange] = queue + ret = super(Channel, self).basic_consume(queue, *args, **kwargs) + self._update_cycle() + return ret + + def basic_cancel(self, consumer_tag): + try: + queue = self._tag_to_queue[consumer_tag] + except KeyError: + return + try: + self.active_fanout_queues.discard(queue) + self._fanout_to_queue.pop(self._fanout_queues[queue]) + except KeyError: + pass + ret = super(Channel, self).basic_cancel(consumer_tag) + self._update_cycle() + return ret + + def _subscribe(self): + keys = [self._fanout_queues[queue] + for queue in self.active_fanout_queues] + if not keys: + return + c = self.subclient + if c.connection._sock is None: + c.connection.connect() + 
self._in_listen = True + self.subclient.subscribe(keys) + + def _handle_message(self, client, r): + if r[0] == 'unsubscribe' and r[2] == 0: + client.subscribed = False + elif r[0] == 'pmessage': + return {'type': r[0], 'pattern': r[1], + 'channel': r[2], 'data': r[3]} + else: + return {'type': r[0], 'pattern': None, + 'channel': r[1], 'data': r[2]} + + def _receive(self): + c = self.subclient + response = None + try: + response = c.parse_response() + except self.connection_errors: + self._in_listen = False + if response is not None: + payload = self._handle_message(c, response) + if payload['type'] == 'message': + return (loads(payload['data']), + self._fanout_to_queue[payload['channel']]) + raise Empty() + + def _brpop_start(self, timeout=1): + queues = self._consume_cycle() + if not queues: + return + keys = [self._q_for_pri(queue, pri) for pri in PRIORITY_STEPS + for queue in queues] + [timeout or 0] + self._in_poll = True + self.client.connection.send_command('BRPOP', *keys) + + def _brpop_read(self, **options): + try: + try: + dest__item = self.client.parse_response(self.client.connection, + 'BRPOP', + **options) + except self.connection_errors: + # if there's a ConnectionError, disconnect so the next + # iteration will reconnect automatically. + self.client.connection.disconnect() + raise Empty() + if dest__item: + dest, item = dest__item + dest = dest.rsplit(self.sep, 1)[0] + self._rotate_cycle(dest) + return loads(item), dest + else: + raise Empty() + finally: + self._in_poll = False + + def _poll_error(self, type, **options): + try: + self.client.parse_response(type) + except self.connection_errors: + pass + + def _get(self, queue): + with self.conn_or_acquire() as client: + for pri in PRIORITY_STEPS: + item = client.rpop(self._q_for_pri(queue, pri)) + if item: + return loads(item) + raise Empty() + + def _size(self, queue): + with self.conn_or_acquire() as client: + cmds = client.pipeline() + for pri in PRIORITY_STEPS: + cmds = cmds.llen(self._q_for_pri(queue, pri)) + sizes = cmds.execute() + return sum(size for size in sizes if isinstance(size, int)) + + def _q_for_pri(self, queue, pri): + pri = self.priority(pri) + return '%s%s%s' % ((queue, self.sep, pri) if pri else (queue, '', '')) + + def priority(self, n): + steps = self.priority_steps + return steps[bisect(steps, n) - 1] + + def _put(self, queue, message, **kwargs): + """Deliver message.""" + try: + pri = max(min(int( + message['properties']['delivery_info']['priority']), 9), 0) + except (TypeError, ValueError, KeyError): + pri = 0 + with self.conn_or_acquire() as client: + client.lpush(self._q_for_pri(queue, pri), dumps(message)) + + def _put_fanout(self, exchange, message, **kwargs): + """Deliver fanout message.""" + with self.conn_or_acquire() as client: + client.publish(exchange, dumps(message)) + + def _new_queue(self, queue, auto_delete=False, **kwargs): + if auto_delete: + self.auto_delete_queues.add(queue) + + def _queue_bind(self, exchange, routing_key, pattern, queue): + if self.typeof(exchange).type == 'fanout': + # Mark exchange as fanout. 
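# (The binding is also mirrored into a Redis set so that other
# channels can discover it: with sep = '\x06\x16', the sadd() below
# stores an entry of the form
#     "<routing_key>\x06\x16<pattern>\x06\x16<queue>"
# under the key '_kombu.binding.<exchange>', and get_table(), defined
# further down, splits such entries apart on the same separator.)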
+ self._fanout_queues[queue] = exchange + with self.conn_or_acquire() as client: + client.sadd(self.keyprefix_queue % (exchange, ), + self.sep.join([routing_key or '', + pattern or '', + queue or ''])) + + def _delete(self, queue, exchange, routing_key, pattern, *args): + self.auto_delete_queues.discard(queue) + with self.conn_or_acquire() as client: + client.srem(self.keyprefix_queue % (exchange, ), + self.sep.join([routing_key or '', + pattern or '', + queue or ''])) + cmds = client.pipeline() + for pri in PRIORITY_STEPS: + cmds = cmds.delete(self._q_for_pri(queue, pri)) + cmds.execute() + + def _has_queue(self, queue, **kwargs): + with self.conn_or_acquire() as client: + cmds = client.pipeline() + for pri in PRIORITY_STEPS: + cmds = cmds.exists(self._q_for_pri(queue, pri)) + return any(cmds.execute()) + + def get_table(self, exchange): + key = self.keyprefix_queue % exchange + with self.conn_or_acquire() as client: + values = client.smembers(key) + if not values: + raise InconsistencyError(NO_ROUTE_ERROR % (exchange, key)) + return [tuple(val.split(self.sep)) for val in values] + + def _purge(self, queue): + with self.conn_or_acquire() as client: + cmds = client.pipeline() + for pri in PRIORITY_STEPS: + priq = self._q_for_pri(queue, pri) + cmds = cmds.llen(priq).delete(priq) + sizes = cmds.execute() + return sum(sizes[::2]) + + def close(self): + if self._pool: + self._pool.disconnect() + if not self.closed: + # remove from channel poller. + self.connection.cycle.discard(self) + + # delete fanout bindings + for queue in self._fanout_queues: + if queue in self.auto_delete_queues: + self.queue_delete(queue) + + # Close connections + for attr in 'client', 'subclient': + try: + self.__dict__[attr].connection.disconnect() + except (KeyError, AttributeError, self.ResponseError): + pass + super(Channel, self).close() + + def _connparams(self): + conninfo = self.connection.client + database = conninfo.virtual_host + if not isinstance(database, int): + if not database or database == '/': + database = DEFAULT_DB + elif database.startswith('/'): + database = database[1:] + try: + database = int(database) + except ValueError: + raise ValueError( + 'Database name must be int between 0 and limit - 1') + return {'host': conninfo.hostname or '127.0.0.1', + 'port': conninfo.port or DEFAULT_PORT, + 'db': database, + 'password': conninfo.password, + 'max_connections': self.max_connections, + 'socket_timeout': self.socket_timeout} + + def _create_client(self): + return self.Client(connection_pool=self.pool) + + def _get_pool(self): + return redis.ConnectionPool(**self._connparams()) + + def _get_client(self): + if redis.VERSION < (2, 4, 4): + raise VersionMismatch( + 'Redis transport requires redis-py versions 2.4.4 or later. ' + 'You have %r' % (redis.__version__, )) + + # KombuRedis maintains a connection attribute on it's instance and + # uses that when executing commands + # This was added after redis-py was changed. 
+ class KombuRedis(redis.Redis): # pragma: no cover + + def __init__(self, *args, **kwargs): + super(KombuRedis, self).__init__(*args, **kwargs) + self.connection = self.connection_pool.get_connection('_') + + return KombuRedis + + @contextmanager + def conn_or_acquire(self, client=None): + if client: + yield client + else: + if self._in_poll: + client = self._create_client() + try: + yield client + finally: + self.pool.release(client.connection) + else: + yield self.client + + @property + def pool(self): + if self._pool is None: + self._pool = self._get_pool() + return self._pool + + @cached_property + def client(self): + """Client used to publish messages, BRPOP etc.""" + return self._create_client() + + @cached_property + def subclient(self): + """Pub/Sub connection used to consume fanout queues.""" + client = self._create_client() + pubsub = client.pubsub() + pool = pubsub.connection_pool + pubsub.connection = pool.get_connection('pubsub', pubsub.shard_hint) + return pubsub + + def _update_cycle(self): + """Update fair cycle between queues. + + We cycle between queues fairly to make sure that + each queue is equally likely to be consumed from, + so that a very busy queue will not block others. + + This works by using Redis's `BRPOP` command and + by rotating the most recently used queue to the + and of the list. See Kombu github issue #166 for + more discussion of this method. + + """ + self._queue_cycle = list(self.active_queues) + + def _consume_cycle(self): + """Get a fresh list of queues from the queue cycle.""" + active = len(self.active_queues) + return self._queue_cycle[0:active] + + def _rotate_cycle(self, used): + """ + Move most recently used queue to end of list + """ + index = self._queue_cycle.index(used) + self._queue_cycle.append(self._queue_cycle.pop(index)) + + def _get_response_error(self): + from redis import exceptions + return exceptions.ResponseError + + @property + def active_queues(self): + """Set of queues being consumed from (excluding fanout queues).""" + return set(queue for queue in self._active_queues + if queue not in self.active_fanout_queues) + + +class Transport(virtual.Transport): + Channel = Channel + + polling_interval = None # disable sleep between unsuccessful polls. + default_port = DEFAULT_PORT + supports_ev = True + driver_type = 'redis' + driver_name = 'redis' + + def __init__(self, *args, **kwargs): + super(Transport, self).__init__(*args, **kwargs) + + # Get redis-py exceptions. + self.connection_errors, self.channel_errors = self._get_errors() + # All channels share the same poller. 
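# (MultiChannelPoller, defined above, multiplexes every channel's
# BRPOP and LISTEN sockets through a single poll/epoll/kqueue
# instance, so one event loop can drain any number of channels
# without needing a thread per connection.)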
+ self.cycle = MultiChannelPoller() + + def driver_version(self): + return redis.__version__ + + def on_poll_init(self, poller): + """Called when hub starts.""" + self.cycle.on_poll_init(poller) + + def on_poll_start(self): + """Called by hub before each ``poll()``""" + cycle = self.cycle + cycle.on_poll_start() + return dict((fd, self.handle_event) for fd in cycle.fds) + + def on_poll_empty(self): + self.cycle.on_poll_empty() + + def handle_event(self, fileno, event): + """Handle AIO event for one of our file descriptors.""" + ret = self.cycle.handle_event(fileno, event) + if ret: + item, channel = ret + message, queue = item + if not queue or queue not in self._callbacks: + raise KeyError( + "Received message for queue '%s' without consumers: %s" % ( + queue, message)) + self._callbacks[queue](message) + + def _get_errors(self): + """Utility to import redis-py's exceptions at runtime.""" + from redis import exceptions + # This exception suddenly changed name between redis-py versions + if hasattr(exceptions, 'InvalidData'): + DataError = exceptions.InvalidData + else: + DataError = exceptions.DataError + return ((StdConnectionError, + InconsistencyError, + socket.timeout, + exceptions.ConnectionError, + exceptions.AuthenticationError), + (DataError, + exceptions.InvalidResponse, + exceptions.ResponseError, + StdChannelError)) diff --git a/awx/lib/site-packages/kombu/transport/sqlalchemy/__init__.py b/awx/lib/site-packages/kombu/transport/sqlalchemy/__init__.py new file mode 100644 index 0000000000..7bce1c6bc3 --- /dev/null +++ b/awx/lib/site-packages/kombu/transport/sqlalchemy/__init__.py @@ -0,0 +1,120 @@ +"""Kombu transport using SQLAlchemy as the message store.""" +# SQLAlchemy overrides != False to have special meaning and pep8 complains +# flake8: noqa + +from Queue import Empty + +from anyjson import loads, dumps +from sqlalchemy import create_engine +from sqlalchemy.exc import OperationalError +from sqlalchemy.orm import sessionmaker + +from kombu.transport import virtual +from kombu.exceptions import StdConnectionError, StdChannelError + +from .models import Queue, Message, metadata + + +VERSION = (1, 1, 0) +__version__ = '.'.join(map(str, VERSION)) + + +class Channel(virtual.Channel): + _session = None + _engines = {} # engine cache + + def _engine_from_config(self): + conninfo = self.connection.client + configuration = dict(conninfo.transport_options) + url = conninfo.hostname + return create_engine(url, **configuration) + + def _open(self): + conninfo = self.connection.client + if conninfo.hostname not in self._engines: + engine = self._engine_from_config() + Session = sessionmaker(bind=engine) + metadata.create_all(engine) + self._engines[conninfo.hostname] = engine, Session + return self._engines[conninfo.hostname] + + @property + def session(self): + if self._session is None: + _, Session = self._open() + self._session = Session() + return self._session + + def _get_or_create(self, queue): + obj = self.session.query(Queue) \ + .filter(Queue.name == queue).first() + if not obj: + obj = Queue(queue) + self.session.add(obj) + try: + self.session.commit() + except OperationalError: + self.session.rollback() + return obj + + def _new_queue(self, queue, **kwargs): + self._get_or_create(queue) + + def _put(self, queue, payload, **kwargs): + obj = self._get_or_create(queue) + message = Message(dumps(payload), obj) + self.session.add(message) + try: + self.session.commit() + except OperationalError: + self.session.rollback() + + def _get(self, queue): + obj = 
self._get_or_create(queue)
+        if self.session.bind.name == 'sqlite':
+            self.session.execute('BEGIN IMMEDIATE TRANSACTION')
+        try:
+            msg = self.session.query(Message) \
+                .with_lockmode('update') \
+                .filter(Message.queue_id == obj.id) \
+                .filter(Message.visible != False) \
+                .order_by(Message.sent_at) \
+                .order_by(Message.id) \
+                .limit(1) \
+                .first()
+            if msg:
+                msg.visible = False
+                return loads(msg.payload)
+            raise Empty()
+        finally:
+            self.session.commit()
+
+    def _query_all(self, queue):
+        obj = self._get_or_create(queue)
+        return self.session.query(Message) \
+            .filter(Message.queue_id == obj.id)
+
+    def _purge(self, queue):
+        count = self._query_all(queue).delete(synchronize_session=False)
+        try:
+            self.session.commit()
+        except OperationalError:
+            self.session.rollback()
+        return count
+
+    def _size(self, queue):
+        return self._query_all(queue).count()
+
+
+class Transport(virtual.Transport):
+    Channel = Channel
+
+    default_port = 0
+    connection_errors = (StdConnectionError, )
+    channel_errors = (StdChannelError, )
+    driver_type = 'sql'
+    driver_name = 'sqlalchemy'
+
+    def driver_version(self):
+        import sqlalchemy
+        return sqlalchemy.__version__
diff --git a/awx/lib/site-packages/kombu/transport/sqlalchemy/models.py b/awx/lib/site-packages/kombu/transport/sqlalchemy/models.py
new file mode 100644
index 0000000000..507be591ad
--- /dev/null
+++ b/awx/lib/site-packages/kombu/transport/sqlalchemy/models.py
@@ -0,0 +1,53 @@
+import datetime
+
+from sqlalchemy import (Column, Integer, String, Text, DateTime,
+                        Sequence, Boolean, ForeignKey, SmallInteger)
+from sqlalchemy.orm import relation
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.schema import MetaData
+
+metadata = MetaData()
+ModelBase = declarative_base(metadata=metadata)
+
+
+class Queue(ModelBase):
+    __tablename__ = 'kombu_queue'
+    __table_args__ = {'sqlite_autoincrement': True, 'mysql_engine': 'InnoDB'}
+
+    id = Column(Integer, Sequence('queue_id_sequence'), primary_key=True,
+                autoincrement=True)
+    name = Column(String(200), unique=True)
+    messages = relation('Message', backref='queue', lazy='noload')
+
+    def __init__(self, name):
+        self.name = name
+
+    def __str__(self):
+        return '<Queue: %s>' % (self.name)
+
+
+class Message(ModelBase):
+    __tablename__ = 'kombu_message'
+    __table_args__ = {'sqlite_autoincrement': True, 'mysql_engine': 'InnoDB'}
+
+    id = Column(Integer, Sequence('message_id_sequence'),
+                primary_key=True, autoincrement=True)
+    visible = Column(Boolean, default=True, index=True)
+    sent_at = Column('timestamp', DateTime, nullable=True, index=True,
+                     onupdate=datetime.datetime.now)
+    payload = Column(Text, nullable=False)
+    queue_id = Column(Integer, ForeignKey('kombu_queue.id',
+                                          name='FK_kombu_message_queue'))
+    version = Column(SmallInteger, nullable=False, default=1)
+
+    __mapper_args__ = {'version_id_col': version}
+
+    def __init__(self, payload, queue):
+        self.payload = payload
+        self.queue = queue
+
+    def __str__(self):
+        return '<Message: %s %s %s %s>' % (self.visible,
+                                           self.sent_at,
+                                           self.payload,
+                                           self.queue_id)
diff --git a/awx/lib/site-packages/kombu/transport/virtual/__init__.py b/awx/lib/site-packages/kombu/transport/virtual/__init__.py
new file mode 100644
index 0000000000..6936b1ce73
--- /dev/null
+++ b/awx/lib/site-packages/kombu/transport/virtual/__init__.py
@@ -0,0 +1,787 @@
+"""
+kombu.transport.virtual
+=======================
+
+Virtual transport implementation.
+
+Emulates the AMQ API for non-AMQ transports.
+ +""" +from __future__ import absolute_import + +import base64 +import socket +import warnings + +from itertools import count +from time import sleep, time +from Queue import Empty + +from kombu.exceptions import StdChannelError +from kombu.utils import emergency_dump_state, say, uuid +from kombu.utils.compat import OrderedDict +from kombu.utils.encoding import str_to_bytes, bytes_to_str +from kombu.utils.finalize import Finalize + +from kombu.transport import base + +from .scheduling import FairCycle +from .exchange import STANDARD_EXCHANGE_TYPES + +UNDELIVERABLE_FMT = """\ +Message could not be delivered: No queues bound to exchange %(exchange)r +using binding key %(routing_key)r +""" + + +class Base64(object): + + def encode(self, s): + return bytes_to_str(base64.b64encode(str_to_bytes(s))) + + def decode(self, s): + return base64.b64decode(str_to_bytes(s)) + + +class NotEquivalentError(Exception): + """Entity declaration is not equivalent to the previous declaration.""" + pass + + +class UndeliverableWarning(UserWarning): + """The message could not be delivered to a queue.""" + pass + + +class BrokerState(object): + + #: exchange declarations. + exchanges = None + + #: active bindings. + bindings = None + + def __init__(self, exchanges=None, bindings=None): + self.exchanges = {} if exchanges is None else exchanges + self.bindings = {} if bindings is None else bindings + + def clear(self): + self.exchanges.clear() + self.bindings.clear() + + +class QoS(object): + """Quality of Service guarantees. + + Only supports `prefetch_count` at this point. + + :param channel: AMQ Channel. + :keyword prefetch_count: Initial prefetch count (defaults to 0). + + """ + + #: current prefetch count value + prefetch_count = 0 + + #: :class:`~collections.OrderedDict` of active messages. + #: *NOTE*: Can only be modified by the consuming thread. + _delivered = None + + #: acks can be done by other threads than the consuming thread. + #: Instead of a mutex, which doesn't perform well here, we mark + #: the delivery tags as dirty, so subsequent calls to append() can remove + #: them. + _dirty = None + + #: If disabled, unacked messages won't be restored at shutdown. + restore_at_shutdown = True + + def __init__(self, channel, prefetch_count=0): + self.channel = channel + self.prefetch_count = prefetch_count or 0 + + self._delivered = OrderedDict() + self._delivered.restored = False + self._dirty = set() + self._on_collect = Finalize(self, + self.restore_unacked_once, + exitpriority=1) + + def can_consume(self): + """Returns true if the channel can be consumed from. + + Used to ensure the client adhers to currently active + prefetch limits. 
+ + """ + pcount = self.prefetch_count + return not pcount or len(self._delivered) - len(self._dirty) < pcount + + def append(self, message, delivery_tag): + """Append message to transactional state.""" + if self._dirty: + self._flush() + self._delivered[delivery_tag] = message + + def get(self, delivery_tag): + return self._delivered[delivery_tag] + + def _flush(self): + """Flush dirty (acked/rejected) tags from.""" + dirty = self._dirty + delivered = self._delivered + while 1: + try: + dirty_tag = dirty.pop() + except KeyError: + break + delivered.pop(dirty_tag, None) + + def ack(self, delivery_tag): + """Acknowledge message and remove from transactional state.""" + self._dirty.add(delivery_tag) + + def reject(self, delivery_tag, requeue=False): + """Remove from transactional state and requeue message.""" + if requeue: + self.channel._restore(self._delivered[delivery_tag]) + self._dirty.add(delivery_tag) + + def restore_unacked(self): + """Restore all unacknowledged messages.""" + self._flush() + delivered = self._delivered + errors = [] + + while delivered: + try: + _, message = delivered.popitem() + except KeyError: # pragma: no cover + break + + try: + self.channel._restore(message) + except BaseException, exc: + errors.append((exc, message)) + delivered.clear() + return errors + + def restore_unacked_once(self): + """Restores all unacknowledged message at shutdown/gc collect. + + Will only be done once for each instance. + + """ + self._on_collect.cancel() + self._flush() + state = self._delivered + + if not self.restore_at_shutdown: + return + elif not self.channel.do_restore or getattr(state, 'restored', None): + assert not state + return + + try: + if state: + say('Restoring %r unacknowledged message(s).', + len(self._delivered)) + unrestored = self.restore_unacked() + + if unrestored: + errors, messages = zip(*unrestored) + say('UNABLE TO RESTORE %s MESSAGES: %s', + len(errors), errors) + emergency_dump_state(messages) + finally: + state.restored = True + + def restore_visible(self, start=0, num=10, interval=10): + pass + + +class Message(base.Message): + + def __init__(self, channel, payload, **kwargs): + self._raw = payload + properties = payload['properties'] + body = payload.get('body') + if body: + body = channel.decode_body(body, properties.get('body_encoding')) + fields = {'body': body, + 'delivery_tag': properties['delivery_tag'], + 'content_type': payload.get('content-type'), + 'content_encoding': payload.get('content-encoding'), + 'headers': payload.get('headers'), + 'properties': properties, + 'delivery_info': properties.get('delivery_info'), + 'postencode': 'utf-8'} + super(Message, self).__init__(channel, **dict(kwargs, **fields)) + + def serializable(self): + props = self.properties + body, _ = self.channel.encode_body(self.body, + props.get('body_encoding')) + headers = dict(self.headers) + # remove compression header + headers.pop('compression', None) + return {'body': body, + 'properties': props, + 'content-type': self.content_type, + 'content-encoding': self.content_encoding, + 'headers': headers} + + +class AbstractChannel(object): + """This is an abstract class defining the channel methods + you'd usually want to implement in a virtual channel. + + Do not subclass directly, but rather inherit from :class:`Channel` + instead. 
+ + """ + + def _get(self, queue, timeout=None): + """Get next message from `queue`.""" + raise NotImplementedError('Virtual channels must implement _get') + + def _put(self, queue, message): + """Put `message` onto `queue`.""" + raise NotImplementedError('Virtual channels must implement _put') + + def _purge(self, queue): + """Remove all messages from `queue`.""" + raise NotImplementedError('Virtual channels must implement _purge') + + def _size(self, queue): + """Return the number of messages in `queue` as an :class:`int`.""" + return 0 + + def _delete(self, queue, *args, **kwargs): + """Delete `queue`. + + This just purges the queue, if you need to do more you can + override this method. + + """ + self._purge(queue) + + def _new_queue(self, queue, **kwargs): + """Create new queue. + + Some implementations needs to do additional actions when + the queue is created. You can do so by overriding this + method. + + """ + pass + + def _has_queue(self, queue, **kwargs): + """Verify that queue exists. + + Should return :const:`True` if the queue exists or :const:`False` + otherwise. + + """ + return True + + def _poll(self, cycle, timeout=None): + """Poll a list of queues for available messages.""" + return cycle.get() + + +class Channel(AbstractChannel, base.StdChannel): + """Virtual channel. + + :param connection: The transport instance this channel is part of. + + """ + #: message class used. + Message = Message + + #: QoS class used. + QoS = QoS + + #: flag to restore unacked messages when channel + #: goes out of scope. + do_restore = True + + #: mapping of exchange types and corresponding classes. + exchange_types = dict(STANDARD_EXCHANGE_TYPES) + + #: flag set if the channel supports fanout exchanges. + supports_fanout = False + + #: Binary <-> ASCII codecs. + codecs = {'base64': Base64()} + + #: Default body encoding. + #: NOTE: ``transport_options['body_encoding']`` will override this value. + body_encoding = 'base64' + + #: counter used to generate delivery tags for this channel. + _next_delivery_tag = count(1).next + + #: Optional queue where messages with no route is delivered. + #: Set by ``transport_options['deadletter_queue']``. + deadletter_queue = None + + # List of options to transfer from :attr:`transport_options`. 
+ from_transport_options = ('body_encoding', 'deadletter_queue') + + def __init__(self, connection, **kwargs): + self.connection = connection + self._consumers = set() + self._cycle = None + self._tag_to_queue = {} + self._active_queues = [] + self._qos = None + self.closed = False + + # instantiate exchange types + self.exchange_types = dict( + (typ, cls(self)) for typ, cls in self.exchange_types.items() + ) + + self.channel_id = self.connection._next_channel_id() + + topts = self.connection.client.transport_options + for opt_name in self.from_transport_options: + try: + setattr(self, opt_name, topts[opt_name]) + except KeyError: + pass + + def exchange_declare(self, exchange=None, type='direct', durable=False, + auto_delete=False, arguments=None, + nowait=False, passive=False): + """Declare exchange.""" + type = type or 'direct' + exchange = exchange or 'amq.%s' % (type, ) + if passive: + if exchange not in self.state.exchanges: + raise StdChannelError( + '404', + u'NOT_FOUND - no exchange %r in vhost %r' % ( + exchange, self.connection.client.virtual_host or '/'), + (50, 10), 'Channel.exchange_declare') + return + try: + prev = self.state.exchanges[exchange] + if not self.typeof(exchange).equivalent(prev, exchange, type, + durable, auto_delete, + arguments): + raise NotEquivalentError( + 'Cannot redeclare exchange %r in vhost %r with ' + 'different type, durable or autodelete value' % ( + exchange, self.connection.client.virtual_host or '/')) + except KeyError: + self.state.exchanges[exchange] = { + 'type': type, + 'durable': durable, + 'auto_delete': auto_delete, + 'arguments': arguments or {}, + 'table': [], + } + + def exchange_delete(self, exchange, if_unused=False, nowait=False): + """Delete `exchange` and all its bindings.""" + for rkey, _, queue in self.get_table(exchange): + self.queue_delete(queue, if_unused=True, if_empty=True) + self.state.exchanges.pop(exchange, None) + + def queue_declare(self, queue=None, passive=False, **kwargs): + """Declare queue.""" + queue = queue or 'amq.gen-%s' % uuid() + if passive and not self._has_queue(queue, **kwargs): + raise StdChannelError( + '404', + u'NOT_FOUND - no queue %r in vhost %r' % ( + queue, self.connection.client.virtual_host or '/'), + (50, 10), 'Channel.queue_declare') + else: + self._new_queue(queue, **kwargs) + return queue, self._size(queue), 0 + + def queue_delete(self, queue, if_unusued=False, if_empty=False, **kwargs): + """Delete queue.""" + if if_empty and self._size(queue): + return + try: + exchange, routing_key, arguments = self.state.bindings[queue] + except KeyError: + return + meta = self.typeof(exchange).prepare_bind(queue, exchange, + routing_key, arguments) + self._delete(queue, exchange, *meta) + self.state.bindings.pop(queue, None) + + def after_reply_message_received(self, queue): + self.queue_delete(queue) + + def exchange_bind(self, destination, source='', routing_key='', + nowait=False, arguments=None): + raise NotImplementedError('transport does not support exchange_bind') + + def exchange_unbind(self, destination, source='', routing_key='', + nowait=False, arguments=None): + raise NotImplementedError('transport does not support exchange_unbind') + + def queue_bind(self, queue, exchange=None, routing_key='', + arguments=None, **kwargs): + """Bind `queue` to `exchange` with `routing key`.""" + if queue in self.state.bindings: + return + exchange = exchange or 'amq.direct' + table = self.state.exchanges[exchange].setdefault('table', []) + self.state.bindings[queue] = exchange, routing_key, arguments + meta 
= self.typeof(exchange).prepare_bind(queue, + exchange, + routing_key, + arguments) + table.append(meta) + if self.supports_fanout: + self._queue_bind(exchange, *meta) + + def queue_unbind(self, queue, exchange=None, routing_key='', + arguments=None, **kwargs): + raise NotImplementedError('transport does not support queue_unbind') + + def list_bindings(self): + for exchange in self.state.exchanges: + table = self.get_table(exchange) + for routing_key, pattern, queue in table: + yield queue, exchange, routing_key + + def queue_purge(self, queue, **kwargs): + """Remove all ready messages from queue.""" + return self._purge(queue) + + def basic_publish(self, message, exchange, routing_key, **kwargs): + """Publish message.""" + props = message['properties'] + message['body'], props['body_encoding'] = \ + self.encode_body(message['body'], self.body_encoding) + props['delivery_info']['exchange'] = exchange + props['delivery_info']['routing_key'] = routing_key + props['delivery_tag'] = self._next_delivery_tag() + self.typeof(exchange).deliver(message, + exchange, routing_key, **kwargs) + + def basic_consume(self, queue, no_ack, callback, consumer_tag, **kwargs): + """Consume from `queue`""" + self._tag_to_queue[consumer_tag] = queue + self._active_queues.append(queue) + + def _callback(raw_message): + message = self.Message(self, raw_message) + if not no_ack: + self.qos.append(message, message.delivery_tag) + return callback(message) + + self.connection._callbacks[queue] = _callback + self._consumers.add(consumer_tag) + + self._reset_cycle() + + def basic_cancel(self, consumer_tag): + """Cancel consumer by consumer tag.""" + if consumer_tag in self._consumers: + self._consumers.remove(consumer_tag) + self._reset_cycle() + queue = self._tag_to_queue.pop(consumer_tag, None) + try: + self._active_queues.remove(queue) + except ValueError: + pass + self.connection._callbacks.pop(queue, None) + + def basic_get(self, queue, no_ack=False, **kwargs): + """Get message by direct access (synchronous).""" + try: + message = self.Message(self, self._get(queue)) + if not no_ack: + self.qos.append(message, message.delivery_tag) + return message + except Empty: + pass + + def basic_ack(self, delivery_tag): + """Acknowledge message.""" + self.qos.ack(delivery_tag) + + def basic_recover(self, requeue=False): + """Recover unacked messages.""" + if requeue: + return self.qos.restore_unacked() + raise NotImplementedError('Does not support recover(requeue=False)') + + def basic_reject(self, delivery_tag, requeue=False): + """Reject message.""" + self.qos.reject(delivery_tag, requeue=requeue) + + def basic_qos(self, prefetch_size=0, prefetch_count=0, + apply_global=False): + """Change QoS settings for this channel. + + Only `prefetch_count` is supported. + + """ + self.qos.prefetch_count = prefetch_count + + def get_exchanges(self): + return list(self.state.exchanges) + + def get_table(self, exchange): + """Get table of bindings for `exchange`.""" + return self.state.exchanges[exchange]['table'] + + def typeof(self, exchange, default='direct'): + """Get the exchange type instance for `exchange`.""" + try: + type = self.state.exchanges[exchange]['type'] + except KeyError: + type = default + return self.exchange_types[type] + + def _lookup(self, exchange, routing_key, default=None): + """Find all queues matching `routing_key` for the given `exchange`. + + Returns `default` if no queues matched. 
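
If nothing matches and a dead-letter queue is configured (it is used
as the implicit `default`), an :class:`UndeliverableWarning` is
emitted and the message is diverted to that queue rather than being
dropped.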
+ + """ + if default is None: + default = self.deadletter_queue + try: + R = self.typeof(exchange).lookup(self.get_table(exchange), + exchange, routing_key, default) + except KeyError: + R = [] + + if not R and default is not None: + warnings.warn(UndeliverableWarning(UNDELIVERABLE_FMT % { + 'exchange': exchange, 'routing_key': routing_key})) + self._new_queue(default) + R = [default] + return R + + def _restore(self, message): + """Redeliver message to its original destination.""" + delivery_info = message.delivery_info + message = message.serializable() + message['redelivered'] = True + for queue in self._lookup(delivery_info['exchange'], + delivery_info['routing_key']): + self._put(queue, message) + + def drain_events(self, timeout=None): + if self._consumers and self.qos.can_consume(): + if hasattr(self, '_get_many'): + return self._get_many(self._active_queues, timeout=timeout) + return self._poll(self.cycle, timeout=timeout) + raise Empty() + + def message_to_python(self, raw_message): + """Convert raw message to :class:`Message` instance.""" + if not isinstance(raw_message, self.Message): + return self.Message(self, payload=raw_message) + return raw_message + + def prepare_message(self, body, priority=None, content_type=None, + content_encoding=None, headers=None, properties=None): + """Prepare message data.""" + properties = properties or {} + info = properties.setdefault('delivery_info', {}) + info['priority'] = priority or 0 + + return {'body': body, + 'content-encoding': content_encoding, + 'content-type': content_type, + 'headers': headers or {}, + 'properties': properties or {}} + + def flow(self, active=True): + """Enable/disable message flow. + + :raises NotImplementedError: as flow + is not implemented by the base virtual implementation. + + """ + raise NotImplementedError('virtual channels does not support flow.') + + def close(self): + """Close channel, cancel all consumers, and requeue unacked + messages.""" + if not self.closed: + self.closed = True + for consumer in list(self._consumers): + self.basic_cancel(consumer) + if self._qos: + self._qos.restore_unacked_once() + if self._cycle is not None: + self._cycle.close() + self._cycle = None + if self.connection is not None: + self.connection.close_channel(self) + self.exchange_types = None + + def encode_body(self, body, encoding=None): + if encoding: + return self.codecs.get(encoding).encode(body), encoding + return body, encoding + + def decode_body(self, body, encoding=None): + if encoding: + return self.codecs.get(encoding).decode(body) + return body + + def _reset_cycle(self): + self._cycle = FairCycle(self._get, self._active_queues, Empty) + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.close() + + @property + def state(self): + """Broker state containing exchanges and bindings.""" + return self.connection.state + + @property + def qos(self): + """:class:`QoS` manager for this channel.""" + if self._qos is None: + self._qos = self.QoS(self) + return self._qos + + @property + def cycle(self): + if self._cycle is None: + self._reset_cycle() + return self._cycle + + +class Management(base.Management): + + def __init__(self, transport): + super(Management, self).__init__(transport) + self.channel = transport.client.channel() + + def get_bindings(self): + return [dict(destination=q, source=e, routing_key=r) + for q, e, r in self.channel.list_bindings()] + + def close(self): + self.channel.close() + + +class Transport(base.Transport): + """Virtual transport. 
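`encode_body`/`decode_body` delegate to a small codec registry (`self.codecs`); the virtual transports default to a base64 `body_encoding` so bodies survive stores that are not 8-bit safe. A standalone sketch of the same round trip, with the plain `base64` module standing in for the codec registry:

    import base64

    def encode_body(body, encoding='base64'):
        # mirrors Channel.encode_body(): returns (encoded_body, encoding)
        if encoding:
            return base64.b64encode(body), encoding
        return body, encoding

    def decode_body(body, encoding=None):
        # mirrors Channel.decode_body(): decode only when an encoding is set
        if encoding:
            return base64.b64decode(body)
        return body

    body, encoding = encode_body('{"args": [2, 2]}')
    assert decode_body(body, encoding) == '{"args": [2, 2]}'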
+ + :param client: :class:`~kombu.Connection` instance + + """ + Channel = Channel + Cycle = FairCycle + Management = Management + + #: :class:`BrokerState` containing declared exchanges and + #: bindings (set by constructor). + state = BrokerState() + + #: :class:`~kombu.transport.virtual.scheduling.FairCycle` instance + #: used to fairly drain events from channels (set by constructor). + cycle = None + + #: port number used when no port is specified. + default_port = None + + #: active channels. + channels = None + + #: queue/callback map. + _callbacks = None + + #: Time to sleep between unsuccessful polls. + polling_interval = 1.0 + + def __init__(self, client, **kwargs): + self.client = client + self.channels = [] + self._avail_channels = [] + self._callbacks = {} + self.cycle = self.Cycle(self._drain_channel, self.channels, Empty) + self._next_channel_id = count(1).next + polling_interval = client.transport_options.get('polling_interval') + if polling_interval is not None: + self.polling_interval = polling_interval + + def create_channel(self, connection): + try: + return self._avail_channels.pop() + except IndexError: + channel = self.Channel(connection) + self.channels.append(channel) + return channel + + def close_channel(self, channel): + try: + try: + self.channels.remove(channel) + except ValueError: + pass + finally: + channel.connection = None + + def establish_connection(self): + # creates channel to verify connection. + # this channel is then used as the next requested channel. + # (returned by ``create_channel``). + self._avail_channels.append(self.create_channel(self)) + return self # for drain events + + def close_connection(self, connection): + self.cycle.close() + for l in self._avail_channels, self.channels: + while l: + try: + channel = l.pop() + except (IndexError, KeyError): # pragma: no cover + pass + else: + channel.close() + + def drain_events(self, connection, timeout=None): + loop = 0 + time_start = time() + get = self.cycle.get + polling_interval = self.polling_interval + while 1: + try: + item, channel = get(timeout=timeout) + except Empty: + if timeout and time() - time_start >= timeout: + raise socket.timeout() + loop += 1 + if polling_interval is not None: + sleep(polling_interval) + else: + break + + message, queue = item + + if not queue or queue not in self._callbacks: + raise KeyError( + "Received message for queue '%s' without consumers: %s" % ( + queue, message)) + + self._callbacks[queue](message) + + def _drain_channel(self, channel, timeout=None): + return channel.drain_events(timeout=timeout) + + @property + def default_connection_params(self): + return {'port': self.default_port, 'hostname': 'localhost'} diff --git a/awx/lib/site-packages/kombu/transport/virtual/exchange.py b/awx/lib/site-packages/kombu/transport/virtual/exchange.py new file mode 100644 index 0000000000..c090a5466a --- /dev/null +++ b/awx/lib/site-packages/kombu/transport/virtual/exchange.py @@ -0,0 +1,133 @@ +""" +kombu.transport.virtual.exchange +================================ + +Implementations of the standard exchanges defined +by the AMQ protocol (excluding the `headers` exchange). + +""" +from __future__ import absolute_import + +from kombu.utils import escape_regex + +import re + + +class ExchangeType(object): + """Implements the specifics for an exchange type. 
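As `Transport.__init__` above shows, `polling_interval` can be tuned per connection through `transport_options`. A short sketch, assuming a redis broker URL (any transport built on this virtual base behaves the same):

    import socket

    from kombu import Connection

    # Poll every 0.1s instead of the default 1.0s when a drain comes up
    # empty; drain_events() raises socket.timeout once `timeout` elapses.
    conn = Connection('redis://localhost:6379/0',
                      transport_options={'polling_interval': 0.1})
    conn.connect()
    try:
        conn.drain_events(timeout=2)
    except socket.timeout:
        print('no messages within 2s')
    finally:
        conn.release()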
+ + :param channel: AMQ Channel + + """ + type = None + + def __init__(self, channel): + self.channel = channel + + def lookup(self, table, exchange, routing_key, default): + """Lookup all queues matching `routing_key` in `exchange`. + + :returns: `default` if no queues matched. + + """ + raise NotImplementedError('subclass responsibility') + + def prepare_bind(self, queue, exchange, routing_key, arguments): + """Returns tuple of `(routing_key, regex, queue)` to be stored + for bindings to this exchange.""" + return routing_key, None, queue + + def equivalent(self, prev, exchange, type, + durable, auto_delete, arguments): + """Returns true if `prev` and `exchange` is equivalent.""" + return (type == prev['type'] and + durable == prev['durable'] and + auto_delete == prev['auto_delete'] and + (arguments or {}) == (prev['arguments'] or {})) + + +class DirectExchange(ExchangeType): + """The `direct` exchange routes based on exact routing keys.""" + type = 'direct' + + def lookup(self, table, exchange, routing_key, default): + return [queue for rkey, _, queue in table + if rkey == routing_key] + + def deliver(self, message, exchange, routing_key, **kwargs): + _lookup = self.channel._lookup + _put = self.channel._put + for queue in _lookup(exchange, routing_key): + _put(queue, message, **kwargs) + + +class TopicExchange(ExchangeType): + """The `topic` exchange routes messages based on words separated by + dots, using wildcard characters ``*`` (any single word), and ``#`` + (one or more words).""" + type = 'topic' + + #: map of wildcard to regex conversions + wildcards = {'*': r'.*?[^\.]', + '#': r'.*?'} + + #: compiled regex cache + _compiled = {} + + def lookup(self, table, exchange, routing_key, default): + return [queue for rkey, pattern, queue in table + if self._match(pattern, routing_key)] + + def deliver(self, message, exchange, routing_key, **kwargs): + _lookup = self.channel._lookup + _put = self.channel._put + deadletter = self.channel.deadletter_queue + for queue in [q for q in _lookup(exchange, routing_key) + if q and q != deadletter]: + _put(queue, message, **kwargs) + + def prepare_bind(self, queue, exchange, routing_key, arguments): + return routing_key, self.key_to_pattern(routing_key), queue + + def key_to_pattern(self, rkey): + """Get the corresponding regex for any routing key.""" + return '^%s$' % ('\.'.join( + self.wildcards.get(word, word) + for word in escape_regex(rkey, '.#*').split('.') + )) + + def _match(self, pattern, string): + """Same as :func:`re.match`, except the regex is compiled and cached, + then reused on subsequent matches with the same pattern.""" + try: + compiled = self._compiled[pattern] + except KeyError: + compiled = self._compiled[pattern] = re.compile(pattern, re.U) + return compiled.match(string) + + +class FanoutExchange(ExchangeType): + """The `fanout` exchange implements broadcast messaging by delivering + copies of all messages to all queues bound the the exchange. + + To support fanout the virtual channel needs to store the table + as shared state. This requires that the `Channel.supports_fanout` + attribute is set to true, and the `Channel._queue_bind` and + `Channel.get_table` methods are implemented. See the redis backend + for an example implementation of these methods. 
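To make the `topic` wildcard translation concrete, here is `key_to_pattern` inlined (skipping the `escape_regex()` pass the real method applies first), showing `#` matching one or more dot-separated words:

    import re

    wildcards = {'*': r'.*?[^\.]',   # one word, as defined above
                 '#': r'.*?'}        # one or more words

    def key_to_pattern(rkey):
        # 'stock.#' -> r'^stock\..*?$'
        return '^%s$' % r'\.'.join(
            wildcards.get(word, word) for word in rkey.split('.'))

    pattern = re.compile(key_to_pattern('stock.#'), re.U)
    print(bool(pattern.match('stock.nyse.ibm')))   # True
    print(bool(pattern.match('trade.nyse.ibm')))   # False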
+
+    """
+    type = 'fanout'
+
+    def lookup(self, table, exchange, routing_key, default):
+        return [queue for _, _, queue in table]
+
+    def deliver(self, message, exchange, routing_key, **kwargs):
+        if self.channel.supports_fanout:
+            self.channel._put_fanout(exchange, message, **kwargs)
+
+
+#: Map of standard exchange types and corresponding classes.
+STANDARD_EXCHANGE_TYPES = {'direct': DirectExchange,
+                           'topic': TopicExchange,
+                           'fanout': FanoutExchange}
diff --git a/awx/lib/site-packages/kombu/transport/virtual/scheduling.py b/awx/lib/site-packages/kombu/transport/virtual/scheduling.py
new file mode 100644
index 0000000000..7c4e6e0c86
--- /dev/null
+++ b/awx/lib/site-packages/kombu/transport/virtual/scheduling.py
@@ -0,0 +1,49 @@
+"""
+    kombu.transport.virtual.scheduling
+    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+    Consumer utilities.
+
+"""
+from __future__ import absolute_import
+
+from itertools import count
+
+
+class FairCycle(object):
+    """Consume from a set of resources, where each resource gets
+    an equal chance to be consumed from."""
+
+    def __init__(self, fun, resources, predicate=Exception):
+        self.fun = fun
+        self.resources = resources
+        self.predicate = predicate
+        self.pos = 0
+
+    def _next(self):
+        while 1:
+            try:
+                resource = self.resources[self.pos]
+                self.pos += 1
+                return resource
+            except IndexError:
+                self.pos = 0
+                if not self.resources:
+                    raise self.predicate()
+
+    def get(self, **kwargs):
+        for tried in count(0):  # for infinity
+            resource = self._next()
+
+            try:
+                return self.fun(resource, **kwargs), resource
+            except self.predicate:
+                if tried >= len(self.resources) - 1:
+                    raise
+
+    def close(self):
+        pass
+
+    def __repr__(self):
+        return '<FairCycle: %s/%s %r>' % (self.pos, len(self.resources),
+                                          self.resources, )
diff --git a/awx/lib/site-packages/kombu/transport/zmq.py b/awx/lib/site-packages/kombu/transport/zmq.py
new file mode 100644
index 0000000000..f6a423f0e8
--- /dev/null
+++ b/awx/lib/site-packages/kombu/transport/zmq.py
@@ -0,0 +1,300 @@
+"""
+kombu.transport.zmq
+===================
+
+ZeroMQ transport.
+
+"""
+from __future__ import absolute_import
+
+import errno
+import os
+import socket
+
+from cPickle import loads, dumps
+from Queue import Empty
+
+try:
+    import zmq
+    from zmq import ZMQError
+except ImportError:
+    zmq = ZMQError = None  # noqa
+
+from kombu.exceptions import StdConnectionError, StdChannelError
+from kombu.log import get_logger
+from kombu.utils import cached_property
+from kombu.utils.eventio import poll, READ
+
+from .
import virtual + +logger = get_logger('kombu.transport.zmq') + +DEFAULT_PORT = 5555 +DEFAULT_HWM = 128 +DEFAULT_INCR = 1 + + +class MultiChannelPoller(object): + eventflags = READ + + def __init__(self): + # active channels + self._channels = set() + # file descriptor -> channel map + self._fd_to_chan = {} + # poll implementation (epoll/kqueue/select) + self.poller = poll() + + def close(self): + for fd in self._fd_to_chan: + try: + self.poller.unregister(fd) + except KeyError: + pass + self._channels.clear() + self._fd_to_chan.clear() + self.poller = None + + def add(self, channel): + self._channels.add(channel) + + def discard(self, channel): + self._channels.discard(channel) + self._fd_to_chan.pop(channel.client.connection.fd, None) + + def _register(self, channel): + conn = channel.client.connection + self._fd_to_chan[conn.fd] = channel + self.poller.register(conn.fd, self.eventflags) + + def on_poll_start(self): + for channel in self._channels: + self._register(channel) + + def handle_event(self, fileno, event): + chan = self._fd_to_chan[fileno] + return (chan.drain_events(), chan) + + def get(self, timeout=None): + self.on_poll_start() + + events = self.poller.poll(timeout) + for fileno, event in events or []: + return self.handle_event(fileno, event) + + raise Empty() + + @property + def fds(self): + return self._fd_to_chan + + +class Client(object): + def __init__(self, uri='tcp://127.0.0.1', port=DEFAULT_PORT, + hwm=DEFAULT_HWM, swap_size=None, enable_sink=True, + context=None): + try: + scheme, parts = uri.split('://') + except ValueError: + scheme = 'tcp' + parts = uri + endpoints = parts.split(';') + + if scheme != 'tcp': + raise NotImplementedError('Currently only TCP can be used') + + self.context = context or zmq.Context.instance() + + if enable_sink: + self.sink = self.context.socket(zmq.PULL) + self.sink.bind('tcp://*:%s' % port) + else: + self.sink = None + + self.vent = self.context.socket(zmq.PUSH) + + if hasattr(zmq, 'SNDHWM'): + self.vent.setsockopt(zmq.SNDHWM, hwm) + else: + self.vent.setsockopt(zmq.HWM, hwm) + + if swap_size: + self.vent.setsockopt(zmq.SWAP, swap_size) + + for endpoint in endpoints: + if scheme == 'tcp' and ':' not in endpoint: + endpoint += ':' + str(DEFAULT_PORT) + + endpoint = ''.join([scheme, '://', endpoint]) + + self.connect(endpoint) + + def connect(self, endpoint): + self.vent.connect(endpoint) + + def get(self, queue=None, timeout=None): + try: + return self.sink.recv(flags=zmq.NOBLOCK) + except ZMQError, e: + if e.errno == zmq.EAGAIN: + raise socket.error(errno.EAGAIN, e.strerror) + else: + raise + + def put(self, queue, message, **kwargs): + return self.vent.send(message) + + def close(self): + if self.sink and not self.sink.closed: + self.sink.close() + if not self.vent.closed: + self.vent.close() + + @property + def connection(self): + if self.sink: + return self.sink + return self.vent + + +class Channel(virtual.Channel): + Client = Client + + hwm = DEFAULT_HWM + swap_size = None + enable_sink = True + port_incr = DEFAULT_INCR + + from_transport_options = ( + virtual.Channel.from_transport_options + + ('hwm', 'swap_size', 'enable_sink', 'port_incr') + ) + + def __init__(self, *args, **kwargs): + super_ = super(Channel, self) + super_.__init__(*args, **kwargs) + + # Evaluate socket + self.client.connection.closed + + self.connection.cycle.add(self) + self.connection_errors = self.connection.connection_errors + + def _get(self, queue, timeout=None): + try: + return loads(self.client.get(queue, timeout)) + except socket.error, exc: + if 
exc.errno == errno.EAGAIN and timeout != 0: + raise Empty() + else: + raise + + def _put(self, queue, message, **kwargs): + self.client.put(queue, dumps(message, -1), **kwargs) + + def _purge(self, queue): + return 0 + + def _poll(self, cycle, timeout=None): + return cycle.get(timeout=timeout) + + def close(self): + if not self.closed: + self.connection.cycle.discard(self) + try: + self.__dict__['client'].close() + except KeyError: + pass + super(Channel, self).close() + + def _prepare_port(self, port): + return (port + self.channel_id - 1) * self.port_incr + + def _create_client(self): + conninfo = self.connection.client + port = self._prepare_port(conninfo.port or DEFAULT_PORT) + return self.Client(uri=conninfo.hostname or 'tcp://127.0.0.1', + port=port, + hwm=self.hwm, + swap_size=self.swap_size, + enable_sink=self.enable_sink, + context=self.connection.context) + + @cached_property + def client(self): + return self._create_client() + + +class Transport(virtual.Transport): + Channel = Channel + + default_port = DEFAULT_PORT + driver_type = 'zeromq' + driver_name = 'zmq' + + connection_errors = (StdConnectionError, ZMQError,) + channel_errors = (StdChannelError, ) + + supports_ev = True + polling_interval = None + nb_keep_draining = True + + def __init__(self, *args, **kwargs): + if zmq is None: + raise ImportError('The zmq library is not installed') + super(Transport, self).__init__(*args, **kwargs) + self.cycle = MultiChannelPoller() + + def driver_version(self): + return zmq.__version__ + + def on_poll_init(self, poller): + self.cycle.poller = poller + + def on_poll_start(self): + cycle = self.cycle + cycle.on_poll_start() + return dict((fd, self.handle_event) for fd in cycle.fds) + + def handle_event(self, fileno, event): + evt = self.cycle.handle_event(fileno, event) + self._handle_event(evt) + + def drain_events(self, connection, timeout=None): + more_to_read = False + for channel in connection.channels: + try: + evt = channel.cycle.get(timeout=timeout) + except socket.error, e: + if e.errno == errno.EAGAIN: + continue + raise + else: + connection._handle_event((evt, channel)) + more_to_read = True + if not more_to_read: + raise socket.error(errno.EAGAIN, os.strerror(errno.EAGAIN)) + + def _handle_event(self, evt): + item, channel = evt + message, queue = item + if not queue or queue not in self._callbacks: + raise KeyError( + "Received message for queue '%s' without consumers: %s" % ( + queue, message)) + self._callbacks[queue](message) + + def establish_connection(self): + self.context.closed + return super(Transport, self).establish_connection() + + def close_connection(self, connection): + super(Transport, self).close_connection(connection) + try: + connection.__dict__['context'].term() + except KeyError: + pass + + @cached_property + def context(self): + return zmq.Context(1) diff --git a/awx/lib/site-packages/kombu/transport/zookeeper.py b/awx/lib/site-packages/kombu/transport/zookeeper.py new file mode 100644 index 0000000000..0f6045b060 --- /dev/null +++ b/awx/lib/site-packages/kombu/transport/zookeeper.py @@ -0,0 +1,183 @@ +""" +kombu.transport.zookeeper +========================= + +Zookeeper transport. + +:copyright: (c) 2010 - 2012 by Mahendra M. +:license: BSD, see LICENSE for more details. + +**Synopsis** + +- Connects to a zookeeper node as :/ + The becomes the base for all the other znodes. 
So we can use + it like a vhost +- A queue is a znode under the path +- Creates a new sequential node under the queue and writes the message to it +- If priority is used, we will use it in the node name, so that higher + priority messages are picked up first +- Keep consuming messages from the top of the queue, till we + are able to delete a particular message. If deletion raises a + NoNode exception, we try again with the next message + +**References** + +- https://zookeeper.apache.org/doc/trunk/recipes.html#sc_recipes_Queues +- http://bit.ly/cZHf9g + +**Limitations** + +- A queue cannot handle more than 2^32 messages. This is an internal + limitation with zookeeper. This has to be handled internally in this + module. + +""" +from __future__ import absolute_import + +from Queue import Empty + +import socket + +from anyjson import loads, dumps + +from kombu.exceptions import StdConnectionError, StdChannelError +from . import virtual + +try: + import kazoo + + KZ_CONNECTION_ERRORS = ( + kazoo.zkclient.SystemErrorException, + kazoo.zkclient.ConnectionLossException, + kazoo.zkclient.MarshallingErrorException, + kazoo.zkclient.UnimplementedException, + kazoo.zkclient.OperationTimeoutException, + kazoo.zkclient.NoAuthException, + kazoo.zkclient.InvalidACLException, + kazoo.zkclient.AuthFailedException, + kazoo.zkclient.SessionExpiredException, + ) + + KZ_CHANNEL_ERRORS = ( + kazoo.zkclient.RuntimeInconsistencyException, + kazoo.zkclient.DataInconsistencyException, + kazoo.zkclient.BadArgumentsException, + kazoo.zkclient.MarshallingErrorException, + kazoo.zkclient.UnimplementedException, + kazoo.zkclient.OperationTimeoutException, + kazoo.zkclient.ApiErrorException, + kazoo.zkclient.NoNodeException, + kazoo.zkclient.NoAuthException, + kazoo.zkclient.NodeExistsException, + kazoo.zkclient.NoChildrenForEphemeralsException, + kazoo.zkclient.NotEmptyException, + kazoo.zkclient.SessionExpiredException, + kazoo.zkclient.InvalidCallbackException, + ) +except ImportError: + kazoo = None # noqa + KZ_CONNECTION_ERRORS = KZ_CHANNEL_ERRORS = () # noqa + +DEFAULT_PORT = 2181 + +__author__ = 'Mahendra M ' + + +class Channel(virtual.Channel): + + _client = None + + def _get_queue(self, queue): + return '/%s' % (queue, ) + + def _put(self, queue, message, **kwargs): + try: + priority = message['properties']['delivery_info']['priority'] + except KeyError: + priority = 0 + + msg_id = '%s/msg-%02d' % (self._get_queue(queue), priority % 10) + self.client.create(msg_id, dumps(message), sequence=True) + + def _get_msg(self, queue, msgs): + msgs.sort() # this is a bad hack, but required + + for msg_id in msgs: + msg_id = '%s/%s' % (queue, msg_id) + try: + message, headers = self.client.get(msg_id) + self.client.delete(msg_id) + except kazoo.zkclient.NoNodeException: + pass # Someone has got this message + else: + return loads(message) + + raise Empty() + + def _get(self, queue): + queue = self._get_queue(queue) + msgs = self.client.get_children(queue) + return self._get_msg(queue, msgs) + + def _purge(self, queue): + failures = 0 + queue = self._get_queue(queue) + + for count, msg_id in enumerate(self.client.get_children(queue)): + try: + self.client.delete('%s/%s' % (queue, msg_id)) + except kazoo.zkclient.NoNodeException: + failures += 1 + return count - failures + + def _delete(self, queue, *args, **kwargs): + if self._has_queue(queue): + queue = self._get_queue(queue) + self._purge(queue) + self.client.delete(queue) + + def _size(self, queue): + _, meta = self.client.get(self._get_queue(queue)) + return 
meta['numChildren'] + + def _new_queue(self, queue, **kwargs): + if not self._has_queue(queue): + self.client.create(self._get_queue(queue), '') + + def _has_queue(self, queue): + return self.client.exists(self._get_queue(queue)) is not None + + def _open(self): + conninfo = self.connection.client + port = conninfo.port or DEFAULT_PORT + conn_str = '%s:%s' % (conninfo.hostname, port) + conn_str += '/' + conninfo.virtual_host[0:-1] + + conn = kazoo.ZooKeeperClient(conn_str) + conn.connect(timeout=conninfo.connect_timeout) + return conn + + @property + def client(self): + if self._client is None: + self._client = self._open() + return self._client + + +class Transport(virtual.Transport): + Channel = Channel + polling_interval = 1 + default_port = DEFAULT_PORT + connection_errors = (StdConnectionError, ) + KZ_CONNECTION_ERRORS + channel_errors = (StdChannelError, socket.error) + KZ_CHANNEL_ERRORS + driver_type = 'zookeeper' + driver_name = 'kazoo' + + def __init__(self, *args, **kwargs): + if kazoo is None: + raise ImportError('The kazoo library is not installed') + + super(Transport, self).__init__(*args, **kwargs) + + def driver_version(self): + return kazoo.__version__ diff --git a/awx/lib/site-packages/kombu/utils/__init__.py b/awx/lib/site-packages/kombu/utils/__init__.py new file mode 100644 index 0000000000..532fb883b4 --- /dev/null +++ b/awx/lib/site-packages/kombu/utils/__init__.py @@ -0,0 +1,403 @@ +""" +kombu.utils +=========== + +Internal utilities. + +""" +from __future__ import absolute_import + +import importlib +import random +import sys + +from contextlib import contextmanager +from itertools import count, repeat +from time import sleep +from uuid import UUID, uuid4 as _uuid4, _uuid_generate_random + +from .encoding import safe_repr as _safe_repr + +try: + import ctypes +except: + ctypes = None # noqa + +__all__ = ['EqualityDict', 'say', 'uuid', 'kwdict', 'maybe_list', + 'fxrange', 'fxrangemax', 'retry_over_time', + 'emergency_dump_state', 'cached_property', + 'reprkwargs', 'reprcall', 'nested'] + + +def symbol_by_name(name, aliases={}, imp=None, package=None, + sep='.', default=None, **kwargs): + """Get symbol by qualified name. + + The name should be the full dot-separated path to the class:: + + modulename.ClassName + + Example:: + + celery.concurrency.processes.TaskPool + ^- class name + + or using ':' to separate module and symbol:: + + celery.concurrency.processes:TaskPool + + If `aliases` is provided, a dict containing short name/long name + mappings, the name is looked up in the aliases first. + + Examples: + + >>> symbol_by_name('celery.concurrency.processes.TaskPool') + + + >>> symbol_by_name('default', { + ... 'default': 'celery.concurrency.processes.TaskPool'}) + + + # Does not try to look up non-string names. 
+ >>> from celery.concurrency.processes import TaskPool + >>> symbol_by_name(TaskPool) is TaskPool + True + + """ + if imp is None: + imp = importlib.import_module + + if not isinstance(name, basestring): + return name # already a class + + name = aliases.get(name) or name + sep = ':' if ':' in name else sep + module_name, _, cls_name = name.rpartition(sep) + if not module_name: + cls_name, module_name = None, package if package else cls_name + try: + try: + module = imp(module_name, package=package, **kwargs) + except ValueError, exc: + raise ValueError, ValueError( + "Couldn't import %r: %s" % (name, exc)), sys.exc_info()[2] + return getattr(module, cls_name) if cls_name else module + except (ImportError, AttributeError): + if default is None: + raise + return default + + +def eqhash(o): + try: + return o.__eqhash__() + except AttributeError: + return hash(o) + + +class EqualityDict(dict): + + def __getitem__(self, key): + h = eqhash(key) + if h not in self: + return self.__missing__(key) + return dict.__getitem__(self, h) + + def __setitem__(self, key, value): + return dict.__setitem__(self, eqhash(key), value) + + def __delitem__(self, key): + return dict.__delitem__(self, eqhash(key)) + + +def say(m, *s): + sys.stderr.write(str(m) % s + '\n') + + +def uuid4(): + # Workaround for http://bugs.python.org/issue4607 + if ctypes and _uuid_generate_random: # pragma: no cover + buffer = ctypes.create_string_buffer(16) + _uuid_generate_random(buffer) + return UUID(bytes=buffer.raw) + return _uuid4() + + +def uuid(): + """Generate a unique id, having - hopefully - a very small chance of + collision. + + For now this is provided by :func:`uuid.uuid4`. + """ + return str(uuid4()) +gen_unique_id = uuid + + +if sys.version_info >= (2, 6, 5): + + def kwdict(kwargs): + return kwargs +else: + def kwdict(kwargs): # pragma: no cover # noqa + """Make sure keyword arguments are not in Unicode. + + This should be fixed in newer Python versions, + see: http://bugs.python.org/issue4978. + + """ + return dict((key.encode('utf-8'), value) + for key, value in kwargs.items()) + + +def maybe_list(v): + if v is None: + return [] + if hasattr(v, '__iter__'): + return v + return [v] + + +def fxrange(start=1.0, stop=None, step=1.0, repeatlast=False): + cur = start * 1.0 + while 1: + if not stop or cur <= stop: + yield cur + cur += step + else: + if not repeatlast: + break + yield cur - step + + +def fxrangemax(start=1.0, stop=None, step=1.0, max=100.0): + sum_, cur = 0, start * 1.0 + while 1: + if sum_ >= max: + break + yield cur + if stop: + cur = min(cur + step, stop) + else: + cur += step + sum_ += cur + + +def retry_over_time(fun, catch, args=[], kwargs={}, errback=None, + max_retries=None, interval_start=2, interval_step=2, + interval_max=30, callback=None): + """Retry the function over and over until max retries is exceeded. + + For each retry we sleep a for a while before we try again, this interval + is increased for every retry until the max seconds is reached. + + :param fun: The function to try + :param catch: Exceptions to catch, can be either tuple or a single + exception class. + :keyword args: Positional arguments passed on to the function. + :keyword kwargs: Keyword arguments passed on to the function. + :keyword errback: Callback for when an exception in ``catch`` is raised. + The callback must take two arguments: ``exc`` and ``interval``, where + ``exc`` is the exception instance, and ``interval`` is the time in + seconds to sleep next.. 
+ :keyword max_retries: Maximum number of retries before we give up. + If this is not set, we will retry forever. + :keyword interval_start: How long (in seconds) we start sleeping between + retries. + :keyword interval_step: By how much the interval is increased for each + retry. + :keyword interval_max: Maximum number of seconds to sleep between retries. + + """ + retries = 0 + interval_range = fxrange(interval_start, + interval_max + interval_start, + interval_step, repeatlast=True) + for retries in count(): + try: + return fun(*args, **kwargs) + except catch, exc: + if max_retries is not None and retries > max_retries: + raise + if callback: + callback() + tts = (errback(exc, interval_range, retries) if errback + else next(interval_range)) + if tts: + for i in range(int(tts / interval_step)): + if callback: + callback() + sleep(interval_step) + + +def emergency_dump_state(state, open_file=open, dump=None): + from pprint import pformat + from tempfile import mktemp + + if dump is None: + import pickle + dump = pickle.dump + persist = mktemp() + say('EMERGENCY DUMP STATE TO FILE -> %s <-' % persist) + fh = open_file(persist, 'w') + try: + try: + dump(state, fh, protocol=0) + except Exception, exc: + say('Cannot pickle state: %r. Fallback to pformat.' % (exc, )) + fh.write(pformat(state)) + finally: + fh.flush() + fh.close() + return persist + + +class cached_property(object): + """Property descriptor that caches the return value + of the get function. + + *Examples* + + .. code-block:: python + + @cached_property + def connection(self): + return Connection() + + @connection.setter # Prepares stored value + def connection(self, value): + if value is None: + raise TypeError('Connection must be a connection') + return value + + @connection.deleter + def connection(self, value): + # Additional action to do at del(self.attr) + if value is not None: + print('Connection %r deleted' % (value, )) + + """ + + def __init__(self, fget=None, fset=None, fdel=None, doc=None): + self.__get = fget + self.__set = fset + self.__del = fdel + self.__doc__ = doc or fget.__doc__ + self.__name__ = fget.__name__ + self.__module__ = fget.__module__ + + def __get__(self, obj, type=None): + if obj is None: + return self + try: + return obj.__dict__[self.__name__] + except KeyError: + value = obj.__dict__[self.__name__] = self.__get(obj) + return value + + def __set__(self, obj, value): + if obj is None: + return self + if self.__set is not None: + value = self.__set(obj, value) + obj.__dict__[self.__name__] = value + + def __delete__(self, obj): + if obj is None: + return self + try: + value = obj.__dict__.pop(self.__name__) + except KeyError: + pass + else: + if self.__del is not None: + self.__del(obj, value) + + def setter(self, fset): + return self.__class__(self.__get, fset, self.__del) + + def deleter(self, fdel): + return self.__class__(self.__get, self.__set, fdel) + + +def reprkwargs(kwargs, sep=', ', fmt='%s=%s'): + return sep.join(fmt % (k, _safe_repr(v)) for k, v in kwargs.iteritems()) + + +def reprcall(name, args=(), kwargs={}, sep=', '): + return '%s(%s%s%s)' % (name, sep.join(map(_safe_repr, args or ())), + (args and kwargs) and sep or '', + reprkwargs(kwargs, sep)) + + +@contextmanager +def nested(*managers): # pragma: no cover + # flake8: noqa + """Combine multiple context managers into a single nested + context manager.""" + exits = [] + vars = [] + exc = (None, None, None) + try: + try: + for mgr in managers: + exit = mgr.__exit__ + enter = mgr.__enter__ + vars.append(enter()) + 
exits.append(exit) + yield vars + except: + exc = sys.exc_info() + finally: + while exits: + exit = exits.pop() + try: + if exit(*exc): + exc = (None, None, None) + except: + exc = sys.exc_info() + if exc != (None, None, None): + # Don't rely on sys.exc_info() still containing + # the right information. Another exception may + # have been raised and caught by an exit method + raise exc[0], exc[1], exc[2] + finally: + del(exc) + + +def shufflecycle(it): + it = list(it) # don't modify callers list + shuffle = random.shuffle + for _ in repeat(None): + shuffle(it) + yield it[0] + + +def entrypoints(namespace): + try: + from pkg_resources import iter_entry_points + except ImportError: + return iter([]) + return ((ep, ep.load()) for ep in iter_entry_points(namespace)) + + +class ChannelPromise(object): + + def __init__(self, contract): + self.__contract__ = contract + + def __call__(self): + try: + return self.__value__ + except AttributeError: + value = self.__value__ = self.__contract__() + return value + + def __repr__(self): + return '' % (self(), ) + + +def escape_regex(p, white=''): + # what's up with re.escape? that code must be neglected or someting + return ''.join(c if c.isalnum() or c in white + else ('\\000' if c == '\000' else '\\' + c) + for c in p) diff --git a/awx/lib/site-packages/kombu/utils/amq_manager.py b/awx/lib/site-packages/kombu/utils/amq_manager.py new file mode 100644 index 0000000000..08ee393968 --- /dev/null +++ b/awx/lib/site-packages/kombu/utils/amq_manager.py @@ -0,0 +1,18 @@ +from __future__ import absolute_import + + +def get_manager(client, hostname=None, port=None, userid=None, + password=None): + import pyrabbit + opt = client.transport_options.get + + def get(name, val, default): + return (val if val is not None + else opt('manager_%s' % name) + or getattr(client, name, None) or default) + + host = get('hostname', hostname, 'localhost') + port = port if port is not None else opt('manager_port', 55672) + userid = get('userid', userid, 'guest') + password = get('password', password, 'guest') + return pyrabbit.Client('%s:%s' % (host, port), userid, password) diff --git a/awx/lib/site-packages/kombu/utils/compat.py b/awx/lib/site-packages/kombu/utils/compat.py new file mode 100644 index 0000000000..9442687c31 --- /dev/null +++ b/awx/lib/site-packages/kombu/utils/compat.py @@ -0,0 +1,143 @@ +""" +kombu.utils.compat +================== + +Helps compatibility with older Python versions. 
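`retry_over_time` (defined earlier in this module) is what kombu uses for reconnect loops such as `Connection.ensure_connection`. A minimal sketch of its behaviour; `flaky` is an illustrative stand-in for a connection attempt:

    from kombu.utils import retry_over_time

    state = {'calls': 0}

    def flaky():
        state['calls'] += 1
        if state['calls'] < 3:
            raise IOError('transient failure')
        return 'connected'

    # Retry IOError up to five times; zero intervals keep the example
    # from sleeping between attempts.
    print(retry_over_time(flaky, IOError, max_retries=5,
                          interval_start=0, interval_step=0))
    # -> 'connected', on the third call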
+ +""" +############## py3k ######################################################### +import sys +is_py3k = sys.version_info[0] == 3 + +if is_py3k: # pragma: no cover + from io import StringIO, BytesIO + from .encoding import bytes_to_str + + class WhateverIO(StringIO): + + def write(self, data): + StringIO.write(self, bytes_to_str(data)) +else: + from StringIO import StringIO # noqa + BytesIO = WhateverIO = StringIO # noqa + +############## __builtins__.next ############################################# +try: + next = next +except NameError: + def next(it, *args): # noqa + try: + return it.next() + except StopIteration: + if not args: + raise + return args[0] + +############## socket.error.errno ############################################ + + +def get_errno(exc): + """:exc:`socket.error` and :exc:`IOError` first got + the ``.errno`` attribute in Py2.7""" + try: + return exc.errno + except AttributeError: + try: + # e.args = (errno, reason) + if isinstance(exc.args, tuple) and len(exc.args) == 2: + return exc.args[0] + except AttributeError: + pass + return 0 + +############## collections.OrderedDict ####################################### +try: + from collections import OrderedDict +except ImportError: + from ordereddict import OrderedDict # noqa + +############## queue.LifoQueue ############################################## +from Queue import Queue + + +class LifoQueue(Queue): + + def _init(self, maxsize): + self.queue = [] + self.maxsize = maxsize + + def _qsize(self, len=len): + return len(self.queue) + + def _put(self, item): + self.queue.append(item) + + def _get(self): + return self.queue.pop() + +############## logging.handlers.WatchedFileHandler ########################## +import logging +import os +import platform as _platform + +from stat import ST_DEV, ST_INO + +if _platform.system() == 'Windows': + #since windows doesn't go with WatchedFileHandler use FileHandler instead + WatchedFileHandler = logging.FileHandler +else: + try: + from logging.handlers import WatchedFileHandler + except ImportError: + class WatchedFileHandler(logging.FileHandler): # noqa + """ + A handler for logging to a file, which watches the file + to see if it has changed while in use. This can happen because of + usage of programs such as newsyslog and logrotate which perform + log file rotation. This handler, intended for use under Unix, + watches the file to see if it has changed since the last emit. + (A file has changed if its device or inode have changed.) + If it has changed, the old file stream is closed, and the file + opened to get a new stream. + + This handler is not appropriate for use under Windows, because + under Windows open files cannot be moved or renamed - logging + opens the files with exclusive locks - and so there is no need + for such a handler. Furthermore, ST_INO is not supported under + Windows; stat always returns zero for this value. + + This handler is based on a suggestion and patch by Chad J. + Schroeder. + """ + def __init__(self, *args, **kwargs): + logging.FileHandler.__init__(self, *args, **kwargs) + + if not os.path.exists(self.baseFilename): + self.dev, self.ino = -1, -1 + else: + stat = os.stat(self.baseFilename) + self.dev, self.ino = stat[ST_DEV], stat[ST_INO] + + def emit(self, record): + """ + Emit a record. + + First check if the underlying file has changed, and if it + has, close the old stream and reopen the file to get the + current stream. 
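The `LifoQueue` backport above keeps the locking `Queue.Queue` interface but pops newest-first (`Queue.LifoQueue` was only added to the stdlib in Python 2.6). A quick illustration, assuming the class above is in scope:

    q = LifoQueue()
    for item in ['first', 'second', 'third']:
        q.put(item)

    print(q.get())   # 'third' -- last in, first out
    print(q.get())   # 'second'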
+ """ + if not os.path.exists(self.baseFilename): + stat = None + changed = 1 + else: + stat = os.stat(self.baseFilename) + changed = ((stat[ST_DEV] != self.dev) or + (stat[ST_INO] != self.ino)) + if changed and self.stream is not None: + self.stream.flush() + self.stream.close() + self.stream = self._open() + if stat is None: + stat = os.stat(self.baseFilename) + self.dev, self.ino = stat[ST_DEV], stat[ST_INO] + logging.FileHandler.emit(self, record) diff --git a/awx/lib/site-packages/kombu/utils/debug.py b/awx/lib/site-packages/kombu/utils/debug.py new file mode 100644 index 0000000000..9df2768803 --- /dev/null +++ b/awx/lib/site-packages/kombu/utils/debug.py @@ -0,0 +1,64 @@ +""" +kombu.utils.debug +================= + +Debugging support. + +""" +from __future__ import absolute_import + +import logging + +from functools import wraps + +from kombu.log import get_logger + +__all__ = ['setup_logging', 'Logwrapped'] + + +def setup_logging(loglevel=logging.DEBUG, loggers=['kombu.connection', + 'kombu.channel']): + for logger in loggers: + l = get_logger(logger) + l.addHandler(logging.StreamHandler()) + l.setLevel(loglevel) + + +class Logwrapped(object): + __ignore = ('__enter__', '__exit__') + + def __init__(self, instance, logger=None, ident=None): + self.instance = instance + self.logger = get_logger(logger) + self.ident = ident + + def __getattr__(self, key): + meth = getattr(self.instance, key) + + if not callable(meth) or key in self.__ignore: + return meth + + @wraps(meth) + def __wrapped(*args, **kwargs): + info = '' + if self.ident: + info += self.ident % vars(self.instance) + info += '%s(' % (meth.__name__, ) + if args: + info += ', '.join(map(repr, args)) + if kwargs: + if args: + info += ', ' + info += ', '.join('%s=%r' % (key, value) + for key, value in kwargs.iteritems()) + info += ')' + self.logger.debug(info) + return meth(*args, **kwargs) + + return __wrapped + + def __repr__(self): + return repr(self.instance) + + def __dir__(self): + return dir(self.instance) diff --git a/awx/lib/site-packages/kombu/utils/encoding.py b/awx/lib/site-packages/kombu/utils/encoding.py new file mode 100644 index 0000000000..efeeb32957 --- /dev/null +++ b/awx/lib/site-packages/kombu/utils/encoding.py @@ -0,0 +1,108 @@ +# -*- coding: utf-8 -*- +""" +kombu.utils.encoding +~~~~~~~~~~~~~~~~~~~~~ + +Utilities to encode text, and to safely emit text from running +applications without crashing with the infamous :exc:`UnicodeDecodeError` +exception. 
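`Logwrapped` proxies an object and logs every method call with its arguments at DEBUG level, which is handy for tracing channel traffic. A sketch, assuming the module above is importable; `Greeter` and the ident string are made-up names:

    setup_logging()  # attach StreamHandlers to kombu.connection/kombu.channel

    class Greeter(object):
        def greet(self, name, shout=False):
            return ('HELLO %s' if shout else 'hello %s') % name

    g = Logwrapped(Greeter(), logger='kombu.channel', ident='[greeter] ')
    g.greet('world', shout=True)
    # logged: [greeter] greet('world', shout=True)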
+ +""" +from __future__ import absolute_import + +import sys +import traceback + +is_py3k = sys.version_info >= (3, 0) + +if sys.platform.startswith('java'): # pragma: no cover + + def default_encoding(): + return 'utf-8' +else: + + def default_encoding(): # noqa + return sys.getfilesystemencoding() + +if is_py3k: # pragma: no cover + + def str_to_bytes(s): + if isinstance(s, str): + return s.encode() + return s + + def bytes_to_str(s): + if isinstance(s, bytes): + return s.decode() + return s + + def from_utf8(s, *args, **kwargs): + return s + + def ensure_bytes(s): + if not isinstance(s, bytes): + return str_to_bytes(s) + return s + + def default_encode(obj): + return obj + + str_t = str + +else: + + def str_to_bytes(s): # noqa + if isinstance(s, unicode): + return s.encode() + return s + + def bytes_to_str(s): # noqa + return s + + def from_utf8(s, *args, **kwargs): # noqa + return s.encode('utf-8', *args, **kwargs) + + def default_encode(obj): # noqa + return unicode(obj, default_encoding()) + + str_t = unicode + ensure_bytes = str_to_bytes + + +try: + bytes_t = bytes +except NameError: + bytes_t = str # noqa + + +def safe_str(s, errors='replace'): + s = bytes_to_str(s) + if not isinstance(s, basestring): + return safe_repr(s, errors) + return _safe_str(s, errors) + + +def _safe_str(s, errors='replace'): + if is_py3k: # pragma: no cover + if isinstance(s, str): + return s + try: + return str(s) + except Exception, exc: + return '' % ( + type(s), exc, '\n'.join(traceback.format_stack())) + encoding = default_encoding() + try: + if isinstance(s, unicode): + return s.encode(encoding, errors) + return unicode(s, encoding, errors) + except Exception, exc: + return '' % ( + type(s), exc, '\n'.join(traceback.format_stack())) + + +def safe_repr(o, errors='replace'): + try: + return repr(o) + except Exception: + return _safe_str(o, errors) diff --git a/awx/lib/site-packages/kombu/utils/eventio.py b/awx/lib/site-packages/kombu/utils/eventio.py new file mode 100644 index 0000000000..e9acc923e8 --- /dev/null +++ b/awx/lib/site-packages/kombu/utils/eventio.py @@ -0,0 +1,235 @@ +""" +kombu.utils.eventio +=================== + +Evented IO support for multiple platforms. 
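`safe_repr` and `safe_str` exist so that logging can never blow up on an object whose `__repr__` raises. A sketch, assuming the functions above are in scope:

    class Broken(object):
        def __repr__(self):
            raise RuntimeError('repr exploded')

    print(safe_repr(Broken()))   # placeholder text instead of an exception
    print(safe_str(u'caf\xe9'))  # encoded bytes on Python 2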
+ +""" +from __future__ import absolute_import + +import errno +import socket + +from select import select as _selectf + +try: + from select import epoll +except ImportError: + epoll = None # noqa + +try: + from select import ( + kqueue, + kevent, + KQ_EV_ADD, + KQ_EV_DELETE, + KQ_EV_EOF, + KQ_EV_ERROR, + KQ_EV_ENABLE, + KQ_EV_CLEAR, + KQ_FILTER_WRITE, + KQ_FILTER_READ, + KQ_FILTER_VNODE, + KQ_NOTE_WRITE, + KQ_NOTE_EXTEND, + KQ_NOTE_DELETE, + KQ_NOTE_ATTRIB, + ) +except ImportError: + kqueue = kevent = None # noqa + KQ_EV_ADD = KQ_EV_DELETE = KQ_EV_EOF = KQ_EV_ERROR = 0 # noqa + KQ_EV_ENABLE = KQ_EV_CLEAR = KQ_EV_VNODE = 0 # noqa + KQ_FILTER_WRITE = KQ_FILTER_READ = 0 # noqa + KQ_NOTE_WRITE = KQ_NOTE_EXTEND = 0 # noqa + KQ_NOTE_ATTRIB = KQ_NOTE_DELETE = 0 # noqa + +from kombu.syn import detect_environment + +from .compat import get_errno + +__all__ = ['poll'] + +READ = POLL_READ = 0x001 +WRITE = POLL_WRITE = 0x004 +ERR = POLL_ERR = 0x008 | 0x010 + + +class Poller(object): + + def poll(self, timeout): + try: + return self._poll(timeout) + except Exception, exc: + if get_errno(exc) != errno.EINTR: + raise + + +class _epoll(Poller): + + def __init__(self): + self._epoll = epoll() + + def register(self, fd, events): + try: + self._epoll.register(fd, events) + except Exception, exc: + if get_errno(exc) != errno.EEXIST: + raise + + def unregister(self, fd): + try: + self._epoll.unregister(fd) + except socket.error: + pass + except ValueError: + pass + except IOError, exc: + if get_errno(exc) != errno.ENOENT: + raise + + def _poll(self, timeout): + return self._epoll.poll(timeout if timeout is not None else -1) + + def close(self): + self._epoll.close() + + +class _kqueue(Poller): + w_fflags = (KQ_NOTE_WRITE | KQ_NOTE_EXTEND | + KQ_NOTE_ATTRIB | KQ_NOTE_DELETE) + + def __init__(self): + self._kqueue = kqueue() + self._active = {} + self.on_file_change = None + self._kcontrol = self._kqueue.control + + def register(self, fd, events): + self._control(fd, events, KQ_EV_ADD) + self._active[fd] = events + + def unregister(self, fd): + events = self._active.pop(fd, None) + if events: + try: + self._control(fd, events, KQ_EV_DELETE) + except socket.error: + pass + + def watch_file(self, fd): + ev = kevent(fd, + filter=KQ_FILTER_VNODE, + flags=KQ_EV_ADD | KQ_EV_ENABLE | KQ_EV_CLEAR, + fflags=self.w_fflags) + self._kcontrol([ev], 0) + + def unwatch_file(self, fd): + ev = kevent(fd, + filter=KQ_FILTER_VNODE, + flags=KQ_EV_DELETE, + fflags=self.w_fflags) + self._kcontrol([ev], 0) + + def _control(self, fd, events, flags): + if not events: + return + kevents = [] + if events & WRITE: + kevents.append(kevent(fd, + filter=KQ_FILTER_WRITE, + flags=flags)) + if not kevents or events & READ: + kevents.append( + kevent(fd, filter=KQ_FILTER_READ, flags=flags), + ) + control = self._kcontrol + for e in kevents: + try: + control([e], 0) + except ValueError: + pass + + def _poll(self, timeout): + kevents = self._kcontrol(None, 1000, timeout) + events, file_changes = {}, [] + for k in kevents: + fd = k.ident + if k.filter == KQ_FILTER_READ: + events[fd] = events.get(fd, 0) | READ + elif k.filter == KQ_FILTER_WRITE: + if k.flags & KQ_EV_EOF: + events[fd] = ERR + else: + events[fd] = events.get(fd, 0) | WRITE + elif k.filter == KQ_EV_ERROR: + events[fd] = events.get(fd, 0) | ERR + elif k.filter == KQ_FILTER_VNODE: + if k.fflags & KQ_NOTE_DELETE: + self.unregister(fd) + file_changes.append(k) + if file_changes: + self.on_file_change(file_changes) + return events.items() + + def close(self): + self._kqueue.close() + + 
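Exercising the pollers above through the `poll()` factory defined at the end of this module: register one end of a socket pair for READ events and wait for data. A sketch that assumes a Unix host (`socket.socketpair` is unavailable on Windows):

    import socket

    a, b = socket.socketpair()
    poller = poll()                   # epoll, kqueue or select, as available
    poller.register(b.fileno(), READ)

    a.send('ping')
    for fd, event in poller.poll(1.0) or []:
        if fd == b.fileno() and event & READ:
            print(b.recv(4))          # 'ping'
    poller.close()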
+class _select(Poller): + + def __init__(self): + self._all = (self._rfd, + self._wfd, + self._efd) = set(), set(), set() + + def register(self, fd, events): + if events & ERR: + self._efd.add(fd) + self._rfd.add(fd) + if events & WRITE: + self._wfd.add(fd) + if events & READ: + self._rfd.add(fd) + + def unregister(self, fd): + self._rfd.discard(fd) + self._wfd.discard(fd) + self._efd.discard(fd) + + def _poll(self, timeout): + read, write, error = _selectf(self._rfd, self._wfd, self._efd, timeout) + events = {} + for fd in read: + if not isinstance(fd, int): + fd = fd.fileno() + events[fd] = events.get(fd, 0) | READ + for fd in write: + if not isinstance(fd, int): + fd = fd.fileno() + events[fd] = events.get(fd, 0) | WRITE + for fd in error: + if not isinstance(fd, int): + fd = fd.fileno() + events[fd] = events.get(fd, 0) | ERR + return events.items() + + def close(self): + pass + + +def _get_poller(): + if detect_environment() != 'default': + # greenlet + return _select + elif epoll: + # Py2.6+ Linux + return _epoll + elif kqueue: + # Py2.6+ on BSD / Darwin + return _kqueue + else: + return _select + + +def poll(*args, **kwargs): + return _get_poller()(*args, **kwargs) diff --git a/awx/lib/site-packages/kombu/utils/finalize.py b/awx/lib/site-packages/kombu/utils/finalize.py new file mode 100644 index 0000000000..723af4130c --- /dev/null +++ b/awx/lib/site-packages/kombu/utils/finalize.py @@ -0,0 +1,85 @@ +""" +kombu.utils.finalize +==================== + +Execute cleanup handlers when objects go out of scope. + +Taken from :class:`multiprocessing.util.Finalize`. + +""" +from __future__ import absolute_import + +import weakref + +from itertools import count + +__all__ = ['Finalize'] + + +class Finalize(object): + """Object finalization using weakrefs.""" + _count = count().next + _registry = {} + + def __init__(self, obj, callback, args=(), kwargs=None, + exitpriority=None): + + if obj is not None: + self._weakref = weakref.ref(obj, self) + else: + assert exitpriority is not None + + self._callback = callback + self._args = args + self._kwargs = kwargs or {} + self._key = (exitpriority, self._count()) + + self._registry[self._key] = self + + def __call__(self, wr=None): + """Run the callback unless it has already been called or + cancelled.""" + try: + self._registry.pop(self._key) + except KeyError: + pass + else: + try: + return self._callback(*self._args, **self._kwargs) + finally: + self._reset() + + def _reset(self): + self._weakref = self._callback = self._args = \ + self._kwargs = self._key = None + + def cancel(self): + """Cancel finalization of the object.""" + try: + self._registry.pop(self._key) + except KeyError: + pass + else: + self._reset() + + def still_active(self): + self._key in self._registry + + def __repr__(self): + try: + obj = self._weakref() + except (AttributeError, TypeError): + return '' + + if obj is None: + return + + x = '' diff --git a/awx/lib/site-packages/kombu/utils/functional.py b/awx/lib/site-packages/kombu/utils/functional.py new file mode 100644 index 0000000000..34ff06570f --- /dev/null +++ b/awx/lib/site-packages/kombu/utils/functional.py @@ -0,0 +1,51 @@ +class promise(object): + """A promise. + + Evaluated when called or if the :meth:`evaluate` method is called. + The function is evaluated on every access, so the value is not + memoized (see :class:`mpromise`). + + Overloaded operations that will evaluate the promise: + :meth:`__str__`, :meth:`__repr__`, :meth:`__cmp__`. 
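`Finalize` ties a cleanup callback to the life of another object through a weakref. A sketch that relies on CPython's refcounting to collect the referent immediately; `DummyConnection` and `close_connection` are illustrative names:

    class DummyConnection(object):
        pass

    def close_connection(name):
        print('closing %s' % name)

    conn = DummyConnection()
    finalizer = Finalize(conn, close_connection, args=('conn-1',))

    del conn      # the weakref callback fires: prints 'closing conn-1'
    finalizer()   # no-op now; the registry entry was already popped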
+ + """ + + def __init__(self, fun, *args, **kwargs): + self._fun = fun + self._args = args + self._kwargs = kwargs + + def __call__(self): + return self.evaluate() + + def evaluate(self): + return self._fun(*self._args, **self._kwargs) + + def __str__(self): + return str(self()) + + def __repr__(self): + return repr(self()) + + def __cmp__(self, rhs): + if isinstance(rhs, self.__class__): + return -cmp(rhs, self()) + return cmp(self(), rhs) + + def __eq__(self, rhs): + return self() == rhs + + def __deepcopy__(self, memo): + memo[id(self)] = self + return self + + def __reduce__(self): + return (self.__class__, (self._fun, ), {'_args': self._args, + '_kwargs': self._kwargs}) + + +def maybe_promise(value): + """Evaluates if the value is a promise.""" + if isinstance(value, promise): + return value.evaluate() + return value diff --git a/awx/lib/site-packages/kombu/utils/limits.py b/awx/lib/site-packages/kombu/utils/limits.py new file mode 100644 index 0000000000..b6fd818187 --- /dev/null +++ b/awx/lib/site-packages/kombu/utils/limits.py @@ -0,0 +1,68 @@ +""" +kombu.utils.limits +================== + +Token bucket implementation for rate limiting. + +""" +import time + +__all__ = ['TokenBucket'] + + +class TokenBucket(object): + """Token Bucket Algorithm. + + See http://en.wikipedia.org/wiki/Token_Bucket + Most of this code was stolen from an entry in the ASPN Python Cookbook: + http://code.activestate.com/recipes/511490/ + + .. admonition:: Thread safety + + This implementation may not be thread safe. + + """ + + #: The rate in tokens/second that the bucket will be refilled + fill_rate = None + + #: Maximum number of tokensin the bucket. + capacity = 1 + + #: Timestamp of the last time a token was taken out of the bucket. + timestamp = None + + def __init__(self, fill_rate, capacity=1): + self.capacity = float(capacity) + self._tokens = capacity + self.fill_rate = float(fill_rate) + self.timestamp = time.time() + + def can_consume(self, tokens=1): + """Returns :const:`True` if `tokens` number of tokens can be consumed + from the bucket.""" + if tokens <= self._get_tokens(): + self._tokens -= tokens + return True + return False + + def expected_time(self, tokens=1): + """Returns the expected time in seconds when a new token should be + available. + + .. admonition:: Warning + + This consumes a token from the bucket. 
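`promise` defers a call until something forces evaluation, and `maybe_promise` lets an API accept either plain values or promises. For example, assuming both names from the module above are in scope:

    p = promise(pow, 2, 10)

    print(maybe_promise(p))      # 1024 -- evaluated on access
    print(maybe_promise(1024))   # plain values pass through untouched
    print(str(p))                # '1024' -- __str__ also forces evaluation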
+ + """ + _tokens = self._get_tokens() + tokens = max(tokens, _tokens) + return (tokens - _tokens) / self.fill_rate + + def _get_tokens(self): + if self._tokens < self.capacity: + now = time.time() + delta = self.fill_rate * (now - self.timestamp) + self._tokens = min(self.capacity, self._tokens + delta) + self.timestamp = now + return self._tokens diff --git a/awx/lib/site-packages/kombu/utils/text.py b/awx/lib/site-packages/kombu/utils/text.py new file mode 100644 index 0000000000..20444710cb --- /dev/null +++ b/awx/lib/site-packages/kombu/utils/text.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import + +from difflib import SequenceMatcher + + +def fmatch_iter(needle, haystack, min_ratio=0.6): + for key in haystack: + ratio = SequenceMatcher(None, needle, key).ratio() + if ratio >= min_ratio: + yield ratio, key + + +def fmatch_best(needle, haystack, min_ratio=0.6): + try: + return sorted( + fmatch_iter(needle, haystack, min_ratio), reverse=True, + )[0][1] + except IndexError: + pass diff --git a/awx/lib/site-packages/kombu/utils/url.py b/awx/lib/site-packages/kombu/utils/url.py new file mode 100644 index 0000000000..af703381eb --- /dev/null +++ b/awx/lib/site-packages/kombu/utils/url.py @@ -0,0 +1,37 @@ +from urllib import unquote +from urlparse import urlparse +try: + from urlparse import parse_qsl +except ImportError: # pragma: no cover + from cgi import parse_qsl # noqa + +from . import kwdict + + +def _parse_url(url): + scheme = urlparse(url).scheme + schemeless = url[len(scheme) + 3:] + # parse with HTTP URL semantics + parts = urlparse('http://' + schemeless) + + # The first pymongo.Connection() argument (host) can be + # a mongodb connection URI. If this is the case, don't + # use port but let pymongo get the port(s) from the URI instead. + # This enables the use of replica sets and sharding. + # See pymongo.Connection() for more info. + port = scheme != 'mongodb' and parts.port or None + hostname = schemeless if scheme == 'mongodb' else parts.hostname + path = parts.path or '' + path = path[1:] if path and path[0] == '/' else path + return (scheme, unquote(hostname or '') or None, port, + unquote(parts.username or '') or None, + unquote(parts.password or '') or None, + unquote(path or '') or None, + kwdict(dict(parse_qsl(parts.query)))) + + +def parse_url(url): + scheme, host, port, user, password, path, query = _parse_url(url) + return dict(transport=scheme, hostname=host, + port=port, userid=user, + password=password, virtual_host=path, **query) diff --git a/awx/lib/site-packages/markdown/__init__.py b/awx/lib/site-packages/markdown/__init__.py new file mode 100644 index 0000000000..1dbadddc32 --- /dev/null +++ b/awx/lib/site-packages/markdown/__init__.py @@ -0,0 +1,443 @@ +""" +Python Markdown +=============== + +Python Markdown converts Markdown to HTML and can be used as a library or +called from the command line. + +## Basic usage as a module: + + import markdown + html = markdown.markdown(your_text_string) + +See for more +information and instructions on how to extend the functionality of +Python Markdown. Read that before you try modifying this file. + +## Authors and License + +Started by [Manfred Stienstra](http://www.dwerg.net/). Continued and +maintained by [Yuri Takhteyev](http://www.freewisdom.org), [Waylan +Limberg](http://achinghead.com/) and [Artem Yunusov](http://blog.splyer.com). + +Contact: markdown@freewisdom.org + +Copyright 2007-2013 The Python Markdown Project (v. 1.7 and later) +Copyright 200? 
Django Software Foundation (OrderedDict implementation) +Copyright 2004, 2005, 2006 Yuri Takhteyev (v. 0.2-1.6b) +Copyright 2004 Manfred Stienstra (the original version) + +License: BSD (see LICENSE for details). +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from .__version__ import version, version_info +import re +import codecs +import sys +import logging +from . import util +from .preprocessors import build_preprocessors +from .blockprocessors import build_block_parser +from .treeprocessors import build_treeprocessors +from .inlinepatterns import build_inlinepatterns +from .postprocessors import build_postprocessors +from .extensions import Extension +from .serializers import to_html_string, to_xhtml_string + +__all__ = ['Markdown', 'markdown', 'markdownFromFile'] + +logger = logging.getLogger('MARKDOWN') + + +class Markdown(object): + """Convert Markdown to HTML.""" + + doc_tag = "div" # Element used to wrap document - later removed + + option_defaults = { + 'html_replacement_text' : '[HTML_REMOVED]', + 'tab_length' : 4, + 'enable_attributes' : True, + 'smart_emphasis' : True, + 'lazy_ol' : True, + } + + output_formats = { + 'html' : to_html_string, + 'html4' : to_html_string, + 'html5' : to_html_string, + 'xhtml' : to_xhtml_string, + 'xhtml1': to_xhtml_string, + 'xhtml5': to_xhtml_string, + } + + ESCAPED_CHARS = ['\\', '`', '*', '_', '{', '}', '[', ']', + '(', ')', '>', '#', '+', '-', '.', '!'] + + def __init__(self, *args, **kwargs): + """ + Creates a new Markdown instance. + + Keyword arguments: + + * extensions: A list of extensions. + If they are of type string, the module mdx_name.py will be loaded. + If they are a subclass of markdown.Extension, they will be used + as-is. + * extension_configs: Configuration settingis for extensions. + * output_format: Format of output. Supported formats are: + * "xhtml1": Outputs XHTML 1.x. Default. + * "xhtml5": Outputs XHTML style tags of HTML 5 + * "xhtml": Outputs latest supported version of XHTML (currently XHTML 1.1). + * "html4": Outputs HTML 4 + * "html5": Outputs HTML style tags of HTML 5 + * "html": Outputs latest supported version of HTML (currently HTML 4). + Note that it is suggested that the more specific formats ("xhtml1" + and "html4") be used as "xhtml" or "html" may change in the future + if it makes sense at that time. + * safe_mode: Disallow raw html. One of "remove", "replace" or "escape". + * html_replacement_text: Text used when safe_mode is set to "replace". + * tab_length: Length of tabs in the source. Default: 4 + * enable_attributes: Enable the conversion of attributes. Default: True + * smart_emphasis: Treat `_connected_words_` intelegently Default: True + * lazy_ol: Ignore number of first item of ordered lists. 
+
+        """
+
+        # For backward compatibility, loop through old positional args
+        pos = ['extensions', 'extension_configs', 'safe_mode', 'output_format']
+        c = 0
+        for arg in args:
+            if pos[c] not in kwargs:
+                kwargs[pos[c]] = arg
+            c += 1
+            if c == len(pos):
+                # ignore any additional args
+                break
+
+        # Loop through kwargs and assign defaults
+        for option, default in self.option_defaults.items():
+            setattr(self, option, kwargs.get(option, default))
+
+        self.safeMode = kwargs.get('safe_mode', False)
+        if self.safeMode and 'enable_attributes' not in kwargs:
+            # Disable attributes in safeMode when not explicitly set
+            self.enable_attributes = False
+
+        self.registeredExtensions = []
+        self.docType = ""
+        self.stripTopLevelTags = True
+
+        self.build_parser()
+
+        self.references = {}
+        self.htmlStash = util.HtmlStash()
+        self.set_output_format(kwargs.get('output_format', 'xhtml1'))
+        self.registerExtensions(extensions=kwargs.get('extensions', []),
+                                configs=kwargs.get('extension_configs', {}))
+        self.reset()
+
+    def build_parser(self):
+        """ Build the parser from the various parts. """
+        self.preprocessors = build_preprocessors(self)
+        self.parser = build_block_parser(self)
+        self.inlinePatterns = build_inlinepatterns(self)
+        self.treeprocessors = build_treeprocessors(self)
+        self.postprocessors = build_postprocessors(self)
+        return self
+
+    def registerExtensions(self, extensions, configs):
+        """
+        Register extensions with this instance of Markdown.
+
+        Keyword arguments:
+
+        * extensions: A list of extensions, which can either
+           be strings or objects. See the docstring on Markdown.
+        * configs: A dictionary mapping module names to config options.
+
+        """
+        for ext in extensions:
+            if isinstance(ext, util.string_type):
+                ext = self.build_extension(ext, configs.get(ext, []))
+            if isinstance(ext, Extension):
+                ext.extendMarkdown(self, globals())
+            elif ext is not None:
+                raise TypeError(
+                    'Extension "%s.%s" must be of type: "markdown.Extension"'
+                    % (ext.__class__.__module__, ext.__class__.__name__))
+
+        return self
+
+    def build_extension(self, ext_name, configs = []):
+        """Build extension by name, then return the module.
+
+        The extension name may contain arguments as part of the string in the
+        following format: "extname(key1=value1,key2=value2)"
+
+        """
+
+        # Parse extensions config params (ignore the order)
+        configs = dict(configs)
+        pos = ext_name.find("(") # find the first "("
+        if pos > 0:
+            ext_args = ext_name[pos+1:-1]
+            ext_name = ext_name[:pos]
+            pairs = [x.split("=") for x in ext_args.split(",")]
+            configs.update([(x.strip(), y.strip()) for (x, y) in pairs])
+
+        # Setup the module name
+        module_name = ext_name
+        if '.' not in ext_name:
+            module_name = '.'.join(['markdown.extensions', ext_name])
+
+        # Try loading the extension first from one place, then another
+        try: # New style (markdown.extensions.<extension>)
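+            # Illustrative lookup: an extensions-list entry such as 'toc' is
+            # tried here as markdown.extensions.toc first and, on failure,
+            # below as a top-level mdx_toc module (names are examples only).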
+            module = __import__(module_name, {}, {}, [module_name.rpartition('.')[0]])
+        except ImportError:
+            module_name_old_style = '_'.join(['mdx', ext_name])
+            try: # Old style (mdx_<extension>)
+                module = __import__(module_name_old_style)
+            except ImportError as e:
+                message = "Failed loading extension '%s' from '%s' or '%s'" \
+                    % (ext_name, module_name, module_name_old_style)
+                e.args = (message,) + e.args[1:]
+                raise
+
+        # If the module is loaded successfully, we expect it to define a
+        # function called makeExtension()
+        try:
+            return module.makeExtension(configs.items())
+        except AttributeError as e:
+            message = e.args[0]
+            message = "Failed to initiate extension " \
+                      "'%s': %s" % (ext_name, message)
+            e.args = (message,) + e.args[1:]
+            raise
+
+    def registerExtension(self, extension):
+        """ This gets called by the extension """
+        self.registeredExtensions.append(extension)
+        return self
+
+    def reset(self):
+        """
+        Resets all state variables so that we can start with a new text.
+        """
+        self.htmlStash.reset()
+        self.references.clear()
+
+        for extension in self.registeredExtensions:
+            if hasattr(extension, 'reset'):
+                extension.reset()
+
+        return self
+
+    def set_output_format(self, format):
+        """ Set the output format for the class instance. """
+        self.output_format = format.lower()
+        try:
+            self.serializer = self.output_formats[self.output_format]
+        except KeyError as e:
+            valid_formats = list(self.output_formats.keys())
+            valid_formats.sort()
+            message = 'Invalid Output Format: "%s". Use one of %s.' \
+                       % (self.output_format,
+                          '"' + '", "'.join(valid_formats) + '"')
+            e.args = (message,) + e.args[1:]
+            raise
+        return self
+
+    def convert(self, source):
+        """
+        Convert markdown to serialized XHTML or HTML.
+
+        Keyword arguments:
+
+        * source: Source text as a Unicode string.
+
+        Markdown processing takes place in five steps:
+
+        1. A bunch of "preprocessors" munge the input text.
+        2. BlockParser() parses the high-level structural elements of the
+           pre-processed text into an ElementTree.
+        3. A bunch of "treeprocessors" are run against the ElementTree. One
+           such treeprocessor runs InlinePatterns against the ElementTree,
+           detecting inline markup.
+        4. Some post-processors are run against the text after the ElementTree
+           has been serialized into text.
+        5. The output is written to a string.
+
+        """
+
+        # Fixup the source text
+        if not source.strip():
+            return ''  # a blank unicode string
+
+        try:
+            source = util.text_type(source)
+        except UnicodeDecodeError as e:
+            # Customise error message while maintaining original traceback
+            e.reason += '. -- Note: Markdown only accepts unicode input!'
+            raise
+
+        # Split into lines and run the line preprocessors.
+        self.lines = source.split("\n")
+        for prep in self.preprocessors.values():
+            self.lines = prep.run(self.lines)
+
+        # Parse the high-level elements.
+        root = self.parser.parseDocument(self.lines).getroot()
+
+        # Run the tree-processors
+        for treeprocessor in self.treeprocessors.values():
+            newRoot = treeprocessor.run(root)
+            if newRoot:
+                root = newRoot
+
+        # Serialize _properly_. Strip top-level tags.
+        output = self.serializer(root)
+        if self.stripTopLevelTags:
+            try:
+                start = output.index('<%s>'%self.doc_tag)+len(self.doc_tag)+2
+                end = output.rindex('</%s>'%self.doc_tag)
+                output = output[start:end].strip()
+            except ValueError:
+                if output.strip().endswith('<%s />'%self.doc_tag):
+                    # We have an empty document
+                    output = ''
+                else:
+                    # We have a serious problem
+                    raise ValueError('Markdown failed to strip top-level tags.
Document=%r' % output.strip()) + + # Run the text post-processors + for pp in self.postprocessors.values(): + output = pp.run(output) + + return output.strip() + + def convertFile(self, input=None, output=None, encoding=None): + """Converts a markdown file and returns the HTML as a unicode string. + + Decodes the file using the provided encoding (defaults to utf-8), + passes the file content to markdown, and outputs the html to either + the provided stream or the file with provided name, using the same + encoding as the source file. The 'xmlcharrefreplace' error handler is + used when encoding the output. + + **Note:** This is the only place that decoding and encoding of unicode + takes place in Python-Markdown. (All other code is unicode-in / + unicode-out.) + + Keyword arguments: + + * input: File object or path. Reads from stdin if `None`. + * output: File object or path. Writes to stdout if `None`. + * encoding: Encoding of input and output files. Defaults to utf-8. + + """ + + encoding = encoding or "utf-8" + + # Read the source + if input: + if isinstance(input, util.string_type): + input_file = codecs.open(input, mode="r", encoding=encoding) + else: + input_file = codecs.getreader(encoding)(input) + text = input_file.read() + input_file.close() + else: + text = sys.stdin.read() + if not isinstance(text, util.text_type): + text = text.decode(encoding) + + text = text.lstrip('\ufeff') # remove the byte-order mark + + # Convert + html = self.convert(text) + + # Write to file or stdout + if output: + if isinstance(output, util.string_type): + output_file = codecs.open(output, "w", + encoding=encoding, + errors="xmlcharrefreplace") + output_file.write(html) + output_file.close() + else: + writer = codecs.getwriter(encoding) + output_file = writer(output, errors="xmlcharrefreplace") + output_file.write(html) + # Don't close here. User may want to write more. + else: + # Encode manually and write bytes to stdout. + html = html.encode(encoding, "xmlcharrefreplace") + try: + # Write bytes directly to buffer (Python 3). + sys.stdout.buffer.write(html) + except AttributeError: + # Probably Python 2, which works with bytes by default. + sys.stdout.write(html) + + return self + + +""" +EXPORTED FUNCTIONS +============================================================================= + +Those are the two functions we really mean to export: markdown() and +markdownFromFile(). +""" + +def markdown(text, *args, **kwargs): + """Convert a markdown string to HTML and return HTML as a unicode string. + + This is a shortcut function for `Markdown` class to cover the most + basic use case. It initializes an instance of Markdown, loads the + necessary extensions and runs the parser on the given text. + + Keyword arguments: + + * text: Markdown formatted text as Unicode or ASCII string. + * Any arguments accepted by the Markdown class. + + Returns: An HTML document as a string. + + """ + md = Markdown(*args, **kwargs) + return md.convert(text) + + +def markdownFromFile(*args, **kwargs): + """Read markdown code from a file and write it to a file or a stream. + + This is a shortcut function which initializes an instance of Markdown, + and calls the convertFile method rather than convert. + + Keyword arguments: + + * input: a file name or readable object. + * output: a file name or writable object. + * encoding: Encoding of input and output. + * Any arguments accepted by the Markdown class. 
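+
+    A minimal illustrative call (the file names here are hypothetical):
+
+        markdownFromFile(input='in.md', output='out.html')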
+ + """ + # For backward compatibility loop through positional args + pos = ['input', 'output', 'extensions', 'encoding'] + c = 0 + for arg in args: + if pos[c] not in kwargs: + kwargs[pos[c]] = arg + c += 1 + if c == len(pos): + break + + md = Markdown(**kwargs) + md.convertFile(kwargs.get('input', None), + kwargs.get('output', None), + kwargs.get('encoding', None)) + diff --git a/awx/lib/site-packages/markdown/__main__.py b/awx/lib/site-packages/markdown/__main__.py new file mode 100644 index 0000000000..8ee8c8222e --- /dev/null +++ b/awx/lib/site-packages/markdown/__main__.py @@ -0,0 +1,87 @@ +""" +COMMAND-LINE SPECIFIC STUFF +============================================================================= + +""" + +import markdown +import sys +import optparse + +import logging +from logging import DEBUG, INFO, CRITICAL + +logger = logging.getLogger('MARKDOWN') + +def parse_options(): + """ + Define and parse `optparse` options for command-line usage. + """ + usage = """%prog [options] [INPUTFILE] + (STDIN is assumed if no INPUTFILE is given)""" + desc = "A Python implementation of John Gruber's Markdown. " \ + "http://packages.python.org/Markdown/" + ver = "%%prog %s" % markdown.version + + parser = optparse.OptionParser(usage=usage, description=desc, version=ver) + parser.add_option("-f", "--file", dest="filename", default=None, + help="Write output to OUTPUT_FILE. Defaults to STDOUT.", + metavar="OUTPUT_FILE") + parser.add_option("-e", "--encoding", dest="encoding", + help="Encoding for input and output files.",) + parser.add_option("-q", "--quiet", default = CRITICAL, + action="store_const", const=CRITICAL+10, dest="verbose", + help="Suppress all warnings.") + parser.add_option("-v", "--verbose", + action="store_const", const=INFO, dest="verbose", + help="Print all warnings.") + parser.add_option("-s", "--safe", dest="safe", default=False, + metavar="SAFE_MODE", + help="'replace', 'remove' or 'escape' HTML tags in input") + parser.add_option("-o", "--output_format", dest="output_format", + default='xhtml1', metavar="OUTPUT_FORMAT", + help="'xhtml1' (default), 'html4' or 'html5'.") + parser.add_option("--noisy", + action="store_const", const=DEBUG, dest="verbose", + help="Print debug messages.") + parser.add_option("-x", "--extension", action="append", dest="extensions", + help = "Load extension EXTENSION.", metavar="EXTENSION") + parser.add_option("-n", "--no_lazy_ol", dest="lazy_ol", + action='store_false', default=True, + help="Observe number of first item of ordered lists.") + + (options, args) = parser.parse_args() + + if len(args) == 0: + input_file = None + else: + input_file = args[0] + + if not options.extensions: + options.extensions = [] + + return {'input': input_file, + 'output': options.filename, + 'safe_mode': options.safe, + 'extensions': options.extensions, + 'encoding': options.encoding, + 'output_format': options.output_format, + 'lazy_ol': options.lazy_ol}, options.verbose + +def run(): + """Run Markdown from the command line.""" + + # Parse options and adjust logging level if necessary + options, logging_level = parse_options() + if not options: sys.exit(2) + logger.setLevel(logging_level) + logger.addHandler(logging.StreamHandler()) + + # Run + markdown.markdownFromFile(**options) + +if __name__ == '__main__': + # Support running module as a commandline command. + # Python 2.5 & 2.6 do: `python -m markdown.__main__ [options] [args]`. + # Python 2.7 & 3.x do: `python -m markdown [options] [args]`. 
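+    # Illustrative invocation (the file name is hypothetical):
+    #     python -m markdown -x tables -f out.html in.md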
+ run() diff --git a/awx/lib/site-packages/markdown/__version__.py b/awx/lib/site-packages/markdown/__version__.py new file mode 100644 index 0000000000..a13559c4e3 --- /dev/null +++ b/awx/lib/site-packages/markdown/__version__.py @@ -0,0 +1,28 @@ +# +# markdown/__version__.py +# +# version_info should conform to PEP 386 +# (major, minor, micro, alpha/beta/rc/final, #) +# (1, 1, 2, 'alpha', 0) => "1.1.2.dev" +# (1, 2, 0, 'beta', 2) => "1.2b2" +version_info = (2, 3, 1, 'final', 0) + +def _get_version(): + " Returns a PEP 386-compliant version number from version_info. " + assert len(version_info) == 5 + assert version_info[3] in ('alpha', 'beta', 'rc', 'final') + + parts = 2 if version_info[2] == 0 else 3 + main = '.'.join(map(str, version_info[:parts])) + + sub = '' + if version_info[3] == 'alpha' and version_info[4] == 0: + # TODO: maybe append some sort of git info here?? + sub = '.dev' + elif version_info[3] != 'final': + mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'} + sub = mapping[version_info[3]] + str(version_info[4]) + + return str(main + sub) + +version = _get_version() diff --git a/awx/lib/site-packages/markdown/blockparser.py b/awx/lib/site-packages/markdown/blockparser.py new file mode 100644 index 0000000000..4504a16f51 --- /dev/null +++ b/awx/lib/site-packages/markdown/blockparser.py @@ -0,0 +1,99 @@ +from __future__ import unicode_literals +from __future__ import absolute_import +from . import util +from . import odict + +class State(list): + """ Track the current and nested state of the parser. + + This utility class is used to track the state of the BlockParser and + support multiple levels if nesting. It's just a simple API wrapped around + a list. Each time a state is set, that state is appended to the end of the + list. Each time a state is reset, that state is removed from the end of + the list. + + Therefore, each time a state is set for a nested block, that state must be + reset when we back out of that level of nesting or the state could be + corrupted. + + While all the methods of a list object are available, only the three + defined below need be used. + + """ + + def set(self, state): + """ Set a new state. """ + self.append(state) + + def reset(self): + """ Step back one step in nested state. """ + self.pop() + + def isstate(self, state): + """ Test that top (current) level is of given state. """ + if len(self): + return self[-1] == state + else: + return False + +class BlockParser: + """ Parse Markdown blocks into an ElementTree object. + + A wrapper class that stitches the various BlockProcessors together, + looping through them and creating an ElementTree object. + """ + + def __init__(self, markdown): + self.blockprocessors = odict.OrderedDict() + self.state = State() + self.markdown = markdown + + def parseDocument(self, lines): + """ Parse a markdown document into an ElementTree. + + Given a list of lines, an ElementTree object (not just a parent Element) + is created and the root element is passed to the parser as the parent. + The ElementTree object is returned. + + This should only be called on an entire document, not pieces. + + """ + # Create a ElementTree from the lines + self.root = util.etree.Element(self.markdown.doc_tag) + self.parseChunk(self.root, '\n'.join(lines)) + return util.etree.ElementTree(self.root) + + def parseChunk(self, parent, text): + """ Parse a chunk of markdown text and attach to given etree node. 
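+
+        For example, a block processor or extension might hand the parser a
+        prepared chunk directly (illustrative call):
+
+            self.parser.parseChunk(parent, 'A block.\n\nAnother block.')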
+
+        While the ``text`` argument is generally assumed to contain multiple
+        blocks which will be split on blank lines, it could contain only one
+        block. Generally, this method would be called by extensions when
+        block parsing is required.
+
+        The ``parent`` etree Element passed in is altered in place.
+        Nothing is returned.
+
+        """
+        self.parseBlocks(parent, text.split('\n\n'))
+
+    def parseBlocks(self, parent, blocks):
+        """ Process blocks of markdown text and attach to given etree node.
+
+        Given a list of ``blocks``, each blockprocessor is stepped through
+        until there are no blocks left. While an extension could potentially
+        call this method directly, it's generally expected to be used internally.
+
+        This is a public method as an extension may need to add/alter additional
+        BlockProcessors which call this method to recursively parse a nested
+        block.
+
+        """
+        while blocks:
+            for processor in self.blockprocessors.values():
+                if processor.test(parent, blocks[0]):
+                    if processor.run(parent, blocks) is not False:
+                        # run returns True or None
+                        break
+
+
diff --git a/awx/lib/site-packages/markdown/blockprocessors.py b/awx/lib/site-packages/markdown/blockprocessors.py
new file mode 100644
index 0000000000..61977b49a8
--- /dev/null
+++ b/awx/lib/site-packages/markdown/blockprocessors.py
@@ -0,0 +1,558 @@
+"""
+CORE MARKDOWN BLOCKPARSER
+===========================================================================
+
+This parser handles basic parsing of Markdown blocks. It doesn't concern itself
+with inline elements such as **bold** or *italics*, but rather just catches
+blocks, lists, quotes, etc.
+
+The BlockParser is made up of a bunch of BlockProcessors, each handling a
+different type of block. Extensions may add/replace/remove BlockProcessors
+as they need to alter how markdown blocks are parsed.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import unicode_literals
+import logging
+import re
+from . import util
+from .blockparser import BlockParser
+
+logger = logging.getLogger('MARKDOWN')
+
+
+def build_block_parser(md_instance, **kwargs):
+    """ Build the default block parser used by Markdown. """
+    parser = BlockParser(md_instance)
+    parser.blockprocessors['empty'] = EmptyBlockProcessor(parser)
+    parser.blockprocessors['indent'] = ListIndentProcessor(parser)
+    parser.blockprocessors['code'] = CodeBlockProcessor(parser)
+    parser.blockprocessors['hashheader'] = HashHeaderProcessor(parser)
+    parser.blockprocessors['setextheader'] = SetextHeaderProcessor(parser)
+    parser.blockprocessors['hr'] = HRProcessor(parser)
+    parser.blockprocessors['olist'] = OListProcessor(parser)
+    parser.blockprocessors['ulist'] = UListProcessor(parser)
+    parser.blockprocessors['quote'] = BlockQuoteProcessor(parser)
+    parser.blockprocessors['paragraph'] = ParagraphProcessor(parser)
+    return parser
+
+
+class BlockProcessor:
+    """ Base class for block processors.
+
+    Each subclass will provide the methods below to work with the source and
+    tree. Each processor will need to define its own ``test`` and ``run``
+    methods. The ``test`` method should return True or False, to indicate
+    whether the current block should be processed by this processor. If the
+    test passes, the parser will call the processor's ``run`` method.
+
+    """
+
+    def __init__(self, parser):
+        self.parser = parser
+        self.tab_length = parser.markdown.tab_length
+
+    def lastChild(self, parent):
+        """ Return the last child of an etree element.
""" + if len(parent): + return parent[-1] + else: + return None + + def detab(self, text): + """ Remove a tab from the front of each line of the given text. """ + newtext = [] + lines = text.split('\n') + for line in lines: + if line.startswith(' '*self.tab_length): + newtext.append(line[self.tab_length:]) + elif not line.strip(): + newtext.append('') + else: + break + return '\n'.join(newtext), '\n'.join(lines[len(newtext):]) + + def looseDetab(self, text, level=1): + """ Remove a tab from front of lines but allowing dedented lines. """ + lines = text.split('\n') + for i in range(len(lines)): + if lines[i].startswith(' '*self.tab_length*level): + lines[i] = lines[i][self.tab_length*level:] + return '\n'.join(lines) + + def test(self, parent, block): + """ Test for block type. Must be overridden by subclasses. + + As the parser loops through processors, it will call the ``test`` method + on each to determine if the given block of text is of that type. This + method must return a boolean ``True`` or ``False``. The actual method of + testing is left to the needs of that particular block type. It could + be as simple as ``block.startswith(some_string)`` or a complex regular + expression. As the block type may be different depending on the parent + of the block (i.e. inside a list), the parent etree element is also + provided and may be used as part of the test. + + Keywords: + + * ``parent``: A etree element which will be the parent of the block. + * ``block``: A block of text from the source which has been split at + blank lines. + """ + pass + + def run(self, parent, blocks): + """ Run processor. Must be overridden by subclasses. + + When the parser determines the appropriate type of a block, the parser + will call the corresponding processor's ``run`` method. This method + should parse the individual lines of the block and append them to + the etree. + + Note that both the ``parent`` and ``etree`` keywords are pointers + to instances of the objects which should be edited in place. Each + processor must make changes to the existing objects as there is no + mechanism to return new/different objects to replace them. + + This means that this method should be adding SubElements or adding text + to the parent, and should remove (``pop``) or add (``insert``) items to + the list of blocks. + + Keywords: + + * ``parent``: A etree element which is the parent of the current block. + * ``blocks``: A list of all remaining blocks of the document. + """ + pass + + +class ListIndentProcessor(BlockProcessor): + """ Process children of list items. + + Example: + * a list item + process this part + + or this part + + """ + + ITEM_TYPES = ['li'] + LIST_TYPES = ['ul', 'ol'] + + def __init__(self, *args): + BlockProcessor.__init__(self, *args) + self.INDENT_RE = re.compile(r'^(([ ]{%s})+)'% self.tab_length) + + def test(self, parent, block): + return block.startswith(' '*self.tab_length) and \ + not self.parser.state.isstate('detabbed') and \ + (parent.tag in self.ITEM_TYPES or \ + (len(parent) and parent[-1] and \ + (parent[-1].tag in self.LIST_TYPES) + ) + ) + + def run(self, parent, blocks): + block = blocks.pop(0) + level, sibling = self.get_level(parent, block) + block = self.looseDetab(block, level) + + self.parser.state.set('detabbed') + if parent.tag in self.ITEM_TYPES: + # It's possible that this parent has a 'ul' or 'ol' child list + # with a member. If that is the case, then that should be the + # parent. 
This is intended to catch the edge case of an indented + # list whose first member was parsed previous to this point + # see OListProcessor + if len(parent) and parent[-1].tag in self.LIST_TYPES: + self.parser.parseBlocks(parent[-1], [block]) + else: + # The parent is already a li. Just parse the child block. + self.parser.parseBlocks(parent, [block]) + elif sibling.tag in self.ITEM_TYPES: + # The sibling is a li. Use it as parent. + self.parser.parseBlocks(sibling, [block]) + elif len(sibling) and sibling[-1].tag in self.ITEM_TYPES: + # The parent is a list (``ol`` or ``ul``) which has children. + # Assume the last child li is the parent of this block. + if sibling[-1].text: + # If the parent li has text, that text needs to be moved to a p + # The p must be 'inserted' at beginning of list in the event + # that other children already exist i.e.; a nested sublist. + p = util.etree.Element('p') + p.text = sibling[-1].text + sibling[-1].text = '' + sibling[-1].insert(0, p) + self.parser.parseChunk(sibling[-1], block) + else: + self.create_item(sibling, block) + self.parser.state.reset() + + def create_item(self, parent, block): + """ Create a new li and parse the block with it as the parent. """ + li = util.etree.SubElement(parent, 'li') + self.parser.parseBlocks(li, [block]) + + def get_level(self, parent, block): + """ Get level of indent based on list level. """ + # Get indent level + m = self.INDENT_RE.match(block) + if m: + indent_level = len(m.group(1))/self.tab_length + else: + indent_level = 0 + if self.parser.state.isstate('list'): + # We're in a tightlist - so we already are at correct parent. + level = 1 + else: + # We're in a looselist - so we need to find parent. + level = 0 + # Step through children of tree to find matching indent level. + while indent_level > level: + child = self.lastChild(parent) + if child and (child.tag in self.LIST_TYPES or child.tag in self.ITEM_TYPES): + if child.tag in self.LIST_TYPES: + level += 1 + parent = child + else: + # No more child levels. If we're short of indent_level, + # we have a code block. So we stop here. + break + return level, parent + + +class CodeBlockProcessor(BlockProcessor): + """ Process code blocks. """ + + def test(self, parent, block): + return block.startswith(' '*self.tab_length) + + def run(self, parent, blocks): + sibling = self.lastChild(parent) + block = blocks.pop(0) + theRest = '' + if sibling and sibling.tag == "pre" and len(sibling) \ + and sibling[0].tag == "code": + # The previous block was a code block. As blank lines do not start + # new code blocks, append this block to the previous, adding back + # linebreaks removed from the split into a list. + code = sibling[0] + block, theRest = self.detab(block) + code.text = util.AtomicString('%s\n%s\n' % (code.text, block.rstrip())) + else: + # This is a new codeblock. Create the elements and insert text. + pre = util.etree.SubElement(parent, 'pre') + code = util.etree.SubElement(pre, 'code') + block, theRest = self.detab(block) + code.text = util.AtomicString('%s\n' % block.rstrip()) + if theRest: + # This block contained unindented line(s) after the first indented + # line. Insert these lines as the first block of the master blocks + # list for future processing. 
+            blocks.insert(0, theRest)
+
+
+class BlockQuoteProcessor(BlockProcessor):
+
+    RE = re.compile(r'(^|\n)[ ]{0,3}>[ ]?(.*)')
+
+    def test(self, parent, block):
+        return bool(self.RE.search(block))
+
+    def run(self, parent, blocks):
+        block = blocks.pop(0)
+        m = self.RE.search(block)
+        if m:
+            before = block[:m.start()] # Lines before blockquote
+            # Pass lines before blockquote in recursively for parsing first.
+            self.parser.parseBlocks(parent, [before])
+            # Remove ``> `` from beginning of each line.
+            block = '\n'.join([self.clean(line) for line in
+                            block[m.start():].split('\n')])
+        sibling = self.lastChild(parent)
+        if sibling and sibling.tag == "blockquote":
+            # Previous block was a blockquote so set that as this block's parent
+            quote = sibling
+        else:
+            # This is a new blockquote. Create a new parent element.
+            quote = util.etree.SubElement(parent, 'blockquote')
+        # Recursively parse block with blockquote as parent.
+        # change parser state so blockquotes embedded in lists use p tags
+        self.parser.state.set('blockquote')
+        self.parser.parseChunk(quote, block)
+        self.parser.state.reset()
+
+    def clean(self, line):
+        """ Remove ``>`` from beginning of a line. """
+        m = self.RE.match(line)
+        if line.strip() == ">":
+            return ""
+        elif m:
+            return m.group(2)
+        else:
+            return line
+
+class OListProcessor(BlockProcessor):
+    """ Process ordered list blocks. """
+
+    TAG = 'ol'
+    # Detect an item (``1. item``). ``group(1)`` contains contents of item.
+    RE = re.compile(r'^[ ]{0,3}\d+\.[ ]+(.*)')
+    # Detect items on secondary lines. they can be of either list type.
+    CHILD_RE = re.compile(r'^[ ]{0,3}((\d+\.)|[*+-])[ ]+(.*)')
+    # Detect indented (nested) items of either type
+    INDENT_RE = re.compile(r'^[ ]{4,7}((\d+\.)|[*+-])[ ]+.*')
+    # The integer (python string) with which the list starts (default=1)
+    # Eg: If list is initialized as:
+    #   3. Item
+    # The ol tag will get starts="3" attribute
+    STARTSWITH = '1'
+    # List of allowed sibling tags.
+    SIBLING_TAGS = ['ol', 'ul']
+
+    def test(self, parent, block):
+        return bool(self.RE.match(block))
+
+    def run(self, parent, blocks):
+        # Check for multiple items in one block.
+        items = self.get_items(blocks.pop(0))
+        sibling = self.lastChild(parent)
+
+        if sibling and sibling.tag in self.SIBLING_TAGS:
+            # Previous block was a list item, so set that as parent
+            lst = sibling
+            # make sure previous item is in a p - if the item has text,
+            # then it isn't in a p
+            if lst[-1].text:
+                # since it's possible there are other children for this sibling,
+                # we can't just SubElement the p, we need to insert it as the
+                # first item
+                p = util.etree.Element('p')
+                p.text = lst[-1].text
+                lst[-1].text = ''
+                lst[-1].insert(0, p)
+            # if the last item has a tail, then the tail needs to be put in a p
+            # likely only when a header is not followed by a blank line
+            lch = self.lastChild(lst[-1])
+            if lch is not None and lch.tail:
+                p = util.etree.SubElement(lst[-1], 'p')
+                p.text = lch.tail.lstrip()
+                lch.tail = ''
+
+            # parse first block differently as it gets wrapped in a p.
+            li = util.etree.SubElement(lst, 'li')
+            self.parser.state.set('looselist')
+            firstitem = items.pop(0)
+            self.parser.parseBlocks(li, [firstitem])
+            self.parser.state.reset()
+        elif parent.tag in ['ol', 'ul']:
+            # this catches the edge case of a multi-item indented list whose
+            # first item is in a blank parent-list item:
+            # * * subitem1
+            #     * subitem2
+            # see also ListIndentProcessor
+            lst = parent
+        else:
+            # This is a new list so create parent with appropriate tag.
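+            # e.g. an item block "3. Item" would open <ol start="3"> here
+            # once lazy_ol is disabled (illustrative input; see STARTSWITH).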
+            lst = util.etree.SubElement(parent, self.TAG)
+            # Check if a custom start integer is set
+            if not self.parser.markdown.lazy_ol and self.STARTSWITH !='1':
+                lst.attrib['start'] = self.STARTSWITH
+
+        self.parser.state.set('list')
+        # Loop through items in block, recursively parsing each with the
+        # appropriate parent.
+        for item in items:
+            if item.startswith(' '*self.tab_length):
+                # Item is indented. Parse with last item as parent
+                self.parser.parseBlocks(lst[-1], [item])
+            else:
+                # New item. Create li and parse with it as parent
+                li = util.etree.SubElement(lst, 'li')
+                self.parser.parseBlocks(li, [item])
+        self.parser.state.reset()
+
+    def get_items(self, block):
+        """ Break a block into list items. """
+        items = []
+        for line in block.split('\n'):
+            m = self.CHILD_RE.match(line)
+            if m:
+                # This is a new list item
+                # Check first item for the start index
+                if not items and self.TAG=='ol':
+                    # Detect the integer value of first list item
+                    INTEGER_RE = re.compile('(\d+)')
+                    self.STARTSWITH = INTEGER_RE.match(m.group(1)).group()
+                # Append to the list
+                items.append(m.group(3))
+            elif self.INDENT_RE.match(line):
+                # This is an indented (possibly nested) item.
+                if items[-1].startswith(' '*self.tab_length):
+                    # Previous item was indented. Append to that item.
+                    items[-1] = '%s\n%s' % (items[-1], line)
+                else:
+                    items.append(line)
+            else:
+                # This is another line of previous item. Append to that item.
+                items[-1] = '%s\n%s' % (items[-1], line)
+        return items
+
+
+class UListProcessor(OListProcessor):
+    """ Process unordered list blocks. """
+
+    TAG = 'ul'
+    RE = re.compile(r'^[ ]{0,3}[*+-][ ]+(.*)')
+
+
+class HashHeaderProcessor(BlockProcessor):
+    """ Process Hash Headers. """
+
+    # Detect a header at start of any line in block
+    RE = re.compile(r'(^|\n)(?P<level>#{1,6})(?P<header>
.*?)#*(\n|$)') + + def test(self, parent, block): + return bool(self.RE.search(block)) + + def run(self, parent, blocks): + block = blocks.pop(0) + m = self.RE.search(block) + if m: + before = block[:m.start()] # All lines before header + after = block[m.end():] # All lines after header + if before: + # As the header was not the first line of the block and the + # lines before the header must be parsed first, + # recursively parse this lines as a block. + self.parser.parseBlocks(parent, [before]) + # Create header using named groups from RE + h = util.etree.SubElement(parent, 'h%d' % len(m.group('level'))) + h.text = m.group('header').strip() + if after: + # Insert remaining lines as first block for future parsing. + blocks.insert(0, after) + else: + # This should never happen, but just in case... + logger.warn("We've got a problem header: %r" % block) + + +class SetextHeaderProcessor(BlockProcessor): + """ Process Setext-style Headers. """ + + # Detect Setext-style header. Must be first 2 lines of block. + RE = re.compile(r'^.*?\n[=-]+[ ]*(\n|$)', re.MULTILINE) + + def test(self, parent, block): + return bool(self.RE.match(block)) + + def run(self, parent, blocks): + lines = blocks.pop(0).split('\n') + # Determine level. ``=`` is 1 and ``-`` is 2. + if lines[1].startswith('='): + level = 1 + else: + level = 2 + h = util.etree.SubElement(parent, 'h%d' % level) + h.text = lines[0].strip() + if len(lines) > 2: + # Block contains additional lines. Add to master blocks for later. + blocks.insert(0, '\n'.join(lines[2:])) + + +class HRProcessor(BlockProcessor): + """ Process Horizontal Rules. """ + + RE = r'^[ ]{0,3}((-+[ ]{0,2}){3,}|(_+[ ]{0,2}){3,}|(\*+[ ]{0,2}){3,})[ ]*' + # Detect hr on any line of a block. + SEARCH_RE = re.compile(RE, re.MULTILINE) + + def test(self, parent, block): + m = self.SEARCH_RE.search(block) + # No atomic grouping in python so we simulate it here for performance. + # The regex only matches what would be in the atomic group - the HR. + # Then check if we are at end of block or if next char is a newline. + if m and (m.end() == len(block) or block[m.end()] == '\n'): + # Save match object on class instance so we can use it later. + self.match = m + return True + return False + + def run(self, parent, blocks): + block = blocks.pop(0) + # Check for lines in block before hr. + prelines = block[:self.match.start()].rstrip('\n') + if prelines: + # Recursively parse lines before hr so they get parsed first. + self.parser.parseBlocks(parent, [prelines]) + # create hr + util.etree.SubElement(parent, 'hr') + # check for lines in block after hr. + postlines = block[self.match.end():].lstrip('\n') + if postlines: + # Add lines after hr to master blocks for later parsing. + blocks.insert(0, postlines) + + + +class EmptyBlockProcessor(BlockProcessor): + """ Process blocks that are empty or start with an empty line. """ + + def test(self, parent, block): + return not block or block.startswith('\n') + + def run(self, parent, blocks): + block = blocks.pop(0) + filler = '\n\n' + if block: + # Starts with empty line + # Only replace a single line. + filler = '\n' + # Save the rest for later. + theRest = block[1:] + if theRest: + # Add remaining lines to master blocks for later. + blocks.insert(0, theRest) + sibling = self.lastChild(parent) + if sibling and sibling.tag == 'pre' and len(sibling) and sibling[0].tag == 'code': + # Last block is a codeblock. Append to preserve whitespace. 
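+            # e.g. two indented code blocks separated by a blank line end up
+            # in a single <pre><code> element with the blank line preserved.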
+            sibling[0].text = util.AtomicString('%s%s' % (sibling[0].text, filler))
+
+
+class ParagraphProcessor(BlockProcessor):
+    """ Process Paragraph blocks. """
+
+    def test(self, parent, block):
+        return True
+
+    def run(self, parent, blocks):
+        block = blocks.pop(0)
+        if block.strip():
+            # Not a blank block. Add to parent, otherwise throw it away.
+            if self.parser.state.isstate('list'):
+                # The parent is a tight-list.
+                #
+                # Check for any children. This will likely only happen in a
+                # tight-list when a header isn't followed by a blank line.
+                # For example:
+                #
+                #     * # Header
+                #     Line 2 of list item - not part of header.
+                sibling = self.lastChild(parent)
+                if sibling is not None:
+                    # Insert after sibling.
+                    if sibling.tail:
+                        sibling.tail = '%s\n%s' % (sibling.tail, block)
+                    else:
+                        sibling.tail = '\n%s' % block
+                else:
+                    # Append to parent.text
+                    if parent.text:
+                        parent.text = '%s\n%s' % (parent.text, block)
+                    else:
+                        parent.text = block.lstrip()
+            else:
+                # Create a regular paragraph
+                p = util.etree.SubElement(parent, 'p')
+                p.text = block.lstrip()
diff --git a/awx/lib/site-packages/markdown/extensions/__init__.py b/awx/lib/site-packages/markdown/extensions/__init__.py
new file mode 100644
index 0000000000..184c4d1b1d
--- /dev/null
+++ b/awx/lib/site-packages/markdown/extensions/__init__.py
@@ -0,0 +1,53 @@
+"""
+Extensions
+-----------------------------------------------------------------------------
+"""
+
+from __future__ import unicode_literals
+
+class Extension(object):
+    """ Base class for extensions to subclass. """
+    def __init__(self, configs = {}):
+        """Create an instance of an Extension.
+
+        Keyword arguments:
+
+        * configs: A dict of configuration settings used by an Extension.
+        """
+        self.config = configs
+
+    def getConfig(self, key, default=''):
+        """ Return a setting for the given key or an empty string. """
+        if key in self.config:
+            return self.config[key][0]
+        else:
+            return default
+
+    def getConfigs(self):
+        """ Return all configs settings as a dict. """
+        return dict([(key, self.getConfig(key)) for key in self.config.keys()])
+
+    def getConfigInfo(self):
+        """ Return all config descriptions as a list of tuples. """
+        return [(key, self.config[key][1]) for key in self.config.keys()]
+
+    def setConfig(self, key, value):
+        """ Set a config setting for `key` with the given `value`. """
+        self.config[key][0] = value
+
+    def extendMarkdown(self, md, md_globals):
+        """
+        Add the various processors and patterns to the Markdown Instance.
+
+        This method must be overridden by every extension.
+
+        Keyword arguments:
+
+        * md: The Markdown instance.
+
+        * md_globals: Global variables in the markdown module namespace.
+
+        """
+        raise NotImplementedError('Extension "%s.%s" must define an "extendMarkdown" ' \
+            'method.' % (self.__class__.__module__, self.__class__.__name__))
+
diff --git a/awx/lib/site-packages/markdown/extensions/abbr.py b/awx/lib/site-packages/markdown/extensions/abbr.py
new file mode 100644
index 0000000000..5e46f1dc38
--- /dev/null
+++ b/awx/lib/site-packages/markdown/extensions/abbr.py
@@ -0,0 +1,96 @@
+'''
+Abbreviation Extension for Python-Markdown
+==========================================
+
+This extension adds abbreviation handling to Python-Markdown.
+
+Simple Usage:
+
+    >>> import markdown
+    >>> text = """
+    ... Some text with an ABBR and a REF. Ignore REFERENCE and ref.
+    ...
+    ... *[ABBR]: Abbreviation
+    ... *[REF]: Abbreviation Reference
+    ... """
+    >>> print markdown.markdown(text, ['abbr'])
+    <p>Some text with an <abbr title="Abbreviation">ABBR</abbr> and a <abbr
+    title="Abbreviation Reference">REF</abbr>. Ignore REFERENCE and ref.</p>
+
+Copyright 2007-2008
+* [Waylan Limberg](http://achinghead.com/)
+* [Seemant Kulleen](http://www.kulleen.org/)
+
+'''
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+from . import Extension
+from ..preprocessors import Preprocessor
+from ..inlinepatterns import Pattern
+from ..util import etree
+import re
+
+# Global Vars
+ABBR_REF_RE = re.compile(r'[*]\[(?P<abbr>[^\]]*)\][ ]?:\s*(?P<title>.*)')
+
+class AbbrExtension(Extension):
+    """ Abbreviation Extension for Python-Markdown. """
+
+    def extendMarkdown(self, md, md_globals):
+        """ Insert AbbrPreprocessor before ReferencePreprocessor. """
+        md.preprocessors.add('abbr', AbbrPreprocessor(md), '<reference')
+
+
+class AbbrPreprocessor(Preprocessor):
+    """ Abbreviation Preprocessor - parse text for abbr references. """
+
+    def run(self, lines):
+        '''
+        Find and remove all Abbreviation references from the text.
+        Each reference is set as a new AbbrPattern in the markdown instance.
+
+        '''
+        new_text = []
+        for line in lines:
+            m = ABBR_REF_RE.match(line)
+            if m:
+                abbr = m.group('abbr').strip()
+                title = m.group('title').strip()
+                self.markdown.inlinePatterns['abbr-%s'%abbr] = \
+                    AbbrPattern(self._generate_pattern(abbr), title)
+            else:
+                new_text.append(line)
+        return new_text
+
+    def _generate_pattern(self, text):
+        '''
+        Given a string, returns a regex pattern to match that string.
+
+        'HTML' -> r'(?P<abbr>[H][T][M][L])'
+
+        Note: we force each char as a literal match (in brackets) as we don't
+        know what they will be beforehand.
+
+        '''
+        chars = list(text)
+        for i in range(len(chars)):
+            chars[i] = r'[%s]' % chars[i]
+        return r'(?P<abbr>\b%s\b)' % (r''.join(chars))
+
+
+class AbbrPattern(Pattern):
+    """ Abbreviation inline pattern. """
+
+    def __init__(self, pattern, title):
+        super(AbbrPattern, self).__init__(pattern)
+        self.title = title
+
+    def handleMatch(self, m):
+        abbr = etree.Element('abbr')
+        abbr.text = m.group('abbr')
+        abbr.set('title', self.title)
+        return abbr
+
+def makeExtension(configs=None):
+    return AbbrExtension(configs=configs)
diff --git a/awx/lib/site-packages/markdown/extensions/admonition.py b/awx/lib/site-packages/markdown/extensions/admonition.py
new file mode 100644
index 0000000000..9a45b9249c
--- /dev/null
+++ b/awx/lib/site-packages/markdown/extensions/admonition.py
@@ -0,0 +1,118 @@
+"""
+Admonition extension for Python-Markdown
+========================================
+
+Adds rST-style admonitions. Inspired by [rST][] feature with the same name.
+
+The syntax is (followed by an indented block with the contents):
+    !!! [type] [optional explicit title]
+
+Where `type` is used as a CSS class name of the div. If not present, `title`
+defaults to the capitalized `type`, so "note" -> "Note".
+
+rST suggests the following `types`, but you're free to use whatever you want:
+    attention, caution, danger, error, hint, important, note, tip, warning
+
+
+A simple example:
+    !!! note
+        This is the first line inside the box.
+
+Outputs:
+    <div class="admonition note">
+    <p class="admonition-title">Note</p>
+    <p>This is the first line inside the box</p>
+    </div>
+
+You can also specify the title and CSS class of the admonition:
+    !!! custom "Did you know?"
+        Another line here.
+
+Outputs:
+    <div class="admonition custom">
+    <p class="admonition-title">Did you know?</p>
+    <p>Another line here.</p>
+    </div>
+
+[rST]: http://docutils.sourceforge.net/docs/ref/rst/directives.html#specific-admonitions
+
+By [Tiago Serafim](http://www.tiagoserafim.com/).
+ +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import Extension +from ..blockprocessors import BlockProcessor +from ..util import etree +import re + + +class AdmonitionExtension(Extension): + """ Admonition extension for Python-Markdown. """ + + def extendMarkdown(self, md, md_globals): + """ Add Admonition to Markdown instance. """ + md.registerExtension(self) + + md.parser.blockprocessors.add('admonition', + AdmonitionProcessor(md.parser), + '_begin') + + +class AdmonitionProcessor(BlockProcessor): + + CLASSNAME = 'admonition' + CLASSNAME_TITLE = 'admonition-title' + RE = re.compile(r'(?:^|\n)!!!\ ?([\w\-]+)(?:\ "(.*?)")?') + + def test(self, parent, block): + sibling = self.lastChild(parent) + return self.RE.search(block) or \ + (block.startswith(' ' * self.tab_length) and sibling and \ + sibling.get('class', '').find(self.CLASSNAME) != -1) + + def run(self, parent, blocks): + sibling = self.lastChild(parent) + block = blocks.pop(0) + m = self.RE.search(block) + + if m: + block = block[m.end() + 1:] # removes the first line + + block, theRest = self.detab(block) + + if m: + klass, title = self.get_class_and_title(m) + div = etree.SubElement(parent, 'div') + div.set('class', '%s %s' % (self.CLASSNAME, klass)) + if title: + p = etree.SubElement(div, 'p') + p.text = title + p.set('class', self.CLASSNAME_TITLE) + else: + div = sibling + + self.parser.parseChunk(div, block) + + if theRest: + # This block contained unindented line(s) after the first indented + # line. Insert these lines as the first block of the master blocks + # list for future processing. + blocks.insert(0, theRest) + + def get_class_and_title(self, match): + klass, title = match.group(1).lower(), match.group(2) + if title is None: + # no title was provided, use the capitalized classname as title + # e.g.: `!!! note` will render `<p class="admonition-title">Note</p>` + title = klass.capitalize() + elif title == '': + # an explicit blank title should not be rendered + # e.g.: `!!! warning ""` will *not* render `p` with a title + title = None + return klass, title + + +def makeExtension(configs={}): + return AdmonitionExtension(configs=configs) diff --git a/awx/lib/site-packages/markdown/extensions/attr_list.py b/awx/lib/site-packages/markdown/extensions/attr_list.py new file mode 100644 index 0000000000..c98aa850a6 --- /dev/null +++ b/awx/lib/site-packages/markdown/extensions/attr_list.py @@ -0,0 +1,140 @@ +""" +Attribute List Extension for Python-Markdown +============================================ + +Adds attribute list syntax. Inspired by +[maruku](http://maruku.rubyforge.org/proposal.html#attribute_lists)'s +feature of the same name. + +Copyright 2011 [Waylan Limberg](http://achinghead.com/). + +Contact: markdown@freewisdom.org + +License: BSD (see ../LICENSE.md for details) + +Dependencies: +* [Python 2.4+](http://python.org) +* [Markdown 2.1+](http://packages.python.org/Markdown/) + +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from . 
import Extension +from ..treeprocessors import Treeprocessor +from ..util import isBlockLevel +import re + +try: + Scanner = re.Scanner +except AttributeError: + # must be on Python 2.4 + from sre import Scanner + +def _handle_double_quote(s, t): + k, v = t.split('=') + return k, v.strip('"') + +def _handle_single_quote(s, t): + k, v = t.split('=') + return k, v.strip("'") + +def _handle_key_value(s, t): + return t.split('=') + +def _handle_word(s, t): + if t.startswith('.'): + return '.', t[1:] + if t.startswith('#'): + return 'id', t[1:] + return t, t + +_scanner = Scanner([ + (r'[^ ]+=".*?"', _handle_double_quote), + (r"[^ ]+='.*?'", _handle_single_quote), + (r'[^ ]+=[^ ]*', _handle_key_value), + (r'[^ ]+', _handle_word), + (r' ', None) +]) + +def get_attrs(str): + """ Parse attribute list and return a list of attribute tuples. """ + return _scanner.scan(str)[0] + +def isheader(elem): + return elem.tag in ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'] + +class AttrListTreeprocessor(Treeprocessor): + + BASE_RE = r'\{\:?([^\}]*)\}' + HEADER_RE = re.compile(r'[ ]*%s[ ]*$' % BASE_RE) + BLOCK_RE = re.compile(r'\n[ ]*%s[ ]*$' % BASE_RE) + INLINE_RE = re.compile(r'^%s' % BASE_RE) + NAME_RE = re.compile(r'[^A-Z_a-z\u00c0-\u00d6\u00d8-\u00f6\u00f8-\u02ff\u0370-\u037d' + r'\u037f-\u1fff\u200c-\u200d\u2070-\u218f\u2c00-\u2fef' + r'\u3001-\ud7ff\uf900-\ufdcf\ufdf0-\ufffd' + r'\:\-\.0-9\u00b7\u0300-\u036f\u203f-\u2040]+') + + def run(self, doc): + for elem in doc.getiterator(): + if isBlockLevel(elem.tag): + # Block level: check for attrs on last line of text + RE = self.BLOCK_RE + if isheader(elem): + # header: check for attrs at end of line + RE = self.HEADER_RE + if len(elem) and elem[-1].tail: + # has children. Get from tail of last child + m = RE.search(elem[-1].tail) + if m: + self.assign_attrs(elem, m.group(1)) + elem[-1].tail = elem[-1].tail[:m.start()] + if isheader(elem): + # clean up trailing #s + elem[-1].tail = elem[-1].tail.rstrip('#').rstrip() + elif elem.text: + # no children. Get from text. + m = RE.search(elem.text) + if m: + self.assign_attrs(elem, m.group(1)) + elem.text = elem.text[:m.start()] + if isheader(elem): + # clean up trailing #s + elem.text = elem.text.rstrip('#').rstrip() + else: + # inline: check for attrs at start of tail + if elem.tail: + m = self.INLINE_RE.match(elem.tail) + if m: + self.assign_attrs(elem, m.group(1)) + elem.tail = elem.tail[m.end():] + + def assign_attrs(self, elem, attrs): + """ Assign attrs to element. """ + for k, v in get_attrs(attrs): + if k == '.': + # add to class + cls = elem.get('class') + if cls: + elem.set('class', '%s %s' % (cls, v)) + else: + elem.set('class', v) + else: + # assign attr k with v + elem.set(self.sanitize_name(k), v) + + def sanitize_name(self, name): + """ + Sanitize name as 'an XML Name, minus the ":"'. 
+
+        See http://www.w3.org/TR/REC-xml-names/#NT-NCName
+        """
+        return self.NAME_RE.sub('_', name)
+
+
+class AttrListExtension(Extension):
+    def extendMarkdown(self, md, md_globals):
+        md.treeprocessors.add('attr_list', AttrListTreeprocessor(md), '>prettify')
+
+
+def makeExtension(configs={}):
+    return AttrListExtension(configs=configs)
diff --git a/awx/lib/site-packages/markdown/extensions/codehilite.py b/awx/lib/site-packages/markdown/extensions/codehilite.py
new file mode 100644
index 0000000000..72b40fde78
--- /dev/null
+++ b/awx/lib/site-packages/markdown/extensions/codehilite.py
@@ -0,0 +1,240 @@
+"""
+CodeHilite Extension for Python-Markdown
+========================================
+
+Adds code/syntax highlighting to standard Python-Markdown code blocks.
+
+Copyright 2006-2008 [Waylan Limberg](http://achinghead.com/).
+
+Project website: <http://packages.python.org/Markdown/extensions/code_hilite.html>
+Contact: markdown@freewisdom.org
+
+License: BSD (see ../LICENSE.md for details)
+
+Dependencies:
+* [Python 2.3+](http://python.org/)
+* [Markdown 2.0+](http://packages.python.org/Markdown/)
+* [Pygments](http://pygments.org/)
+
+"""
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+from . import Extension
+from ..treeprocessors import Treeprocessor
+import warnings
+try:
+    from pygments import highlight
+    from pygments.lexers import get_lexer_by_name, guess_lexer, TextLexer
+    from pygments.formatters import HtmlFormatter
+    pygments = True
+except ImportError:
+    pygments = False
+
+# ------------------ The Main CodeHilite Class ----------------------
+class CodeHilite(object):
+    """
+    Determine language of source code, and pass it into the Pygments highlighter.
+
+    Basic Usage:
+        >>> code = CodeHilite(src = 'some text')
+        >>> html = code.hilite()
+
+    * src: Source string or any object with a .readline attribute.
+
+    * linenums: (Boolean) Set line numbering to 'on' (True), 'off' (False) or 'auto'(None).
+      Set to 'auto' by default.
+
+    * guess_lang: (Boolean) Turn language auto-detection 'on' or 'off' (on by default).
+
+    * css_class: Set class name of wrapper div ('codehilite' by default).
+
+    Low Level Usage:
+        >>> code = CodeHilite()
+        >>> code.src = 'some text' # String or anything with a .readline attr.
+        >>> code.linenos = True # True or False; Turns line numbering on or off.
+        >>> html = code.hilite()
+
+    """
+
+    def __init__(self, src=None, linenums=None, guess_lang=True,
+                css_class="codehilite", lang=None, style='default',
+                noclasses=False, tab_length=4):
+        self.src = src
+        self.lang = lang
+        self.linenums = linenums
+        self.guess_lang = guess_lang
+        self.css_class = css_class
+        self.style = style
+        self.noclasses = noclasses
+        self.tab_length = tab_length
+
+    def hilite(self):
+        """
+        Pass code to the [Pygments](http://pygments.pocoo.org/) highlighter with
+        optional line numbers. The output should then be styled with css to
+        your liking. No styles are applied by default - only styling hooks
+        (i.e.: <span class="k">).
+
+        returns : A string of html.
+
+        """
+
+        self.src = self.src.strip('\n')
+
+        if self.lang is None:
+            self._getLang()
+
+        if pygments:
+            try:
+                lexer = get_lexer_by_name(self.lang)
+            except ValueError:
+                try:
+                    if self.guess_lang:
+                        lexer = guess_lexer(self.src)
+                    else:
+                        lexer = TextLexer()
+                except ValueError:
+                    lexer = TextLexer()
+            formatter = HtmlFormatter(linenos=self.linenums,
+                                      cssclass=self.css_class,
+                                      style=self.style,
+                                      noclasses=self.noclasses)
+            return highlight(self.src, lexer, formatter)
+        else:
+            # just escape and build markup usable by JS highlighting libs
+            txt = self.src.replace('&', '&amp;')
+            txt = txt.replace('<', '&lt;')
+            txt = txt.replace('>', '&gt;')
+            txt = txt.replace('"', '&quot;')
+            classes = []
+            if self.lang:
+                classes.append('language-%s' % self.lang)
+            if self.linenums:
+                classes.append('linenums')
+            class_str = ''
+            if classes:
+                class_str = ' class="%s"' % ' '.join(classes)
+            return '<pre class="%s"><code%s>%s</code></pre>\n'% \
+                   (self.css_class, class_str, txt)
+
+    def _getLang(self):
+        """
+        Determines language of a code block from shebang line and whether said
+        line should be removed or left in place. If the shebang line contains a
+        path (even a single /) then it is assumed to be a real shebang line and
+        left alone. However, if no path is given (e.g.: #!python or :::python)
+        then it is assumed to be a mock shebang for language identification of a
+        code fragment and removed from the code block prior to processing for
+        code highlighting. When a mock shebang (e.g.: #!python) is found, line
+        numbering is turned on. When colons are found in place of a shebang
+        (e.g.: :::python), line numbering is left in the current state - off
+        by default.
+
+        """
+
+        import re
+
+        #split text into lines
+        lines = self.src.split("\n")
+        #pull first line to examine
+        fl = lines.pop(0)
+
+        c = re.compile(r'''
+            (?:(?:^::+)|(?P<shebang>^[#]!)) # Shebang or 2 or more colons.
+            (?P<path>(?:/\w+)*[/ ])?        # Zero or 1 path
+            (?P<lang>[\w+-]*)               # The language
+            ''', re.VERBOSE)
+        # search first line for shebang
+        m = c.search(fl)
+        if m:
+            # we have a match
+            try:
+                self.lang = m.group('lang').lower()
+            except IndexError:
+                self.lang = None
+            if m.group('path'):
+                # path exists - restore first line
+                lines.insert(0, fl)
+            if self.linenums is None and m.group('shebang'):
+                # Overridable and Shebang exists - use line numbers
+                self.linenums = True
+        else:
+            # No match
+            lines.insert(0, fl)
+
+        self.src = "\n".join(lines).strip("\n")
+
+
+
+# ------------------ The Markdown Extension -------------------------------
+class HiliteTreeprocessor(Treeprocessor):
+    """ Highlight source code in code blocks. """
+
+    def run(self, root):
+        """ Find code blocks and store in htmlStash. """
+        blocks = root.getiterator('pre')
+        for block in blocks:
+            children = block.getchildren()
+            if len(children) == 1 and children[0].tag == 'code':
+                code = CodeHilite(children[0].text,
+                            linenums=self.config['linenums'],
+                            guess_lang=self.config['guess_lang'],
+                            css_class=self.config['css_class'],
+                            style=self.config['pygments_style'],
+                            noclasses=self.config['noclasses'],
+                            tab_length=self.markdown.tab_length)
+                placeholder = self.markdown.htmlStash.store(code.hilite(),
+                                                            safe=True)
+                # Clear codeblock in etree instance
+                block.clear()
+                # Change to p element which will later
+                # be removed when inserting raw html
+                block.tag = 'p'
+                block.text = placeholder
+
+
+class CodeHiliteExtension(Extension):
+    """ Add source code highlighting to markdown codeblocks. """
+
+    def __init__(self, configs):
+        # define default configs
+        self.config = {
+            'linenums': [None, "Use line numbers. True=yes, False=no, None=auto"],
+            'force_linenos' : [False, "Deprecated! Use 'linenums' instead. Force line numbers - Default: False"],
+            'guess_lang' : [True, "Automatic language detection - Default: True"],
+            'css_class' : ["codehilite",
+                           "Set class name for wrapper <div> - Default: codehilite"],
+            'pygments_style' : ['default', 'Pygments HTML Formatter Style (Colorscheme) - Default: default'],
+            'noclasses': [False, 'Use inline styles instead of CSS classes - Default false']
+            }
+
+        # Override defaults with user settings
+        for key, value in configs:
+            # convert strings to booleans
+            if value == 'True': value = True
+            if value == 'False': value = False
+            if value == 'None': value = None
+
+            if key == 'force_linenos':
+                warnings.warn('The "force_linenos" config setting'
+                    ' to the CodeHilite extension is deprecated.'
+                    ' Use "linenums" instead.', PendingDeprecationWarning)
+                if value:
+                    # Carry 'force_linenos' over to new 'linenos'.
+                    self.setConfig('linenums', True)
+
+            self.setConfig(key, value)
+
+    def extendMarkdown(self, md, md_globals):
+        """ Add HilitePostprocessor to Markdown instance. """
+        hiliter = HiliteTreeprocessor(md)
+        hiliter.config = self.getConfigs()
+        md.treeprocessors.add("hilite", hiliter, "<inline")
+
+        md.registerExtension(self)
+
+
+def makeExtension(configs={}):
+    return CodeHiliteExtension(configs=configs)
+
diff --git a/awx/lib/site-packages/markdown/extensions/def_list.py b/awx/lib/site-packages/markdown/extensions/def_list.py
new file mode 100644
index 0000000000..868465266c
--- /dev/null
+++ b/awx/lib/site-packages/markdown/extensions/def_list.py
@@ -0,0 +1,118 @@
+"""
+Definition List Extension for Python-Markdown
+=============================================
+
+Adds parsing of Definition Lists to Python-Markdown.
+
+A simple example:
+
+    Apple
+    :   Pomaceous fruit of plants of the genus Malus in
+        the family Rosaceae.
+    :   An American computer company.
+
+    Orange
+    :   The fruit of an evergreen tree of the genus Citrus.
+
+Copyright 2008 - [Waylan Limberg](http://achinghead.com)
+
+"""
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+from . import Extension
+from ..blockprocessors import BlockProcessor, ListIndentProcessor
+from ..util import etree
+import re
+
+
+class DefListProcessor(BlockProcessor):
+    """ Process Definition Lists. """
+
+    RE = re.compile(r'(^|\n)[ ]{0,3}:[ ]{1,3}(.*?)(\n|$)')
+    NO_INDENT_RE = re.compile(r'^[ ]{0,3}[^ :]')
+
+    def test(self, parent, block):
+        return bool(self.RE.search(block))
+
+    def run(self, parent, blocks):
+
+        raw_block = blocks.pop(0)
+        m = self.RE.search(raw_block)
+        terms = [l.strip() for l in raw_block[:m.start()].split('\n') if l.strip()]
+        block = raw_block[m.end():]
+        no_indent = self.NO_INDENT_RE.match(block)
+        if no_indent:
+            d, theRest = (block, None)
+        else:
+            d, theRest = self.detab(block)
+        if d:
+            d = '%s\n%s' % (m.group(2), d)
+        else:
+            d = m.group(2)
+        sibling = self.lastChild(parent)
+        if not terms and sibling is None:
+            # This is not a definition item. Most likely a paragraph that
+            # starts with a colon at the beginning of a document or list.
+            blocks.insert(0, raw_block)
+            return False
+        if not terms and sibling.tag == 'p':
+            # The previous paragraph contains the terms
+            state = 'looselist'
+            terms = sibling.text.split('\n')
+            parent.remove(sibling)
+            # Acquire new sibling
+            sibling = self.lastChild(parent)
+        else:
+            state = 'list'
+
+        if sibling and sibling.tag == 'dl':
+            # This is another item on an existing list
+            dl = sibling
+            if len(dl) and dl[-1].tag == 'dd' and len(dl[-1]):
+                state = 'looselist'
+        else:
+            # This is a new list
+            dl = etree.SubElement(parent, 'dl')
+        # Add terms
+        for term in terms:
+            dt = etree.SubElement(dl, 'dt')
+            dt.text = term
+        # Add definition
+        self.parser.state.set(state)
+        dd = etree.SubElement(dl, 'dd')
+        self.parser.parseBlocks(dd, [d])
+        self.parser.state.reset()
+
+        if theRest:
+            blocks.insert(0, theRest)
+
+class DefListIndentProcessor(ListIndentProcessor):
+    """ Process indented children of definition list items. """
+
+    ITEM_TYPES = ['dd']
+    LIST_TYPES = ['dl']
+
+    def create_item(self, parent, block):
+        """ Create a new dd and parse the block with it as the parent. """
+        dd = etree.SubElement(parent, 'dd')
+        self.parser.parseBlocks(dd, [block])
+
+
+
+class DefListExtension(Extension):
+    """ Add definition lists to Markdown. """
+
+    def extendMarkdown(self, md, md_globals):
+        """ Add an instance of DefListProcessor to BlockParser. """
+        md.parser.blockprocessors.add('defindent',
+                                      DefListIndentProcessor(md.parser),
+                                      '>indent')
+        md.parser.blockprocessors.add('deflist',
+                                      DefListProcessor(md.parser),
+                                      '>ulist')
+
+
+def makeExtension(configs={}):
+    return DefListExtension(configs=configs)
+
diff --git a/awx/lib/site-packages/markdown/extensions/extra.py b/awx/lib/site-packages/markdown/extensions/extra.py
new file mode 100644
index 0000000000..e6a1e820ef
--- /dev/null
+++ b/awx/lib/site-packages/markdown/extensions/extra.py
@@ -0,0 +1,54 @@
+"""
+Python-Markdown Extra Extension
+===============================
+
+A compilation of various Python-Markdown extensions that imitates
+[PHP Markdown Extra](http://michelf.com/projects/php-markdown/extra/).
+
+Note that each of the individual extensions still needs to be available
+on your PYTHONPATH. This extension simply wraps them all up as a
+convenience so that only one extension needs to be listed when
+initiating Markdown. See the documentation for each individual
+extension for specifics about that extension.
+
+In the event that one or more of the supported extensions are not
+available for import, Markdown will issue a warning and simply continue
+without that extension.
+
+There may be additional extensions that are distributed with
+Python-Markdown that are not included here in Extra. Those extensions
+are not part of PHP Markdown Extra, and therefore, not part of
+Python-Markdown Extra. If you really would like Extra to include
+additional extensions, we suggest creating your own clone of Extra
+under a different name. You could also edit the `extensions` global
+variable defined below, but be aware that such changes may be lost
+when you upgrade to any future version of Python-Markdown.
+
+"""
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+from . import Extension
+
+extensions = ['smart_strong',
+              'fenced_code',
+              'footnotes',
+              'attr_list',
+              'def_list',
+              'tables',
+              'abbr',
+              ]
+
+
+class ExtraExtension(Extension):
+    """ Add various extensions to Markdown class."""
+
+    def extendMarkdown(self, md, md_globals):
+        """ Register extension instances.
""" + md.registerExtensions(extensions, self.config) + if not md.safeMode: + # Turn on processing of markdown text within raw html + md.preprocessors['html_block'].markdown_in_raw = True + +def makeExtension(configs={}): + return ExtraExtension(configs=dict(configs)) diff --git a/awx/lib/site-packages/markdown/extensions/fenced_code.py b/awx/lib/site-packages/markdown/extensions/fenced_code.py new file mode 100644 index 0000000000..ecdb20d439 --- /dev/null +++ b/awx/lib/site-packages/markdown/extensions/fenced_code.py @@ -0,0 +1,161 @@ +""" +Fenced Code Extension for Python Markdown +========================================= + +This extension adds Fenced Code Blocks to Python-Markdown. + + >>> import markdown + >>> text = ''' + ... A paragraph before a fenced code block: + ... + ... ~~~ + ... Fenced code block + ... ~~~ + ... ''' + >>> html = markdown.markdown(text, extensions=['fenced_code']) + >>> print html + <p>A paragraph before a fenced code block:</p> + <pre><code>Fenced code block + </code></pre> + +Works with safe_mode also (we check this because we are using the HtmlStash): + + >>> print markdown.markdown(text, extensions=['fenced_code'], safe_mode='replace') + <p>A paragraph before a fenced code block:</p> + <pre><code>Fenced code block + </code></pre> + +Include tilde's in a code block and wrap with blank lines: + + >>> text = ''' + ... ~~~~~~~~ + ... + ... ~~~~ + ... ~~~~~~~~''' + >>> print markdown.markdown(text, extensions=['fenced_code']) + <pre><code> + ~~~~ + </code></pre> + +Language tags: + + >>> text = ''' + ... ~~~~{.python} + ... # Some python code + ... ~~~~''' + >>> print markdown.markdown(text, extensions=['fenced_code']) + <pre><code class="python"># Some python code + </code></pre> + +Optionally backticks instead of tildes as per how github's code block markdown is identified: + + >>> text = ''' + ... ````` + ... # Arbitrary code + ... ~~~~~ # these tildes will not close the block + ... `````''' + >>> print markdown.markdown(text, extensions=['fenced_code']) + <pre><code># Arbitrary code + ~~~~~ # these tildes will not close the block + </code></pre> + +Copyright 2007-2008 [Waylan Limberg](http://achinghead.com/). + +Project website: <http://packages.python.org/Markdown/extensions/fenced_code_blocks.html> +Contact: markdown@freewisdom.org + +License: BSD (see ../docs/LICENSE for details) + +Dependencies: +* [Python 2.4+](http://python.org) +* [Markdown 2.0+](http://packages.python.org/Markdown/) +* [Pygments (optional)](http://pygments.org) + +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import Extension +from ..preprocessors import Preprocessor +from .codehilite import CodeHilite, CodeHiliteExtension +import re + +# Global vars +FENCED_BLOCK_RE = re.compile( \ + r'(?P<fence>^(?:~{3,}|`{3,}))[ ]*(\{?\.?(?P<lang>[a-zA-Z0-9_+-]*)\}?)?[ ]*\n(?P<code>.*?)(?<=\n)(?P=fence)[ ]*$', + re.MULTILINE|re.DOTALL + ) +CODE_WRAP = '<pre><code%s>%s</code></pre>' +LANG_TAG = ' class="%s"' + +class FencedCodeExtension(Extension): + + def extendMarkdown(self, md, md_globals): + """ Add FencedBlockPreprocessor to the Markdown instance. 
""" + md.registerExtension(self) + + md.preprocessors.add('fenced_code_block', + FencedBlockPreprocessor(md), + ">normalize_whitespace") + + +class FencedBlockPreprocessor(Preprocessor): + + def __init__(self, md): + super(FencedBlockPreprocessor, self).__init__(md) + + self.checked_for_codehilite = False + self.codehilite_conf = {} + + def run(self, lines): + """ Match and store Fenced Code Blocks in the HtmlStash. """ + + # Check for code hilite extension + if not self.checked_for_codehilite: + for ext in self.markdown.registeredExtensions: + if isinstance(ext, CodeHiliteExtension): + self.codehilite_conf = ext.config + break + + self.checked_for_codehilite = True + + text = "\n".join(lines) + while 1: + m = FENCED_BLOCK_RE.search(text) + if m: + lang = '' + if m.group('lang'): + lang = LANG_TAG % m.group('lang') + + # If config is not empty, then the codehighlite extension + # is enabled, so we call it to highlite the code + if self.codehilite_conf: + highliter = CodeHilite(m.group('code'), + linenums=self.codehilite_conf['linenums'][0], + guess_lang=self.codehilite_conf['guess_lang'][0], + css_class=self.codehilite_conf['css_class'][0], + style=self.codehilite_conf['pygments_style'][0], + lang=(m.group('lang') or None), + noclasses=self.codehilite_conf['noclasses'][0]) + + code = highliter.hilite() + else: + code = CODE_WRAP % (lang, self._escape(m.group('code'))) + + placeholder = self.markdown.htmlStash.store(code, safe=True) + text = '%s\n%s\n%s'% (text[:m.start()], placeholder, text[m.end():]) + else: + break + return text.split("\n") + + def _escape(self, txt): + """ basic html escaping """ + txt = txt.replace('&', '&') + txt = txt.replace('<', '<') + txt = txt.replace('>', '>') + txt = txt.replace('"', '"') + return txt + + +def makeExtension(configs=None): + return FencedCodeExtension(configs=configs) diff --git a/awx/lib/site-packages/markdown/extensions/footnotes.py b/awx/lib/site-packages/markdown/extensions/footnotes.py new file mode 100644 index 0000000000..65ed597a7b --- /dev/null +++ b/awx/lib/site-packages/markdown/extensions/footnotes.py @@ -0,0 +1,313 @@ +""" +========================= FOOTNOTES ================================= + +This section adds footnote handling to markdown. It can be used as +an example for extending python-markdown with relatively complex +functionality. While in this case the extension is included inside +the module itself, it could just as easily be added from outside the +module. Not that all markdown classes above are ignorant about +footnotes. All footnote functionality is provided separately and +then added to the markdown instance at the run time. + +Footnote functionality is attached by calling extendMarkdown() +method of FootnoteExtension. The method also registers the +extension to allow it's state to be reset by a call to reset() +method. + +Example: + Footnotes[^1] have a label[^label] and a definition[^!DEF]. + + [^1]: This is a footnote + [^label]: A footnote on "label" + [^!DEF]: The footnote for definition + +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from . 
+from ..preprocessors import Preprocessor
+from ..inlinepatterns import Pattern
+from ..treeprocessors import Treeprocessor
+from ..postprocessors import Postprocessor
+from ..util import etree, text_type
+from ..odict import OrderedDict
+import re
+
+FN_BACKLINK_TEXT = "zz1337820767766393qq"
+NBSP_PLACEHOLDER = "qq3936677670287331zz"
+DEF_RE = re.compile(r'[ ]{0,3}\[\^([^\]]*)\]:\s*(.*)')
+TABBED_RE = re.compile(r'((\t)|(    ))(.*)')
+
+class FootnoteExtension(Extension):
+    """ Footnote Extension. """
+
+    def __init__ (self, configs):
+        """ Setup configs. """
+        self.config = {'PLACE_MARKER':
+                       ["///Footnotes Go Here///",
+                        "The text string that marks where the footnotes go"],
+                       'UNIQUE_IDS':
+                       [False,
+                        "Avoid name collisions across "
+                        "multiple calls to reset()."],
+                       "BACKLINK_TEXT":
+                       ["&#8617;",
+                        "The text string that links from the footnote to the reader's place."]
+                       }
+
+        for key, value in configs:
+            self.config[key][0] = value
+
+        # In multiple invocations, emit links that don't get tangled.
+        self.unique_prefix = 0
+
+        self.reset()
+
+    def extendMarkdown(self, md, md_globals):
+        """ Add pieces to Markdown. """
+        md.registerExtension(self)
+        self.parser = md.parser
+        self.md = md
+        self.sep = ':'
+        if self.md.output_format in ['html5', 'xhtml5']:
+            self.sep = '-'
+        # Insert a preprocessor before ReferencePreprocessor
+        md.preprocessors.add("footnote", FootnotePreprocessor(self),
+                             "<reference")
+        # Insert an inline pattern before ImageReferencePattern
+        FOOTNOTE_RE = r'\[\^([^\]]*)\]' # blah blah [^1] blah
+        md.inlinePatterns.add("footnote", FootnotePattern(FOOTNOTE_RE, self),
+                              "<reference")
+        # Insert a tree-processor that would actually add the footnote div
+        # This must be before all other treeprocessors (i.e., inline and
+        # codehilite) so they can run on the contents of the div.
+        md.treeprocessors.add("footnote", FootnoteTreeprocessor(self),
+                              "_begin")
+        # Insert a postprocessor after the amp_substitute postprocessor
+        md.postprocessors.add("footnote", FootnotePostprocessor(self),
+                              ">amp_substitute")
+
+    def reset(self):
+        """ Clear the footnotes on reset, and prepare for a distinct document. """
+        self.footnotes = OrderedDict()
+        self.unique_prefix += 1
+
+    def findFootnotesPlaceholder(self, root):
+        """ Return ElementTree Element that contains Footnote placeholder. """
+        def finder(element):
+            for child in element:
+                if child.text:
+                    if child.text.find(self.getConfig("PLACE_MARKER")) > -1:
+                        return child, element, True
+                if child.tail:
+                    if child.tail.find(self.getConfig("PLACE_MARKER")) > -1:
+                        return child, element, False
+                finder(child)
+            return None
+
+        res = finder(root)
+        return res
+
+    def setFootnote(self, id, text):
+        """ Store a footnote for later retrieval. """
+        self.footnotes[id] = text
+
+    def makeFootnoteId(self, id):
+        """ Return footnote link id. """
+        if self.getConfig("UNIQUE_IDS"):
+            return 'fn%s%d-%s' % (self.sep, self.unique_prefix, id)
+        else:
+            return 'fn%s%s' % (self.sep, id)
+
+    def makeFootnoteRefId(self, id):
+        """ Return footnote back-link id. """
+        if self.getConfig("UNIQUE_IDS"):
+            return 'fnref%s%d-%s' % (self.sep, self.unique_prefix, id)
+        else:
+            return 'fnref%s%s' % (self.sep, id)
+
+    def makeFootnotesDiv(self, root):
+        """ Return div of footnotes as et Element.
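+        (Roughly <div class="footnote"><hr /><ol><li id="fn:1">...</li></ol>
+        </div>, as built below; returns None when no footnotes were
+        collected.)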
""" + + if not list(self.footnotes.keys()): + return None + + div = etree.Element("div") + div.set('class', 'footnote') + etree.SubElement(div, "hr") + ol = etree.SubElement(div, "ol") + + for id in self.footnotes.keys(): + li = etree.SubElement(ol, "li") + li.set("id", self.makeFootnoteId(id)) + self.parser.parseChunk(li, self.footnotes[id]) + backlink = etree.Element("a") + backlink.set("href", "#" + self.makeFootnoteRefId(id)) + if self.md.output_format not in ['html5', 'xhtml5']: + backlink.set("rev", "footnote") # Invalid in HTML5 + backlink.set("class", "footnote-backref") + backlink.set("title", "Jump back to footnote %d in the text" % \ + (self.footnotes.index(id)+1)) + backlink.text = FN_BACKLINK_TEXT + + if li.getchildren(): + node = li[-1] + if node.tag == "p": + node.text = node.text + NBSP_PLACEHOLDER + node.append(backlink) + else: + p = etree.SubElement(li, "p") + p.append(backlink) + return div + + +class FootnotePreprocessor(Preprocessor): + """ Find all footnote references and store for later use. """ + + def __init__ (self, footnotes): + self.footnotes = footnotes + + def run(self, lines): + """ + Loop through lines and find, set, and remove footnote definitions. + + Keywords: + + * lines: A list of lines of text + + Return: A list of lines of text with footnote definitions removed. + + """ + newlines = [] + i = 0 + while True: + m = DEF_RE.match(lines[i]) + if m: + fn, _i = self.detectTabbed(lines[i+1:]) + fn.insert(0, m.group(2)) + i += _i-1 # skip past footnote + self.footnotes.setFootnote(m.group(1), "\n".join(fn)) + else: + newlines.append(lines[i]) + if len(lines) > i+1: + i += 1 + else: + break + return newlines + + def detectTabbed(self, lines): + """ Find indented text and remove indent before further proccesing. + + Keyword arguments: + + * lines: an array of strings + + Returns: a list of post processed items and the index of last line. + + """ + items = [] + blank_line = False # have we encountered a blank line yet? + i = 0 # to keep track of where we are + + def detab(line): + match = TABBED_RE.match(line) + if match: + return match.group(4) + + for line in lines: + if line.strip(): # Non-blank line + detabbed_line = detab(line) + if detabbed_line: + items.append(detabbed_line) + i += 1 + continue + elif not blank_line and not DEF_RE.match(line): + # not tabbed but still part of first par. + items.append(line) + i += 1 + continue + else: + return items, i+1 + + else: # Blank line: _maybe_ we are done. + blank_line = True + i += 1 # advance + + # Find the next non-blank line + for j in range(i, len(lines)): + if lines[j].strip(): + next_line = lines[j]; break + else: + break # There is no more text; we are done. + + # Check if the next non-blank line is tabbed + if detab(next_line): # Yes, more work to do. + items.append("") + continue + else: + break # No, we are done. + else: + i += 1 + + return items, i + + +class FootnotePattern(Pattern): + """ InlinePattern for footnote markers in a document's body text. 
""" + + def __init__(self, pattern, footnotes): + super(FootnotePattern, self).__init__(pattern) + self.footnotes = footnotes + + def handleMatch(self, m): + id = m.group(2) + if id in self.footnotes.footnotes.keys(): + sup = etree.Element("sup") + a = etree.SubElement(sup, "a") + sup.set('id', self.footnotes.makeFootnoteRefId(id)) + a.set('href', '#' + self.footnotes.makeFootnoteId(id)) + if self.footnotes.md.output_format not in ['html5', 'xhtml5']: + a.set('rel', 'footnote') # invalid in HTML5 + a.set('class', 'footnote-ref') + a.text = text_type(self.footnotes.footnotes.index(id) + 1) + return sup + else: + return None + + +class FootnoteTreeprocessor(Treeprocessor): + """ Build and append footnote div to end of document. """ + + def __init__ (self, footnotes): + self.footnotes = footnotes + + def run(self, root): + footnotesDiv = self.footnotes.makeFootnotesDiv(root) + if footnotesDiv: + result = self.footnotes.findFootnotesPlaceholder(root) + if result: + child, parent, isText = result + ind = parent.getchildren().index(child) + if isText: + parent.remove(child) + parent.insert(ind, footnotesDiv) + else: + parent.insert(ind + 1, footnotesDiv) + child.tail = None + else: + root.append(footnotesDiv) + +class FootnotePostprocessor(Postprocessor): + """ Replace placeholders with html entities. """ + def __init__(self, footnotes): + self.footnotes = footnotes + + def run(self, text): + text = text.replace(FN_BACKLINK_TEXT, self.footnotes.getConfig("BACKLINK_TEXT")) + return text.replace(NBSP_PLACEHOLDER, " ") + +def makeExtension(configs=[]): + """ Return an instance of the FootnoteExtension """ + return FootnoteExtension(configs=configs) + diff --git a/awx/lib/site-packages/markdown/extensions/headerid.py b/awx/lib/site-packages/markdown/extensions/headerid.py new file mode 100644 index 0000000000..7681b8d499 --- /dev/null +++ b/awx/lib/site-packages/markdown/extensions/headerid.py @@ -0,0 +1,199 @@ +""" +HeaderID Extension for Python-Markdown +====================================== + +Auto-generate id attributes for HTML headers. + +Basic usage: + + >>> import markdown + >>> text = "# Some Header #" + >>> md = markdown.markdown(text, ['headerid']) + >>> print md + <h1 id="some-header">Some Header</h1> + +All header IDs are unique: + + >>> text = ''' + ... #Header + ... #Header + ... #Header''' + >>> md = markdown.markdown(text, ['headerid']) + >>> print md + <h1 id="header">Header</h1> + <h1 id="header_1">Header</h1> + <h1 id="header_2">Header</h1> + +To fit within a html template's hierarchy, set the header base level: + + >>> text = ''' + ... #Some Header + ... ## Next Level''' + >>> md = markdown.markdown(text, ['headerid(level=3)']) + >>> print md + <h3 id="some-header">Some Header</h3> + <h4 id="next-level">Next Level</h4> + +Works with inline markup. + + >>> text = '#Some *Header* with [markup](http://example.com).' + >>> md = markdown.markdown(text, ['headerid']) + >>> print md + <h1 id="some-header-with-markup">Some <em>Header</em> with <a href="http://example.com">markup</a>.</h1> + +Turn off auto generated IDs: + + >>> text = ''' + ... # Some Header + ... # Another Header''' + >>> md = markdown.markdown(text, ['headerid(forceid=False)']) + >>> print md + <h1>Some Header</h1> + <h1>Another Header</h1> + +Use with MetaData extension: + + >>> text = '''header_level: 2 + ... header_forceid: Off + ... + ... # A Header''' + >>> md = markdown.markdown(text, ['headerid', 'meta']) + >>> print md + <h2>A Header</h2> + +Copyright 2007-2011 [Waylan Limberg](http://achinghead.com/). 
+ +Project website: <http://packages.python.org/Markdown/extensions/header_id.html> +Contact: markdown@freewisdom.org + +License: BSD (see ../docs/LICENSE for details) + +Dependencies: +* [Python 2.3+](http://python.org) +* [Markdown 2.0+](http://packages.python.org/Markdown/) + +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import Extension +from ..treeprocessors import Treeprocessor +import re +import logging +import unicodedata + +logger = logging.getLogger('MARKDOWN') + +IDCOUNT_RE = re.compile(r'^(.*)_([0-9]+)$') + + +def slugify(value, separator): + """ Slugify a string, to make it URL friendly. """ + value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore') + value = re.sub('[^\w\s-]', '', value.decode('ascii')).strip().lower() + return re.sub('[%s\s]+' % separator, separator, value) + + +def unique(id, ids): + """ Ensure id is unique in set of ids. Append '_1', '_2'... if not """ + while id in ids or not id: + m = IDCOUNT_RE.match(id) + if m: + id = '%s_%d'% (m.group(1), int(m.group(2))+1) + else: + id = '%s_%d'% (id, 1) + ids.add(id) + return id + + +def itertext(elem): + """ Loop through all children and return text only. + + Reimplements method of same name added to ElementTree in Python 2.7 + + """ + if elem.text: + yield elem.text + for e in elem: + for s in itertext(e): + yield s + if e.tail: + yield e.tail + + +class HeaderIdTreeprocessor(Treeprocessor): + """ Assign IDs to headers. """ + + IDs = set() + + def run(self, doc): + start_level, force_id = self._get_meta() + slugify = self.config['slugify'] + sep = self.config['separator'] + for elem in doc.getiterator(): + if elem.tag in ['h1', 'h2', 'h3', 'h4', 'h5', 'h6']: + if force_id: + if "id" in elem.attrib: + id = elem.get('id') + else: + id = slugify(''.join(itertext(elem)), sep) + elem.set('id', unique(id, self.IDs)) + if start_level: + level = int(elem.tag[-1]) + start_level + if level > 6: + level = 6 + elem.tag = 'h%d' % level + + + def _get_meta(self): + """ Return meta data suported by this ext as a tuple """ + level = int(self.config['level']) - 1 + force = self._str2bool(self.config['forceid']) + if hasattr(self.md, 'Meta'): + if 'header_level' in self.md.Meta: + level = int(self.md.Meta['header_level'][0]) - 1 + if 'header_forceid' in self.md.Meta: + force = self._str2bool(self.md.Meta['header_forceid'][0]) + return level, force + + def _str2bool(self, s, default=False): + """ Convert a string to a booleen value. """ + s = str(s) + if s.lower() in ['0', 'f', 'false', 'off', 'no', 'n']: + return False + elif s.lower() in ['1', 't', 'true', 'on', 'yes', 'y']: + return True + return default + + +class HeaderIdExtension(Extension): + def __init__(self, configs): + # set defaults + self.config = { + 'level' : ['1', 'Base level for headers.'], + 'forceid' : ['True', 'Force all headers to have an id.'], + 'separator' : ['-', 'Word separator.'], + 'slugify' : [slugify, 'Callable to generate anchors'], + } + + for key, value in configs: + self.setConfig(key, value) + + def extendMarkdown(self, md, md_globals): + md.registerExtension(self) + self.processor = HeaderIdTreeprocessor() + self.processor.md = md + self.processor.config = self.getConfigs() + if 'attr_list' in md.treeprocessors.keys(): + # insert after attr_list treeprocessor + md.treeprocessors.add('headerid', self.processor, '>attr_list') + else: + # insert after 'prettify' treeprocessor. 
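+            # (Location strings are positional: '_begin'/'_end' pin an entry
+            # to the ends of the chain, while '<name'/'>name' insert before
+            # or after an existing entry - see OrderedDict.index_for_location
+            # in markdown/odict.py.)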
+ md.treeprocessors.add('headerid', self.processor, '>prettify') + + def reset(self): + self.processor.IDs = set() + + +def makeExtension(configs=None): + return HeaderIdExtension(configs=configs) diff --git a/awx/lib/site-packages/markdown/extensions/meta.py b/awx/lib/site-packages/markdown/extensions/meta.py new file mode 100644 index 0000000000..aaff4365a8 --- /dev/null +++ b/awx/lib/site-packages/markdown/extensions/meta.py @@ -0,0 +1,93 @@ +""" +Meta Data Extension for Python-Markdown +======================================= + +This extension adds Meta Data handling to markdown. + +Basic Usage: + + >>> import markdown + >>> text = '''Title: A Test Doc. + ... Author: Waylan Limberg + ... John Doe + ... Blank_Data: + ... + ... The body. This is paragraph one. + ... ''' + >>> md = markdown.Markdown(['meta']) + >>> print md.convert(text) + <p>The body. This is paragraph one.</p> + >>> print md.Meta + {u'blank_data': [u''], u'author': [u'Waylan Limberg', u'John Doe'], u'title': [u'A Test Doc.']} + +Make sure text without Meta Data still works (markdown < 1.6b returns a <p>). + + >>> text = ' Some Code - not extra lines of meta data.' + >>> md = markdown.Markdown(['meta']) + >>> print md.convert(text) + <pre><code>Some Code - not extra lines of meta data. + </code></pre> + >>> md.Meta + {} + +Copyright 2007-2008 [Waylan Limberg](http://achinghead.com). + +Project website: <http://packages.python.org/Markdown/meta_data.html> +Contact: markdown@freewisdom.org + +License: BSD (see ../LICENSE.md for details) + +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import Extension +from ..preprocessors import Preprocessor +import re + +# Global Vars +META_RE = re.compile(r'^[ ]{0,3}(?P<key>[A-Za-z0-9_-]+):\s*(?P<value>.*)') +META_MORE_RE = re.compile(r'^[ ]{4,}(?P<value>.*)') + +class MetaExtension (Extension): + """ Meta-Data extension for Python-Markdown. """ + + def extendMarkdown(self, md, md_globals): + """ Add MetaPreprocessor to Markdown instance. """ + + md.preprocessors.add("meta", MetaPreprocessor(md), "_begin") + + +class MetaPreprocessor(Preprocessor): + """ Get Meta-Data. """ + + def run(self, lines): + """ Parse Meta-Data and store in Markdown.Meta. """ + meta = {} + key = None + while 1: + line = lines.pop(0) + if line.strip() == '': + break # blank line - done + m1 = META_RE.match(line) + if m1: + key = m1.group('key').lower().strip() + value = m1.group('value').strip() + try: + meta[key].append(value) + except KeyError: + meta[key] = [value] + else: + m2 = META_MORE_RE.match(line) + if m2 and key: + # Add another line to existing key + meta[key].append(m2.group('value').strip()) + else: + lines.insert(0, line) + break # no meta data - done + self.markdown.Meta = meta + return lines + + +def makeExtension(configs={}): + return MetaExtension(configs=configs) diff --git a/awx/lib/site-packages/markdown/extensions/nl2br.py b/awx/lib/site-packages/markdown/extensions/nl2br.py new file mode 100644 index 0000000000..da4b339958 --- /dev/null +++ b/awx/lib/site-packages/markdown/extensions/nl2br.py @@ -0,0 +1,38 @@ +""" +NL2BR Extension +=============== + +A Python-Markdown extension to treat newlines as hard breaks; like +GitHub-flavored Markdown does. 
+
+Usage:
+
+    >>> import markdown
+    >>> print markdown.markdown('line 1\\nline 2', extensions=['nl2br'])
+    <p>line 1<br />
+    line 2</p>
+
+Copyright 2011 [Brian Neal](http://deathofagremmie.com/)
+
+Dependencies:
+* [Python 2.4+](http://python.org)
+* [Markdown 2.1+](http://packages.python.org/Markdown/)
+
+"""
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+from . import Extension
+from ..inlinepatterns import SubstituteTagPattern
+
+BR_RE = r'\n'
+
+class Nl2BrExtension(Extension):
+
+    def extendMarkdown(self, md, md_globals):
+        br_tag = SubstituteTagPattern(BR_RE, 'br')
+        md.inlinePatterns.add('nl', br_tag, '_end')
+
+
+def makeExtension(configs=None):
+    return Nl2BrExtension(configs)
diff --git a/awx/lib/site-packages/markdown/extensions/sane_lists.py b/awx/lib/site-packages/markdown/extensions/sane_lists.py
new file mode 100644
index 0000000000..23e9a7f4a6
--- /dev/null
+++ b/awx/lib/site-packages/markdown/extensions/sane_lists.py
@@ -0,0 +1,51 @@
+"""
+Sane List Extension for Python-Markdown
+=======================================
+
+Modifies the behavior of lists in Python-Markdown to act in a sane manner.
+
+In standard Markdown syntax, the following would constitute a single
+ordered list. However, with this extension, the output would include
+two lists, the first an ordered list and the second an unordered list.
+
+    1. ordered
+    2. list
+
+    * unordered
+    * list
+
+Copyright 2011 - [Waylan Limberg](http://achinghead.com)
+
+"""
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+from . import Extension
+from ..blockprocessors import OListProcessor, UListProcessor
+import re
+
+
+class SaneOListProcessor(OListProcessor):
+
+    CHILD_RE = re.compile(r'^[ ]{0,3}((\d+\.))[ ]+(.*)')
+    SIBLING_TAGS = ['ol']
+
+
+class SaneUListProcessor(UListProcessor):
+
+    CHILD_RE = re.compile(r'^[ ]{0,3}(([*+-]))[ ]+(.*)')
+    SIBLING_TAGS = ['ul']
+
+
+class SaneListExtension(Extension):
+    """ Add sane lists to Markdown. """
+
+    def extendMarkdown(self, md, md_globals):
+        """ Override existing Processors. """
+        md.parser.blockprocessors['olist'] = SaneOListProcessor(md.parser)
+        md.parser.blockprocessors['ulist'] = SaneUListProcessor(md.parser)
+
+
+def makeExtension(configs={}):
+    return SaneListExtension(configs=configs)
+
diff --git a/awx/lib/site-packages/markdown/extensions/smart_strong.py b/awx/lib/site-packages/markdown/extensions/smart_strong.py
new file mode 100644
index 0000000000..4818cf9ea8
--- /dev/null
+++ b/awx/lib/site-packages/markdown/extensions/smart_strong.py
@@ -0,0 +1,42 @@
+'''
+Smart_Strong Extension for Python-Markdown
+==========================================
+
+This extension adds smarter handling of double underscores within words.
+
+Simple Usage:
+
+    >>> import markdown
+    >>> print markdown.markdown('Text with double__underscore__words.',
+    ...                         extensions=['smart_strong'])
+    <p>Text with double__underscore__words.</p>
+    >>> print markdown.markdown('__Strong__ still works.',
+    ...                         extensions=['smart_strong'])
+    <p><strong>Strong</strong> still works.</p>
+    >>> print markdown.markdown('__this__works__too__.',
+    ...                         extensions=['smart_strong'])
+    <p><strong>this__works__too</strong>.</p>
+
+Copyright 2011
+[Waylan Limberg](http://achinghead.com)
+
+'''
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+from .
import Extension +from ..inlinepatterns import SimpleTagPattern + +SMART_STRONG_RE = r'(?<!\w)(_{2})(?!_)(.+?)(?<!_)\2(?!\w)' +STRONG_RE = r'(\*{2})(.+?)\2' + +class SmartEmphasisExtension(Extension): + """ Add smart_emphasis extension to Markdown class.""" + + def extendMarkdown(self, md, md_globals): + """ Modify inline patterns. """ + md.inlinePatterns['strong'] = SimpleTagPattern(STRONG_RE, 'strong') + md.inlinePatterns.add('strong2', SimpleTagPattern(SMART_STRONG_RE, 'strong'), '>emphasis2') + +def makeExtension(configs={}): + return SmartEmphasisExtension(configs=dict(configs)) diff --git a/awx/lib/site-packages/markdown/extensions/tables.py b/awx/lib/site-packages/markdown/extensions/tables.py new file mode 100644 index 0000000000..ad52ec11c7 --- /dev/null +++ b/awx/lib/site-packages/markdown/extensions/tables.py @@ -0,0 +1,100 @@ +""" +Tables Extension for Python-Markdown +==================================== + +Added parsing of tables to Python-Markdown. + +A simple example: + + First Header | Second Header + ------------- | ------------- + Content Cell | Content Cell + Content Cell | Content Cell + +Copyright 2009 - [Waylan Limberg](http://achinghead.com) +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import Extension +from ..blockprocessors import BlockProcessor +from ..util import etree + +class TableProcessor(BlockProcessor): + """ Process Tables. """ + + def test(self, parent, block): + rows = block.split('\n') + return (len(rows) > 2 and '|' in rows[0] and + '|' in rows[1] and '-' in rows[1] and + rows[1].strip()[0] in ['|', ':', '-']) + + def run(self, parent, blocks): + """ Parse a table block and build table. """ + block = blocks.pop(0).split('\n') + header = block[0].strip() + seperator = block[1].strip() + rows = block[2:] + # Get format type (bordered by pipes or not) + border = False + if header.startswith('|'): + border = True + # Get alignment of columns + align = [] + for c in self._split_row(seperator, border): + if c.startswith(':') and c.endswith(':'): + align.append('center') + elif c.startswith(':'): + align.append('left') + elif c.endswith(':'): + align.append('right') + else: + align.append(None) + # Build table + table = etree.SubElement(parent, 'table') + thead = etree.SubElement(table, 'thead') + self._build_row(header, thead, align, border) + tbody = etree.SubElement(table, 'tbody') + for row in rows: + self._build_row(row.strip(), tbody, align, border) + + def _build_row(self, row, parent, align, border): + """ Given a row of text, build table cells. """ + tr = etree.SubElement(parent, 'tr') + tag = 'td' + if parent.tag == 'thead': + tag = 'th' + cells = self._split_row(row, border) + # We use align here rather than cells to ensure every row + # contains the same number of columns. + for i, a in enumerate(align): + c = etree.SubElement(tr, tag) + try: + c.text = cells[i].strip() + except IndexError: + c.text = "" + if a: + c.set('align', a) + + def _split_row(self, row, border): + """ split a row of text into list of cells. """ + if border: + if row.startswith('|'): + row = row[1:] + if row.endswith('|'): + row = row[:-1] + return row.split('|') + + +class TableExtension(Extension): + """ Add tables to Markdown. """ + + def extendMarkdown(self, md, md_globals): + """ Add an instance of TableProcessor to BlockParser. 
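+        A rough usage sketch (illustrative; not part of the upstream
+        docstring):
+
+            >>> import markdown
+            >>> text = '''
+            ... First Header  | Second Header
+            ... ------------- | -------------
+            ... Content Cell  | Content Cell
+            ... '''
+            >>> html = markdown.markdown(text, extensions=['tables'])
+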
""" + md.parser.blockprocessors.add('table', + TableProcessor(md.parser), + '<hashheader') + + +def makeExtension(configs={}): + return TableExtension(configs=configs) diff --git a/awx/lib/site-packages/markdown/extensions/toc.py b/awx/lib/site-packages/markdown/extensions/toc.py new file mode 100644 index 0000000000..73b0844517 --- /dev/null +++ b/awx/lib/site-packages/markdown/extensions/toc.py @@ -0,0 +1,221 @@ +""" +Table of Contents Extension for Python-Markdown +* * * + +(c) 2008 [Jack Miller](http://codezen.org) + +Dependencies: +* [Markdown 2.1+](http://packages.python.org/Markdown/) + +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import Extension +from ..treeprocessors import Treeprocessor +from ..util import etree +from .headerid import slugify, unique, itertext +import re + + +def order_toc_list(toc_list): + """Given an unsorted list with errors and skips, return a nested one. + [{'level': 1}, {'level': 2}] + => + [{'level': 1, 'children': [{'level': 2, 'children': []}]}] + + A wrong list is also converted: + [{'level': 2}, {'level': 1}] + => + [{'level': 2, 'children': []}, {'level': 1, 'children': []}] + """ + + def build_correct(remaining_list, prev_elements=[{'level': 1000}]): + + if not remaining_list: + return [], [] + + current = remaining_list.pop(0) + if not 'children' in current.keys(): + current['children'] = [] + + if not prev_elements: + # This happens for instance with [8, 1, 1], ie. when some + # header level is outside a scope. We treat it as a + # top-level + next_elements, children = build_correct(remaining_list, [current]) + current['children'].append(children) + return [current] + next_elements, [] + + prev_element = prev_elements.pop() + children = [] + next_elements = [] + # Is current part of the child list or next list? 
+ if current['level'] > prev_element['level']: + #print "%d is a child of %d" % (current['level'], prev_element['level']) + prev_elements.append(prev_element) + prev_elements.append(current) + prev_element['children'].append(current) + next_elements2, children2 = build_correct(remaining_list, prev_elements) + children += children2 + next_elements += next_elements2 + else: + #print "%d is ancestor of %d" % (current['level'], prev_element['level']) + if not prev_elements: + #print "No previous elements, so appending to the next set" + next_elements.append(current) + prev_elements = [current] + next_elements2, children2 = build_correct(remaining_list, prev_elements) + current['children'].extend(children2) + else: + #print "Previous elements, comparing to those first" + remaining_list.insert(0, current) + next_elements2, children2 = build_correct(remaining_list, prev_elements) + children.extend(children2) + next_elements += next_elements2 + + return next_elements, children + + ordered_list, __ = build_correct(toc_list) + return ordered_list + + +class TocTreeprocessor(Treeprocessor): + + # Iterator wrapper to get parent and child all at once + def iterparent(self, root): + for parent in root.getiterator(): + for child in parent: + yield parent, child + + def add_anchor(self, c, elem_id): #@ReservedAssignment + if self.use_anchors: + anchor = etree.Element("a") + anchor.text = c.text + anchor.attrib["href"] = "#" + elem_id + anchor.attrib["class"] = "toclink" + c.text = "" + for elem in c.getchildren(): + anchor.append(elem) + c.remove(elem) + c.append(anchor) + + def build_toc_etree(self, div, toc_list): + # Add title to the div + if self.config["title"]: + header = etree.SubElement(div, "span") + header.attrib["class"] = "toctitle" + header.text = self.config["title"] + + def build_etree_ul(toc_list, parent): + ul = etree.SubElement(parent, "ul") + for item in toc_list: + # List item link, to be inserted into the toc div + li = etree.SubElement(ul, "li") + link = etree.SubElement(li, "a") + link.text = item.get('name', '') + link.attrib["href"] = '#' + item.get('id', '') + if item['children']: + build_etree_ul(item['children'], li) + return ul + + return build_etree_ul(toc_list, div) + + def run(self, doc): + + div = etree.Element("div") + div.attrib["class"] = "toc" + header_rgx = re.compile("[Hh][123456]") + + self.use_anchors = self.config["anchorlink"] in [1, '1', True, 'True', 'true'] + + # Get a list of id attributes + used_ids = set() + for c in doc.getiterator(): + if "id" in c.attrib: + used_ids.add(c.attrib["id"]) + + toc_list = [] + marker_found = False + for (p, c) in self.iterparent(doc): + text = ''.join(itertext(c)).strip() + if not text: + continue + + # To keep the output from screwing up the + # validation by putting a <div> inside of a <p> + # we actually replace the <p> in its entirety. + # We do not allow the marker inside a header as that + # would causes an enless loop of placing a new TOC + # inside previously generated TOC. 
+ if c.text and c.text.strip() == self.config["marker"] and \ + not header_rgx.match(c.tag) and c.tag not in ['pre', 'code']: + for i in range(len(p)): + if p[i] == c: + p[i] = div + break + marker_found = True + + if header_rgx.match(c.tag): + + # Do not override pre-existing ids + if not "id" in c.attrib: + elem_id = unique(self.config["slugify"](text, '-'), used_ids) + c.attrib["id"] = elem_id + else: + elem_id = c.attrib["id"] + + tag_level = int(c.tag[-1]) + + toc_list.append({'level': tag_level, + 'id': elem_id, + 'name': text}) + + self.add_anchor(c, elem_id) + + toc_list_nested = order_toc_list(toc_list) + self.build_toc_etree(div, toc_list_nested) + prettify = self.markdown.treeprocessors.get('prettify') + if prettify: prettify.run(div) + if not marker_found: + # serialize and attach to markdown instance. + toc = self.markdown.serializer(div) + for pp in self.markdown.postprocessors.values(): + toc = pp.run(toc) + self.markdown.toc = toc + + +class TocExtension(Extension): + + TreeProcessorClass = TocTreeprocessor + + def __init__(self, configs=[]): + self.config = { "marker" : ["[TOC]", + "Text to find and replace with Table of Contents -" + "Defaults to \"[TOC]\""], + "slugify" : [slugify, + "Function to generate anchors based on header text-" + "Defaults to the headerid ext's slugify function."], + "title" : [None, + "Title to insert into TOC <div> - " + "Defaults to None"], + "anchorlink" : [0, + "1 if header should be a self link" + "Defaults to 0"]} + + for key, value in configs: + self.setConfig(key, value) + + def extendMarkdown(self, md, md_globals): + tocext = self.TreeProcessorClass(md) + tocext.config = self.getConfigs() + # Headerid ext is set to '>prettify'. With this set to '_end', + # it should always come after headerid ext (and honor ids assinged + # by the header id extension) if both are used. Same goes for + # attr_list extension. This must come last because we don't want + # to redefine ids after toc is created. But we do want toc prettified. + md.treeprocessors.add("toc", tocext, "_end") + + +def makeExtension(configs={}): + return TocExtension(configs=configs) diff --git a/awx/lib/site-packages/markdown/extensions/wikilinks.py b/awx/lib/site-packages/markdown/extensions/wikilinks.py new file mode 100644 index 0000000000..877890b8ab --- /dev/null +++ b/awx/lib/site-packages/markdown/extensions/wikilinks.py @@ -0,0 +1,151 @@ +''' +WikiLinks Extension for Python-Markdown +====================================== + +Converts [[WikiLinks]] to relative links. Requires Python-Markdown 2.0+ + +Basic usage: + + >>> import markdown + >>> text = "Some text with a [[WikiLink]]." + >>> html = markdown.markdown(text, ['wikilinks']) + >>> print html + <p>Some text with a <a class="wikilink" href="/WikiLink/">WikiLink</a>.</p> + +Whitespace behavior: + + >>> print markdown.markdown('[[ foo bar_baz ]]', ['wikilinks']) + <p><a class="wikilink" href="/foo_bar_baz/">foo bar_baz</a></p> + >>> print markdown.markdown('foo [[ ]] bar', ['wikilinks']) + <p>foo bar</p> + +To define custom settings the simple way: + + >>> print markdown.markdown(text, + ... ['wikilinks(base_url=/wiki/,end_url=.html,html_class=foo)'] + ... ) + <p>Some text with a <a class="foo" href="/wiki/WikiLink.html">WikiLink</a>.</p> + +Custom settings the complex way: + + >>> md = markdown.Markdown( + ... extensions = ['wikilinks'], + ... extension_configs = {'wikilinks': [ + ... ('base_url', 'http://example.com/'), + ... ('end_url', '.html'), + ... ('html_class', '') ]}, + ... 
safe_mode = True) + >>> print md.convert(text) + <p>Some text with a <a href="http://example.com/WikiLink.html">WikiLink</a>.</p> + +Use MetaData with mdx_meta.py (Note the blank html_class in MetaData): + + >>> text = """wiki_base_url: http://example.com/ + ... wiki_end_url: .html + ... wiki_html_class: + ... + ... Some text with a [[WikiLink]].""" + >>> md = markdown.Markdown(extensions=['meta', 'wikilinks']) + >>> print md.convert(text) + <p>Some text with a <a href="http://example.com/WikiLink.html">WikiLink</a>.</p> + +MetaData should not carry over to next document: + + >>> print md.convert("No [[MetaData]] here.") + <p>No <a class="wikilink" href="/MetaData/">MetaData</a> here.</p> + +Define a custom URL builder: + + >>> def my_url_builder(label, base, end): + ... return '/bar/' + >>> md = markdown.Markdown(extensions=['wikilinks'], + ... extension_configs={'wikilinks' : [('build_url', my_url_builder)]}) + >>> print md.convert('[[foo]]') + <p><a class="wikilink" href="/bar/">foo</a></p> + +From the command line: + + python markdown.py -x wikilinks(base_url=http://example.com/,end_url=.html,html_class=foo) src.txt + +By [Waylan Limberg](http://achinghead.com/). + +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + +Dependencies: +* [Python 2.3+](http://python.org) +* [Markdown 2.0+](http://packages.python.org/Markdown/) +''' + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import Extension +from ..inlinepatterns import Pattern +from ..util import etree +import re + +def build_url(label, base, end): + """ Build a url from the label, a base, and an end. """ + clean_label = re.sub(r'([ ]+_)|(_[ ]+)|([ ]+)', '_', label) + return '%s%s%s'% (base, clean_label, end) + + +class WikiLinkExtension(Extension): + def __init__(self, configs): + # set extension defaults + self.config = { + 'base_url' : ['/', 'String to append to beginning or URL.'], + 'end_url' : ['/', 'String to append to end of URL.'], + 'html_class' : ['wikilink', 'CSS hook. Leave blank for none.'], + 'build_url' : [build_url, 'Callable formats URL from label.'], + } + + # Override defaults with user settings + for key, value in configs : + self.setConfig(key, value) + + def extendMarkdown(self, md, md_globals): + self.md = md + + # append to end of inline patterns + WIKILINK_RE = r'\[\[([\w0-9_ -]+)\]\]' + wikilinkPattern = WikiLinks(WIKILINK_RE, self.getConfigs()) + wikilinkPattern.md = md + md.inlinePatterns.add('wikilink', wikilinkPattern, "<not_strong") + + +class WikiLinks(Pattern): + def __init__(self, pattern, config): + super(WikiLinks, self).__init__(pattern) + self.config = config + + def handleMatch(self, m): + if m.group(2).strip(): + base_url, end_url, html_class = self._getMeta() + label = m.group(2).strip() + url = self.config['build_url'](label, base_url, end_url) + a = etree.Element('a') + a.text = label + a.set('href', url) + if html_class: + a.set('class', html_class) + else: + a = '' + return a + + def _getMeta(self): + """ Return meta data or config data. 
""" + base_url = self.config['base_url'] + end_url = self.config['end_url'] + html_class = self.config['html_class'] + if hasattr(self.md, 'Meta'): + if 'wiki_base_url' in self.md.Meta: + base_url = self.md.Meta['wiki_base_url'][0] + if 'wiki_end_url' in self.md.Meta: + end_url = self.md.Meta['wiki_end_url'][0] + if 'wiki_html_class' in self.md.Meta: + html_class = self.md.Meta['wiki_html_class'][0] + return base_url, end_url, html_class + + +def makeExtension(configs=None) : + return WikiLinkExtension(configs=configs) diff --git a/awx/lib/site-packages/markdown/inlinepatterns.py b/awx/lib/site-packages/markdown/inlinepatterns.py new file mode 100644 index 0000000000..de957ef480 --- /dev/null +++ b/awx/lib/site-packages/markdown/inlinepatterns.py @@ -0,0 +1,483 @@ +""" +INLINE PATTERNS +============================================================================= + +Inline patterns such as *emphasis* are handled by means of auxiliary +objects, one per pattern. Pattern objects must be instances of classes +that extend markdown.Pattern. Each pattern object uses a single regular +expression and needs support the following methods: + + pattern.getCompiledRegExp() # returns a regular expression + + pattern.handleMatch(m) # takes a match object and returns + # an ElementTree element or just plain text + +All of python markdown's built-in patterns subclass from Pattern, +but you can add additional patterns that don't. + +Also note that all the regular expressions used by inline must +capture the whole block. For this reason, they all start with +'^(.*)' and end with '(.*)!'. In case with built-in expression +Pattern takes care of adding the "^(.*)" and "(.*)!". + +Finally, the order in which regular expressions are applied is very +important - e.g. if we first replace http://.../ links with <a> tags +and _then_ try to replace inline html, we would end up with a mess. +So, we apply the expressions in the following order: + +* escape and backticks have to go before everything else, so + that we can preempt any markdown patterns by escaping them. + +* then we handle auto-links (must be done before inline html) + +* then we handle inline HTML. At this point we will simply + replace all inline HTML strings with a placeholder and add + the actual HTML to a hash. + +* then inline images (must be done before links) + +* then bracketed links, first regular then reference-style + +* finally we apply strong and emphasis +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import util +from . import odict +import re +try: + from urllib.parse import urlparse, urlunparse +except ImportError: + from urlparse import urlparse, urlunparse +try: + from html import entities +except ImportError: + import htmlentitydefs as entities + + +def build_inlinepatterns(md_instance, **kwargs): + """ Build the default set of inline patterns for Markdown. 
""" + inlinePatterns = odict.OrderedDict() + inlinePatterns["backtick"] = BacktickPattern(BACKTICK_RE) + inlinePatterns["escape"] = EscapePattern(ESCAPE_RE, md_instance) + inlinePatterns["reference"] = ReferencePattern(REFERENCE_RE, md_instance) + inlinePatterns["link"] = LinkPattern(LINK_RE, md_instance) + inlinePatterns["image_link"] = ImagePattern(IMAGE_LINK_RE, md_instance) + inlinePatterns["image_reference"] = \ + ImageReferencePattern(IMAGE_REFERENCE_RE, md_instance) + inlinePatterns["short_reference"] = \ + ReferencePattern(SHORT_REF_RE, md_instance) + inlinePatterns["autolink"] = AutolinkPattern(AUTOLINK_RE, md_instance) + inlinePatterns["automail"] = AutomailPattern(AUTOMAIL_RE, md_instance) + inlinePatterns["linebreak"] = SubstituteTagPattern(LINE_BREAK_RE, 'br') + if md_instance.safeMode != 'escape': + inlinePatterns["html"] = HtmlPattern(HTML_RE, md_instance) + inlinePatterns["entity"] = HtmlPattern(ENTITY_RE, md_instance) + inlinePatterns["not_strong"] = SimpleTextPattern(NOT_STRONG_RE) + inlinePatterns["strong_em"] = DoubleTagPattern(STRONG_EM_RE, 'strong,em') + inlinePatterns["strong"] = SimpleTagPattern(STRONG_RE, 'strong') + inlinePatterns["emphasis"] = SimpleTagPattern(EMPHASIS_RE, 'em') + if md_instance.smart_emphasis: + inlinePatterns["emphasis2"] = SimpleTagPattern(SMART_EMPHASIS_RE, 'em') + else: + inlinePatterns["emphasis2"] = SimpleTagPattern(EMPHASIS_2_RE, 'em') + return inlinePatterns + +""" +The actual regular expressions for patterns +----------------------------------------------------------------------------- +""" + +NOBRACKET = r'[^\]\[]*' +BRK = ( r'\[(' + + (NOBRACKET + r'(\[')*6 + + (NOBRACKET+ r'\])*')*6 + + NOBRACKET + r')\]' ) +NOIMG = r'(?<!\!)' + +BACKTICK_RE = r'(?<!\\)(`+)(.+?)(?<!`)\2(?!`)' # `e=f()` or ``e=f("`")`` +ESCAPE_RE = r'\\(.)' # \< +EMPHASIS_RE = r'(\*)([^\*]+)\2' # *emphasis* +STRONG_RE = r'(\*{2}|_{2})(.+?)\2' # **strong** +STRONG_EM_RE = r'(\*{3}|_{3})(.+?)\2' # ***strong*** +SMART_EMPHASIS_RE = r'(?<!\w)(_)(?!_)(.+?)(?<!_)\2(?!\w)' # _smart_emphasis_ +EMPHASIS_2_RE = r'(_)(.+?)\2' # _emphasis_ +LINK_RE = NOIMG + BRK + \ +r'''\(\s*(<.*?>|((?:(?:\(.*?\))|[^\(\)]))*?)\s*((['"])(.*?)\12\s*)?\)''' +# [text](url) or [text](<url>) or [text](url "title") + +IMAGE_LINK_RE = r'\!' + BRK + r'\s*\((<.*?>|([^\)]*))\)' +# ![alttxt](http://x.com/) or ![alttxt](<http://x.com/>) +REFERENCE_RE = NOIMG + BRK+ r'\s?\[([^\]]*)\]' # [Google][3] +SHORT_REF_RE = NOIMG + r'\[([^\]]+)\]' # [Google] +IMAGE_REFERENCE_RE = r'\!' 
+ BRK + '\s?\[([^\]]*)\]' # ![alt text][2] +NOT_STRONG_RE = r'((^| )(\*|_)( |$))' # stand-alone * or _ +AUTOLINK_RE = r'<((?:[Ff]|[Hh][Tt])[Tt][Pp][Ss]?://[^>]*)>' # <http://www.123.com> +AUTOMAIL_RE = r'<([^> \!]*@[^> ]*)>' # <me@example.com> + +HTML_RE = r'(\<([a-zA-Z/][^\>]*?|\!--.*?--)\>)' # <...> +ENTITY_RE = r'(&[\#a-zA-Z0-9]*;)' # & +LINE_BREAK_RE = r' \n' # two spaces at end of line + + +def dequote(string): + """Remove quotes from around a string.""" + if ( ( string.startswith('"') and string.endswith('"')) + or (string.startswith("'") and string.endswith("'")) ): + return string[1:-1] + else: + return string + +ATTR_RE = re.compile("\{@([^\}]*)=([^\}]*)}") # {@id=123} + +def handleAttributes(text, parent): + """Set values of an element based on attribute definitions ({@id=123}).""" + def attributeCallback(match): + parent.set(match.group(1), match.group(2).replace('\n', ' ')) + return ATTR_RE.sub(attributeCallback, text) + + +""" +The pattern classes +----------------------------------------------------------------------------- +""" + +class Pattern(object): + """Base class that inline patterns subclass. """ + + def __init__(self, pattern, markdown_instance=None): + """ + Create an instant of an inline pattern. + + Keyword arguments: + + * pattern: A regular expression that matches a pattern + + """ + self.pattern = pattern + self.compiled_re = re.compile("^(.*?)%s(.*?)$" % pattern, + re.DOTALL | re.UNICODE) + + # Api for Markdown to pass safe_mode into instance + self.safe_mode = False + if markdown_instance: + self.markdown = markdown_instance + + def getCompiledRegExp(self): + """ Return a compiled regular expression. """ + return self.compiled_re + + def handleMatch(self, m): + """Return a ElementTree element from the given match. + + Subclasses should override this method. + + Keyword arguments: + + * m: A re match object containing a match of the pattern. + + """ + pass + + def type(self): + """ Return class name, to define pattern type """ + return self.__class__.__name__ + + def unescape(self, text): + """ Return unescaped text given text with an inline placeholder. """ + try: + stash = self.markdown.treeprocessors['inline'].stashed_nodes + except KeyError: + return text + def itertext(el): + ' Reimplement Element.itertext for older python versions ' + tag = el.tag + if not isinstance(tag, util.string_type) and tag is not None: + return + if el.text: + yield el.text + for e in el: + for s in itertext(e): + yield s + if e.tail: + yield e.tail + def get_stash(m): + id = m.group(1) + if id in stash: + value = stash.get(id) + if isinstance(value, util.string_type): + return value + else: + # An etree Element - return text content only + return ''.join(itertext(value)) + return util.INLINE_PLACEHOLDER_RE.sub(get_stash, text) + + +class SimpleTextPattern(Pattern): + """ Return a simple text of group(2) of a Pattern. """ + def handleMatch(self, m): + text = m.group(2) + if text == util.INLINE_PLACEHOLDER_PREFIX: + return None + return text + + +class EscapePattern(Pattern): + """ Return an escaped character. """ + + def handleMatch(self, m): + char = m.group(2) + if char in self.markdown.ESCAPED_CHARS: + return '%s%s%s' % (util.STX, ord(char), util.ETX) + else: + return '\\%s' % char + + +class SimpleTagPattern(Pattern): + """ + Return element of type `tag` with a text attribute of group(3) + of a Pattern. 
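+
+    For example, the default strong handler built above is simply:
+
+        SimpleTagPattern(STRONG_RE, 'strong')   # wraps **text** / __text__
+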
+ + """ + def __init__ (self, pattern, tag): + Pattern.__init__(self, pattern) + self.tag = tag + + def handleMatch(self, m): + el = util.etree.Element(self.tag) + el.text = m.group(3) + return el + + +class SubstituteTagPattern(SimpleTagPattern): + """ Return an element of type `tag` with no children. """ + def handleMatch (self, m): + return util.etree.Element(self.tag) + + +class BacktickPattern(Pattern): + """ Return a `<code>` element containing the matching text. """ + def __init__ (self, pattern): + Pattern.__init__(self, pattern) + self.tag = "code" + + def handleMatch(self, m): + el = util.etree.Element(self.tag) + el.text = util.AtomicString(m.group(3).strip()) + return el + + +class DoubleTagPattern(SimpleTagPattern): + """Return a ElementTree element nested in tag2 nested in tag1. + + Useful for strong emphasis etc. + + """ + def handleMatch(self, m): + tag1, tag2 = self.tag.split(",") + el1 = util.etree.Element(tag1) + el2 = util.etree.SubElement(el1, tag2) + el2.text = m.group(3) + return el1 + + +class HtmlPattern(Pattern): + """ Store raw inline html and return a placeholder. """ + def handleMatch (self, m): + rawhtml = self.unescape(m.group(2)) + place_holder = self.markdown.htmlStash.store(rawhtml) + return place_holder + + def unescape(self, text): + """ Return unescaped text given text with an inline placeholder. """ + try: + stash = self.markdown.treeprocessors['inline'].stashed_nodes + except KeyError: + return text + def get_stash(m): + id = m.group(1) + value = stash.get(id) + if value is not None: + try: + return self.markdown.serializer(value) + except: + return '\%s' % value + + return util.INLINE_PLACEHOLDER_RE.sub(get_stash, text) + + +class LinkPattern(Pattern): + """ Return a link element from the given match. """ + def handleMatch(self, m): + el = util.etree.Element("a") + el.text = m.group(2) + title = m.group(13) + href = m.group(9) + + if href: + if href[0] == "<": + href = href[1:-1] + el.set("href", self.sanitize_url(self.unescape(href.strip()))) + else: + el.set("href", "") + + if title: + title = dequote(self.unescape(title)) + el.set("title", title) + return el + + def sanitize_url(self, url): + """ + Sanitize a url against xss attacks in "safe_mode". + + Rather than specifically blacklisting `javascript:alert("XSS")` and all + its aliases (see <http://ha.ckers.org/xss.html>), we whitelist known + safe url formats. Most urls contain a network location, however some + are known not to (i.e.: mailto links). Script urls do not contain a + location. Additionally, for `javascript:...`, the scheme would be + "javascript" but some aliases will appear to `urlparse()` to have no + scheme. On top of that relative links (i.e.: "foo/bar.html") have no + scheme. Therefore we must check "path", "parameters", "query" and + "fragment" for any literal colons. We don't check "scheme" for colons + because it *should* never have any and "netloc" must allow the form: + `username:password@host:port`. + + """ + url = url.replace(' ', '%20') + if not self.markdown.safeMode: + # Return immediately bipassing parsing. + return url + + try: + scheme, netloc, path, params, query, fragment = url = urlparse(url) + except ValueError: + # Bad url - so bad it couldn't be parsed. + return '' + + locless_schemes = ['', 'mailto', 'news'] + allowed_schemes = locless_schemes + ['http', 'https', 'ftp', 'ftps'] + if scheme not in allowed_schemes: + # Not a known (allowed) scheme. Not safe. + return '' + + if netloc == '' and scheme not in locless_schemes: + # This should not happen. 
Treat as suspect. + return '' + + for part in url[2:]: + if ":" in part: + # A colon in "path", "parameters", "query" or "fragment" is suspect. + return '' + + # Url passes all tests. Return url as-is. + return urlunparse(url) + +class ImagePattern(LinkPattern): + """ Return a img element from the given match. """ + def handleMatch(self, m): + el = util.etree.Element("img") + src_parts = m.group(9).split() + if src_parts: + src = src_parts[0] + if src[0] == "<" and src[-1] == ">": + src = src[1:-1] + el.set('src', self.sanitize_url(self.unescape(src))) + else: + el.set('src', "") + if len(src_parts) > 1: + el.set('title', dequote(self.unescape(" ".join(src_parts[1:])))) + + if self.markdown.enable_attributes: + truealt = handleAttributes(m.group(2), el) + else: + truealt = m.group(2) + + el.set('alt', self.unescape(truealt)) + return el + +class ReferencePattern(LinkPattern): + """ Match to a stored reference and return link element. """ + + NEWLINE_CLEANUP_RE = re.compile(r'[ ]?\n', re.MULTILINE) + + def handleMatch(self, m): + try: + id = m.group(9).lower() + except IndexError: + id = None + if not id: + # if we got something like "[Google][]" or "[Goggle]" + # we'll use "google" as the id + id = m.group(2).lower() + + # Clean up linebreaks in id + id = self.NEWLINE_CLEANUP_RE.sub(' ', id) + if not id in self.markdown.references: # ignore undefined refs + return None + href, title = self.markdown.references[id] + + text = m.group(2) + return self.makeTag(href, title, text) + + def makeTag(self, href, title, text): + el = util.etree.Element('a') + + el.set('href', self.sanitize_url(href)) + if title: + el.set('title', title) + + el.text = text + return el + + +class ImageReferencePattern(ReferencePattern): + """ Match to a stored reference and return img element. """ + def makeTag(self, href, title, text): + el = util.etree.Element("img") + el.set("src", self.sanitize_url(href)) + if title: + el.set("title", title) + + if self.markdown.enable_attributes: + text = handleAttributes(text, el) + + el.set("alt", self.unescape(text)) + return el + + +class AutolinkPattern(Pattern): + """ Return a link Element given an autolink (`<http://example/com>`). """ + def handleMatch(self, m): + el = util.etree.Element("a") + el.set('href', self.unescape(m.group(2))) + el.text = util.AtomicString(m.group(2)) + return el + +class AutomailPattern(Pattern): + """ + Return a mailto link Element given an automail link (`<foo@example.com>`). + """ + def handleMatch(self, m): + el = util.etree.Element('a') + email = self.unescape(m.group(2)) + if email.startswith("mailto:"): + email = email[len("mailto:"):] + + def codepoint2name(code): + """Return entity definition by code, or the code if not defined.""" + entity = entities.codepoint2name.get(code) + if entity: + return "%s%s;" % (util.AMP_SUBSTITUTE, entity) + else: + return "%s#%d;" % (util.AMP_SUBSTITUTE, code) + + letters = [codepoint2name(ord(letter)) for letter in email] + el.text = util.AtomicString(''.join(letters)) + + mailto = "mailto:" + email + mailto = "".join([util.AMP_SUBSTITUTE + '#%d;' % + ord(letter) for letter in mailto]) + el.set('href', mailto) + return el + diff --git a/awx/lib/site-packages/markdown/odict.py b/awx/lib/site-packages/markdown/odict.py new file mode 100644 index 0000000000..8089ece21a --- /dev/null +++ b/awx/lib/site-packages/markdown/odict.py @@ -0,0 +1,194 @@ +from __future__ import unicode_literals +from __future__ import absolute_import +from . 
import util
+
+from copy import deepcopy
+
+# Dict method name used by iteritems_compat() below (as in the `six`
+# module the helper was copied from); without it the helper raises
+# NameError.
+if util.PY3:
+    _iteritems = "items"
+else:
+    _iteritems = "iteritems"
+
+def iteritems_compat(d):
+    """Return an iterator over the (key, value) pairs of a dictionary.
+    Copied from `six` module."""
+    return iter(getattr(d, _iteritems)())
+
+class OrderedDict(dict):
+    """
+    A dictionary that keeps its keys in the order in which they're inserted.
+
+    Copied from Django's SortedDict with some modifications.
+
+    """
+    def __new__(cls, *args, **kwargs):
+        instance = super(OrderedDict, cls).__new__(cls, *args, **kwargs)
+        instance.keyOrder = []
+        return instance
+
+    def __init__(self, data=None):
+        if data is None or isinstance(data, dict):
+            data = data or []
+            super(OrderedDict, self).__init__(data)
+            self.keyOrder = list(data) if data else []
+        else:
+            super(OrderedDict, self).__init__()
+            super_set = super(OrderedDict, self).__setitem__
+            for key, value in data:
+                # Take the ordering from first key
+                if key not in self:
+                    self.keyOrder.append(key)
+                # But override with last value in data (dict() does this)
+                super_set(key, value)
+
+    def __deepcopy__(self, memo):
+        return self.__class__([(key, deepcopy(value, memo))
+                               for key, value in self.items()])
+
+    def __copy__(self):
+        # Python's default copy implementation will alter the state
+        # of self. The reason for this seems complex but is likely related to
+        # subclassing dict.
+        return self.copy()
+
+    def __setitem__(self, key, value):
+        if key not in self:
+            self.keyOrder.append(key)
+        super(OrderedDict, self).__setitem__(key, value)
+
+    def __delitem__(self, key):
+        super(OrderedDict, self).__delitem__(key)
+        self.keyOrder.remove(key)
+
+    def __iter__(self):
+        return iter(self.keyOrder)
+
+    def __reversed__(self):
+        return reversed(self.keyOrder)
+
+    def pop(self, k, *args):
+        result = super(OrderedDict, self).pop(k, *args)
+        try:
+            self.keyOrder.remove(k)
+        except ValueError:
+            # Key wasn't in the dictionary in the first place. No problem.
+            pass
+        return result
+
+    def popitem(self):
+        result = super(OrderedDict, self).popitem()
+        self.keyOrder.remove(result[0])
+        return result
+
+    def _iteritems(self):
+        for key in self.keyOrder:
+            yield key, self[key]
+
+    def _iterkeys(self):
+        for key in self.keyOrder:
+            yield key
+
+    def _itervalues(self):
+        for key in self.keyOrder:
+            yield self[key]
+
+    if util.PY3:
+        items = _iteritems
+        keys = _iterkeys
+        values = _itervalues
+    else:
+        iteritems = _iteritems
+        iterkeys = _iterkeys
+        itervalues = _itervalues
+
+        def items(self):
+            return [(k, self[k]) for k in self.keyOrder]
+
+        def keys(self):
+            return self.keyOrder[:]
+
+        def values(self):
+            return [self[k] for k in self.keyOrder]
+
+    def update(self, dict_):
+        for k, v in iteritems_compat(dict_):
+            self[k] = v
+
+    def setdefault(self, key, default):
+        if key not in self:
+            self.keyOrder.append(key)
+        return super(OrderedDict, self).setdefault(key, default)
+
+    def value_for_index(self, index):
+        """Returns the value of the item at the given zero-based index."""
+        return self[self.keyOrder[index]]
+
+    def insert(self, index, key, value):
+        """Inserts the key, value pair before the item with the given index."""
+        if key in self.keyOrder:
+            n = self.keyOrder.index(key)
+            del self.keyOrder[n]
+            if n < index:
+                index -= 1
+        self.keyOrder.insert(index, key)
+        super(OrderedDict, self).__setitem__(key, value)
+
+    def copy(self):
+        """Returns a copy of this object."""
+        # This way of initializing the copy means it works for subclasses, too.
+ return self.__class__(self) + + def __repr__(self): + """ + Replaces the normal dict.__repr__ with a version that returns the keys + in their Ordered order. + """ + return '{%s}' % ', '.join(['%r: %r' % (k, v) for k, v in iteritems_compat(self)]) + + def clear(self): + super(OrderedDict, self).clear() + self.keyOrder = [] + + def index(self, key): + """ Return the index of a given key. """ + try: + return self.keyOrder.index(key) + except ValueError: + raise ValueError("Element '%s' was not found in OrderedDict" % key) + + def index_for_location(self, location): + """ Return index or None for a given location. """ + if location == '_begin': + i = 0 + elif location == '_end': + i = None + elif location.startswith('<') or location.startswith('>'): + i = self.index(location[1:]) + if location.startswith('>'): + if i >= len(self): + # last item + i = None + else: + i += 1 + else: + raise ValueError('Not a valid location: "%s". Location key ' + 'must start with a ">" or "<".' % location) + return i + + def add(self, key, value, location): + """ Insert by key location. """ + i = self.index_for_location(location) + if i is not None: + self.insert(i, key, value) + else: + self.__setitem__(key, value) + + def link(self, key, location): + """ Change location of an existing item. """ + n = self.keyOrder.index(key) + del self.keyOrder[n] + try: + i = self.index_for_location(location) + if i is not None: + self.keyOrder.insert(i, key) + else: + self.keyOrder.append(key) + except Exception as e: + # restore to prevent data loss and reraise + self.keyOrder.insert(n, key) + raise e diff --git a/awx/lib/site-packages/markdown/postprocessors.py b/awx/lib/site-packages/markdown/postprocessors.py new file mode 100644 index 0000000000..5f3f032c15 --- /dev/null +++ b/awx/lib/site-packages/markdown/postprocessors.py @@ -0,0 +1,104 @@ +""" +POST-PROCESSORS +============================================================================= + +Markdown also allows post-processors, which are similar to preprocessors in +that they need to implement a "run" method. However, they are run after core +processing. + +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import util +from . import odict +import re + + +def build_postprocessors(md_instance, **kwargs): + """ Build the default postprocessors for Markdown. """ + postprocessors = odict.OrderedDict() + postprocessors["raw_html"] = RawHtmlPostprocessor(md_instance) + postprocessors["amp_substitute"] = AndSubstitutePostprocessor() + postprocessors["unescape"] = UnescapePostprocessor() + return postprocessors + + +class Postprocessor(util.Processor): + """ + Postprocessors are run after the ElementTree it converted back into text. + + Each Postprocessor implements a "run" method that takes a pointer to a + text string, modifies it as necessary and returns a text string. + + Postprocessors must extend markdown.Postprocessor. + + """ + + def run(self, text): + """ + Subclasses of Postprocessor should implement a `run` method, which + takes the html document as a single text string and returns a + (possibly modified) string. + + """ + pass + + +class RawHtmlPostprocessor(Postprocessor): + """ Restore raw html to the document. """ + + def run(self, text): + """ Iterate over html stash and restore "safe" html. 
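[review note] The '_begin'/'_end' and '<name'/'>name' location keys accepted by add() and link() above read best from a usage sketch, assuming the vendored copy is importable as markdown.odict:

    from markdown.odict import OrderedDict   # assumption: vendored package on sys.path

    d = OrderedDict()
    d['inline'] = 1
    d['prettify'] = 2
    d.add('first', 0, '_begin')      # insert at the front
    d.add('middle', 0, '>inline')    # insert just after 'inline'
    d.link('prettify', '<inline')    # move an existing key before 'inline'
    assert list(d) == ['first', 'prettify', 'inline', 'middle']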
""" + for i in range(self.markdown.htmlStash.html_counter): + html, safe = self.markdown.htmlStash.rawHtmlBlocks[i] + if self.markdown.safeMode and not safe: + if str(self.markdown.safeMode).lower() == 'escape': + html = self.escape(html) + elif str(self.markdown.safeMode).lower() == 'remove': + html = '' + else: + html = self.markdown.html_replacement_text + if self.isblocklevel(html) and (safe or not self.markdown.safeMode): + text = text.replace("<p>%s</p>" % + (self.markdown.htmlStash.get_placeholder(i)), + html + "\n") + text = text.replace(self.markdown.htmlStash.get_placeholder(i), + html) + return text + + def escape(self, html): + """ Basic html escaping """ + html = html.replace('&', '&') + html = html.replace('<', '<') + html = html.replace('>', '>') + return html.replace('"', '"') + + def isblocklevel(self, html): + m = re.match(r'^\<\/?([^ >]+)', html) + if m: + if m.group(1)[0] in ('!', '?', '@', '%'): + # Comment, php etc... + return True + return util.isBlockLevel(m.group(1)) + return False + + +class AndSubstitutePostprocessor(Postprocessor): + """ Restore valid entities """ + + def run(self, text): + text = text.replace(util.AMP_SUBSTITUTE, "&") + return text + + +class UnescapePostprocessor(Postprocessor): + """ Restore escaped chars """ + + RE = re.compile('%s(\d+)%s' % (util.STX, util.ETX)) + + def unescape(self, m): + return util.int2str(int(m.group(1))) + + def run(self, text): + return self.RE.sub(self.unescape, text) diff --git a/awx/lib/site-packages/markdown/preprocessors.py b/awx/lib/site-packages/markdown/preprocessors.py new file mode 100644 index 0000000000..72b2ed6f35 --- /dev/null +++ b/awx/lib/site-packages/markdown/preprocessors.py @@ -0,0 +1,298 @@ +""" +PRE-PROCESSORS +============================================================================= + +Preprocessors work on source text before we start doing anything too +complicated. +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import util +from . import odict +import re + + +def build_preprocessors(md_instance, **kwargs): + """ Build the default set of preprocessors used by Markdown. """ + preprocessors = odict.OrderedDict() + preprocessors['normalize_whitespace'] = NormalizeWhitespace(md_instance) + if md_instance.safeMode != 'escape': + preprocessors["html_block"] = HtmlBlockPreprocessor(md_instance) + preprocessors["reference"] = ReferencePreprocessor(md_instance) + return preprocessors + + +class Preprocessor(util.Processor): + """ + Preprocessors are run after the text is broken into lines. + + Each preprocessor implements a "run" method that takes a pointer to a + list of lines of the document, modifies it as necessary and returns + either the same pointer or a pointer to a new list. + + Preprocessors must extend markdown.Preprocessor. + + """ + def run(self, lines): + """ + Each subclass of Preprocessor should override the `run` method, which + takes the document as a list of strings split by newlines and returns + the (possibly modified) list of lines. + + """ + pass + + +class NormalizeWhitespace(Preprocessor): + """ Normalize whitespace for consistant parsing. 
""" + + def run(self, lines): + source = '\n'.join(lines) + source = source.replace(util.STX, "").replace(util.ETX, "") + source = source.replace("\r\n", "\n").replace("\r", "\n") + "\n\n" + source = source.expandtabs(self.markdown.tab_length) + source = re.sub(r'(?<=\n) +\n', '\n', source) + return source.split('\n') + + +class HtmlBlockPreprocessor(Preprocessor): + """Remove html blocks from the text and store them for later retrieval.""" + + right_tag_patterns = ["</%s>", "%s>"] + attrs_pattern = r""" + \s+(?P<attr>[^>"'/= ]+)=(?P<q>['"])(?P<value>.*?)(?P=q) # attr="value" + | # OR + \s+(?P<attr1>[^>"'/= ]+)=(?P<value1>[^> ]+) # attr=value + | # OR + \s+(?P<attr2>[^>"'/= ]+) # attr + """ + left_tag_pattern = r'^\<(?P<tag>[^> ]+)(?P<attrs>(%s)*)\s*\/?\>?' % attrs_pattern + attrs_re = re.compile(attrs_pattern, re.VERBOSE) + left_tag_re = re.compile(left_tag_pattern, re.VERBOSE) + markdown_in_raw = False + + def _get_left_tag(self, block): + m = self.left_tag_re.match(block) + if m: + tag = m.group('tag') + raw_attrs = m.group('attrs') + attrs = {} + if raw_attrs: + for ma in self.attrs_re.finditer(raw_attrs): + if ma.group('attr'): + if ma.group('value'): + attrs[ma.group('attr').strip()] = ma.group('value') + else: + attrs[ma.group('attr').strip()] = "" + elif ma.group('attr1'): + if ma.group('value1'): + attrs[ma.group('attr1').strip()] = ma.group('value1') + else: + attrs[ma.group('attr1').strip()] = "" + elif ma.group('attr2'): + attrs[ma.group('attr2').strip()] = "" + return tag, len(m.group(0)), attrs + else: + tag = block[1:].split(">", 1)[0].lower() + return tag, len(tag)+2, {} + + def _recursive_tagfind(self, ltag, rtag, start_index, block): + while 1: + i = block.find(rtag, start_index) + if i == -1: + return -1 + j = block.find(ltag, start_index) + # if no ltag, or rtag found before another ltag, return index + if (j > i or j == -1): + return i + len(rtag) + # another ltag found before rtag, use end of ltag as starting + # point and search again + j = block.find('>', j) + start_index = self._recursive_tagfind(ltag, rtag, j + 1, block) + if start_index == -1: + # HTML potentially malformed- ltag has no corresponding + # rtag + return -1 + + def _get_right_tag(self, left_tag, left_index, block): + for p in self.right_tag_patterns: + tag = p % left_tag + i = self._recursive_tagfind("<%s" % left_tag, tag, left_index, block) + if i > 2: + return tag.lstrip("<").rstrip(">"), i + return block.rstrip()[-left_index:-1].lower(), len(block) + + def _equal_tags(self, left_tag, right_tag): + if left_tag[0] in ['?', '@', '%']: # handle PHP, etc. 
+ return True + if ("/" + left_tag) == right_tag: + return True + if (right_tag == "--" and left_tag == "--"): + return True + elif left_tag == right_tag[1:] \ + and right_tag[0] == "/": + return True + else: + return False + + def _is_oneliner(self, tag): + return (tag in ['hr', 'hr/']) + + def run(self, lines): + text = "\n".join(lines) + new_blocks = [] + text = text.rsplit("\n\n") + items = [] + left_tag = '' + right_tag = '' + in_tag = False # flag + + while text: + block = text[0] + if block.startswith("\n"): + block = block[1:] + text = text[1:] + + if block.startswith("\n"): + block = block[1:] + + if not in_tag: + if block.startswith("<") and len(block.strip()) > 1: + + if block[1] == "!": + # is a comment block + left_tag, left_index, attrs = "--", 2, {} + else: + left_tag, left_index, attrs = self._get_left_tag(block) + right_tag, data_index = self._get_right_tag(left_tag, + left_index, + block) + # keep checking conditions below and maybe just append + + if data_index < len(block) \ + and (util.isBlockLevel(left_tag) + or left_tag == '--'): + text.insert(0, block[data_index:]) + block = block[:data_index] + + if not (util.isBlockLevel(left_tag) \ + or block[1] in ["!", "?", "@", "%"]): + new_blocks.append(block) + continue + + if self._is_oneliner(left_tag): + new_blocks.append(block.strip()) + continue + + if block.rstrip().endswith(">") \ + and self._equal_tags(left_tag, right_tag): + if self.markdown_in_raw and 'markdown' in attrs.keys(): + start = re.sub(r'\smarkdown(=[\'"]?[^> ]*[\'"]?)?', + '', block[:left_index]) + end = block[-len(right_tag)-2:] + block = block[left_index:-len(right_tag)-2] + new_blocks.append( + self.markdown.htmlStash.store(start)) + new_blocks.append(block) + new_blocks.append( + self.markdown.htmlStash.store(end)) + else: + new_blocks.append( + self.markdown.htmlStash.store(block.strip())) + continue + else: + # if is block level tag and is not complete + + if util.isBlockLevel(left_tag) or left_tag == "--" \ + and not block.rstrip().endswith(">"): + items.append(block.strip()) + in_tag = True + else: + new_blocks.append( + self.markdown.htmlStash.store(block.strip())) + + continue + + new_blocks.append(block) + + else: + items.append(block) + + right_tag, data_index = self._get_right_tag(left_tag, 0, block) + + if self._equal_tags(left_tag, right_tag): + # if find closing tag + + if data_index < len(block): + # we have more text after right_tag + items[-1] = block[:data_index] + text.insert(0, block[data_index:]) + + in_tag = False + if self.markdown_in_raw and 'markdown' in attrs.keys(): + start = re.sub(r'\smarkdown(=[\'"]?[^> ]*[\'"]?)?', + '', items[0][:left_index]) + items[0] = items[0][left_index:] + end = items[-1][-len(right_tag)-2:] + items[-1] = items[-1][:-len(right_tag)-2] + new_blocks.append( + self.markdown.htmlStash.store(start)) + new_blocks.extend(items) + new_blocks.append( + self.markdown.htmlStash.store(end)) + else: + new_blocks.append( + self.markdown.htmlStash.store('\n\n'.join(items))) + items = [] + + if items: + if self.markdown_in_raw and 'markdown' in attrs.keys(): + start = re.sub(r'\smarkdown(=[\'"]?[^> ]*[\'"]?)?', + '', items[0][:left_index]) + items[0] = items[0][left_index:] + end = items[-1][-len(right_tag)-2:] + items[-1] = items[-1][:-len(right_tag)-2] + new_blocks.append( + self.markdown.htmlStash.store(start)) + new_blocks.extend(items) + if end.strip(): + new_blocks.append( + self.markdown.htmlStash.store(end)) + else: + new_blocks.append( + self.markdown.htmlStash.store('\n\n'.join(items))) + 
#new_blocks.append(self.markdown.htmlStash.store('\n\n'.join(items))) + new_blocks.append('\n') + + new_text = "\n\n".join(new_blocks) + return new_text.split("\n") + + +class ReferencePreprocessor(Preprocessor): + """ Remove reference definitions from text and store for later use. """ + + TITLE = r'[ ]*(\"(.*)\"|\'(.*)\'|\((.*)\))[ ]*' + RE = re.compile(r'^[ ]{0,3}\[([^\]]*)\]:\s*([^ ]*)[ ]*(%s)?$' % TITLE, re.DOTALL) + TITLE_RE = re.compile(r'^%s$' % TITLE) + + def run (self, lines): + new_text = []; + while lines: + line = lines.pop(0) + m = self.RE.match(line) + if m: + id = m.group(1).strip().lower() + link = m.group(2).lstrip('<').rstrip('>') + t = m.group(5) or m.group(6) or m.group(7) + if not t: + # Check next line for title + tm = self.TITLE_RE.match(lines[0]) + if tm: + lines.pop(0) + t = tm.group(2) or tm.group(3) or tm.group(4) + self.markdown.references[id] = (link, t) + else: + new_text.append(line) + + return new_text #+ "\n" diff --git a/awx/lib/site-packages/markdown/serializers.py b/awx/lib/site-packages/markdown/serializers.py new file mode 100644 index 0000000000..b19d61c93d --- /dev/null +++ b/awx/lib/site-packages/markdown/serializers.py @@ -0,0 +1,277 @@ +# markdown/searializers.py +# +# Add x/html serialization to Elementree +# Taken from ElementTree 1.3 preview with slight modifications +# +# Copyright (c) 1999-2007 by Fredrik Lundh. All rights reserved. +# +# fredrik@pythonware.com +# http://www.pythonware.com +# +# -------------------------------------------------------------------- +# The ElementTree toolkit is +# +# Copyright (c) 1999-2007 by Fredrik Lundh +# +# By obtaining, using, and/or copying this software and/or its +# associated documentation, you agree that you have read, understood, +# and will comply with the following terms and conditions: +# +# Permission to use, copy, modify, and distribute this software and +# its associated documentation for any purpose and without fee is +# hereby granted, provided that the above copyright notice appears in +# all copies, and that both that copyright notice and this permission +# notice appear in supporting documentation, and that the name of +# Secret Labs AB or the author not be used in advertising or publicity +# pertaining to distribution of the software without specific, written +# prior permission. +# +# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD +# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT- +# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR +# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY +# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS +# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE +# OF THIS SOFTWARE. +# -------------------------------------------------------------------- + + +from __future__ import absolute_import +from __future__ import unicode_literals +from . 
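[review note] ReferencePreprocessor's regexes (copied verbatim from the hunk above) are easier to verify against a concrete match; the group numbers below are the ones its run() method relies on:

    import re

    TITLE = r'[ ]*(\"(.*)\"|\'(.*)\'|\((.*)\))[ ]*'
    RE = re.compile(r'^[ ]{0,3}\[([^\]]*)\]:\s*([^ ]*)[ ]*(%s)?$' % TITLE, re.DOTALL)

    m = RE.match('[docs]: <http://example.com/docs> "Project docs"')
    assert m.group(1).lower() == 'docs'                          # reference id
    assert m.group(2).lstrip('<').rstrip('>') == 'http://example.com/docs'
    assert (m.group(5) or m.group(6) or m.group(7)) == 'Project docs'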
import util
+ElementTree = util.etree.ElementTree
+QName = util.etree.QName
+if hasattr(util.etree, 'test_comment'):
+    Comment = util.etree.test_comment
+else:
+    Comment = util.etree.Comment
+PI = util.etree.PI
+ProcessingInstruction = util.etree.ProcessingInstruction
+
+__all__ = ['to_html_string', 'to_xhtml_string']
+
+HTML_EMPTY = ("area", "base", "basefont", "br", "col", "frame", "hr",
+              "img", "input", "isindex", "link", "meta", "param")
+
+try:
+    HTML_EMPTY = set(HTML_EMPTY)
+except NameError:
+    pass
+
+_namespace_map = {
+    # "well-known" namespace prefixes
+    "http://www.w3.org/XML/1998/namespace": "xml",
+    "http://www.w3.org/1999/xhtml": "html",
+    "http://www.w3.org/1999/02/22-rdf-syntax-ns#": "rdf",
+    "http://schemas.xmlsoap.org/wsdl/": "wsdl",
+    # xml schema
+    "http://www.w3.org/2001/XMLSchema": "xs",
+    "http://www.w3.org/2001/XMLSchema-instance": "xsi",
+    # dublin core
+    "http://purl.org/dc/elements/1.1/": "dc",
+}
+
+
+def _raise_serialization_error(text):
+    raise TypeError(
+        "cannot serialize %r (type %s)" % (text, type(text).__name__)
+        )
+
+def _encode(text, encoding):
+    try:
+        return text.encode(encoding, "xmlcharrefreplace")
+    except (TypeError, AttributeError):
+        _raise_serialization_error(text)
+
+def _escape_cdata(text):
+    # escape character data
+    try:
+        # it's worth avoiding do-nothing calls for strings that are
+        # shorter than 500 characters, or so. assume that's, by far,
+        # the most common case in most applications.
+        if "&" in text:
+            text = text.replace("&", "&amp;")
+        if "<" in text:
+            text = text.replace("<", "&lt;")
+        if ">" in text:
+            text = text.replace(">", "&gt;")
+        return text
+    except (TypeError, AttributeError):
+        _raise_serialization_error(text)
+
+
+def _escape_attrib(text):
+    # escape attribute value
+    try:
+        if "&" in text:
+            text = text.replace("&", "&amp;")
+        if "<" in text:
+            text = text.replace("<", "&lt;")
+        if ">" in text:
+            text = text.replace(">", "&gt;")
+        if "\"" in text:
+            text = text.replace("\"", "&quot;")
+        if "\n" in text:
+            text = text.replace("\n", "&#10;")
+        return text
+    except (TypeError, AttributeError):
+        _raise_serialization_error(text)
+
+def _escape_attrib_html(text):
+    # escape attribute value
+    try:
+        if "&" in text:
+            text = text.replace("&", "&amp;")
+        if "<" in text:
+            text = text.replace("<", "&lt;")
+        if ">" in text:
+            text = text.replace(">", "&gt;")
+        if "\"" in text:
+            text = text.replace("\"", "&quot;")
+        return text
+    except (TypeError, AttributeError):
+        _raise_serialization_error(text)
+
+
+def _serialize_html(write, elem, qnames, namespaces, format):
+    tag = elem.tag
+    text = elem.text
+    if tag is Comment:
+        write("<!--%s-->" % _escape_cdata(text))
+    elif tag is ProcessingInstruction:
+        write("<?%s?>" % _escape_cdata(text))
+    else:
+        tag = qnames[tag]
+        if tag is None:
+            if text:
+                write(_escape_cdata(text))
+            for e in elem:
+                _serialize_html(write, e, qnames, None, format)
+        else:
+            write("<" + tag)
+            items = elem.items()
+            if items or namespaces:
+                items.sort() # lexical order
+                for k, v in items:
+                    if isinstance(k, QName):
+                        k = k.text
+                    if isinstance(v, QName):
+                        v = qnames[v.text]
+                    else:
+                        v = _escape_attrib_html(v)
+                    if qnames[k] == v and format == 'html':
+                        # handle boolean attributes
+                        write(" %s" % v)
+                    else:
+                        write(" %s=\"%s\"" % (qnames[k], v))
+                if namespaces:
+                    items = namespaces.items()
+                    items.sort(key=lambda x: x[1]) # sort on prefix
+                    for v, k in items:
+                        if k:
+                            k = ":" + k
+                        write(" xmlns%s=\"%s\"" % (k, _escape_attrib(v)))
+            if format == "xhtml" and tag in HTML_EMPTY:
+                write(" />")
+            else:
+                write(">")
+                tag = tag.lower()
+                if
text: + if tag == "script" or tag == "style": + write(text) + else: + write(_escape_cdata(text)) + for e in elem: + _serialize_html(write, e, qnames, None, format) + if tag not in HTML_EMPTY: + write("</" + tag + ">") + if elem.tail: + write(_escape_cdata(elem.tail)) + +def _write_html(root, + encoding=None, + default_namespace=None, + format="html"): + assert root is not None + data = [] + write = data.append + qnames, namespaces = _namespaces(root, default_namespace) + _serialize_html(write, root, qnames, namespaces, format) + if encoding is None: + return "".join(data) + else: + return _encode("".join(data)) + + +# -------------------------------------------------------------------- +# serialization support + +def _namespaces(elem, default_namespace=None): + # identify namespaces used in this tree + + # maps qnames to *encoded* prefix:local names + qnames = {None: None} + + # maps uri:s to prefixes + namespaces = {} + if default_namespace: + namespaces[default_namespace] = "" + + def add_qname(qname): + # calculate serialized qname representation + try: + if qname[:1] == "{": + uri, tag = qname[1:].split("}", 1) + prefix = namespaces.get(uri) + if prefix is None: + prefix = _namespace_map.get(uri) + if prefix is None: + prefix = "ns%d" % len(namespaces) + if prefix != "xml": + namespaces[uri] = prefix + if prefix: + qnames[qname] = "%s:%s" % (prefix, tag) + else: + qnames[qname] = tag # default element + else: + if default_namespace: + raise ValueError( + "cannot use non-qualified names with " + "default_namespace option" + ) + qnames[qname] = qname + except TypeError: + _raise_serialization_error(qname) + + # populate qname and namespaces table + try: + iterate = elem.iter + except AttributeError: + iterate = elem.getiterator # cET compatibility + for elem in iterate(): + tag = elem.tag + if isinstance(tag, QName) and tag.text not in qnames: + add_qname(tag.text) + elif isinstance(tag, util.string_type): + if tag not in qnames: + add_qname(tag) + elif tag is not None and tag is not Comment and tag is not PI: + _raise_serialization_error(tag) + for key, value in elem.items(): + if isinstance(key, QName): + key = key.text + if key not in qnames: + add_qname(key) + if isinstance(value, QName) and value.text not in qnames: + add_qname(value.text) + text = elem.text + if isinstance(text, QName) and text.text not in qnames: + add_qname(text.text) + return qnames, namespaces + +def to_html_string(element): + return _write_html(ElementTree(element).getroot(), format="html") + +def to_xhtml_string(element): + return _write_html(ElementTree(element).getroot(), format="xhtml") diff --git a/awx/lib/site-packages/markdown/treeprocessors.py b/awx/lib/site-packages/markdown/treeprocessors.py new file mode 100644 index 0000000000..e6d3dc9381 --- /dev/null +++ b/awx/lib/site-packages/markdown/treeprocessors.py @@ -0,0 +1,360 @@ +from __future__ import unicode_literals +from __future__ import absolute_import +from . import util +from . import odict +from . import inlinepatterns + + +def build_treeprocessors(md_instance, **kwargs): + """ Build the default treeprocessors for Markdown. 
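[review note] The practical difference between the two serializer entry points above shows up with empty elements and boolean attributes; a small check, assuming the vendored copy is importable as markdown.serializers:

    from xml.etree.ElementTree import Element, SubElement
    from markdown.serializers import to_html_string, to_xhtml_string  # assumption

    p = Element('p')
    SubElement(p, 'br')                    # 'br' is in HTML_EMPTY
    assert to_html_string(p) == '<p><br></p>'
    assert to_xhtml_string(p) == '<p><br /></p>'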
""" + treeprocessors = odict.OrderedDict() + treeprocessors["inline"] = InlineProcessor(md_instance) + treeprocessors["prettify"] = PrettifyTreeprocessor(md_instance) + return treeprocessors + + +def isString(s): + """ Check if it's string """ + if not isinstance(s, util.AtomicString): + return isinstance(s, util.string_type) + return False + + +class Treeprocessor(util.Processor): + """ + Treeprocessors are run on the ElementTree object before serialization. + + Each Treeprocessor implements a "run" method that takes a pointer to an + ElementTree, modifies it as necessary and returns an ElementTree + object. + + Treeprocessors must extend markdown.Treeprocessor. + + """ + def run(self, root): + """ + Subclasses of Treeprocessor should implement a `run` method, which + takes a root ElementTree. This method can return another ElementTree + object, and the existing root ElementTree will be replaced, or it can + modify the current tree and return None. + """ + pass + + +class InlineProcessor(Treeprocessor): + """ + A Treeprocessor that traverses a tree, applying inline patterns. + """ + + def __init__(self, md): + self.__placeholder_prefix = util.INLINE_PLACEHOLDER_PREFIX + self.__placeholder_suffix = util.ETX + self.__placeholder_length = 4 + len(self.__placeholder_prefix) \ + + len(self.__placeholder_suffix) + self.__placeholder_re = util.INLINE_PLACEHOLDER_RE + self.markdown = md + + def __makePlaceholder(self, type): + """ Generate a placeholder """ + id = "%04d" % len(self.stashed_nodes) + hash = util.INLINE_PLACEHOLDER % id + return hash, id + + def __findPlaceholder(self, data, index): + """ + Extract id from data string, start from index + + Keyword arguments: + + * data: string + * index: index, from which we start search + + Returns: placeholder id and string index, after the found placeholder. + + """ + m = self.__placeholder_re.search(data, index) + if m: + return m.group(1), m.end() + else: + return None, index + 1 + + def __stashNode(self, node, type): + """ Add node to stash """ + placeholder, id = self.__makePlaceholder(type) + self.stashed_nodes[id] = node + return placeholder + + def __handleInline(self, data, patternIndex=0): + """ + Process string with inline patterns and replace it + with placeholders + + Keyword arguments: + + * data: A line of Markdown text + * patternIndex: The index of the inlinePattern to start with + + Returns: String with placeholders. + + """ + if not isinstance(data, util.AtomicString): + startIndex = 0 + while patternIndex < len(self.markdown.inlinePatterns): + data, matched, startIndex = self.__applyPattern( + self.markdown.inlinePatterns.value_for_index(patternIndex), + data, patternIndex, startIndex) + if not matched: + patternIndex += 1 + return data + + def __processElementText(self, node, subnode, isText=True): + """ + Process placeholders in Element.text or Element.tail + of Elements popped from self.stashed_nodes. 
+ + Keywords arguments: + + * node: parent node + * subnode: processing node + * isText: bool variable, True - it's text, False - it's tail + + Returns: None + + """ + if isText: + text = subnode.text + subnode.text = None + else: + text = subnode.tail + subnode.tail = None + + childResult = self.__processPlaceholders(text, subnode) + + if not isText and node is not subnode: + pos = node.getchildren().index(subnode) + node.remove(subnode) + else: + pos = 0 + + childResult.reverse() + for newChild in childResult: + node.insert(pos, newChild) + + def __processPlaceholders(self, data, parent): + """ + Process string with placeholders and generate ElementTree tree. + + Keyword arguments: + + * data: string with placeholders instead of ElementTree elements. + * parent: Element, which contains processing inline data + + Returns: list with ElementTree elements with applied inline patterns. + + """ + def linkText(text): + if text: + if result: + if result[-1].tail: + result[-1].tail += text + else: + result[-1].tail = text + else: + if parent.text: + parent.text += text + else: + parent.text = text + result = [] + strartIndex = 0 + while data: + index = data.find(self.__placeholder_prefix, strartIndex) + if index != -1: + id, phEndIndex = self.__findPlaceholder(data, index) + + if id in self.stashed_nodes: + node = self.stashed_nodes.get(id) + + if index > 0: + text = data[strartIndex:index] + linkText(text) + + if not isString(node): # it's Element + for child in [node] + node.getchildren(): + if child.tail: + if child.tail.strip(): + self.__processElementText(node, child,False) + if child.text: + if child.text.strip(): + self.__processElementText(child, child) + else: # it's just a string + linkText(node) + strartIndex = phEndIndex + continue + + strartIndex = phEndIndex + result.append(node) + + else: # wrong placeholder + end = index + len(self.__placeholder_prefix) + linkText(data[strartIndex:end]) + strartIndex = end + else: + text = data[strartIndex:] + if isinstance(data, util.AtomicString): + # We don't want to loose the AtomicString + text = util.AtomicString(text) + linkText(text) + data = "" + + return result + + def __applyPattern(self, pattern, data, patternIndex, startIndex=0): + """ + Check if the line fits the pattern, create the necessary + elements, add it to stashed_nodes. + + Keyword arguments: + + * data: the text to be processed + * pattern: the pattern to be checked + * patternIndex: index of current pattern + * startIndex: string index, from which we start searching + + Returns: String with placeholders instead of ElementTree elements. + + """ + match = pattern.getCompiledRegExp().match(data[startIndex:]) + leftData = data[:startIndex] + + if not match: + return data, False, 0 + + node = pattern.handleMatch(match) + + if node is None: + return data, True, len(leftData)+match.span(len(match.groups()))[0] + + if not isString(node): + if not isinstance(node.text, util.AtomicString): + # We need to process current node too + for child in [node] + node.getchildren(): + if not isString(node): + if child.text: + child.text = self.__handleInline(child.text, + patternIndex + 1) + if child.tail: + child.tail = self.__handleInline(child.tail, + patternIndex) + + placeholder = self.__stashNode(node, pattern.type()) + + return "%s%s%s%s" % (leftData, + match.group(1), + placeholder, match.groups()[-1]), True, 0 + + def run(self, tree): + """Apply inline patterns to a parsed Markdown tree. 
+ + Iterate over ElementTree, find elements with inline tag, apply inline + patterns and append newly created Elements to tree. If you don't + want to process your data with inline paterns, instead of normal string, + use subclass AtomicString: + + node.text = markdown.AtomicString("This will not be processed.") + + Arguments: + + * tree: ElementTree object, representing Markdown tree. + + Returns: ElementTree object with applied inline patterns. + + """ + self.stashed_nodes = {} + + stack = [tree] + + while stack: + currElement = stack.pop() + insertQueue = [] + for child in currElement.getchildren(): + if child.text and not isinstance(child.text, util.AtomicString): + text = child.text + child.text = None + lst = self.__processPlaceholders(self.__handleInline( + text), child) + stack += lst + insertQueue.append((child, lst)) + if child.tail: + tail = self.__handleInline(child.tail) + dumby = util.etree.Element('d') + tailResult = self.__processPlaceholders(tail, dumby) + if dumby.text: + child.tail = dumby.text + else: + child.tail = None + pos = currElement.getchildren().index(child) + 1 + tailResult.reverse() + for newChild in tailResult: + currElement.insert(pos, newChild) + if child.getchildren(): + stack.append(child) + + for element, lst in insertQueue: + if self.markdown.enable_attributes: + if element.text and isString(element.text): + element.text = \ + inlinepatterns.handleAttributes(element.text, + element) + i = 0 + for newChild in lst: + if self.markdown.enable_attributes: + # Processing attributes + if newChild.tail and isString(newChild.tail): + newChild.tail = \ + inlinepatterns.handleAttributes(newChild.tail, + element) + if newChild.text and isString(newChild.text): + newChild.text = \ + inlinepatterns.handleAttributes(newChild.text, + newChild) + element.insert(i, newChild) + i += 1 + return tree + + +class PrettifyTreeprocessor(Treeprocessor): + """ Add linebreaks to the html document. """ + + def _prettifyETree(self, elem): + """ Recursively add linebreaks to ElementTree children. """ + + i = "\n" + if util.isBlockLevel(elem.tag) and elem.tag not in ['code', 'pre']: + if (not elem.text or not elem.text.strip()) \ + and len(elem) and util.isBlockLevel(elem[0].tag): + elem.text = i + for e in elem: + if util.isBlockLevel(e.tag): + self._prettifyETree(e) + if not elem.tail or not elem.tail.strip(): + elem.tail = i + if not elem.tail or not elem.tail.strip(): + elem.tail = i + + def run(self, root): + """ Add linebreaks to ElementTree root object. """ + + self._prettifyETree(root) + # Do <br />'s seperately as they are often in the middle of + # inline content and missed by _prettifyETree. + brs = root.getiterator('br') + for br in brs: + if not br.tail or not br.tail.strip(): + br.tail = '\n' + else: + br.tail = '\n%s' % br.tail + # Clean up extra empty lines at end of code blocks. 
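[review note] The AtomicString escape hatch mentioned in the run() docstring above hinges on isString() treating it as a non-string; a pure-stdlib sketch of that contract:

    class AtomicString(str):
        """Marker subclass: text the inline patterns must leave alone."""

    def isString(s):
        # mirrors treeprocessors.isString(): AtomicStrings fall through untouched
        return isinstance(s, str) and not isinstance(s, AtomicString)

    assert isString('*emphasis*') is True
    assert isString(AtomicString('*left alone*')) is False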
+ pres = root.getiterator('pre') + for pre in pres: + if len(pre) and pre[0].tag == 'code': + pre[0].text = pre[0].text.rstrip() + '\n' diff --git a/awx/lib/site-packages/markdown/util.py b/awx/lib/site-packages/markdown/util.py new file mode 100644 index 0000000000..1036197c47 --- /dev/null +++ b/awx/lib/site-packages/markdown/util.py @@ -0,0 +1,136 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals +import re +import sys + + +""" +Python 3 Stuff +============================================================================= +""" +PY3 = sys.version_info[0] == 3 + +if PY3: + string_type = str + text_type = str + int2str = chr +else: + string_type = basestring + text_type = unicode + int2str = unichr + + +""" +Constants you might want to modify +----------------------------------------------------------------------------- +""" + +BLOCK_LEVEL_ELEMENTS = re.compile("^(p|div|h[1-6]|blockquote|pre|table|dl|ol|ul" + "|script|noscript|form|fieldset|iframe|math" + "|hr|hr/|style|li|dt|dd|thead|tbody" + "|tr|th|td|section|footer|header|group|figure" + "|figcaption|aside|article|canvas|output" + "|progress|video)$", re.IGNORECASE) +# Placeholders +STX = '\u0002' # Use STX ("Start of text") for start-of-placeholder +ETX = '\u0003' # Use ETX ("End of text") for end-of-placeholder +INLINE_PLACEHOLDER_PREFIX = STX+"klzzwxh:" +INLINE_PLACEHOLDER = INLINE_PLACEHOLDER_PREFIX + "%s" + ETX +INLINE_PLACEHOLDER_RE = re.compile(INLINE_PLACEHOLDER % r'([0-9]{4})') +AMP_SUBSTITUTE = STX+"amp"+ETX + +""" +Constants you probably do not need to change +----------------------------------------------------------------------------- +""" + +RTL_BIDI_RANGES = ( ('\u0590', '\u07FF'), + # Hebrew (0590-05FF), Arabic (0600-06FF), + # Syriac (0700-074F), Arabic supplement (0750-077F), + # Thaana (0780-07BF), Nko (07C0-07FF). + ('\u2D30', '\u2D7F'), # Tifinagh + ) + +# Extensions should use "markdown.util.etree" instead of "etree" (or do `from +# markdown.util import etree`). Do not import it by yourself. + +try: # Is the C implemenation of ElementTree available? + import xml.etree.cElementTree as etree + from xml.etree.ElementTree import Comment + # Serializers (including ours) test with non-c Comment + etree.test_comment = Comment + if etree.VERSION < "1.0.5": + raise RuntimeError("cElementTree version 1.0.5 or higher is required.") +except (ImportError, RuntimeError): + # Use the Python implementation of ElementTree? + import xml.etree.ElementTree as etree + if etree.VERSION < "1.1": + raise RuntimeError("ElementTree version 1.1 or higher is required") + + +""" +AUXILIARY GLOBAL FUNCTIONS +============================================================================= +""" + + +def isBlockLevel(tag): + """Check if the tag is a block level HTML tag.""" + if isinstance(tag, string_type): + return BLOCK_LEVEL_ELEMENTS.match(tag) + # Some ElementTree tags are not strings, so return False. + return False + +""" +MISC AUXILIARY CLASSES +============================================================================= +""" + +class AtomicString(text_type): + """A string which should not be further processed.""" + pass + + +class Processor(object): + def __init__(self, markdown_instance=None): + if markdown_instance: + self.markdown = markdown_instance + + +class HtmlStash(object): + """ + This class is used for stashing HTML objects that we extract + in the beginning and replace with place-holders. + """ + + def __init__ (self): + """ Create a HtmlStash. 
""" + self.html_counter = 0 # for counting inline html segments + self.rawHtmlBlocks=[] + + def store(self, html, safe=False): + """ + Saves an HTML segment for later reinsertion. Returns a + placeholder string that needs to be inserted into the + document. + + Keyword arguments: + + * html: an html segment + * safe: label an html segment as safe for safemode + + Returns : a placeholder string + + """ + self.rawHtmlBlocks.append((html, safe)) + placeholder = self.get_placeholder(self.html_counter) + self.html_counter += 1 + return placeholder + + def reset(self): + self.html_counter = 0 + self.rawHtmlBlocks = [] + + def get_placeholder(self, key): + return "%swzxhzdk:%d%s" % (STX, key, ETX) + diff --git a/awx/lib/site-packages/ordereddict.py b/awx/lib/site-packages/ordereddict.py new file mode 100644 index 0000000000..5b0303f5a3 --- /dev/null +++ b/awx/lib/site-packages/ordereddict.py @@ -0,0 +1,127 @@ +# Copyright (c) 2009 Raymond Hettinger +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation files +# (the "Software"), to deal in the Software without restriction, +# including without limitation the rights to use, copy, modify, merge, +# publish, distribute, sublicense, and/or sell copies of the Software, +# and to permit persons to whom the Software is furnished to do so, +# subject to the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. 
+ +from UserDict import DictMixin + +class OrderedDict(dict, DictMixin): + + def __init__(self, *args, **kwds): + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__end + except AttributeError: + self.clear() + self.update(*args, **kwds) + + def clear(self): + self.__end = end = [] + end += [None, end, end] # sentinel node for doubly linked list + self.__map = {} # key --> [key, prev, next] + dict.clear(self) + + def __setitem__(self, key, value): + if key not in self: + end = self.__end + curr = end[1] + curr[2] = end[1] = self.__map[key] = [key, curr, end] + dict.__setitem__(self, key, value) + + def __delitem__(self, key): + dict.__delitem__(self, key) + key, prev, next = self.__map.pop(key) + prev[2] = next + next[1] = prev + + def __iter__(self): + end = self.__end + curr = end[2] + while curr is not end: + yield curr[0] + curr = curr[2] + + def __reversed__(self): + end = self.__end + curr = end[1] + while curr is not end: + yield curr[0] + curr = curr[1] + + def popitem(self, last=True): + if not self: + raise KeyError('dictionary is empty') + if last: + key = reversed(self).next() + else: + key = iter(self).next() + value = self.pop(key) + return key, value + + def __reduce__(self): + items = [[k, self[k]] for k in self] + tmp = self.__map, self.__end + del self.__map, self.__end + inst_dict = vars(self).copy() + self.__map, self.__end = tmp + if inst_dict: + return (self.__class__, (items,), inst_dict) + return self.__class__, (items,) + + def keys(self): + return list(self) + + setdefault = DictMixin.setdefault + update = DictMixin.update + pop = DictMixin.pop + values = DictMixin.values + items = DictMixin.items + iterkeys = DictMixin.iterkeys + itervalues = DictMixin.itervalues + iteritems = DictMixin.iteritems + + def __repr__(self): + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, self.items()) + + def copy(self): + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + d = cls() + for key in iterable: + d[key] = value + return d + + def __eq__(self, other): + if isinstance(other, OrderedDict): + if len(self) != len(other): + return False + for p, q in zip(self.items(), other.items()): + if p != q: + return False + return True + return dict.__eq__(self, other) + + def __ne__(self, other): + return not self == other diff --git a/awx/lib/site-packages/pexpect.py b/awx/lib/site-packages/pexpect.py new file mode 100644 index 0000000000..2ecdabd3c6 --- /dev/null +++ b/awx/lib/site-packages/pexpect.py @@ -0,0 +1,1850 @@ +"""Pexpect is a Python module for spawning child applications and controlling +them automatically. Pexpect can be used for automating interactive applications +such as ssh, ftp, passwd, telnet, etc. It can be used to a automate setup +scripts for duplicating software package installations on different servers. It +can be used for automated software testing. Pexpect is in the spirit of Don +Libes' Expect, but Pexpect is pure Python. Other Expect-like modules for Python +require TCL and Expect or require C extensions to be compiled. Pexpect does not +use C, Expect, or TCL extensions. It should work on any platform that supports +the standard Python pty module. The Pexpect interface focuses on ease of use so +that simple tasks are easy. + +There are two main interfaces to Pexpect -- the function, run() and the class, +spawn. You can call the run() function to execute a command and return the +output. 
This is a handy replacement for os.system(). + +For example:: + + pexpect.run('ls -la') + +The more powerful interface is the spawn class. You can use this to spawn an +external child command and then interact with the child by sending lines and +expecting responses. + +For example:: + + child = pexpect.spawn('scp foo myname@host.example.com:.') + child.expect ('Password:') + child.sendline (mypassword) + +This works even for commands that ask for passwords or other input outside of +the normal stdio streams. + +Credits: Noah Spurrier, Richard Holden, Marco Molteni, Kimberley Burchett, +Robert Stone, Hartmut Goebel, Chad Schroeder, Erick Tryzelaar, Dave Kirby, Ids +vander Molen, George Todd, Noel Taylor, Nicolas D. Cesar, Alexander Gattin, +Jacques-Etienne Baudoux, Geoffrey Marshall, Francisco Lourenco, Glen Mabey, +Karthik Gurusamy, Fernando Perez, Corey Minyard, Jon Cohen, Guillaume +Chazarain, Andrew Ryan, Nick Craig-Wood, Andrew Stone, Jorgen Grahn, John +Spiegel, Jan Grant (Let me know if I forgot anyone.) + +Free, open source, and all that good stuff. + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +Pexpect Copyright (c) 2008 Noah Spurrier +http://pexpect.sourceforge.net/ + +$Id: pexpect.py 516 2008-05-23 20:46:01Z noah $ +""" + +try: + import os, sys, time + import select + import string + import re + import struct + import resource + import types + import pty + import tty + import termios + import fcntl + import errno + import traceback + import signal +except ImportError, e: + raise ImportError (str(e) + """ + +A critical module was not found. Probably this operating system does not +support it. Pexpect is intended for UNIX-like operating systems.""") + +__version__ = '2.4' +__revision__ = '$Revision: 516 $' +__all__ = ['ExceptionPexpect', 'EOF', 'TIMEOUT', 'spawn', 'run', 'which', + 'split_command_line', '__version__', '__revision__'] + +# Exception classes used by this module. +class ExceptionPexpect(Exception): + + """Base class for all exceptions raised by this module. + """ + + def __init__(self, value): + + self.value = value + + def __str__(self): + + return str(self.value) + + def get_trace(self): + + """This returns an abbreviated stack trace with lines that only concern + the caller. In other words, the stack trace inside the Pexpect module + is not included. 
""" + + tblist = traceback.extract_tb(sys.exc_info()[2]) + #tblist = filter(self.__filter_not_pexpect, tblist) + tblist = [item for item in tblist if self.__filter_not_pexpect(item)] + tblist = traceback.format_list(tblist) + return ''.join(tblist) + + def __filter_not_pexpect(self, trace_list_item): + + """This returns True if list item 0 the string 'pexpect.py' in it. """ + + if trace_list_item[0].find('pexpect.py') == -1: + return True + else: + return False + +class EOF(ExceptionPexpect): + + """Raised when EOF is read from a child. This usually means the child has exited.""" + +class TIMEOUT(ExceptionPexpect): + + """Raised when a read time exceeds the timeout. """ + +##class TIMEOUT_PATTERN(TIMEOUT): +## """Raised when the pattern match time exceeds the timeout. +## This is different than a read TIMEOUT because the child process may +## give output, thus never give a TIMEOUT, but the output +## may never match a pattern. +## """ +##class MAXBUFFER(ExceptionPexpect): +## """Raised when a scan buffer fills before matching an expected pattern.""" + +def run (command, timeout=-1, withexitstatus=False, events=None, extra_args=None, logfile=None, cwd=None, env=None): + + """ + This function runs the given command; waits for it to finish; then + returns all output as a string. STDERR is included in output. If the full + path to the command is not given then the path is searched. + + Note that lines are terminated by CR/LF (\\r\\n) combination even on + UNIX-like systems because this is the standard for pseudo ttys. If you set + 'withexitstatus' to true, then run will return a tuple of (command_output, + exitstatus). If 'withexitstatus' is false then this returns just + command_output. + + The run() function can often be used instead of creating a spawn instance. + For example, the following code uses spawn:: + + from pexpect import * + child = spawn('scp foo myname@host.example.com:.') + child.expect ('(?i)password') + child.sendline (mypassword) + + The previous code can be replace with the following:: + + from pexpect import * + run ('scp foo myname@host.example.com:.', events={'(?i)password': mypassword}) + + Examples + ======== + + Start the apache daemon on the local machine:: + + from pexpect import * + run ("/usr/local/apache/bin/apachectl start") + + Check in a file using SVN:: + + from pexpect import * + run ("svn ci -m 'automatic commit' my_file.py") + + Run a command and capture exit status:: + + from pexpect import * + (command_output, exitstatus) = run ('ls -l /bin', withexitstatus=1) + + Tricky Examples + =============== + + The following will run SSH and execute 'ls -l' on the remote machine. The + password 'secret' will be sent if the '(?i)password' pattern is ever seen:: + + run ("ssh username@machine.example.com 'ls -l'", events={'(?i)password':'secret\\n'}) + + This will start mencoder to rip a video from DVD. This will also display + progress ticks every 5 seconds as it runs. For example:: + + from pexpect import * + def print_ticks(d): + print d['event_count'], + run ("mencoder dvd://1 -o video.avi -oac copy -ovc copy", events={TIMEOUT:print_ticks}, timeout=5) + + The 'events' argument should be a dictionary of patterns and responses. + Whenever one of the patterns is seen in the command out run() will send the + associated response string. Note that you should put newlines in your + string if Enter is necessary. The responses may also contain callback + functions. Any callback is function that takes a dictionary as an argument. 
+ The dictionary contains all the locals from the run() function, so you can + access the child spawn object or any other variable defined in run() + (event_count, child, and extra_args are the most useful). A callback may + return True to stop the current run process otherwise run() continues until + the next event. A callback may also return a string which will be sent to + the child. 'extra_args' is not used by directly run(). It provides a way to + pass data to a callback function through run() through the locals + dictionary passed to a callback. """ + + if timeout == -1: + child = spawn(command, maxread=2000, logfile=logfile, cwd=cwd, env=env) + else: + child = spawn(command, timeout=timeout, maxread=2000, logfile=logfile, cwd=cwd, env=env) + if events is not None: + patterns = events.keys() + responses = events.values() + else: + patterns=None # We assume that EOF or TIMEOUT will save us. + responses=None + child_result_list = [] + event_count = 0 + while 1: + try: + index = child.expect (patterns) + if type(child.after) in types.StringTypes: + child_result_list.append(child.before + child.after) + else: # child.after may have been a TIMEOUT or EOF, so don't cat those. + child_result_list.append(child.before) + if type(responses[index]) in types.StringTypes: + child.send(responses[index]) + elif type(responses[index]) is types.FunctionType: + callback_result = responses[index](locals()) + sys.stdout.flush() + if type(callback_result) in types.StringTypes: + child.send(callback_result) + elif callback_result: + break + else: + raise TypeError ('The callback must be a string or function type.') + event_count = event_count + 1 + except TIMEOUT, e: + child_result_list.append(child.before) + break + except EOF, e: + child_result_list.append(child.before) + break + child_result = ''.join(child_result_list) + if withexitstatus: + child.close() + return (child_result, child.exitstatus) + else: + return child_result + +class spawn (object): + + """This is the main class interface for Pexpect. Use this class to start + and control child applications. """ + + def __init__(self, command, args=[], timeout=30, maxread=2000, searchwindowsize=None, logfile=None, cwd=None, env=None): + + """This is the constructor. The command parameter may be a string that + includes a command and any arguments to the command. For example:: + + child = pexpect.spawn ('/usr/bin/ftp') + child = pexpect.spawn ('/usr/bin/ssh user@example.com') + child = pexpect.spawn ('ls -latr /tmp') + + You may also construct it with a list of arguments like so:: + + child = pexpect.spawn ('/usr/bin/ftp', []) + child = pexpect.spawn ('/usr/bin/ssh', ['user@example.com']) + child = pexpect.spawn ('ls', ['-latr', '/tmp']) + + After this the child application will be created and will be ready to + talk to. For normal use, see expect() and send() and sendline(). + + Remember that Pexpect does NOT interpret shell meta characters such as + redirect, pipe, or wild cards (>, |, or *). This is a common mistake. + If you want to run a command and pipe it through another command then + you must also start a shell. For example:: + + child = pexpect.spawn('/bin/bash -c "ls -l | grep LOG > log_list.txt"') + child.expect(pexpect.EOF) + + The second form of spawn (where you pass a list of arguments) is useful + in situations where you wish to spawn a command and pass it its own + argument list. This can make syntax more clear. 
For example, the
+        following is equivalent to the previous example::
+
+            shell_cmd = 'ls -l | grep LOG > log_list.txt'
+            child = pexpect.spawn('/bin/bash', ['-c', shell_cmd])
+            child.expect(pexpect.EOF)
+
+        The maxread attribute sets the read buffer size. This is the maximum
+        number of bytes that Pexpect will try to read from a TTY at one time.
+        Setting the maxread size to 1 will turn off buffering. Setting the
+        maxread value higher may help performance in cases where large amounts
+        of output are read back from the child. This feature is useful in
+        conjunction with searchwindowsize.
+
+        The searchwindowsize attribute sets how far back in the incoming
+        search buffer Pexpect will search for pattern matches. Every time
+        Pexpect reads some data from the child it will append the data to the
+        incoming buffer. The default is to search from the beginning of the
+        incoming buffer each time new data is read from the child, but this is
+        very inefficient if you are running a command that generates a large
+        amount of data and you only need to match near the end of it. The
+        searchwindowsize does not affect the size of the incoming data buffer.
+        You will still have access to the full buffer after expect() returns.
+
+        The logfile member turns on or off logging. All input and output will
+        be copied to the given file object. Set logfile to None to stop
+        logging. This is the default. Set logfile to sys.stdout to echo
+        everything to standard output. The logfile is flushed after each write.
+
+        Example log input and output to a file::
+
+            child = pexpect.spawn('some_command')
+            fout = file('mylog.txt','w')
+            child.logfile = fout
+
+        Example log to stdout::
+
+            child = pexpect.spawn('some_command')
+            child.logfile = sys.stdout
+
+        The logfile_read and logfile_send members can be used to separately log
+        the input from the child and output sent to the child. Sometimes you
+        don't want to see everything you write to the child. You only want to
+        log what the child sends back. For example::
+
+            child = pexpect.spawn('some_command')
+            child.logfile_read = sys.stdout
+
+        To separately log output sent to the child use logfile_send::
+
+            self.logfile_send = fout
+
+        The delaybeforesend helps overcome a weird behavior that many users
+        were experiencing. The typical problem was that a user would expect() a
+        "Password:" prompt and then immediately call sendline() to send the
+        password. The user would then see that their password was echoed back
+        to them. Passwords don't normally echo. The problem is caused by the
+        fact that most applications print out the "Password" prompt and then
+        turn off stdin echo, but if you send your password before the
+        application turned off echo, then you get your password echoed.
+        Normally this wouldn't be a problem when interacting with a human at a
+        real keyboard. If you introduce a slight delay just before writing then
+        this seems to clear up the problem. This was such a common problem for
+        many users that I decided that the default pexpect behavior should be
+        to sleep just before writing to the child application. 1/20th of a
+        second (50 ms) seems to be enough to clear up the problem. You can set
+        delaybeforesend to 0 to return to the old behavior. Most Linux machines
+        don't like this to be below 0.03. I don't know why.
+
+        Note that spawn is clever about finding commands on your path.
+        It uses the same logic that "which" uses to find executables.
+
+        If you wish to get the exit status of the child you must call the
+        close() method.
The exit or signal status of the child will be stored
+        in self.exitstatus or self.signalstatus. If the child exited normally
+        then exitstatus will store the exit return code and signalstatus will
+        be None. If the child was terminated abnormally with a signal then
+        signalstatus will store the signal value and exitstatus will be None.
+        If you need more detail you can also read the self.status member which
+        stores the status returned by os.waitpid. You can interpret this using
+        os.WIFEXITED/os.WEXITSTATUS or os.WIFSIGNALED/os.WTERMSIG. """
+
+        self.STDIN_FILENO = pty.STDIN_FILENO
+        self.STDOUT_FILENO = pty.STDOUT_FILENO
+        self.STDERR_FILENO = pty.STDERR_FILENO
+        self.stdin = sys.stdin
+        self.stdout = sys.stdout
+        self.stderr = sys.stderr
+
+        self.searcher = None
+        self.ignorecase = False
+        self.before = None
+        self.after = None
+        self.match = None
+        self.match_index = None
+        self.terminated = True
+        self.exitstatus = None
+        self.signalstatus = None
+        self.status = None # status returned by os.waitpid
+        self.flag_eof = False
+        self.pid = None
+        self.child_fd = -1 # initially closed
+        self.timeout = timeout
+        self.delimiter = EOF
+        self.logfile = logfile
+        self.logfile_read = None # input from child (read_nonblocking)
+        self.logfile_send = None # output to send (send, sendline)
+        self.maxread = maxread # max bytes to read at one time into buffer
+        self.buffer = '' # This is the read buffer. See maxread.
+        self.searchwindowsize = searchwindowsize # Anything before searchwindowsize point is preserved, but not searched.
+        # Most Linux machines don't like delaybeforesend to be below 0.03 (30 ms).
+        self.delaybeforesend = 0.05 # Sets sleep time used just before sending data to child. Time in seconds.
+        self.delayafterclose = 0.1 # Sets delay in close() method to allow kernel time to update process status. Time in seconds.
+        self.delayafterterminate = 0.1 # Sets delay in terminate() method to allow kernel time to update process status. Time in seconds.
+        self.softspace = False # File-like object.
+        self.name = '<' + repr(self) + '>' # File-like object.
+        self.encoding = None # File-like object.
+        self.closed = True # File-like object.
+        self.cwd = cwd
+        self.env = env
+        self.__irix_hack = (sys.platform.lower().find('irix')>=0) # This flags if we are running on irix
+        # Solaris uses internal __fork_pty(). All others use pty.fork().
+        if (sys.platform.lower().find('solaris')>=0) or (sys.platform.lower().find('sunos5')>=0):
+            self.use_native_pty_fork = False
+        else:
+            self.use_native_pty_fork = True
+
+        # allow dummy instances for subclasses that may not use command or args.
+        if command is None:
+            self.command = None
+            self.args = None
+            self.name = '<pexpect factory incomplete>'
+        else:
+            self._spawn (command, args)
+
+    def __del__(self):
+
+        """This makes sure that no system resources are left open. Python only
+        garbage collects Python objects. OS file descriptors are not Python
+        objects, so they must be handled explicitly. If the child file
+        descriptor was opened outside of this class (passed to the constructor)
+        then this does not close it. """
+
+        if not self.closed:
+            # It is possible for __del__ methods to execute during the
+            # teardown of the Python VM itself. Thus self.close() may
+            # trigger an exception because os.close may be None.
+            # -- Fernando Perez
+            try:
+                self.close()
+            except:
+                pass
+
+    def __str__(self):
+
+        """This returns a human-readable string that represents the state of
+        the object.
""" + + s = [] + s.append(repr(self)) + s.append('version: ' + __version__ + ' (' + __revision__ + ')') + s.append('command: ' + str(self.command)) + s.append('args: ' + str(self.args)) + s.append('searcher: ' + str(self.searcher)) + s.append('buffer (last 100 chars): ' + str(self.buffer)[-100:]) + s.append('before (last 100 chars): ' + str(self.before)[-100:]) + s.append('after: ' + str(self.after)) + s.append('match: ' + str(self.match)) + s.append('match_index: ' + str(self.match_index)) + s.append('exitstatus: ' + str(self.exitstatus)) + s.append('flag_eof: ' + str(self.flag_eof)) + s.append('pid: ' + str(self.pid)) + s.append('child_fd: ' + str(self.child_fd)) + s.append('closed: ' + str(self.closed)) + s.append('timeout: ' + str(self.timeout)) + s.append('delimiter: ' + str(self.delimiter)) + s.append('logfile: ' + str(self.logfile)) + s.append('logfile_read: ' + str(self.logfile_read)) + s.append('logfile_send: ' + str(self.logfile_send)) + s.append('maxread: ' + str(self.maxread)) + s.append('ignorecase: ' + str(self.ignorecase)) + s.append('searchwindowsize: ' + str(self.searchwindowsize)) + s.append('delaybeforesend: ' + str(self.delaybeforesend)) + s.append('delayafterclose: ' + str(self.delayafterclose)) + s.append('delayafterterminate: ' + str(self.delayafterterminate)) + return '\n'.join(s) + + def _spawn(self,command,args=[]): + + """This starts the given command in a child process. This does all the + fork/exec type of stuff for a pty. This is called by __init__. If args + is empty then command will be parsed (split on spaces) and args will be + set to parsed arguments. """ + + # The pid and child_fd of this object get set by this method. + # Note that it is difficult for this method to fail. + # You cannot detect if the child process cannot start. + # So the only way you can tell if the child process started + # or not is to try to read from the file descriptor. If you get + # EOF immediately then it means that the child is already dead. + # That may not necessarily be bad because you may haved spawned a child + # that performs some task; creates no stdout output; and then dies. + + # If command is an int type then it may represent a file descriptor. + if type(command) == type(0): + raise ExceptionPexpect ('Command is an int type. If this is a file descriptor then maybe you want to use fdpexpect.fdspawn which takes an existing file descriptor instead of a command string.') + + if type (args) != type([]): + raise TypeError ('The argument, args, must be a list.') + + if args == []: + self.args = split_command_line(command) + self.command = self.args[0] + else: + self.args = args[:] # work with a copy + self.args.insert (0, command) + self.command = command + + command_with_path = which(self.command) + if command_with_path is None: + raise ExceptionPexpect ('The command was not found or was not executable: %s.' % self.command) + self.command = command_with_path + self.args[0] = self.command + + self.name = '<' + ' '.join (self.args) + '>' + + assert self.pid is None, 'The pid member should be None.' + assert self.command is not None, 'The command member should not be None.' + + if self.use_native_pty_fork: + try: + self.pid, self.child_fd = pty.fork() + except OSError, e: + raise ExceptionPexpect('Error! 
pty.fork() failed: ' + str(e)) + else: # Use internal __fork_pty + self.pid, self.child_fd = self.__fork_pty() + + if self.pid == 0: # Child + try: + self.child_fd = sys.stdout.fileno() # used by setwinsize() + self.setwinsize(24, 80) + except: + # Some platforms do not like setwinsize (Cygwin). + # This will cause problem when running applications that + # are very picky about window size. + # This is a serious limitation, but not a show stopper. + pass + # Do not allow child to inherit open file descriptors from parent. + max_fd = resource.getrlimit(resource.RLIMIT_NOFILE)[0] + for i in range (3, max_fd): + try: + os.close (i) + except OSError: + pass + + # I don't know why this works, but ignoring SIGHUP fixes a + # problem when trying to start a Java daemon with sudo + # (specifically, Tomcat). + signal.signal(signal.SIGHUP, signal.SIG_IGN) + + if self.cwd is not None: + os.chdir(self.cwd) + if self.env is None: + os.execv(self.command, self.args) + else: + os.execvpe(self.command, self.args, self.env) + + # Parent + self.terminated = False + self.closed = False + + def __fork_pty(self): + + """This implements a substitute for the forkpty system call. This + should be more portable than the pty.fork() function. Specifically, + this should work on Solaris. + + Modified 10.06.05 by Geoff Marshall: Implemented __fork_pty() method to + resolve the issue with Python's pty.fork() not supporting Solaris, + particularly ssh. Based on patch to posixmodule.c authored by Noah + Spurrier:: + + http://mail.python.org/pipermail/python-dev/2003-May/035281.html + + """ + + parent_fd, child_fd = os.openpty() + if parent_fd < 0 or child_fd < 0: + raise ExceptionPexpect, "Error! Could not open pty with os.openpty()." + + pid = os.fork() + if pid < 0: + raise ExceptionPexpect, "Error! Failed os.fork()." + elif pid == 0: + # Child. + os.close(parent_fd) + self.__pty_make_controlling_tty(child_fd) + + os.dup2(child_fd, 0) + os.dup2(child_fd, 1) + os.dup2(child_fd, 2) + + if child_fd > 2: + os.close(child_fd) + else: + # Parent. + os.close(child_fd) + + return pid, parent_fd + + def __pty_make_controlling_tty(self, tty_fd): + + """This makes the pseudo-terminal the controlling tty. This should be + more portable than the pty.fork() function. Specifically, this should + work on Solaris. """ + + child_name = os.ttyname(tty_fd) + + # Disconnect from controlling tty if still connected. + try: + fd = os.open("/dev/tty", os.O_RDWR | os.O_NOCTTY); + if fd >= 0: + os.close(fd) + except: + # We are already disconnected. Perhaps we are running inside cron. + pass + + os.setsid() + + # Verify we are disconnected from controlling tty + try: + fd = os.open("/dev/tty", os.O_RDWR | os.O_NOCTTY); + if fd >= 0: + os.close(fd) + raise ExceptionPexpect, "Error! We are not disconnected from a controlling tty." + except: + # Good! We are disconnected from a controlling tty. + pass + + # Verify we can open child pty. + fd = os.open(child_name, os.O_RDWR); + if fd < 0: + raise ExceptionPexpect, "Error! Could not open child pty, " + child_name + else: + os.close(fd) + + # Verify we now have a controlling tty. + fd = os.open("/dev/tty", os.O_WRONLY) + if fd < 0: + raise ExceptionPexpect, "Error! Could not open controlling tty, /dev/tty" + else: + os.close(fd) + + def fileno (self): # File-like object. + + """This returns the file descriptor of the pty for the child. + """ + + return self.child_fd + + def close (self, force=True): # File-like object. + + """This closes the connection with the child application. 
Note that
+        calling close() more than once is valid. This emulates standard Python
+        behavior with files. Set force to True if you want to make sure that
+        the child is terminated (SIGKILL is sent if the child ignores SIGHUP
+        and SIGINT). """
+
+        if not self.closed:
+            self.flush()
+            os.close (self.child_fd)
+            time.sleep(self.delayafterclose) # Give kernel time to update process status.
+            if self.isalive():
+                if not self.terminate(force):
+                    raise ExceptionPexpect ('close() could not terminate the child using terminate()')
+            self.child_fd = -1
+            self.closed = True
+            #self.pid = None
+
+    def flush (self): # File-like object.
+
+        """This does nothing. It is here to support the interface for a
+        File-like object. """
+
+        pass
+
+    def isatty (self): # File-like object.
+
+        """This returns True if the file descriptor is open and connected to a
+        tty(-like) device, else False. """
+
+        return os.isatty(self.child_fd)
+
+    def waitnoecho (self, timeout=-1):
+
+        """This waits until the terminal ECHO flag is set False. This returns
+        True if the echo mode is off. This returns False if the ECHO flag was
+        not set False before the timeout. This can be used to detect when the
+        child is waiting for a password. Usually a child application will turn
+        off echo mode when it is waiting for the user to enter a password. For
+        example, instead of expecting the "password:" prompt you can wait for
+        the child to set ECHO off::
+
+            p = pexpect.spawn ('ssh user@example.com')
+            p.waitnoecho()
+            p.sendline(mypassword)
+
+        If timeout is None then this method will block forever until the ECHO
+        flag is False.
+
+        """
+
+        if timeout == -1:
+            timeout = self.timeout
+        if timeout is not None:
+            end_time = time.time() + timeout
+        while True:
+            if not self.getecho():
+                return True
+            if timeout < 0 and timeout is not None:
+                return False
+            if timeout is not None:
+                timeout = end_time - time.time()
+            time.sleep(0.1)
+
+    def getecho (self):
+
+        """This returns the terminal echo mode. This returns True if echo is
+        on or False if echo is off. Child applications that are expecting you
+        to enter a password often set ECHO False. See waitnoecho(). """
+
+        attr = termios.tcgetattr(self.child_fd)
+        if attr[3] & termios.ECHO:
+            return True
+        return False
+
+    def setecho (self, state):
+
+        """This sets the terminal echo mode on or off. Note that anything the
+        child sent before the echo will be lost, so you should be sure that
+        your input buffer is empty before you call setecho(). For example, the
+        following will work as expected::
+
+            p = pexpect.spawn('cat')
+            p.sendline ('1234') # We will see this twice (once from tty echo and again from cat).
+            p.expect (['1234'])
+            p.expect (['1234'])
+            p.setecho(False) # Turn off tty echo
+            p.sendline ('abcd') # We will see this only once (echoed by cat).
+            p.sendline ('wxyz') # We will see this only once (echoed by cat)
+            p.expect (['abcd'])
+            p.expect (['wxyz'])
+
+        The following WILL NOT WORK because the lines sent before the setecho
+        will be lost::
+
+            p = pexpect.spawn('cat')
+            p.sendline ('1234') # We will see this twice (once from tty echo and again from cat).
+            p.setecho(False) # Turn off tty echo
+            p.sendline ('abcd') # We will see this only once (echoed by cat).
+            p.sendline ('wxyz') # We will see this only once (echoed by cat)
+            p.expect (['1234'])
+            p.expect (['1234'])
+            p.expect (['abcd'])
+            p.expect (['wxyz'])
+        """
+
+        attr = termios.tcgetattr(self.child_fd)
+        if state:
+            attr[3] = attr[3] | termios.ECHO
+        else:
+            attr[3] = attr[3] & ~termios.ECHO
+        # I tried TCSADRAIN and TCSAFLUSH, but these were inconsistent
+        # and blocked on some platforms. TCSADRAIN is probably ideal if it worked.
+        termios.tcsetattr(self.child_fd, termios.TCSANOW, attr)
+
+    def read_nonblocking (self, size = 1, timeout = -1):
+
+        """This reads at most size characters from the child application. It
+        includes a timeout. If the read does not complete within the timeout
+        period then a TIMEOUT exception is raised. If the end of file is read
+        then an EOF exception will be raised. If a log file was set using
+        setlog() then all data will also be written to the log file.
+
+        If timeout is None then the read may block indefinitely. If timeout is -1
+        then the self.timeout value is used. If timeout is 0 then the child is
+        polled and if there was no data immediately ready then this will raise
+        a TIMEOUT exception.
+
+        The timeout refers only to the amount of time to read at least one
+        character. This is not affected by the 'size' parameter, so if you call
+        read_nonblocking(size=100, timeout=30) and only one character is
+        available right away then one character will be returned immediately.
+        It will not wait for 30 seconds for another 99 characters to come in.
+
+        This is a wrapper around os.read(). It uses select.select() to
+        implement the timeout. """
+
+        if self.closed:
+            raise ValueError ('I/O operation on closed file in read_nonblocking().')
+
+        if timeout == -1:
+            timeout = self.timeout
+
+        # Note that some systems such as Solaris do not give an EOF when
+        # the child dies. In fact, you can still try to read
+        # from the child_fd -- it will block forever or until TIMEOUT.
+        # For this case, I test isalive() before doing any reading.
+        # If isalive() is false, then I pretend that this is the same as EOF.
+        if not self.isalive():
+            r,w,e = self.__select([self.child_fd], [], [], 0) # timeout of 0 means "poll"
+            if not r:
+                self.flag_eof = True
+                raise EOF ('End Of File (EOF) in read_nonblocking(). Braindead platform.')
+        elif self.__irix_hack:
+            # This is a hack for Irix. It seems that Irix requires a long delay before checking isalive.
+            # This adds a 2 second delay, but only when the child is terminated.
+            r, w, e = self.__select([self.child_fd], [], [], 2)
+            if not r and not self.isalive():
+                self.flag_eof = True
+                raise EOF ('End Of File (EOF) in read_nonblocking(). Pokey platform.')
+
+        r,w,e = self.__select([self.child_fd], [], [], timeout)
+
+        if not r:
+            if not self.isalive():
+                # Some platforms, such as Irix, will claim that their processes are alive;
+                # then timeout on the select; and then finally admit that they are not alive.
+                self.flag_eof = True
+                raise EOF ('End of File (EOF) in read_nonblocking(). Very pokey platform.')
+            else:
+                raise TIMEOUT ('Timeout exceeded in read_nonblocking().')
+
+        if self.child_fd in r:
+            try:
+                s = os.read(self.child_fd, size)
+            except OSError, e: # Linux does this
+                self.flag_eof = True
+                raise EOF ('End Of File (EOF) in read_nonblocking(). Exception style platform.')
+            if s == '': # BSD style
+                self.flag_eof = True
+                raise EOF ('End Of File (EOF) in read_nonblocking(). 
Empty string style platform.')
+
+            if self.logfile is not None:
+                self.logfile.write (s)
+                self.logfile.flush()
+            if self.logfile_read is not None:
+                self.logfile_read.write (s)
+                self.logfile_read.flush()
+
+            return s
+
+        raise ExceptionPexpect ('Reached an unexpected state in read_nonblocking().')
+
+    def read (self, size = -1): # File-like object.
+
+        """This reads at most "size" bytes from the file (less if the read hits
+        EOF before obtaining size bytes). If the size argument is negative or
+        omitted, read all data until EOF is reached. The bytes are returned as
+        a string object. An empty string is returned when EOF is encountered
+        immediately. """
+
+        if size == 0:
+            return ''
+        if size < 0:
+            self.expect (self.delimiter) # delimiter default is EOF
+            return self.before
+
+        # I could have done this more directly by not using expect(), but
+        # I deliberately decided to couple read() to expect() so that
+        # I would catch any bugs early and ensure consistent behavior.
+        # It's a little less efficient, but there is less for me to
+        # worry about if I have to later modify read() or expect().
+        # Note, it's OK if size==-1 in the regex. That just means it
+        # will never match anything in which case we stop only on EOF.
+        cre = re.compile('.{%d}' % size, re.DOTALL)
+        index = self.expect ([cre, self.delimiter]) # delimiter default is EOF
+        if index == 0:
+            return self.after ### self.before should be ''. Should I assert this?
+        return self.before
+
+    def readline (self, size = -1): # File-like object.
+
+        """This reads and returns one entire line. A trailing newline is kept
+        in the string, but may be absent when a file ends with an incomplete
+        line. Note: This readline() looks for a \\r\\n pair even on UNIX
+        because this is what the pseudo tty device returns. So contrary to what
+        you may expect you will receive the newline as \\r\\n. An empty string
+        is returned when EOF is hit immediately. Currently, the size argument is
+        mostly ignored, so this behavior is not standard for a file-like
+        object. If size is 0 then an empty string is returned. """
+
+        if size == 0:
+            return ''
+        index = self.expect (['\r\n', self.delimiter]) # delimiter default is EOF
+        if index == 0:
+            return self.before + '\r\n'
+        else:
+            return self.before
+
+    def __iter__ (self): # File-like object.
+
+        """This is to support iterators over a file-like object.
+        """
+
+        return self
+
+    def next (self): # File-like object.
+
+        """This is to support iterators over a file-like object.
+        """
+
+        result = self.readline()
+        if result == "":
+            raise StopIteration
+        return result
+
+    def readlines (self, sizehint = -1): # File-like object.
+
+        """This reads until EOF using readline() and returns a list containing
+        the lines thus read. The optional "sizehint" argument is ignored. """
+
+        lines = []
+        while True:
+            line = self.readline()
+            if not line:
+                break
+            lines.append(line)
+        return lines
+
+    def write(self, s): # File-like object.
+
+        """This is similar to send() except that there is no return value.
+        """
+
+        self.send (s)
+
+    def writelines (self, sequence): # File-like object.
+
+        """This calls write() for each element in the sequence. The sequence
+        can be any iterable object producing strings, typically a list of
+        strings. This does not add line separators. There is no return value.
+        """
+
+        for s in sequence:
+            self.write (s)
+
+    def send(self, s):
+
+        """This sends a string to the child process. This returns the number of
+        bytes written. If a log file was set then the data is also written to
+        the log.
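+
+        For example, a minimal illustrative sketch::
+
+            child = pexpect.spawn('cat')
+            n = child.send('hello') # n is 5, the number of bytes written
+
+        See also sendline().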
""" + + time.sleep(self.delaybeforesend) + if self.logfile is not None: + self.logfile.write (s) + self.logfile.flush() + if self.logfile_send is not None: + self.logfile_send.write (s) + self.logfile_send.flush() + c = os.write(self.child_fd, s) + return c + + def sendline(self, s=''): + + """This is like send(), but it adds a line feed (os.linesep). This + returns the number of bytes written. """ + + n = self.send(s) + n = n + self.send (os.linesep) + return n + + def sendcontrol(self, char): + + """This sends a control character to the child such as Ctrl-C or + Ctrl-D. For example, to send a Ctrl-G (ASCII 7):: + + child.sendcontrol('g') + + See also, sendintr() and sendeof(). + """ + + char = char.lower() + a = ord(char) + if a>=97 and a<=122: + a = a - ord('a') + 1 + return self.send (chr(a)) + d = {'@':0, '`':0, + '[':27, '{':27, + '\\':28, '|':28, + ']':29, '}': 29, + '^':30, '~':30, + '_':31, + '?':127} + if char not in d: + return 0 + return self.send (chr(d[char])) + + def sendeof(self): + + """This sends an EOF to the child. This sends a character which causes + the pending parent output buffer to be sent to the waiting child + program without waiting for end-of-line. If it is the first character + of the line, the read() in the user program returns 0, which signifies + end-of-file. This means to work as expected a sendeof() has to be + called at the beginning of a line. This method does not send a newline. + It is the responsibility of the caller to ensure the eof is sent at the + beginning of a line. """ + + ### Hmmm... how do I send an EOF? + ###C if ((m = write(pty, *buf, p - *buf)) < 0) + ###C return (errno == EWOULDBLOCK) ? n : -1; + #fd = sys.stdin.fileno() + #old = termios.tcgetattr(fd) # remember current state + #attr = termios.tcgetattr(fd) + #attr[3] = attr[3] | termios.ICANON # ICANON must be set to recognize EOF + #try: # use try/finally to ensure state gets restored + # termios.tcsetattr(fd, termios.TCSADRAIN, attr) + # if hasattr(termios, 'CEOF'): + # os.write (self.child_fd, '%c' % termios.CEOF) + # else: + # # Silly platform does not define CEOF so assume CTRL-D + # os.write (self.child_fd, '%c' % 4) + #finally: # restore state + # termios.tcsetattr(fd, termios.TCSADRAIN, old) + if hasattr(termios, 'VEOF'): + char = termios.tcgetattr(self.child_fd)[6][termios.VEOF] + else: + # platform does not define VEOF so assume CTRL-D + char = chr(4) + self.send(char) + + def sendintr(self): + + """This sends a SIGINT to the child. It does not require + the SIGINT to be the first character on a line. """ + + if hasattr(termios, 'VINTR'): + char = termios.tcgetattr(self.child_fd)[6][termios.VINTR] + else: + # platform does not define VINTR so assume CTRL-C + char = chr(3) + self.send (char) + + def eof (self): + + """This returns True if the EOF exception was ever raised. + """ + + return self.flag_eof + + def terminate(self, force=False): + + """This forces a child process to terminate. It starts nicely with + SIGHUP and SIGINT. If "force" is True then moves onto SIGKILL. This + returns True if the child was terminated. This returns False if the + child could not be terminated. 
""" + + if not self.isalive(): + return True + try: + self.kill(signal.SIGHUP) + time.sleep(self.delayafterterminate) + if not self.isalive(): + return True + self.kill(signal.SIGCONT) + time.sleep(self.delayafterterminate) + if not self.isalive(): + return True + self.kill(signal.SIGINT) + time.sleep(self.delayafterterminate) + if not self.isalive(): + return True + if force: + self.kill(signal.SIGKILL) + time.sleep(self.delayafterterminate) + if not self.isalive(): + return True + else: + return False + return False + except OSError, e: + # I think there are kernel timing issues that sometimes cause + # this to happen. I think isalive() reports True, but the + # process is dead to the kernel. + # Make one last attempt to see if the kernel is up to date. + time.sleep(self.delayafterterminate) + if not self.isalive(): + return True + else: + return False + + def wait(self): + + """This waits until the child exits. This is a blocking call. This will + not read any data from the child, so this will block forever if the + child has unread output and has terminated. In other words, the child + may have printed output then called exit(); but, technically, the child + is still alive until its output is read. """ + + if self.isalive(): + pid, status = os.waitpid(self.pid, 0) + else: + raise ExceptionPexpect ('Cannot wait for dead child process.') + self.exitstatus = os.WEXITSTATUS(status) + if os.WIFEXITED (status): + self.status = status + self.exitstatus = os.WEXITSTATUS(status) + self.signalstatus = None + self.terminated = True + elif os.WIFSIGNALED (status): + self.status = status + self.exitstatus = None + self.signalstatus = os.WTERMSIG(status) + self.terminated = True + elif os.WIFSTOPPED (status): + raise ExceptionPexpect ('Wait was called for a child process that is stopped. This is not supported. Is some other process attempting job control with our child pid?') + return self.exitstatus + + def isalive(self): + + """This tests if the child process is running or not. This is + non-blocking. If the child was terminated then this will read the + exitstatus or signalstatus of the child. This returns True if the child + process appears to be running or False if not. It can take literally + SECONDS for Solaris to return the right status. """ + + if self.terminated: + return False + + if self.flag_eof: + # This is for Linux, which requires the blocking form of waitpid to get + # status of a defunct process. This is super-lame. The flag_eof would have + # been set in read_nonblocking(), so this should be safe. + waitpid_options = 0 + else: + waitpid_options = os.WNOHANG + + try: + pid, status = os.waitpid(self.pid, waitpid_options) + except OSError, e: # No child processes + if e[0] == errno.ECHILD: + raise ExceptionPexpect ('isalive() encountered condition where "terminated" is 0, but there was no child process. Did someone else call waitpid() on our process?') + else: + raise e + + # I have to do this twice for Solaris. I can't even believe that I figured this out... + # If waitpid() returns 0 it means that no child process wishes to + # report, and the value of status is undefined. + if pid == 0: + try: + pid, status = os.waitpid(self.pid, waitpid_options) ### os.WNOHANG) # Solaris! + except OSError, e: # This should never happen... + if e[0] == errno.ECHILD: + raise ExceptionPexpect ('isalive() encountered condition that should never happen. There was no child process. 
Did someone else call waitpid() on our process?')
+                else:
+                    raise e
+
+        # If pid is still 0 after two calls to waitpid() then the process
+        # really is alive. This seems to work on all platforms, except for
+        # Irix, which seems to require a blocking call on waitpid or select,
+        # so I let read_nonblocking take care of this situation
+        # (unfortunately, this requires waiting through the timeout).
+        if pid == 0:
+            return True
+
+        if os.WIFEXITED (status):
+            self.status = status
+            self.exitstatus = os.WEXITSTATUS(status)
+            self.signalstatus = None
+            self.terminated = True
+        elif os.WIFSIGNALED (status):
+            self.status = status
+            self.exitstatus = None
+            self.signalstatus = os.WTERMSIG(status)
+            self.terminated = True
+        elif os.WIFSTOPPED (status):
+            raise ExceptionPexpect ('isalive() encountered condition where child process is stopped. This is not supported. Is some other process attempting job control with our child pid?')
+        return False
+
+    def kill(self, sig):
+
+        """This sends the given signal to the child application. In keeping
+        with UNIX tradition it has a misleading name. It does not necessarily
+        kill the child unless you send the right signal. """
+
+        # Same as os.kill, but the pid is given for you.
+        if self.isalive():
+            os.kill(self.pid, sig)
+
+    def compile_pattern_list(self, patterns):
+
+        """This compiles a pattern-string or a list of pattern-strings.
+        Patterns must be a StringType, EOF, TIMEOUT, SRE_Pattern, or a list of
+        those. Patterns may also be None which results in an empty list (you
+        might do this if waiting for an EOF or TIMEOUT condition without
+        expecting any pattern).
+
+        This is used by expect() when calling expect_list(). Thus expect() is
+        nothing more than::
+
+            cpl = self.compile_pattern_list(pl)
+            return self.expect_list(cpl, timeout)
+
+        If you are using expect() within a loop it may be more
+        efficient to compile the patterns first and then call expect_list().
+        This avoids calls in a loop to compile_pattern_list()::
+
+            cpl = self.compile_pattern_list(my_pattern)
+            while some_condition:
+                ...
+                i = self.expect_list(cpl, timeout)
+                ...
+        """
+
+        if patterns is None:
+            return []
+        if type(patterns) is not types.ListType:
+            patterns = [patterns]
+
+        compile_flags = re.DOTALL # Allow dot to match \n
+        if self.ignorecase:
+            compile_flags = compile_flags | re.IGNORECASE
+        compiled_pattern_list = []
+        for p in patterns:
+            if type(p) in types.StringTypes:
+                compiled_pattern_list.append(re.compile(p, compile_flags))
+            elif p is EOF:
+                compiled_pattern_list.append(EOF)
+            elif p is TIMEOUT:
+                compiled_pattern_list.append(TIMEOUT)
+            elif type(p) is type(re.compile('')):
+                compiled_pattern_list.append(p)
+            else:
+                raise TypeError ('Argument must be one of StringTypes, EOF, TIMEOUT, SRE_Pattern, or a list of those types. %s' % str(type(p)))
+
+        return compiled_pattern_list
+
+    def expect(self, pattern, timeout = -1, searchwindowsize=-1):
+
+        """This seeks through the stream until a pattern is matched. The
+        pattern is overloaded and may take several types. The pattern can be a
+        StringType, EOF, a compiled re, or a list of any of those types.
+        Strings will be compiled to re types. This returns the index into the
+        pattern list. If the pattern was not a list this returns index 0 on a
+        successful match. This may raise exceptions for EOF or TIMEOUT. To
+        avoid the EOF or TIMEOUT exceptions add EOF or TIMEOUT to the pattern
+        list. That will cause expect to match an EOF or TIMEOUT condition
+        instead of raising an exception.
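+
+        For example, a brief sketch of the simplest case::
+
+            child = pexpect.spawn('/bin/ls')
+            child.expect(pexpect.EOF) # a single pattern always returns 0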
+
+        If you pass a list of patterns and more than one matches, the first match
+        in the stream is chosen. If more than one pattern matches at that point,
+        the leftmost in the pattern list is chosen. For example::
+
+            # the input is 'foobar'
+            index = p.expect (['bar', 'foo', 'foobar'])
+            # returns 1 ('foo') even though 'foobar' is a "better" match
+
+        Please note, however, that buffering can affect this behavior, since
+        input arrives in unpredictable chunks. For example::
+
+            # the input is 'foobar'
+            index = p.expect (['foobar', 'foo'])
+            # returns 0 ('foobar') if all input is available at once,
+            # but returns 1 ('foo') if parts of the final 'bar' arrive late
+
+        After a match is found the instance attributes 'before', 'after' and
+        'match' will be set. You can see all the data read before the match in
+        'before'. You can see the data that was matched in 'after'. The
+        re.MatchObject used in the re match will be in 'match'. If an error
+        occurred then 'before' will be set to all the data read so far and
+        'after' and 'match' will be None.
+
+        If timeout is -1 then timeout will be set to the self.timeout value.
+
+        A list entry may be EOF or TIMEOUT instead of a string. This will
+        catch these exceptions and return the index of the list entry instead
+        of raising the exception. The attribute 'after' will be set to the
+        exception type. The attribute 'match' will be None. This allows you to
+        write code like this::
+
+            index = p.expect (['good', 'bad', pexpect.EOF, pexpect.TIMEOUT])
+            if index == 0:
+                do_something()
+            elif index == 1:
+                do_something_else()
+            elif index == 2:
+                do_some_other_thing()
+            elif index == 3:
+                do_something_completely_different()
+
+        instead of code like this::
+
+            try:
+                index = p.expect (['good', 'bad'])
+                if index == 0:
+                    do_something()
+                elif index == 1:
+                    do_something_else()
+            except EOF:
+                do_some_other_thing()
+            except TIMEOUT:
+                do_something_completely_different()
+
+        These two forms are equivalent. It all depends on what you want. You
+        can also just expect the EOF if you are waiting for all output of a
+        child to finish. For example::
+
+            p = pexpect.spawn('/bin/ls')
+            p.expect (pexpect.EOF)
+            print p.before
+
+        If you are trying to optimize for speed then see expect_list().
+        """
+
+        compiled_pattern_list = self.compile_pattern_list(pattern)
+        return self.expect_list(compiled_pattern_list, timeout, searchwindowsize)
+
+    def expect_list(self, pattern_list, timeout = -1, searchwindowsize = -1):
+
+        """This takes a list of compiled regular expressions and returns the
+        index into the pattern_list that matched the child output. The list may
+        also contain EOF or TIMEOUT (which are not compiled regular
+        expressions). This method is similar to the expect() method except that
+        expect_list() does not recompile the pattern list on every call. This
+        may help if you are trying to optimize for speed, otherwise just use
+        the expect() method. This is called by expect(). If timeout==-1 then
+        the self.timeout value is used. If searchwindowsize==-1 then the
+        self.searchwindowsize value is used. """
+
+        return self.expect_loop(searcher_re(pattern_list), timeout, searchwindowsize)
+
+    def expect_exact(self, pattern_list, timeout = -1, searchwindowsize = -1):
+
+        """This is similar to expect(), but uses plain string matching instead
+        of compiled regular expressions in 'pattern_list'. The 'pattern_list'
+        may be a string; a list or other sequence of strings; or TIMEOUT and
+        EOF.
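+
+        For example, an illustrative sketch (no regex escaping is needed)::
+
+            child = pexpect.spawn('cat')
+            child.sendline('$10.00 (50%)')
+            child.expect_exact('$10.00 (50%)')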
+ + This call might be faster than expect() for two reasons: string + searching is faster than RE matching and it is possible to limit the + search to just the end of the input buffer. + + This method is also useful when you don't want to have to worry about + escaping regular expression characters that you want to match.""" + + if type(pattern_list) in types.StringTypes or pattern_list in (TIMEOUT, EOF): + pattern_list = [pattern_list] + return self.expect_loop(searcher_string(pattern_list), timeout, searchwindowsize) + + def expect_loop(self, searcher, timeout = -1, searchwindowsize = -1): + + """This is the common loop used inside expect. The 'searcher' should be + an instance of searcher_re or searcher_string, which describes how and what + to search for in the input. + + See expect() for other arguments, return value and exceptions. """ + + self.searcher = searcher + + if timeout == -1: + timeout = self.timeout + if timeout is not None: + end_time = time.time() + timeout + if searchwindowsize == -1: + searchwindowsize = self.searchwindowsize + + try: + incoming = self.buffer + freshlen = len(incoming) + while True: # Keep reading until exception or return. + index = searcher.search(incoming, freshlen, searchwindowsize) + if index >= 0: + self.buffer = incoming[searcher.end : ] + self.before = incoming[ : searcher.start] + self.after = incoming[searcher.start : searcher.end] + self.match = searcher.match + self.match_index = index + return self.match_index + # No match at this point + if timeout < 0 and timeout is not None: + raise TIMEOUT ('Timeout exceeded in expect_any().') + # Still have time left, so read more data + c = self.read_nonblocking (self.maxread, timeout) + freshlen = len(c) + time.sleep (0.0001) + incoming = incoming + c + if timeout is not None: + timeout = end_time - time.time() + except EOF, e: + self.buffer = '' + self.before = incoming + self.after = EOF + index = searcher.eof_index + if index >= 0: + self.match = EOF + self.match_index = index + return self.match_index + else: + self.match = None + self.match_index = None + raise EOF (str(e) + '\n' + str(self)) + except TIMEOUT, e: + self.buffer = incoming + self.before = incoming + self.after = TIMEOUT + index = searcher.timeout_index + if index >= 0: + self.match = TIMEOUT + self.match_index = index + return self.match_index + else: + self.match = None + self.match_index = None + raise TIMEOUT (str(e) + '\n' + str(self)) + except: + self.before = incoming + self.after = None + self.match = None + self.match_index = None + raise + + def getwinsize(self): + + """This returns the terminal window size of the child tty. The return + value is a tuple of (rows, cols). """ + + TIOCGWINSZ = getattr(termios, 'TIOCGWINSZ', 1074295912L) + s = struct.pack('HHHH', 0, 0, 0, 0) + x = fcntl.ioctl(self.fileno(), TIOCGWINSZ, s) + return struct.unpack('HHHH', x)[0:2] + + def setwinsize(self, r, c): + + """This sets the terminal window size of the child tty. This will cause + a SIGWINCH signal to be sent to the child. This does not change the + physical window size. It changes the size reported to TTY-aware + applications like vi or curses -- applications that respond to the + SIGWINCH signal. """ + + # Check for buggy platforms. Some Python versions on some platforms + # (notably OSF1 Alpha and RedHat 7.1) truncate the value for + # termios.TIOCSWINSZ. It is not clear why this happens. 
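+        # (For reference: 2148037735L is 0x80087467; the same 32-bit pattern
+        # reinterpreted as a signed int is -2146929561, hence the check below.)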
+        # These platforms don't seem to handle the signed int very well;
+        # yet other platforms like OpenBSD have a large negative value for
+        # TIOCSWINSZ and they don't have a truncate problem.
+        # Newer versions of Linux have totally different values for TIOCSWINSZ.
+        # Note that this fix is a hack.
+        TIOCSWINSZ = getattr(termios, 'TIOCSWINSZ', -2146929561)
+        if TIOCSWINSZ == 2148037735L: # L is not required in Python >= 2.2.
+            TIOCSWINSZ = -2146929561 # Same bits, but with sign.
+        # Note, assume ws_xpixel and ws_ypixel are zero.
+        s = struct.pack('HHHH', r, c, 0, 0)
+        fcntl.ioctl(self.fileno(), TIOCSWINSZ, s)
+
+    def interact(self, escape_character = chr(29), input_filter = None, output_filter = None):
+
+        """This gives control of the child process to the interactive user (the
+        human at the keyboard). Keystrokes are sent to the child process, and
+        the stdout and stderr output of the child process is printed. This
+        simply echoes the child stdout and child stderr to the real stdout and
+        it echoes the real stdin to the child stdin. When the user types the
+        escape_character this method will stop. The default for
+        escape_character is ^]. This should not be confused with ASCII 27 --
+        the ESC character. ASCII 29 was chosen for historical merit because
+        this is the character used by 'telnet' as the escape character. The
+        escape_character will not be sent to the child process.
+
+        You may pass in optional input and output filter functions. These
+        functions should take a string and return a string. The output_filter
+        will be passed all the output from the child process. The input_filter
+        will be passed all the keyboard input from the user. The input_filter
+        is run BEFORE the check for the escape_character.
+
+        Note that if you change the window size of the parent the SIGWINCH
+        signal will not be passed through to the child. If you want the child
+        window size to change when the parent's window size changes then do
+        something like the following example::
+
+            import pexpect, struct, fcntl, termios, signal, sys
+            def sigwinch_passthrough (sig, data):
+                s = struct.pack("HHHH", 0, 0, 0, 0)
+                a = struct.unpack('hhhh', fcntl.ioctl(sys.stdout.fileno(), termios.TIOCGWINSZ , s))
+                global p
+                p.setwinsize(a[0],a[1])
+            p = pexpect.spawn('/bin/bash') # Note this is global and used in sigwinch_passthrough.
+            signal.signal(signal.SIGWINCH, sigwinch_passthrough)
+            p.interact()
+        """
+
+        # Flush the buffer.
+        self.stdout.write (self.buffer)
+        self.stdout.flush()
+        self.buffer = ''
+        mode = tty.tcgetattr(self.STDIN_FILENO)
+        tty.setraw(self.STDIN_FILENO)
+        try:
+            self.__interact_copy(escape_character, input_filter, output_filter)
+        finally:
+            tty.tcsetattr(self.STDIN_FILENO, tty.TCSAFLUSH, mode)
+
+    def __interact_writen(self, fd, data):
+
+        """This is used by the interact() method.
+        """
+
+        while data != '' and self.isalive():
+            n = os.write(fd, data)
+            data = data[n:]
+
+    def __interact_read(self, fd):
+
+        """This is used by the interact() method.
+        """
+
+        return os.read(fd, 1000)
+
+    def __interact_copy(self, escape_character = None, input_filter = None, output_filter = None):
+
+        """This is used by the interact() method.
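+        It runs a select() loop that copies data between the child's pty and
+        the real stdin/stdout until the escape character is read from stdin
+        or the child dies.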
+ """ + + while self.isalive(): + r,w,e = self.__select([self.child_fd, self.STDIN_FILENO], [], []) + if self.child_fd in r: + data = self.__interact_read(self.child_fd) + if output_filter: data = output_filter(data) + if self.logfile is not None: + self.logfile.write (data) + self.logfile.flush() + os.write(self.STDOUT_FILENO, data) + if self.STDIN_FILENO in r: + data = self.__interact_read(self.STDIN_FILENO) + if input_filter: data = input_filter(data) + i = data.rfind(escape_character) + if i != -1: + data = data[:i] + self.__interact_writen(self.child_fd, data) + break + self.__interact_writen(self.child_fd, data) + + def __select (self, iwtd, owtd, ewtd, timeout=None): + + """This is a wrapper around select.select() that ignores signals. If + select.select raises a select.error exception and errno is an EINTR + error then it is ignored. Mainly this is used to ignore sigwinch + (terminal resize). """ + + # if select() is interrupted by a signal (errno==EINTR) then + # we loop back and enter the select() again. + if timeout is not None: + end_time = time.time() + timeout + while True: + try: + return select.select (iwtd, owtd, ewtd, timeout) + except select.error, e: + if e[0] == errno.EINTR: + # if we loop back we have to subtract the amount of time we already waited. + if timeout is not None: + timeout = end_time - time.time() + if timeout < 0: + return ([],[],[]) + else: # something else caused the select.error, so this really is an exception + raise + +############################################################################## +# The following methods are no longer supported or allowed. + + def setmaxread (self, maxread): + + """This method is no longer supported or allowed. I don't like getters + and setters without a good reason. """ + + raise ExceptionPexpect ('This method is no longer supported or allowed. Just assign a value to the maxread member variable.') + + def setlog (self, fileobject): + + """This method is no longer supported or allowed. + """ + + raise ExceptionPexpect ('This method is no longer supported or allowed. Just assign a value to the logfile member variable.') + +############################################################################## +# End of spawn class +############################################################################## + +class searcher_string (object): + + """This is a plain string search helper for the spawn.expect_any() method. + + Attributes: + + eof_index - index of EOF, or -1 + timeout_index - index of TIMEOUT, or -1 + + After a successful match by the search() method the following attributes + are available: + + start - index into the buffer, first byte of match + end - index into the buffer, first byte after match + match - the matching string itself + """ + + def __init__(self, strings): + + """This creates an instance of searcher_string. This argument 'strings' + may be a list; a sequence of strings; or the EOF or TIMEOUT types. 
""" + + self.eof_index = -1 + self.timeout_index = -1 + self._strings = [] + for n, s in zip(range(len(strings)), strings): + if s is EOF: + self.eof_index = n + continue + if s is TIMEOUT: + self.timeout_index = n + continue + self._strings.append((n, s)) + + def __str__(self): + + """This returns a human-readable string that represents the state of + the object.""" + + ss = [ (ns[0],' %d: "%s"' % ns) for ns in self._strings ] + ss.append((-1,'searcher_string:')) + if self.eof_index >= 0: + ss.append ((self.eof_index,' %d: EOF' % self.eof_index)) + if self.timeout_index >= 0: + ss.append ((self.timeout_index,' %d: TIMEOUT' % self.timeout_index)) + ss.sort() + ss = zip(*ss)[1] + return '\n'.join(ss) + + def search(self, buffer, freshlen, searchwindowsize=None): + + """This searches 'buffer' for the first occurence of one of the search + strings. 'freshlen' must indicate the number of bytes at the end of + 'buffer' which have not been searched before. It helps to avoid + searching the same, possibly big, buffer over and over again. + + See class spawn for the 'searchwindowsize' argument. + + If there is a match this returns the index of that string, and sets + 'start', 'end' and 'match'. Otherwise, this returns -1. """ + + absurd_match = len(buffer) + first_match = absurd_match + + # 'freshlen' helps a lot here. Further optimizations could + # possibly include: + # + # using something like the Boyer-Moore Fast String Searching + # Algorithm; pre-compiling the search through a list of + # strings into something that can scan the input once to + # search for all N strings; realize that if we search for + # ['bar', 'baz'] and the input is '...foo' we need not bother + # rescanning until we've read three more bytes. + # + # Sadly, I don't know enough about this interesting topic. /grahn + + for index, s in self._strings: + if searchwindowsize is None: + # the match, if any, can only be in the fresh data, + # or at the very end of the old data + offset = -(freshlen+len(s)) + else: + # better obey searchwindowsize + offset = -searchwindowsize + n = buffer.find(s, offset) + if n >= 0 and n < first_match: + first_match = n + best_index, best_match = index, s + if first_match == absurd_match: + return -1 + self.match = best_match + self.start = first_match + self.end = self.start + len(self.match) + return best_index + +class searcher_re (object): + + """This is regular expression string search helper for the + spawn.expect_any() method. 
+
+    Attributes:
+
+        eof_index     - index of EOF, or -1
+        timeout_index - index of TIMEOUT, or -1
+
+    After a successful match by the search() method the following attributes
+    are available:
+
+        start - index into the buffer, first byte of match
+        end   - index into the buffer, first byte after match
+        match - the re.match object returned by a successful re.search
+
+    """
+
+    def __init__(self, patterns):
+
+        """This creates an instance that searches for 'patterns' where
+        'patterns' may be a list or other sequence of compiled regular
+        expressions, or the EOF or TIMEOUT types."""
+
+        self.eof_index = -1
+        self.timeout_index = -1
+        self._searches = []
+        for n, s in zip(range(len(patterns)), patterns):
+            if s is EOF:
+                self.eof_index = n
+                continue
+            if s is TIMEOUT:
+                self.timeout_index = n
+                continue
+            self._searches.append((n, s))
+
+    def __str__(self):
+
+        """This returns a human-readable string that represents the state of
+        the object."""
+
+        ss = [ (n,'    %d: re.compile("%s")' % (n,str(s.pattern))) for n,s in self._searches]
+        ss.append((-1,'searcher_re:'))
+        if self.eof_index >= 0:
+            ss.append ((self.eof_index,'    %d: EOF' % self.eof_index))
+        if self.timeout_index >= 0:
+            ss.append ((self.timeout_index,'    %d: TIMEOUT' % self.timeout_index))
+        ss.sort()
+        ss = zip(*ss)[1]
+        return '\n'.join(ss)
+
+    def search(self, buffer, freshlen, searchwindowsize=None):
+
+        """This searches 'buffer' for the first occurrence of one of the regular
+        expressions. 'freshlen' must indicate the number of bytes at the end of
+        'buffer' which have not been searched before.
+
+        See class spawn for the 'searchwindowsize' argument.
+
+        If there is a match this returns the index of that string, and sets
+        'start', 'end' and 'match'. Otherwise, returns -1."""
+
+        absurd_match = len(buffer)
+        first_match = absurd_match
+        # 'freshlen' doesn't help here -- we cannot predict the
+        # length of a match, and the re module provides no help.
+        if searchwindowsize is None:
+            searchstart = 0
+        else:
+            searchstart = max(0, len(buffer)-searchwindowsize)
+        for index, s in self._searches:
+            match = s.search(buffer, searchstart)
+            if match is None:
+                continue
+            n = match.start()
+            if n < first_match:
+                first_match = n
+                the_match = match
+                best_index = index
+        if first_match == absurd_match:
+            return -1
+        self.start = first_match
+        self.match = the_match
+        self.end = self.match.end()
+        return best_index
+
+def which (filename):
+
+    """This takes a given filename; tries to find it in the environment path;
+    then checks if it is executable. This returns the full path to the filename
+    if found and executable. Otherwise this returns None."""
+
+    # Special case where filename already contains a path.
+    if os.path.dirname(filename) != '':
+        if os.access (filename, os.X_OK):
+            return filename
+
+    if not os.environ.has_key('PATH') or os.environ['PATH'] == '':
+        p = os.defpath
+    else:
+        p = os.environ['PATH']
+
+    # Oddly enough this was the one line that made Pexpect
+    # incompatible with Python 1.5.2.
+    #pathlist = p.split (os.pathsep)
+    pathlist = string.split (p, os.pathsep)
+
+    for path in pathlist:
+        f = os.path.join(path, filename)
+        if os.access(f, os.X_OK):
+            return f
+    return None
+
+def split_command_line(command_line):
+
+    """This splits a command line into a list of arguments. It splits arguments
+    on spaces, but handles embedded quotes, doublequotes, and escaped
+    characters. It's impossible to do this with a regular expression, so I
+    wrote a little state machine to parse the command line.
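+
+    For example, an illustrative sketch::
+
+        split_command_line('ls -l "My Documents"')
+        # returns ['ls', '-l', 'My Documents']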
""" + + arg_list = [] + arg = '' + + # Constants to name the states we can be in. + state_basic = 0 + state_esc = 1 + state_singlequote = 2 + state_doublequote = 3 + state_whitespace = 4 # The state of consuming whitespace between commands. + state = state_basic + + for c in command_line: + if state == state_basic or state == state_whitespace: + if c == '\\': # Escape the next character + state = state_esc + elif c == r"'": # Handle single quote + state = state_singlequote + elif c == r'"': # Handle double quote + state = state_doublequote + elif c.isspace(): + # Add arg to arg_list if we aren't in the middle of whitespace. + if state == state_whitespace: + None # Do nothing. + else: + arg_list.append(arg) + arg = '' + state = state_whitespace + else: + arg = arg + c + state = state_basic + elif state == state_esc: + arg = arg + c + state = state_basic + elif state == state_singlequote: + if c == r"'": + state = state_basic + else: + arg = arg + c + elif state == state_doublequote: + if c == r'"': + state = state_basic + else: + arg = arg + c + + if arg != '': + arg_list.append(arg) + return arg_list + +# vi:ts=4:sw=4:expandtab:ft=python: diff --git a/awx/lib/site-packages/pxssh.py b/awx/lib/site-packages/pxssh.py new file mode 100644 index 0000000000..04ba25cbff --- /dev/null +++ b/awx/lib/site-packages/pxssh.py @@ -0,0 +1,311 @@ +"""This class extends pexpect.spawn to specialize setting up SSH connections. +This adds methods for login, logout, and expecting the shell prompt. + +$Id: pxssh.py 513 2008-02-09 18:26:13Z noah $ +""" + +from pexpect import * +import pexpect +import time + +__all__ = ['ExceptionPxssh', 'pxssh'] + +# Exception classes used by this module. +class ExceptionPxssh(ExceptionPexpect): + """Raised for pxssh exceptions. + """ + +class pxssh (spawn): + + """This class extends pexpect.spawn to specialize setting up SSH + connections. This adds methods for login, logout, and expecting the shell + prompt. It does various tricky things to handle many situations in the SSH + login process. For example, if the session is your first login, then pxssh + automatically accepts the remote certificate; or if you have public key + authentication setup then pxssh won't wait for the password prompt. + + pxssh uses the shell prompt to synchronize output from the remote host. In + order to make this more robust it sets the shell prompt to something more + unique than just $ or #. This should work on most Borne/Bash or Csh style + shells. + + Example that runs a few commands on a remote server and prints the result:: + + import pxssh + import getpass + try: + s = pxssh.pxssh() + hostname = raw_input('hostname: ') + username = raw_input('username: ') + password = getpass.getpass('password: ') + s.login (hostname, username, password) + s.sendline ('uptime') # run a command + s.prompt() # match the prompt + print s.before # print everything before the prompt. + s.sendline ('ls -l') + s.prompt() + print s.before + s.sendline ('df') + s.prompt() + print s.before + s.logout() + except pxssh.ExceptionPxssh, e: + print "pxssh failed on login." + print str(e) + + Note that if you have ssh-agent running while doing development with pxssh + then this can lead to a lot of confusion. Many X display managers (xdm, + gdm, kdm, etc.) will automatically start a GUI agent. You may see a GUI + dialog box popup asking for a password during development. You should turn + off any key agents during testing. The 'force_password' attribute will turn + off public key authentication. 
This will only work if the remote SSH server + is configured to allow password logins. Example of using 'force_password' + attribute:: + + s = pxssh.pxssh() + s.force_password = True + hostname = raw_input('hostname: ') + username = raw_input('username: ') + password = getpass.getpass('password: ') + s.login (hostname, username, password) + """ + + def __init__ (self, timeout=30, maxread=2000, searchwindowsize=None, logfile=None, cwd=None, env=None): + spawn.__init__(self, None, timeout=timeout, maxread=maxread, searchwindowsize=searchwindowsize, logfile=logfile, cwd=cwd, env=env) + + self.name = '<pxssh>' + + #SUBTLE HACK ALERT! Note that the command to set the prompt uses a + #slightly different string than the regular expression to match it. This + #is because when you set the prompt the command will echo back, but we + #don't want to match the echoed command. So if we make the set command + #slightly different than the regex we eliminate the problem. To make the + #set command different we add a backslash in front of $. The $ doesn't + #need to be escaped, but it doesn't hurt and serves to make the set + #prompt command different than the regex. + + # used to match the command-line prompt + self.UNIQUE_PROMPT = "\[PEXPECT\][\$\#] " + self.PROMPT = self.UNIQUE_PROMPT + + # used to set shell command-line prompt to UNIQUE_PROMPT. + self.PROMPT_SET_SH = "PS1='[PEXPECT]\$ '" + self.PROMPT_SET_CSH = "set prompt='[PEXPECT]\$ '" + self.SSH_OPTS = "-o'RSAAuthentication=no' -o 'PubkeyAuthentication=no'" + # Disabling X11 forwarding gets rid of the annoying SSH_ASKPASS from + # displaying a GUI password dialog. I have not figured out how to + # disable only SSH_ASKPASS without also disabling X11 forwarding. + # Unsetting SSH_ASKPASS on the remote side doesn't disable it! Annoying! + #self.SSH_OPTS = "-x -o'RSAAuthentication=no' -o 'PubkeyAuthentication=no'" + self.force_password = False + self.auto_prompt_reset = True + + def levenshtein_distance(self, a,b): + + """This calculates the Levenshtein distance between a and b. + """ + + n, m = len(a), len(b) + if n > m: + a,b = b,a + n,m = m,n + current = range(n+1) + for i in range(1,m+1): + previous, current = current, [i]+[0]*n + for j in range(1,n+1): + add, delete = previous[j]+1, current[j-1]+1 + change = previous[j-1] + if a[j-1] != b[i-1]: + change = change + 1 + current[j] = min(add, delete, change) + return current[n] + + def sync_original_prompt (self): + + """This attempts to find the prompt. Basically, press enter and record + the response; press enter again and record the response; if the two + responses are similar then assume we are at the original prompt. This + is a slow function. It can take over 10 seconds. """ + + # All of these timing pace values are magic. + # I came up with these based on what seemed reliable for + # connecting to a heavily loaded machine I have. + # If latency is worse than these values then this will fail. 
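+        # Note: the last two of the three prompt samples read below are
+        # compared with levenshtein_distance(); if they differ by less than
+        # 40% of their length (the 0.4 threshold at the end of this method)
+        # we assume both are the prompt.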
+ + try: + self.read_nonblocking(size=10000,timeout=1) # GAS: Clear out the cache before getting the prompt + except TIMEOUT: + pass + time.sleep(0.1) + self.sendline() + time.sleep(0.5) + x = self.read_nonblocking(size=1000,timeout=1) + time.sleep(0.1) + self.sendline() + time.sleep(0.5) + a = self.read_nonblocking(size=1000,timeout=1) + time.sleep(0.1) + self.sendline() + time.sleep(0.5) + b = self.read_nonblocking(size=1000,timeout=1) + ld = self.levenshtein_distance(a,b) + len_a = len(a) + if len_a == 0: + return False + if float(ld)/len_a < 0.4: + return True + return False + + ### TODO: This is getting messy and I'm pretty sure this isn't perfect. + ### TODO: I need to draw a flow chart for this. + def login (self,server,username,password='',terminal_type='ansi',original_prompt=r"[#$]",login_timeout=10,port=None,auto_prompt_reset=True): + + """This logs the user into the given server. It uses the + 'original_prompt' to try to find the prompt right after login. When it + finds the prompt it immediately tries to reset the prompt to something + more easily matched. The default 'original_prompt' is very optimistic + and is easily fooled. It's more reliable to try to match the original + prompt as exactly as possible to prevent false matches by server + strings such as the "Message Of The Day". On many systems you can + disable the MOTD on the remote server by creating a zero-length file + called "~/.hushlogin" on the remote server. If a prompt cannot be found + then this will not necessarily cause the login to fail. In the case of + a timeout when looking for the prompt we assume that the original + prompt was so weird that we could not match it, so we use a few tricks + to guess when we have reached the prompt. Then we hope for the best and + blindly try to reset the prompt to something more unique. If that fails + then login() raises an ExceptionPxssh exception. + + In some situations it is not possible or desirable to reset the + original prompt. In this case, set 'auto_prompt_reset' to False to + inhibit setting the prompt to the UNIQUE_PROMPT. Remember that pxssh + uses a unique prompt in the prompt() method. If the original prompt is + not reset then this will disable the prompt() method unless you + manually set the PROMPT attribute. """ + + ssh_options = '-q' + if self.force_password: + ssh_options = ssh_options + ' ' + self.SSH_OPTS + if port is not None: + ssh_options = ssh_options + ' -p %s'%(str(port)) + cmd = "ssh %s -l %s %s" % (ssh_options, username, server) + + # This does not distinguish between a remote server 'password' prompt + # and a local ssh 'passphrase' prompt (for unlocking a private key). + spawn._spawn(self, cmd) + i = self.expect(["(?i)are you sure you want to continue connecting", original_prompt, "(?i)(?:password)|(?:passphrase for key)", "(?i)permission denied", "(?i)terminal type", TIMEOUT, "(?i)connection closed by remote host"], timeout=login_timeout) + + # First phase + if i==0: + # New certificate -- always accept it. + # This is what you get if SSH does not have the remote host's + # public key stored in the 'known_hosts' cache. 
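+            # (Accepting the key here trusts the host blindly on first
+            # connect; there is no out-of-band host key verification.)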
+ self.sendline("yes") + i = self.expect(["(?i)are you sure you want to continue connecting", original_prompt, "(?i)(?:password)|(?:passphrase for key)", "(?i)permission denied", "(?i)terminal type", TIMEOUT]) + if i==2: # password or passphrase + self.sendline(password) + i = self.expect(["(?i)are you sure you want to continue connecting", original_prompt, "(?i)(?:password)|(?:passphrase for key)", "(?i)permission denied", "(?i)terminal type", TIMEOUT]) + if i==4: + self.sendline(terminal_type) + i = self.expect(["(?i)are you sure you want to continue connecting", original_prompt, "(?i)(?:password)|(?:passphrase for key)", "(?i)permission denied", "(?i)terminal type", TIMEOUT]) + + # Second phase + if i==0: + # This is weird. This should not happen twice in a row. + self.close() + raise ExceptionPxssh ('Weird error. Got "are you sure" prompt twice.') + elif i==1: # can occur if you have a public key pair set to authenticate. + ### TODO: May NOT be OK if expect() got tricked and matched a false prompt. + pass + elif i==2: # password prompt again + # For incorrect passwords, some ssh servers will + # ask for the password again, others return 'denied' right away. + # If we get the password prompt again then this means + # we didn't get the password right the first time. + self.close() + raise ExceptionPxssh ('password refused') + elif i==3: # permission denied -- password was bad. + self.close() + raise ExceptionPxssh ('permission denied') + elif i==4: # terminal type again? WTF? + self.close() + raise ExceptionPxssh ('Weird error. Got "terminal type" prompt twice.') + elif i==5: # Timeout + #This is tricky... I presume that we are at the command-line prompt. + #It may be that the shell prompt was so weird that we couldn't match + #it. Or it may be that we couldn't log in for some other reason. I + #can't be sure, but it's safe to guess that we did login because if + #I presume wrong and we are not logged in then this should be caught + #later when I try to set the shell prompt. + pass + elif i==6: # Connection closed by remote host + self.close() + raise ExceptionPxssh ('connection closed') + else: # Unexpected + self.close() + raise ExceptionPxssh ('unexpected login response') + if not self.sync_original_prompt(): + self.close() + raise ExceptionPxssh ('could not synchronize with original prompt') + # We appear to be in. + # set shell prompt to something unique. + if auto_prompt_reset: + if not self.set_unique_prompt(): + self.close() + raise ExceptionPxssh ('could not set shell prompt\n'+self.before) + return True + + def logout (self): + + """This sends exit to the remote shell. If there are stopped jobs then + this automatically sends exit twice. """ + + self.sendline("exit") + index = self.expect([EOF, "(?i)there are stopped jobs"]) + if index==1: + self.sendline("exit") + self.expect(EOF) + self.close() + + def prompt (self, timeout=20): + + """This matches the shell prompt. This is little more than a short-cut + to the expect() method. This returns True if the shell prompt was + matched. This returns False if there was a timeout. Note that if you + called login() with auto_prompt_reset set to False then you should have + manually set the PROMPT attribute to a regex pattern for matching the + prompt. """ + + i = self.expect([self.PROMPT, TIMEOUT], timeout=timeout) + if i==1: + return False + return True + + def set_unique_prompt (self): + + """This sets the remote prompt to something more unique than # or $. 
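+        (With the defaults above this yields a prompt such as "[PEXPECT]$ ",
+        which the UNIQUE_PROMPT pattern matches.)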
+ This makes it easier for the prompt() method to match the shell prompt + unambiguously. This method is called automatically by the login() + method, but you may want to call it manually if you somehow reset the + shell prompt. For example, if you 'su' to a different user then you + will need to manually reset the prompt. This sends shell commands to + the remote host to set the prompt, so this assumes the remote host is + ready to receive commands. + + Alternatively, you may use your own prompt pattern. Just set the PROMPT + attribute to a regular expression that matches it. In this case you + should call login() with auto_prompt_reset=False; then set the PROMPT + attribute. After that the prompt() method will try to match your prompt + pattern.""" + + self.sendline ("unset PROMPT_COMMAND") + self.sendline (self.PROMPT_SET_SH) # sh-style + i = self.expect ([TIMEOUT, self.PROMPT], timeout=10) + if i == 0: # csh-style + self.sendline (self.PROMPT_SET_CSH) + i = self.expect ([TIMEOUT, self.PROMPT], timeout=10) + if i == 0: + return False + return True + +# vi:ts=4:sw=4:expandtab:ft=python: diff --git a/awx/lib/site-packages/pytz/__init__.py b/awx/lib/site-packages/pytz/__init__.py new file mode 100644 index 0000000000..1975ea62e6 --- /dev/null +++ b/awx/lib/site-packages/pytz/__init__.py @@ -0,0 +1,1543 @@ +''' +datetime.tzinfo timezone definitions generated from the +Olson timezone database: + + ftp://elsie.nci.nih.gov/pub/tz*.tar.gz + +See the datetime section of the Python Library Reference for information +on how to use these modules. +''' + +# The Olson database is updated several times a year. +OLSON_VERSION = '2013b' +VERSION = OLSON_VERSION +# Version format for a patch release - only one so far. +#VERSION = OLSON_VERSION + '.2' +__version__ = OLSON_VERSION + +OLSEN_VERSION = OLSON_VERSION # Old releases had this misspelling + +__all__ = [ + 'timezone', 'utc', 'country_timezones', 'country_names', + 'AmbiguousTimeError', 'InvalidTimeError', + 'NonExistentTimeError', 'UnknownTimeZoneError', + 'all_timezones', 'all_timezones_set', + 'common_timezones', 'common_timezones_set', + ] + +import sys, datetime, os.path, gettext +try: + from UserDict import DictMixin +except ImportError: + from collections import Mapping as DictMixin + +try: + from pkg_resources import resource_stream +except ImportError: + resource_stream = None + +from pytz.exceptions import AmbiguousTimeError +from pytz.exceptions import InvalidTimeError +from pytz.exceptions import NonExistentTimeError +from pytz.exceptions import UnknownTimeZoneError +from pytz.tzinfo import unpickler +from pytz.tzfile import build_tzinfo, _byte_string + + +try: + unicode + +except NameError: # Python 3.x + + # Python 3.x doesn't have unicode(), making writing code + # for Python 2.3 and Python 3.x a pain. + unicode = str + + def ascii(s): + r""" + >>> ascii('Hello') + 'Hello' + >>> ascii('\N{TRADE MARK SIGN}') #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + UnicodeEncodeError: ... + """ + s.encode('US-ASCII') # Raise an exception if not ASCII + return s # But return the original string - not a byte string. + +else: # Python 2.x + + def ascii(s): + r""" + >>> ascii('Hello') + 'Hello' + >>> ascii(u'Hello') + 'Hello' + >>> ascii(u'\N{TRADE MARK SIGN}') #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + UnicodeEncodeError: ... + """ + return s.encode('US-ASCII') + + +def open_resource(name): + """Open a resource from the zoneinfo subdir for reading. 
+ + Uses the pkg_resources module if available and no standard file + found at the calculated location. + """ + name_parts = name.lstrip('/').split('/') + for part in name_parts: + if part == os.path.pardir or os.path.sep in part: + raise ValueError('Bad path segment: %r' % part) + filename = os.path.join(os.path.dirname(__file__), + 'zoneinfo', *name_parts) + if not os.path.exists(filename) and resource_stream is not None: + # http://bugs.launchpad.net/bugs/383171 - we avoid using this + # unless absolutely necessary to help when a broken version of + # pkg_resources is installed. + return resource_stream(__name__, 'zoneinfo/' + name) + return open(filename, 'rb') + + +def resource_exists(name): + """Return true if the given resource exists""" + try: + open_resource(name).close() + return True + except IOError: + return False + + +# Enable this when we get some translations? +# We want an i18n API that is useful to programs using Python's gettext +# module, as well as the Zope3 i18n package. Perhaps we should just provide +# the POT file and translations, and leave it up to callers to make use +# of them. +# +# t = gettext.translation( +# 'pytz', os.path.join(os.path.dirname(__file__), 'locales'), +# fallback=True +# ) +# def _(timezone_name): +# """Translate a timezone name using the current locale, returning Unicode""" +# return t.ugettext(timezone_name) + + +_tzinfo_cache = {} + +def timezone(zone): + r''' Return a datetime.tzinfo implementation for the given timezone + + >>> from datetime import datetime, timedelta + >>> utc = timezone('UTC') + >>> eastern = timezone('US/Eastern') + >>> eastern.zone + 'US/Eastern' + >>> timezone(unicode('US/Eastern')) is eastern + True + >>> utc_dt = datetime(2002, 10, 27, 6, 0, 0, tzinfo=utc) + >>> loc_dt = utc_dt.astimezone(eastern) + >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)' + >>> loc_dt.strftime(fmt) + '2002-10-27 01:00:00 EST (-0500)' + >>> (loc_dt - timedelta(minutes=10)).strftime(fmt) + '2002-10-27 00:50:00 EST (-0500)' + >>> eastern.normalize(loc_dt - timedelta(minutes=10)).strftime(fmt) + '2002-10-27 01:50:00 EDT (-0400)' + >>> (loc_dt + timedelta(minutes=10)).strftime(fmt) + '2002-10-27 01:10:00 EST (-0500)' + + Raises UnknownTimeZoneError if passed an unknown zone. + + >>> try: + ... timezone('Asia/Shangri-La') + ... except UnknownTimeZoneError: + ... print('Unknown') + Unknown + + >>> try: + ... timezone(unicode('\N{TRADE MARK SIGN}')) + ... except UnknownTimeZoneError: + ... print('Unknown') + Unknown + + ''' + if zone.upper() == 'UTC': + return utc + + try: + zone = ascii(zone) + except UnicodeEncodeError: + # All valid timezones are ASCII + raise UnknownTimeZoneError(zone) + + zone = _unmunge_zone(zone) + if zone not in _tzinfo_cache: + if zone in all_timezones_set: + fp = open_resource(zone) + try: + _tzinfo_cache[zone] = build_tzinfo(zone, fp) + finally: + fp.close() + else: + raise UnknownTimeZoneError(zone) + + return _tzinfo_cache[zone] + + +def _unmunge_zone(zone): + """Undo the time zone name munging done by older versions of pytz.""" + return zone.replace('_plus_', '+').replace('_minus_', '-') + + +ZERO = datetime.timedelta(0) +HOUR = datetime.timedelta(hours=1) + + +class UTC(datetime.tzinfo): + """UTC + + Optimized UTC implementation. It unpickles using the single module global + instance defined beneath this class declaration. 
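+
+    A minimal usage sketch:
+
+    >>> from datetime import datetime
+    >>> datetime(2002, 10, 27, 6, 0, tzinfo=utc).strftime('%H:%M %Z')
+    '06:00 UTC'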
+ """ + zone = "UTC" + + _utcoffset = ZERO + _dst = ZERO + _tzname = zone + + def fromutc(self, dt): + if dt.tzinfo is None: + return self.localize(dt) + return super(utc.__class__, self).fromutc(dt) + + def utcoffset(self, dt): + return ZERO + + def tzname(self, dt): + return "UTC" + + def dst(self, dt): + return ZERO + + def __reduce__(self): + return _UTC, () + + def localize(self, dt, is_dst=False): + '''Convert naive time to local time''' + if dt.tzinfo is not None: + raise ValueError('Not naive datetime (tzinfo is already set)') + return dt.replace(tzinfo=self) + + def normalize(self, dt, is_dst=False): + '''Correct the timezone information on the given datetime''' + if dt.tzinfo is self: + return dt + if dt.tzinfo is None: + raise ValueError('Naive time - no tzinfo set') + return dt.astimezone(self) + + def __repr__(self): + return "<UTC>" + + def __str__(self): + return "UTC" + + +UTC = utc = UTC() # UTC is a singleton + + +def _UTC(): + """Factory function for utc unpickling. + + Makes sure that unpickling a utc instance always returns the same + module global. + + These examples belong in the UTC class above, but it is obscured; or in + the README.txt, but we are not depending on Python 2.4 so integrating + the README.txt examples with the unit tests is not trivial. + + >>> import datetime, pickle + >>> dt = datetime.datetime(2005, 3, 1, 14, 13, 21, tzinfo=utc) + >>> naive = dt.replace(tzinfo=None) + >>> p = pickle.dumps(dt, 1) + >>> naive_p = pickle.dumps(naive, 1) + >>> len(p) - len(naive_p) + 17 + >>> new = pickle.loads(p) + >>> new == dt + True + >>> new is dt + False + >>> new.tzinfo is dt.tzinfo + True + >>> utc is UTC is timezone('UTC') + True + >>> utc is timezone('GMT') + False + """ + return utc +_UTC.__safe_for_unpickling__ = True + + +def _p(*args): + """Factory function for unpickling pytz tzinfo instances. + + Just a wrapper around tzinfo.unpickler to save a few bytes in each pickle + by shortening the path. + """ + return unpickler(*args) +_p.__safe_for_unpickling__ = True + + +class _LazyDict(DictMixin): + """Dictionary populated on first use.""" + data = None + def __getitem__(self, key): + if self.data is None: + self._fill() + return self.data[key.upper()] + + def __contains__(self, key): + if self.data is None: + self._fill() + return key in self.data + + def __iter__(self): + if self.data is None: + self._fill() + return iter(self.data) + + def __len__(self): + if self.data is None: + self._fill() + return len(self.data) + + def keys(self): + if self.data is None: + self._fill() + return self.data.keys() + + +class _CountryTimezoneDict(_LazyDict): + """Map ISO 3166 country code to a list of timezone names commonly used + in that country. + + iso3166_code is the two letter code used to identify the country. + + >>> def print_list(list_of_strings): + ... 'We use a helper so doctests work under Python 2.3 -> 3.x' + ... for s in list_of_strings: + ... print(s) + + >>> print_list(country_timezones['nz']) + Pacific/Auckland + Pacific/Chatham + >>> print_list(country_timezones['ch']) + Europe/Zurich + >>> print_list(country_timezones['CH']) + Europe/Zurich + >>> print_list(country_timezones[unicode('ch')]) + Europe/Zurich + >>> print_list(country_timezones['XXX']) + Traceback (most recent call last): + ... + KeyError: 'XXX' + + Previously, this information was exposed as a function rather than a + dictionary. 
+    This is still supported::
+
+        >>> print_list(country_timezones('nz'))
+        Pacific/Auckland
+        Pacific/Chatham
+    """
+    def __call__(self, iso3166_code):
+        """Backwards compatibility."""
+        return self[iso3166_code]
+
+    def _fill(self):
+        data = {}
+        zone_tab = open_resource('zone.tab')
+        try:
+            for line in zone_tab:
+                line = line.decode('US-ASCII')
+                if line.startswith('#'):
+                    continue
+                code, coordinates, zone = line.split(None, 4)[:3]
+                if zone not in all_timezones_set:
+                    continue
+                try:
+                    data[code].append(zone)
+                except KeyError:
+                    data[code] = [zone]
+            self.data = data
+        finally:
+            zone_tab.close()
+
+country_timezones = _CountryTimezoneDict()
+
+
+class _CountryNameDict(_LazyDict):
+    '''Dictionary providing ISO3166 code -> English name.
+
+    >>> print(country_names['au'])
+    Australia
+    '''
+    def _fill(self):
+        data = {}
+        zone_tab = open_resource('iso3166.tab')
+        try:
+            for line in zone_tab.readlines():
+                line = line.decode('US-ASCII')
+                if line.startswith('#'):
+                    continue
+                code, name = line.split(None, 1)
+                data[code] = name.strip()
+            self.data = data
+        finally:
+            zone_tab.close()
+
+country_names = _CountryNameDict()
+
+
+# Time-zone info based solely on fixed offsets
+
+class _FixedOffset(datetime.tzinfo):
+
+    zone = None # to match the standard pytz API
+
+    def __init__(self, minutes):
+        if abs(minutes) >= 1440:
+            raise ValueError("absolute offset is too large", minutes)
+        self._minutes = minutes
+        self._offset = datetime.timedelta(minutes=minutes)
+
+    def utcoffset(self, dt):
+        return self._offset
+
+    def __reduce__(self):
+        return FixedOffset, (self._minutes, )
+
+    def dst(self, dt):
+        return ZERO
+
+    def tzname(self, dt):
+        return None
+
+    def __repr__(self):
+        return 'pytz.FixedOffset(%d)' % self._minutes
+
+    def localize(self, dt, is_dst=False):
+        '''Convert naive time to local time'''
+        if dt.tzinfo is not None:
+            raise ValueError('Not naive datetime (tzinfo is already set)')
+        return dt.replace(tzinfo=self)
+
+    def normalize(self, dt, is_dst=False):
+        '''Correct the timezone information on the given datetime'''
+        if dt.tzinfo is None:
+            raise ValueError('Naive time - no tzinfo set')
+        return dt.replace(tzinfo=self)
+
+
+def FixedOffset(offset, _tzinfos = {}):
+    """return a fixed-offset timezone based off a number of minutes.
+
+    >>> one = FixedOffset(-330)
+    >>> one
+    pytz.FixedOffset(-330)
+    >>> one.utcoffset(datetime.datetime.now())
+    datetime.timedelta(-1, 66600)
+    >>> one.dst(datetime.datetime.now())
+    datetime.timedelta(0)
+
+    >>> two = FixedOffset(1380)
+    >>> two
+    pytz.FixedOffset(1380)
+    >>> two.utcoffset(datetime.datetime.now())
+    datetime.timedelta(0, 82800)
+    >>> two.dst(datetime.datetime.now())
+    datetime.timedelta(0)
+
+    The datetime.timedelta must be between the range of -1 and 1 day,
+    non-inclusive.
+
+    >>> FixedOffset(1440)
+    Traceback (most recent call last):
+    ...
+    ValueError: ('absolute offset is too large', 1440)
+
+    >>> FixedOffset(-1440)
+    Traceback (most recent call last):
+    ...
+    ValueError: ('absolute offset is too large', -1440)
+
+    An offset of 0 is special-cased to return UTC.
+
+    >>> FixedOffset(0) is UTC
+    True
+
+    There should always be only one instance of a FixedOffset per timedelta.
+    This should be true for multiple creation calls.
+
+    >>> FixedOffset(-330) is one
+    True
+    >>> FixedOffset(1380) is two
+    True
+
+    It should also be true for pickling.
+ + >>> import pickle + >>> pickle.loads(pickle.dumps(one)) is one + True + >>> pickle.loads(pickle.dumps(two)) is two + True + """ + if offset == 0: + return UTC + + info = _tzinfos.get(offset) + if info is None: + # We haven't seen this one before. we need to save it. + + # Use setdefault to avoid a race condition and make sure we have + # only one + info = _tzinfos.setdefault(offset, _FixedOffset(offset)) + + return info + +FixedOffset.__safe_for_unpickling__ = True + + +def _test(): + import doctest, os, sys + sys.path.insert(0, os.pardir) + import pytz + return doctest.testmod(pytz) + +if __name__ == '__main__': + _test() + +all_timezones = \ +['Africa/Abidjan', + 'Africa/Accra', + 'Africa/Addis_Ababa', + 'Africa/Algiers', + 'Africa/Asmara', + 'Africa/Asmera', + 'Africa/Bamako', + 'Africa/Bangui', + 'Africa/Banjul', + 'Africa/Bissau', + 'Africa/Blantyre', + 'Africa/Brazzaville', + 'Africa/Bujumbura', + 'Africa/Cairo', + 'Africa/Casablanca', + 'Africa/Ceuta', + 'Africa/Conakry', + 'Africa/Dakar', + 'Africa/Dar_es_Salaam', + 'Africa/Djibouti', + 'Africa/Douala', + 'Africa/El_Aaiun', + 'Africa/Freetown', + 'Africa/Gaborone', + 'Africa/Harare', + 'Africa/Johannesburg', + 'Africa/Juba', + 'Africa/Kampala', + 'Africa/Khartoum', + 'Africa/Kigali', + 'Africa/Kinshasa', + 'Africa/Lagos', + 'Africa/Libreville', + 'Africa/Lome', + 'Africa/Luanda', + 'Africa/Lubumbashi', + 'Africa/Lusaka', + 'Africa/Malabo', + 'Africa/Maputo', + 'Africa/Maseru', + 'Africa/Mbabane', + 'Africa/Mogadishu', + 'Africa/Monrovia', + 'Africa/Nairobi', + 'Africa/Ndjamena', + 'Africa/Niamey', + 'Africa/Nouakchott', + 'Africa/Ouagadougou', + 'Africa/Porto-Novo', + 'Africa/Sao_Tome', + 'Africa/Timbuktu', + 'Africa/Tripoli', + 'Africa/Tunis', + 'Africa/Windhoek', + 'America/Adak', + 'America/Anchorage', + 'America/Anguilla', + 'America/Antigua', + 'America/Araguaina', + 'America/Argentina/Buenos_Aires', + 'America/Argentina/Catamarca', + 'America/Argentina/ComodRivadavia', + 'America/Argentina/Cordoba', + 'America/Argentina/Jujuy', + 'America/Argentina/La_Rioja', + 'America/Argentina/Mendoza', + 'America/Argentina/Rio_Gallegos', + 'America/Argentina/Salta', + 'America/Argentina/San_Juan', + 'America/Argentina/San_Luis', + 'America/Argentina/Tucuman', + 'America/Argentina/Ushuaia', + 'America/Aruba', + 'America/Asuncion', + 'America/Atikokan', + 'America/Atka', + 'America/Bahia', + 'America/Bahia_Banderas', + 'America/Barbados', + 'America/Belem', + 'America/Belize', + 'America/Blanc-Sablon', + 'America/Boa_Vista', + 'America/Bogota', + 'America/Boise', + 'America/Buenos_Aires', + 'America/Cambridge_Bay', + 'America/Campo_Grande', + 'America/Cancun', + 'America/Caracas', + 'America/Catamarca', + 'America/Cayenne', + 'America/Cayman', + 'America/Chicago', + 'America/Chihuahua', + 'America/Coral_Harbour', + 'America/Cordoba', + 'America/Costa_Rica', + 'America/Creston', + 'America/Cuiaba', + 'America/Curacao', + 'America/Danmarkshavn', + 'America/Dawson', + 'America/Dawson_Creek', + 'America/Denver', + 'America/Detroit', + 'America/Dominica', + 'America/Edmonton', + 'America/Eirunepe', + 'America/El_Salvador', + 'America/Ensenada', + 'America/Fort_Wayne', + 'America/Fortaleza', + 'America/Glace_Bay', + 'America/Godthab', + 'America/Goose_Bay', + 'America/Grand_Turk', + 'America/Grenada', + 'America/Guadeloupe', + 'America/Guatemala', + 'America/Guayaquil', + 'America/Guyana', + 'America/Halifax', + 'America/Havana', + 'America/Hermosillo', + 'America/Indiana/Indianapolis', + 'America/Indiana/Knox', + 'America/Indiana/Marengo', + 
'America/Indiana/Petersburg', + 'America/Indiana/Tell_City', + 'America/Indiana/Vevay', + 'America/Indiana/Vincennes', + 'America/Indiana/Winamac', + 'America/Indianapolis', + 'America/Inuvik', + 'America/Iqaluit', + 'America/Jamaica', + 'America/Jujuy', + 'America/Juneau', + 'America/Kentucky/Louisville', + 'America/Kentucky/Monticello', + 'America/Knox_IN', + 'America/Kralendijk', + 'America/La_Paz', + 'America/Lima', + 'America/Los_Angeles', + 'America/Louisville', + 'America/Lower_Princes', + 'America/Maceio', + 'America/Managua', + 'America/Manaus', + 'America/Marigot', + 'America/Martinique', + 'America/Matamoros', + 'America/Mazatlan', + 'America/Mendoza', + 'America/Menominee', + 'America/Merida', + 'America/Metlakatla', + 'America/Mexico_City', + 'America/Miquelon', + 'America/Moncton', + 'America/Monterrey', + 'America/Montevideo', + 'America/Montreal', + 'America/Montserrat', + 'America/Nassau', + 'America/New_York', + 'America/Nipigon', + 'America/Nome', + 'America/Noronha', + 'America/North_Dakota/Beulah', + 'America/North_Dakota/Center', + 'America/North_Dakota/New_Salem', + 'America/Ojinaga', + 'America/Panama', + 'America/Pangnirtung', + 'America/Paramaribo', + 'America/Phoenix', + 'America/Port-au-Prince', + 'America/Port_of_Spain', + 'America/Porto_Acre', + 'America/Porto_Velho', + 'America/Puerto_Rico', + 'America/Rainy_River', + 'America/Rankin_Inlet', + 'America/Recife', + 'America/Regina', + 'America/Resolute', + 'America/Rio_Branco', + 'America/Rosario', + 'America/Santa_Isabel', + 'America/Santarem', + 'America/Santiago', + 'America/Santo_Domingo', + 'America/Sao_Paulo', + 'America/Scoresbysund', + 'America/Shiprock', + 'America/Sitka', + 'America/St_Barthelemy', + 'America/St_Johns', + 'America/St_Kitts', + 'America/St_Lucia', + 'America/St_Thomas', + 'America/St_Vincent', + 'America/Swift_Current', + 'America/Tegucigalpa', + 'America/Thule', + 'America/Thunder_Bay', + 'America/Tijuana', + 'America/Toronto', + 'America/Tortola', + 'America/Vancouver', + 'America/Virgin', + 'America/Whitehorse', + 'America/Winnipeg', + 'America/Yakutat', + 'America/Yellowknife', + 'Antarctica/Casey', + 'Antarctica/Davis', + 'Antarctica/DumontDUrville', + 'Antarctica/Macquarie', + 'Antarctica/Mawson', + 'Antarctica/McMurdo', + 'Antarctica/Palmer', + 'Antarctica/Rothera', + 'Antarctica/South_Pole', + 'Antarctica/Syowa', + 'Antarctica/Vostok', + 'Arctic/Longyearbyen', + 'Asia/Aden', + 'Asia/Almaty', + 'Asia/Amman', + 'Asia/Anadyr', + 'Asia/Aqtau', + 'Asia/Aqtobe', + 'Asia/Ashgabat', + 'Asia/Ashkhabad', + 'Asia/Baghdad', + 'Asia/Bahrain', + 'Asia/Baku', + 'Asia/Bangkok', + 'Asia/Beirut', + 'Asia/Bishkek', + 'Asia/Brunei', + 'Asia/Calcutta', + 'Asia/Choibalsan', + 'Asia/Chongqing', + 'Asia/Chungking', + 'Asia/Colombo', + 'Asia/Dacca', + 'Asia/Damascus', + 'Asia/Dhaka', + 'Asia/Dili', + 'Asia/Dubai', + 'Asia/Dushanbe', + 'Asia/Gaza', + 'Asia/Harbin', + 'Asia/Hebron', + 'Asia/Ho_Chi_Minh', + 'Asia/Hong_Kong', + 'Asia/Hovd', + 'Asia/Irkutsk', + 'Asia/Istanbul', + 'Asia/Jakarta', + 'Asia/Jayapura', + 'Asia/Jerusalem', + 'Asia/Kabul', + 'Asia/Kamchatka', + 'Asia/Karachi', + 'Asia/Kashgar', + 'Asia/Kathmandu', + 'Asia/Katmandu', + 'Asia/Khandyga', + 'Asia/Kolkata', + 'Asia/Krasnoyarsk', + 'Asia/Kuala_Lumpur', + 'Asia/Kuching', + 'Asia/Kuwait', + 'Asia/Macao', + 'Asia/Macau', + 'Asia/Magadan', + 'Asia/Makassar', + 'Asia/Manila', + 'Asia/Muscat', + 'Asia/Nicosia', + 'Asia/Novokuznetsk', + 'Asia/Novosibirsk', + 'Asia/Omsk', + 'Asia/Oral', + 'Asia/Phnom_Penh', + 'Asia/Pontianak', + 
'Asia/Pyongyang', + 'Asia/Qatar', + 'Asia/Qyzylorda', + 'Asia/Rangoon', + 'Asia/Riyadh', + 'Asia/Saigon', + 'Asia/Sakhalin', + 'Asia/Samarkand', + 'Asia/Seoul', + 'Asia/Shanghai', + 'Asia/Singapore', + 'Asia/Taipei', + 'Asia/Tashkent', + 'Asia/Tbilisi', + 'Asia/Tehran', + 'Asia/Tel_Aviv', + 'Asia/Thimbu', + 'Asia/Thimphu', + 'Asia/Tokyo', + 'Asia/Ujung_Pandang', + 'Asia/Ulaanbaatar', + 'Asia/Ulan_Bator', + 'Asia/Urumqi', + 'Asia/Ust-Nera', + 'Asia/Vientiane', + 'Asia/Vladivostok', + 'Asia/Yakutsk', + 'Asia/Yekaterinburg', + 'Asia/Yerevan', + 'Atlantic/Azores', + 'Atlantic/Bermuda', + 'Atlantic/Canary', + 'Atlantic/Cape_Verde', + 'Atlantic/Faeroe', + 'Atlantic/Faroe', + 'Atlantic/Jan_Mayen', + 'Atlantic/Madeira', + 'Atlantic/Reykjavik', + 'Atlantic/South_Georgia', + 'Atlantic/St_Helena', + 'Atlantic/Stanley', + 'Australia/ACT', + 'Australia/Adelaide', + 'Australia/Brisbane', + 'Australia/Broken_Hill', + 'Australia/Canberra', + 'Australia/Currie', + 'Australia/Darwin', + 'Australia/Eucla', + 'Australia/Hobart', + 'Australia/LHI', + 'Australia/Lindeman', + 'Australia/Lord_Howe', + 'Australia/Melbourne', + 'Australia/NSW', + 'Australia/North', + 'Australia/Perth', + 'Australia/Queensland', + 'Australia/South', + 'Australia/Sydney', + 'Australia/Tasmania', + 'Australia/Victoria', + 'Australia/West', + 'Australia/Yancowinna', + 'Brazil/Acre', + 'Brazil/DeNoronha', + 'Brazil/East', + 'Brazil/West', + 'CET', + 'CST6CDT', + 'Canada/Atlantic', + 'Canada/Central', + 'Canada/East-Saskatchewan', + 'Canada/Eastern', + 'Canada/Mountain', + 'Canada/Newfoundland', + 'Canada/Pacific', + 'Canada/Saskatchewan', + 'Canada/Yukon', + 'Chile/Continental', + 'Chile/EasterIsland', + 'Cuba', + 'EET', + 'EST', + 'EST5EDT', + 'Egypt', + 'Eire', + 'Etc/GMT', + 'Etc/GMT+0', + 'Etc/GMT+1', + 'Etc/GMT+10', + 'Etc/GMT+11', + 'Etc/GMT+12', + 'Etc/GMT+2', + 'Etc/GMT+3', + 'Etc/GMT+4', + 'Etc/GMT+5', + 'Etc/GMT+6', + 'Etc/GMT+7', + 'Etc/GMT+8', + 'Etc/GMT+9', + 'Etc/GMT-0', + 'Etc/GMT-1', + 'Etc/GMT-10', + 'Etc/GMT-11', + 'Etc/GMT-12', + 'Etc/GMT-13', + 'Etc/GMT-14', + 'Etc/GMT-2', + 'Etc/GMT-3', + 'Etc/GMT-4', + 'Etc/GMT-5', + 'Etc/GMT-6', + 'Etc/GMT-7', + 'Etc/GMT-8', + 'Etc/GMT-9', + 'Etc/GMT0', + 'Etc/Greenwich', + 'Etc/UCT', + 'Etc/UTC', + 'Etc/Universal', + 'Etc/Zulu', + 'Europe/Amsterdam', + 'Europe/Andorra', + 'Europe/Athens', + 'Europe/Belfast', + 'Europe/Belgrade', + 'Europe/Berlin', + 'Europe/Bratislava', + 'Europe/Brussels', + 'Europe/Bucharest', + 'Europe/Budapest', + 'Europe/Busingen', + 'Europe/Chisinau', + 'Europe/Copenhagen', + 'Europe/Dublin', + 'Europe/Gibraltar', + 'Europe/Guernsey', + 'Europe/Helsinki', + 'Europe/Isle_of_Man', + 'Europe/Istanbul', + 'Europe/Jersey', + 'Europe/Kaliningrad', + 'Europe/Kiev', + 'Europe/Lisbon', + 'Europe/Ljubljana', + 'Europe/London', + 'Europe/Luxembourg', + 'Europe/Madrid', + 'Europe/Malta', + 'Europe/Mariehamn', + 'Europe/Minsk', + 'Europe/Monaco', + 'Europe/Moscow', + 'Europe/Nicosia', + 'Europe/Oslo', + 'Europe/Paris', + 'Europe/Podgorica', + 'Europe/Prague', + 'Europe/Riga', + 'Europe/Rome', + 'Europe/Samara', + 'Europe/San_Marino', + 'Europe/Sarajevo', + 'Europe/Simferopol', + 'Europe/Skopje', + 'Europe/Sofia', + 'Europe/Stockholm', + 'Europe/Tallinn', + 'Europe/Tirane', + 'Europe/Tiraspol', + 'Europe/Uzhgorod', + 'Europe/Vaduz', + 'Europe/Vatican', + 'Europe/Vienna', + 'Europe/Vilnius', + 'Europe/Volgograd', + 'Europe/Warsaw', + 'Europe/Zagreb', + 'Europe/Zaporozhye', + 'Europe/Zurich', + 'GB', + 'GB-Eire', + 'GMT', + 'GMT+0', + 'GMT-0', + 'GMT0', + 'Greenwich', + 
'HST', + 'Hongkong', + 'Iceland', + 'Indian/Antananarivo', + 'Indian/Chagos', + 'Indian/Christmas', + 'Indian/Cocos', + 'Indian/Comoro', + 'Indian/Kerguelen', + 'Indian/Mahe', + 'Indian/Maldives', + 'Indian/Mauritius', + 'Indian/Mayotte', + 'Indian/Reunion', + 'Iran', + 'Israel', + 'Jamaica', + 'Japan', + 'Kwajalein', + 'Libya', + 'MET', + 'MST', + 'MST7MDT', + 'Mexico/BajaNorte', + 'Mexico/BajaSur', + 'Mexico/General', + 'NZ', + 'NZ-CHAT', + 'Navajo', + 'PRC', + 'PST8PDT', + 'Pacific/Apia', + 'Pacific/Auckland', + 'Pacific/Chatham', + 'Pacific/Chuuk', + 'Pacific/Easter', + 'Pacific/Efate', + 'Pacific/Enderbury', + 'Pacific/Fakaofo', + 'Pacific/Fiji', + 'Pacific/Funafuti', + 'Pacific/Galapagos', + 'Pacific/Gambier', + 'Pacific/Guadalcanal', + 'Pacific/Guam', + 'Pacific/Honolulu', + 'Pacific/Johnston', + 'Pacific/Kiritimati', + 'Pacific/Kosrae', + 'Pacific/Kwajalein', + 'Pacific/Majuro', + 'Pacific/Marquesas', + 'Pacific/Midway', + 'Pacific/Nauru', + 'Pacific/Niue', + 'Pacific/Norfolk', + 'Pacific/Noumea', + 'Pacific/Pago_Pago', + 'Pacific/Palau', + 'Pacific/Pitcairn', + 'Pacific/Pohnpei', + 'Pacific/Ponape', + 'Pacific/Port_Moresby', + 'Pacific/Rarotonga', + 'Pacific/Saipan', + 'Pacific/Samoa', + 'Pacific/Tahiti', + 'Pacific/Tarawa', + 'Pacific/Tongatapu', + 'Pacific/Truk', + 'Pacific/Wake', + 'Pacific/Wallis', + 'Pacific/Yap', + 'Poland', + 'Portugal', + 'ROC', + 'ROK', + 'Singapore', + 'Turkey', + 'UCT', + 'US/Alaska', + 'US/Aleutian', + 'US/Arizona', + 'US/Central', + 'US/East-Indiana', + 'US/Eastern', + 'US/Hawaii', + 'US/Indiana-Starke', + 'US/Michigan', + 'US/Mountain', + 'US/Pacific', + 'US/Pacific-New', + 'US/Samoa', + 'UTC', + 'Universal', + 'W-SU', + 'WET', + 'Zulu'] +all_timezones = [ + tz for tz in all_timezones if resource_exists(tz)] + +all_timezones_set = set(all_timezones) +common_timezones = \ +['Africa/Abidjan', + 'Africa/Accra', + 'Africa/Addis_Ababa', + 'Africa/Algiers', + 'Africa/Asmara', + 'Africa/Bamako', + 'Africa/Bangui', + 'Africa/Banjul', + 'Africa/Bissau', + 'Africa/Blantyre', + 'Africa/Brazzaville', + 'Africa/Bujumbura', + 'Africa/Cairo', + 'Africa/Casablanca', + 'Africa/Ceuta', + 'Africa/Conakry', + 'Africa/Dakar', + 'Africa/Dar_es_Salaam', + 'Africa/Djibouti', + 'Africa/Douala', + 'Africa/El_Aaiun', + 'Africa/Freetown', + 'Africa/Gaborone', + 'Africa/Harare', + 'Africa/Johannesburg', + 'Africa/Juba', + 'Africa/Kampala', + 'Africa/Khartoum', + 'Africa/Kigali', + 'Africa/Kinshasa', + 'Africa/Lagos', + 'Africa/Libreville', + 'Africa/Lome', + 'Africa/Luanda', + 'Africa/Lubumbashi', + 'Africa/Lusaka', + 'Africa/Malabo', + 'Africa/Maputo', + 'Africa/Maseru', + 'Africa/Mbabane', + 'Africa/Mogadishu', + 'Africa/Monrovia', + 'Africa/Nairobi', + 'Africa/Ndjamena', + 'Africa/Niamey', + 'Africa/Nouakchott', + 'Africa/Ouagadougou', + 'Africa/Porto-Novo', + 'Africa/Sao_Tome', + 'Africa/Tripoli', + 'Africa/Tunis', + 'Africa/Windhoek', + 'America/Adak', + 'America/Anchorage', + 'America/Anguilla', + 'America/Antigua', + 'America/Araguaina', + 'America/Argentina/Buenos_Aires', + 'America/Argentina/Catamarca', + 'America/Argentina/Cordoba', + 'America/Argentina/Jujuy', + 'America/Argentina/La_Rioja', + 'America/Argentina/Mendoza', + 'America/Argentina/Rio_Gallegos', + 'America/Argentina/Salta', + 'America/Argentina/San_Juan', + 'America/Argentina/San_Luis', + 'America/Argentina/Tucuman', + 'America/Argentina/Ushuaia', + 'America/Aruba', + 'America/Asuncion', + 'America/Atikokan', + 'America/Bahia', + 'America/Bahia_Banderas', + 'America/Barbados', + 'America/Belem', + 
'America/Belize', + 'America/Blanc-Sablon', + 'America/Boa_Vista', + 'America/Bogota', + 'America/Boise', + 'America/Cambridge_Bay', + 'America/Campo_Grande', + 'America/Cancun', + 'America/Caracas', + 'America/Cayenne', + 'America/Cayman', + 'America/Chicago', + 'America/Chihuahua', + 'America/Costa_Rica', + 'America/Creston', + 'America/Cuiaba', + 'America/Curacao', + 'America/Danmarkshavn', + 'America/Dawson', + 'America/Dawson_Creek', + 'America/Denver', + 'America/Detroit', + 'America/Dominica', + 'America/Edmonton', + 'America/Eirunepe', + 'America/El_Salvador', + 'America/Fortaleza', + 'America/Glace_Bay', + 'America/Godthab', + 'America/Goose_Bay', + 'America/Grand_Turk', + 'America/Grenada', + 'America/Guadeloupe', + 'America/Guatemala', + 'America/Guayaquil', + 'America/Guyana', + 'America/Halifax', + 'America/Havana', + 'America/Hermosillo', + 'America/Indiana/Indianapolis', + 'America/Indiana/Knox', + 'America/Indiana/Marengo', + 'America/Indiana/Petersburg', + 'America/Indiana/Tell_City', + 'America/Indiana/Vevay', + 'America/Indiana/Vincennes', + 'America/Indiana/Winamac', + 'America/Inuvik', + 'America/Iqaluit', + 'America/Jamaica', + 'America/Juneau', + 'America/Kentucky/Louisville', + 'America/Kentucky/Monticello', + 'America/Kralendijk', + 'America/La_Paz', + 'America/Lima', + 'America/Los_Angeles', + 'America/Lower_Princes', + 'America/Maceio', + 'America/Managua', + 'America/Manaus', + 'America/Marigot', + 'America/Martinique', + 'America/Matamoros', + 'America/Mazatlan', + 'America/Menominee', + 'America/Merida', + 'America/Metlakatla', + 'America/Mexico_City', + 'America/Miquelon', + 'America/Moncton', + 'America/Monterrey', + 'America/Montevideo', + 'America/Montreal', + 'America/Montserrat', + 'America/Nassau', + 'America/New_York', + 'America/Nipigon', + 'America/Nome', + 'America/Noronha', + 'America/North_Dakota/Beulah', + 'America/North_Dakota/Center', + 'America/North_Dakota/New_Salem', + 'America/Ojinaga', + 'America/Panama', + 'America/Pangnirtung', + 'America/Paramaribo', + 'America/Phoenix', + 'America/Port-au-Prince', + 'America/Port_of_Spain', + 'America/Porto_Velho', + 'America/Puerto_Rico', + 'America/Rainy_River', + 'America/Rankin_Inlet', + 'America/Recife', + 'America/Regina', + 'America/Resolute', + 'America/Rio_Branco', + 'America/Santa_Isabel', + 'America/Santarem', + 'America/Santiago', + 'America/Santo_Domingo', + 'America/Sao_Paulo', + 'America/Scoresbysund', + 'America/Shiprock', + 'America/Sitka', + 'America/St_Barthelemy', + 'America/St_Johns', + 'America/St_Kitts', + 'America/St_Lucia', + 'America/St_Thomas', + 'America/St_Vincent', + 'America/Swift_Current', + 'America/Tegucigalpa', + 'America/Thule', + 'America/Thunder_Bay', + 'America/Tijuana', + 'America/Toronto', + 'America/Tortola', + 'America/Vancouver', + 'America/Whitehorse', + 'America/Winnipeg', + 'America/Yakutat', + 'America/Yellowknife', + 'Antarctica/Casey', + 'Antarctica/Davis', + 'Antarctica/DumontDUrville', + 'Antarctica/Macquarie', + 'Antarctica/Mawson', + 'Antarctica/McMurdo', + 'Antarctica/Palmer', + 'Antarctica/Rothera', + 'Antarctica/South_Pole', + 'Antarctica/Syowa', + 'Antarctica/Vostok', + 'Arctic/Longyearbyen', + 'Asia/Aden', + 'Asia/Almaty', + 'Asia/Amman', + 'Asia/Anadyr', + 'Asia/Aqtau', + 'Asia/Aqtobe', + 'Asia/Ashgabat', + 'Asia/Baghdad', + 'Asia/Bahrain', + 'Asia/Baku', + 'Asia/Bangkok', + 'Asia/Beirut', + 'Asia/Bishkek', + 'Asia/Brunei', + 'Asia/Choibalsan', + 'Asia/Chongqing', + 'Asia/Colombo', + 'Asia/Damascus', + 'Asia/Dhaka', + 'Asia/Dili', + 
'Asia/Dubai', + 'Asia/Dushanbe', + 'Asia/Gaza', + 'Asia/Harbin', + 'Asia/Hebron', + 'Asia/Ho_Chi_Minh', + 'Asia/Hong_Kong', + 'Asia/Hovd', + 'Asia/Irkutsk', + 'Asia/Jakarta', + 'Asia/Jayapura', + 'Asia/Jerusalem', + 'Asia/Kabul', + 'Asia/Kamchatka', + 'Asia/Karachi', + 'Asia/Kashgar', + 'Asia/Kathmandu', + 'Asia/Khandyga', + 'Asia/Kolkata', + 'Asia/Krasnoyarsk', + 'Asia/Kuala_Lumpur', + 'Asia/Kuching', + 'Asia/Kuwait', + 'Asia/Macau', + 'Asia/Magadan', + 'Asia/Makassar', + 'Asia/Manila', + 'Asia/Muscat', + 'Asia/Nicosia', + 'Asia/Novokuznetsk', + 'Asia/Novosibirsk', + 'Asia/Omsk', + 'Asia/Oral', + 'Asia/Phnom_Penh', + 'Asia/Pontianak', + 'Asia/Pyongyang', + 'Asia/Qatar', + 'Asia/Qyzylorda', + 'Asia/Rangoon', + 'Asia/Riyadh', + 'Asia/Sakhalin', + 'Asia/Samarkand', + 'Asia/Seoul', + 'Asia/Shanghai', + 'Asia/Singapore', + 'Asia/Taipei', + 'Asia/Tashkent', + 'Asia/Tbilisi', + 'Asia/Tehran', + 'Asia/Thimphu', + 'Asia/Tokyo', + 'Asia/Ulaanbaatar', + 'Asia/Urumqi', + 'Asia/Ust-Nera', + 'Asia/Vientiane', + 'Asia/Vladivostok', + 'Asia/Yakutsk', + 'Asia/Yekaterinburg', + 'Asia/Yerevan', + 'Atlantic/Azores', + 'Atlantic/Bermuda', + 'Atlantic/Canary', + 'Atlantic/Cape_Verde', + 'Atlantic/Faroe', + 'Atlantic/Madeira', + 'Atlantic/Reykjavik', + 'Atlantic/South_Georgia', + 'Atlantic/St_Helena', + 'Atlantic/Stanley', + 'Australia/Adelaide', + 'Australia/Brisbane', + 'Australia/Broken_Hill', + 'Australia/Currie', + 'Australia/Darwin', + 'Australia/Eucla', + 'Australia/Hobart', + 'Australia/Lindeman', + 'Australia/Lord_Howe', + 'Australia/Melbourne', + 'Australia/Perth', + 'Australia/Sydney', + 'Canada/Atlantic', + 'Canada/Central', + 'Canada/Eastern', + 'Canada/Mountain', + 'Canada/Newfoundland', + 'Canada/Pacific', + 'Europe/Amsterdam', + 'Europe/Andorra', + 'Europe/Athens', + 'Europe/Belgrade', + 'Europe/Berlin', + 'Europe/Bratislava', + 'Europe/Brussels', + 'Europe/Bucharest', + 'Europe/Budapest', + 'Europe/Busingen', + 'Europe/Chisinau', + 'Europe/Copenhagen', + 'Europe/Dublin', + 'Europe/Gibraltar', + 'Europe/Guernsey', + 'Europe/Helsinki', + 'Europe/Isle_of_Man', + 'Europe/Istanbul', + 'Europe/Jersey', + 'Europe/Kaliningrad', + 'Europe/Kiev', + 'Europe/Lisbon', + 'Europe/Ljubljana', + 'Europe/London', + 'Europe/Luxembourg', + 'Europe/Madrid', + 'Europe/Malta', + 'Europe/Mariehamn', + 'Europe/Minsk', + 'Europe/Monaco', + 'Europe/Moscow', + 'Europe/Oslo', + 'Europe/Paris', + 'Europe/Podgorica', + 'Europe/Prague', + 'Europe/Riga', + 'Europe/Rome', + 'Europe/Samara', + 'Europe/San_Marino', + 'Europe/Sarajevo', + 'Europe/Simferopol', + 'Europe/Skopje', + 'Europe/Sofia', + 'Europe/Stockholm', + 'Europe/Tallinn', + 'Europe/Tirane', + 'Europe/Uzhgorod', + 'Europe/Vaduz', + 'Europe/Vatican', + 'Europe/Vienna', + 'Europe/Vilnius', + 'Europe/Volgograd', + 'Europe/Warsaw', + 'Europe/Zagreb', + 'Europe/Zaporozhye', + 'Europe/Zurich', + 'GMT', + 'Indian/Antananarivo', + 'Indian/Chagos', + 'Indian/Christmas', + 'Indian/Cocos', + 'Indian/Comoro', + 'Indian/Kerguelen', + 'Indian/Mahe', + 'Indian/Maldives', + 'Indian/Mauritius', + 'Indian/Mayotte', + 'Indian/Reunion', + 'Pacific/Apia', + 'Pacific/Auckland', + 'Pacific/Chatham', + 'Pacific/Chuuk', + 'Pacific/Easter', + 'Pacific/Efate', + 'Pacific/Enderbury', + 'Pacific/Fakaofo', + 'Pacific/Fiji', + 'Pacific/Funafuti', + 'Pacific/Galapagos', + 'Pacific/Gambier', + 'Pacific/Guadalcanal', + 'Pacific/Guam', + 'Pacific/Honolulu', + 'Pacific/Johnston', + 'Pacific/Kiritimati', + 'Pacific/Kosrae', + 'Pacific/Kwajalein', + 'Pacific/Majuro', + 'Pacific/Marquesas', + 
'Pacific/Midway', + 'Pacific/Nauru', + 'Pacific/Niue', + 'Pacific/Norfolk', + 'Pacific/Noumea', + 'Pacific/Pago_Pago', + 'Pacific/Palau', + 'Pacific/Pitcairn', + 'Pacific/Pohnpei', + 'Pacific/Port_Moresby', + 'Pacific/Rarotonga', + 'Pacific/Saipan', + 'Pacific/Tahiti', + 'Pacific/Tarawa', + 'Pacific/Tongatapu', + 'Pacific/Wake', + 'Pacific/Wallis', + 'US/Alaska', + 'US/Arizona', + 'US/Central', + 'US/Eastern', + 'US/Hawaii', + 'US/Mountain', + 'US/Pacific', + 'UTC'] +common_timezones = [ + tz for tz in common_timezones if tz in all_timezones] + +common_timezones_set = set(common_timezones) diff --git a/awx/lib/site-packages/pytz/exceptions.py b/awx/lib/site-packages/pytz/exceptions.py new file mode 100644 index 0000000000..0376108e14 --- /dev/null +++ b/awx/lib/site-packages/pytz/exceptions.py @@ -0,0 +1,48 @@ +''' +Custom exceptions raised by pytz. +''' + +__all__ = [ + 'UnknownTimeZoneError', 'InvalidTimeError', 'AmbiguousTimeError', + 'NonExistentTimeError', + ] + + +class UnknownTimeZoneError(KeyError): + '''Exception raised when pytz is passed an unknown timezone. + + >>> isinstance(UnknownTimeZoneError(), LookupError) + True + + This class is actually a subclass of KeyError to provide backwards + compatibility with code relying on the undocumented behavior of earlier + pytz releases. + + >>> isinstance(UnknownTimeZoneError(), KeyError) + True + ''' + pass + + +class InvalidTimeError(Exception): + '''Base class for invalid time exceptions.''' + + +class AmbiguousTimeError(InvalidTimeError): + '''Exception raised when attempting to create an ambiguous wallclock time. + + At the end of a DST transition period, a particular wallclock time will + occur twice (once before the clocks are set back, once after). Both + possibilities may be correct, unless further information is supplied. + + See DstTzInfo.normalize() for more info + ''' + + +class NonExistentTimeError(InvalidTimeError): + '''Exception raised when attempting to create a wallclock time that + cannot exist. + + At the start of a DST transition period, the wallclock time jumps forward. + The instants jumped over never occur. + ''' diff --git a/awx/lib/site-packages/pytz/reference.py b/awx/lib/site-packages/pytz/reference.py new file mode 100644 index 0000000000..3dda13e75c --- /dev/null +++ b/awx/lib/site-packages/pytz/reference.py @@ -0,0 +1,127 @@ +''' +Reference tzinfo implementations from the Python docs. +Used for testing against as they are only correct for the years +1987 to 2006. Do not use these for real code. +''' + +from datetime import tzinfo, timedelta, datetime +from pytz import utc, UTC, HOUR, ZERO + +# A class building tzinfo objects for fixed-offset time zones. +# Note that FixedOffset(0, "UTC") is a different way to build a +# UTC tzinfo object. + +class FixedOffset(tzinfo): + """Fixed offset in minutes east from UTC.""" + + def __init__(self, offset, name): + self.__offset = timedelta(minutes = offset) + self.__name = name + + def utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return self.__name + + def dst(self, dt): + return ZERO + +# A class capturing the platform's idea of local time. 
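+# (Illustrative: datetime.now(Local), with Local defined below, attaches the
+# platform's current UTC offset to the local wall-clock time.)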
+ +import time as _time + +STDOFFSET = timedelta(seconds = -_time.timezone) +if _time.daylight: + DSTOFFSET = timedelta(seconds = -_time.altzone) +else: + DSTOFFSET = STDOFFSET + +DSTDIFF = DSTOFFSET - STDOFFSET + +class LocalTimezone(tzinfo): + + def utcoffset(self, dt): + if self._isdst(dt): + return DSTOFFSET + else: + return STDOFFSET + + def dst(self, dt): + if self._isdst(dt): + return DSTDIFF + else: + return ZERO + + def tzname(self, dt): + return _time.tzname[self._isdst(dt)] + + def _isdst(self, dt): + tt = (dt.year, dt.month, dt.day, + dt.hour, dt.minute, dt.second, + dt.weekday(), 0, -1) + stamp = _time.mktime(tt) + tt = _time.localtime(stamp) + return tt.tm_isdst > 0 + +Local = LocalTimezone() + +# A complete implementation of current DST rules for major US time zones. + +def first_sunday_on_or_after(dt): + days_to_go = 6 - dt.weekday() + if days_to_go: + dt += timedelta(days_to_go) + return dt + +# In the US, DST starts at 2am (standard time) on the first Sunday in April. +DSTSTART = datetime(1, 4, 1, 2) +# and ends at 2am (DST time; 1am standard time) on the last Sunday of Oct. +# which is the first Sunday on or after Oct 25. +DSTEND = datetime(1, 10, 25, 1) + +class USTimeZone(tzinfo): + + def __init__(self, hours, reprname, stdname, dstname): + self.stdoffset = timedelta(hours=hours) + self.reprname = reprname + self.stdname = stdname + self.dstname = dstname + + def __repr__(self): + return self.reprname + + def tzname(self, dt): + if self.dst(dt): + return self.dstname + else: + return self.stdname + + def utcoffset(self, dt): + return self.stdoffset + self.dst(dt) + + def dst(self, dt): + if dt is None or dt.tzinfo is None: + # An exception may be sensible here, in one or both cases. + # It depends on how you want to treat them. The default + # fromutc() implementation (called by the default astimezone() + # implementation) passes a datetime with dt.tzinfo is self. + return ZERO + assert dt.tzinfo is self + + # Find first Sunday in April & the last in October. + start = first_sunday_on_or_after(DSTSTART.replace(year=dt.year)) + end = first_sunday_on_or_after(DSTEND.replace(year=dt.year)) + + # Can't compare naive to aware objects, so strip the timezone from + # dt first. 
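+        # (For example, in 2002 this gives start == datetime(2002, 4, 7, 2, 0)
+        # and end == datetime(2002, 10, 27, 1, 0), both naive -- an
+        # illustrative check of the rules above.)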
+        if start <= dt.replace(tzinfo=None) < end:
+            return HOUR
+        else:
+            return ZERO
+
+Eastern = USTimeZone(-5, "Eastern", "EST", "EDT")
+Central = USTimeZone(-6, "Central", "CST", "CDT")
+Mountain = USTimeZone(-7, "Mountain", "MST", "MDT")
+Pacific = USTimeZone(-8, "Pacific", "PST", "PDT")
+
diff --git a/awx/lib/site-packages/pytz/tzfile.py b/awx/lib/site-packages/pytz/tzfile.py
new file mode 100644
index 0000000000..9c007c8099
--- /dev/null
+++ b/awx/lib/site-packages/pytz/tzfile.py
@@ -0,0 +1,137 @@
+#!/usr/bin/env python
+'''
+$Id: tzfile.py,v 1.8 2004/06/03 00:15:24 zenzen Exp $
+'''
+
+try:
+    from cStringIO import StringIO
+except ImportError:
+    from io import StringIO
+from datetime import datetime, timedelta
+from struct import unpack, calcsize
+
+from pytz.tzinfo import StaticTzInfo, DstTzInfo, memorized_ttinfo
+from pytz.tzinfo import memorized_datetime, memorized_timedelta
+
+def _byte_string(s):
+    """Cast a string or byte string to an ASCII byte string."""
+    return s.encode('US-ASCII')
+
+_NULL = _byte_string('\0')
+
+def _std_string(s):
+    """Cast a string or byte string to an ASCII string."""
+    return str(s.decode('US-ASCII'))
+
+def build_tzinfo(zone, fp):
+    head_fmt = '>4s c 15x 6l'
+    head_size = calcsize(head_fmt)
+    (magic, format, ttisgmtcnt, ttisstdcnt,leapcnt, timecnt,
+        typecnt, charcnt) = unpack(head_fmt, fp.read(head_size))
+
+    # Make sure it is a tzfile(5) file
+    assert magic == _byte_string('TZif'), 'Got magic %s' % repr(magic)
+
+    # Read out the transition times, localtime indices and ttinfo structures.
+    data_fmt = '>%(timecnt)dl %(timecnt)dB %(ttinfo)s %(charcnt)ds' % dict(
+        timecnt=timecnt, ttinfo='lBB'*typecnt, charcnt=charcnt)
+    data_size = calcsize(data_fmt)
+    data = unpack(data_fmt, fp.read(data_size))
+
+    # make sure we unpacked the right number of values
+    assert len(data) == 2 * timecnt + 3 * typecnt + 1
+    transitions = [memorized_datetime(trans)
+                   for trans in data[:timecnt]]
+    lindexes = list(data[timecnt:2 * timecnt])
+    ttinfo_raw = data[2 * timecnt:-1]
+    tznames_raw = data[-1]
+    del data
+
+    # Process ttinfo into separate structs
+    ttinfo = []
+    tznames = {}
+    i = 0
+    while i < len(ttinfo_raw):
+        # have we looked up this timezone name yet?
+        tzname_offset = ttinfo_raw[i+2]
+        if tzname_offset not in tznames:
+            nul = tznames_raw.find(_NULL, tzname_offset)
+            if nul < 0:
+                nul = len(tznames_raw)
+            tznames[tzname_offset] = _std_string(
+                tznames_raw[tzname_offset:nul])
+        ttinfo.append((ttinfo_raw[i],
+                       bool(ttinfo_raw[i+1]),
+                       tznames[tzname_offset]))
+        i += 3
+
+    # Now build the timezone object
+    if len(transitions) == 0:
+        ttinfo[0][0], ttinfo[0][2]
+        cls = type(zone, (StaticTzInfo,), dict(
+            zone=zone,
+            _utcoffset=memorized_timedelta(ttinfo[0][0]),
+            _tzname=ttinfo[0][2]))
+    else:
+        # Early dates use the first standard time ttinfo
+        i = 0
+        while ttinfo[i][1]:
+            i += 1
+        if ttinfo[i] == ttinfo[lindexes[0]]:
+            transitions[0] = datetime.min
+        else:
+            transitions.insert(0, datetime.min)
+            lindexes.insert(0, i)
+
+        # calculate transition info
+        transition_info = []
+        for i in range(len(transitions)):
+            inf = ttinfo[lindexes[i]]
+            utcoffset = inf[0]
+            if not inf[1]:
+                dst = 0
+            else:
+                for j in range(i-1, -1, -1):
+                    prev_inf = ttinfo[lindexes[j]]
+                    if not prev_inf[1]:
+                        break
+                dst = inf[0] - prev_inf[0] # dst offset
+
+                # Bad dst? Look further. DST > 24 hours happens when
+                # a timezone has moved across the international dateline.
+ if dst <= 0 or dst > 3600*3: + for j in range(i+1, len(transitions)): + stdinf = ttinfo[lindexes[j]] + if not stdinf[1]: + dst = inf[0] - stdinf[0] + if dst > 0: + break # Found a useful std time. + + tzname = inf[2] + + # Round utcoffset and dst to the nearest minute or the + # datetime library will complain. Conversions to these timezones + # might be up to plus or minus 30 seconds out, but it is + # the best we can do. + utcoffset = int((utcoffset + 30) // 60) * 60 + dst = int((dst + 30) // 60) * 60 + transition_info.append(memorized_ttinfo(utcoffset, dst, tzname)) + + cls = type(zone, (DstTzInfo,), dict( + zone=zone, + _utc_transition_times=transitions, + _transition_info=transition_info)) + + return cls() + +if __name__ == '__main__': + import os.path + from pprint import pprint + base = os.path.join(os.path.dirname(__file__), 'zoneinfo') + tz = build_tzinfo('Australia/Melbourne', + open(os.path.join(base,'Australia','Melbourne'), 'rb')) + tz = build_tzinfo('US/Eastern', + open(os.path.join(base,'US','Eastern'), 'rb')) + pprint(tz._utc_transition_times) + #print tz.asPython(4) + #print tz.transitions_mapping diff --git a/awx/lib/site-packages/pytz/tzinfo.py b/awx/lib/site-packages/pytz/tzinfo.py new file mode 100644 index 0000000000..a1e43cdf0c --- /dev/null +++ b/awx/lib/site-packages/pytz/tzinfo.py @@ -0,0 +1,563 @@ +'''Base classes and helpers for building zone specific tzinfo classes''' + +from datetime import datetime, timedelta, tzinfo +from bisect import bisect_right +try: + set +except NameError: + from sets import Set as set + +import pytz +from pytz.exceptions import AmbiguousTimeError, NonExistentTimeError + +__all__ = [] + +_timedelta_cache = {} +def memorized_timedelta(seconds): + '''Create only one instance of each distinct timedelta''' + try: + return _timedelta_cache[seconds] + except KeyError: + delta = timedelta(seconds=seconds) + _timedelta_cache[seconds] = delta + return delta + +_epoch = datetime.utcfromtimestamp(0) +_datetime_cache = {0: _epoch} +def memorized_datetime(seconds): + '''Create only one instance of each distinct datetime''' + try: + return _datetime_cache[seconds] + except KeyError: + # NB. 
We can't just do datetime.utcfromtimestamp(seconds) as this + # fails with negative values under Windows (Bug #90096) + dt = _epoch + timedelta(seconds=seconds) + _datetime_cache[seconds] = dt + return dt + +_ttinfo_cache = {} +def memorized_ttinfo(*args): + '''Create only one instance of each distinct tuple''' + try: + return _ttinfo_cache[args] + except KeyError: + ttinfo = ( + memorized_timedelta(args[0]), + memorized_timedelta(args[1]), + args[2] + ) + _ttinfo_cache[args] = ttinfo + return ttinfo + +_notime = memorized_timedelta(0) + +def _to_seconds(td): + '''Convert a timedelta to seconds''' + return td.seconds + td.days * 24 * 60 * 60 + + +class BaseTzInfo(tzinfo): + # Overridden in subclass + _utcoffset = None + _tzname = None + zone = None + + def __str__(self): + return self.zone + + +class StaticTzInfo(BaseTzInfo): + '''A timezone that has a constant offset from UTC + + These timezones are rare, as most locations have changed their + offset at some point in their history + ''' + def fromutc(self, dt): + '''See datetime.tzinfo.fromutc''' + if dt.tzinfo is not None and dt.tzinfo is not self: + raise ValueError('fromutc: dt.tzinfo is not self') + return (dt + self._utcoffset).replace(tzinfo=self) + + def utcoffset(self, dt, is_dst=None): + '''See datetime.tzinfo.utcoffset + + is_dst is ignored for StaticTzInfo, and exists only to + retain compatibility with DstTzInfo. + ''' + return self._utcoffset + + def dst(self, dt, is_dst=None): + '''See datetime.tzinfo.dst + + is_dst is ignored for StaticTzInfo, and exists only to + retain compatibility with DstTzInfo. + ''' + return _notime + + def tzname(self, dt, is_dst=None): + '''See datetime.tzinfo.tzname + + is_dst is ignored for StaticTzInfo, and exists only to + retain compatibility with DstTzInfo. + ''' + return self._tzname + + def localize(self, dt, is_dst=False): + '''Convert naive time to local time''' + if dt.tzinfo is not None: + raise ValueError('Not naive datetime (tzinfo is already set)') + return dt.replace(tzinfo=self) + + def normalize(self, dt, is_dst=False): + '''Correct the timezone information on the given datetime. + + This is normally a no-op, as StaticTzInfo timezones never have + ambiguous cases to correct: + + >>> from pytz import timezone + >>> gmt = timezone('GMT') + >>> isinstance(gmt, StaticTzInfo) + True + >>> dt = datetime(2011, 5, 8, 1, 2, 3, tzinfo=gmt) + >>> gmt.normalize(dt) is dt + True + + The supported method of converting between timezones is to use + datetime.astimezone(). Currently normalize() also works: + + >>> la = timezone('America/Los_Angeles') + >>> dt = la.localize(datetime(2011, 5, 7, 1, 2, 3)) + >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)' + >>> gmt.normalize(dt).strftime(fmt) + '2011-05-07 08:02:03 GMT (+0000)' + ''' + if dt.tzinfo is self: + return dt + if dt.tzinfo is None: + raise ValueError('Naive time - no tzinfo set') + return dt.astimezone(self) + + def __repr__(self): + return '<StaticTzInfo %r>' % (self.zone,) + + def __reduce__(self): + # Special pickle to zone remains a singleton and to cope with + # database changes. + return pytz._p, (self.zone,) + + +class DstTzInfo(BaseTzInfo): + '''A timezone that has a variable offset from UTC + + The offset might change if daylight savings time comes into effect, + or at a point in history when the region decides to change their + timezone definition. 
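+
+    For example (an illustrative check):
+
+    >>> from pytz import timezone
+    >>> isinstance(timezone('US/Eastern'), DstTzInfo)
+    True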
+    '''
+    # Overridden in subclass
+    _utc_transition_times = None # Sorted list of DST transition times in UTC
+    _transition_info = None # [(utcoffset, dstoffset, tzname)] corresponding
+                            # to _utc_transition_times entries
+    zone = None
+
+    # Set in __init__
+    _tzinfos = None
+    _dst = None # DST offset
+
+    def __init__(self, _inf=None, _tzinfos=None):
+        if _inf:
+            self._tzinfos = _tzinfos
+            self._utcoffset, self._dst, self._tzname = _inf
+        else:
+            _tzinfos = {}
+            self._tzinfos = _tzinfos
+            self._utcoffset, self._dst, self._tzname = self._transition_info[0]
+            _tzinfos[self._transition_info[0]] = self
+            for inf in self._transition_info[1:]:
+                if inf not in _tzinfos:
+                    _tzinfos[inf] = self.__class__(inf, _tzinfos)
+
+    def fromutc(self, dt):
+        '''See datetime.tzinfo.fromutc'''
+        if (dt.tzinfo is not None
+            and getattr(dt.tzinfo, '_tzinfos', None) is not self._tzinfos):
+            raise ValueError('fromutc: dt.tzinfo is not self')
+        dt = dt.replace(tzinfo=None)
+        idx = max(0, bisect_right(self._utc_transition_times, dt) - 1)
+        inf = self._transition_info[idx]
+        return (dt + inf[0]).replace(tzinfo=self._tzinfos[inf])
+
+    def normalize(self, dt):
+        '''Correct the timezone information on the given datetime
+
+        If date arithmetic crosses DST boundaries, the tzinfo
+        is not magically adjusted. This method normalizes the
+        tzinfo to the correct one.
+
+        To test, first we need to do some setup
+
+        >>> from pytz import timezone
+        >>> utc = timezone('UTC')
+        >>> eastern = timezone('US/Eastern')
+        >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)'
+
+        We next create a datetime right on an end-of-DST transition point,
+        the instant when the wallclocks are wound back one hour.
+
+        >>> utc_dt = datetime(2002, 10, 27, 6, 0, 0, tzinfo=utc)
+        >>> loc_dt = utc_dt.astimezone(eastern)
+        >>> loc_dt.strftime(fmt)
+        '2002-10-27 01:00:00 EST (-0500)'
+
+        Now, if we subtract a few minutes from it, note that the timezone
+        information has not changed.
+
+        >>> before = loc_dt - timedelta(minutes=10)
+        >>> before.strftime(fmt)
+        '2002-10-27 00:50:00 EST (-0500)'
+
+        But we can fix that by calling the normalize method
+
+        >>> before = eastern.normalize(before)
+        >>> before.strftime(fmt)
+        '2002-10-27 01:50:00 EDT (-0400)'
+
+        The supported method of converting between timezones is to use
+        datetime.astimezone(). Currently, normalize() also works:
+
+        >>> th = timezone('Asia/Bangkok')
+        >>> am = timezone('Europe/Amsterdam')
+        >>> dt = th.localize(datetime(2011, 5, 7, 1, 2, 3))
+        >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)'
+        >>> am.normalize(dt).strftime(fmt)
+        '2011-05-06 20:02:03 CEST (+0200)'
+        '''
+        if dt.tzinfo is None:
+            raise ValueError('Naive time - no tzinfo set')
+
+        # Convert dt in localtime to UTC
+        offset = dt.tzinfo._utcoffset
+        dt = dt.replace(tzinfo=None)
+        dt = dt - offset
+        # convert it back, and return it
+        return self.fromutc(dt)
+
+    def localize(self, dt, is_dst=False):
+        '''Convert naive time to local time.
+
+        This method should be used to construct localtimes, rather
+        than passing a tzinfo argument to a datetime constructor.
+
+        is_dst is used to determine the correct timezone in the ambiguous
+        period at the end of daylight savings time.
+
+        >>> from pytz import timezone
+        >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)'
+        >>> amdam = timezone('Europe/Amsterdam')
+        >>> dt = datetime(2004, 10, 31, 2, 0, 0)
+        >>> loc_dt1 = amdam.localize(dt, is_dst=True)
+        >>> loc_dt2 = amdam.localize(dt, is_dst=False)
+        >>> loc_dt1.strftime(fmt)
+        '2004-10-31 02:00:00 CEST (+0200)'
+        >>> loc_dt2.strftime(fmt)
+        '2004-10-31 02:00:00 CET (+0100)'
+        >>> str(loc_dt2 - loc_dt1)
+        '1:00:00'
+
+        Use is_dst=None to raise an AmbiguousTimeError for ambiguous
+        times at the end of daylight savings
+
+        >>> try:
+        ...     loc_dt1 = amdam.localize(dt, is_dst=None)
+        ... except AmbiguousTimeError:
+        ...     print('Ambiguous')
+        Ambiguous
+
+        is_dst defaults to False
+
+        >>> amdam.localize(dt) == amdam.localize(dt, False)
+        True
+
+        is_dst is also used to determine the correct timezone in the
+        wallclock times jumped over at the start of daylight savings time.
+
+        >>> pacific = timezone('US/Pacific')
+        >>> dt = datetime(2008, 3, 9, 2, 0, 0)
+        >>> ploc_dt1 = pacific.localize(dt, is_dst=True)
+        >>> ploc_dt2 = pacific.localize(dt, is_dst=False)
+        >>> ploc_dt1.strftime(fmt)
+        '2008-03-09 02:00:00 PDT (-0700)'
+        >>> ploc_dt2.strftime(fmt)
+        '2008-03-09 02:00:00 PST (-0800)'
+        >>> str(ploc_dt2 - ploc_dt1)
+        '1:00:00'
+
+        Use is_dst=None to raise a NonExistentTimeError for these skipped
+        times.
+
+        >>> try:
+        ...     loc_dt1 = pacific.localize(dt, is_dst=None)
+        ... except NonExistentTimeError:
+        ...     print('Non-existent')
+        Non-existent
+        '''
+        if dt.tzinfo is not None:
+            raise ValueError('Not naive datetime (tzinfo is already set)')
+
+        # Find the two best possibilities.
+        possible_loc_dt = set()
+        for delta in [timedelta(days=-1), timedelta(days=1)]:
+            loc_dt = dt + delta
+            idx = max(0, bisect_right(
+                self._utc_transition_times, loc_dt) - 1)
+            inf = self._transition_info[idx]
+            tzinfo = self._tzinfos[inf]
+            loc_dt = tzinfo.normalize(dt.replace(tzinfo=tzinfo))
+            if loc_dt.replace(tzinfo=None) == dt:
+                possible_loc_dt.add(loc_dt)
+
+        if len(possible_loc_dt) == 1:
+            return possible_loc_dt.pop()
+
+        # If there are no possibly correct timezones, we are attempting
+        # to convert a time that never happened - the time period jumped
+        # during the start-of-DST transition period.
+        if len(possible_loc_dt) == 0:
+            # If we refuse to guess, raise an exception.
+            if is_dst is None:
+                raise NonExistentTimeError(dt)
+
+            # If we are forcing the pre-DST side of the DST transition, we
+            # obtain the correct timezone by winding the clock forward a few
+            # hours.
+            elif is_dst:
+                return self.localize(
+                    dt + timedelta(hours=6), is_dst=True) - timedelta(hours=6)
+
+            # If we are forcing the post-DST side of the DST transition, we
+            # obtain the correct timezone by winding the clock back.
+            else:
+                return self.localize(
+                    dt - timedelta(hours=6), is_dst=False) + timedelta(hours=6)
+
+        # If we get this far, we have multiple possible timezones - this
+        # is an ambiguous case occurring during the end-of-DST transition.
+
+        # If told to be strict, raise an exception since we have an
+        # ambiguous case
+        if is_dst is None:
+            raise AmbiguousTimeError(dt)
+
+        # Filter out the possibilities that don't match the requested
+        # is_dst
+        filtered_possible_loc_dt = [
+            p for p in possible_loc_dt
+            if bool(p.tzinfo._dst) == is_dst
+        ]
+
+        # Hopefully we only have one possibility left. Return it.
+        if len(filtered_possible_loc_dt) == 1:
+            return filtered_possible_loc_dt[0]
+
+        if len(filtered_possible_loc_dt) == 0:
+            filtered_possible_loc_dt = list(possible_loc_dt)
+
+        # If we get this far, we are in a weird timezone transition
+        # where the clocks have been wound back but is_dst is the same
+        # in both (e.g. Europe/Warsaw 1915 when they switched to CET).
+        # At this point, we just have to guess unless we allow more
+        # hints to be passed in (such as the UTC offset or abbreviation),
+        # but that is just getting silly.
+        #
+        # Choose the earliest (by UTC) applicable timezone.
+        sorting_keys = {}
+        for local_dt in filtered_possible_loc_dt:
+            key = local_dt.replace(tzinfo=None) - local_dt.tzinfo._utcoffset
+            sorting_keys[key] = local_dt
+        first_key = sorted(sorting_keys)[0]
+        return sorting_keys[first_key]
+
+    def utcoffset(self, dt, is_dst=None):
+        '''See datetime.tzinfo.utcoffset
+
+        The is_dst parameter may be used to remove ambiguity during DST
+        transitions.
+
+        >>> from pytz import timezone
+        >>> tz = timezone('America/St_Johns')
+        >>> ambiguous = datetime(2009, 10, 31, 23, 30)
+
+        >>> tz.utcoffset(ambiguous, is_dst=False)
+        datetime.timedelta(-1, 73800)
+
+        >>> tz.utcoffset(ambiguous, is_dst=True)
+        datetime.timedelta(-1, 77400)
+
+        >>> try:
+        ...     tz.utcoffset(ambiguous)
+        ... except AmbiguousTimeError:
+        ...     print('Ambiguous')
+        Ambiguous
+
+        '''
+        if dt is None:
+            return None
+        elif dt.tzinfo is not self:
+            dt = self.localize(dt, is_dst)
+            return dt.tzinfo._utcoffset
+        else:
+            return self._utcoffset
+
+    def dst(self, dt, is_dst=None):
+        '''See datetime.tzinfo.dst
+
+        The is_dst parameter may be used to remove ambiguity during DST
+        transitions.
+
+        >>> from pytz import timezone
+        >>> tz = timezone('America/St_Johns')
+
+        >>> normal = datetime(2009, 9, 1)
+
+        >>> tz.dst(normal)
+        datetime.timedelta(0, 3600)
+        >>> tz.dst(normal, is_dst=False)
+        datetime.timedelta(0, 3600)
+        >>> tz.dst(normal, is_dst=True)
+        datetime.timedelta(0, 3600)
+
+        >>> ambiguous = datetime(2009, 10, 31, 23, 30)
+
+        >>> tz.dst(ambiguous, is_dst=False)
+        datetime.timedelta(0)
+        >>> tz.dst(ambiguous, is_dst=True)
+        datetime.timedelta(0, 3600)
+        >>> try:
+        ...     tz.dst(ambiguous)
+        ... except AmbiguousTimeError:
+        ...     print('Ambiguous')
+        Ambiguous
+
+        '''
+        if dt is None:
+            return None
+        elif dt.tzinfo is not self:
+            dt = self.localize(dt, is_dst)
+            return dt.tzinfo._dst
+        else:
+            return self._dst
+
+    def tzname(self, dt, is_dst=None):
+        '''See datetime.tzinfo.tzname
+
+        The is_dst parameter may be used to remove ambiguity during DST
+        transitions.
+
+        >>> from pytz import timezone
+        >>> tz = timezone('America/St_Johns')
+
+        >>> normal = datetime(2009, 9, 1)
+
+        >>> tz.tzname(normal)
+        'NDT'
+        >>> tz.tzname(normal, is_dst=False)
+        'NDT'
+        >>> tz.tzname(normal, is_dst=True)
+        'NDT'
+
+        >>> ambiguous = datetime(2009, 10, 31, 23, 30)
+
+        >>> tz.tzname(ambiguous, is_dst=False)
+        'NST'
+        >>> tz.tzname(ambiguous, is_dst=True)
+        'NDT'
+        >>> try:
+        ...     tz.tzname(ambiguous)
+        ... except AmbiguousTimeError:
+        ...     print('Ambiguous')
+        Ambiguous
+        '''
+        if dt is None:
+            return self.zone
+        elif dt.tzinfo is not self:
+            dt = self.localize(dt, is_dst)
+            return dt.tzinfo._tzname
+        else:
+            return self._tzname
+
+    def __repr__(self):
+        if self._dst:
+            dst = 'DST'
+        else:
+            dst = 'STD'
+        if self._utcoffset > _notime:
+            return '<DstTzInfo %r %s+%s %s>' % (
+                self.zone, self._tzname, self._utcoffset, dst
+            )
+        else:
+            return '<DstTzInfo %r %s%s %s>' % (
+                self.zone, self._tzname, self._utcoffset, dst
+            )
+
+    def __reduce__(self):
+        # Special pickle so the zone remains a singleton and to cope with
+        # database changes.
+        return pytz._p, (
+            self.zone,
+            _to_seconds(self._utcoffset),
+            _to_seconds(self._dst),
+            self._tzname
+        )
+
+
+def unpickler(zone, utcoffset=None, dstoffset=None, tzname=None):
+    """Factory function for unpickling pytz tzinfo instances.
+
+    This is shared for both StaticTzInfo and DstTzInfo instances, because
+    database changes could cause a zone's implementation to switch between
+    these two base classes and we can't break pickles on a pytz version
+    upgrade.
+    """
+    # Raises a KeyError if zone no longer exists, which should never happen
+    # and would be a bug.
+    tz = pytz.timezone(zone)
+
+    # A StaticTzInfo - just return it
+    if utcoffset is None:
+        return tz
+
+    # This pickle was created from a DstTzInfo. We need to
+    # determine which of the list of tzinfo instances for this zone
+    # to use in order to restore the state of any datetime instances using
+    # it correctly.
+    utcoffset = memorized_timedelta(utcoffset)
+    dstoffset = memorized_timedelta(dstoffset)
+    try:
+        return tz._tzinfos[(utcoffset, dstoffset, tzname)]
+    except KeyError:
+        # The particular state requested in this timezone no longer exists.
+        # This indicates a corrupt pickle, or the timezone database has been
+        # corrected violently enough to make this particular
+        # (utcoffset, dstoffset) no longer exist in the zone, or the
+        # abbreviation has been changed.
+        pass
+
+    # See if we can find an entry differing only by tzname. Abbreviations
+    # get changed from the initial guess by the database maintainers to
+    # match reality when this information is discovered.
+    for localized_tz in tz._tzinfos.values():
+        if (localized_tz._utcoffset == utcoffset
+                and localized_tz._dst == dstoffset):
+            return localized_tz
+
+    # This (utcoffset, dstoffset) information has been removed from the
+    # zone. Add it back. This might occur when the database maintainers have
+    # corrected incorrect information. datetime instances using this
+    # incorrect information will continue to do so, exactly as they were
+    # before being pickled. This is purely an overly paranoid safety net - I
+    # doubt this will ever be needed in real life.
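+    # (The resurrected state is memoized in tz._tzinfos, so later unpickles
+    # of the same (utcoffset, dstoffset, tzname) tuple hit the lookup above
+    # and share this single instance.)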
+    inf = (utcoffset, dstoffset, tzname)
+    tz._tzinfos[inf] = tz.__class__(inf, tz._tzinfos)
+    return tz._tzinfos[inf]
+
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Abidjan b/awx/lib/site-packages/pytz/zoneinfo/Africa/Abidjan new file mode 100644 index 0000000000..65d19ec265 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Abidjan differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Accra b/awx/lib/site-packages/pytz/zoneinfo/Africa/Accra new file mode 100644 index 0000000000..8c473eda0b Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Accra differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Addis_Ababa b/awx/lib/site-packages/pytz/zoneinfo/Africa/Addis_Ababa new file mode 100644 index 0000000000..5a95ab6662 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Addis_Ababa differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Algiers b/awx/lib/site-packages/pytz/zoneinfo/Africa/Algiers new file mode 100644 index 0000000000..c888831171 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Algiers differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Asmara b/awx/lib/site-packages/pytz/zoneinfo/Africa/Asmara new file mode 100644 index 0000000000..d1e876e2ff Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Asmara differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Asmera b/awx/lib/site-packages/pytz/zoneinfo/Africa/Asmera new file mode 100644 index 0000000000..d1e876e2ff Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Asmera differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Bamako b/awx/lib/site-packages/pytz/zoneinfo/Africa/Bamako new file mode 100644 index 0000000000..da18d71377 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Bamako differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Bangui b/awx/lib/site-packages/pytz/zoneinfo/Africa/Bangui new file mode 100644 index 0000000000..883e597eb0 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Bangui differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Banjul b/awx/lib/site-packages/pytz/zoneinfo/Africa/Banjul new file mode 100644 index 0000000000..a85a7d8726 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Banjul differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Bissau b/awx/lib/site-packages/pytz/zoneinfo/Africa/Bissau new file mode 100644 index 0000000000..ab4a195a88 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Bissau differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Blantyre b/awx/lib/site-packages/pytz/zoneinfo/Africa/Blantyre new file mode 100644 index 0000000000..2972580dad Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Blantyre differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Brazzaville b/awx/lib/site-packages/pytz/zoneinfo/Africa/Brazzaville new file mode 100644 index 0000000000..abb0c08700 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Brazzaville differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Bujumbura b/awx/lib/site-packages/pytz/zoneinfo/Africa/Bujumbura new file mode 100644 index 0000000000..cac5652457 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Bujumbura differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Cairo b/awx/lib/site-packages/pytz/zoneinfo/Africa/Cairo new file
mode 100644 index 0000000000..1c6a2fc8fa Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Cairo differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Casablanca b/awx/lib/site-packages/pytz/zoneinfo/Africa/Casablanca new file mode 100644 index 0000000000..13b90fa594 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Casablanca differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Ceuta b/awx/lib/site-packages/pytz/zoneinfo/Africa/Ceuta new file mode 100644 index 0000000000..c9b0c08bc8 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Ceuta differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Conakry b/awx/lib/site-packages/pytz/zoneinfo/Africa/Conakry new file mode 100644 index 0000000000..75b8523fb2 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Conakry differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Dakar b/awx/lib/site-packages/pytz/zoneinfo/Africa/Dakar new file mode 100644 index 0000000000..31104133c2 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Dakar differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Dar_es_Salaam b/awx/lib/site-packages/pytz/zoneinfo/Africa/Dar_es_Salaam new file mode 100644 index 0000000000..720d76c830 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Dar_es_Salaam differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Djibouti b/awx/lib/site-packages/pytz/zoneinfo/Africa/Djibouti new file mode 100644 index 0000000000..297d93a3e4 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Djibouti differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Douala b/awx/lib/site-packages/pytz/zoneinfo/Africa/Douala new file mode 100644 index 0000000000..8627f2e05e Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Douala differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/El_Aaiun b/awx/lib/site-packages/pytz/zoneinfo/Africa/El_Aaiun new file mode 100644 index 0000000000..3275161b17 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/El_Aaiun differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Freetown b/awx/lib/site-packages/pytz/zoneinfo/Africa/Freetown new file mode 100644 index 0000000000..720b8e3c90 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Freetown differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Gaborone b/awx/lib/site-packages/pytz/zoneinfo/Africa/Gaborone new file mode 100644 index 0000000000..e2abcb6643 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Gaborone differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Harare b/awx/lib/site-packages/pytz/zoneinfo/Africa/Harare new file mode 100644 index 0000000000..258b393637 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Harare differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Johannesburg b/awx/lib/site-packages/pytz/zoneinfo/Africa/Johannesburg new file mode 100644 index 0000000000..d1bec73815 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Johannesburg differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Juba b/awx/lib/site-packages/pytz/zoneinfo/Africa/Juba new file mode 100644 index 0000000000..20284ff94b Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Juba differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Kampala 
b/awx/lib/site-packages/pytz/zoneinfo/Africa/Kampala new file mode 100644 index 0000000000..b018ba26f2 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Kampala differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Khartoum b/awx/lib/site-packages/pytz/zoneinfo/Africa/Khartoum new file mode 100644 index 0000000000..6f62fd764c Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Khartoum differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Kigali b/awx/lib/site-packages/pytz/zoneinfo/Africa/Kigali new file mode 100644 index 0000000000..c9623c56e8 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Kigali differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Kinshasa b/awx/lib/site-packages/pytz/zoneinfo/Africa/Kinshasa new file mode 100644 index 0000000000..e8481f3476 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Kinshasa differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Lagos b/awx/lib/site-packages/pytz/zoneinfo/Africa/Lagos new file mode 100644 index 0000000000..cbdc0450fc Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Lagos differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Libreville b/awx/lib/site-packages/pytz/zoneinfo/Africa/Libreville new file mode 100644 index 0000000000..d7691ae56f Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Libreville differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Lome b/awx/lib/site-packages/pytz/zoneinfo/Africa/Lome new file mode 100644 index 0000000000..297ec5dae3 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Lome differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Luanda b/awx/lib/site-packages/pytz/zoneinfo/Africa/Luanda new file mode 100644 index 0000000000..576b2043cf Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Luanda differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Lubumbashi b/awx/lib/site-packages/pytz/zoneinfo/Africa/Lubumbashi new file mode 100644 index 0000000000..d3fab52a6c Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Lubumbashi differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Lusaka b/awx/lib/site-packages/pytz/zoneinfo/Africa/Lusaka new file mode 100644 index 0000000000..87d7a95fc7 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Lusaka differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Malabo b/awx/lib/site-packages/pytz/zoneinfo/Africa/Malabo new file mode 100644 index 0000000000..c70de1f99d Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Malabo differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Maputo b/awx/lib/site-packages/pytz/zoneinfo/Africa/Maputo new file mode 100644 index 0000000000..31cfad771a Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Maputo differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Maseru b/awx/lib/site-packages/pytz/zoneinfo/Africa/Maseru new file mode 100644 index 0000000000..117006eead Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Maseru differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Mbabane b/awx/lib/site-packages/pytz/zoneinfo/Africa/Mbabane new file mode 100644 index 0000000000..be6ed60baa Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Mbabane differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Mogadishu 
b/awx/lib/site-packages/pytz/zoneinfo/Africa/Mogadishu new file mode 100644 index 0000000000..bd08463429 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Mogadishu differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Monrovia b/awx/lib/site-packages/pytz/zoneinfo/Africa/Monrovia new file mode 100644 index 0000000000..bd2fa4e631 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Monrovia differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Nairobi b/awx/lib/site-packages/pytz/zoneinfo/Africa/Nairobi new file mode 100644 index 0000000000..72676bb987 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Nairobi differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Ndjamena b/awx/lib/site-packages/pytz/zoneinfo/Africa/Ndjamena new file mode 100644 index 0000000000..8779590e04 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Ndjamena differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Niamey b/awx/lib/site-packages/pytz/zoneinfo/Africa/Niamey new file mode 100644 index 0000000000..799381c310 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Niamey differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Nouakchott b/awx/lib/site-packages/pytz/zoneinfo/Africa/Nouakchott new file mode 100644 index 0000000000..ead817afcd Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Nouakchott differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Ouagadougou b/awx/lib/site-packages/pytz/zoneinfo/Africa/Ouagadougou new file mode 100644 index 0000000000..df782a489c Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Ouagadougou differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Porto-Novo b/awx/lib/site-packages/pytz/zoneinfo/Africa/Porto-Novo new file mode 100644 index 0000000000..600a30d82a Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Porto-Novo differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Sao_Tome b/awx/lib/site-packages/pytz/zoneinfo/Africa/Sao_Tome new file mode 100644 index 0000000000..ddf7fb42e4 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Sao_Tome differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Timbuktu b/awx/lib/site-packages/pytz/zoneinfo/Africa/Timbuktu new file mode 100644 index 0000000000..da18d71377 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Timbuktu differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Tripoli b/awx/lib/site-packages/pytz/zoneinfo/Africa/Tripoli new file mode 100644 index 0000000000..479f5b4575 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Tripoli differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Tunis b/awx/lib/site-packages/pytz/zoneinfo/Africa/Tunis new file mode 100644 index 0000000000..dd559ee763 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Tunis differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Africa/Windhoek b/awx/lib/site-packages/pytz/zoneinfo/Africa/Windhoek new file mode 100644 index 0000000000..6f22b0a7df Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Africa/Windhoek differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Adak b/awx/lib/site-packages/pytz/zoneinfo/America/Adak new file mode 100644 index 0000000000..391ec98ec0 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Adak differ diff --git 
a/awx/lib/site-packages/pytz/zoneinfo/America/Anchorage b/awx/lib/site-packages/pytz/zoneinfo/America/Anchorage new file mode 100644 index 0000000000..d14735026a Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Anchorage differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Anguilla b/awx/lib/site-packages/pytz/zoneinfo/America/Anguilla new file mode 100644 index 0000000000..20bc9464b8 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Anguilla differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Antigua b/awx/lib/site-packages/pytz/zoneinfo/America/Antigua new file mode 100644 index 0000000000..608b635977 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Antigua differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Araguaina b/awx/lib/site-packages/pytz/zoneinfo/America/Araguaina new file mode 100644 index 0000000000..e4ea527f98 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Araguaina differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/Buenos_Aires b/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/Buenos_Aires new file mode 100644 index 0000000000..5a52a51fc8 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/Buenos_Aires differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/Catamarca b/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/Catamarca new file mode 100644 index 0000000000..b9c987bb56 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/Catamarca differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/ComodRivadavia b/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/ComodRivadavia new file mode 100644 index 0000000000..b9c987bb56 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/ComodRivadavia differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/Cordoba b/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/Cordoba new file mode 100644 index 0000000000..a703e957d5 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/Cordoba differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/Jujuy b/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/Jujuy new file mode 100644 index 0000000000..86800f0344 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/Jujuy differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/La_Rioja b/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/La_Rioja new file mode 100644 index 0000000000..333819a15f Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/La_Rioja differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/Mendoza b/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/Mendoza new file mode 100644 index 0000000000..76afd5909e Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/Mendoza differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/Rio_Gallegos b/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/Rio_Gallegos new file mode 100644 index 0000000000..65d0230a2d Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/Rio_Gallegos differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/Salta b/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/Salta new file mode 100644 index 
0000000000..963917a01a Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/Salta differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/San_Juan b/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/San_Juan new file mode 100644 index 0000000000..fe7007b85c Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/San_Juan differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/San_Luis b/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/San_Luis new file mode 100644 index 0000000000..fa30a68397 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/San_Luis differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/Tucuman b/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/Tucuman new file mode 100644 index 0000000000..be7bd27163 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/Tucuman differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/Ushuaia b/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/Ushuaia new file mode 100644 index 0000000000..18590effb0 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Argentina/Ushuaia differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Aruba b/awx/lib/site-packages/pytz/zoneinfo/America/Aruba new file mode 100644 index 0000000000..73bb7eaa81 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Aruba differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Asuncion b/awx/lib/site-packages/pytz/zoneinfo/America/Asuncion new file mode 100644 index 0000000000..a0c56370aa Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Asuncion differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Atikokan b/awx/lib/site-packages/pytz/zoneinfo/America/Atikokan new file mode 100644 index 0000000000..1b49e37c94 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Atikokan differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Atka b/awx/lib/site-packages/pytz/zoneinfo/America/Atka new file mode 100644 index 0000000000..391ec98ec0 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Atka differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Bahia b/awx/lib/site-packages/pytz/zoneinfo/America/Bahia new file mode 100644 index 0000000000..403d9d1060 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Bahia differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Bahia_Banderas b/awx/lib/site-packages/pytz/zoneinfo/America/Bahia_Banderas new file mode 100644 index 0000000000..cd531078d0 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Bahia_Banderas differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Barbados b/awx/lib/site-packages/pytz/zoneinfo/America/Barbados new file mode 100644 index 0000000000..7bb7ac4d6a Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Barbados differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Belem b/awx/lib/site-packages/pytz/zoneinfo/America/Belem new file mode 100644 index 0000000000..9c37b6a5c4 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Belem differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Belize b/awx/lib/site-packages/pytz/zoneinfo/America/Belize new file mode 100644 index 0000000000..a18cd39058 Binary files /dev/null and 
b/awx/lib/site-packages/pytz/zoneinfo/America/Belize differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Blanc-Sablon b/awx/lib/site-packages/pytz/zoneinfo/America/Blanc-Sablon new file mode 100644 index 0000000000..8a33789afc Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Blanc-Sablon differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Boa_Vista b/awx/lib/site-packages/pytz/zoneinfo/America/Boa_Vista new file mode 100644 index 0000000000..cb15afbf5c Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Boa_Vista differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Bogota b/awx/lib/site-packages/pytz/zoneinfo/America/Bogota new file mode 100644 index 0000000000..bddda98a2c Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Bogota differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Boise b/awx/lib/site-packages/pytz/zoneinfo/America/Boise new file mode 100644 index 0000000000..441afe55c4 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Boise differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Buenos_Aires b/awx/lib/site-packages/pytz/zoneinfo/America/Buenos_Aires new file mode 100644 index 0000000000..5a52a51fc8 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Buenos_Aires differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Cambridge_Bay b/awx/lib/site-packages/pytz/zoneinfo/America/Cambridge_Bay new file mode 100644 index 0000000000..99c77c5b20 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Cambridge_Bay differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Campo_Grande b/awx/lib/site-packages/pytz/zoneinfo/America/Campo_Grande new file mode 100644 index 0000000000..4684098104 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Campo_Grande differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Cancun b/awx/lib/site-packages/pytz/zoneinfo/America/Cancun new file mode 100644 index 0000000000..90993faa70 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Cancun differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Caracas b/awx/lib/site-packages/pytz/zoneinfo/America/Caracas new file mode 100644 index 0000000000..d96a5c00bf Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Caracas differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Catamarca b/awx/lib/site-packages/pytz/zoneinfo/America/Catamarca new file mode 100644 index 0000000000..b9c987bb56 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Catamarca differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Cayenne b/awx/lib/site-packages/pytz/zoneinfo/America/Cayenne new file mode 100644 index 0000000000..7109a98ec5 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Cayenne differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Cayman b/awx/lib/site-packages/pytz/zoneinfo/America/Cayman new file mode 100644 index 0000000000..a4095d3863 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Cayman differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Chicago b/awx/lib/site-packages/pytz/zoneinfo/America/Chicago new file mode 100644 index 0000000000..71aae7246a Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Chicago differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Chihuahua 
b/awx/lib/site-packages/pytz/zoneinfo/America/Chihuahua new file mode 100644 index 0000000000..b2687241cd Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Chihuahua differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Coral_Harbour b/awx/lib/site-packages/pytz/zoneinfo/America/Coral_Harbour new file mode 100644 index 0000000000..1b49e37c94 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Coral_Harbour differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Cordoba b/awx/lib/site-packages/pytz/zoneinfo/America/Cordoba new file mode 100644 index 0000000000..a703e957d5 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Cordoba differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Costa_Rica b/awx/lib/site-packages/pytz/zoneinfo/America/Costa_Rica new file mode 100644 index 0000000000..018d945b11 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Costa_Rica differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Creston b/awx/lib/site-packages/pytz/zoneinfo/America/Creston new file mode 100644 index 0000000000..1cf719ae83 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Creston differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Cuiaba b/awx/lib/site-packages/pytz/zoneinfo/America/Cuiaba new file mode 100644 index 0000000000..232ef670ff Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Cuiaba differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Curacao b/awx/lib/site-packages/pytz/zoneinfo/America/Curacao new file mode 100644 index 0000000000..2d01c18860 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Curacao differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Danmarkshavn b/awx/lib/site-packages/pytz/zoneinfo/America/Danmarkshavn new file mode 100644 index 0000000000..9feacfb1c9 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Danmarkshavn differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Dawson b/awx/lib/site-packages/pytz/zoneinfo/America/Dawson new file mode 100644 index 0000000000..fab0609998 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Dawson differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Dawson_Creek b/awx/lib/site-packages/pytz/zoneinfo/America/Dawson_Creek new file mode 100644 index 0000000000..c3fb166b08 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Dawson_Creek differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Denver b/awx/lib/site-packages/pytz/zoneinfo/America/Denver new file mode 100644 index 0000000000..f8908febf2 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Denver differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Detroit b/awx/lib/site-packages/pytz/zoneinfo/America/Detroit new file mode 100644 index 0000000000..da53d46df3 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Detroit differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Dominica b/awx/lib/site-packages/pytz/zoneinfo/America/Dominica new file mode 100644 index 0000000000..7783831b49 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Dominica differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Edmonton b/awx/lib/site-packages/pytz/zoneinfo/America/Edmonton new file mode 100644 index 0000000000..3fa0579891 Binary files /dev/null and 
b/awx/lib/site-packages/pytz/zoneinfo/America/Edmonton differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Eirunepe b/awx/lib/site-packages/pytz/zoneinfo/America/Eirunepe new file mode 100644 index 0000000000..4e586a341f Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Eirunepe differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/El_Salvador b/awx/lib/site-packages/pytz/zoneinfo/America/El_Salvador new file mode 100644 index 0000000000..ac774e83f4 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/El_Salvador differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Ensenada b/awx/lib/site-packages/pytz/zoneinfo/America/Ensenada new file mode 100644 index 0000000000..fffdc24bfc Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Ensenada differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Fort_Wayne b/awx/lib/site-packages/pytz/zoneinfo/America/Fort_Wayne new file mode 100644 index 0000000000..aa3dfc4373 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Fort_Wayne differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Fortaleza b/awx/lib/site-packages/pytz/zoneinfo/America/Fortaleza new file mode 100644 index 0000000000..2598c53559 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Fortaleza differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Glace_Bay b/awx/lib/site-packages/pytz/zoneinfo/America/Glace_Bay new file mode 100644 index 0000000000..48412a4cbf Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Glace_Bay differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Godthab b/awx/lib/site-packages/pytz/zoneinfo/America/Godthab new file mode 100644 index 0000000000..85623ce7a7 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Godthab differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Goose_Bay b/awx/lib/site-packages/pytz/zoneinfo/America/Goose_Bay new file mode 100644 index 0000000000..83e5a9b398 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Goose_Bay differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Grand_Turk b/awx/lib/site-packages/pytz/zoneinfo/America/Grand_Turk new file mode 100644 index 0000000000..733c17984b Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Grand_Turk differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Grenada b/awx/lib/site-packages/pytz/zoneinfo/America/Grenada new file mode 100644 index 0000000000..df1b6895f9 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Grenada differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Guadeloupe b/awx/lib/site-packages/pytz/zoneinfo/America/Guadeloupe new file mode 100644 index 0000000000..15c0f1f745 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Guadeloupe differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Guatemala b/awx/lib/site-packages/pytz/zoneinfo/America/Guatemala new file mode 100644 index 0000000000..6118b5ce2d Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Guatemala differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Guayaquil b/awx/lib/site-packages/pytz/zoneinfo/America/Guayaquil new file mode 100644 index 0000000000..e6de7f8da2 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Guayaquil differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Guyana 
b/awx/lib/site-packages/pytz/zoneinfo/America/Guyana new file mode 100644 index 0000000000..5f98c4a0e8 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Guyana differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Halifax b/awx/lib/site-packages/pytz/zoneinfo/America/Halifax new file mode 100644 index 0000000000..756099abe6 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Halifax differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Havana b/awx/lib/site-packages/pytz/zoneinfo/America/Havana new file mode 100644 index 0000000000..28aca00028 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Havana differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Hermosillo b/awx/lib/site-packages/pytz/zoneinfo/America/Hermosillo new file mode 100644 index 0000000000..26c269d967 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Hermosillo differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Indiana/Indianapolis b/awx/lib/site-packages/pytz/zoneinfo/America/Indiana/Indianapolis new file mode 100644 index 0000000000..aa3dfc4373 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Indiana/Indianapolis differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Indiana/Knox b/awx/lib/site-packages/pytz/zoneinfo/America/Indiana/Knox new file mode 100644 index 0000000000..33169f4596 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Indiana/Knox differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Indiana/Marengo b/awx/lib/site-packages/pytz/zoneinfo/America/Indiana/Marengo new file mode 100644 index 0000000000..255b739718 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Indiana/Marengo differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Indiana/Petersburg b/awx/lib/site-packages/pytz/zoneinfo/America/Indiana/Petersburg new file mode 100644 index 0000000000..c611106d57 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Indiana/Petersburg differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Indiana/Tell_City b/awx/lib/site-packages/pytz/zoneinfo/America/Indiana/Tell_City new file mode 100644 index 0000000000..97e319e343 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Indiana/Tell_City differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Indiana/Vevay b/awx/lib/site-packages/pytz/zoneinfo/America/Indiana/Vevay new file mode 100644 index 0000000000..de6167c082 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Indiana/Vevay differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Indiana/Vincennes b/awx/lib/site-packages/pytz/zoneinfo/America/Indiana/Vincennes new file mode 100644 index 0000000000..b79f6725b6 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Indiana/Vincennes differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Indiana/Winamac b/awx/lib/site-packages/pytz/zoneinfo/America/Indiana/Winamac new file mode 100644 index 0000000000..b2611e75ee Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Indiana/Winamac differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Indianapolis b/awx/lib/site-packages/pytz/zoneinfo/America/Indianapolis new file mode 100644 index 0000000000..aa3dfc4373 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Indianapolis differ diff --git 
a/awx/lib/site-packages/pytz/zoneinfo/America/Inuvik b/awx/lib/site-packages/pytz/zoneinfo/America/Inuvik new file mode 100644 index 0000000000..c17af37f5f Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Inuvik differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Iqaluit b/awx/lib/site-packages/pytz/zoneinfo/America/Iqaluit new file mode 100644 index 0000000000..cea5c2e0f3 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Iqaluit differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Jamaica b/awx/lib/site-packages/pytz/zoneinfo/America/Jamaica new file mode 100644 index 0000000000..09e3eb9399 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Jamaica differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Jujuy b/awx/lib/site-packages/pytz/zoneinfo/America/Jujuy new file mode 100644 index 0000000000..86800f0344 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Jujuy differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Juneau b/awx/lib/site-packages/pytz/zoneinfo/America/Juneau new file mode 100644 index 0000000000..48bd37e88e Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Juneau differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Kentucky/Louisville b/awx/lib/site-packages/pytz/zoneinfo/America/Kentucky/Louisville new file mode 100644 index 0000000000..65e7e19049 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Kentucky/Louisville differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Kentucky/Monticello b/awx/lib/site-packages/pytz/zoneinfo/America/Kentucky/Monticello new file mode 100644 index 0000000000..fc2f1b0df8 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Kentucky/Monticello differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Knox_IN b/awx/lib/site-packages/pytz/zoneinfo/America/Knox_IN new file mode 100644 index 0000000000..33169f4596 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Knox_IN differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Kralendijk b/awx/lib/site-packages/pytz/zoneinfo/America/Kralendijk new file mode 100644 index 0000000000..2d01c18860 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Kralendijk differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/La_Paz b/awx/lib/site-packages/pytz/zoneinfo/America/La_Paz new file mode 100644 index 0000000000..2a5a15e4c4 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/La_Paz differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Lima b/awx/lib/site-packages/pytz/zoneinfo/America/Lima new file mode 100644 index 0000000000..a37eeff7de Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Lima differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Los_Angeles b/awx/lib/site-packages/pytz/zoneinfo/America/Los_Angeles new file mode 100644 index 0000000000..3b7ce1dcee Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Los_Angeles differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Louisville b/awx/lib/site-packages/pytz/zoneinfo/America/Louisville new file mode 100644 index 0000000000..65e7e19049 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Louisville differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Lower_Princes b/awx/lib/site-packages/pytz/zoneinfo/America/Lower_Princes new file mode 
100644 index 0000000000..2d01c18860 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Lower_Princes differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Maceio b/awx/lib/site-packages/pytz/zoneinfo/America/Maceio new file mode 100644 index 0000000000..b5201e81d4 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Maceio differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Managua b/awx/lib/site-packages/pytz/zoneinfo/America/Managua new file mode 100644 index 0000000000..f1c35040ce Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Managua differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Manaus b/awx/lib/site-packages/pytz/zoneinfo/America/Manaus new file mode 100644 index 0000000000..1129211841 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Manaus differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Marigot b/awx/lib/site-packages/pytz/zoneinfo/America/Marigot new file mode 100644 index 0000000000..15c0f1f745 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Marigot differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Martinique b/awx/lib/site-packages/pytz/zoneinfo/America/Martinique new file mode 100644 index 0000000000..c223ef5c20 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Martinique differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Matamoros b/awx/lib/site-packages/pytz/zoneinfo/America/Matamoros new file mode 100644 index 0000000000..5c59984def Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Matamoros differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Mazatlan b/awx/lib/site-packages/pytz/zoneinfo/America/Mazatlan new file mode 100644 index 0000000000..43ee12d84a Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Mazatlan differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Mendoza b/awx/lib/site-packages/pytz/zoneinfo/America/Mendoza new file mode 100644 index 0000000000..76afd5909e Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Mendoza differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Menominee b/awx/lib/site-packages/pytz/zoneinfo/America/Menominee new file mode 100644 index 0000000000..438f5ff0b8 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Menominee differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Merida b/awx/lib/site-packages/pytz/zoneinfo/America/Merida new file mode 100644 index 0000000000..b46298e1f2 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Merida differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Metlakatla b/awx/lib/site-packages/pytz/zoneinfo/America/Metlakatla new file mode 100644 index 0000000000..4145b9a581 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Metlakatla differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Mexico_City b/awx/lib/site-packages/pytz/zoneinfo/America/Mexico_City new file mode 100644 index 0000000000..1434ab0880 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Mexico_City differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Miquelon b/awx/lib/site-packages/pytz/zoneinfo/America/Miquelon new file mode 100644 index 0000000000..52cd391ebb Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Miquelon differ diff --git 
a/awx/lib/site-packages/pytz/zoneinfo/America/Moncton b/awx/lib/site-packages/pytz/zoneinfo/America/Moncton new file mode 100644 index 0000000000..b51125ebf1 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Moncton differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Monterrey b/awx/lib/site-packages/pytz/zoneinfo/America/Monterrey new file mode 100644 index 0000000000..7dc5057774 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Monterrey differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Montevideo b/awx/lib/site-packages/pytz/zoneinfo/America/Montevideo new file mode 100644 index 0000000000..4745f0dfb2 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Montevideo differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Montreal b/awx/lib/site-packages/pytz/zoneinfo/America/Montreal new file mode 100644 index 0000000000..47633bd49f Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Montreal differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Montserrat b/awx/lib/site-packages/pytz/zoneinfo/America/Montserrat new file mode 100644 index 0000000000..ee5043a8e3 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Montserrat differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Nassau b/awx/lib/site-packages/pytz/zoneinfo/America/Nassau new file mode 100644 index 0000000000..5091eb5d8d Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Nassau differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/New_York b/awx/lib/site-packages/pytz/zoneinfo/America/New_York new file mode 100644 index 0000000000..b2c2377f4e Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/New_York differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Nipigon b/awx/lib/site-packages/pytz/zoneinfo/America/Nipigon new file mode 100644 index 0000000000..619f1f7590 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Nipigon differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Nome b/awx/lib/site-packages/pytz/zoneinfo/America/Nome new file mode 100644 index 0000000000..b682bfd9cd Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Nome differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Noronha b/awx/lib/site-packages/pytz/zoneinfo/America/Noronha new file mode 100644 index 0000000000..c60239009e Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Noronha differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/North_Dakota/Beulah b/awx/lib/site-packages/pytz/zoneinfo/America/North_Dakota/Beulah new file mode 100644 index 0000000000..c1e3b025b9 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/North_Dakota/Beulah differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/North_Dakota/Center b/awx/lib/site-packages/pytz/zoneinfo/America/North_Dakota/Center new file mode 100644 index 0000000000..786ba1778a Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/North_Dakota/Center differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/North_Dakota/New_Salem b/awx/lib/site-packages/pytz/zoneinfo/America/North_Dakota/New_Salem new file mode 100644 index 0000000000..3488e466d5 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/North_Dakota/New_Salem differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Ojinaga 
b/awx/lib/site-packages/pytz/zoneinfo/America/Ojinaga
new file mode 100644
index 0000000000..37d78301bd
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Ojinaga differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Panama b/awx/lib/site-packages/pytz/zoneinfo/America/Panama
new file mode 100644
index 0000000000..3a4ff2aecf
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Panama differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Pangnirtung b/awx/lib/site-packages/pytz/zoneinfo/America/Pangnirtung
new file mode 100644
index 0000000000..80a6009381
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Pangnirtung differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Paramaribo b/awx/lib/site-packages/pytz/zoneinfo/America/Paramaribo
new file mode 100644
index 0000000000..6f889ccaf1
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Paramaribo differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Phoenix b/awx/lib/site-packages/pytz/zoneinfo/America/Phoenix
new file mode 100644
index 0000000000..67589026c2
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Phoenix differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Port-au-Prince b/awx/lib/site-packages/pytz/zoneinfo/America/Port-au-Prince
new file mode 100644
index 0000000000..3f86e8ab75
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Port-au-Prince differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Port_of_Spain b/awx/lib/site-packages/pytz/zoneinfo/America/Port_of_Spain
new file mode 100644
index 0000000000..bdedd1bd9b
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Port_of_Spain differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Porto_Acre b/awx/lib/site-packages/pytz/zoneinfo/America/Porto_Acre
new file mode 100644
index 0000000000..7be212b1e6
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Porto_Acre differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Porto_Velho b/awx/lib/site-packages/pytz/zoneinfo/America/Porto_Velho
new file mode 100644
index 0000000000..1277479232
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Porto_Velho differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Puerto_Rico b/awx/lib/site-packages/pytz/zoneinfo/America/Puerto_Rico
new file mode 100644
index 0000000000..eada37a111
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Puerto_Rico differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Rainy_River b/awx/lib/site-packages/pytz/zoneinfo/America/Rainy_River
new file mode 100644
index 0000000000..e006a30dbf
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Rainy_River differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Rankin_Inlet b/awx/lib/site-packages/pytz/zoneinfo/America/Rankin_Inlet
new file mode 100644
index 0000000000..99195714c4
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Rankin_Inlet differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Recife b/awx/lib/site-packages/pytz/zoneinfo/America/Recife
new file mode 100644
index 0000000000..0903a77195
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Recife differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Regina b/awx/lib/site-packages/pytz/zoneinfo/America/Regina
new file mode 100644
index 0000000000..20c9c84df4
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Regina differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Resolute b/awx/lib/site-packages/pytz/zoneinfo/America/Resolute
new file mode 100644
index 0000000000..7713f5bead
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Resolute differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Rio_Branco b/awx/lib/site-packages/pytz/zoneinfo/America/Rio_Branco
new file mode 100644
index 0000000000..7be212b1e6
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Rio_Branco differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Rosario b/awx/lib/site-packages/pytz/zoneinfo/America/Rosario
new file mode 100644
index 0000000000..a703e957d5
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Rosario differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Santa_Isabel b/awx/lib/site-packages/pytz/zoneinfo/America/Santa_Isabel
new file mode 100644
index 0000000000..80a2f2d5b1
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Santa_Isabel differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Santarem b/awx/lib/site-packages/pytz/zoneinfo/America/Santarem
new file mode 100644
index 0000000000..45419528f9
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Santarem differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Santiago b/awx/lib/site-packages/pytz/zoneinfo/America/Santiago
new file mode 100644
index 0000000000..910cdc923e
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Santiago differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Santo_Domingo b/awx/lib/site-packages/pytz/zoneinfo/America/Santo_Domingo
new file mode 100644
index 0000000000..23ace9adc3
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Santo_Domingo differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Sao_Paulo b/awx/lib/site-packages/pytz/zoneinfo/America/Sao_Paulo
new file mode 100644
index 0000000000..8df63a17bd
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Sao_Paulo differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Scoresbysund b/awx/lib/site-packages/pytz/zoneinfo/America/Scoresbysund
new file mode 100644
index 0000000000..fae3757ce9
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Scoresbysund differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Shiprock b/awx/lib/site-packages/pytz/zoneinfo/America/Shiprock
new file mode 100644
index 0000000000..f8908febf2
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Shiprock differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Sitka b/awx/lib/site-packages/pytz/zoneinfo/America/Sitka
new file mode 100644
index 0000000000..f2ae47a323
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Sitka differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/St_Barthelemy b/awx/lib/site-packages/pytz/zoneinfo/America/St_Barthelemy
new file mode 100644
index 0000000000..15c0f1f745
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/St_Barthelemy differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/St_Johns b/awx/lib/site-packages/pytz/zoneinfo/America/St_Johns
new file mode 100644
index 0000000000..e7a18d601d
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/St_Johns differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/St_Kitts b/awx/lib/site-packages/pytz/zoneinfo/America/St_Kitts
new file mode 100644
index 0000000000..911d2221b6
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/St_Kitts differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/St_Lucia b/awx/lib/site-packages/pytz/zoneinfo/America/St_Lucia
new file mode 100644
index 0000000000..b37a1cf7e8
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/St_Lucia differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/St_Thomas b/awx/lib/site-packages/pytz/zoneinfo/America/St_Thomas
new file mode 100644
index 0000000000..482f0b549f
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/St_Thomas differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/St_Vincent b/awx/lib/site-packages/pytz/zoneinfo/America/St_Vincent
new file mode 100644
index 0000000000..e553af77b5
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/St_Vincent differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Swift_Current b/awx/lib/site-packages/pytz/zoneinfo/America/Swift_Current
new file mode 100644
index 0000000000..8e9ef255ee
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Swift_Current differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Tegucigalpa b/awx/lib/site-packages/pytz/zoneinfo/America/Tegucigalpa
new file mode 100644
index 0000000000..477e93950c
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Tegucigalpa differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Thule b/awx/lib/site-packages/pytz/zoneinfo/America/Thule
new file mode 100644
index 0000000000..2969ebe59b
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Thule differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Thunder_Bay b/awx/lib/site-packages/pytz/zoneinfo/America/Thunder_Bay
new file mode 100644
index 0000000000..34f750b4df
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Thunder_Bay differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Tijuana b/awx/lib/site-packages/pytz/zoneinfo/America/Tijuana
new file mode 100644
index 0000000000..fffdc24bfc
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Tijuana differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Toronto b/awx/lib/site-packages/pytz/zoneinfo/America/Toronto
new file mode 100644
index 0000000000..1698477a48
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Toronto differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Tortola b/awx/lib/site-packages/pytz/zoneinfo/America/Tortola
new file mode 100644
index 0000000000..6f9d932385
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Tortola differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Vancouver b/awx/lib/site-packages/pytz/zoneinfo/America/Vancouver
new file mode 100644
index 0000000000..0c1fa52690
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Vancouver differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Virgin b/awx/lib/site-packages/pytz/zoneinfo/America/Virgin
new file mode 100644
index 0000000000..482f0b549f
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Virgin differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Whitehorse b/awx/lib/site-packages/pytz/zoneinfo/America/Whitehorse
new file mode 100644
index 0000000000..15216d55ff
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Whitehorse differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Winnipeg b/awx/lib/site-packages/pytz/zoneinfo/America/Winnipeg
new file mode 100644
index 0000000000..2d22791686
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Winnipeg differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Yakutat b/awx/lib/site-packages/pytz/zoneinfo/America/Yakutat
new file mode 100644
index 0000000000..8071602773
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Yakutat differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/America/Yellowknife b/awx/lib/site-packages/pytz/zoneinfo/America/Yellowknife
new file mode 100644
index 0000000000..947bec914b
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/America/Yellowknife differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Antarctica/Casey b/awx/lib/site-packages/pytz/zoneinfo/Antarctica/Casey
new file mode 100644
index 0000000000..8ebf0598ff
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Antarctica/Casey differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Antarctica/Davis b/awx/lib/site-packages/pytz/zoneinfo/Antarctica/Davis
new file mode 100644
index 0000000000..cd7acad690
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Antarctica/Davis differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Antarctica/DumontDUrville b/awx/lib/site-packages/pytz/zoneinfo/Antarctica/DumontDUrville
new file mode 100644
index 0000000000..5ea18e6e77
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Antarctica/DumontDUrville differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Antarctica/Macquarie b/awx/lib/site-packages/pytz/zoneinfo/Antarctica/Macquarie
new file mode 100644
index 0000000000..43e01c0989
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Antarctica/Macquarie differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Antarctica/Mawson b/awx/lib/site-packages/pytz/zoneinfo/Antarctica/Mawson
new file mode 100644
index 0000000000..48e24e1e5e
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Antarctica/Mawson differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Antarctica/McMurdo b/awx/lib/site-packages/pytz/zoneinfo/Antarctica/McMurdo
new file mode 100644
index 0000000000..62ac42f078
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Antarctica/McMurdo differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Antarctica/Palmer b/awx/lib/site-packages/pytz/zoneinfo/Antarctica/Palmer
new file mode 100644
index 0000000000..3d6bd73e58
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Antarctica/Palmer differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Antarctica/Rothera b/awx/lib/site-packages/pytz/zoneinfo/Antarctica/Rothera
new file mode 100644
index 0000000000..b5dc735639
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Antarctica/Rothera differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Antarctica/South_Pole b/awx/lib/site-packages/pytz/zoneinfo/Antarctica/South_Pole
new file mode 100644
index 0000000000..62ac42f078
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Antarctica/South_Pole differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Antarctica/Syowa b/awx/lib/site-packages/pytz/zoneinfo/Antarctica/Syowa
new file mode 100644
index 0000000000..ba6e5f3887
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Antarctica/Syowa differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Antarctica/Vostok b/awx/lib/site-packages/pytz/zoneinfo/Antarctica/Vostok
new file mode 100644
index 0000000000..e19e2b7cdb
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Antarctica/Vostok differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Arctic/Longyearbyen b/awx/lib/site-packages/pytz/zoneinfo/Arctic/Longyearbyen
new file mode 100644
index 0000000000..6326961453
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Arctic/Longyearbyen differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Aden b/awx/lib/site-packages/pytz/zoneinfo/Asia/Aden
new file mode 100644
index 0000000000..5948b310b9
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Aden differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Almaty b/awx/lib/site-packages/pytz/zoneinfo/Asia/Almaty
new file mode 100644
index 0000000000..52f941e266
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Almaty differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Amman b/awx/lib/site-packages/pytz/zoneinfo/Asia/Amman
new file mode 100644
index 0000000000..3e7d0da624
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Amman differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Anadyr b/awx/lib/site-packages/pytz/zoneinfo/Asia/Anadyr
new file mode 100644
index 0000000000..2841a6376d
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Anadyr differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Aqtau b/awx/lib/site-packages/pytz/zoneinfo/Asia/Aqtau
new file mode 100644
index 0000000000..27a3d50d37
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Aqtau differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Aqtobe b/awx/lib/site-packages/pytz/zoneinfo/Asia/Aqtobe
new file mode 100644
index 0000000000..3683be2db8
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Aqtobe differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Ashgabat b/awx/lib/site-packages/pytz/zoneinfo/Asia/Ashgabat
new file mode 100644
index 0000000000..589dbc18ea
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Ashgabat differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Ashkhabad b/awx/lib/site-packages/pytz/zoneinfo/Asia/Ashkhabad
new file mode 100644
index 0000000000..589dbc18ea
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Ashkhabad differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Baghdad b/awx/lib/site-packages/pytz/zoneinfo/Asia/Baghdad
new file mode 100644
index 0000000000..3ad361495c
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Baghdad differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Bahrain b/awx/lib/site-packages/pytz/zoneinfo/Asia/Bahrain
new file mode 100644
index 0000000000..d87b7ce739
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Bahrain differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Baku b/awx/lib/site-packages/pytz/zoneinfo/Asia/Baku
new file mode 100644
index 0000000000..72ae96e9cc
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Baku differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Bangkok b/awx/lib/site-packages/pytz/zoneinfo/Asia/Bangkok
new file mode 100644
index 0000000000..44a1018ef6
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Bangkok differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Beirut b/awx/lib/site-packages/pytz/zoneinfo/Asia/Beirut
new file mode 100644
index 0000000000..c1270bc1ad
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Beirut differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Bishkek b/awx/lib/site-packages/pytz/zoneinfo/Asia/Bishkek
new file mode 100644
index 0000000000..fc827d8871
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Bishkek differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Brunei b/awx/lib/site-packages/pytz/zoneinfo/Asia/Brunei
new file mode 100644
index 0000000000..d6e713d436
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Brunei differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Calcutta b/awx/lib/site-packages/pytz/zoneinfo/Asia/Calcutta
new file mode 100644
index 0000000000..bc909c92c1
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Calcutta differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Choibalsan b/awx/lib/site-packages/pytz/zoneinfo/Asia/Choibalsan
new file mode 100644
index 0000000000..043b7ed5c2
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Choibalsan differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Chongqing b/awx/lib/site-packages/pytz/zoneinfo/Asia/Chongqing
new file mode 100644
index 0000000000..8a7a28a480
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Chongqing differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Chungking b/awx/lib/site-packages/pytz/zoneinfo/Asia/Chungking
new file mode 100644
index 0000000000..8a7a28a480
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Chungking differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Colombo b/awx/lib/site-packages/pytz/zoneinfo/Asia/Colombo
new file mode 100644
index 0000000000..c71c0503d9
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Colombo differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Dacca b/awx/lib/site-packages/pytz/zoneinfo/Asia/Dacca
new file mode 100644
index 0000000000..52e98ffc2c
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Dacca differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Damascus b/awx/lib/site-packages/pytz/zoneinfo/Asia/Damascus
new file mode 100644
index 0000000000..4b610b5a08
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Damascus differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Dhaka b/awx/lib/site-packages/pytz/zoneinfo/Asia/Dhaka
new file mode 100644
index 0000000000..52e98ffc2c
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Dhaka differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Dili b/awx/lib/site-packages/pytz/zoneinfo/Asia/Dili
new file mode 100644
index 0000000000..37bfc4b278
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Dili differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Dubai b/awx/lib/site-packages/pytz/zoneinfo/Asia/Dubai
new file mode 100644
index 0000000000..53f70d57a1
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Dubai differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Dushanbe b/awx/lib/site-packages/pytz/zoneinfo/Asia/Dushanbe
new file mode 100644
index 0000000000..c65ff2a7b3
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Dushanbe differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Gaza b/awx/lib/site-packages/pytz/zoneinfo/Asia/Gaza
new file mode 100644
index 0000000000..4582b3b2c7
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Gaza differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Harbin b/awx/lib/site-packages/pytz/zoneinfo/Asia/Harbin
new file mode 100644
index 0000000000..11e352a511
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Harbin differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Hebron b/awx/lib/site-packages/pytz/zoneinfo/Asia/Hebron
new file mode 100644
index 0000000000..b5bcaa0d2f
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Hebron differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Ho_Chi_Minh b/awx/lib/site-packages/pytz/zoneinfo/Asia/Ho_Chi_Minh
new file mode 100644
index 0000000000..6401a10256
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Ho_Chi_Minh differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Hong_Kong b/awx/lib/site-packages/pytz/zoneinfo/Asia/Hong_Kong
new file mode 100644
index 0000000000..8e5c581366
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Hong_Kong differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Hovd b/awx/lib/site-packages/pytz/zoneinfo/Asia/Hovd
new file mode 100644
index 0000000000..27fab05c19
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Hovd differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Irkutsk b/awx/lib/site-packages/pytz/zoneinfo/Asia/Irkutsk
new file mode 100644
index 0000000000..7c38e7fd6b
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Irkutsk differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Istanbul b/awx/lib/site-packages/pytz/zoneinfo/Asia/Istanbul
new file mode 100644
index 0000000000..864099556b
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Istanbul differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Jakarta b/awx/lib/site-packages/pytz/zoneinfo/Asia/Jakarta
new file mode 100644
index 0000000000..a4cbe0c576
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Jakarta differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Jayapura b/awx/lib/site-packages/pytz/zoneinfo/Asia/Jayapura
new file mode 100644
index 0000000000..0e79d31788
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Jayapura differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Jerusalem b/awx/lib/site-packages/pytz/zoneinfo/Asia/Jerusalem
new file mode 100644
index 0000000000..e7864171d1
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Jerusalem differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Kabul b/awx/lib/site-packages/pytz/zoneinfo/Asia/Kabul
new file mode 100644
index 0000000000..7392c0497a
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Kabul differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Kamchatka b/awx/lib/site-packages/pytz/zoneinfo/Asia/Kamchatka
new file mode 100644
index 0000000000..090bf48895
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Kamchatka differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Karachi b/awx/lib/site-packages/pytz/zoneinfo/Asia/Karachi
new file mode 100644
index 0000000000..a8ff8cb769
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Karachi differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Kashgar b/awx/lib/site-packages/pytz/zoneinfo/Asia/Kashgar
new file mode 100644
index 0000000000..25e64945cf
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Kashgar differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Kathmandu b/awx/lib/site-packages/pytz/zoneinfo/Asia/Kathmandu
new file mode 100644
index 0000000000..65c7b63258
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Kathmandu differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Katmandu b/awx/lib/site-packages/pytz/zoneinfo/Asia/Katmandu
new file mode 100644
index 0000000000..65c7b63258
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Katmandu differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Khandyga b/awx/lib/site-packages/pytz/zoneinfo/Asia/Khandyga
new file mode 100644
index 0000000000..39d2c2df80
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Khandyga differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Kolkata b/awx/lib/site-packages/pytz/zoneinfo/Asia/Kolkata
new file mode 100644
index 0000000000..bc909c92c1
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Kolkata differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Krasnoyarsk b/awx/lib/site-packages/pytz/zoneinfo/Asia/Krasnoyarsk
new file mode 100644
index 0000000000..580e8dd2f4
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Krasnoyarsk differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Kuala_Lumpur b/awx/lib/site-packages/pytz/zoneinfo/Asia/Kuala_Lumpur
new file mode 100644
index 0000000000..41bba37b0c
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Kuala_Lumpur differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Kuching b/awx/lib/site-packages/pytz/zoneinfo/Asia/Kuching
new file mode 100644
index 0000000000..272f46546e
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Kuching differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Kuwait b/awx/lib/site-packages/pytz/zoneinfo/Asia/Kuwait
new file mode 100644
index 0000000000..1dab31cba0
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Kuwait differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Macao b/awx/lib/site-packages/pytz/zoneinfo/Asia/Macao
new file mode 100644
index 0000000000..7c93779578
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Macao differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Macau b/awx/lib/site-packages/pytz/zoneinfo/Asia/Macau
new file mode 100644
index 0000000000..7c93779578
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Macau differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Magadan b/awx/lib/site-packages/pytz/zoneinfo/Asia/Magadan
new file mode 100644
index 0000000000..e3c76b57f5
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Magadan differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Makassar b/awx/lib/site-packages/pytz/zoneinfo/Asia/Makassar
new file mode 100644
index 0000000000..f35823303b
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Makassar differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Manila b/awx/lib/site-packages/pytz/zoneinfo/Asia/Manila
new file mode 100644
index 0000000000..0e90ba6326
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Manila differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Muscat b/awx/lib/site-packages/pytz/zoneinfo/Asia/Muscat
new file mode 100644
index 0000000000..daee4cc82a
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Muscat differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Nicosia b/awx/lib/site-packages/pytz/zoneinfo/Asia/Nicosia
new file mode 100644
index 0000000000..f7f10ab766
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Nicosia differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Novokuznetsk b/awx/lib/site-packages/pytz/zoneinfo/Asia/Novokuznetsk
new file mode 100644
index 0000000000..f78c1f88bf
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Novokuznetsk differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Novosibirsk b/awx/lib/site-packages/pytz/zoneinfo/Asia/Novosibirsk
new file mode 100644
index 0000000000..c401a9817d
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Novosibirsk differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Omsk b/awx/lib/site-packages/pytz/zoneinfo/Asia/Omsk
new file mode 100644
index 0000000000..a3dbb4bb1c
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Omsk differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Oral b/awx/lib/site-packages/pytz/zoneinfo/Asia/Oral
new file mode 100644
index 0000000000..8da2a1dee7
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Oral differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Phnom_Penh b/awx/lib/site-packages/pytz/zoneinfo/Asia/Phnom_Penh
new file mode 100644
index 0000000000..5a52722a16
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Phnom_Penh differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Pontianak b/awx/lib/site-packages/pytz/zoneinfo/Asia/Pontianak
new file mode 100644
index 0000000000..3e882dc357
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Pontianak differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Pyongyang b/awx/lib/site-packages/pytz/zoneinfo/Asia/Pyongyang
new file mode 100644
index 0000000000..9dbd3c1ae0
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Pyongyang differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Qatar b/awx/lib/site-packages/pytz/zoneinfo/Asia/Qatar
new file mode 100644
index 0000000000..49668c2583
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Qatar differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Qyzylorda b/awx/lib/site-packages/pytz/zoneinfo/Asia/Qyzylorda
new file mode 100644
index 0000000000..fc3bf46bcc
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Qyzylorda differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Rangoon b/awx/lib/site-packages/pytz/zoneinfo/Asia/Rangoon
new file mode 100644
index 0000000000..68591c56e7
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Rangoon differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Riyadh b/awx/lib/site-packages/pytz/zoneinfo/Asia/Riyadh
new file mode 100644
index 0000000000..6ebe393d0b
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Riyadh differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Riyadh87 b/awx/lib/site-packages/pytz/zoneinfo/Asia/Riyadh87
new file mode 100644
index 0000000000..ebe16c4995
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Riyadh87 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Riyadh88 b/awx/lib/site-packages/pytz/zoneinfo/Asia/Riyadh88
new file mode 100644
index 0000000000..7f2224bbb2
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Riyadh88 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Riyadh89 b/awx/lib/site-packages/pytz/zoneinfo/Asia/Riyadh89
new file mode 100644
index 0000000000..a50ca48a91
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Riyadh89 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Saigon b/awx/lib/site-packages/pytz/zoneinfo/Asia/Saigon
new file mode 100644
index 0000000000..6401a10256
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Saigon differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Sakhalin b/awx/lib/site-packages/pytz/zoneinfo/Asia/Sakhalin
new file mode 100644
index 0000000000..f5105a363c
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Sakhalin differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Samarkand b/awx/lib/site-packages/pytz/zoneinfo/Asia/Samarkand
new file mode 100644
index 0000000000..191c07c127
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Samarkand differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Seoul b/awx/lib/site-packages/pytz/zoneinfo/Asia/Seoul
new file mode 100644
index 0000000000..96bb0c36d7
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Seoul differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Shanghai b/awx/lib/site-packages/pytz/zoneinfo/Asia/Shanghai
new file mode 100644
index 0000000000..af6d6fbbd2
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Shanghai differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Singapore b/awx/lib/site-packages/pytz/zoneinfo/Asia/Singapore
new file mode 100644
index 0000000000..a6f2db8f3a
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Singapore differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Taipei b/awx/lib/site-packages/pytz/zoneinfo/Asia/Taipei
new file mode 100644
index 0000000000..70cfb27ca9
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Taipei differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Tashkent b/awx/lib/site-packages/pytz/zoneinfo/Asia/Tashkent
new file mode 100644
index 0000000000..5bc806238a
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Tashkent differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Tbilisi b/awx/lib/site-packages/pytz/zoneinfo/Asia/Tbilisi
new file mode 100644
index 0000000000..d7e40548ac
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Tbilisi differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Tehran b/awx/lib/site-packages/pytz/zoneinfo/Asia/Tehran
new file mode 100644
index 0000000000..16149ed6bf
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Tehran differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Tel_Aviv b/awx/lib/site-packages/pytz/zoneinfo/Asia/Tel_Aviv
new file mode 100644
index 0000000000..e7864171d1
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Tel_Aviv differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Thimbu b/awx/lib/site-packages/pytz/zoneinfo/Asia/Thimbu
new file mode 100644
index 0000000000..90294aea20
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Thimbu differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Thimphu b/awx/lib/site-packages/pytz/zoneinfo/Asia/Thimphu
new file mode 100644
index 0000000000..90294aea20
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Thimphu differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Tokyo b/awx/lib/site-packages/pytz/zoneinfo/Asia/Tokyo
new file mode 100644
index 0000000000..058c1e99ba
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Tokyo differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Ujung_Pandang b/awx/lib/site-packages/pytz/zoneinfo/Asia/Ujung_Pandang
new file mode 100644
index 0000000000..f35823303b
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Ujung_Pandang differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Ulaanbaatar b/awx/lib/site-packages/pytz/zoneinfo/Asia/Ulaanbaatar
new file mode 100644
index 0000000000..39bdd89416
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Ulaanbaatar differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Ulan_Bator b/awx/lib/site-packages/pytz/zoneinfo/Asia/Ulan_Bator
new file mode 100644
index 0000000000..39bdd89416
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Ulan_Bator differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Urumqi b/awx/lib/site-packages/pytz/zoneinfo/Asia/Urumqi
new file mode 100644
index 0000000000..f46ff380a0
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Urumqi differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Ust-Nera b/awx/lib/site-packages/pytz/zoneinfo/Asia/Ust-Nera
new file mode 100644
index 0000000000..ae89650f10
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Ust-Nera differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Vientiane b/awx/lib/site-packages/pytz/zoneinfo/Asia/Vientiane
new file mode 100644
index 0000000000..7d39589f19
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Vientiane differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Vladivostok b/awx/lib/site-packages/pytz/zoneinfo/Asia/Vladivostok
new file mode 100644
index 0000000000..1cae6d0fd9
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Vladivostok differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Yakutsk b/awx/lib/site-packages/pytz/zoneinfo/Asia/Yakutsk
new file mode 100644
index 0000000000..461901f6f8
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Yakutsk differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Yekaterinburg b/awx/lib/site-packages/pytz/zoneinfo/Asia/Yekaterinburg
new file mode 100644
index 0000000000..aca50c6e1d
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Yekaterinburg differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Asia/Yerevan b/awx/lib/site-packages/pytz/zoneinfo/Asia/Yerevan
new file mode 100644
index 0000000000..c4ab2197f8
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Asia/Yerevan differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Azores b/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Azores
new file mode 100644
index 0000000000..19e4004067
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Azores differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Bermuda b/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Bermuda
new file mode 100644
index 0000000000..3a5c6dbf7a
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Bermuda differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Canary b/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Canary
new file mode 100644
index 0000000000..972388be71
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Canary differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Cape_Verde b/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Cape_Verde
new file mode 100644
index 0000000000..5238ac8a67
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Cape_Verde differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Faeroe b/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Faeroe
new file mode 100644
index 0000000000..4dab7ef085
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Faeroe differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Faroe b/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Faroe
new file mode 100644
index 0000000000..4dab7ef085
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Faroe differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Jan_Mayen b/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Jan_Mayen
new file mode 100644
index 0000000000..6326961453
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Jan_Mayen differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Madeira b/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Madeira
new file mode 100644
index 0000000000..2175096cf1
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Madeira differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Reykjavik b/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Reykjavik
new file mode 100644
index 0000000000..e97f13a652
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Reykjavik differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Atlantic/South_Georgia b/awx/lib/site-packages/pytz/zoneinfo/Atlantic/South_Georgia
new file mode 100644
index 0000000000..ab2c8236b0
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Atlantic/South_Georgia differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Atlantic/St_Helena b/awx/lib/site-packages/pytz/zoneinfo/Atlantic/St_Helena
new file mode 100644
index 0000000000..d365e3ddf3
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Atlantic/St_Helena differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Stanley b/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Stanley
new file mode 100644
index 0000000000..34f9d022cd
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Atlantic/Stanley differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Australia/ACT b/awx/lib/site-packages/pytz/zoneinfo/Australia/ACT
new file mode 100644
index 0000000000..d95c245e5e
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Australia/ACT differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Australia/Adelaide b/awx/lib/site-packages/pytz/zoneinfo/Australia/Adelaide
new file mode 100644
index 0000000000..b350cb6660
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Australia/Adelaide differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Australia/Brisbane b/awx/lib/site-packages/pytz/zoneinfo/Australia/Brisbane
new file mode 100644
index 0000000000..3e899a163f
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Australia/Brisbane differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Australia/Broken_Hill b/awx/lib/site-packages/pytz/zoneinfo/Australia/Broken_Hill
new file mode 100644
index 0000000000..d8f3155c84
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Australia/Broken_Hill differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Australia/Canberra b/awx/lib/site-packages/pytz/zoneinfo/Australia/Canberra
new file mode 100644
index 0000000000..d95c245e5e
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Australia/Canberra differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Australia/Currie b/awx/lib/site-packages/pytz/zoneinfo/Australia/Currie
new file mode 100644
index 0000000000..43ca1e4558
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Australia/Currie differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Australia/Darwin b/awx/lib/site-packages/pytz/zoneinfo/Australia/Darwin
new file mode 100644
index 0000000000..c44512fbb7
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Australia/Darwin differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Australia/Eucla b/awx/lib/site-packages/pytz/zoneinfo/Australia/Eucla
new file mode 100644
index 0000000000..e78c2d4251
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Australia/Eucla differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Australia/Hobart b/awx/lib/site-packages/pytz/zoneinfo/Australia/Hobart
new file mode 100644
index 0000000000..c4604e5386
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Australia/Hobart differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Australia/LHI b/awx/lib/site-packages/pytz/zoneinfo/Australia/LHI
new file mode 100644
index 0000000000..1f542d3700
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Australia/LHI differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Australia/Lindeman b/awx/lib/site-packages/pytz/zoneinfo/Australia/Lindeman
new file mode 100644
index 0000000000..05c3c1c324
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Australia/Lindeman differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Australia/Lord_Howe b/awx/lib/site-packages/pytz/zoneinfo/Australia/Lord_Howe
new file mode 100644
index 0000000000..1f542d3700
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Australia/Lord_Howe differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Australia/Melbourne b/awx/lib/site-packages/pytz/zoneinfo/Australia/Melbourne
new file mode 100644
index 0000000000..af3152f6c4
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Australia/Melbourne differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Australia/NSW b/awx/lib/site-packages/pytz/zoneinfo/Australia/NSW
new file mode 100644
index 0000000000..d95c245e5e
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Australia/NSW differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Australia/North b/awx/lib/site-packages/pytz/zoneinfo/Australia/North
new file mode 100644
index 0000000000..c44512fbb7
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Australia/North differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Australia/Perth b/awx/lib/site-packages/pytz/zoneinfo/Australia/Perth
new file mode 100644
index 0000000000..1c7ebb795c
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Australia/Perth differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Australia/Queensland b/awx/lib/site-packages/pytz/zoneinfo/Australia/Queensland
new file mode 100644
index 0000000000..3e899a163f
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Australia/Queensland differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Australia/South b/awx/lib/site-packages/pytz/zoneinfo/Australia/South
new file mode 100644
index 0000000000..b350cb6660
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Australia/South differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Australia/Sydney b/awx/lib/site-packages/pytz/zoneinfo/Australia/Sydney
new file mode 100644
index 0000000000..d95c245e5e
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Australia/Sydney differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Australia/Tasmania b/awx/lib/site-packages/pytz/zoneinfo/Australia/Tasmania
new file mode 100644
index 0000000000..c4604e5386
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Australia/Tasmania differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Australia/Victoria b/awx/lib/site-packages/pytz/zoneinfo/Australia/Victoria
new file mode 100644
index 0000000000..af3152f6c4
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Australia/Victoria differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Australia/West b/awx/lib/site-packages/pytz/zoneinfo/Australia/West
new file mode 100644
index 0000000000..1c7ebb795c
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Australia/West differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Australia/Yancowinna b/awx/lib/site-packages/pytz/zoneinfo/Australia/Yancowinna
new file mode 100644
index 0000000000..d8f3155c84
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Australia/Yancowinna differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Brazil/Acre b/awx/lib/site-packages/pytz/zoneinfo/Brazil/Acre
new file mode 100644
index 0000000000..7be212b1e6
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Brazil/Acre differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Brazil/DeNoronha b/awx/lib/site-packages/pytz/zoneinfo/Brazil/DeNoronha
new file mode 100644
index 0000000000..c60239009e
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Brazil/DeNoronha differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Brazil/East b/awx/lib/site-packages/pytz/zoneinfo/Brazil/East
new file mode 100644
index 0000000000..8df63a17bd
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Brazil/East differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Brazil/West b/awx/lib/site-packages/pytz/zoneinfo/Brazil/West
new file mode 100644
index 0000000000..1129211841
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Brazil/West differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/CET b/awx/lib/site-packages/pytz/zoneinfo/CET
new file mode 100644
index 0000000000..4c4f8ef9ae
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/CET differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/CST6CDT b/awx/lib/site-packages/pytz/zoneinfo/CST6CDT
new file mode 100644
index 0000000000..5c8a1d9a3e
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/CST6CDT differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Canada/Atlantic b/awx/lib/site-packages/pytz/zoneinfo/Canada/Atlantic
new file mode 100644
index 0000000000..756099abe6
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Canada/Atlantic differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Canada/Central b/awx/lib/site-packages/pytz/zoneinfo/Canada/Central
new file mode 100644
index 0000000000..2d22791686
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Canada/Central differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Canada/East-Saskatchewan b/awx/lib/site-packages/pytz/zoneinfo/Canada/East-Saskatchewan
new file mode 100644
index 0000000000..20c9c84df4
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Canada/East-Saskatchewan differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Canada/Eastern b/awx/lib/site-packages/pytz/zoneinfo/Canada/Eastern
new file mode 100644
index 0000000000..1698477a48
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Canada/Eastern differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Canada/Mountain b/awx/lib/site-packages/pytz/zoneinfo/Canada/Mountain
new file mode 100644
index 0000000000..3fa0579891
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Canada/Mountain differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Canada/Newfoundland b/awx/lib/site-packages/pytz/zoneinfo/Canada/Newfoundland
new file mode 100644
index 0000000000..e7a18d601d
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Canada/Newfoundland differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Canada/Pacific b/awx/lib/site-packages/pytz/zoneinfo/Canada/Pacific
new file mode 100644
index 0000000000..0c1fa52690
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Canada/Pacific differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Canada/Saskatchewan b/awx/lib/site-packages/pytz/zoneinfo/Canada/Saskatchewan
new file mode 100644
index 0000000000..20c9c84df4
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Canada/Saskatchewan differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Canada/Yukon b/awx/lib/site-packages/pytz/zoneinfo/Canada/Yukon
new file mode 100644
index 0000000000..15216d55ff
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Canada/Yukon differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Chile/Continental b/awx/lib/site-packages/pytz/zoneinfo/Chile/Continental
new file mode 100644
index 0000000000..910cdc923e
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Chile/Continental differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Chile/EasterIsland b/awx/lib/site-packages/pytz/zoneinfo/Chile/EasterIsland
new file mode 100644
index 0000000000..f22cc98af8
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Chile/EasterIsland differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Cuba b/awx/lib/site-packages/pytz/zoneinfo/Cuba
new file mode 100644
index 0000000000..28aca00028
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Cuba differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/EET b/awx/lib/site-packages/pytz/zoneinfo/EET
new file mode 100644
index 0000000000..beb273a248
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/EET differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/EST b/awx/lib/site-packages/pytz/zoneinfo/EST
new file mode 100644
index 0000000000..074a4fc76a
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/EST differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/EST5EDT b/awx/lib/site-packages/pytz/zoneinfo/EST5EDT
new file mode 100644
index 0000000000..54541fc271
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/EST5EDT differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Egypt b/awx/lib/site-packages/pytz/zoneinfo/Egypt
new file mode 100644
index 0000000000..1c6a2fc8fa
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Egypt differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Eire b/awx/lib/site-packages/pytz/zoneinfo/Eire
new file mode 100644
index 0000000000..3dec02693d
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Eire differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT
new file mode 100644
index 0000000000..2ee14295f1
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+0 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+0
new file mode 100644
index 0000000000..2ee14295f1
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+0 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+1 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+1
new file mode 100644
index 0000000000..67b88c9623
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+1 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+10 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+10
new file mode 100644
index 0000000000..d564b28a6f
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+10 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+11 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+11
new file mode 100644
index 0000000000..52eb573057
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+11 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+12 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+12
new file mode 100644
index 0000000000..c54cead625
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+12 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+2 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+2
new file mode 100644
index 0000000000..e43b63f66b
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+2 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+3 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+3
new file mode 100644
index 0000000000..f029bac683
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+3 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+4 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+4
new file mode 100644
index 0000000000..0ad0ee3229
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+4 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+5 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+5
new file mode 100644
index 0000000000..e53f3febec
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+5 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+6 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+6
new file mode 100644
index 0000000000..b41149616a
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+6 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+7 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+7
new file mode 100644
index 0000000000..32fa6dcb42
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+7 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+8 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+8
new file mode 100644
index 0000000000..512578ca6d
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+8 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+9 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+9
new file mode 100644
index 0000000000..d3e47e7b24
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT+9 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-0 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-0
new file mode 100644
index 0000000000..2ee14295f1
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-0 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-1 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-1
new file mode 100644
index 0000000000..9a6adebc88
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-1 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-10 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-10
new file mode 100644
index 0000000000..37b93fb9d1
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-10 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-11 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-11
new file mode 100644
index 0000000000..f1af0e290c
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-11 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-12 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-12
new file mode 100644
index 0000000000..0fa4a8dc03
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-12 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-13 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-13
new file mode 100644
index 0000000000..0a5dbe16cd
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-13 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-14 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-14
new file mode 100644
index 0000000000..41c6a1d1ca
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-14 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-2 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-2
new file mode 100644
index 0000000000..9f63268d09
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-2 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-3 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-3
new file mode 100644
index 0000000000..38ccd8a610
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-3 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-4 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-4
new file mode 100644
index 0000000000..43badfb220
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-4 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-5 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-5
new file mode 100644
index 0000000000..c88cf210c3
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-5 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-6 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-6
new file mode 100644
index 0000000000..c1a0634cf5
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-6 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-7 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-7
new file mode 100644
index 0000000000..bc152efdaf
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-7 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-8 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-8
new file mode 100644
index 0000000000..2c0de20faa
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-8 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-9 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-9
new file mode 100644
index 0000000000..8a3bd45af3
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT-9 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT0 b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT0
new file mode 100644
index 0000000000..2ee14295f1
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/GMT0 differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/Greenwich b/awx/lib/site-packages/pytz/zoneinfo/Etc/Greenwich
new file mode 100644
index 0000000000..2ee14295f1
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/Greenwich differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/UCT b/awx/lib/site-packages/pytz/zoneinfo/Etc/UCT
new file mode 100644
index 0000000000..a88c4b665b
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/UCT differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/UTC b/awx/lib/site-packages/pytz/zoneinfo/Etc/UTC
new file mode 100644
index 0000000000..5583f5b0c6
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/UTC differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/Universal b/awx/lib/site-packages/pytz/zoneinfo/Etc/Universal
new file mode 100644
index 0000000000..5583f5b0c6
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/Universal differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Etc/Zulu b/awx/lib/site-packages/pytz/zoneinfo/Etc/Zulu
new file mode 100644
index 0000000000..5583f5b0c6
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Etc/Zulu differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Amsterdam b/awx/lib/site-packages/pytz/zoneinfo/Europe/Amsterdam
new file mode 100644
index 0000000000..30ca3243a5
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Amsterdam differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Andorra b/awx/lib/site-packages/pytz/zoneinfo/Europe/Andorra
new file mode 100644
index 0000000000..cf9533a3e4
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Andorra differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Athens b/awx/lib/site-packages/pytz/zoneinfo/Europe/Athens
new file mode 100644
index 0000000000..726e56c3fe
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Athens differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Belfast b/awx/lib/site-packages/pytz/zoneinfo/Europe/Belfast
new file mode 100644
index 0000000000..fe63ff7e7f
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Belfast differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Belgrade b/awx/lib/site-packages/pytz/zoneinfo/Europe/Belgrade
new file mode 100644
index 0000000000..5f0389f039
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Belgrade differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Berlin b/awx/lib/site-packages/pytz/zoneinfo/Europe/Berlin
new file mode 100644
index 0000000000..96059c7854
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Berlin differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Bratislava b/awx/lib/site-packages/pytz/zoneinfo/Europe/Bratislava
new file mode 100644
index 0000000000..9ab78e9156
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Bratislava differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Brussels b/awx/lib/site-packages/pytz/zoneinfo/Europe/Brussels
new file mode 100644
index 0000000000..2791edeba3
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Brussels differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Bucharest b/awx/lib/site-packages/pytz/zoneinfo/Europe/Bucharest
new file mode 100644
index 0000000000..de2a5f0af1
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Bucharest differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Budapest b/awx/lib/site-packages/pytz/zoneinfo/Europe/Budapest
new file mode 100644
index 0000000000..1b787b16ce
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Budapest differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Busingen b/awx/lib/site-packages/pytz/zoneinfo/Europe/Busingen
new file mode 100644
index 0000000000..0cf15c17ee
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Busingen differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Chisinau b/awx/lib/site-packages/pytz/zoneinfo/Europe/Chisinau
new file mode 100644
index 0000000000..983cc70716
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Chisinau differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Copenhagen b/awx/lib/site-packages/pytz/zoneinfo/Europe/Copenhagen
new file mode 100644
index 0000000000..af7e9269e5
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Copenhagen differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Dublin b/awx/lib/site-packages/pytz/zoneinfo/Europe/Dublin
new file mode 100644
index 0000000000..3dec02693d
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Dublin differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Gibraltar b/awx/lib/site-packages/pytz/zoneinfo/Europe/Gibraltar
new file mode 100644
index 0000000000..f3dbeb65ec
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Gibraltar differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Guernsey b/awx/lib/site-packages/pytz/zoneinfo/Europe/Guernsey
new file mode 100644
index 0000000000..fe63ff7e7f
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Guernsey differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Helsinki b/awx/lib/site-packages/pytz/zoneinfo/Europe/Helsinki
new file mode 100644
index 0000000000..19d7babd53
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Helsinki differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Isle_of_Man b/awx/lib/site-packages/pytz/zoneinfo/Europe/Isle_of_Man
new file mode 100644
index 0000000000..fe63ff7e7f
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Isle_of_Man differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Istanbul b/awx/lib/site-packages/pytz/zoneinfo/Europe/Istanbul
new file mode 100644
index 0000000000..864099556b
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Istanbul differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Jersey b/awx/lib/site-packages/pytz/zoneinfo/Europe/Jersey
new file mode 100644
index 0000000000..fe63ff7e7f
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Jersey differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Kaliningrad b/awx/lib/site-packages/pytz/zoneinfo/Europe/Kaliningrad
new file mode 100644
index 0000000000..fa6bab8620
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Kaliningrad differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Kiev b/awx/lib/site-packages/pytz/zoneinfo/Europe/Kiev
new file mode 100644
index 0000000000..075cc02fd8
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Kiev differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Lisbon b/awx/lib/site-packages/pytz/zoneinfo/Europe/Lisbon
new file mode 100644
index 0000000000..168accf060
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Lisbon differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Ljubljana b/awx/lib/site-packages/pytz/zoneinfo/Europe/Ljubljana
new file mode 100644
index 0000000000..5f0389f039
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Ljubljana differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/London b/awx/lib/site-packages/pytz/zoneinfo/Europe/London
new file mode 100644
index 0000000000..fe63ff7e7f
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/London differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Luxembourg b/awx/lib/site-packages/pytz/zoneinfo/Europe/Luxembourg
new file mode 100644
index 0000000000..6c194a5cdc
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Luxembourg differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Madrid b/awx/lib/site-packages/pytz/zoneinfo/Europe/Madrid
new file mode 100644
index 0000000000..931195955a
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Madrid differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Malta b/awx/lib/site-packages/pytz/zoneinfo/Europe/Malta
new file mode 100644
index 0000000000..5f518a1f17
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Malta differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Mariehamn b/awx/lib/site-packages/pytz/zoneinfo/Europe/Mariehamn
new file mode 100644
index 0000000000..19d7babd53
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Mariehamn differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Minsk b/awx/lib/site-packages/pytz/zoneinfo/Europe/Minsk
new file mode 100644
index 0000000000..ba9971c631
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Minsk differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Monaco b/awx/lib/site-packages/pytz/zoneinfo/Europe/Monaco
new file mode 100644
index 0000000000..664f6161ab
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Monaco differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Moscow b/awx/lib/site-packages/pytz/zoneinfo/Europe/Moscow
new file mode 100644
index 0000000000..6068f8b9e5
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Moscow differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Nicosia b/awx/lib/site-packages/pytz/zoneinfo/Europe/Nicosia
new file mode 100644
index 0000000000..f7f10ab766
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Nicosia differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Oslo b/awx/lib/site-packages/pytz/zoneinfo/Europe/Oslo
new file mode 100644
index 0000000000..6326961453
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Oslo differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Paris b/awx/lib/site-packages/pytz/zoneinfo/Europe/Paris
new file mode 100644
index 0000000000..fd8ea7dbae
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Paris differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Podgorica b/awx/lib/site-packages/pytz/zoneinfo/Europe/Podgorica
new file mode 100644
index 0000000000..5f0389f039
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Podgorica differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Prague b/awx/lib/site-packages/pytz/zoneinfo/Europe/Prague
new file mode 100644
index 0000000000..9ab78e9156
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Prague differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Riga b/awx/lib/site-packages/pytz/zoneinfo/Europe/Riga
new file mode 100644
index 0000000000..abea45d309
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Riga differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Rome b/awx/lib/site-packages/pytz/zoneinfo/Europe/Rome
new file mode 100644
index 0000000000..28ddffe0d9
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Rome differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Samara b/awx/lib/site-packages/pytz/zoneinfo/Europe/Samara
new file mode 100644
index 0000000000..fe5060094b
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Samara differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/San_Marino b/awx/lib/site-packages/pytz/zoneinfo/Europe/San_Marino
new file mode 100644
index 0000000000..28ddffe0d9
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/San_Marino differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Sarajevo b/awx/lib/site-packages/pytz/zoneinfo/Europe/Sarajevo
new file mode 100644
index 0000000000..5f0389f039
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Sarajevo differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Simferopol b/awx/lib/site-packages/pytz/zoneinfo/Europe/Simferopol
new file mode 100644
index 0000000000..ebb63b4450
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Simferopol differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Skopje b/awx/lib/site-packages/pytz/zoneinfo/Europe/Skopje
new file mode 100644
index 0000000000..5f0389f039
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Skopje differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Sofia b/awx/lib/site-packages/pytz/zoneinfo/Europe/Sofia
new file mode 100644
index 0000000000..d8032335b2
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Sofia differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Stockholm b/awx/lib/site-packages/pytz/zoneinfo/Europe/Stockholm
new file mode 100644
index 0000000000..3bc6dbd9d1
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Stockholm differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Tallinn b/awx/lib/site-packages/pytz/zoneinfo/Europe/Tallinn
new file mode 100644
index 0000000000..4ba4424121
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Tallinn differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Tirane b/awx/lib/site-packages/pytz/zoneinfo/Europe/Tirane
new file mode 100644
index 0000000000..0b86017d24
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Tirane differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Tiraspol b/awx/lib/site-packages/pytz/zoneinfo/Europe/Tiraspol
new file mode 100644
index 0000000000..983cc70716
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Tiraspol differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Uzhgorod b/awx/lib/site-packages/pytz/zoneinfo/Europe/Uzhgorod
new file mode 100644
index 0000000000..7032ab9b34
Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Uzhgorod differ
diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Vaduz b/awx/lib/site-packages/pytz/zoneinfo/Europe/Vaduz
new file mode 100644
index 0000000000..c4e20dbe0e
Binary files /dev/null and
b/awx/lib/site-packages/pytz/zoneinfo/Europe/Vaduz differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Vatican b/awx/lib/site-packages/pytz/zoneinfo/Europe/Vatican new file mode 100644 index 0000000000..28ddffe0d9 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Vatican differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Vienna b/awx/lib/site-packages/pytz/zoneinfo/Europe/Vienna new file mode 100644 index 0000000000..8e4c9a9b5f Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Vienna differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Vilnius b/awx/lib/site-packages/pytz/zoneinfo/Europe/Vilnius new file mode 100644 index 0000000000..b6545b2494 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Vilnius differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Volgograd b/awx/lib/site-packages/pytz/zoneinfo/Europe/Volgograd new file mode 100644 index 0000000000..b91e4fbff6 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Volgograd differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Warsaw b/awx/lib/site-packages/pytz/zoneinfo/Europe/Warsaw new file mode 100644 index 0000000000..3797b1cb65 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Warsaw differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Zagreb b/awx/lib/site-packages/pytz/zoneinfo/Europe/Zagreb new file mode 100644 index 0000000000..5f0389f039 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Zagreb differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Zaporozhye b/awx/lib/site-packages/pytz/zoneinfo/Europe/Zaporozhye new file mode 100644 index 0000000000..2ccf8998b2 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Zaporozhye differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Europe/Zurich b/awx/lib/site-packages/pytz/zoneinfo/Europe/Zurich new file mode 100644 index 0000000000..0cf15c17ee Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Europe/Zurich differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Factory b/awx/lib/site-packages/pytz/zoneinfo/Factory new file mode 100644 index 0000000000..a65f97edd2 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Factory differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/GB b/awx/lib/site-packages/pytz/zoneinfo/GB new file mode 100644 index 0000000000..fe63ff7e7f Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/GB differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/GB-Eire b/awx/lib/site-packages/pytz/zoneinfo/GB-Eire new file mode 100644 index 0000000000..fe63ff7e7f Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/GB-Eire differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/GMT b/awx/lib/site-packages/pytz/zoneinfo/GMT new file mode 100644 index 0000000000..2ee14295f1 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/GMT differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/GMT+0 b/awx/lib/site-packages/pytz/zoneinfo/GMT+0 new file mode 100644 index 0000000000..2ee14295f1 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/GMT+0 differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/GMT-0 b/awx/lib/site-packages/pytz/zoneinfo/GMT-0 new file mode 100644 index 0000000000..2ee14295f1 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/GMT-0 differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/GMT0 b/awx/lib/site-packages/pytz/zoneinfo/GMT0 new 
file mode 100644 index 0000000000..2ee14295f1 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/GMT0 differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Greenwich b/awx/lib/site-packages/pytz/zoneinfo/Greenwich new file mode 100644 index 0000000000..2ee14295f1 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Greenwich differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/HST b/awx/lib/site-packages/pytz/zoneinfo/HST new file mode 100644 index 0000000000..616c31bc5e Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/HST differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Hongkong b/awx/lib/site-packages/pytz/zoneinfo/Hongkong new file mode 100644 index 0000000000..8e5c581366 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Hongkong differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Iceland b/awx/lib/site-packages/pytz/zoneinfo/Iceland new file mode 100644 index 0000000000..e97f13a652 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Iceland differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Indian/Antananarivo b/awx/lib/site-packages/pytz/zoneinfo/Indian/Antananarivo new file mode 100644 index 0000000000..ef6e745c45 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Indian/Antananarivo differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Indian/Chagos b/awx/lib/site-packages/pytz/zoneinfo/Indian/Chagos new file mode 100644 index 0000000000..864d3e29a2 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Indian/Chagos differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Indian/Christmas b/awx/lib/site-packages/pytz/zoneinfo/Indian/Christmas new file mode 100644 index 0000000000..686d5b3c65 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Indian/Christmas differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Indian/Cocos b/awx/lib/site-packages/pytz/zoneinfo/Indian/Cocos new file mode 100644 index 0000000000..6f7d869f0f Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Indian/Cocos differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Indian/Comoro b/awx/lib/site-packages/pytz/zoneinfo/Indian/Comoro new file mode 100644 index 0000000000..297c6db63c Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Indian/Comoro differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Indian/Kerguelen b/awx/lib/site-packages/pytz/zoneinfo/Indian/Kerguelen new file mode 100644 index 0000000000..1f42bbc1ff Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Indian/Kerguelen differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Indian/Mahe b/awx/lib/site-packages/pytz/zoneinfo/Indian/Mahe new file mode 100644 index 0000000000..d048242cac Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Indian/Mahe differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Indian/Maldives b/awx/lib/site-packages/pytz/zoneinfo/Indian/Maldives new file mode 100644 index 0000000000..65e7eeee8d Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Indian/Maldives differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Indian/Mauritius b/awx/lib/site-packages/pytz/zoneinfo/Indian/Mauritius new file mode 100644 index 0000000000..54f2220108 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Indian/Mauritius differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Indian/Mayotte b/awx/lib/site-packages/pytz/zoneinfo/Indian/Mayotte new file mode 100644 index 0000000000..8401a37aa0 Binary files /dev/null and 
b/awx/lib/site-packages/pytz/zoneinfo/Indian/Mayotte differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Indian/Reunion b/awx/lib/site-packages/pytz/zoneinfo/Indian/Reunion new file mode 100644 index 0000000000..9b3830ec31 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Indian/Reunion differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Iran b/awx/lib/site-packages/pytz/zoneinfo/Iran new file mode 100644 index 0000000000..16149ed6bf Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Iran differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Israel b/awx/lib/site-packages/pytz/zoneinfo/Israel new file mode 100644 index 0000000000..e7864171d1 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Israel differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Jamaica b/awx/lib/site-packages/pytz/zoneinfo/Jamaica new file mode 100644 index 0000000000..09e3eb9399 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Jamaica differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Japan b/awx/lib/site-packages/pytz/zoneinfo/Japan new file mode 100644 index 0000000000..058c1e99ba Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Japan differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Kwajalein b/awx/lib/site-packages/pytz/zoneinfo/Kwajalein new file mode 100644 index 0000000000..b57237272d Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Kwajalein differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Libya b/awx/lib/site-packages/pytz/zoneinfo/Libya new file mode 100644 index 0000000000..479f5b4575 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Libya differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/MET b/awx/lib/site-packages/pytz/zoneinfo/MET new file mode 100644 index 0000000000..71963d533e Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/MET differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/MST b/awx/lib/site-packages/pytz/zoneinfo/MST new file mode 100644 index 0000000000..da3e926d23 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/MST differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/MST7MDT b/awx/lib/site-packages/pytz/zoneinfo/MST7MDT new file mode 100644 index 0000000000..726a7e5717 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/MST7MDT differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Mexico/BajaNorte b/awx/lib/site-packages/pytz/zoneinfo/Mexico/BajaNorte new file mode 100644 index 0000000000..fffdc24bfc Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Mexico/BajaNorte differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Mexico/BajaSur b/awx/lib/site-packages/pytz/zoneinfo/Mexico/BajaSur new file mode 100644 index 0000000000..43ee12d84a Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Mexico/BajaSur differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Mexico/General b/awx/lib/site-packages/pytz/zoneinfo/Mexico/General new file mode 100644 index 0000000000..1434ab0880 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Mexico/General differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Mideast/Riyadh87 b/awx/lib/site-packages/pytz/zoneinfo/Mideast/Riyadh87 new file mode 100644 index 0000000000..ebe16c4995 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Mideast/Riyadh87 differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Mideast/Riyadh88 b/awx/lib/site-packages/pytz/zoneinfo/Mideast/Riyadh88 new file mode 100644 index 
0000000000..7f2224bbb2 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Mideast/Riyadh88 differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Mideast/Riyadh89 b/awx/lib/site-packages/pytz/zoneinfo/Mideast/Riyadh89 new file mode 100644 index 0000000000..a50ca48a91 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Mideast/Riyadh89 differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/NZ b/awx/lib/site-packages/pytz/zoneinfo/NZ new file mode 100644 index 0000000000..a40767df93 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/NZ differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/NZ-CHAT b/awx/lib/site-packages/pytz/zoneinfo/NZ-CHAT new file mode 100644 index 0000000000..6329e4fce0 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/NZ-CHAT differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Navajo b/awx/lib/site-packages/pytz/zoneinfo/Navajo new file mode 100644 index 0000000000..f8908febf2 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Navajo differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/PRC b/awx/lib/site-packages/pytz/zoneinfo/PRC new file mode 100644 index 0000000000..af6d6fbbd2 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/PRC differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/PST8PDT b/awx/lib/site-packages/pytz/zoneinfo/PST8PDT new file mode 100644 index 0000000000..6242ac04c0 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/PST8PDT differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Apia b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Apia new file mode 100644 index 0000000000..efe6d5a1de Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Apia differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Auckland b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Auckland new file mode 100644 index 0000000000..a40767df93 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Auckland differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Chatham b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Chatham new file mode 100644 index 0000000000..6329e4fce0 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Chatham differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Chuuk b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Chuuk new file mode 100644 index 0000000000..0ef473871d Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Chuuk differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Easter b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Easter new file mode 100644 index 0000000000..f22cc98af8 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Easter differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Efate b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Efate new file mode 100644 index 0000000000..c46154a805 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Efate differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Enderbury b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Enderbury new file mode 100644 index 0000000000..69e75d754e Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Enderbury differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Fakaofo b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Fakaofo new file mode 100644 index 0000000000..22902f98fe Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Fakaofo differ diff 
--git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Fiji b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Fiji new file mode 100644 index 0000000000..0bc19629c1 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Fiji differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Funafuti b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Funafuti new file mode 100644 index 0000000000..66cf5e1df3 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Funafuti differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Galapagos b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Galapagos new file mode 100644 index 0000000000..7504cc66f5 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Galapagos differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Gambier b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Gambier new file mode 100644 index 0000000000..fc49c03f6e Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Gambier differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Guadalcanal b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Guadalcanal new file mode 100644 index 0000000000..3a4ec12e56 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Guadalcanal differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Guam b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Guam new file mode 100644 index 0000000000..a05292f4ba Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Guam differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Honolulu b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Honolulu new file mode 100644 index 0000000000..1b4684b9b4 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Honolulu differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Johnston b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Johnston new file mode 100644 index 0000000000..616c31bc5e Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Johnston differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Kiritimati b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Kiritimati new file mode 100644 index 0000000000..7131453c55 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Kiritimati differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Kosrae b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Kosrae new file mode 100644 index 0000000000..61b7561589 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Kosrae differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Kwajalein b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Kwajalein new file mode 100644 index 0000000000..b57237272d Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Kwajalein differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Majuro b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Majuro new file mode 100644 index 0000000000..eab93a2af9 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Majuro differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Marquesas b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Marquesas new file mode 100644 index 0000000000..cd2d5b073a Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Marquesas differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Midway b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Midway new file mode 100644 index 0000000000..8889a26fa7 Binary files /dev/null and 
b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Midway differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Nauru b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Nauru new file mode 100644 index 0000000000..1d8179bcb5 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Nauru differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Niue b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Niue new file mode 100644 index 0000000000..b9f18a544a Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Niue differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Norfolk b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Norfolk new file mode 100644 index 0000000000..2e989c2556 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Norfolk differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Noumea b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Noumea new file mode 100644 index 0000000000..ae9e138fa5 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Noumea differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Pago_Pago b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Pago_Pago new file mode 100644 index 0000000000..fa084ba584 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Pago_Pago differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Palau b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Palau new file mode 100644 index 0000000000..efc556b140 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Palau differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Pitcairn b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Pitcairn new file mode 100644 index 0000000000..51f01c6410 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Pitcairn differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Pohnpei b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Pohnpei new file mode 100644 index 0000000000..f175ea5875 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Pohnpei differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Ponape b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Ponape new file mode 100644 index 0000000000..f175ea5875 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Ponape differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Port_Moresby b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Port_Moresby new file mode 100644 index 0000000000..8d4d12ccb0 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Port_Moresby differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Rarotonga b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Rarotonga new file mode 100644 index 0000000000..581299788a Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Rarotonga differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Saipan b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Saipan new file mode 100644 index 0000000000..519c86e966 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Saipan differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Samoa b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Samoa new file mode 100644 index 0000000000..fa084ba584 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Samoa differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Tahiti b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Tahiti new file mode 100644 index 
0000000000..22f8697467 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Tahiti differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Tarawa b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Tarawa new file mode 100644 index 0000000000..065dcd8194 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Tarawa differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Tongatapu b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Tongatapu new file mode 100644 index 0000000000..01ab6b87ef Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Tongatapu differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Truk b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Truk new file mode 100644 index 0000000000..0ef473871d Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Truk differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Wake b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Wake new file mode 100644 index 0000000000..f89c52829a Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Wake differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Wallis b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Wallis new file mode 100644 index 0000000000..9aaf558f1d Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Wallis differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Pacific/Yap b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Yap new file mode 100644 index 0000000000..0ef473871d Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Pacific/Yap differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Poland b/awx/lib/site-packages/pytz/zoneinfo/Poland new file mode 100644 index 0000000000..3797b1cb65 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Poland differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Portugal b/awx/lib/site-packages/pytz/zoneinfo/Portugal new file mode 100644 index 0000000000..168accf060 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Portugal differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/ROC b/awx/lib/site-packages/pytz/zoneinfo/ROC new file mode 100644 index 0000000000..70cfb27ca9 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/ROC differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/ROK b/awx/lib/site-packages/pytz/zoneinfo/ROK new file mode 100644 index 0000000000..96bb0c36d7 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/ROK differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Singapore b/awx/lib/site-packages/pytz/zoneinfo/Singapore new file mode 100644 index 0000000000..a6f2db8f3a Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Singapore differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Turkey b/awx/lib/site-packages/pytz/zoneinfo/Turkey new file mode 100644 index 0000000000..864099556b Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Turkey differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/UCT b/awx/lib/site-packages/pytz/zoneinfo/UCT new file mode 100644 index 0000000000..a88c4b665b Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/UCT differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/US/Alaska b/awx/lib/site-packages/pytz/zoneinfo/US/Alaska new file mode 100644 index 0000000000..d14735026a Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/US/Alaska differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/US/Aleutian 
b/awx/lib/site-packages/pytz/zoneinfo/US/Aleutian new file mode 100644 index 0000000000..391ec98ec0 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/US/Aleutian differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/US/Arizona b/awx/lib/site-packages/pytz/zoneinfo/US/Arizona new file mode 100644 index 0000000000..67589026c2 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/US/Arizona differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/US/Central b/awx/lib/site-packages/pytz/zoneinfo/US/Central new file mode 100644 index 0000000000..71aae7246a Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/US/Central differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/US/East-Indiana b/awx/lib/site-packages/pytz/zoneinfo/US/East-Indiana new file mode 100644 index 0000000000..aa3dfc4373 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/US/East-Indiana differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/US/Eastern b/awx/lib/site-packages/pytz/zoneinfo/US/Eastern new file mode 100644 index 0000000000..b2c2377f4e Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/US/Eastern differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/US/Hawaii b/awx/lib/site-packages/pytz/zoneinfo/US/Hawaii new file mode 100644 index 0000000000..1b4684b9b4 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/US/Hawaii differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/US/Indiana-Starke b/awx/lib/site-packages/pytz/zoneinfo/US/Indiana-Starke new file mode 100644 index 0000000000..33169f4596 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/US/Indiana-Starke differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/US/Michigan b/awx/lib/site-packages/pytz/zoneinfo/US/Michigan new file mode 100644 index 0000000000..da53d46df3 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/US/Michigan differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/US/Mountain b/awx/lib/site-packages/pytz/zoneinfo/US/Mountain new file mode 100644 index 0000000000..f8908febf2 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/US/Mountain differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/US/Pacific b/awx/lib/site-packages/pytz/zoneinfo/US/Pacific new file mode 100644 index 0000000000..3b7ce1dcee Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/US/Pacific differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/US/Pacific-New b/awx/lib/site-packages/pytz/zoneinfo/US/Pacific-New new file mode 100644 index 0000000000..3b7ce1dcee Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/US/Pacific-New differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/US/Samoa b/awx/lib/site-packages/pytz/zoneinfo/US/Samoa new file mode 100644 index 0000000000..fa084ba584 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/US/Samoa differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/UTC b/awx/lib/site-packages/pytz/zoneinfo/UTC new file mode 100644 index 0000000000..5583f5b0c6 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/UTC differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Universal b/awx/lib/site-packages/pytz/zoneinfo/Universal new file mode 100644 index 0000000000..5583f5b0c6 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Universal differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/W-SU b/awx/lib/site-packages/pytz/zoneinfo/W-SU new file mode 100644 index 0000000000..6068f8b9e5 Binary files /dev/null and 
b/awx/lib/site-packages/pytz/zoneinfo/W-SU differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/WET b/awx/lib/site-packages/pytz/zoneinfo/WET new file mode 100644 index 0000000000..444a1933d7 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/WET differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/Zulu b/awx/lib/site-packages/pytz/zoneinfo/Zulu new file mode 100644 index 0000000000..5583f5b0c6 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/Zulu differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/iso3166.tab b/awx/lib/site-packages/pytz/zoneinfo/iso3166.tab new file mode 100644 index 0000000000..b952ca1c59 --- /dev/null +++ b/awx/lib/site-packages/pytz/zoneinfo/iso3166.tab @@ -0,0 +1,276 @@ +# <pre> +# This file is in the public domain, so clarified as of +# 2009-05-17 by Arthur David Olson. +# ISO 3166 alpha-2 country codes +# +# From Paul Eggert (2006-09-27): +# +# This file contains a table with the following columns: +# 1. ISO 3166-1 alpha-2 country code, current as of +# ISO 3166-1 Newsletter VI-1 (2007-09-21). See: +# <a href="http://www.iso.org/iso/en/prods-services/iso3166ma/index.html"> +# ISO 3166 Maintenance agency (ISO 3166/MA) +# </a>. +# 2. The usual English name for the country, +# chosen so that alphabetic sorting of subsets produces helpful lists. +# This is not the same as the English name in the ISO 3166 tables. +# +# Columns are separated by a single tab. +# The table is sorted by country code. +# +# Lines beginning with `#' are comments. +# +# From Arthur David Olson (2011-08-17): +# Resynchronized today with the ISO 3166 site (adding SS for South Sudan). +# +#country- +#code country name +AD Andorra +AE United Arab Emirates +AF Afghanistan +AG Antigua & Barbuda +AI Anguilla +AL Albania +AM Armenia +AO Angola +AQ Antarctica +AR Argentina +AS Samoa (American) +AT Austria +AU Australia +AW Aruba +AX Aaland Islands +AZ Azerbaijan +BA Bosnia & Herzegovina +BB Barbados +BD Bangladesh +BE Belgium +BF Burkina Faso +BG Bulgaria +BH Bahrain +BI Burundi +BJ Benin +BL St Barthelemy +BM Bermuda +BN Brunei +BO Bolivia +BQ Bonaire Sint Eustatius & Saba +BR Brazil +BS Bahamas +BT Bhutan +BV Bouvet Island +BW Botswana +BY Belarus +BZ Belize +CA Canada +CC Cocos (Keeling) Islands +CD Congo (Dem. Rep.) +CF Central African Rep. +CG Congo (Rep.) 
+CH Switzerland +CI Cote d'Ivoire +CK Cook Islands +CL Chile +CM Cameroon +CN China +CO Colombia +CR Costa Rica +CU Cuba +CV Cape Verde +CW Curacao +CX Christmas Island +CY Cyprus +CZ Czech Republic +DE Germany +DJ Djibouti +DK Denmark +DM Dominica +DO Dominican Republic +DZ Algeria +EC Ecuador +EE Estonia +EG Egypt +EH Western Sahara +ER Eritrea +ES Spain +ET Ethiopia +FI Finland +FJ Fiji +FK Falkland Islands +FM Micronesia +FO Faroe Islands +FR France +GA Gabon +GB Britain (UK) +GD Grenada +GE Georgia +GF French Guiana +GG Guernsey +GH Ghana +GI Gibraltar +GL Greenland +GM Gambia +GN Guinea +GP Guadeloupe +GQ Equatorial Guinea +GR Greece +GS South Georgia & the South Sandwich Islands +GT Guatemala +GU Guam +GW Guinea-Bissau +GY Guyana +HK Hong Kong +HM Heard Island & McDonald Islands +HN Honduras +HR Croatia +HT Haiti +HU Hungary +ID Indonesia +IE Ireland +IL Israel +IM Isle of Man +IN India +IO British Indian Ocean Territory +IQ Iraq +IR Iran +IS Iceland +IT Italy +JE Jersey +JM Jamaica +JO Jordan +JP Japan +KE Kenya +KG Kyrgyzstan +KH Cambodia +KI Kiribati +KM Comoros +KN St Kitts & Nevis +KP Korea (North) +KR Korea (South) +KW Kuwait +KY Cayman Islands +KZ Kazakhstan +LA Laos +LB Lebanon +LC St Lucia +LI Liechtenstein +LK Sri Lanka +LR Liberia +LS Lesotho +LT Lithuania +LU Luxembourg +LV Latvia +LY Libya +MA Morocco +MC Monaco +MD Moldova +ME Montenegro +MF St Martin (French part) +MG Madagascar +MH Marshall Islands +MK Macedonia +ML Mali +MM Myanmar (Burma) +MN Mongolia +MO Macau +MP Northern Mariana Islands +MQ Martinique +MR Mauritania +MS Montserrat +MT Malta +MU Mauritius +MV Maldives +MW Malawi +MX Mexico +MY Malaysia +MZ Mozambique +NA Namibia +NC New Caledonia +NE Niger +NF Norfolk Island +NG Nigeria +NI Nicaragua +NL Netherlands +NO Norway +NP Nepal +NR Nauru +NU Niue +NZ New Zealand +OM Oman +PA Panama +PE Peru +PF French Polynesia +PG Papua New Guinea +PH Philippines +PK Pakistan +PL Poland +PM St Pierre & Miquelon +PN Pitcairn +PR Puerto Rico +PS Palestine +PT Portugal +PW Palau +PY Paraguay +QA Qatar +RE Reunion +RO Romania +RS Serbia +RU Russia +RW Rwanda +SA Saudi Arabia +SB Solomon Islands +SC Seychelles +SD Sudan +SE Sweden +SG Singapore +SH St Helena +SI Slovenia +SJ Svalbard & Jan Mayen +SK Slovakia +SL Sierra Leone +SM San Marino +SN Senegal +SO Somalia +SR Suriname +SS South Sudan +ST Sao Tome & Principe +SV El Salvador +SX Sint Maarten +SY Syria +SZ Swaziland +TC Turks & Caicos Is +TD Chad +TF French Southern & Antarctic Lands +TG Togo +TH Thailand +TJ Tajikistan +TK Tokelau +TL East Timor +TM Turkmenistan +TN Tunisia +TO Tonga +TR Turkey +TT Trinidad & Tobago +TV Tuvalu +TW Taiwan +TZ Tanzania +UA Ukraine +UG Uganda +UM US minor outlying islands +US United States +UY Uruguay +UZ Uzbekistan +VA Vatican City +VC St Vincent +VE Venezuela +VG Virgin Islands (UK) +VI Virgin Islands (US) +VN Vietnam +VU Vanuatu +WF Wallis & Futuna +WS Samoa (western) +YE Yemen +YT Mayotte +ZA South Africa +ZM Zambia +ZW Zimbabwe diff --git a/awx/lib/site-packages/pytz/zoneinfo/localtime b/awx/lib/site-packages/pytz/zoneinfo/localtime new file mode 100644 index 0000000000..2ee14295f1 Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/localtime differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/posixrules b/awx/lib/site-packages/pytz/zoneinfo/posixrules new file mode 100644 index 0000000000..b2c2377f4e Binary files /dev/null and b/awx/lib/site-packages/pytz/zoneinfo/posixrules differ diff --git a/awx/lib/site-packages/pytz/zoneinfo/zone.tab 
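The iso3166.tab header above fully specifies the file's layout: `#`-prefixed comment lines, then one row per country holding the two-letter ISO 3166-1 code and an English name, separated by a single tab. A minimal sketch of reading the vendored copy follows — the helper name is hypothetical, and it assumes only that pytz is importable so its package directory can be located:

import os
import pytz

def load_iso3166(path):
    """Parse iso3166.tab into a {code: name} dict, per the header's format notes."""
    countries = {}
    with open(path) as fh:
        for line in fh:
            line = line.rstrip('\n')
            if not line or line.startswith('#'):
                continue  # skip comment and blank lines
            code, name = line.split('\t', 1)  # columns are tab-separated
            countries[code] = name
    return countries

# The file ships inside pytz's zoneinfo directory.
path = os.path.join(os.path.dirname(pytz.__file__), 'zoneinfo', 'iso3166.tab')
print(load_iso3166(path)['AD'])  # -> Andorra

In practice pytz exposes this table directly as pytz.country_names, and the zone.tab file below (the same tab-separated style, mapping country codes to coordinates and TZ names) as pytz.country_timezones, so hand-parsing like this is only needed when working outside pytz.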
b/awx/lib/site-packages/pytz/zoneinfo/zone.tab new file mode 100644 index 0000000000..c1cd95e89e --- /dev/null +++ b/awx/lib/site-packages/pytz/zoneinfo/zone.tab @@ -0,0 +1,444 @@ +# <pre> +# This file is in the public domain, so clarified as of +# 2009-05-17 by Arthur David Olson. +# +# TZ zone descriptions +# +# From Paul Eggert (1996-08-05): +# +# This file contains a table with the following columns: +# 1. ISO 3166 2-character country code. See the file `iso3166.tab'. +# 2. Latitude and longitude of the zone's principal location +# in ISO 6709 sign-degrees-minutes-seconds format, +# either +-DDMM+-DDDMM or +-DDMMSS+-DDDMMSS, +# first latitude (+ is north), then longitude (+ is east). +# 3. Zone name used in value of TZ environment variable. +# 4. Comments; present if and only if the country has multiple rows. +# +# Columns are separated by a single tab. +# The table is sorted first by country, then an order within the country that +# (1) makes some geographical sense, and +# (2) puts the most populous zones first, where that does not contradict (1). +# +# Lines beginning with `#' are comments. +# +#country- +#code coordinates TZ comments +AD +4230+00131 Europe/Andorra +AE +2518+05518 Asia/Dubai +AF +3431+06912 Asia/Kabul +AG +1703-06148 America/Antigua +AI +1812-06304 America/Anguilla +AL +4120+01950 Europe/Tirane +AM +4011+04430 Asia/Yerevan +AO -0848+01314 Africa/Luanda +AQ -7750+16636 Antarctica/McMurdo McMurdo Station, Ross Island +AQ -9000+00000 Antarctica/South_Pole Amundsen-Scott Station, South Pole +AQ -6734-06808 Antarctica/Rothera Rothera Station, Adelaide Island +AQ -6448-06406 Antarctica/Palmer Palmer Station, Anvers Island +AQ -6736+06253 Antarctica/Mawson Mawson Station, Holme Bay +AQ -6835+07758 Antarctica/Davis Davis Station, Vestfold Hills +AQ -6617+11031 Antarctica/Casey Casey Station, Bailey Peninsula +AQ -7824+10654 Antarctica/Vostok Vostok Station, Lake Vostok +AQ -6640+14001 Antarctica/DumontDUrville Dumont-d'Urville Station, Terre Adelie +AQ -690022+0393524 Antarctica/Syowa Syowa Station, E Ongul I +AQ -5430+15857 Antarctica/Macquarie Macquarie Island Station, Macquarie Island +AR -3436-05827 America/Argentina/Buenos_Aires Buenos Aires (BA, CF) +AR -3124-06411 America/Argentina/Cordoba most locations (CB, CC, CN, ER, FM, MN, SE, SF) +AR -2447-06525 America/Argentina/Salta (SA, LP, NQ, RN) +AR -2411-06518 America/Argentina/Jujuy Jujuy (JY) +AR -2649-06513 America/Argentina/Tucuman Tucuman (TM) +AR -2828-06547 America/Argentina/Catamarca Catamarca (CT), Chubut (CH) +AR -2926-06651 America/Argentina/La_Rioja La Rioja (LR) +AR -3132-06831 America/Argentina/San_Juan San Juan (SJ) +AR -3253-06849 America/Argentina/Mendoza Mendoza (MZ) +AR -3319-06621 America/Argentina/San_Luis San Luis (SL) +AR -5138-06913 America/Argentina/Rio_Gallegos Santa Cruz (SC) +AR -5448-06818 America/Argentina/Ushuaia Tierra del Fuego (TF) +AS -1416-17042 Pacific/Pago_Pago +AT +4813+01620 Europe/Vienna +AU -3133+15905 Australia/Lord_Howe Lord Howe Island +AU -4253+14719 Australia/Hobart Tasmania - most locations +AU -3956+14352 Australia/Currie Tasmania - King Island +AU -3749+14458 Australia/Melbourne Victoria +AU -3352+15113 Australia/Sydney New South Wales - most locations +AU -3157+14127 Australia/Broken_Hill New South Wales - Yancowinna +AU -2728+15302 Australia/Brisbane Queensland - most locations +AU -2016+14900 Australia/Lindeman Queensland - Holiday Islands +AU -3455+13835 Australia/Adelaide South Australia +AU -1228+13050 Australia/Darwin Northern Territory +AU -3157+11551 
Australia/Perth Western Australia - most locations +AU -3143+12852 Australia/Eucla Western Australia - Eucla area +AW +1230-06958 America/Aruba +AX +6006+01957 Europe/Mariehamn +AZ +4023+04951 Asia/Baku +BA +4352+01825 Europe/Sarajevo +BB +1306-05937 America/Barbados +BD +2343+09025 Asia/Dhaka +BE +5050+00420 Europe/Brussels +BF +1222-00131 Africa/Ouagadougou +BG +4241+02319 Europe/Sofia +BH +2623+05035 Asia/Bahrain +BI -0323+02922 Africa/Bujumbura +BJ +0629+00237 Africa/Porto-Novo +BL +1753-06251 America/St_Barthelemy +BM +3217-06446 Atlantic/Bermuda +BN +0456+11455 Asia/Brunei +BO -1630-06809 America/La_Paz +BQ +120903-0681636 America/Kralendijk +BR -0351-03225 America/Noronha Atlantic islands +BR -0127-04829 America/Belem Amapa, E Para +BR -0343-03830 America/Fortaleza NE Brazil (MA, PI, CE, RN, PB) +BR -0803-03454 America/Recife Pernambuco +BR -0712-04812 America/Araguaina Tocantins +BR -0940-03543 America/Maceio Alagoas, Sergipe +BR -1259-03831 America/Bahia Bahia +BR -2332-04637 America/Sao_Paulo S & SE Brazil (GO, DF, MG, ES, RJ, SP, PR, SC, RS) +BR -2027-05437 America/Campo_Grande Mato Grosso do Sul +BR -1535-05605 America/Cuiaba Mato Grosso +BR -0226-05452 America/Santarem W Para +BR -0846-06354 America/Porto_Velho Rondonia +BR +0249-06040 America/Boa_Vista Roraima +BR -0308-06001 America/Manaus E Amazonas +BR -0640-06952 America/Eirunepe W Amazonas +BR -0958-06748 America/Rio_Branco Acre +BS +2505-07721 America/Nassau +BT +2728+08939 Asia/Thimphu +BW -2439+02555 Africa/Gaborone +BY +5354+02734 Europe/Minsk +BZ +1730-08812 America/Belize +CA +4734-05243 America/St_Johns Newfoundland Time, including SE Labrador +CA +4439-06336 America/Halifax Atlantic Time - Nova Scotia (most places), PEI +CA +4612-05957 America/Glace_Bay Atlantic Time - Nova Scotia - places that did not observe DST 1966-1971 +CA +4606-06447 America/Moncton Atlantic Time - New Brunswick +CA +5320-06025 America/Goose_Bay Atlantic Time - Labrador - most locations +CA +5125-05707 America/Blanc-Sablon Atlantic Standard Time - Quebec - Lower North Shore +CA +4531-07334 America/Montreal Eastern Time - Quebec - most locations +CA +4339-07923 America/Toronto Eastern Time - Ontario - most locations +CA +4901-08816 America/Nipigon Eastern Time - Ontario & Quebec - places that did not observe DST 1967-1973 +CA +4823-08915 America/Thunder_Bay Eastern Time - Thunder Bay, Ontario +CA +6344-06828 America/Iqaluit Eastern Time - east Nunavut - most locations +CA +6608-06544 America/Pangnirtung Eastern Time - Pangnirtung, Nunavut +CA +744144-0944945 America/Resolute Central Standard Time - Resolute, Nunavut +CA +484531-0913718 America/Atikokan Eastern Standard Time - Atikokan, Ontario and Southampton I, Nunavut +CA +624900-0920459 America/Rankin_Inlet Central Time - central Nunavut +CA +4953-09709 America/Winnipeg Central Time - Manitoba & west Ontario +CA +4843-09434 America/Rainy_River Central Time - Rainy River & Fort Frances, Ontario +CA +5024-10439 America/Regina Central Standard Time - Saskatchewan - most locations +CA +5017-10750 America/Swift_Current Central Standard Time - Saskatchewan - midwest +CA +5333-11328 America/Edmonton Mountain Time - Alberta, east British Columbia & west Saskatchewan +CA +690650-1050310 America/Cambridge_Bay Mountain Time - west Nunavut +CA +6227-11421 America/Yellowknife Mountain Time - central Northwest Territories +CA +682059-1334300 America/Inuvik Mountain Time - west Northwest Territories +CA +4906-11631 America/Creston Mountain Standard Time - Creston, British Columbia +CA +5946-12014 
America/Dawson_Creek Mountain Standard Time - Dawson Creek & Fort Saint John, British Columbia +CA +4916-12307 America/Vancouver Pacific Time - west British Columbia +CA +6043-13503 America/Whitehorse Pacific Time - south Yukon +CA +6404-13925 America/Dawson Pacific Time - north Yukon +CC -1210+09655 Indian/Cocos +CD -0418+01518 Africa/Kinshasa west Dem. Rep. of Congo +CD -1140+02728 Africa/Lubumbashi east Dem. Rep. of Congo +CF +0422+01835 Africa/Bangui +CG -0416+01517 Africa/Brazzaville +CH +4723+00832 Europe/Zurich +CI +0519-00402 Africa/Abidjan +CK -2114-15946 Pacific/Rarotonga +CL -3327-07040 America/Santiago most locations +CL -2709-10926 Pacific/Easter Easter Island & Sala y Gomez +CM +0403+00942 Africa/Douala +CN +3114+12128 Asia/Shanghai east China - Beijing, Guangdong, Shanghai, etc. +CN +4545+12641 Asia/Harbin Heilongjiang (except Mohe), Jilin +CN +2934+10635 Asia/Chongqing central China - Sichuan, Yunnan, Guangxi, Shaanxi, Guizhou, etc. +CN +4348+08735 Asia/Urumqi most of Tibet & Xinjiang +CN +3929+07559 Asia/Kashgar west Tibet & Xinjiang +CO +0436-07405 America/Bogota +CR +0956-08405 America/Costa_Rica +CU +2308-08222 America/Havana +CV +1455-02331 Atlantic/Cape_Verde +CW +1211-06900 America/Curacao +CX -1025+10543 Indian/Christmas +CY +3510+03322 Asia/Nicosia +CZ +5005+01426 Europe/Prague +DE +5230+01322 Europe/Berlin most locations +DE +4742+00841 Europe/Busingen Busingen +DJ +1136+04309 Africa/Djibouti +DK +5540+01235 Europe/Copenhagen +DM +1518-06124 America/Dominica +DO +1828-06954 America/Santo_Domingo +DZ +3647+00303 Africa/Algiers +EC -0210-07950 America/Guayaquil mainland +EC -0054-08936 Pacific/Galapagos Galapagos Islands +EE +5925+02445 Europe/Tallinn +EG +3003+03115 Africa/Cairo +EH +2709-01312 Africa/El_Aaiun +ER +1520+03853 Africa/Asmara +ES +4024-00341 Europe/Madrid mainland +ES +3553-00519 Africa/Ceuta Ceuta & Melilla +ES +2806-01524 Atlantic/Canary Canary Islands +ET +0902+03842 Africa/Addis_Ababa +FI +6010+02458 Europe/Helsinki +FJ -1808+17825 Pacific/Fiji +FK -5142-05751 Atlantic/Stanley +FM +0725+15147 Pacific/Chuuk Chuuk (Truk) and Yap +FM +0658+15813 Pacific/Pohnpei Pohnpei (Ponape) +FM +0519+16259 Pacific/Kosrae Kosrae +FO +6201-00646 Atlantic/Faroe +FR +4852+00220 Europe/Paris +GA +0023+00927 Africa/Libreville +GB +513030-0000731 Europe/London +GD +1203-06145 America/Grenada +GE +4143+04449 Asia/Tbilisi +GF +0456-05220 America/Cayenne +GG +4927-00232 Europe/Guernsey +GH +0533-00013 Africa/Accra +GI +3608-00521 Europe/Gibraltar +GL +6411-05144 America/Godthab most locations +GL +7646-01840 America/Danmarkshavn east coast, north of Scoresbysund +GL +7029-02158 America/Scoresbysund Scoresbysund / Ittoqqortoormiit +GL +7634-06847 America/Thule Thule / Pituffik +GM +1328-01639 Africa/Banjul +GN +0931-01343 Africa/Conakry +GP +1614-06132 America/Guadeloupe +GQ +0345+00847 Africa/Malabo +GR +3758+02343 Europe/Athens +GS -5416-03632 Atlantic/South_Georgia +GT +1438-09031 America/Guatemala +GU +1328+14445 Pacific/Guam +GW +1151-01535 Africa/Bissau +GY +0648-05810 America/Guyana +HK +2217+11409 Asia/Hong_Kong +HN +1406-08713 America/Tegucigalpa +HR +4548+01558 Europe/Zagreb +HT +1832-07220 America/Port-au-Prince +HU +4730+01905 Europe/Budapest +ID -0610+10648 Asia/Jakarta Java & Sumatra +ID -0002+10920 Asia/Pontianak west & central Borneo +ID -0507+11924 Asia/Makassar east & south Borneo, Sulawesi (Celebes), Bali, Nusa Tengarra, west Timor +ID -0232+14042 Asia/Jayapura west New Guinea (Irian Jaya) & Malukus (Moluccas) +IE +5320-00615 Europe/Dublin +IL +3146+03514 
Asia/Jerusalem +IM +5409-00428 Europe/Isle_of_Man +IN +2232+08822 Asia/Kolkata +IO -0720+07225 Indian/Chagos +IQ +3321+04425 Asia/Baghdad +IR +3540+05126 Asia/Tehran +IS +6409-02151 Atlantic/Reykjavik +IT +4154+01229 Europe/Rome +JE +4912-00207 Europe/Jersey +JM +1800-07648 America/Jamaica +JO +3157+03556 Asia/Amman +JP +353916+1394441 Asia/Tokyo +KE -0117+03649 Africa/Nairobi +KG +4254+07436 Asia/Bishkek +KH +1133+10455 Asia/Phnom_Penh +KI +0125+17300 Pacific/Tarawa Gilbert Islands +KI -0308-17105 Pacific/Enderbury Phoenix Islands +KI +0152-15720 Pacific/Kiritimati Line Islands +KM -1141+04316 Indian/Comoro +KN +1718-06243 America/St_Kitts +KP +3901+12545 Asia/Pyongyang +KR +3733+12658 Asia/Seoul +KW +2920+04759 Asia/Kuwait +KY +1918-08123 America/Cayman +KZ +4315+07657 Asia/Almaty most locations +KZ +4448+06528 Asia/Qyzylorda Qyzylorda (Kyzylorda, Kzyl-Orda) +KZ +5017+05710 Asia/Aqtobe Aqtobe (Aktobe) +KZ +4431+05016 Asia/Aqtau Atyrau (Atirau, Gur'yev), Mangghystau (Mankistau) +KZ +5113+05121 Asia/Oral West Kazakhstan +LA +1758+10236 Asia/Vientiane +LB +3353+03530 Asia/Beirut +LC +1401-06100 America/St_Lucia +LI +4709+00931 Europe/Vaduz +LK +0656+07951 Asia/Colombo +LR +0618-01047 Africa/Monrovia +LS -2928+02730 Africa/Maseru +LT +5441+02519 Europe/Vilnius +LU +4936+00609 Europe/Luxembourg +LV +5657+02406 Europe/Riga +LY +3254+01311 Africa/Tripoli +MA +3339-00735 Africa/Casablanca +MC +4342+00723 Europe/Monaco +MD +4700+02850 Europe/Chisinau +ME +4226+01916 Europe/Podgorica +MF +1804-06305 America/Marigot +MG -1855+04731 Indian/Antananarivo +MH +0709+17112 Pacific/Majuro most locations +MH +0905+16720 Pacific/Kwajalein Kwajalein +MK +4159+02126 Europe/Skopje +ML +1239-00800 Africa/Bamako +MM +1647+09610 Asia/Rangoon +MN +4755+10653 Asia/Ulaanbaatar most locations +MN +4801+09139 Asia/Hovd Bayan-Olgiy, Govi-Altai, Hovd, Uvs, Zavkhan +MN +4804+11430 Asia/Choibalsan Dornod, Sukhbaatar +MO +2214+11335 Asia/Macau +MP +1512+14545 Pacific/Saipan +MQ +1436-06105 America/Martinique +MR +1806-01557 Africa/Nouakchott +MS +1643-06213 America/Montserrat +MT +3554+01431 Europe/Malta +MU -2010+05730 Indian/Mauritius +MV +0410+07330 Indian/Maldives +MW -1547+03500 Africa/Blantyre +MX +1924-09909 America/Mexico_City Central Time - most locations +MX +2105-08646 America/Cancun Central Time - Quintana Roo +MX +2058-08937 America/Merida Central Time - Campeche, Yucatan +MX +2540-10019 America/Monterrey Mexican Central Time - Coahuila, Durango, Nuevo Leon, Tamaulipas away from US border +MX +2550-09730 America/Matamoros US Central Time - Coahuila, Durango, Nuevo Leon, Tamaulipas near US border +MX +2313-10625 America/Mazatlan Mountain Time - S Baja, Nayarit, Sinaloa +MX +2838-10605 America/Chihuahua Mexican Mountain Time - Chihuahua away from US border +MX +2934-10425 America/Ojinaga US Mountain Time - Chihuahua near US border +MX +2904-11058 America/Hermosillo Mountain Standard Time - Sonora +MX +3232-11701 America/Tijuana US Pacific Time - Baja California near US border +MX +3018-11452 America/Santa_Isabel Mexican Pacific Time - Baja California away from US border +MX +2048-10515 America/Bahia_Banderas Mexican Central Time - Bahia de Banderas +MY +0310+10142 Asia/Kuala_Lumpur peninsular Malaysia +MY +0133+11020 Asia/Kuching Sabah & Sarawak +MZ -2558+03235 Africa/Maputo +NA -2234+01706 Africa/Windhoek +NC -2216+16627 Pacific/Noumea +NE +1331+00207 Africa/Niamey +NF -2903+16758 Pacific/Norfolk +NG +0627+00324 Africa/Lagos +NI +1209-08617 America/Managua +NL +5222+00454 Europe/Amsterdam +NO +5955+01045 
Europe/Oslo +NP +2743+08519 Asia/Kathmandu +NR -0031+16655 Pacific/Nauru +NU -1901-16955 Pacific/Niue +NZ -3652+17446 Pacific/Auckland most locations +NZ -4357-17633 Pacific/Chatham Chatham Islands +OM +2336+05835 Asia/Muscat +PA +0858-07932 America/Panama +PE -1203-07703 America/Lima +PF -1732-14934 Pacific/Tahiti Society Islands +PF -0900-13930 Pacific/Marquesas Marquesas Islands +PF -2308-13457 Pacific/Gambier Gambier Islands +PG -0930+14710 Pacific/Port_Moresby +PH +1435+12100 Asia/Manila +PK +2452+06703 Asia/Karachi +PL +5215+02100 Europe/Warsaw +PM +4703-05620 America/Miquelon +PN -2504-13005 Pacific/Pitcairn +PR +182806-0660622 America/Puerto_Rico +PS +3130+03428 Asia/Gaza Gaza Strip +PS +313200+0350542 Asia/Hebron West Bank +PT +3843-00908 Europe/Lisbon mainland +PT +3238-01654 Atlantic/Madeira Madeira Islands +PT +3744-02540 Atlantic/Azores Azores +PW +0720+13429 Pacific/Palau +PY -2516-05740 America/Asuncion +QA +2517+05132 Asia/Qatar +RE -2052+05528 Indian/Reunion +RO +4426+02606 Europe/Bucharest +RS +4450+02030 Europe/Belgrade +RU +5443+02030 Europe/Kaliningrad Moscow-01 - Kaliningrad +RU +5545+03735 Europe/Moscow Moscow+00 - west Russia +RU +4844+04425 Europe/Volgograd Moscow+00 - Caspian Sea +RU +5312+05009 Europe/Samara Moscow+00 - Samara, Udmurtia +RU +5651+06036 Asia/Yekaterinburg Moscow+02 - Urals +RU +5500+07324 Asia/Omsk Moscow+03 - west Siberia +RU +5502+08255 Asia/Novosibirsk Moscow+03 - Novosibirsk +RU +5345+08707 Asia/Novokuznetsk Moscow+03 - Novokuznetsk +RU +5601+09250 Asia/Krasnoyarsk Moscow+04 - Yenisei River +RU +5216+10420 Asia/Irkutsk Moscow+05 - Lake Baikal +RU +6200+12940 Asia/Yakutsk Moscow+06 - Lena River +RU +623923+1353314 Asia/Khandyga Moscow+06 - Tomponsky, Ust-Maysky +RU +4310+13156 Asia/Vladivostok Moscow+07 - Amur River +RU +4658+14242 Asia/Sakhalin Moscow+07 - Sakhalin Island +RU +643337+1431336 Asia/Ust-Nera Moscow+07 - Oymyakonsky +RU +5934+15048 Asia/Magadan Moscow+08 - Magadan +RU +5301+15839 Asia/Kamchatka Moscow+08 - Kamchatka +RU +6445+17729 Asia/Anadyr Moscow+08 - Bering Sea +RW -0157+03004 Africa/Kigali +SA +2438+04643 Asia/Riyadh +SB -0932+16012 Pacific/Guadalcanal +SC -0440+05528 Indian/Mahe +SD +1536+03232 Africa/Khartoum +SE +5920+01803 Europe/Stockholm +SG +0117+10351 Asia/Singapore +SH -1555-00542 Atlantic/St_Helena +SI +4603+01431 Europe/Ljubljana +SJ +7800+01600 Arctic/Longyearbyen +SK +4809+01707 Europe/Bratislava +SL +0830-01315 Africa/Freetown +SM +4355+01228 Europe/San_Marino +SN +1440-01726 Africa/Dakar +SO +0204+04522 Africa/Mogadishu +SR +0550-05510 America/Paramaribo +SS +0451+03136 Africa/Juba +ST +0020+00644 Africa/Sao_Tome +SV +1342-08912 America/El_Salvador +SX +180305-0630250 America/Lower_Princes +SY +3330+03618 Asia/Damascus +SZ -2618+03106 Africa/Mbabane +TC +2128-07108 America/Grand_Turk +TD +1207+01503 Africa/Ndjamena +TF -492110+0701303 Indian/Kerguelen +TG +0608+00113 Africa/Lome +TH +1345+10031 Asia/Bangkok +TJ +3835+06848 Asia/Dushanbe +TK -0922-17114 Pacific/Fakaofo +TL -0833+12535 Asia/Dili +TM +3757+05823 Asia/Ashgabat +TN +3648+01011 Africa/Tunis +TO -2110-17510 Pacific/Tongatapu +TR +4101+02858 Europe/Istanbul +TT +1039-06131 America/Port_of_Spain +TV -0831+17913 Pacific/Funafuti +TW +2503+12130 Asia/Taipei +TZ -0648+03917 Africa/Dar_es_Salaam +UA +5026+03031 Europe/Kiev most locations +UA +4837+02218 Europe/Uzhgorod Ruthenia +UA +4750+03510 Europe/Zaporozhye Zaporozh'ye, E Lugansk / Zaporizhia, E Luhansk +UA +4457+03406 Europe/Simferopol central Crimea +UG +0019+03225 Africa/Kampala +UM +1645-16931 
Pacific/Johnston Johnston Atoll +UM +2813-17722 Pacific/Midway Midway Islands +UM +1917+16637 Pacific/Wake Wake Island +US +404251-0740023 America/New_York Eastern Time +US +421953-0830245 America/Detroit Eastern Time - Michigan - most locations +US +381515-0854534 America/Kentucky/Louisville Eastern Time - Kentucky - Louisville area +US +364947-0845057 America/Kentucky/Monticello Eastern Time - Kentucky - Wayne County +US +394606-0860929 America/Indiana/Indianapolis Eastern Time - Indiana - most locations +US +384038-0873143 America/Indiana/Vincennes Eastern Time - Indiana - Daviess, Dubois, Knox & Martin Counties +US +410305-0863611 America/Indiana/Winamac Eastern Time - Indiana - Pulaski County +US +382232-0862041 America/Indiana/Marengo Eastern Time - Indiana - Crawford County +US +382931-0871643 America/Indiana/Petersburg Eastern Time - Indiana - Pike County +US +384452-0850402 America/Indiana/Vevay Eastern Time - Indiana - Switzerland County +US +415100-0873900 America/Chicago Central Time +US +375711-0864541 America/Indiana/Tell_City Central Time - Indiana - Perry County +US +411745-0863730 America/Indiana/Knox Central Time - Indiana - Starke County +US +450628-0873651 America/Menominee Central Time - Michigan - Dickinson, Gogebic, Iron & Menominee Counties +US +470659-1011757 America/North_Dakota/Center Central Time - North Dakota - Oliver County +US +465042-1012439 America/North_Dakota/New_Salem Central Time - North Dakota - Morton County (except Mandan area) +US +471551-1014640 America/North_Dakota/Beulah Central Time - North Dakota - Mercer County +US +394421-1045903 America/Denver Mountain Time +US +433649-1161209 America/Boise Mountain Time - south Idaho & east Oregon +US +364708-1084111 America/Shiprock Mountain Time - Navajo +US +332654-1120424 America/Phoenix Mountain Standard Time - Arizona +US +340308-1181434 America/Los_Angeles Pacific Time +US +611305-1495401 America/Anchorage Alaska Time +US +581807-1342511 America/Juneau Alaska Time - Alaska panhandle +US +571035-1351807 America/Sitka Alaska Time - southeast Alaska panhandle +US +593249-1394338 America/Yakutat Alaska Time - Alaska panhandle neck +US +643004-1652423 America/Nome Alaska Time - west Alaska +US +515248-1763929 America/Adak Aleutian Islands +US +550737-1313435 America/Metlakatla Metlakatla Time - Annette Island +US +211825-1575130 Pacific/Honolulu Hawaii +UY -3453-05611 America/Montevideo +UZ +3940+06648 Asia/Samarkand west Uzbekistan +UZ +4120+06918 Asia/Tashkent east Uzbekistan +VA +415408+0122711 Europe/Vatican +VC +1309-06114 America/St_Vincent +VE +1030-06656 America/Caracas +VG +1827-06437 America/Tortola +VI +1821-06456 America/St_Thomas +VN +1045+10640 Asia/Ho_Chi_Minh +VU -1740+16825 Pacific/Efate +WF -1318-17610 Pacific/Wallis +WS -1350-17144 Pacific/Apia +YE +1245+04512 Asia/Aden +YT -1247+04514 Indian/Mayotte +ZA -2615+02800 Africa/Johannesburg +ZM -1525+02817 Africa/Lusaka +ZW -1750+03103 Africa/Harare diff --git a/awx/lib/site-packages/requests/__init__.py b/awx/lib/site-packages/requests/__init__.py new file mode 100644 index 0000000000..1af8d8ed2e --- /dev/null +++ b/awx/lib/site-packages/requests/__init__.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- + +# __ +# /__) _ _ _ _ _/ _ +# / ( (- (/ (/ (- _) / _) +# / + +""" +requests HTTP library +~~~~~~~~~~~~~~~~~~~~~ + +Requests is an HTTP library, written in Python, for human beings. 
Basic GET +usage: + + >>> import requests + >>> r = requests.get('http://python.org') + >>> r.status_code + 200 + >>> 'Python is a programming language' in r.content + True + +... or POST: + + >>> payload = dict(key1='value1', key2='value2') + >>> r = requests.post("http://httpbin.org/post", data=payload) + >>> print r.text + { + ... + "form": { + "key2": "value2", + "key1": "value1" + }, + ... + } + +The other HTTP methods are supported - see `requests.api`. Full documentation +is at <http://python-requests.org>. + +:copyright: (c) 2013 by Kenneth Reitz. +:license: Apache 2.0, see LICENSE for more details. + +""" + +__title__ = 'requests' +__version__ = '1.2.3' +__build__ = 0x010203 +__author__ = 'Kenneth Reitz' +__license__ = 'Apache 2.0' +__copyright__ = 'Copyright 2013 Kenneth Reitz' + +# Attempt to enable urllib3's SNI support, if possible +try: + from requests.packages.urllib3.contrib import pyopenssl + pyopenssl.inject_into_urllib3() +except ImportError: + pass + +from . import utils +from .models import Request, Response, PreparedRequest +from .api import request, get, head, post, patch, put, delete, options +from .sessions import session, Session +from .status_codes import codes +from .exceptions import ( + RequestException, Timeout, URLRequired, + TooManyRedirects, HTTPError, ConnectionError +) + +# Set default logging handler to avoid "No handler found" warnings. +import logging +try: # Python 2.7+ + from logging import NullHandler +except ImportError: + class NullHandler(logging.Handler): + def emit(self, record): + pass + +logging.getLogger(__name__).addHandler(NullHandler()) diff --git a/awx/lib/site-packages/requests/adapters.py b/awx/lib/site-packages/requests/adapters.py new file mode 100644 index 0000000000..98b7317edb --- /dev/null +++ b/awx/lib/site-packages/requests/adapters.py @@ -0,0 +1,342 @@ +# -*- coding: utf-8 -*- + +""" +requests.adapters +~~~~~~~~~~~~~~~~~ + +This module contains the transport adapters that Requests uses to define +and maintain connections. +""" + +import socket + +from .models import Response +from .packages.urllib3.poolmanager import PoolManager, ProxyManager +from .packages.urllib3.response import HTTPResponse +from .compat import urlparse, basestring, urldefrag, unquote +from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers, + prepend_scheme_if_needed, get_auth_from_url) +from .structures import CaseInsensitiveDict +from .packages.urllib3.exceptions import MaxRetryError +from .packages.urllib3.exceptions import TimeoutError +from .packages.urllib3.exceptions import SSLError as _SSLError +from .packages.urllib3.exceptions import HTTPError as _HTTPError +from .cookies import extract_cookies_to_jar +from .exceptions import ConnectionError, Timeout, SSLError +from .auth import _basic_auth_str + +DEFAULT_POOLBLOCK = False +DEFAULT_POOLSIZE = 10 +DEFAULT_RETRIES = 0 + + +class BaseAdapter(object): + """The Base Transport Adapter""" + + def __init__(self): + super(BaseAdapter, self).__init__() + + def send(self): + raise NotImplementedError + + def close(self): + raise NotImplementedError + + +class HTTPAdapter(BaseAdapter): + """The built-in HTTP Adapter for urllib3. + + Provides a general-case interface for Requests sessions to contact HTTP and + HTTPS urls by implementing the Transport Adapter interface. This class will + usually be created by the :class:`Session <Session>` class under the + covers. + + :param pool_connections: The number of urllib3 connection pools to cache. 
+ :param pool_maxsize: The maximum number of connections to save in the pool. + :param max_retries: The maximum number of retries each connection should attempt. + :param pool_block: Whether the connection pool should block for connections. + + Usage:: + + >>> import requests + >>> s = requests.Session() + >>> a = requests.adapters.HTTPAdapter() + >>> s.mount('http://', a) + """ + __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize', + '_pool_block'] + + def __init__(self, pool_connections=DEFAULT_POOLSIZE, + pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES, + pool_block=DEFAULT_POOLBLOCK): + self.max_retries = max_retries + self.config = {} + + super(HTTPAdapter, self).__init__() + + self._pool_connections = pool_connections + self._pool_maxsize = pool_maxsize + self._pool_block = pool_block + + self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block) + + def __getstate__(self): + return dict((attr, getattr(self, attr, None)) for attr in + self.__attrs__) + + def __setstate__(self, state): + for attr, value in state.items(): + setattr(self, attr, value) + + self.init_poolmanager(self._pool_connections, self._pool_maxsize, + block=self._pool_block) + + def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK): + """Initializes a urllib3 PoolManager. This method should not be called + from user code, and is only exposed for use when subclassing the + :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. + + :param connections: The number of urllib3 connection pools to cache. + :param maxsize: The maximum number of connections to save in the pool. + :param block: Block when no free connections are available. + """ + # save these values for pickling + self._pool_connections = connections + self._pool_maxsize = maxsize + self._pool_block = block + + self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize, + block=block) + + def cert_verify(self, conn, url, verify, cert): + """Verify an SSL certificate. This method should not be called from user + code, and is only exposed for use when subclassing the + :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. + + :param conn: The urllib3 connection object associated with the cert. + :param url: The requested URL. + :param verify: Whether we should actually verify the certificate. + :param cert: The SSL certificate to verify. + """ + if url.startswith('https') and verify: + + cert_loc = None + + # Allow self-specified cert location. + if verify is not True: + cert_loc = verify + + if not cert_loc: + cert_loc = DEFAULT_CA_BUNDLE_PATH + + if not cert_loc: + raise Exception("Could not find a suitable SSL CA certificate bundle.") + + conn.cert_reqs = 'CERT_REQUIRED' + conn.ca_certs = cert_loc + else: + conn.cert_reqs = 'CERT_NONE' + conn.ca_certs = None + + if cert: + if not isinstance(cert, basestring): + conn.cert_file = cert[0] + conn.key_file = cert[1] + else: + conn.cert_file = cert + + def build_response(self, req, resp): + """Builds a :class:`Response <requests.Response>` object from a urllib3 + response. This should not be called from user code, and is only exposed + for use when subclassing the + :class:`HTTPAdapter <requests.adapters.HTTPAdapter>` + + :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response. + :param resp: The urllib3 response object. + """ + response = Response() + + # Fallback to None if there's no status_code, for whatever reason.
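+ # urllib3's HTTPResponse stores the integer status code as `.status`; + # getattr() keeps this safe for response objects that lack the attribute.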
+ response.status_code = getattr(resp, 'status', None) + + # Make headers case-insensitive. + response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {})) + + # Set encoding. + response.encoding = get_encoding_from_headers(response.headers) + response.raw = resp + response.reason = response.raw.reason + + if isinstance(req.url, bytes): + response.url = req.url.decode('utf-8') + else: + response.url = req.url + + # Add new cookies from the server. + extract_cookies_to_jar(response.cookies, req, resp) + + # Give the Response some context. + response.request = req + response.connection = self + + return response + + def get_connection(self, url, proxies=None): + """Returns a urllib3 connection for the given URL. This should not be + called from user code, and is only exposed for use when subclassing the + :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. + + :param url: The URL to connect to. + :param proxies: (optional) A Requests-style dictionary of proxies used on this request. + """ + proxies = proxies or {} + proxy = proxies.get(urlparse(url).scheme) + + if proxy: + proxy = prepend_scheme_if_needed(proxy, urlparse(url).scheme) + conn = ProxyManager(self.poolmanager.connection_from_url(proxy)) + else: + conn = self.poolmanager.connection_from_url(url) + + return conn + + def close(self): + """Disposes of any internal state. + + Currently, this just closes the PoolManager, which closes pooled + connections. + """ + self.poolmanager.clear() + + def request_url(self, request, proxies): + """Obtain the url to use when making the final request. + + If the message is being sent through a proxy, the full URL has to be + used. Otherwise, we should only use the path portion of the URL. + + This should not be called from user code, and is only exposed for use + when subclassing the + :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. + + :param request: The :class:`PreparedRequest <PreparedRequest>` being sent. + :param proxies: A dictionary of schemes to proxy URLs. + """ + proxies = proxies or {} + proxy = proxies.get(urlparse(request.url).scheme) + + if proxy: + url, _ = urldefrag(request.url) + else: + url = request.path_url + + return url + + def add_headers(self, request, **kwargs): + """Add any headers needed by the connection. Currently this adds a + Proxy-Authorization header. + + This should not be called from user code, and is only exposed for use + when subclassing the + :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. + + :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to. + :param kwargs: The keyword arguments from the call to send(). + """ + proxies = kwargs.get('proxies', {}) + + if proxies is None: + proxies = {} + + proxy = proxies.get(urlparse(request.url).scheme) + username, password = get_auth_from_url(proxy) + + if username and password: + # Proxy auth usernames and passwords will be urlencoded, we need + # to decode them. + username = unquote(username) + password = unquote(password) + request.headers['Proxy-Authorization'] = _basic_auth_str(username, + password) + + def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None): + """Sends PreparedRequest object. Returns Response object. + + :param request: The :class:`PreparedRequest <PreparedRequest>` being sent. + :param stream: (optional) Whether to stream the request content. + :param timeout: (optional) The timeout on the request. + :param verify: (optional) Whether to verify SSL certificates.
+ :param cert: (optional) Any user-provided SSL certificate to be trusted. + :param proxies: (optional) The proxies dictionary to apply to the request. + """ + + conn = self.get_connection(request.url, proxies) + + self.cert_verify(conn, request.url, verify, cert) + url = self.request_url(request, proxies) + self.add_headers(request, proxies=proxies) + + chunked = not (request.body is None or 'Content-Length' in request.headers) + + try: + if not chunked: + resp = conn.urlopen( + method=request.method, + url=url, + body=request.body, + headers=request.headers, + redirect=False, + assert_same_host=False, + preload_content=False, + decode_content=False, + retries=self.max_retries, + timeout=timeout + ) + + # Send the request. + else: + if hasattr(conn, 'proxy_pool'): + conn = conn.proxy_pool + + low_conn = conn._get_conn(timeout=timeout) + low_conn.putrequest(request.method, url, skip_accept_encoding=True) + + for header, value in request.headers.items(): + low_conn.putheader(header, value) + + low_conn.endheaders() + + for i in request.body: + low_conn.send(hex(len(i))[2:].encode('utf-8')) + low_conn.send(b'\r\n') + low_conn.send(i) + low_conn.send(b'\r\n') + low_conn.send(b'0\r\n\r\n') + + r = low_conn.getresponse() + resp = HTTPResponse.from_httplib(r, + pool=conn, + connection=low_conn, + preload_content=False, + decode_content=False + ) + + except socket.error as sockerr: + raise ConnectionError(sockerr) + + except MaxRetryError as e: + raise ConnectionError(e) + + except (_SSLError, _HTTPError) as e: + if isinstance(e, _SSLError): + raise SSLError(e) + elif isinstance(e, TimeoutError): + raise Timeout(e) + else: + raise + + r = self.build_response(request, resp) + + if not stream: + r.content + + return r diff --git a/awx/lib/site-packages/requests/api.py b/awx/lib/site-packages/requests/api.py new file mode 100644 index 0000000000..baf43dd613 --- /dev/null +++ b/awx/lib/site-packages/requests/api.py @@ -0,0 +1,120 @@ +# -*- coding: utf-8 -*- + +""" +requests.api +~~~~~~~~~~~~ + +This module implements the Requests API. + +:copyright: (c) 2012 by Kenneth Reitz. +:license: Apache2, see LICENSE for more details. + +""" + +from . import sessions + + +def request(method, url, **kwargs): + """Constructs and sends a :class:`Request <Request>`. + Returns :class:`Response <Response>` object. + + :param method: method for the new :class:`Request` object. + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. + :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. + :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. + :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. + :param files: (optional) Dictionary of 'name': file-like-objects (or {'name': ('filename', fileobj)}) for multipart encoding upload. + :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. + :param timeout: (optional) Float describing the timeout of the request. + :param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. + :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. + :param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided. + :param stream: (optional) if ``False``, the response content will be immediately downloaded.
+ :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. + + Usage:: + + >>> import requests + >>> req = requests.request('GET', 'http://httpbin.org/get') + >>> req + <Response [200]> + """ + + session = sessions.Session() + return session.request(method=method, url=url, **kwargs) + + +def get(url, **kwargs): + """Sends a GET request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + """ + + kwargs.setdefault('allow_redirects', True) + return request('get', url, **kwargs) + + +def options(url, **kwargs): + """Sends an OPTIONS request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + """ + + kwargs.setdefault('allow_redirects', True) + return request('options', url, **kwargs) + + +def head(url, **kwargs): + """Sends a HEAD request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + """ + + kwargs.setdefault('allow_redirects', False) + return request('head', url, **kwargs) + + +def post(url, data=None, **kwargs): + """Sends a POST request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + """ + + return request('post', url, data=data, **kwargs) + + +def put(url, data=None, **kwargs): + """Sends a PUT request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + """ + + return request('put', url, data=data, **kwargs) + + +def patch(url, data=None, **kwargs): + """Sends a PATCH request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + """ + + return request('patch', url, data=data, **kwargs) + + +def delete(url, **kwargs): + """Sends a DELETE request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + """ + + return request('delete', url, **kwargs) diff --git a/awx/lib/site-packages/requests/auth.py b/awx/lib/site-packages/requests/auth.py new file mode 100644 index 0000000000..fab05cf3bc --- /dev/null +++ b/awx/lib/site-packages/requests/auth.py @@ -0,0 +1,177 @@ +# -*- coding: utf-8 -*- + +""" +requests.auth +~~~~~~~~~~~~~ + +This module contains the authentication handlers for Requests.
+""" + +import os +import re +import time +import hashlib +import logging + +from base64 import b64encode + +from .compat import urlparse, str +from .utils import parse_dict_header + + +log = logging.getLogger(__name__) + +CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded' +CONTENT_TYPE_MULTI_PART = 'multipart/form-data' + + +def _basic_auth_str(username, password): + """Returns a Basic Auth string.""" + + return 'Basic ' + b64encode(('%s:%s' % (username, password)).encode('latin1')).strip().decode('latin1') + + +class AuthBase(object): + """Base class that all auth implementations derive from""" + + def __call__(self, r): + raise NotImplementedError('Auth hooks must be callable.') + + +class HTTPBasicAuth(AuthBase): + """Attaches HTTP Basic Authentication to the given Request object.""" + def __init__(self, username, password): + self.username = username + self.password = password + + def __call__(self, r): + r.headers['Authorization'] = _basic_auth_str(self.username, self.password) + return r + + +class HTTPProxyAuth(HTTPBasicAuth): + """Attaches HTTP Proxy Authentication to a given Request object.""" + def __call__(self, r): + r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password) + return r + + +class HTTPDigestAuth(AuthBase): + """Attaches HTTP Digest Authentication to the given Request object.""" + def __init__(self, username, password): + self.username = username + self.password = password + self.last_nonce = '' + self.nonce_count = 0 + self.chal = {} + + def build_digest_header(self, method, url): + + realm = self.chal['realm'] + nonce = self.chal['nonce'] + qop = self.chal.get('qop') + algorithm = self.chal.get('algorithm') + opaque = self.chal.get('opaque') + + if algorithm is None: + _algorithm = 'MD5' + else: + _algorithm = algorithm.upper() + # lambdas assume digest modules are imported at the top level + if _algorithm == 'MD5': + def md5_utf8(x): + if isinstance(x, str): + x = x.encode('utf-8') + return hashlib.md5(x).hexdigest() + hash_utf8 = md5_utf8 + elif _algorithm == 'SHA': + def sha_utf8(x): + if isinstance(x, str): + x = x.encode('utf-8') + return hashlib.sha1(x).hexdigest() + hash_utf8 = sha_utf8 + # XXX MD5-sess + KD = lambda s, d: hash_utf8("%s:%s" % (s, d)) + + if hash_utf8 is None: + return None + + # XXX not implemented yet + entdig = None + p_parsed = urlparse(url) + path = p_parsed.path + if p_parsed.query: + path += '?' + p_parsed.query + + A1 = '%s:%s:%s' % (self.username, realm, self.password) + A2 = '%s:%s' % (method, path) + + if qop == 'auth': + if nonce == self.last_nonce: + self.nonce_count += 1 + else: + self.nonce_count = 1 + + ncvalue = '%08x' % self.nonce_count + s = str(self.nonce_count).encode('utf-8') + s += nonce.encode('utf-8') + s += time.ctime().encode('utf-8') + s += os.urandom(8) + + cnonce = (hashlib.sha1(s).hexdigest()[:16]) + noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, hash_utf8(A2)) + respdig = KD(hash_utf8(A1), noncebit) + elif qop is None: + respdig = KD(hash_utf8(A1), "%s:%s" % (nonce, hash_utf8(A2))) + else: + # XXX handle auth-int. + return None + + self.last_nonce = nonce + + # XXX should the partial digests be encoded too? 
+ base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \ + 'response="%s"' % (self.username, realm, nonce, path, respdig) + if opaque: + base += ', opaque="%s"' % opaque + if algorithm: + base += ', algorithm="%s"' % algorithm + if entdig: + base += ', digest="%s"' % entdig + if qop: + base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce) + + return 'Digest %s' % (base) + + def handle_401(self, r, **kwargs): + """Takes the given response and tries digest-auth, if needed.""" + + num_401_calls = getattr(self, 'num_401_calls', 1) + s_auth = r.headers.get('www-authenticate', '') + + if 'digest' in s_auth.lower() and num_401_calls < 2: + + setattr(self, 'num_401_calls', num_401_calls + 1) + pat = re.compile(r'digest ', flags=re.IGNORECASE) + self.chal = parse_dict_header(pat.sub('', s_auth, count=1)) + + # Consume content and release the original connection + # to allow our new request to reuse the same one. + r.content + r.raw.release_conn() + + r.request.headers['Authorization'] = self.build_digest_header(r.request.method, r.request.url) + _r = r.connection.send(r.request, **kwargs) + _r.history.append(r) + + return _r + + setattr(self, 'num_401_calls', 1) + return r + + def __call__(self, r): + # If we have a saved nonce, skip the 401 + if self.last_nonce: + r.headers['Authorization'] = self.build_digest_header(r.method, r.url) + r.register_hook('response', self.handle_401) + return r diff --git a/awx/lib/site-packages/requests/cacert.pem b/awx/lib/site-packages/requests/cacert.pem new file mode 100644 index 0000000000..504fdccf2f --- /dev/null +++ b/awx/lib/site-packages/requests/cacert.pem @@ -0,0 +1,3290 @@ +## +## ca-bundle.crt -- Bundle of CA Root Certificates +## +## Certificate data from Mozilla as of: Wed Jan 18 00:04:16 2012 +## +## This is a bundle of X.509 certificates of public Certificate Authorities +## (CA). These were automatically extracted from Mozilla's root certificates +## file (certdata.txt). This file can be found in the mozilla source tree: +## http://mxr.mozilla.org/mozilla/source/security/nss/lib/ckfw/builtins/certdata.txt?raw=1 +## +## It contains the certificates in PEM format and therefore +## can be directly used with curl / libcurl / php_curl, or with +## an Apache+mod_ssl webserver for SSL client authentication. +## Just configure this file as the SSLCACertificateFile. +## + +# ***** BEGIN LICENSE BLOCK ***** +# Version: MPL 1.1/GPL 2.0/LGPL 2.1 +# +# The contents of this file are subject to the Mozilla Public License Version +# 1.1 (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# http://www.mozilla.org/MPL/ +# +# Software distributed under the License is distributed on an "AS IS" basis, +# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License +# for the specific language governing rights and limitations under the +# License. +# +# The Original Code is the Netscape security libraries. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1994-2000 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# +# Alternatively, the contents of this file may be used under the terms of +# either the GNU General Public License Version 2 or later (the "GPL"), or +# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), +# in which case the provisions of the GPL or the LGPL are applicable instead +# of those above. 
If you wish to allow use of your version of this file only +# under the terms of either the GPL or the LGPL, and not to allow others to +# use your version of this file under the terms of the MPL, indicate your +# decision by deleting the provisions above and replace them with the notice +# and other provisions required by the GPL or the LGPL. If you do not delete +# the provisions above, a recipient may use your version of this file under +# the terms of any one of the MPL, the GPL or the LGPL. +# +# ***** END LICENSE BLOCK ***** +# @(#) $RCSfile: certdata.txt,v $ $Revision: 1.81 $ $Date: 2012/01/17 22:02:37 $ + +GTE CyberTrust Global Root +========================== +-----BEGIN CERTIFICATE----- +MIICWjCCAcMCAgGlMA0GCSqGSIb3DQEBBAUAMHUxCzAJBgNVBAYTAlVTMRgwFgYDVQQKEw9HVEUg +Q29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNvbHV0aW9ucywgSW5jLjEjMCEG +A1UEAxMaR1RFIEN5YmVyVHJ1c3QgR2xvYmFsIFJvb3QwHhcNOTgwODEzMDAyOTAwWhcNMTgwODEz +MjM1OTAwWjB1MQswCQYDVQQGEwJVUzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQL +Ex5HVEUgQ3liZXJUcnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0 +IEdsb2JhbCBSb290MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVD6C28FCc6HrHiM3dFw4u +sJTQGz0O9pTAipTHBsiQl8i4ZBp6fmw8U+E3KHNgf7KXUwefU/ltWJTSr41tiGeA5u2ylc9yMcql +HHK6XALnZELn+aks1joNrI1CqiQBOeacPwGFVw1Yh0X404Wqk2kmhXBIgD8SFcd5tB8FLztimQID +AQABMA0GCSqGSIb3DQEBBAUAA4GBAG3rGwnpXtlR22ciYaQqPEh346B8pt5zohQDhT37qw4wxYMW +M4ETCJ57NE7fQMh017l93PR2VX2bY1QY6fDq81yx2YtCHrnAlU66+tXifPVoYb+O7AWXX1uw16OF +NMQkpw0PlZPvy5TYnh+dXIVtx6quTx8itc2VrbqnzPmrC3p/ +-----END CERTIFICATE----- + +Thawte Server CA +================ +-----BEGIN CERTIFICATE----- +MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT +DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3dGUgQ29uc3Vs +dGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UE +AxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5j +b20wHhcNOTYwODAxMDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNV +BAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3dGUgQ29u +c3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcG +A1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0 +ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl +/Kj0R1HahbUgdJSGHg91yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg7 +1CcEJRCXL+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGjEzAR +MA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG7oWDTSEwjsrZqG9J +GubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6eQNuozDJ0uW8NxuOzRAvZim+aKZuZ +GCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZqdq5snUb9kLy78fyGPmJvKP/iiMucEc= +-----END CERTIFICATE----- + +Thawte Premium Server CA +======================== +-----BEGIN CERTIFICATE----- +MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkExFTATBgNVBAgT +DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3dGUgQ29uc3Vs +dGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UE +AxMYVGhhd3RlIFByZW1pdW0gU2VydmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZl +ckB0aGF3dGUuY29tMB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYT +AlpBMRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsGA1UEChMU +VGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRpb24gU2VydmljZXMgRGl2 +aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNlcnZlciBDQTEoMCYGCSqGSIb3DQEJARYZ +cHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNvbTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2 
+aovXwlue2oFBYo847kkEVdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIh +Udib0GfQug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMRuHM/ +qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQQFAAOBgQAm +SCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUIhfzJATj/Tb7yFkJD57taRvvBxhEf +8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JMpAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7t +UCemDaYj+bvLpgcUQg== +-----END CERTIFICATE----- + +Equifax Secure CA +================= +-----BEGIN CERTIFICATE----- +MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJVUzEQMA4GA1UE +ChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 +MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoT +B0VxdWlmYXgxLTArBgNVBAsTJEVxdWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCB +nzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPR +fM6fBeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+AcJkVV5MW +8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kCAwEAAaOCAQkwggEFMHAG +A1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UE +CxMkRXF1aWZheCBTZWN1cmUgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoG +A1UdEAQTMBGBDzIwMTgwODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvS +spXXR9gjIBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQFMAMB +Af8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUAA4GBAFjOKer89961 +zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y7qj/WsjTVbJmcVfewCHrPSqnI0kB +BIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee95 +70+sB3c4 +-----END CERTIFICATE----- + +Digital Signature Trust Co. Global CA 1 +======================================= +-----BEGIN CERTIFICATE----- +MIIDKTCCApKgAwIBAgIENnAVljANBgkqhkiG9w0BAQUFADBGMQswCQYDVQQGEwJVUzEkMCIGA1UE +ChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMREwDwYDVQQLEwhEU1RDQSBFMTAeFw05ODEy +MTAxODEwMjNaFw0xODEyMTAxODQwMjNaMEYxCzAJBgNVBAYTAlVTMSQwIgYDVQQKExtEaWdpdGFs +IFNpZ25hdHVyZSBUcnVzdCBDby4xETAPBgNVBAsTCERTVENBIEUxMIGdMA0GCSqGSIb3DQEBAQUA +A4GLADCBhwKBgQCgbIGpzzQeJN3+hijM3oMv+V7UQtLodGBmE5gGHKlREmlvMVW5SXIACH7TpWJE +NySZj9mDSI+ZbZUTu0M7LklOiDfBu1h//uG9+LthzfNHwJmm8fOR6Hh8AMthyUQncWlVSn5JTe2i +o74CTADKAqjuAQIxZA9SLRN0dja1erQtcQIBA6OCASQwggEgMBEGCWCGSAGG+EIBAQQEAwIABzBo +BgNVHR8EYTBfMF2gW6BZpFcwVTELMAkGA1UEBhMCVVMxJDAiBgNVBAoTG0RpZ2l0YWwgU2lnbmF0 +dXJlIFRydXN0IENvLjERMA8GA1UECxMIRFNUQ0EgRTExDTALBgNVBAMTBENSTDEwKwYDVR0QBCQw +IoAPMTk5ODEyMTAxODEwMjNagQ8yMDE4MTIxMDE4MTAyM1owCwYDVR0PBAQDAgEGMB8GA1UdIwQY +MBaAFGp5fpFpRhgTCgJ3pVlbYJglDqL4MB0GA1UdDgQWBBRqeX6RaUYYEwoCd6VZW2CYJQ6i+DAM +BgNVHRMEBTADAQH/MBkGCSqGSIb2fQdBAAQMMAobBFY0LjADAgSQMA0GCSqGSIb3DQEBBQUAA4GB +ACIS2Hod3IEGtgllsofIH160L+nEHvI8wbsEkBFKg05+k7lNQseSJqBcNJo4cvj9axY+IO6CizEq +kzaFI4iKPANo08kJD038bKTaKHKTDomAsH3+gG9lbRgzl4vCa4nuYD3Im+9/KzJic5PLPON74nZ4 +RbyhkwS7hp86W0N6w4pl +-----END CERTIFICATE----- + +Digital Signature Trust Co. 
Global CA 3 +======================================= +-----BEGIN CERTIFICATE----- +MIIDKTCCApKgAwIBAgIENm7TzjANBgkqhkiG9w0BAQUFADBGMQswCQYDVQQGEwJVUzEkMCIGA1UE +ChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMREwDwYDVQQLEwhEU1RDQSBFMjAeFw05ODEy +MDkxOTE3MjZaFw0xODEyMDkxOTQ3MjZaMEYxCzAJBgNVBAYTAlVTMSQwIgYDVQQKExtEaWdpdGFs +IFNpZ25hdHVyZSBUcnVzdCBDby4xETAPBgNVBAsTCERTVENBIEUyMIGdMA0GCSqGSIb3DQEBAQUA +A4GLADCBhwKBgQC/k48Xku8zExjrEH9OFr//Bo8qhbxe+SSmJIi2A7fBw18DW9Fvrn5C6mYjuGOD +VvsoLeE4i7TuqAHhzhy2iCoiRoX7n6dwqUcUP87eZfCocfdPJmyMvMa1795JJ/9IKn3oTQPMx7JS +xhcxEzu1TdvIxPbDDyQq2gyd55FbgM2UnQIBA6OCASQwggEgMBEGCWCGSAGG+EIBAQQEAwIABzBo +BgNVHR8EYTBfMF2gW6BZpFcwVTELMAkGA1UEBhMCVVMxJDAiBgNVBAoTG0RpZ2l0YWwgU2lnbmF0 +dXJlIFRydXN0IENvLjERMA8GA1UECxMIRFNUQ0EgRTIxDTALBgNVBAMTBENSTDEwKwYDVR0QBCQw +IoAPMTk5ODEyMDkxOTE3MjZagQ8yMDE4MTIwOTE5MTcyNlowCwYDVR0PBAQDAgEGMB8GA1UdIwQY +MBaAFB6CTShlgDzJQW6sNS5ay97u+DlbMB0GA1UdDgQWBBQegk0oZYA8yUFurDUuWsve7vg5WzAM +BgNVHRMEBTADAQH/MBkGCSqGSIb2fQdBAAQMMAobBFY0LjADAgSQMA0GCSqGSIb3DQEBBQUAA4GB +AEeNg61i8tuwnkUiBbmi1gMOOHLnnvx75pO2mqWilMg0HZHRxdf0CiUPPXiBng+xZ8SQTGPdXqfi +up/1902lMXucKS1M/mQ+7LZT/uqb7YLbdHVLB3luHtgZg3Pe9T7Qtd7nS2h9Qy4qIOF+oHhEngj1 +mPnHfxsb1gYgAlihw6ID +-----END CERTIFICATE----- + +Verisign Class 3 Public Primary Certification Authority +======================================================= +-----BEGIN CERTIFICATE----- +MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkGA1UEBhMCVVMx +FzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmltYXJ5 +IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVow +XzELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAz +IFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUA +A4GNADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhEBarsAx94 +f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/isI19wKTakyYbnsZogy1Ol +hec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0GCSqGSIb3DQEBAgUAA4GBALtMEivPLCYA +TxQT3ab7/AoRhIzzKBxnki98tsX63/Dolbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59Ah +WM1pF+NEHJwZRDmJXNycAA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2Omuf +Tqj/ZA1k +-----END CERTIFICATE----- + +Verisign Class 3 Public Primary Certification Authority - G2 +============================================================ +-----BEGIN CERTIFICATE----- +MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJBgNVBAYTAlVT +MRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMgUHJpbWFy +eSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2ln +biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVz +dCBOZXR3b3JrMB4XDTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVT +MRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMgUHJpbWFy +eSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2ln +biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVz +dCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCO +FoUgRm1HP9SFIIThbbP4pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71 +lSk8UOg013gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwIDAQAB +MA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSkU01UbSuvDV1Ai2TT +1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7iF6YM40AIOw7n60RzKprxaZLvcRTD +Oaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpYoJ2daZH9 +-----END CERTIFICATE----- + +GlobalSign Root CA +================== +-----BEGIN CERTIFICATE----- 
+MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkGA1UEBhMCQkUx +GTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jvb3QgQ0ExGzAZBgNVBAMTEkds +b2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAwMDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNV +BAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYD +VQQDExJHbG9iYWxTaWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDa +DuaZjc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavpxy0Sy6sc +THAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp1Wrjsok6Vjk4bwY8iGlb +Kk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdGsnUOhugZitVtbNV4FpWi6cgKOOvyJBNP +c1STE4U6G7weNLWLBYy5d4ux2x8gkasJU26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrX +gzT/LCrBbBlDSgeF59N89iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0BAQUF +AAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOzyj1hTdNGCbM+w6Dj +Y1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE38NflNUVyRRBnMRddWQVDf9VMOyG +j/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymPAbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhH +hm4qxFYxldBniYUr+WymXUadDKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveC +X4XSQRjbgbMEHMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== +-----END CERTIFICATE----- + +GlobalSign Root CA - R2 +======================= +-----BEGIN CERTIFICATE----- +MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4GA1UECxMXR2xv +YmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2Jh +bFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxT +aWduIFJvb3QgQ0EgLSBSMjETMBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2ln +bjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6 +ErPLv4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8eoLrvozp +s6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklqtTleiDTsvHgMCJiEbKjN +S7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzdC9XZzPnqJworc5HGnRusyMvo4KD0L5CL +TfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pazq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6C +ygPCm48CAwEAAaOBnDCBmTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQUm+IHV2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5nbG9i +YWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG3lm0mi3f3BmGLjAN +BgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4GsJ0/WwbgcQ3izDJr86iw8bmEbTUsp +9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu +01yiPqFbQfXf5WRDLenVOavSot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG7 +9G+dwfCMNYxdAfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 +TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== +-----END CERTIFICATE----- + +ValiCert Class 1 VA +=================== +-----BEGIN CERTIFICATE----- +MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRp +b24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs +YXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZh +bGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNTIy +MjM0OFoXDTE5MDYyNTIyMjM0OFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0 +d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENsYXNzIDEg +UG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0 +LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMIGfMA0GCSqGSIb3DQEBAQUA +A4GNADCBiQKBgQDYWYJ6ibiWuqYvaG9YLqdUHAZu9OqNSLwxlBfw8068srg1knaw0KWlAdcAAxIi +GQj4/xEjm84H9b9pGib+TunRf50sQB1ZaG6m+FiwnRqP0z/x3BkGgagO4DrdyFNFCQbmD3DD+kCm 
+DuJWBQ8YTfwggtFzVXSNdnKgHZ0dwN0/cQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFBoPUn0LBwG +lN+VYH+Wexf+T3GtZMjdd9LvWVXoP+iOBSoh8gfStadS/pyxtuJbdxdA6nLWI8sogTLDAHkY7FkX +icnGah5xyf23dKUlRWnFSKsZ4UWKJWsZ7uW7EvV/96aNUcPwnXS3qT6gpf+2SQMT2iLM7XGCK5nP +Orf1LXLI +-----END CERTIFICATE----- + +ValiCert Class 2 VA +=================== +-----BEGIN CERTIFICATE----- +MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRp +b24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs +YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZh +bGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAw +MTk1NFoXDTE5MDYyNjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0 +d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIg +UG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0 +LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMIGfMA0GCSqGSIb3DQEBAQUA +A4GNADCBiQKBgQDOOnHK5avIWZJV16vYdA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVC +CSRrCl6zfN1SLUzm1NZ9WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7Rf +ZHM047QSv4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9vUJSZ +SWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTuIYEZoDJJKPTEjlbV +UjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwCW/POuZ6lcg5Ktz885hZo+L7tdEy8 +W9ViH0Pd +-----END CERTIFICATE----- + +RSA Root Certificate 1 +====================== +-----BEGIN CERTIFICATE----- +MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRp +b24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs +YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZh +bGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAw +MjIzM1oXDTE5MDYyNjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0 +d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMg +UG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0 +LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMIGfMA0GCSqGSIb3DQEBAQUA +A4GNADCBiQKBgQDjmFGWHOjVsQaBalfDcnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td +3zZxFJmP3MKS8edgkpfs2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89H +BFx1cQqYJJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliEZwgs +3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJn0WuPIqpsHEzXcjF +V9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/APhmcGcwTTYJBtYze4D1gCCAPRX5r +on+jjBXu +-----END CERTIFICATE----- + +Verisign Class 3 Public Primary Certification Authority - G3 +============================================================ +-----BEGIN CERTIFICATE----- +MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQswCQYDVQQGEwJV +UzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0IE5ldHdv +cmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl +IG9ubHkxRTBDBgNVBAMTPFZlcmlTaWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRy +dXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJbmMuIC0gRm9yIGF1dGhv +cml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWduIENsYXNzIDMgUHVibGljIFByaW1hcnkg +Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAMu6nFL8eB8aHm8bN3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1 +EUGO+i2tKmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGukxUc 
+cLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBmCC+Vk7+qRy+oRpfw +EuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJXwzw3sJ2zq/3avL6QaaiMxTJ5Xpj +055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWuimi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEA +ERSWwauSCPc/L8my/uRan2Te2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5f +j267Cz3qWhMeDGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC +/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565pF4ErWjfJXir0 +xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGtTxzhT5yvDwyd93gN2PQ1VoDa +t20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== +-----END CERTIFICATE----- + +Verisign Class 4 Public Primary Certification Authority - G3 +============================================================ +-----BEGIN CERTIFICATE----- +MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQswCQYDVQQGEwJV +UzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0IE5ldHdv +cmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl +IG9ubHkxRTBDBgNVBAMTPFZlcmlTaWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRy +dXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJbmMuIC0gRm9yIGF1dGhv +cml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWduIENsYXNzIDQgUHVibGljIFByaW1hcnkg +Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAK3LpRFpxlmr8Y+1GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaS +tBO3IFsJ+mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0GbdU6LM +8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLmNxdLMEYH5IBtptiW +Lugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XYufTsgsbSPZUd5cBPhMnZo0QoBmrX +Razwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEA +j/ola09b5KROJ1WrIhVZPMq1CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXtt +mhwwjIDLk5Mqg6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKm +fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c2NU8Qh0XwRJd +RTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/bLvSHgCwIe34QWKCudiyxLtG +UPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg== +-----END CERTIFICATE----- + +Entrust.net Secure Server CA +============================ +-----BEGIN CERTIFICATE----- +MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMCVVMxFDASBgNV +BAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5uZXQvQ1BTIGluY29ycC4gYnkg +cmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRl +ZDE6MDgGA1UEAxMxRW50cnVzdC5uZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhv +cml0eTAeFw05OTA1MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIG +A1UEChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBi +eSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1p +dGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUAA4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQ +aO2f55M28Qpku0f1BBc/I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5 +gXpa0zf3wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OCAdcw +ggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHboIHYpIHVMIHSMQsw +CQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5l +dC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF +bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENl 
+cnRpZmljYXRpb24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu +dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0MFqBDzIwMTkw +NTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8BdiE1U9s/8KAGv7UISX8+1i0Bow +HQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAaMAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EA +BAwwChsEVjQuMAMCBJAwDQYJKoZIhvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyN +Ewr75Ji174z4xRAN95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9 +n9cd2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI= +-----END CERTIFICATE----- + +Entrust.net Premium 2048 Secure Server CA +========================================= +-----BEGIN CERTIFICATE----- +MIIEXDCCA0SgAwIBAgIEOGO5ZjANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChMLRW50cnVzdC5u +ZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBpbmNvcnAuIGJ5IHJlZi4gKGxp +bWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNV +BAMTKkVudHJ1c3QubmV0IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQx +NzUwNTFaFw0xOTEyMjQxODIwNTFaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3 +d3d3LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTEl +MCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEGA1UEAxMqRW50cnVzdC5u +ZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgpMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEArU1LqRKGsuqjIAcVFmQqK0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOL +Gp18EzoOH1u3Hs/lJBQesYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSr +hRSGlVuXMlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVTXTzW +nLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/HoZdenoVve8AjhUi +VBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH4QIDAQABo3QwcjARBglghkgBhvhC +AQEEBAMCAAcwHwYDVR0jBBgwFoAUVeSB0RGAvtiJuQijMfmhJAkWuXAwHQYDVR0OBBYEFFXkgdER +gL7YibkIozH5oSQJFrlwMB0GCSqGSIb2fQdBAAQQMA4bCFY1LjA6NC4wAwIEkDANBgkqhkiG9w0B +AQUFAAOCAQEAWUesIYSKF8mciVMeuoCFGsY8Tj6xnLZ8xpJdGGQC49MGCBFhfGPjK50xA3B20qMo +oPS7mmNz7W3lKtvtFKkrxjYR0CvrB4ul2p5cGZ1WEvVUKcgF7bISKo30Axv/55IQh7A6tcOdBTcS +o8f0FbnVpDkWm1M6I5HxqIKiaohowXkCIryqptau37AUX7iH0N18f3v/rxzP5tsHrV7bhZ3QKw0z +2wTR5klAEyt2+z7pnIkPFc4YsIV4IU9rTw76NmfNB/L/CNDi3tm/Kq+4h4YhPATKt5Rof8886ZjX +OP/swNlQ8C5LWK5Gb9Auw2DaclVyvUxFnmG6v4SBkgPR0ml8xQ== +-----END CERTIFICATE----- + +Baltimore CyberTrust Root +========================= +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJRTESMBAGA1UE +ChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYDVQQDExlCYWx0aW1vcmUgQ3li +ZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoXDTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMC +SUUxEjAQBgNVBAoTCUJhbHRpbW9yZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFs +dGltb3JlIEN5YmVyVHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKME +uyKrmD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjrIZ3AQSsB +UnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeKmpYcqWe4PwzV9/lSEy/C +G9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSuXmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9 +XbIGevOF6uvUA65ehD5f/xXtabz5OTZydc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjpr +l3RjM71oGDHweI12v/yejl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoI +VDaGezq1BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEB +BQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT929hkTI7gQCvlYpNRh +cL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3WgxjkzSswF07r51XgdIGn9w/xZchMB5 +hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsa +Y71k5h+3zvDyny67G7fyUIhzksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9H +RCwBXbsdtTLSR9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp 
+-----END CERTIFICATE----- + +Equifax Secure Global eBusiness CA +================================== +-----BEGIN CERTIFICATE----- +MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEcMBoGA1UEChMT +RXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBTZWN1cmUgR2xvYmFsIGVCdXNp +bmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIwMDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMx +HDAaBgNVBAoTE0VxdWlmYXggU2VjdXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEds +b2JhbCBlQnVzaW5lc3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRV +PEnCUdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc58O/gGzN +qfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/o5brhTMhHD4ePmBudpxn +hcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAHMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j +BBgwFoAUvqigdHJQa0S3ySPY+6j/s1draGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hs +MA0GCSqGSIb3DQEBBAUAA4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okEN +I7SS+RkAZ70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv8qIY +NMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV +-----END CERTIFICATE----- + +Equifax Secure eBusiness CA 1 +============================= +-----BEGIN CERTIFICATE----- +MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMT +RXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENB +LTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQwMDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UE +ChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNz +IENBLTEwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ +1MRoRvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBuWqDZQu4a +IZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKwEnv+j6YDAgMBAAGjZjBk +MBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFEp4MlIR21kW +Nl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRKeDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQF +AAOBgQB1W6ibAxHm6VZMzfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5 +lSE/9dR+WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN/Bf+ +KpYrtWKmpj29f5JZzVoqgrI3eQ== +-----END CERTIFICATE----- + +Equifax Secure eBusiness CA 2 +============================= +-----BEGIN CERTIFICATE----- +MIIDIDCCAomgAwIBAgIEN3DPtTANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJVUzEXMBUGA1UE +ChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2VjdXJlIGVCdXNpbmVzcyBDQS0y +MB4XDTk5MDYyMzEyMTQ0NVoXDTE5MDYyMzEyMTQ0NVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoT +DkVxdWlmYXggU2VjdXJlMSYwJAYDVQQLEx1FcXVpZmF4IFNlY3VyZSBlQnVzaW5lc3MgQ0EtMjCB +nzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA5Dk5kx5SBhsoNviyoynF7Y6yEb3+6+e0dMKP/wXn +2Z0GvxLIPw7y1tEkshHe0XMJitSxLJgJDR5QRrKDpkWNYmi7hRsgcDKqQM2mll/EcTc/BPO3QSQ5 +BxoeLmFYoBIL5aXfxavqN3HMHMg3OrmXUqesxWoklE6ce8/AatbfIb0CAwEAAaOCAQkwggEFMHAG +A1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORXF1aWZheCBTZWN1cmUx +JjAkBgNVBAsTHUVxdWlmYXggU2VjdXJlIGVCdXNpbmVzcyBDQS0yMQ0wCwYDVQQDEwRDUkwxMBoG +A1UdEAQTMBGBDzIwMTkwNjIzMTIxNDQ1WjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUUJ4L6q9e +uSBIplBqy/3YIHqngnYwHQYDVR0OBBYEFFCeC+qvXrkgSKZQasv92CB6p4J2MAwGA1UdEwQFMAMB +Af8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUAA4GBAAyGgq3oThr1 +jokn4jVYPSm0B482UJW/bsGe68SQsoWou7dC4A8HOd/7npCy0cE+U58DRLB+S/Rv5Hwf5+Kx5Lia +78O9zt4LMjTZ3ijtM2vE1Nc9ElirfQkty3D1E4qUoSek1nDFbZS1yX2doNLGCEnZZpum0/QL3MUm +V+GRMOrN +-----END CERTIFICATE----- + +AddTrust Low-Value Services Root +================================ +-----BEGIN CERTIFICATE----- +MIIEGDCCAwCgAwIBAgIBATANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQGEwJTRTEUMBIGA1UEChML +QWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYDVQQDExhBZGRU 
+cnVzdCBDbGFzcyAxIENBIFJvb3QwHhcNMDAwNTMwMTAzODMxWhcNMjAwNTMwMTAzODMxWjBlMQsw +CQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBO +ZXR3b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwggEiMA0GCSqGSIb3DQEB +AQUAA4IBDwAwggEKAoIBAQCWltQhSWDia+hBBwzexODcEyPNwTXH+9ZOEQpnXvUGW2ulCDtbKRY6 +54eyNAbFvAWlA3yCyykQruGIgb3WntP+LVbBFc7jJp0VLhD7Bo8wBN6ntGO0/7Gcrjyvd7ZWxbWr +oulpOj0OM3kyP3CCkplhbY0wCI9xP6ZIVxn4JdxLZlyldI+Yrsj5wAYi56xz36Uu+1LcsRVlIPo1 +Zmne3yzxbrww2ywkEtvrNTVokMsAsJchPXQhI2U0K7t4WaPW4XY5mqRJjox0r26kmqPZm9I4XJui +GMx1I4S+6+JNM3GOGvDC+Mcdoq0Dlyz4zyXG9rgkMbFjXZJ/Y/AlyVMuH79NAgMBAAGjgdIwgc8w +HQYDVR0OBBYEFJWxtPCUtr3H2tERCSG+wa9J/RB7MAsGA1UdDwQEAwIBBjAPBgNVHRMBAf8EBTAD +AQH/MIGPBgNVHSMEgYcwgYSAFJWxtPCUtr3H2tERCSG+wa9J/RB7oWmkZzBlMQswCQYDVQQGEwJT +RTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSEw +HwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBACxt +ZBsfzQ3duQH6lmM0MkhHma6X7f1yFqZzR1r0693p9db7RcwpiURdv0Y5PejuvE1Uhh4dbOMXJ0Ph +iVYrqW9yTkkz43J8KiOavD7/KCrto/8cI7pDVwlnTUtiBi34/2ydYB7YHEt9tTEv2dB8Xfjea4MY +eDdXL+gzB2ffHsdrKpV2ro9Xo/D0UrSpUwjP4E/TelOL/bscVjby/rK25Xa71SJlpz/+0WatC7xr +mYbvP33zGDLKe8bjq2RGlfgmadlVg3sslgf/WSxEo8bl6ancoWOAWiFeIc9TVPC6b4nbqKqVz4vj +ccweGyBECMB6tkD9xOQ14R0WHNC8K47Wcdk= +-----END CERTIFICATE----- + +AddTrust External Root +====================== +-----BEGIN CERTIFICATE----- +MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEUMBIGA1UEChML +QWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFsIFRUUCBOZXR3b3JrMSIwIAYD +VQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEw +NDgzOFowbzELMAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRU +cnVzdCBFeHRlcm5hbCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0Eg +Um9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvtH7xsD821 ++iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9uMq/NzgtHj6RQa1wVsfw +Tz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzXmk6vBbOmcZSccbNQYArHE504B4YCqOmo +aSYYkKtMsE8jqzpPhNjfzp/haW+710LXa0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy +2xSoRcRdKn23tNbE7qzNE0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv7 +7+ldU9U0WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYDVR0P +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0Jvf6xCZU7wO94CTL +VBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRUcnVzdCBBQjEmMCQGA1UECxMdQWRk +VHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsxIjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENB +IFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZl +j7DYd7usQWxHYINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5 +6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvCNr4TDea9Y355 +e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEXc4g/VhsxOBi0cQ+azcgOno4u +G+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5amnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ= +-----END CERTIFICATE----- + +AddTrust Public Services Root +============================= +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIBATANBgkqhkiG9w0BAQUFADBkMQswCQYDVQQGEwJTRTEUMBIGA1UEChML +QWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSAwHgYDVQQDExdBZGRU +cnVzdCBQdWJsaWMgQ0EgUm9vdDAeFw0wMDA1MzAxMDQxNTBaFw0yMDA1MzAxMDQxNTBaMGQxCzAJ +BgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5l +dHdvcmsxIDAeBgNVBAMTF0FkZFRydXN0IFB1YmxpYyBDQSBSb290MIIBIjANBgkqhkiG9w0BAQEF +AAOCAQ8AMIIBCgKCAQEA6Rowj4OIFMEg2Dybjxt+A3S72mnTRqX4jsIMEZBRpS9mVEBV6tsfSlbu +nyNu9DnLoblv8n75XYcmYZ4c+OLspoH4IcUkzBEMP9smcnrHAZcHF/nXGCwwfQ56HmIexkvA/X1i 
+d9NEHif2P0tEs7c42TkfYNVRknMDtABp4/MUTu7R3AnPdzRGULD4EfL+OHn3Bzn+UZKXC1sIXzSG +Aa2Il+tmzV7R/9x98oTaunet3IAIx6eH1lWfl2royBFkuucZKT8Rs3iQhCBSWxHveNCD9tVIkNAw +HM+A+WD+eeSI8t0A65RF62WUaUC6wNW0uLp9BBGo6zEFlpROWCGOn9Bg/QIDAQABo4HRMIHOMB0G +A1UdDgQWBBSBPjfYkrAfd59ctKtzquf2NGAv+jALBgNVHQ8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zCBjgYDVR0jBIGGMIGDgBSBPjfYkrAfd59ctKtzquf2NGAv+qFopGYwZDELMAkGA1UEBhMCU0Ux +FDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQLExRBZGRUcnVzdCBUVFAgTmV0d29yazEgMB4G +A1UEAxMXQWRkVHJ1c3QgUHVibGljIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBAAP3FUr4 +JNojVhaTdt02KLmuG7jD8WS6IBh4lSknVwW8fCr0uVFV2ocC3g8WFzH4qnkuCRO7r7IgGRLlk/lL ++YPoRNWyQSW/iHVv/xD8SlTQX/D67zZzfRs2RcYhbbQVuE7PnFylPVoAjgbjPGsye/Kf8Lb93/Ao +GEjwxrzQvzSAlsJKsW2Ox5BF3i9nrEUEo3rcVZLJR2bYGozH7ZxOmuASu7VqTITh4SINhwBk/ox9 +Yjllpu9CtoAlEmEBqCQTcAARJl/6NVDFSMwGR+gn2HCNX2TmoUQmXiLsks3/QppEIW1cxeMiHV9H +EufOX1362KqxMy3ZdvJOOjMMK7MtkAY= +-----END CERTIFICATE----- + +AddTrust Qualified Certificates Root +==================================== +-----BEGIN CERTIFICATE----- +MIIEHjCCAwagAwIBAgIBATANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJTRTEUMBIGA1UEChML +QWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSMwIQYDVQQDExpBZGRU +cnVzdCBRdWFsaWZpZWQgQ0EgUm9vdDAeFw0wMDA1MzAxMDQ0NTBaFw0yMDA1MzAxMDQ0NTBaMGcx +CzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQ +IE5ldHdvcmsxIzAhBgNVBAMTGkFkZFRydXN0IFF1YWxpZmllZCBDQSBSb290MIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEA5B6a/twJWoekn0e+EV+vhDTbYjx5eLfpMLXsDBwqxBb/4Oxx +64r1EW7tTw2R0hIYLUkVAcKkIhPHEWT/IhKauY5cLwjPcWqzZwFZ8V1G87B4pfYOQnrjfxvM0PC3 +KP0q6p6zsLkEqv32x7SxuCqg+1jxGaBvcCV+PmlKfw8i2O+tCBGaKZnhqkRFmhJePp1tUvznoD1o +L/BLcHwTOK28FSXx1s6rosAx1i+f4P8UWfyEk9mHfExUE+uf0S0R+Bg6Ot4l2ffTQO2kBhLEO+GR +wVY18BTcZTYJbqukB8c10cIDMzZbdSZtQvESa0NvS3GU+jQd7RNuyoB/mC9suWXY6QIDAQABo4HU +MIHRMB0GA1UdDgQWBBQ5lYtii1zJ1IC6WA+XPxUIQ8yYpzALBgNVHQ8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zCBkQYDVR0jBIGJMIGGgBQ5lYtii1zJ1IC6WA+XPxUIQ8yYp6FrpGkwZzELMAkGA1UE +BhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQLExRBZGRUcnVzdCBUVFAgTmV0d29y +azEjMCEGA1UEAxMaQWRkVHJ1c3QgUXVhbGlmaWVkIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQAD +ggEBABmrder4i2VhlRO6aQTvhsoToMeqT2QbPxj2qC0sVY8FtzDqQmodwCVRLae/DLPt7wh/bDxG +GuoYQ992zPlmhpwsaPXpF/gxsxjE1kh9I0xowX67ARRvxdlu3rsEQmr49lx95dr6h+sNNVJn0J6X +dgWTP5XHAeZpVTh/EGGZyeNfpso+gmNIquIISD6q8rKFYqa0p9m9N5xotS1WfbC3P6CxB9bpT9ze +RXEwMn8bLgn5v1Kh7sKAPgZcLlVAwRv1cEWw3F369nJad9Jjzc9YiQBCYz95OdBEsIJuQRno3eDB +iFrRHnGTHyQwdOUeqN48Jzd/g66ed8/wMLH/S5noxqE= +-----END CERTIFICATE----- + +Entrust Root Certification Authority +==================================== +-----BEGIN CERTIFICATE----- +MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMCVVMxFjAUBgNV +BAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0Lm5ldC9DUFMgaXMgaW5jb3Jw +b3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMWKGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsG +A1UEAxMkRW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0 +MloXDTI2MTEyNzIwNTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMu +MTkwNwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSByZWZlcmVu +Y2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNVBAMTJEVudHJ1c3QgUm9v +dCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB +ALaVtkNC+sZtKm9I35RMOVcF7sN5EUFoNu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYsz +A9u3g3s+IIRe7bJWKKf44LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOww +Cj0Yzfv9KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGIrb68 +j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi94DkZfs0Nw4pgHBN 
+rziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOBsDCBrTAOBgNVHQ8BAf8EBAMCAQYw +DwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAigA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1 +MzQyWjAfBgNVHSMEGDAWgBRokORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DH +hmak8fdLQ/uEvW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA +A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9tO1KzKtvn1ISM +Y/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6ZuaAGAT/3B+XxFNSRuzFVJ7yVTa +v52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTS +W3iDVuycNsMm4hH2Z0kdkquM++v/eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0 +tHuu2guQOHXvgR1m0vdXcDazv/wor3ElhVsT/h5/WrQ8 +-----END CERTIFICATE----- + +RSA Security 2048 v3 +==================== +-----BEGIN CERTIFICATE----- +MIIDYTCCAkmgAwIBAgIQCgEBAQAAAnwAAAAKAAAAAjANBgkqhkiG9w0BAQUFADA6MRkwFwYDVQQK +ExBSU0EgU2VjdXJpdHkgSW5jMR0wGwYDVQQLExRSU0EgU2VjdXJpdHkgMjA0OCBWMzAeFw0wMTAy +MjIyMDM5MjNaFw0yNjAyMjIyMDM5MjNaMDoxGTAXBgNVBAoTEFJTQSBTZWN1cml0eSBJbmMxHTAb +BgNVBAsTFFJTQSBTZWN1cml0eSAyMDQ4IFYzMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAt49VcdKA3XtpeafwGFAyPGJn9gqVB93mG/Oe2dJBVGutn3y+Gc37RqtBaB4Y6lXIL5F4iSj7 +Jylg/9+PjDvJSZu1pJTOAeo+tWN7fyb9Gd3AIb2E0S1PRsNO3Ng3OTsor8udGuorryGlwSMiuLgb +WhOHV4PR8CDn6E8jQrAApX2J6elhc5SYcSa8LWrg903w8bYqODGBDSnhAMFRD0xS+ARaqn1y07iH +KrtjEAMqs6FPDVpeRrc9DvV07Jmf+T0kgYim3WBU6JU2PcYJk5qjEoAAVZkZR73QpXzDuvsf9/UP ++Ky5tfQ3mBMY3oVbtwyCO4dvlTlYMNpuAWgXIszACwIDAQABo2MwYTAPBgNVHRMBAf8EBTADAQH/ +MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQHw1EwpKrpRa41JPr/JCwz0LGdjDAdBgNVHQ4E +FgQUB8NRMKSq6UWuNST6/yQsM9CxnYwwDQYJKoZIhvcNAQEFBQADggEBAF8+hnZuuDU8TjYcHnmY +v/3VEhF5Ug7uMYm83X/50cYVIeiKAVQNOvtUudZj1LGqlk2iQk3UUx+LEN5/Zb5gEydxiKRz44Rj +0aRV4VCT5hsOedBnvEbIvz8XDZXmxpBp3ue0L96VfdASPz0+f00/FGj1EVDVwfSQpQgdMWD/YIwj +VAqv/qFuxdF6Kmh4zx6CCiC0H63lhbJqaHVOrSU3lIW+vaHU6rcMSzyd6BIA8F+sDeGscGNz9395 +nzIlQnQFgCi/vcEkllgVsRch6YlL2weIZ/QVrXA+L02FO8K32/6YaCOJ4XQP3vTFhGMpG8zLB8kA +pKnXwiJPZ9d37CAFYd4= +-----END CERTIFICATE----- + +GeoTrust Global CA +================== +-----BEGIN CERTIFICATE----- +MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVTMRYwFAYDVQQK +Ew1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9iYWwgQ0EwHhcNMDIwNTIxMDQw +MDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5j +LjEbMBkGA1UEAxMSR2VvVHJ1c3QgR2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB +CgKCAQEA2swYYzD99BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjo +BbdqfnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDviS2Aelet +8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU1XupGc1V3sjs0l44U+Vc +T4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+bw8HHa8sHo9gOeL6NlMTOdReJivbPagU +vTLrGAMoUgRx5aszPeE4uwc2hGKceeoWMPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBTAephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVk +DBF9qn1luMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKInZ57Q +zxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfStQWVYrmm3ok9Nns4 +d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcFPseKUgzbFbS9bZvlxrFUaKnjaZC2 +mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Unhw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6p +XE0zX5IJL4hmXXeXxx12E6nV5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvm +Mw== +-----END CERTIFICATE----- + +GeoTrust Global CA 2 +==================== +-----BEGIN CERTIFICATE----- +MIIDZjCCAk6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBEMQswCQYDVQQGEwJVUzEWMBQGA1UEChMN +R2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3QgR2xvYmFsIENBIDIwHhcNMDQwMzA0MDUw 
+MDAwWhcNMTkwMzA0MDUwMDAwWjBEMQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5j +LjEdMBsGA1UEAxMUR2VvVHJ1c3QgR2xvYmFsIENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw +ggEKAoIBAQDvPE1APRDfO1MA4Wf+lGAVPoWI8YkNkMgoI5kF6CsgncbzYEbYwbLVjDHZ3CB5JIG/ +NTL8Y2nbsSpr7iFY8gjpeMtvy/wWUsiRxP89c96xPqfCfWbB9X5SJBri1WeR0IIQ13hLTytCOb1k +LUCgsBDTOEhGiKEMuzozKmKY+wCdE1l/bztyqu6mD4b5BWHqZ38MN5aL5mkWRxHCJ1kDs6ZgwiFA +Vvqgx306E+PsV8ez1q6diYD3Aecs9pYrEw15LNnA5IZ7S4wMcoKK+xfNAGw6EzywhIdLFnopsk/b +HdQL82Y3vdj2V7teJHq4PIu5+pIaGoSe2HSPqht/XvT+RSIhAgMBAAGjYzBhMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFHE4NvICMVNHK266ZUapEBVYIAUJMB8GA1UdIwQYMBaAFHE4NvICMVNH +K266ZUapEBVYIAUJMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQUFAAOCAQEAA/e1K6tdEPx7 +srJerJsOflN4WT5CBP51o62sgU7XAotexC3IUnbHLB/8gTKY0UvGkpMzNTEv/NgdRN3ggX+d6Yvh +ZJFiCzkIjKx0nVnZellSlxG5FntvRdOW2TF9AjYPnDtuzywNA0ZF66D0f0hExghAzN4bcLUprbqL +OzRldRtxIR0sFAqwlpW41uryZfspuk/qkZN0abby/+Ea0AzRdoXLiiW9l14sbxWZJue2Kf8i7MkC +x1YAzUm5s2x7UwQa4qjJqhIFI8LO57sEAszAR6LkxCkvW0VXiVHuPOtSCP8HNR6fNWpHSlaY0VqF +H4z1Ir+rzoPz4iIprn2DQKi6bA== +-----END CERTIFICATE----- + +GeoTrust Universal CA +===================== +-----BEGIN CERTIFICATE----- +MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEWMBQGA1UEChMN +R2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVyc2FsIENBMB4XDTA0MDMwNDA1 +MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IElu +Yy4xHjAcBgNVBAMTFUdlb1RydXN0IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIP +ADCCAgoCggIBAKYVVaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9t +JPi8cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTTQjOgNB0e +RXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFhF7em6fgemdtzbvQKoiFs +7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2vc7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d +8Lsrlh/eezJS/R27tQahsiFepdaVaH/wmZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7V +qnJNk22CDtucvc+081xdVHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3Cga +Rr0BHdCXteGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZf9hB +Z3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfReBi9Fi1jUIxaS5BZu +KGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+nhutxx9z3SxPGWX9f5NAEC7S8O08 +ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0 +XG0D08DYj3rWMB8GA1UdIwQYMBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIB +hjANBgkqhkiG9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc +aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fXIwjhmF7DWgh2 +qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzynANXH/KttgCJwpQzgXQQpAvvL +oJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0zuzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsK +xr2EoyNB3tZ3b4XUhRxQ4K5RirqNPnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxF +KyDuSN/n3QmOGKjaQI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2 +DFKWkoRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9ER/frslK +xfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQtDF4JbAiXfKM9fJP/P6EU +p8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/SfuvmbJxPgWp6ZKy7PtXny3YuxadIwVyQD8vI +P/rmMuGNG2+k5o7Y+SlIis5z/iw= +-----END CERTIFICATE----- + +GeoTrust Universal CA 2 +======================= +-----BEGIN CERTIFICATE----- +MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEWMBQGA1UEChMN +R2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVyc2FsIENBIDIwHhcNMDQwMzA0 +MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3Qg +SW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0 
+DE81WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUGFF+3Qs17 +j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdqXbboW0W63MOhBW9Wjo8Q +JqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxLse4YuU6W3Nx2/zu+z18DwPw76L5GG//a +QMJS9/7jOvdqdzXQ2o3rXhhqMcceujwbKNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2 +WP0+GfPtDCapkzj4T8FdIgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP +20gaXT73y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRthAAn +ZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgocQIgfksILAAX/8sgC +SqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4Lt1ZrtmhN79UNdxzMk+MBB4zsslG +8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2 ++/CfXGJx7Tz0RzgQKzAfBgNVHSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8E +BAMCAYYwDQYJKoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z +dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQL1EuxBRa3ugZ +4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgrFg5fNuH8KrUwJM/gYwx7WBr+ +mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSoag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpq +A1Ihn0CoZ1Dy81of398j9tx4TuaYT1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpg +Y+RdM4kX2TGq2tbzGDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiP +pm8m1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJVOCiNUW7d +FGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH6aLcr34YEoP9VhdBLtUp +gn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwXQMAJKOSLakhT2+zNVVXxxvjpoixMptEm +X36vWkzaH6byHCx+rgIW0lbQL1dTR+iS +-----END CERTIFICATE----- + +America Online Root Certification Authority 1 +============================================= +-----BEGIN CERTIFICATE----- +MIIDpDCCAoygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEcMBoGA1UEChMT +QW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBPbmxpbmUgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSAxMB4XDTAyMDUyODA2MDAwMFoXDTM3MTExOTIwNDMwMFowYzELMAkG +A1UEBhMCVVMxHDAaBgNVBAoTE0FtZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2Eg +T25saW5lIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMTCCASIwDQYJKoZIhvcNAQEBBQAD +ggEPADCCAQoCggEBAKgv6KRpBgNHw+kqmP8ZonCaxlCyfqXfaE0bfA+2l2h9LaaLl+lkhsmj76CG +v2BlnEtUiMJIxUo5vxTjWVXlGbR0yLQFOVwWpeKVBeASrlmLojNoWBym1BW32J/X3HGrfpq/m44z +DyL9Hy7nBzbvYjnF3cu6JRQj3gzGPTzOggjmZj7aUTsWOqMFf6Dch9Wc/HKpoH145LcxVR5lu9Rh +sCFg7RAycsWSJR74kEoYeEfffjA3PlAb2xzTa5qGUwew76wGePiEmf4hjUyAtgyC9mZweRrTT6PP +8c9GsEsPPt2IYriMqQkoO3rHl+Ee5fSfwMCuJKDIodkP1nsmgmkyPacCAwEAAaNjMGEwDwYDVR0T +AQH/BAUwAwEB/zAdBgNVHQ4EFgQUAK3Zo/Z59m50qX8zPYEX10zPM94wHwYDVR0jBBgwFoAUAK3Z +o/Z59m50qX8zPYEX10zPM94wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBBQUAA4IBAQB8itEf +GDeC4Liwo+1WlchiYZwFos3CYiZhzRAW18y0ZTTQEYqtqKkFZu90821fnZmv9ov761KyBZiibyrF +VL0lvV+uyIbqRizBs73B6UlwGBaXCBOMIOAbLjpHyx7kADCVW/RFo8AasAFOq73AI25jP4BKxQft +3OJvx8Fi8eNy1gTIdGcL+oiroQHIb/AUr9KZzVGTfu0uOMe9zkZQPXLjeSWdm4grECDdpbgyn43g +Kd8hdIaC2y+CMMbHNYaz+ZZfRtsMRf3zUMNvxsNIrUam4SdHCh0Om7bCd39j8uB9Gr784N/Xx6ds +sPmuujz9dLQR6FgNgLzTqIA6me11zEZ7 +-----END CERTIFICATE----- + +America Online Root Certification Authority 2 +============================================= +-----BEGIN CERTIFICATE----- +MIIFpDCCA4ygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEcMBoGA1UEChMT +QW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBPbmxpbmUgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSAyMB4XDTAyMDUyODA2MDAwMFoXDTM3MDkyOTE0MDgwMFowYzELMAkG +A1UEBhMCVVMxHDAaBgNVBAoTE0FtZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2Eg +T25saW5lIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMjCCAiIwDQYJKoZIhvcNAQEBBQAD 
+ggIPADCCAgoCggIBAMxBRR3pPU0Q9oyxQcngXssNt79Hc9PwVU3dxgz6sWYFas14tNwC206B89en +fHG8dWOgXeMHDEjsJcQDIPT/DjsS/5uN4cbVG7RtIuOx238hZK+GvFciKtZHgVdEglZTvYYUAQv8 +f3SkWq7xuhG1m1hagLQ3eAkzfDJHA1zEpYNI9FdWboE2JxhP7JsowtS013wMPgwr38oE18aO6lhO +qKSlGBxsRZijQdEt0sdtjRnxrXm3gT+9BoInLRBYBbV4Bbkv2wxrkJB+FFk4u5QkE+XRnRTf04JN +RvCAOVIyD+OEsnpD8l7eXz8d3eOyG6ChKiMDbi4BFYdcpnV1x5dhvt6G3NRI270qv0pV2uh9UPu0 +gBe4lL8BPeraunzgWGcXuVjgiIZGZ2ydEEdYMtA1fHkqkKJaEBEjNa0vzORKW6fIJ/KD3l67Xnfn +6KVuY8INXWHQjNJsWiEOyiijzirplcdIz5ZvHZIlyMbGwcEMBawmxNJ10uEqZ8A9W6Wa6897Gqid +FEXlD6CaZd4vKL3Ob5Rmg0gp2OpljK+T2WSfVVcmv2/LNzGZo2C7HK2JNDJiuEMhBnIMoVxtRsX6 +Kc8w3onccVvdtjc+31D1uAclJuW8tf48ArO3+L5DwYcRlJ4jbBeKuIonDFRH8KmzwICMoCfrHRnj +B453cMor9H124HhnAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFE1FwWg4u3Op +aaEg5+31IqEjFNeeMB8GA1UdIwQYMBaAFE1FwWg4u3OpaaEg5+31IqEjFNeeMA4GA1UdDwEB/wQE +AwIBhjANBgkqhkiG9w0BAQUFAAOCAgEAZ2sGuV9FOypLM7PmG2tZTiLMubekJcmnxPBUlgtk87FY +T15R/LKXeydlwuXK5w0MJXti4/qftIe3RUavg6WXSIylvfEWK5t2LHo1YGwRgJfMqZJS5ivmae2p ++DYtLHe/YUjRYwu5W1LtGLBDQiKmsXeu3mnFzcccobGlHBD7GL4acN3Bkku+KVqdPzW+5X1R+FXg +JXUjhx5c3LqdsKyzadsXg8n33gy8CNyRnqjQ1xU3c6U1uPx+xURABsPr+CKAXEfOAuMRn0T//Zoy +zH1kUQ7rVyZ2OuMeIjzCpjbdGe+n/BLzJsBZMYVMnNjP36TMzCmT/5RtdlwTCJfy7aULTd3oyWgO +ZtMADjMSW7yV5TKQqLPGbIOtd+6Lfn6xqavT4fG2wLHqiMDn05DpKJKUe2h7lyoKZy2FAjgQ5ANh +1NolNscIWC2hp1GvMApJ9aZphwctREZ2jirlmjvXGKL8nDgQzMY70rUXOm/9riW99XJZZLF0Kjhf +GEzfz3EEWjbUvy+ZnOjZurGV5gJLIaFb1cFPj65pbVPbAZO1XB4Y3WRayhgoPmMEEf0cjQAPuDff +Z4qdZqkCapH/E8ovXYO8h5Ns3CRRFgQlZvqz2cK6Kb6aSDiCmfS/O0oxGfm/jiEzFMpPVF/7zvuP +cX/9XhmgD0uRuMRUvAawRY8mkaKO/qk= +-----END CERTIFICATE----- + +Visa eCommerce Root +=================== +-----BEGIN CERTIFICATE----- +MIIDojCCAoqgAwIBAgIQE4Y1TR0/BvLB+WUF1ZAcYjANBgkqhkiG9w0BAQUFADBrMQswCQYDVQQG +EwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRlcm5hdGlvbmFsIFNlcnZpY2Ug +QXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNvbW1lcmNlIFJvb3QwHhcNMDIwNjI2MDIxODM2 +WhcNMjIwNjI0MDAxNjEyWjBrMQswCQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMm +VmlzYSBJbnRlcm5hdGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNv +bW1lcmNlIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvV95WHm6h2mCxlCfL +F9sHP4CFT8icttD0b0/Pmdjh28JIXDqsOTPHH2qLJj0rNfVIsZHBAk4ElpF7sDPwsRROEW+1QK8b +RaVK7362rPKgH1g/EkZgPI2h4H3PVz4zHvtH8aoVlwdVZqW1LS7YgFmypw23RuwhY/81q6UCzyr0 +TP579ZRdhE2o8mCP2w4lPJ9zcc+U30rq299yOIzzlr3xF7zSujtFWsan9sYXiwGd/BmoKoMWuDpI +/k4+oKsGGelT84ATB+0tvz8KPFUgOSwsAGl0lUq8ILKpeeUYiZGo3BxN77t+Nwtd/jmliFKMAGzs +GHxBvfaLdXe6YJ2E5/4tAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEG +MB0GA1UdDgQWBBQVOIMPPyw/cDMezUb+B4wg4NfDtzANBgkqhkiG9w0BAQUFAAOCAQEAX/FBfXxc +CLkr4NWSR/pnXKUTwwMhmytMiUbPWU3J/qVAtmPN3XEolWcRzCSs00Rsca4BIGsDoo8Ytyk6feUW +YFN4PMCvFYP3j1IzJL1kk5fui/fbGKhtcbP3LBfQdCVp9/5rPJS+TUtBjE7ic9DjkCJzQ83z7+pz +zkWKsKZJ/0x9nXGIxHYdkFsd7v3M9+79YKWxehZx0RbQfBI8bGmX265fOZpwLwU8GUYEmSA20GBu +YQa7FkKMcPcw++DbZqMAAb3mLNqRX6BGi01qnD093QVG/na/oAo85ADmJ7f/hC3euiInlhBx6yLt +398znM/jra6O1I7mT1GvFpLgXPYHDw== +-----END CERTIFICATE----- + +Certum Root CA +============== +-----BEGIN CERTIFICATE----- +MIIDDDCCAfSgAwIBAgIDAQAgMA0GCSqGSIb3DQEBBQUAMD4xCzAJBgNVBAYTAlBMMRswGQYDVQQK +ExJVbml6ZXRvIFNwLiB6IG8uby4xEjAQBgNVBAMTCUNlcnR1bSBDQTAeFw0wMjA2MTExMDQ2Mzla +Fw0yNzA2MTExMDQ2MzlaMD4xCzAJBgNVBAYTAlBMMRswGQYDVQQKExJVbml6ZXRvIFNwLiB6IG8u +by4xEjAQBgNVBAMTCUNlcnR1bSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAM6x +wS7TT3zNJc4YPk/EjG+AanPIW1H4m9LcuwBcsaD8dQPugfCI7iNS6eYVM42sLQnFdvkrOYCJ5JdL +kKWoePhzQ3ukYbDYWMzhbGZ+nPMJXlVjhNWo7/OxLjBos8Q82KxujZlakE403Daaj4GIULdtlkIJ 
+89eVgw1BS7Bqa/j8D35in2fE7SZfECYPCE/wpFcozo+47UX2bu4lXapuOb7kky/ZR6By6/qmW6/K +Uz/iDsaWVhFu9+lmqSbYf5VT7QqFiLpPKaVCjF62/IUgAKpoC6EahQGcxEZjgoi2IrHu/qpGWX7P +NSzVttpd90gzFFS269lvzs2I1qsb2pY7HVkCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkq +hkiG9w0BAQUFAAOCAQEAuI3O7+cUus/usESSbLQ5PqKEbq24IXfS1HeCh+YgQYHu4vgRt2PRFze+ +GXYkHAQaTOs9qmdvLdTN/mUxcMUbpgIKumB7bVjCmkn+YzILa+M6wKyrO7Do0wlRjBCDxjTgxSvg +GrZgFCdsMneMvLJymM/NzD+5yCRCFNZX/OYmQ6kd5YCQzgNUKD73P9P4Te1qCjqTE5s7FCMTY5w/ +0YcneeVMUeMBrYVdGjux1XMQpNPyvG5k9VpWkKjHDkx0Dy5xO/fIR/RpbxXyEV6DHpx8Uq79AtoS +qFlnGNu8cN2bsWntgM6JQEhqDjXKKWYVIZQs6GAqm4VKQPNriiTsBhYscw== +-----END CERTIFICATE----- + +Comodo AAA Services root +======================== +-----BEGIN CERTIFICATE----- +MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEbMBkGA1UECAwS +R3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRowGAYDVQQKDBFDb21vZG8gQ0Eg +TGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmljYXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAw +MFoXDTI4MTIzMTIzNTk1OVowezELMAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hl +c3RlcjEQMA4GA1UEBwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNV +BAMMGEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQuaBtDFcCLNSS1UY8y2bmhG +C1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe3M/vg4aijJRPn2jymJBGhCfHdr/jzDUs +i14HZGWCwEiwqJH5YZ92IFCokcdmtet4YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszW +Y19zjNoFmag4qMsXeDZRrOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjH +Ypy+g8cmez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQUoBEK +Iz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wewYDVR0f +BHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20vQUFBQ2VydGlmaWNhdGVTZXJ2aWNl +cy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29tb2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2Vz +LmNybDANBgkqhkiG9w0BAQUFAAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm +7l3sAg9g1o1QGE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz +Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2G9w84FoVxp7Z +8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsil2D4kF501KKaU73yqWjgom7C +12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== +-----END CERTIFICATE----- + +Comodo Secure Services root +=========================== +-----BEGIN CERTIFICATE----- +MIIEPzCCAyegAwIBAgIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJHQjEbMBkGA1UECAwS +R3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRowGAYDVQQKDBFDb21vZG8gQ0Eg +TGltaXRlZDEkMCIGA1UEAwwbU2VjdXJlIENlcnRpZmljYXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAw +MDAwMFoXDTI4MTIzMTIzNTk1OVowfjELMAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFu +Y2hlc3RlcjEQMA4GA1UEBwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxJDAi +BgNVBAMMG1NlY3VyZSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBAMBxM4KK0HDrc4eCQNUd5MvJDkKQ+d40uaG6EfQlhfPMcm3ye5drswfxdySRXyWP +9nQ95IDC+DwN879A6vfIUtFyb+/Iq0G4bi4XKpVpDM3SHpR7LZQdqnXXs5jLrLxkU0C8j6ysNstc +rbvd4JQX7NFc0L/vpZXJkMWwrPsbQ996CF23uPJAGysnnlDOXmWCiIxe004MeuoIkbY2qitC++rC +oznl2yY4rYsK7hljxxwk3wN42ubqwUcaCwtGCd0C/N7Lh1/XMGNooa7cMqG6vv5Eq2i2pRcV/b3V +p6ea5EQz6YiO/O1R65NxTq0B50SOqy3LqP4BSUjwwN3HaNiS/j0CAwEAAaOBxzCBxDAdBgNVHQ4E +FgQUPNiTiMLAggnMAZkGkyDpnnAJY08wDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8w +gYEGA1UdHwR6MHgwO6A5oDeGNWh0dHA6Ly9jcmwuY29tb2RvY2EuY29tL1NlY3VyZUNlcnRpZmlj +YXRlU2VydmljZXMuY3JsMDmgN6A1hjNodHRwOi8vY3JsLmNvbW9kby5uZXQvU2VjdXJlQ2VydGlm +aWNhdGVTZXJ2aWNlcy5jcmwwDQYJKoZIhvcNAQEFBQADggEBAIcBbSMdflsXfcFhMs+P5/OKlFlm 
+4J4oqF7Tt/Q05qo5spcWxYJvMqTpjOev/e/C6LlLqqP05tqNZSH7uoDrJiiFGv45jN5bBAS0VPmj +Z55B+glSzAVIqMk/IQQezkhr/IXownuvf7fM+F86/TXGDe+X3EyrEeFryzHRbPtIgKvcnDe4IRRL +DXE97IMzbtFuMhbsmMcWi1mmNKsFVy2T96oTy9IT4rcuO81rUBcJaD61JlfutuC23bkpgHl9j6Pw +pCikFcSF9CfUa7/lXORlAnZUtOM3ZiTTGWHIUhDlizeauan5Hb/qmZJhlv8BzaFfDbxxvA6sCx1H +RR3B7Hzs/Sk= +-----END CERTIFICATE----- + +Comodo Trusted Services root +============================ +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIBATANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJHQjEbMBkGA1UECAwS +R3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRowGAYDVQQKDBFDb21vZG8gQ0Eg +TGltaXRlZDElMCMGA1UEAwwcVHJ1c3RlZCBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczAeFw0wNDAxMDEw +MDAwMDBaFw0yODEyMzEyMzU5NTlaMH8xCzAJBgNVBAYTAkdCMRswGQYDVQQIDBJHcmVhdGVyIE1h +bmNoZXN0ZXIxEDAOBgNVBAcMB1NhbGZvcmQxGjAYBgNVBAoMEUNvbW9kbyBDQSBMaW1pdGVkMSUw +IwYDVQQDDBxUcnVzdGVkIENlcnRpZmljYXRlIFNlcnZpY2VzMIIBIjANBgkqhkiG9w0BAQEFAAOC +AQ8AMIIBCgKCAQEA33FvNlhTWvI2VFeAxHQIIO0Yfyod5jWaHiWsnOWWfnJSoBVC21ndZHoa0Lh7 +3TkVvFVIxO06AOoxEbrycXQaZ7jPM8yoMa+j49d/vzMtTGo87IvDktJTdyR0nAducPy9C1t2ul/y +/9c3S0pgePfw+spwtOpZqqPOSC+pw7ILfhdyFgymBwwbOM/JYrc/oJOlh0Hyt3BAd9i+FHzjqMB6 +juljatEPmsbS9Is6FARW1O24zG71++IsWL1/T2sr92AkWCTOJu80kTrV44HQsvAEAtdbtz6SrGsS +ivnkBbA7kUlcsutT6vifR4buv5XAwAaf0lteERv0xwQ1KdJVXOTt6wIDAQABo4HJMIHGMB0GA1Ud +DgQWBBTFe1i97doladL3WRaoszLAeydb9DAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zCBgwYDVR0fBHwwejA8oDqgOIY2aHR0cDovL2NybC5jb21vZG9jYS5jb20vVHJ1c3RlZENlcnRp +ZmljYXRlU2VydmljZXMuY3JsMDqgOKA2hjRodHRwOi8vY3JsLmNvbW9kby5uZXQvVHJ1c3RlZENl +cnRpZmljYXRlU2VydmljZXMuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQDIk4E7ibSvuIQSTI3S8Ntw +uleGFTQQuS9/HrCoiWChisJ3DFBKmwCL2Iv0QeLQg4pKHBQGsKNoBXAxMKdTmw7pSqBYaWcOrp32 +pSxBvzwGa+RZzG0Q8ZZvH9/0BAKkn0U+yNj6NkZEUD+Cl5EfKNsYEYwq5GWDVxISjBc/lDb+XbDA +BHcTuPQV1T84zJQ6VdCsmPW6AF/ghhmBeC8owH7TzEIK9a5QoNE+xqFx7D+gIIxmOom0jtTYsU0l +R+4viMi14QVFwL4Ucd56/Y57fU0IlqUSc/AtyjcndBInTMu2l+nZrghtWjlA3QVHdWpaIbOjGM9O +9y5Xt5hwXsjEeLBi +-----END CERTIFICATE----- + +QuoVadis Root CA +================ +-----BEGIN CERTIFICATE----- +MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJCTTEZMBcGA1UE +ChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0 +eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAz +MTkxODMzMzNaFw0yMTAzMTcxODMzMzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRp +cyBMaW1pdGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQD +EyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF +AAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Ypli4kVEAkOPcahdxYTMuk +J0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2DrOpm2RgbaIr1VxqYuvXtdj182d6UajtL +F8HVj71lODqV0D1VNk7feVcxKh7YWWVJWCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeL +YzcS19Dsw3sgQUSj7cugF+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWen +AScOospUxbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCCAk4w +PQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVvdmFkaXNvZmZzaG9y +ZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREwggENMIIBCQYJKwYBBAG+WAABMIH7 +MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNlIG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmlj +YXRlIGJ5IGFueSBwYXJ0eSBhc3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJs +ZSBzdGFuZGFyZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh +Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYIKwYBBQUHAgEW +Fmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3TKbkGGew5Oanwl4Rqy+/fMIGu +BgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rqy+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkw 
+FwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MS4wLAYDVQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6 +tlCLMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSkfnIYj9lo +fFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf87C9TqnN7Az10buYWnuul +LsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1RcHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2x +gI4JVrmcGmD+XcHXetwReNDWXcG31a0ymQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi +5upZIof4l/UO/erMkqQWxFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi +5nrQNiOKSnQ2+Q== +-----END CERTIFICATE----- + +QuoVadis Root CA 2 +================== +-----BEGIN CERTIFICATE----- +MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0xGTAXBgNVBAoT +EFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJvb3QgQ0EgMjAeFw0wNjExMjQx +ODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMRswGQYDVQQDExJRdW9WYWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4IC +DwAwggIKAoICAQCaGMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6 +XJxgFyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55JWpzmM+Yk +lvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bBrrcCaoF6qUWD4gXmuVbB +lDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp+ARz8un+XJiM9XOva7R+zdRcAitMOeGy +lZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt +66/3FsvbzSUr5R/7mp/iUcw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1Jdxn +wQ5hYIizPtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og/zOh +D7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UHoycR7hYQe7xFSkyy +BNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuIyV77zGHcizN300QyNQliBJIWENie +J0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1UdEwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1Ud +DgQWBBQahGK8SEwzJQTU7tD2A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGU +a6FJpEcwRTELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT +ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2fBluornFdLwUv +Z+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzng/iN/Ae42l9NLmeyhP3ZRPx3 +UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2BlfF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodm +VjB3pjd4M1IQWK4/YY7yarHvGH5KWWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK ++JDSV6IZUaUtl0HaB0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrW +IozchLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPRTUIZ3Ph1 +WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWDmbA4CD/pXvk1B+TJYm5X +f6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0ZohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II +4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8 +VCLAAVBpQ570su9t+Oza8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u +-----END CERTIFICATE----- + +QuoVadis Root CA 3 +================== +-----BEGIN CERTIFICATE----- +MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0xGTAXBgNVBAoT +EFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJvb3QgQ0EgMzAeFw0wNjExMjQx +OTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMRswGQYDVQQDExJRdW9WYWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4IC +DwAwggIKAoICAQDMV0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNgg +DhoB4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUrH556VOij +KTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd8lyyBTNvijbO0BNO/79K +DDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9CabwvvWhDFlaJKjdhkf2mrk7AyxRllDdLkgbv +BNDInIjbC3uBr7E9KsRlOni27tyAsdLTmZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwp +p5ijJUMv7/FfJuGITfhebtfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8 
+nT8KKdjcT5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDtWAEX +MJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZc6tsgLjoC2SToJyM +Gf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A4iLItLRkT9a6fUg+qGkM17uGcclz +uD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYDVR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHT +BgkrBgEEAb5YAAMwgcUwgZMGCCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmlj +YXRlIGNvbnN0aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 +aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVudC4wLQYIKwYB +BQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2NwczALBgNVHQ8EBAMCAQYwHQYD +VR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4GA1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4 +ywLQoUmkRzBFMQswCQYDVQQGEwJCTTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UE +AxMSUXVvVmFkaXMgUm9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZV +qyM07ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSemd1o417+s +hvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd+LJ2w/w4E6oM3kJpK27z +POuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2 +Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadNt54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp +8kokUvd0/bpO5qgdAm6xDYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBC +bjPsMZ57k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6szHXu +g/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0jWy10QJLZYxkNc91p +vGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeTmJlglFwjz1onl14LBQaTNx47aTbr +qZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK4SVhM7JZG+Ju1zdXtg2pEto= +-----END CERTIFICATE----- + +Security Communication Root CA +============================== +-----BEGIN CERTIFICATE----- +MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMP +U0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEw +HhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMP +U0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEw +ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw +8yl89f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJDKaVv0uM +DPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9Ms+k2Y7CI9eNqPPYJayX +5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/NQV3Is00qVUarH9oe4kA92819uZKAnDfd +DJZkndwi92SL32HeFZRSFaB9UslLqCHJxrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2 +JChzAgMBAAGjPzA9MB0GA1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYw +DwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vGkl3g +0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfrUj94nK9NrvjVT8+a +mCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5Bw+SUEmK3TGXX8npN6o7WWWXlDLJ +s58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJUJRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ +6rBK+1YWc26sTfcioU+tHXotRSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAi +FL39vmwLAw== +-----END CERTIFICATE----- + +Sonera Class 2 Root CA +====================== +-----BEGIN CERTIFICATE----- +MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEPMA0GA1UEChMG +U29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAxMDQwNjA3Mjk0MFoXDTIxMDQw +NjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNVBAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJh +IENsYXNzMiBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3 +/Ei9vX+ALTU74W+oZ6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybT +dXnt5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s3TmVToMG +f+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2EjvOr7nQKV0ba5cTppCD8P 
+tOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu8nYybieDwnPz3BjotJPqdURrBGAgcVeH +nfO+oJAjPYok4doh28MCAwEAAaMzMDEwDwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITT +XjwwCwYDVR0PBAQDAgEGMA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt +0jSv9zilzqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/3DEI +cbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvDFNr450kkkdAdavph +Oe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6Tk6ezAyNlNzZRZxe7EJQY670XcSx +EtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLH +llpwrN9M +-----END CERTIFICATE----- + +Staat der Nederlanden Root CA +============================= +-----BEGIN CERTIFICATE----- +MIIDujCCAqKgAwIBAgIEAJiWijANBgkqhkiG9w0BAQUFADBVMQswCQYDVQQGEwJOTDEeMBwGA1UE +ChMVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSYwJAYDVQQDEx1TdGFhdCBkZXIgTmVkZXJsYW5kZW4g +Um9vdCBDQTAeFw0wMjEyMTcwOTIzNDlaFw0xNTEyMTYwOTE1MzhaMFUxCzAJBgNVBAYTAk5MMR4w +HAYDVQQKExVTdGFhdCBkZXIgTmVkZXJsYW5kZW4xJjAkBgNVBAMTHVN0YWF0IGRlciBOZWRlcmxh +bmRlbiBSb290IENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAmNK1URF6gaYUmHFt +vsznExvWJw56s2oYHLZhWtVhCb/ekBPHZ+7d89rFDBKeNVU+LCeIQGv33N0iYfXCxw719tV2U02P +jLwYdjeFnejKScfST5gTCaI+Ioicf9byEGW07l8Y1Rfj+MX94p2i71MOhXeiD+EwR+4A5zN9RGca +C1Hoi6CeUJhoNFIfLm0B8mBF8jHrqTFoKbt6QZ7GGX+UtFE5A3+y3qcym7RHjm+0Sq7lr7HcsBth +vJly3uSJt3omXdozSVtSnA71iq3DuD3oBmrC1SoLbHuEvVYFy4ZlkuxEK7COudxwC0barbxjiDn6 +22r+I/q85Ej0ZytqERAhSQIDAQABo4GRMIGOMAwGA1UdEwQFMAMBAf8wTwYDVR0gBEgwRjBEBgRV +HSAAMDwwOgYIKwYBBQUHAgEWLmh0dHA6Ly93d3cucGtpb3ZlcmhlaWQubmwvcG9saWNpZXMvcm9v +dC1wb2xpY3kwDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSofeu8Y6R0E3QA7Jbg0zTBLL9s+DAN +BgkqhkiG9w0BAQUFAAOCAQEABYSHVXQ2YcG70dTGFagTtJ+k/rvuFbQvBgwp8qiSpGEN/KtcCFtR +EytNwiphyPgJWPwtArI5fZlmgb9uXJVFIGzmeafR2Bwp/MIgJ1HI8XxdNGdphREwxgDS1/PTfLbw +MVcoEoJz6TMvplW0C5GUR5z6u3pCMuiufi3IvKwUv9kP2Vv8wfl6leF9fpb8cbDCTMjfRTTJzg3y +nGQI0DvDKcWy7ZAEwbEpkcUwb8GpcjPM/l0WFywRaed+/sWDCN+83CI6LiBpIzlWYGeQiy52OfsR +iJf2fL1LuCAWZwWN4jvBcj+UlTfHXbme2JOhF4//DGYVwSR8MnwDHTuhWEUykw== +-----END CERTIFICATE----- + +UTN DATACorp SGC Root CA +======================== +-----BEGIN CERTIFICATE----- +MIIEXjCCA0agAwIBAgIQRL4Mi1AAIbQR0ypoBqmtaTANBgkqhkiG9w0BAQUFADCBkzELMAkGA1UE +BhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEeMBwGA1UEChMVVGhl +IFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZ +BgNVBAMTElVUTiAtIERBVEFDb3JwIFNHQzAeFw05OTA2MjQxODU3MjFaFw0xOTA2MjQxOTA2MzBa +MIGTMQswCQYDVQQGEwJVUzELMAkGA1UECBMCVVQxFzAVBgNVBAcTDlNhbHQgTGFrZSBDaXR5MR4w +HAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxITAfBgNVBAsTGGh0dHA6Ly93d3cudXNlcnRy +dXN0LmNvbTEbMBkGA1UEAxMSVVROIC0gREFUQUNvcnAgU0dDMIIBIjANBgkqhkiG9w0BAQEFAAOC +AQ8AMIIBCgKCAQEA3+5YEKIrblXEjr8uRgnn4AgPLit6E5Qbvfa2gI5lBZMAHryv4g+OGQ0SR+ys +raP6LnD43m77VkIVni5c7yPeIbkFdicZD0/Ww5y0vpQZY/KmEQrrU0icvvIpOxboGqBMpsn0GFlo +wHDyUwDAXlCCpVZvNvlK4ESGoE1O1kduSUrLZ9emxAW5jh70/P/N5zbgnAVssjMiFdC04MwXwLLA +9P4yPykqlXvY8qdOD1R8oQ2AswkDwf9c3V6aPryuvEeKaq5xyh+xKrhfQgUL7EYw0XILyulWbfXv +33i+Ybqypa4ETLyorGkVl73v67SMvzX41MPRKA5cOp9wGDMgd8SirwIDAQABo4GrMIGoMAsGA1Ud +DwQEAwIBxjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRTMtGzz3/64PGgXYVOktKeRR20TzA9 +BgNVHR8ENjA0MDKgMKAuhixodHRwOi8vY3JsLnVzZXJ0cnVzdC5jb20vVVROLURBVEFDb3JwU0dD +LmNybDAqBgNVHSUEIzAhBggrBgEFBQcDAQYKKwYBBAGCNwoDAwYJYIZIAYb4QgQBMA0GCSqGSIb3 +DQEBBQUAA4IBAQAnNZcAiosovcYzMB4p/OL31ZjUQLtgyr+rFywJNn9Q+kHcrpY6CiM+iVnJowft +Gzet/Hy+UUla3joKVAgWRcKZsYfNjGjgaQPpxE6YsjuMFrMOoAyYUJuTqXAJyCyjj98C5OBxOvG0 +I3KgqgHf35g+FFCgMSa9KOlaMCZ1+XtgHI3zzVAmbQQnmt/VDUVHKWss5nbZqSl9Mt3JNjy9rjXx 
+EZ4du5A/EkdOjtd+D2JzHVImOBwYSf0wdJrE5SIv2MCN7ZF6TACPcn9d2t0bi0Vr591pl6jFVkwP +DPafepE39peC4N1xaf92P2BNPM/3mfnGV/TJVTl4uix5yaaIK/QI +-----END CERTIFICATE----- + +UTN USERFirst Hardware Root CA +============================== +-----BEGIN CERTIFICATE----- +MIIEdDCCA1ygAwIBAgIQRL4Mi1AAJLQR0zYq/mUK/TANBgkqhkiG9w0BAQUFADCBlzELMAkGA1UE +BhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEeMBwGA1UEChMVVGhl +IFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAd +BgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdhcmUwHhcNOTkwNzA5MTgxMDQyWhcNMTkwNzA5MTgx +OTIyWjCBlzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0 +eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8vd3d3LnVz +ZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdhcmUwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQCx98M4P7Sof885glFn0G2f0v9Y8+efK+wNiVSZuTiZFvfgIXlI +wrthdBKWHTxqctU8EGc6Oe0rE81m65UJM6Rsl7HoxuzBdXmcRl6Nq9Bq/bkqVRcQVLMZ8Jr28bFd +tqdt++BxF2uiiPsA3/4aMXcMmgF6sTLjKwEHOG7DpV4jvEWbe1DByTCP2+UretNb+zNAHqDVmBe8 +i4fDidNdoI6yqqr2jmmIBsX6iSHzCJ1pLgkzmykNRg+MzEk0sGlRvfkGzWitZky8PqxhvQqIDsjf +Pe58BEydCl5rkdbux+0ojatNh4lz0G6k0B4WixThdkQDf2Os5M1JnMWS9KsyoUhbAgMBAAGjgbkw +gbYwCwYDVR0PBAQDAgHGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFKFyXyYbKJhDlV0HN9WF +lp1L0sNFMEQGA1UdHwQ9MDswOaA3oDWGM2h0dHA6Ly9jcmwudXNlcnRydXN0LmNvbS9VVE4tVVNF +UkZpcnN0LUhhcmR3YXJlLmNybDAxBgNVHSUEKjAoBggrBgEFBQcDAQYIKwYBBQUHAwUGCCsGAQUF +BwMGBggrBgEFBQcDBzANBgkqhkiG9w0BAQUFAAOCAQEARxkP3nTGmZev/K0oXnWO6y1n7k57K9cM +//bey1WiCuFMVGWTYGufEpytXoMs61quwOQt9ABjHbjAbPLPSbtNk28GpgoiskliCE7/yMgUsogW +XecB5BKV5UU0s4tpvc+0hY91UZ59Ojg6FEgSxvunOxqNDYJAB+gECJChicsZUN/KHAG8HQQZexB2 +lzvukJDKxA4fFm517zP4029bHpbj4HR3dHuKom4t3XbWOTCC8KucUvIqx69JXn7HaOWCgchqJ/kn +iCrVWFCVH/A7HFe7fRQ5YiuayZSSKqMiDP+JJn1fIytH1xUdqWqeUQ0qUZ6B+dQ7XnASfxAynB67 +nfhmqA== +-----END CERTIFICATE----- + +Camerfirma Chambers of Commerce Root +==================================== +-----BEGIN CERTIFICATE----- +MIIEvTCCA6WgAwIBAgIBADANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJFVTEnMCUGA1UEChMe +QUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQLExpodHRwOi8vd3d3LmNoYW1i +ZXJzaWduLm9yZzEiMCAGA1UEAxMZQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdDAeFw0wMzA5MzAx +NjEzNDNaFw0zNzA5MzAxNjEzNDRaMH8xCzAJBgNVBAYTAkVVMScwJQYDVQQKEx5BQyBDYW1lcmZp +cm1hIFNBIENJRiBBODI3NDMyODcxIzAhBgNVBAsTGmh0dHA6Ly93d3cuY2hhbWJlcnNpZ24ub3Jn +MSIwIAYDVQQDExlDaGFtYmVycyBvZiBDb21tZXJjZSBSb290MIIBIDANBgkqhkiG9w0BAQEFAAOC +AQ0AMIIBCAKCAQEAtzZV5aVdGDDg2olUkfzIx1L4L1DZ77F1c2VHfRtbunXF/KGIJPov7coISjlU +xFF6tdpg6jg8gbLL8bvZkSM/SAFwdakFKq0fcfPJVD0dBmpAPrMMhe5cG3nCYsS4No41XQEMIwRH +NaqbYE6gZj3LJgqcQKH0XZi/caulAGgq7YN6D6IUtdQis4CwPAxaUWktWBiP7Zme8a7ileb2R6jW +DA+wWFjbw2Y3npuRVDM30pQcakjJyfKl2qUMI/cjDpwyVV5xnIQFUZot/eZOKjRa3spAN2cMVCFV +d9oKDMyXroDclDZK9D7ONhMeU+SsTjoF7Nuucpw4i9A5O4kKPnf+dQIBA6OCAUQwggFAMBIGA1Ud +EwEB/wQIMAYBAf8CAQwwPAYDVR0fBDUwMzAxoC+gLYYraHR0cDovL2NybC5jaGFtYmVyc2lnbi5v +cmcvY2hhbWJlcnNyb290LmNybDAdBgNVHQ4EFgQU45T1sU3p26EpW1eLTXYGduHRooowDgYDVR0P +AQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzAnBgNVHREEIDAegRxjaGFtYmVyc3Jvb3RAY2hh +bWJlcnNpZ24ub3JnMCcGA1UdEgQgMB6BHGNoYW1iZXJzcm9vdEBjaGFtYmVyc2lnbi5vcmcwWAYD +VR0gBFEwTzBNBgsrBgEEAYGHLgoDATA+MDwGCCsGAQUFBwIBFjBodHRwOi8vY3BzLmNoYW1iZXJz +aWduLm9yZy9jcHMvY2hhbWJlcnNyb290Lmh0bWwwDQYJKoZIhvcNAQEFBQADggEBAAxBl8IahsAi +fJ/7kPMa0QOx7xP5IV8EnNrJpY0nbJaHkb5BkAFyk+cefV/2icZdp0AJPaxJRUXcLo0waLIJuvvD +L8y6C98/d3tGfToSJI6WjzwFCm/SlCgdbQzALogi1djPHRPH8EjX1wWnz8dHnjs8NMiAT9QUu/wN +UPf6s+xCX6ndbcj0dc97wXImsQEcXCz9ek60AcUFV7nnPKoF2YjpB0ZBzu9Bga5Y34OirsrXdx/n 
+ADydb47kMgkdTXg0eDQ8lJsm7U9xxhl6vSAiSFr+S30Dt+dYvsYyTnQeaN2oaFuzPu5ifdmA6Ap1 +erfutGWaIZDgqtCYvDi1czyL+Nw= +-----END CERTIFICATE----- + +Camerfirma Global Chambersign Root +================================== +-----BEGIN CERTIFICATE----- +MIIExTCCA62gAwIBAgIBADANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJFVTEnMCUGA1UEChMe +QUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQLExpodHRwOi8vd3d3LmNoYW1i +ZXJzaWduLm9yZzEgMB4GA1UEAxMXR2xvYmFsIENoYW1iZXJzaWduIFJvb3QwHhcNMDMwOTMwMTYx +NDE4WhcNMzcwOTMwMTYxNDE4WjB9MQswCQYDVQQGEwJFVTEnMCUGA1UEChMeQUMgQ2FtZXJmaXJt +YSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQLExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEg +MB4GA1UEAxMXR2xvYmFsIENoYW1iZXJzaWduIFJvb3QwggEgMA0GCSqGSIb3DQEBAQUAA4IBDQAw +ggEIAoIBAQCicKLQn0KuWxfH2H3PFIP8T8mhtxOviteePgQKkotgVvq0Mi+ITaFgCPS3CU6gSS9J +1tPfnZdan5QEcOw/Wdm3zGaLmFIoCQLfxS+EjXqXd7/sQJ0lcqu1PzKY+7e3/HKE5TWH+VX6ox8O +by4o3Wmg2UIQxvi1RMLQQ3/bvOSiPGpVeAp3qdjqGTK3L/5cPxvusZjsyq16aUXjlg9V9ubtdepl +6DJWk0aJqCWKZQbua795B9Dxt6/tLE2Su8CoX6dnfQTyFQhwrJLWfQTSM/tMtgsL+xrJxI0DqX5c +8lCrEqWhz0hQpe/SyBoT+rB/sYIcd2oPX9wLlY/vQ37mRQklAgEDo4IBUDCCAUwwEgYDVR0TAQH/ +BAgwBgEB/wIBDDA/BgNVHR8EODA2MDSgMqAwhi5odHRwOi8vY3JsLmNoYW1iZXJzaWduLm9yZy9j +aGFtYmVyc2lnbnJvb3QuY3JsMB0GA1UdDgQWBBRDnDafsJ4wTcbOX60Qq+UDpfqpFDAOBgNVHQ8B +Af8EBAMCAQYwEQYJYIZIAYb4QgEBBAQDAgAHMCoGA1UdEQQjMCGBH2NoYW1iZXJzaWducm9vdEBj +aGFtYmVyc2lnbi5vcmcwKgYDVR0SBCMwIYEfY2hhbWJlcnNpZ25yb290QGNoYW1iZXJzaWduLm9y +ZzBbBgNVHSAEVDBSMFAGCysGAQQBgYcuCgEBMEEwPwYIKwYBBQUHAgEWM2h0dHA6Ly9jcHMuY2hh +bWJlcnNpZ24ub3JnL2Nwcy9jaGFtYmVyc2lnbnJvb3QuaHRtbDANBgkqhkiG9w0BAQUFAAOCAQEA +PDtwkfkEVCeR4e3t/mh/YV3lQWVPMvEYBZRqHN4fcNs+ezICNLUMbKGKfKX0j//U2K0X1S0E0T9Y +gOKBWYi+wONGkyT+kL0mojAt6JcmVzWJdJYY9hXiryQZVgICsroPFOrGimbBhkVVi76SvpykBMdJ +PJ7oKXqJ1/6v/2j1pReQvayZzKWGVwlnRtvWFsJG8eSpUPWP0ZIV018+xgBJOm5YstHRJw0lyDL4 +IBHNfTIzSJRUTN3cecQwn+uOuFW114hcxWokPbLTBQNRxgfvzBRydD1ucs4YKIxKoHflCStFREes +t2d/AYoFWpO+ocH/+OcOZ6RHSXZddZAa9SaP8A== +-----END CERTIFICATE----- + +NetLock Notary (Class A) Root +============================= +-----BEGIN CERTIFICATE----- +MIIGfTCCBWWgAwIBAgICAQMwDQYJKoZIhvcNAQEEBQAwga8xCzAJBgNVBAYTAkhVMRAwDgYDVQQI +EwdIdW5nYXJ5MREwDwYDVQQHEwhCdWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6 +dG9uc2FnaSBLZnQuMRowGAYDVQQLExFUYW51c2l0dmFueWtpYWRvazE2MDQGA1UEAxMtTmV0TG9j +ayBLb3pqZWd5em9pIChDbGFzcyBBKSBUYW51c2l0dmFueWtpYWRvMB4XDTk5MDIyNDIzMTQ0N1oX +DTE5MDIxOTIzMTQ0N1owga8xCzAJBgNVBAYTAkhVMRAwDgYDVQQIEwdIdW5nYXJ5MREwDwYDVQQH +EwhCdWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6dG9uc2FnaSBLZnQuMRowGAYD +VQQLExFUYW51c2l0dmFueWtpYWRvazE2MDQGA1UEAxMtTmV0TG9jayBLb3pqZWd5em9pIChDbGFz +cyBBKSBUYW51c2l0dmFueWtpYWRvMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvHSM +D7tM9DceqQWC2ObhbHDqeLVu0ThEDaiDzl3S1tWBxdRL51uUcCbbO51qTGL3cfNk1mE7PetzozfZ +z+qMkjvN9wfcZnSX9EUi3fRc4L9t875lM+QVOr/bmJBVOMTtplVjC7B4BPTjbsE/jvxReB+SnoPC +/tmwqcm8WgD/qaiYdPv2LD4VOQ22BFWoDpggQrOxJa1+mm9dU7GrDPzr4PN6s6iz/0b2Y6LYOph7 +tqyF/7AlT3Rj5xMHpQqPBffAZG9+pyeAlt7ULoZgx2srXnN7F+eRP2QM2EsiNCubMvJIH5+hCoR6 +4sKtlz2O1cH5VqNQ6ca0+pii7pXmKgOM3wIDAQABo4ICnzCCApswDgYDVR0PAQH/BAQDAgAGMBIG +A1UdEwEB/wQIMAYBAf8CAQQwEQYJYIZIAYb4QgEBBAQDAgAHMIICYAYJYIZIAYb4QgENBIICURaC +Ak1GSUdZRUxFTSEgRXplbiB0YW51c2l0dmFueSBhIE5ldExvY2sgS2Z0LiBBbHRhbGFub3MgU3pv +bGdhbHRhdGFzaSBGZWx0ZXRlbGVpYmVuIGxlaXJ0IGVsamFyYXNvayBhbGFwamFuIGtlc3p1bHQu +IEEgaGl0ZWxlc2l0ZXMgZm9seWFtYXRhdCBhIE5ldExvY2sgS2Z0LiB0ZXJtZWtmZWxlbG9zc2Vn +LWJpenRvc2l0YXNhIHZlZGkuIEEgZGlnaXRhbGlzIGFsYWlyYXMgZWxmb2dhZGFzYW5hayBmZWx0 +ZXRlbGUgYXogZWxvaXJ0IGVsbGVub3J6ZXNpIGVsamFyYXMgbWVndGV0ZWxlLiBBeiBlbGphcmFz 
+IGxlaXJhc2EgbWVndGFsYWxoYXRvIGEgTmV0TG9jayBLZnQuIEludGVybmV0IGhvbmxhcGphbiBh +IGh0dHBzOi8vd3d3Lm5ldGxvY2submV0L2RvY3MgY2ltZW4gdmFneSBrZXJoZXRvIGF6IGVsbGVu +b3J6ZXNAbmV0bG9jay5uZXQgZS1tYWlsIGNpbWVuLiBJTVBPUlRBTlQhIFRoZSBpc3N1YW5jZSBh +bmQgdGhlIHVzZSBvZiB0aGlzIGNlcnRpZmljYXRlIGlzIHN1YmplY3QgdG8gdGhlIE5ldExvY2sg +Q1BTIGF2YWlsYWJsZSBhdCBodHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIG9yIGJ5IGUtbWFp +bCBhdCBjcHNAbmV0bG9jay5uZXQuMA0GCSqGSIb3DQEBBAUAA4IBAQBIJEb3ulZv+sgoA0BO5TE5 +ayZrU3/b39/zcT0mwBQOxmd7I6gMc90Bu8bKbjc5VdXHjFYgDigKDtIqpLBJUsY4B/6+CgmM0ZjP +ytoUMaFP0jn8DxEsQ8Pdq5PHVT5HfBgaANzze9jyf1JsIPQLX2lS9O74silg6+NJMSEN1rUQQeJB +CWziGppWS3cC9qCbmieH6FUpccKQn0V4GuEVZD3QDtigdp+uxdAu6tYPVuxkf1qbFFgBJ34TUMdr +KuZoPL9coAob4Q566eKAw+np9v1sEZ7Q5SgnK1QyQhSCdeZK8CtmdWOMovsEPoMOmzbwGOQmIMOM +8CgHrTwXZoi1/baI +-----END CERTIFICATE----- + +NetLock Business (Class B) Root +=============================== +-----BEGIN CERTIFICATE----- +MIIFSzCCBLSgAwIBAgIBaTANBgkqhkiG9w0BAQQFADCBmTELMAkGA1UEBhMCSFUxETAPBgNVBAcT +CEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0b25zYWdpIEtmdC4xGjAYBgNV +BAsTEVRhbnVzaXR2YW55a2lhZG9rMTIwMAYDVQQDEylOZXRMb2NrIFV6bGV0aSAoQ2xhc3MgQikg +VGFudXNpdHZhbnlraWFkbzAeFw05OTAyMjUxNDEwMjJaFw0xOTAyMjAxNDEwMjJaMIGZMQswCQYD +VQQGEwJIVTERMA8GA1UEBxMIQnVkYXBlc3QxJzAlBgNVBAoTHk5ldExvY2sgSGFsb3phdGJpenRv +bnNhZ2kgS2Z0LjEaMBgGA1UECxMRVGFudXNpdHZhbnlraWFkb2sxMjAwBgNVBAMTKU5ldExvY2sg +VXpsZXRpIChDbGFzcyBCKSBUYW51c2l0dmFueWtpYWRvMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCB +iQKBgQCx6gTsIKAjwo84YM/HRrPVG/77uZmeBNwcf4xKgZjupNTKihe5In+DCnVMm8Bp2GQ5o+2S +o/1bXHQawEfKOml2mrriRBf8TKPV/riXiK+IA4kfpPIEPsgHC+b5sy96YhQJRhTKZPWLgLViqNhr +1nGTLbO/CVRY7QbrqHvcQ7GhaQIDAQABo4ICnzCCApswEgYDVR0TAQH/BAgwBgEB/wIBBDAOBgNV +HQ8BAf8EBAMCAAYwEQYJYIZIAYb4QgEBBAQDAgAHMIICYAYJYIZIAYb4QgENBIICURaCAk1GSUdZ +RUxFTSEgRXplbiB0YW51c2l0dmFueSBhIE5ldExvY2sgS2Z0LiBBbHRhbGFub3MgU3pvbGdhbHRh +dGFzaSBGZWx0ZXRlbGVpYmVuIGxlaXJ0IGVsamFyYXNvayBhbGFwamFuIGtlc3p1bHQuIEEgaGl0 +ZWxlc2l0ZXMgZm9seWFtYXRhdCBhIE5ldExvY2sgS2Z0LiB0ZXJtZWtmZWxlbG9zc2VnLWJpenRv +c2l0YXNhIHZlZGkuIEEgZGlnaXRhbGlzIGFsYWlyYXMgZWxmb2dhZGFzYW5hayBmZWx0ZXRlbGUg +YXogZWxvaXJ0IGVsbGVub3J6ZXNpIGVsamFyYXMgbWVndGV0ZWxlLiBBeiBlbGphcmFzIGxlaXJh +c2EgbWVndGFsYWxoYXRvIGEgTmV0TG9jayBLZnQuIEludGVybmV0IGhvbmxhcGphbiBhIGh0dHBz +Oi8vd3d3Lm5ldGxvY2submV0L2RvY3MgY2ltZW4gdmFneSBrZXJoZXRvIGF6IGVsbGVub3J6ZXNA +bmV0bG9jay5uZXQgZS1tYWlsIGNpbWVuLiBJTVBPUlRBTlQhIFRoZSBpc3N1YW5jZSBhbmQgdGhl +IHVzZSBvZiB0aGlzIGNlcnRpZmljYXRlIGlzIHN1YmplY3QgdG8gdGhlIE5ldExvY2sgQ1BTIGF2 +YWlsYWJsZSBhdCBodHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIG9yIGJ5IGUtbWFpbCBhdCBj +cHNAbmV0bG9jay5uZXQuMA0GCSqGSIb3DQEBBAUAA4GBAATbrowXr/gOkDFOzT4JwG06sPgzTEdM +43WIEJessDgVkcYplswhwG08pXTP2IKlOcNl40JwuyKQ433bNXbhoLXan3BukxowOR0w2y7jfLKR +stE3Kfq51hdcR0/jHTjrn9V7lagonhVK0dHQKwCXoOKSNitjrFgBazMpUIaD8QFI +-----END CERTIFICATE----- + +NetLock Express (Class C) Root +============================== +-----BEGIN CERTIFICATE----- +MIIFTzCCBLigAwIBAgIBaDANBgkqhkiG9w0BAQQFADCBmzELMAkGA1UEBhMCSFUxETAPBgNVBAcT +CEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0b25zYWdpIEtmdC4xGjAYBgNV +BAsTEVRhbnVzaXR2YW55a2lhZG9rMTQwMgYDVQQDEytOZXRMb2NrIEV4cHJlc3N6IChDbGFzcyBD +KSBUYW51c2l0dmFueWtpYWRvMB4XDTk5MDIyNTE0MDgxMVoXDTE5MDIyMDE0MDgxMVowgZsxCzAJ +BgNVBAYTAkhVMREwDwYDVQQHEwhCdWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6 +dG9uc2FnaSBLZnQuMRowGAYDVQQLExFUYW51c2l0dmFueWtpYWRvazE0MDIGA1UEAxMrTmV0TG9j +ayBFeHByZXNzeiAoQ2xhc3MgQykgVGFudXNpdHZhbnlraWFkbzCBnzANBgkqhkiG9w0BAQEFAAOB +jQAwgYkCgYEA6+ywbGGKIyWvYCDj2Z/8kwvbXY2wobNAOoLO/XXgeDIDhlqGlZHtU/qdQPzm6N3Z 
+W3oDvV3zOwzDUXmbrVWg6dADEK8KuhRC2VImESLH0iDMgqSaqf64gXadarfSNnU+sYYJ9m5tfk63 +euyucYT2BDMIJTLrdKwWRMbkQJMdf60CAwEAAaOCAp8wggKbMBIGA1UdEwEB/wQIMAYBAf8CAQQw +DgYDVR0PAQH/BAQDAgAGMBEGCWCGSAGG+EIBAQQEAwIABzCCAmAGCWCGSAGG+EIBDQSCAlEWggJN +RklHWUVMRU0hIEV6ZW4gdGFudXNpdHZhbnkgYSBOZXRMb2NrIEtmdC4gQWx0YWxhbm9zIFN6b2xn +YWx0YXRhc2kgRmVsdGV0ZWxlaWJlbiBsZWlydCBlbGphcmFzb2sgYWxhcGphbiBrZXN6dWx0LiBB +IGhpdGVsZXNpdGVzIGZvbHlhbWF0YXQgYSBOZXRMb2NrIEtmdC4gdGVybWVrZmVsZWxvc3NlZy1i +aXp0b3NpdGFzYSB2ZWRpLiBBIGRpZ2l0YWxpcyBhbGFpcmFzIGVsZm9nYWRhc2FuYWsgZmVsdGV0 +ZWxlIGF6IGVsb2lydCBlbGxlbm9yemVzaSBlbGphcmFzIG1lZ3RldGVsZS4gQXogZWxqYXJhcyBs +ZWlyYXNhIG1lZ3RhbGFsaGF0byBhIE5ldExvY2sgS2Z0LiBJbnRlcm5ldCBob25sYXBqYW4gYSBo +dHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIGNpbWVuIHZhZ3kga2VyaGV0byBheiBlbGxlbm9y +emVzQG5ldGxvY2submV0IGUtbWFpbCBjaW1lbi4gSU1QT1JUQU5UISBUaGUgaXNzdWFuY2UgYW5k +IHRoZSB1c2Ugb2YgdGhpcyBjZXJ0aWZpY2F0ZSBpcyBzdWJqZWN0IHRvIHRoZSBOZXRMb2NrIENQ +UyBhdmFpbGFibGUgYXQgaHR0cHM6Ly93d3cubmV0bG9jay5uZXQvZG9jcyBvciBieSBlLW1haWwg +YXQgY3BzQG5ldGxvY2submV0LjANBgkqhkiG9w0BAQQFAAOBgQAQrX/XDDKACtiG8XmYta3UzbM2 +xJZIwVzNmtkFLp++UOv0JhQQLdRmF/iewSf98e3ke0ugbLWrmldwpu2gpO0u9f38vf5NNwgMvOOW +gyL1SRt/Syu0VMGAfJlOHdCM7tCs5ZL6dVb+ZKATj7i4Fp1hBWeAyNDYpQcCNJgEjTME1A== +-----END CERTIFICATE----- + +XRamp Global CA Root +==================== +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCBgjELMAkGA1UE +BhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2Vj +dXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwHhcNMDQxMTAxMTcxNDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMx +HjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkg +U2VydmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3Jp +dHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS638eMpSe2OAtp87ZOqCwu +IR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCPKZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMx +foArtYzAQDsRhtDLooY2YKTVMIJt2W7QDxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FE +zG+gSqmUsE3a56k0enI4qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqs +AxcZZPRaJSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNViPvry +xS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1Ud +EwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASsjVy16bYbMDYGA1UdHwQvMC0wK6Ap +oCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMC +AQEwDQYJKoZIhvcNAQEFBQADggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc +/Kh4ZzXxHfARvbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt +qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLaIR9NmXmd4c8n +nxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSyi6mx5O+aGtA9aZnuqCij4Tyz +8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQO+7ETPTsJ3xCwnR8gooJybQDJbw= +-----END CERTIFICATE----- + +Go Daddy Class 2 CA +=================== +-----BEGIN CERTIFICATE----- +MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMY +VGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkG +A1UEBhMCVVMxITAfBgNVBAoTGFRoZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28g +RGFkZHkgQ2xhc3MgMiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQAD +ggENADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCAPVYYYwhv +2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6wwdhFJ2+qN1j3hybX2C32 +qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXiEqITLdiOr18SPaAIBQi2XKVlOARFmR6j 
+YGB0xUGlcmIbYsUfb18aQr4CUWWoriMYavx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmY +vLEHZ6IVDd2gWMZEewo+YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0O +BBYEFNLEsNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h/t2o +atTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMu +MTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwG +A1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wim +PQoZ+YeAEW5p5JYXMP80kWNyOO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKt +I3lpjbi2Tc7PTMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ +HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mERdEr/VxqHD3VI +Ls9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5CufReYNnyicsbkqWletNw+vHX/b +vZ8= +-----END CERTIFICATE----- + +Starfield Class 2 CA +==================== +-----BEGIN CERTIFICATE----- +MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzElMCMGA1UEChMc +U3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZpZWxkIENsYXNzIDIg +Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQwNjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBo +MQswCQYDVQQGEwJVUzElMCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAG +A1UECxMpU3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqG +SIb3DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf8MOh2tTY +bitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN+lq2cwQlZut3f+dZxkqZ +JRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVm +epsZGD3/cVE8MC5fvj13c7JdBmzDI1aaK4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSN +F4Azbl5KXZnJHoe0nRrA1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HF +MIHCMB0GA1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fRzt0f +hvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0YXJmaWVsZCBUZWNo +bm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBDbGFzcyAyIENlcnRpZmljYXRpb24g +QXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGs +afPzWdqbAYcaT1epoXkJKtv3L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLM +PUxA2IGvd56Deruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl +xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynpVSJYACPq4xJD +KVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEYWQPJIrSPnNVeKtelttQKbfi3 +QBFGmh95DmK/D5fs4C8fF5Q= +-----END CERTIFICATE----- + +StartCom Certification Authority +================================ +-----BEGIN CERTIFICATE----- +MIIHyTCCBbGgAwIBAgIBATANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJJTDEWMBQGA1UEChMN +U3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmlu +ZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0 +NjM2WhcNMzYwOTE3MTk0NjM2WjB9MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRk +LjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMg +U3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw +ggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZkpMyONvg45iPwbm2xPN1y +o4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rfOQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/ +Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/CJi/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/d +eMotHweXMAEtcnn6RtYTKqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt +2PZE4XNiHzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMMAv+Z +6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w+2OqqGwaVLRcJXrJ +osmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/ +untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVc 
+UjyJthkqcwEKDwOzEmDyei+B26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT +37uMdBNSSwIDAQABo4ICUjCCAk4wDAYDVR0TBAUwAwEB/zALBgNVHQ8EBAMCAa4wHQYDVR0OBBYE +FE4L7xqkQFulF2mHMMo0aEPQQa7yMGQGA1UdHwRdMFswLKAqoCiGJmh0dHA6Ly9jZXJ0LnN0YXJ0 +Y29tLm9yZy9zZnNjYS1jcmwuY3JsMCugKaAnhiVodHRwOi8vY3JsLnN0YXJ0Y29tLm9yZy9zZnNj +YS1jcmwuY3JsMIIBXQYDVR0gBIIBVDCCAVAwggFMBgsrBgEEAYG1NwEBATCCATswLwYIKwYBBQUH +AgEWI2h0dHA6Ly9jZXJ0LnN0YXJ0Y29tLm9yZy9wb2xpY3kucGRmMDUGCCsGAQUFBwIBFilodHRw +Oi8vY2VydC5zdGFydGNvbS5vcmcvaW50ZXJtZWRpYXRlLnBkZjCB0AYIKwYBBQUHAgIwgcMwJxYg +U3RhcnQgQ29tbWVyY2lhbCAoU3RhcnRDb20pIEx0ZC4wAwIBARqBl0xpbWl0ZWQgTGlhYmlsaXR5 +LCByZWFkIHRoZSBzZWN0aW9uICpMZWdhbCBMaW1pdGF0aW9ucyogb2YgdGhlIFN0YXJ0Q29tIENl +cnRpZmljYXRpb24gQXV0aG9yaXR5IFBvbGljeSBhdmFpbGFibGUgYXQgaHR0cDovL2NlcnQuc3Rh +cnRjb20ub3JnL3BvbGljeS5wZGYwEQYJYIZIAYb4QgEBBAQDAgAHMDgGCWCGSAGG+EIBDQQrFilT +dGFydENvbSBGcmVlIFNTTCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTANBgkqhkiG9w0BAQUFAAOC +AgEAFmyZ9GYMNPXQhV59CuzaEE44HF7fpiUFS5Eyweg78T3dRAlbB0mKKctmArexmvclmAk8jhvh +3TaHK0u7aNM5Zj2gJsfyOZEdUauCe37Vzlrk4gNXcGmXCPleWKYK34wGmkUWFjgKXlf2Ysd6AgXm +vB618p70qSmD+LIU424oh0TDkBreOKk8rENNZEXO3SipXPJzewT4F+irsfMuXGRuczE6Eri8sxHk +fY+BUZo7jYn0TZNmezwD7dOaHZrzZVD1oNB1ny+v8OqCQ5j4aZyJecRDjkZy42Q2Eq/3JR44iZB3 +fsNrarnDy0RLrHiQi+fHLB5LEUTINFInzQpdn4XBidUaePKVEFMy3YCEZnXZtWgo+2EuvoSoOMCZ +EoalHmdkrQYuL6lwhceWD3yJZfWOQ1QOq92lgDmUYMA0yZZwLKMS9R9Ie70cfmu3nZD0Ijuu+Pwq +yvqCUqDvr0tVk+vBtfAii6w0TiYiBKGHLHVKt+V9E9e4DGTANtLJL4YSjCMJwRuCO3NJo2pXh5Tl +1njFmUNj403gdy3hZZlyaQQaRwnmDwFWJPsfvw55qVguucQJAX6Vum0ABj6y6koQOdjQK/W/7HW/ +lwLFCRsI3FU34oH7N4RDYiDK51ZLZer+bMEkkyShNOsF/5oirpt9P/FlUQqmMGqz9IgcgA38coro +g14= +-----END CERTIFICATE----- + +Taiwan GRCA +=========== +-----BEGIN CERTIFICATE----- +MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/MQswCQYDVQQG +EwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4X +DTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1owPzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dv +dmVybm1lbnQgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQAD +ggIPADCCAgoCggIBAJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qN +w8XRIePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1qgQdW8or5 +BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKyyhwOeYHWtXBiCAEuTk8O +1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAtsF/tnyMKtsc2AtJfcdgEWFelq16TheEfO +htX7MfP6Mb40qij7cEwdScevLJ1tZqa2jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wov +J5pGfaENda1UhhXcSTvxls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7 +Q3hub/FCVGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHKYS1t +B6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoHEgKXTiCQ8P8NHuJB +O9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThNXo+EHWbNxWCWtFJaBYmOlXqYwZE8 +lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1UdDgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNV +HRMEBTADAQH/MDkGBGcqBwAEMTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg2 +09yewDL7MTqKUWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ +TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyfqzvS/3WXy6Tj +Zwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaKZEk9GhiHkASfQlK3T8v+R0F2 +Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFEJPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlU +D7gsL0u8qV1bYH+Mh6XgUmMqvtg7hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6Qz +DxARvBMB1uUO07+1EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+Hbk +Z6MmnD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WXudpVBrkk +7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44VbnzssQwmSNOXfJIoRIM3BKQ 
+CZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDeLMDDav7v3Aun+kbfYNucpllQdSNpc5Oy ++fwC00fmcc4QAu4njIT/rEUNE1yDMuAlpYYsfPQS +-----END CERTIFICATE----- + +Firmaprofesional Root CA +======================== +-----BEGIN CERTIFICATE----- +MIIEVzCCAz+gAwIBAgIBATANBgkqhkiG9w0BAQUFADCBnTELMAkGA1UEBhMCRVMxIjAgBgNVBAcT +GUMvIE11bnRhbmVyIDI0NCBCYXJjZWxvbmExQjBABgNVBAMTOUF1dG9yaWRhZCBkZSBDZXJ0aWZp +Y2FjaW9uIEZpcm1hcHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODEmMCQGCSqGSIb3DQEJARYXY2FA +ZmlybWFwcm9mZXNpb25hbC5jb20wHhcNMDExMDI0MjIwMDAwWhcNMTMxMDI0MjIwMDAwWjCBnTEL +MAkGA1UEBhMCRVMxIjAgBgNVBAcTGUMvIE11bnRhbmVyIDI0NCBCYXJjZWxvbmExQjBABgNVBAMT +OUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1hcHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2 +ODEmMCQGCSqGSIb3DQEJARYXY2FAZmlybWFwcm9mZXNpb25hbC5jb20wggEiMA0GCSqGSIb3DQEB +AQUAA4IBDwAwggEKAoIBAQDnIwNvbyOlXnjOlSztlB5uCp4Bx+ow0Syd3Tfom5h5VtP8c9/Qit5V +j1H5WuretXDE7aTt/6MNbg9kUDGvASdYrv5sp0ovFy3Tc9UTHI9ZpTQsHVQERc1ouKDAA6XPhUJH +lShbz++AbOCQl4oBPB3zhxAwJkh91/zpnZFx/0GaqUC1N5wpIE8fUuOgfRNtVLcK3ulqTgesrBlf +3H5idPayBQC6haD9HThuy1q7hryUZzM1gywfI834yJFxzJeL764P3CkDG8A563DtwW4O2GcLiam8 +NeTvtjS0pbbELaW+0MOUJEjb35bTALVmGotmBQ/dPz/LP6pemkr4tErvlTcbAgMBAAGjgZ8wgZww +KgYDVR0RBCMwIYYfaHR0cDovL3d3dy5maXJtYXByb2Zlc2lvbmFsLmNvbTASBgNVHRMBAf8ECDAG +AQH/AgEBMCsGA1UdEAQkMCKADzIwMDExMDI0MjIwMDAwWoEPMjAxMzEwMjQyMjAwMDBaMA4GA1Ud +DwEB/wQEAwIBBjAdBgNVHQ4EFgQUMwugZtHq2s7eYpMEKFK1FH84aLcwDQYJKoZIhvcNAQEFBQAD +ggEBAEdz/o0nVPD11HecJ3lXV7cVVuzH2Fi3AQL0M+2TUIiefEaxvT8Ub/GzR0iLjJcG1+p+o1wq +u00vR+L4OQbJnC4xGgN49Lw4xiKLMzHwFgQEffl25EvXwOaD7FnMP97/T2u3Z36mhoEyIwOdyPdf +wUpgpZKpsaSgYMN4h7Mi8yrrW6ntBas3D7Hi05V2Y1Z0jFhyGzflZKG+TQyTmAyX9odtsz/ny4Cm +7YjHX1BiAuiZdBbQ5rQ58SfLyEDW44YQqSMSkuBpQWOnryULwMWSyx6Yo1q6xTMPoJcB3X/ge9YG +VM+h4k0460tQtcsm9MracEpqoeJ5quGnM/b9Sh/22WA= +-----END CERTIFICATE----- + +Wells Fargo Root CA +=================== +-----BEGIN CERTIFICATE----- +MIID5TCCAs2gAwIBAgIEOeSXnjANBgkqhkiG9w0BAQUFADCBgjELMAkGA1UEBhMCVVMxFDASBgNV +BAoTC1dlbGxzIEZhcmdvMSwwKgYDVQQLEyNXZWxscyBGYXJnbyBDZXJ0aWZpY2F0aW9uIEF1dGhv +cml0eTEvMC0GA1UEAxMmV2VsbHMgRmFyZ28gUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwHhcN +MDAxMDExMTY0MTI4WhcNMjEwMTE0MTY0MTI4WjCBgjELMAkGA1UEBhMCVVMxFDASBgNVBAoTC1dl +bGxzIEZhcmdvMSwwKgYDVQQLEyNXZWxscyBGYXJnbyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEv +MC0GA1UEAxMmV2VsbHMgRmFyZ28gUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDVqDM7Jvk0/82bfuUER84A4n135zHCLielTWi5MbqNQ1mX +x3Oqfz1cQJ4F5aHiidlMuD+b+Qy0yGIZLEWukR5zcUHESxP9cMIlrCL1dQu3U+SlK93OvRw6esP3 +E48mVJwWa2uv+9iWsWCaSOAlIiR5NM4OJgALTqv9i86C1y8IcGjBqAr5dE8Hq6T54oN+J3N0Prj5 +OEL8pahbSCOz6+MlsoCultQKnMJ4msZoGK43YjdeUXWoWGPAUe5AeH6orxqg4bB4nVCMe+ez/I4j +sNtlAHCEAQgAFG5Uhpq6zPk3EPbg3oQtnaSFN9OH4xXQwReQfhkhahKpdv0SAulPIV4XAgMBAAGj +YTBfMA8GA1UdEwEB/wQFMAMBAf8wTAYDVR0gBEUwQzBBBgtghkgBhvt7hwcBCzAyMDAGCCsGAQUF +BwIBFiRodHRwOi8vd3d3LndlbGxzZmFyZ28uY29tL2NlcnRwb2xpY3kwDQYJKoZIhvcNAQEFBQAD +ggEBANIn3ZwKdyu7IvICtUpKkfnRLb7kuxpo7w6kAOnu5+/u9vnldKTC2FJYxHT7zmu1Oyl5GFrv +m+0fazbuSCUlFLZWohDo7qd/0D+j0MNdJu4HzMPBJCGHHt8qElNvQRbn7a6U+oxy+hNH8Dx+rn0R +OhPs7fpvcmR7nX1/Jv16+yWt6j4pf0zjAFcysLPp7VMX2YuyFA4w6OXVE8Zkr8QA1dhYJPz1j+zx +x32l2w8n0cbyQIjmH/ZhqPRCyLk306m+LFZ4wnKbWV01QIroTmMatukgalHizqSQ33ZwmVxwQ023 +tqcZZE6St8WRPH9IFmV7Fv3L/PvZ1dZPIWU7Sn9Ho/s= +-----END CERTIFICATE----- + +Swisscom Root CA 1 +================== +-----BEGIN CERTIFICATE----- +MIIF2TCCA8GgAwIBAgIQXAuFXAvnWUHfV8w/f52oNjANBgkqhkiG9w0BAQUFADBkMQswCQYDVQQG +EwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0YWwgQ2VydGlmaWNhdGUgU2Vy 
+dmljZXMxGzAZBgNVBAMTElN3aXNzY29tIFJvb3QgQ0EgMTAeFw0wNTA4MTgxMjA2MjBaFw0yNTA4 +MTgyMjA2MjBaMGQxCzAJBgNVBAYTAmNoMREwDwYDVQQKEwhTd2lzc2NvbTElMCMGA1UECxMcRGln +aXRhbCBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczEbMBkGA1UEAxMSU3dpc3Njb20gUm9vdCBDQSAxMIIC +IjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA0LmwqAzZuz8h+BvVM5OAFmUgdbI9m2BtRsiM +MW8Xw/qabFbtPMWRV8PNq5ZJkCoZSx6jbVfd8StiKHVFXqrWW/oLJdihFvkcxC7mlSpnzNApbjyF +NDhhSbEAn9Y6cV9Nbc5fuankiX9qUvrKm/LcqfmdmUc/TilftKaNXXsLmREDA/7n29uj/x2lzZAe +AR81sH8A25Bvxn570e56eqeqDFdvpG3FEzuwpdntMhy0XmeLVNxzh+XTF3xmUHJd1BpYwdnP2IkC +b6dJtDZd0KTeByy2dbcokdaXvij1mB7qWybJvbCXc9qukSbraMH5ORXWZ0sKbU/Lz7DkQnGMU3nn +7uHbHaBuHYwadzVcFh4rUx80i9Fs/PJnB3r1re3WmquhsUvhzDdf/X/NTa64H5xD+SpYVUNFvJbN +cA78yeNmuk6NO4HLFWR7uZToXTNShXEuT46iBhFRyePLoW4xCGQMwtI89Tbo19AOeCMgkckkKmUp +WyL3Ic6DXqTz3kvTaI9GdVyDCW4pa8RwjPWd1yAv/0bSKzjCL3UcPX7ape8eYIVpQtPM+GP+HkM5 +haa2Y0EQs3MevNP6yn0WR+Kn1dCjigoIlmJWbjTb2QK5MHXjBNLnj8KwEUAKrNVxAmKLMb7dxiNY +MUJDLXT5xp6mig/p/r+D5kNXJLrvRjSq1xIBOO0CAwEAAaOBhjCBgzAOBgNVHQ8BAf8EBAMCAYYw +HQYDVR0hBBYwFDASBgdghXQBUwABBgdghXQBUwABMBIGA1UdEwEB/wQIMAYBAf8CAQcwHwYDVR0j +BBgwFoAUAyUv3m+CATpcLNwroWm1Z9SM0/0wHQYDVR0OBBYEFAMlL95vggE6XCzcK6FptWfUjNP9 +MA0GCSqGSIb3DQEBBQUAA4ICAQA1EMvspgQNDQ/NwNurqPKIlwzfky9NfEBWMXrrpA9gzXrzvsMn +jgM+pN0S734edAY8PzHyHHuRMSG08NBsl9Tpl7IkVh5WwzW9iAUPWxAaZOHHgjD5Mq2eUCzneAXQ +MbFamIp1TpBcahQq4FJHgmDmHtqBsfsUC1rxn9KVuj7QG9YVHaO+htXbD8BJZLsuUBlL0iT43R4H +VtA4oJVwIHaM190e3p9xxCPvgxNcoyQVTSlAPGrEqdi3pkSlDfTgnXceQHAm/NrZNuR55LU/vJtl +vrsRls/bxig5OgjOR1tTWsWZ/l2p3e9M1MalrQLmjAcSHm8D0W+go/MpvRLHUKKwf4ipmXeascCl +OS5cfGniLLDqN2qk4Vrh9VDlg++luyqI54zb/W1elxmofmZ1a3Hqv7HHb6D0jqTsNFFbjCYDcKF3 +1QESVwA12yPeDooomf2xEG9L/zgtYE4snOtnta1J7ksfrK/7DZBaZmBwXarNeNQk7shBoJMBkpxq +nvy5JMWzFYJ+vq6VK+uxwNrjAWALXmmshFZhvnEX/h0TD/7Gh0Xp/jKgGg0TpJRVcaUWi7rKibCy +x/yP2FS1k2Kdzs9Z+z0YzirLNRWCXf9UIltxUvu3yf5gmwBBZPCqKuy2QkPOiWaByIufOVQDJdMW +NY6E0F/6MBr1mmz0DlP5OlvRHA== +-----END CERTIFICATE----- + +DigiCert Assured ID Root CA +=========================== +-----BEGIN CERTIFICATE----- +MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSQw +IgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzEx +MTEwMDAwMDAwWjBlMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQL +ExB3d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0Ew +ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7cJpSIqvTO +9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYPmDI2dsze3Tyoou9q+yHy +UmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW +/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpy +oeb6pNnVFzF1roV9Iq4/AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whf +GHdPAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRF +66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYunpyGd823IDzANBgkq +hkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRCdWKuh+vy1dneVrOfzM4UKLkNl2Bc +EkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTffwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38Fn +SbNd67IJKusm7Xi+fT8r87cmNW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i +8b5QZ7dsvfPxH2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe ++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== +-----END CERTIFICATE----- + +DigiCert Global Root CA +======================= +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBhMQswCQYDVQQG 
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSAw +HgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBDQTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAw +MDAwMDBaMGExCzAJBgNVBAYTAlVTMRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3 +dy5kaWdpY2VydC5jb20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkq +hkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsBCSDMAZOn +TjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97nh6Vfe63SKMI2tavegw5 +BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt43C/dxC//AH2hdmoRBBYMql1GNXRor5H +4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7PT19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y +7vrTC0LUq7dBMtoM1O/4gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQAB +o2MwYTAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbRTLtm +8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUwDQYJKoZIhvcNAQEF +BQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/EsrhMAtudXH/vTBH1jLuG2cenTnmCmr +EbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIt +tep3Sp+dWOIrWcBAI+0tKIJFPnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886 +UAb3LujEV0lsYSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + +DigiCert High Assurance EV Root CA +================================== +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBsMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSsw +KQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5jZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAw +MFoXDTMxMTExMDAwMDAwMFowbDELMAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZ +MBcGA1UECxMQd3d3LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFu +Y2UgRVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm+9S75S0t +Mqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTWPNt0OKRKzE0lgvdKpVMS +OO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEMxChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3 +MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFBIk5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQ +NAQTXKFx01p8VdteZOE3hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUe +h10aUAsgEsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMB +Af8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaAFLE+w2kD+L9HAdSY +JhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3NecnzyIZgYIVyHbIUf4KmeqvxgydkAQ +V8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6zeM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFp +myPInngiK3BD41VHMWEZ71jFhS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkK +mNEVX58Svnw2Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep+OkuE6N36B9K +-----END CERTIFICATE----- + +Certplus Class 2 Primary CA +=========================== +-----BEGIN CERTIFICATE----- +MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAwPTELMAkGA1UE +BhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFzcyAyIFByaW1hcnkgQ0EwHhcN +OTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2Vy +dHBsdXMxGzAZBgNVBAMTEkNsYXNzIDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBANxQltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR +5aiRVhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyLkcAbmXuZ +Vg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCdEgETjdyAYveVqUSISnFO +YFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yasH7WLO7dDWWuwJKZtkIvEcupdM5i3y95e +e++U8Rs+yskhwcWYAqqi9lt3m/V+llU0HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRME 
+CDAGAQH/AgEKMAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJ +YIZIAYb4QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMuY29t +L0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/AN9WM2K191EBkOvD +P9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8yfFC82x/xXp8HVGIutIKPidd3i1R +TtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMRFcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+ +7UCmnYR0ObncHoUW2ikbhiMAybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW +//1IMwrh3KWBkJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7 +l7+ijrRU +-----END CERTIFICATE----- + +DST Root CA X3 +============== +-----BEGIN CERTIFICATE----- +MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/MSQwIgYDVQQK +ExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMTDkRTVCBSb290IENBIFgzMB4X +DTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVowPzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1 +cmUgVHJ1c3QgQ28uMRcwFQYDVQQDEw5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQAD +ggEPADCCAQoCggEBAN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmT +rE4Orz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEqOLl5CjH9 +UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9bxiqKqy69cK3FCxolkHRy +xXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40d +utolucbY38EVAjqr2m7xPi71XAicPNaDaeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0T +AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQ +MA0GCSqGSIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69ikug +dB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXrAvHRAosZy5Q6XkjE +GB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZzR8srzJmwN0jP41ZL9c8PDHIyh8bw +RLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubS +fZGL+T0yjWW06XyxV3bqxbYoOb8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ +-----END CERTIFICATE----- + +DST ACES CA X6 +============== +-----BEGIN CERTIFICATE----- +MIIECTCCAvGgAwIBAgIQDV6ZCtadt3js2AdWO4YV2TANBgkqhkiG9w0BAQUFADBbMQswCQYDVQQG +EwJVUzEgMB4GA1UEChMXRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QxETAPBgNVBAsTCERTVCBBQ0VT +MRcwFQYDVQQDEw5EU1QgQUNFUyBDQSBYNjAeFw0wMzExMjAyMTE5NThaFw0xNzExMjAyMTE5NTha +MFsxCzAJBgNVBAYTAlVTMSAwHgYDVQQKExdEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdDERMA8GA1UE +CxMIRFNUIEFDRVMxFzAVBgNVBAMTDkRTVCBBQ0VTIENBIFg2MIIBIjANBgkqhkiG9w0BAQEFAAOC +AQ8AMIIBCgKCAQEAuT31LMmU3HWKlV1j6IR3dma5WZFcRt2SPp/5DgO0PWGSvSMmtWPuktKe1jzI +DZBfZIGxqAgNTNj50wUoUrQBJcWVHAx+PhCEdc/BGZFjz+iokYi5Q1K7gLFViYsx+tC3dr5BPTCa +pCIlF3PoHuLTrCq9Wzgh1SpL11V94zpVvddtawJXa+ZHfAjIgrrep4c9oW24MFbCswKBXy314pow +GCi4ZtPLAZZv6opFVdbgnf9nKxcCpk4aahELfrd755jWjHZvwTvbUJN+5dCOHze4vbrGn2zpfDPy +MjwmR/onJALJfh1biEITajV8fTXpLmaRcpPVMibEdPVTo7NdmvYJywIDAQABo4HIMIHFMA8GA1Ud +EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgHGMB8GA1UdEQQYMBaBFHBraS1vcHNAdHJ1c3Rkc3Qu +Y29tMGIGA1UdIARbMFkwVwYKYIZIAWUDAgEBATBJMEcGCCsGAQUFBwIBFjtodHRwOi8vd3d3LnRy +dXN0ZHN0LmNvbS9jZXJ0aWZpY2F0ZXMvcG9saWN5L0FDRVMtaW5kZXguaHRtbDAdBgNVHQ4EFgQU +CXIGThhDD+XWzMNqizF7eI+og7gwDQYJKoZIhvcNAQEFBQADggEBAKPYjtay284F5zLNAdMEA+V2 +5FYrnJmQ6AgwbN99Pe7lv7UkQIRJ4dEorsTCOlMwiPH1d25Ryvr/ma8kXxug/fKshMrfqfBfBC6t +Fr8hlxCBPeP/h40y3JTlR4peahPJlJU90u7INJXQgNStMgiAVDzgvVJT11J8smk/f3rPanTK+gQq +nExaBqXpIK1FZg9p8d2/6eMyi/rgwYZNcjwu2JN4Cir42NInPRmJX1p7ijvMDNpRrscL9yuwNwXs +vFcj4jjSm2jzVhKIT0J8uDHEtdvkyCE06UgRNe76x5JXxZ805Mf29w4LTJxoeHtxMcfrHuBnQfO3 +oKfN5XozNmr6mis= +-----END CERTIFICATE----- + +SwissSign Gold CA - G2 +====================== +-----BEGIN CERTIFICATE----- +MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNVBAYTAkNIMRUw 
+EwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2lnbiBHb2xkIENBIC0gRzIwHhcN +MDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBFMQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dp +c3NTaWduIEFHMR8wHQYDVQQDExZTd2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0B +AQEFAAOCAg8AMIICCgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUq +t2/876LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+bbqBHH5C +jCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c6bM8K8vzARO/Ws/BtQpg +vd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqEemA8atufK+ze3gE/bk3lUIbLtK/tREDF +ylqM2tIrfKjuvqblCqoOpd8FUrdVxyJdMmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvR +AiTysybUa9oEVeXBCsdtMDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuend +jIj3o02yMszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69yFGkO +peUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPiaG59je883WX0XaxR +7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxMgI93e2CaHt+28kgeDrpOVG2Y4OGi +GqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUWyV7lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64 +OfPAeGZe6Drn8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov +L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe645R88a7A3hfm +5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczOUYrHUDFu4Up+GC9pWbY9ZIEr +44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOf +Mke6UiI0HTJ6CVanfCU2qT1L2sCCbwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6m +Gu6uLftIdxf+u+yvGPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxp +mo/a77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCChdiDyyJk +vC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid392qgQmwLOM7XdVAyksLf +KzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEppLd6leNcG2mqeSz53OiATIgHQv2ieY2Br +NU0LbbqhPcCT4H8js1WtciVORvnSFu+wZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6Lqj +viOvrv1vA+ACOzB2+httQc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ +-----END CERTIFICATE----- + +SwissSign Silver CA - G2 +======================== +-----BEGIN CERTIFICATE----- +MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCQ0gxFTAT +BgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMB4X +DTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0NlowRzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3 +aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG +9w0BAQEFAAOCAg8AMIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644 +N0MvFz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7brYT7QbNHm ++/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieFnbAVlDLaYQ1HTWBCrpJH +6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH6ATK72oxh9TAtvmUcXtnZLi2kUpCe2Uu +MGoM9ZDulebyzYLs2aFK7PayS+VFheZteJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5h +qAaEuSh6XzjZG6k4sIN/c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5 +FZGkECwJMoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRHHTBs +ROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTfjNFusB3hB48IHpmc +celM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb65i/4z3GcRm25xBWNOHkDRUjvxF3X +CO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOBrDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQUF6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRB +tjpbO8tFnb0cwpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 +cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBAHPGgeAn0i0P +4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShpWJHckRE1qTodvBqlYJ7YH39F +kWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L 
+3XWgwF15kIwb4FDm3jH+mHtwX6WQ2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx +/uNncqCxv1yL5PqZIseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFa +DGi8aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2Xem1ZqSqP +e97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQRdAtq/gsD/KNVV4n+Ssuu +WxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJ +DIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ub +DgEj8Z+7fNzcbBGXJbLytGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u +-----END CERTIFICATE----- + +GeoTrust Primary Certification Authority +======================================== +-----BEGIN CERTIFICATE----- +MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQG +EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMoR2VvVHJ1c3QgUHJpbWFyeSBD +ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjExMjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgx +CzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQ +cmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB +CgKCAQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9AWbK7hWN +b6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjAZIVcFU2Ix7e64HXprQU9 +nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE07e9GceBrAqg1cmuXm2bgyxx5X9gaBGge +RwLmnWDiNpcB3841kt++Z8dtd1k7j53WkBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGt +tm/81w7a4DSwDRp35+MImO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJKoZI +hvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ16CePbJC/kRYkRj5K +Ts4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl4b7UVXGYNTq+k+qurUKykG/g/CFN +NWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6KoKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHa +Floxt/m0cYASSJlyc1pZU8FjUjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG +1riR/aYNKxoUAT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk= +-----END CERTIFICATE----- + +thawte Primary Root CA +====================== +-----BEGIN CERTIFICATE----- +MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCBqTELMAkGA1UE +BhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2 +aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhv +cml6ZWQgdXNlIG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3 +MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwg +SW5jLjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMv +KGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNVBAMT +FnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCs +oPD7gFnUnMekz52hWXMJEEUMDSxuaPFsW0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ +1CRfBsDMRJSUjQJib+ta3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGc +q/gcfomk6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6Sk/K +aAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94JNqR32HuHUETVPm4p +afs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XPr87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUF +AAOCAQEAeRHAS7ORtvzw6WfUDW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeE +uzLlQRHAd9mzYJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX +xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2/qxAeeWsEG89 +jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/LHbTY5xZ3Y+m4Q6gLkH3LpVH +z7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7jVaMaA== +-----END CERTIFICATE----- + +VeriSign Class 3 Public Primary Certification Authority - G5 
+============================================================ +-----BEGIN CERTIFICATE----- +MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCByjELMAkGA1UE +BhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBO +ZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVk +IHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCB +yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJpU2ln +biBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2lnbiwgSW5jLiAtIEZvciBh +dXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmlt +YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw +ggEKAoIBAQCvJAgIKXo1nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKz +j/i5Vbext0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIzSdhD +Y2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQGBO+QueQA5N06tRn/ +Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+rCpSx4/VBEnkjWNHiDxpg8v+R70r +fk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/ +BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2Uv +Z2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy +aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKvMzEzMA0GCSqG +SIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzEp6B4Eq1iDkVwZMXnl2YtmAl+ +X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKE +KQsTb47bDN0lAtukixlE0kF6BWlKWE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiC +Km0oHw0LxOXnGiYZ4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vE +ZV8NhnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq +-----END CERTIFICATE----- + +SecureTrust CA +============== +-----BEGIN CERTIFICATE----- +MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBIMQswCQYDVQQG +EwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24xFzAVBgNVBAMTDlNlY3VyZVRy +dXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIzMTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAe +BgNVBAoTF1NlY3VyZVRydXN0IENvcnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQX +OZEzZum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO0gMdA+9t +DWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIaowW8xQmxSPmjL8xk037uH +GFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b +01k/unK8RCSc43Oz969XL0Imnal0ugBS8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmH +ursCAwEAAaOBnTCBmjATBgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCegJYYj +aHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQAwDQYJ +KoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt36Z3q059c4EVlew3KW+JwULKUBRSu +SceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHf +mbx8IVQr5Fiiu1cprp6poxkmD5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZ +nMUFdAvnZyPSCPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR +3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= +-----END CERTIFICATE----- + +Secure Global CA +================ +-----BEGIN CERTIFICATE----- +MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBKMQswCQYDVQQG +EwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBH +bG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkxMjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEg 
+MB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwg +Q0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jx +YDiJiQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa/FHtaMbQ +bqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJjnIFHovdRIWCQtBJwB1g +8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnIHmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYV +HDGA76oYa8J719rO+TMg1fW9ajMtgQT7sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi +0XPnj3pDAgMBAAGjgZ0wgZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1Ud +EwEB/wQFMAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCswKaAn +oCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsGAQQBgjcVAQQDAgEA +MA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0LURYD7xh8yOOvaliTFGCRsoTciE6+ +OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXOH0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cn +CDpOGR86p1hcF895P4vkp9MmI50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/5 +3CYNv6ZHdAbYiNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc +f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW +-----END CERTIFICATE----- + +COMODO Certification Authority +============================== +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCBgTELMAkGA1UE +BhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgG +A1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNVBAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1 +dGhvcml0eTAeFw0wNjEyMDEwMDAwMDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEb +MBkGA1UECBMSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFD +T01PRE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3UcEbVASY06m/weaKXTuH ++7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI2GqGd0S7WWaXUF601CxwRM/aN5VCaTww +xHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV +4EajcNxo2f8ESIl33rXp+2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA +1KGzqSX+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5OnKVI +rLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW/zAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6gPKA6hjhodHRwOi8vY3JsLmNvbW9k +b2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9uQXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOC +AQEAPpiem/Yb6dc5t3iuHXIYSdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CP +OGEIqB6BCsAvIC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4zJVSk/BwJVmc +IGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5ddBA6+C4OmF4O5MBKgxTMVBbkN ++8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IBZQ== +-----END CERTIFICATE----- + +Network Solutions Certificate Authority +======================================= +-----BEGIN CERTIFICATE----- +MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBiMQswCQYDVQQG +EwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydOZXR3b3Jr +IFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMx +MjM1OTU5WjBiMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu +MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwzc7MEL7xx +jOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPPOCwGJgl6cvf6UDL4wpPT +aaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rlmGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXT +crA/vGp97Eh/jcOrqnErU2lBUzS1sLnFBgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc 
+/Qzpf14Dl847ABSHJ3A4qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMB +AAGjgZcwgZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwubmV0c29sc3NsLmNv +bS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3JpdHkuY3JsMA0GCSqGSIb3DQEBBQUA +A4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc86fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q +4LqILPxFzBiwmZVRDuwduIj/h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/ +GGUsyfJj4akH/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv +wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHNpGxlaKFJdlxD +ydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey +-----END CERTIFICATE----- + +WellsSecure Public Root Certificate Authority +============================================= +-----BEGIN CERTIFICATE----- +MIIEvTCCA6WgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBhTELMAkGA1UEBhMCVVMxIDAeBgNVBAoM +F1dlbGxzIEZhcmdvIFdlbGxzU2VjdXJlMRwwGgYDVQQLDBNXZWxscyBGYXJnbyBCYW5rIE5BMTYw +NAYDVQQDDC1XZWxsc1NlY3VyZSBQdWJsaWMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwHhcN +MDcxMjEzMTcwNzU0WhcNMjIxMjE0MDAwNzU0WjCBhTELMAkGA1UEBhMCVVMxIDAeBgNVBAoMF1dl +bGxzIEZhcmdvIFdlbGxzU2VjdXJlMRwwGgYDVQQLDBNXZWxscyBGYXJnbyBCYW5rIE5BMTYwNAYD +VQQDDC1XZWxsc1NlY3VyZSBQdWJsaWMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDub7S9eeKPCCGeOARBJe+rWxxTkqxtnt3CxC5FlAM1 +iGd0V+PfjLindo8796jE2yljDpFoNoqXjopxaAkH5OjUDk/41itMpBb570OYj7OeUt9tkTmPOL13 +i0Nj67eT/DBMHAGTthP796EfvyXhdDcsHqRePGj4S78NuR4uNuip5Kf4D8uCdXw1LSLWwr8L87T8 +bJVhHlfXBIEyg1J55oNjz7fLY4sR4r1e6/aN7ZVyKLSsEmLpSjPmgzKuBXWVvYSV2ypcm44uDLiB +K0HmOFafSZtsdvqKXfcBeYF8wYNABf5x/Qw/zE5gCQ5lRxAvAcAFP4/4s0HvWkJ+We/SlwxlAgMB +AAGjggE0MIIBMDAPBgNVHRMBAf8EBTADAQH/MDkGA1UdHwQyMDAwLqAsoCqGKGh0dHA6Ly9jcmwu +cGtpLndlbGxzZmFyZ28uY29tL3dzcHJjYS5jcmwwDgYDVR0PAQH/BAQDAgHGMB0GA1UdDgQWBBQm +lRkQ2eihl5H/3BnZtQQ+0nMKajCBsgYDVR0jBIGqMIGngBQmlRkQ2eihl5H/3BnZtQQ+0nMKaqGB +i6SBiDCBhTELMAkGA1UEBhMCVVMxIDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdlbGxzU2VjdXJlMRww +GgYDVQQLDBNXZWxscyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQDDC1XZWxsc1NlY3VyZSBQdWJsaWMg +Um9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHmCAQEwDQYJKoZIhvcNAQEFBQADggEBALkVsUSRzCPI +K0134/iaeycNzXK7mQDKfGYZUMbVmO2rvwNa5U3lHshPcZeG1eMd/ZDJPHV3V3p9+N701NX3leZ0 +bh08rnyd2wIDBSxxSyU+B+NemvVmFymIGjifz6pBA4SXa5M4esowRBskRDPQ5NHcKDj0E0M1NSlj +qHyita04pO2t/caaH/+Xc/77szWnk4bGdpEA5qxRFsQnMlzbc9qlk1eOPm01JghZ1edE13YgY+es +E2fDbbFwRnzVlhE9iW9dqKHrjQrawx0zbKPqZxmamX9LPYNRKh3KL4YMon4QLSvUFpULB6ouFJJJ +tylv2G0xffX8oRAHh84vWdw+WNs= +-----END CERTIFICATE----- + +COMODO ECC Certification Authority +================================== +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTELMAkGA1UEBhMC +R0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UE +ChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwHhcNMDgwMzA2MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0Ix +GzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR +Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRo +b3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSRFtSrYpn1PlILBs5BAH+X +4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0JcfRK9ChQtP6IHG4/bC8vCVlbpVsLM5ni +wz2J+Wos77LTBumjQjBAMB0GA1UdDgQWBBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VG +FAkK+qDmfQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdvGDeA +U/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END CERTIFICATE----- + +IGC/A 
+===== +-----BEGIN CERTIFICATE----- +MIIEAjCCAuqgAwIBAgIFORFFEJQwDQYJKoZIhvcNAQEFBQAwgYUxCzAJBgNVBAYTAkZSMQ8wDQYD +VQQIEwZGcmFuY2UxDjAMBgNVBAcTBVBhcmlzMRAwDgYDVQQKEwdQTS9TR0ROMQ4wDAYDVQQLEwVE +Q1NTSTEOMAwGA1UEAxMFSUdDL0ExIzAhBgkqhkiG9w0BCQEWFGlnY2FAc2dkbi5wbS5nb3V2LmZy +MB4XDTAyMTIxMzE0MjkyM1oXDTIwMTAxNzE0MjkyMlowgYUxCzAJBgNVBAYTAkZSMQ8wDQYDVQQI +EwZGcmFuY2UxDjAMBgNVBAcTBVBhcmlzMRAwDgYDVQQKEwdQTS9TR0ROMQ4wDAYDVQQLEwVEQ1NT +STEOMAwGA1UEAxMFSUdDL0ExIzAhBgkqhkiG9w0BCQEWFGlnY2FAc2dkbi5wbS5nb3V2LmZyMIIB +IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsh/R0GLFMzvABIaIs9z4iPf930Pfeo2aSVz2 +TqrMHLmh6yeJ8kbpO0px1R2OLc/mratjUMdUC24SyZA2xtgv2pGqaMVy/hcKshd+ebUyiHDKcMCW +So7kVc0dJ5S/znIq7Fz5cyD+vfcuiWe4u0dzEvfRNWk68gq5rv9GQkaiv6GFGvm/5P9JhfejcIYy +HF2fYPepraX/z9E0+X1bF8bc1g4oa8Ld8fUzaJ1O/Id8NhLWo4DoQw1VYZTqZDdH6nfK0LJYBcNd +frGoRpAxVs5wKpayMLh35nnAvSk7/ZR3TL0gzUEl4C7HG7vupARB0l2tEmqKm0f7yd1GQOGdPDPQ +tQIDAQABo3cwdTAPBgNVHRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBRjAVBgNVHSAEDjAMMAoGCCqB +egF5AQEBMB0GA1UdDgQWBBSjBS8YYFDCiQrdKyFP/45OqDAxNjAfBgNVHSMEGDAWgBSjBS8YYFDC +iQrdKyFP/45OqDAxNjANBgkqhkiG9w0BAQUFAAOCAQEABdwm2Pp3FURo/C9mOnTgXeQp/wYHE4RK +q89toB9RlPhJy3Q2FLwV3duJL92PoF189RLrn544pEfMs5bZvpwlqwN+Mw+VgQ39FuCIvjfwbF3Q +MZsyK10XZZOYYLxuj7GoPB7ZHPOpJkL5ZB3C55L29B5aqhlSXa/oovdgoPaN8In1buAKBQGVyYsg +Crpa/JosPL3Dt8ldeCUFP1YUmwza+zpI/pdpXsoQhvdOlgQITeywvl3cO45Pwf2aNjSaTFR+FwNI +lQgRHAdvhQh+XU3Endv7rs6y0bO4g2wdsrN58dhwmX7wEwLOXt1R0982gaEbeC9xs/FZTEYYKKuF +0mBWWg== +-----END CERTIFICATE----- + +Security Communication EV RootCA1 +================================= +-----BEGIN CERTIFICATE----- +MIIDfTCCAmWgAwIBAgIBADANBgkqhkiG9w0BAQUFADBgMQswCQYDVQQGEwJKUDElMCMGA1UEChMc +U0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEqMCgGA1UECxMhU2VjdXJpdHkgQ29tbXVuaWNh +dGlvbiBFViBSb290Q0ExMB4XDTA3MDYwNjAyMTIzMloXDTM3MDYwNjAyMTIzMlowYDELMAkGA1UE +BhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKjAoBgNVBAsTIVNl +Y3VyaXR5IENvbW11bmljYXRpb24gRVYgUm9vdENBMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC +AQoCggEBALx/7FebJOD+nLpCeamIivqA4PUHKUPqjgo0No0c+qe1OXj/l3X3L+SqawSERMqm4miO +/VVQYg+kcQ7OBzgtQoVQrTyWb4vVog7P3kmJPdZkLjjlHmy1V4qe70gOzXppFodEtZDkBp2uoQSX +WHnvIEqCa4wiv+wfD+mEce3xDuS4GBPMVjZd0ZoeUWs5bmB2iDQL87PRsJ3KYeJkHcFGB7hj3R4z +ZbOOCVVSPbW9/wfrrWFVGCypaZhKqkDFMxRldAD5kd6vA0jFQFTcD4SQaCDFkpbcLuUCRarAX1T4 +bepJz11sS6/vmsJWXMY1VkJqMF/Cq/biPT+zyRGPMUzXn0kCAwEAAaNCMEAwHQYDVR0OBBYEFDVK +9U2vP9eCOKyrcWUXdYydVZPmMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqG +SIb3DQEBBQUAA4IBAQCoh+ns+EBnXcPBZsdAS5f8hxOQWsTvoMpfi7ent/HWtWS3irO4G8za+6xm +iEHO6Pzk2x6Ipu0nUBsCMCRGef4Eh3CXQHPRwMFXGZpppSeZq51ihPZRwSzJIxXYKLerJRO1RuGG +Av8mjMSIkh1W/hln8lXkgKNrnKt34VFxDSDbEJrbvXZ5B3eZKK2aXtqxT0QsNY6llsf9g/BYxnnW +mHyojf6GPgcWkuF75x3sM3Z+Qi5KhfmRiWiEA4Glm5q+4zfFVKtWOxgtQaQM+ELbmaDgcm+7XeEW +T1MKZPlO9L9OVL14bIjqv5wTJMJwaaJ/D8g8rQjJsJhAoyrniIPtd490 +-----END CERTIFICATE----- + +OISTE WISeKey Global Root GA CA +=============================== +-----BEGIN CERTIFICATE----- +MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCBijELMAkGA1UE +BhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHlyaWdodCAoYykgMjAwNTEiMCAG +A1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBH +bG9iYWwgUm9vdCBHQSBDQTAeFw0wNTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYD +VQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIw +IAYDVQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5 +IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAy0+zAJs9 +Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxRVVuuk+g3/ytr6dTqvirdqFEr12bDYVxg 
+Asj1znJ7O7jyTmUIms2kahnBAbtzptf2w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbD +d50kc3vkDIzh2TbhmYsFmQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ +/yxViJGg4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t94B3R +LoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQwEAYJKwYBBAGCNxUBBAMCAQAwDQYJ +KoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOxSPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vIm +MMkQyh2I+3QZH4VFvbBsUfk2ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4 ++vg1YFkCExh8vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa +hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZiFj4A4xylNoEY +okxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ/L7fCg0= +-----END CERTIFICATE----- + +Microsec e-Szigno Root CA +========================= +-----BEGIN CERTIFICATE----- +MIIHqDCCBpCgAwIBAgIRAMy4579OKRr9otxmpRwsDxEwDQYJKoZIhvcNAQEFBQAwcjELMAkGA1UE +BhMCSFUxETAPBgNVBAcTCEJ1ZGFwZXN0MRYwFAYDVQQKEw1NaWNyb3NlYyBMdGQuMRQwEgYDVQQL +EwtlLVN6aWdubyBDQTEiMCAGA1UEAxMZTWljcm9zZWMgZS1Temlnbm8gUm9vdCBDQTAeFw0wNTA0 +MDYxMjI4NDRaFw0xNzA0MDYxMjI4NDRaMHIxCzAJBgNVBAYTAkhVMREwDwYDVQQHEwhCdWRhcGVz +dDEWMBQGA1UEChMNTWljcm9zZWMgTHRkLjEUMBIGA1UECxMLZS1Temlnbm8gQ0ExIjAgBgNVBAMT +GU1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB +AQDtyADVgXvNOABHzNuEwSFpLHSQDCHZU4ftPkNEU6+r+ICbPHiN1I2uuO/TEdyB5s87lozWbxXG +d36hL+BfkrYn13aaHUM86tnsL+4582pnS4uCzyL4ZVX+LMsvfUh6PXX5qqAnu3jCBspRwn5mS6/N +oqdNAoI/gqyFxuEPkEeZlApxcpMqyabAvjxWTHOSJ/FrtfX9/DAFYJLG65Z+AZHCabEeHXtTRbjc +QR/Ji3HWVBTji1R4P770Yjtb9aPs1ZJ04nQw7wHb4dSrmZsqa/i9phyGI0Jf7Enemotb9HI6QMVJ +PqW+jqpx62z69Rrkav17fVVA71hu5tnVvCSrwe+3AgMBAAGjggQ3MIIEMzBnBggrBgEFBQcBAQRb +MFkwKAYIKwYBBQUHMAGGHGh0dHBzOi8vcmNhLmUtc3ppZ25vLmh1L29jc3AwLQYIKwYBBQUHMAKG +IWh0dHA6Ly93d3cuZS1zemlnbm8uaHUvUm9vdENBLmNydDAPBgNVHRMBAf8EBTADAQH/MIIBcwYD +VR0gBIIBajCCAWYwggFiBgwrBgEEAYGoGAIBAQEwggFQMCgGCCsGAQUFBwIBFhxodHRwOi8vd3d3 +LmUtc3ppZ25vLmh1L1NaU1ovMIIBIgYIKwYBBQUHAgIwggEUHoIBEABBACAAdABhAG4A+gBzAO0A +dAB2AOEAbgB5ACAA6QByAHQAZQBsAG0AZQB6AOkAcwDpAGgAZQB6ACAA6QBzACAAZQBsAGYAbwBn +AGEAZADhAHMA4QBoAG8AegAgAGEAIABTAHoAbwBsAGcA4QBsAHQAYQB0APMAIABTAHoAbwBsAGcA +4QBsAHQAYQB0AOEAcwBpACAAUwB6AGEAYgDhAGwAeQB6AGEAdABhACAAcwB6AGUAcgBpAG4AdAAg +AGsAZQBsAGwAIABlAGwAagDhAHIAbgBpADoAIABoAHQAdABwADoALwAvAHcAdwB3AC4AZQAtAHMA +egBpAGcAbgBvAC4AaAB1AC8AUwBaAFMAWgAvMIHIBgNVHR8EgcAwgb0wgbqggbeggbSGIWh0dHA6 +Ly93d3cuZS1zemlnbm8uaHUvUm9vdENBLmNybIaBjmxkYXA6Ly9sZGFwLmUtc3ppZ25vLmh1L0NO +PU1pY3Jvc2VjJTIwZS1Temlnbm8lMjBSb290JTIwQ0EsT1U9ZS1Temlnbm8lMjBDQSxPPU1pY3Jv +c2VjJTIwTHRkLixMPUJ1ZGFwZXN0LEM9SFU/Y2VydGlmaWNhdGVSZXZvY2F0aW9uTGlzdDtiaW5h +cnkwDgYDVR0PAQH/BAQDAgEGMIGWBgNVHREEgY4wgYuBEGluZm9AZS1zemlnbm8uaHWkdzB1MSMw +IQYDVQQDDBpNaWNyb3NlYyBlLVN6aWduw7MgUm9vdCBDQTEWMBQGA1UECwwNZS1TemlnbsOzIEhT +WjEWMBQGA1UEChMNTWljcm9zZWMgS2Z0LjERMA8GA1UEBxMIQnVkYXBlc3QxCzAJBgNVBAYTAkhV +MIGsBgNVHSMEgaQwgaGAFMegSXUWYYTbMUuE0vE3QJDvTtz3oXakdDByMQswCQYDVQQGEwJIVTER +MA8GA1UEBxMIQnVkYXBlc3QxFjAUBgNVBAoTDU1pY3Jvc2VjIEx0ZC4xFDASBgNVBAsTC2UtU3pp +Z25vIENBMSIwIAYDVQQDExlNaWNyb3NlYyBlLVN6aWdubyBSb290IENBghEAzLjnv04pGv2i3Gal +HCwPETAdBgNVHQ4EFgQUx6BJdRZhhNsxS4TS8TdAkO9O3PcwDQYJKoZIhvcNAQEFBQADggEBANMT +nGZjWS7KXHAM/IO8VbH0jgdsZifOwTsgqRy7RlRw7lrMoHfqaEQn6/Ip3Xep1fvj1KcExJW4C+FE +aGAHQzAxQmHl7tnlJNUb3+FKG6qfx1/4ehHqE5MAyopYse7tDk2016g2JnzgOsHVV4Lxdbb9iV/a +86g4nzUGCM4ilb7N1fy+W955a9x6qWVmvrElWl/tftOsRm1M9DKHtCAE4Gx4sHfRhUZLphK3dehK +yVZs15KrnfVJONJPU+NVkBHbmJbGSfI+9J8b4PeI3CVimUTYc78/MPMMNz7UwiiAc7EBt51alhQB 
+S6kRnSlqLtBdgcDPsiBDxwPgN05dCtxZICU= +-----END CERTIFICATE----- + +Certigna +======== +-----BEGIN CERTIFICATE----- +MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNVBAYTAkZSMRIw +EAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4XDTA3MDYyOTE1MTMwNVoXDTI3 +MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwI +Q2VydGlnbmEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7q +XOEm7RFHYeGifBZ4QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyH +GxnygQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbwzBfsV1/p +ogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q130yGLMLLGq/jj8UEYkg +DncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKf +Irjxwo1p3Po6WAbfAgMBAAGjgbwwgbkwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQ +tCRZvgHyUtVF9lo53BEwZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJ +BgNVBAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzjAQ/J +SP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG9w0BAQUFAAOCAQEA +hQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8hbV6lUmPOEvjvKtpv6zf+EwLHyzs+ +ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFncfca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1klu +PBS1xp81HlDQwY9qcEQCYsuuHWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY +1gkIl2PlwS6wt0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw +WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== +-----END CERTIFICATE----- + +AC Ra\xC3\xADz Certic\xC3\xA1mara S.A. +====================================== +-----BEGIN CERTIFICATE----- +MIIGZjCCBE6gAwIBAgIPB35Sk3vgFeNX8GmMy+wMMA0GCSqGSIb3DQEBBQUAMHsxCzAJBgNVBAYT +AkNPMUcwRQYDVQQKDD5Tb2NpZWRhZCBDYW1lcmFsIGRlIENlcnRpZmljYWNpw7NuIERpZ2l0YWwg +LSBDZXJ0aWPDoW1hcmEgUy5BLjEjMCEGA1UEAwwaQUMgUmHDrXogQ2VydGljw6FtYXJhIFMuQS4w +HhcNMDYxMTI3MjA0NjI5WhcNMzAwNDAyMjE0MjAyWjB7MQswCQYDVQQGEwJDTzFHMEUGA1UECgw+ +U29jaWVkYWQgQ2FtZXJhbCBkZSBDZXJ0aWZpY2FjacOzbiBEaWdpdGFsIC0gQ2VydGljw6FtYXJh +IFMuQS4xIzAhBgNVBAMMGkFDIFJhw616IENlcnRpY8OhbWFyYSBTLkEuMIICIjANBgkqhkiG9w0B +AQEFAAOCAg8AMIICCgKCAgEAq2uJo1PMSCMI+8PPUZYILrgIem08kBeGqentLhM0R7LQcNzJPNCN +yu5LF6vQhbCnIwTLqKL85XXbQMpiiY9QngE9JlsYhBzLfDe3fezTf3MZsGqy2IiKLUV0qPezuMDU +2s0iiXRNWhU5cxh0T7XrmafBHoi0wpOQY5fzp6cSsgkiBzPZkc0OnB8OIMfuuzONj8LSWKdf/WU3 +4ojC2I+GdV75LaeHM/J4Ny+LvB2GNzmxlPLYvEqcgxhaBvzz1NS6jBUJJfD5to0EfhcSM2tXSExP +2yYe68yQ54v5aHxwD6Mq0Do43zeX4lvegGHTgNiRg0JaTASJaBE8rF9ogEHMYELODVoqDA+bMMCm +8Ibbq0nXl21Ii/kDwFJnmxL3wvIumGVC2daa49AZMQyth9VXAnow6IYm+48jilSH5L887uvDdUhf +HjlvgWJsxS3EF1QZtzeNnDeRyPYL1epjb4OsOMLzP96a++EjYfDIJss2yKHzMI+ko6Kh3VOz3vCa +Mh+DkXkwwakfU5tTohVTP92dsxA7SH2JD/ztA/X7JWR1DhcZDY8AFmd5ekD8LVkH2ZD6mq093ICK +5lw1omdMEWux+IBkAC1vImHFrEsm5VoQgpukg3s0956JkSCXjrdCx2bD0Omk1vUgjcTDlaxECp1b +czwmPS9KvqfJpxAe+59QafMCAwEAAaOB5jCB4zAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE +AwIBBjAdBgNVHQ4EFgQU0QnQ6dfOeXRU+Tows/RtLAMDG2gwgaAGA1UdIASBmDCBlTCBkgYEVR0g +ADCBiTArBggrBgEFBQcCARYfaHR0cDovL3d3dy5jZXJ0aWNhbWFyYS5jb20vZHBjLzBaBggrBgEF +BQcCAjBOGkxMaW1pdGFjaW9uZXMgZGUgZ2FyYW507WFzIGRlIGVzdGUgY2VydGlmaWNhZG8gc2Ug +cHVlZGVuIGVuY29udHJhciBlbiBsYSBEUEMuMA0GCSqGSIb3DQEBBQUAA4ICAQBclLW4RZFNjmEf +AygPU3zmpFmps4p6xbD/CHwso3EcIRNnoZUSQDWDg4902zNc8El2CoFS3UnUmjIz75uny3XlesuX +EpBcunvFm9+7OSPI/5jOCk0iAUgHforA1SBClETvv3eiiWdIG0ADBaGJ7M9i4z0ldma/Jre7Ir5v +/zlXdLp6yQGVwZVR6Kss+LGGIOk/yzVb0hfpKv6DExdA7ohiZVvVO2Dpezy4ydV/NgIlqmjCMRW3 +MGXrfx1IebHPOeJCgBbT9ZMj/EyXyVo3bHwi2ErN0o42gzmRkBDI8ck1fj+404HGIGQatlDCIaR4 +3NAvO2STdPCWkPHv+wlaNECW8DYSwaN0jJN+Qd53i+yG2dIPPy3RzECiiWZIHiCznCNZc6lEc7wk 
+eZBWN7PGKX6jD/EpOe9+XCgycDWs2rjIdWb8m0w5R44bb5tNAlQiM+9hup4phO9OSzNHdpdqy35f +/RWmnkJDW2ZaiogN9xa5P1FlK2Zqi9E4UqLWRhH6/JocdJ6PlwsCT2TG9WjTSy3/pDceiz+/RL5h +RqGEPQgnTIEgd4kI6mdAXmwIUV80WoyWaM3X94nCHNMyAK9Sy9NgWyo6R35rMDOhYil/SrnhLecU +Iw4OGEfhefwVVdCx/CVxY3UzHCMrr1zZ7Ud3YA47Dx7SwNxkBYn8eNZcLCZDqQ== +-----END CERTIFICATE----- + +TC TrustCenter Class 2 CA II +============================ +-----BEGIN CERTIFICATE----- +MIIEqjCCA5KgAwIBAgIOLmoAAQACH9dSISwRXDswDQYJKoZIhvcNAQEFBQAwdjELMAkGA1UEBhMC +REUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNVBAsTGVRDIFRydXN0Q2VudGVy +IENsYXNzIDIgQ0ExJTAjBgNVBAMTHFRDIFRydXN0Q2VudGVyIENsYXNzIDIgQ0EgSUkwHhcNMDYw +MTEyMTQzODQzWhcNMjUxMjMxMjI1OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1 +c3RDZW50ZXIgR21iSDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQTElMCMGA1UE +AxMcVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC +AQoCggEBAKuAh5uO8MN8h9foJIIRszzdQ2Lu+MNF2ujhoF/RKrLqk2jftMjWQ+nEdVl//OEd+DFw +IxuInie5e/060smp6RQvkL4DUsFJzfb95AhmC1eKokKguNV/aVyQMrKXDcpK3EY+AlWJU+MaWss2 +xgdW94zPEfRMuzBwBJWl9jmM/XOBCH2JXjIeIqkiRUuwZi4wzJ9l/fzLganx4Duvo4bRierERXlQ +Xa7pIXSSTYtZgo+U4+lK8edJsBTj9WLL1XK9H7nSn6DNqPoByNkN39r8R52zyFTfSUrxIan+GE7u +SNQZu+995OKdy1u2bv/jzVrndIIFuoAlOMvkaZ6vQaoahPUCAwEAAaOCATQwggEwMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTjq1RMgKHbVkO3kUrL84J6E1wIqzCB +7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRydXN0Y2VudGVyLmRlL2NybC92Mi90 +Y19jbGFzc18yX2NhX0lJLmNybIaBn2xkYXA6Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBU +cnVzdENlbnRlciUyMENsYXNzJTIwMiUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21i +SCxPVT1yb290Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u +TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEAjNfffu4bgBCzg/XbEeprS6iSGNn3Bzn1LL4G +dXpoUxUc6krtXvwjshOg0wn/9vYua0Fxec3ibf2uWWuFHbhOIprtZjluS5TmVfwLG4t3wVMTZonZ +KNaL80VKY7f9ewthXbhtvsPcW3nS7Yblok2+XnR8au0WOB9/WIFaGusyiC2y8zl3gK9etmF1Kdsj +TYjKUCjLhdLTEKJZbtOTVAB6okaVhgWcqRmY5TFyDADiZ9lA4CQze28suVyrZZ0srHbqNZn1l7kP +JOzHdiEoZa5X6AeIdUpWoNIFOqTmjZKILPPy4cHGYdtBxceb9w4aUUXCYWvcZCcXjFq32nQozZfk +vQ== +-----END CERTIFICATE----- + +TC TrustCenter Class 3 CA II +============================ +-----BEGIN CERTIFICATE----- +MIIEqjCCA5KgAwIBAgIOSkcAAQAC5aBd1j8AUb8wDQYJKoZIhvcNAQEFBQAwdjELMAkGA1UEBhMC +REUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNVBAsTGVRDIFRydXN0Q2VudGVy +IENsYXNzIDMgQ0ExJTAjBgNVBAMTHFRDIFRydXN0Q2VudGVyIENsYXNzIDMgQ0EgSUkwHhcNMDYw +MTEyMTQ0MTU3WhcNMjUxMjMxMjI1OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1 +c3RDZW50ZXIgR21iSDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQTElMCMGA1UE +AxMcVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC +AQoCggEBALTgu1G7OVyLBMVMeRwjhjEQY0NVJz/GRcekPewJDRoeIMJWHt4bNwcwIi9v8Qbxq63W +yKthoy9DxLCyLfzDlml7forkzMA5EpBCYMnMNWju2l+QVl/NHE1bWEnrDgFPZPosPIlY2C8u4rBo +6SI7dYnWRBpl8huXJh0obazovVkdKyT21oQDZogkAHhg8fir/gKya/si+zXmFtGt9i4S5Po1auUZ +uV3bOx4a+9P/FRQI2AlqukWdFHlgfa9Aigdzs5OW03Q0jTo3Kd5c7PXuLjHCINy+8U9/I1LZW+Jk +2ZyqBwi1Rb3R0DHBq1SfqdLDYmAD8bs5SpJKPQq5ncWg/jcCAwEAAaOCATQwggEwMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTUovyfs8PYA9NXXAek0CSnwPIA1DCB +7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRydXN0Y2VudGVyLmRlL2NybC92Mi90 +Y19jbGFzc18zX2NhX0lJLmNybIaBn2xkYXA6Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBU +cnVzdENlbnRlciUyMENsYXNzJTIwMyUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21i +SCxPVT1yb290Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u +TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEANmDkcPcGIEPZIxpC8vijsrlNirTzwppVMXzE 
+O2eatN9NDoqTSheLG43KieHPOh6sHfGcMrSOWXaiQYUlN6AT0PV8TtXqluJucsG7Kv5sbviRmEb8 +yRtXW+rIGjs/sFGYPAfaLFkB2otE6OF0/ado3VS6g0bsyEa1+K+XwDsJHI/OcpY9M1ZwvJbL2NV9 +IJqDnxrcOfHFcqMRA/07QlIp2+gB95tejNaNhk4Z+rwcvsUhpYeeeC422wlxo3I0+GzjBgnyXlal +092Y+tTmBvTwtiBjS+opvaqCZh77gaqnN60TGOaSw4HBM7uIHqHn4rS9MWwOUT1v+5ZWgOI2F9Hc +5A== +-----END CERTIFICATE----- + +TC TrustCenter Universal CA I +============================= +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIOHaIAAQAC7LdggHiNtgYwDQYJKoZIhvcNAQEFBQAweTELMAkGA1UEBhMC +REUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNVBAsTG1RDIFRydXN0Q2VudGVy +IFVuaXZlcnNhbCBDQTEmMCQGA1UEAxMdVEMgVHJ1c3RDZW50ZXIgVW5pdmVyc2FsIENBIEkwHhcN +MDYwMzIyMTU1NDI4WhcNMjUxMjMxMjI1OTU5WjB5MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMg +VHJ1c3RDZW50ZXIgR21iSDEkMCIGA1UECxMbVEMgVHJ1c3RDZW50ZXIgVW5pdmVyc2FsIENBMSYw +JAYDVQQDEx1UQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0EgSTCCASIwDQYJKoZIhvcNAQEBBQAD +ggEPADCCAQoCggEBAKR3I5ZEr5D0MacQ9CaHnPM42Q9e3s9B6DGtxnSRJJZ4Hgmgm5qVSkr1YnwC +qMqs+1oEdjneX/H5s7/zA1hV0qq34wQi0fiU2iIIAI3TfCZdzHd55yx4Oagmcw6iXSVphU9VDprv +xrlE4Vc93x9UIuVvZaozhDrzznq+VZeujRIPFDPiUHDDSYcTvFHe15gSWu86gzOSBnWLknwSaHtw +ag+1m7Z3W0hZneTvWq3zwZ7U10VOylY0Ibw+F1tvdwxIAUMpsN0/lm7mlaoMwCC2/T42J5zjXM9O +gdwZu5GQfezmlwQek8wiSdeXhrYTCjxDI3d+8NzmzSQfO4ObNDqDNOMCAwEAAaNjMGEwHwYDVR0j +BBgwFoAUkqR1LKSevoFE63n8isWVpesQdXMwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AYYwHQYDVR0OBBYEFJKkdSyknr6BROt5/IrFlaXrEHVzMA0GCSqGSIb3DQEBBQUAA4IBAQAo0uCG +1eb4e/CX3CJrO5UUVg8RMKWaTzqwOuAGy2X17caXJ/4l8lfmXpWMPmRgFVp/Lw0BxbFg/UU1z/Cy +vwbZ71q+s2IhtNerNXxTPqYn8aEt2hojnczd7Dwtnic0XQ/CNnm8yUpiLe1r2X1BQ3y2qsrtYbE3 +ghUJGooWMNjsydZHcnhLEEYUjl8Or+zHL6sQ17bxbuyGssLoDZJz3KL0Dzq/YSMQiZxIQG5wALPT +ujdEWBF6AmqI8Dc08BnprNRlc/ZpjGSUOnmFKbAWKwyCPwacx/0QK54PLLae4xW/2TYcuiUaUj0a +7CIMHOCkoj3w6DnPgcB77V0fb8XQC9eY +-----END CERTIFICATE----- + +Deutsche Telekom Root CA 2 +========================== +-----BEGIN CERTIFICATE----- +MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEcMBoGA1UEChMT +RGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2VjIFRydXN0IENlbnRlcjEjMCEG +A1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENBIDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5 +MjM1OTAwWjBxMQswCQYDVQQGEwJERTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0G +A1UECxMWVC1UZWxlU2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBS +b290IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEUha88EOQ5 +bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhCQN/Po7qCWWqSG6wcmtoI +KyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1MjwrrFDa1sPeg5TKqAyZMg4ISFZbavva4VhY +AUlfckE8FQYBjl2tqriTtM2e66foai1SNNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aK +Se5TBY8ZTNXeWHmb0mocQqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTV +jlsB9WoHtxa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAPBgNV +HRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAlGRZrTlk5ynr +E/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756AbrsptJh6sTtU6zkXR34ajgv8HzFZMQSy +zhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpaIzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8 +rZ7/gFnkm0W09juwzTkZmDLl6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4G +dyd1Lx+4ivn+xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU +Cm26OWMohpLzGITY+9HPBVZkVw== +-----END CERTIFICATE----- + +ComSign Secured CA +================== +-----BEGIN CERTIFICATE----- +MIIDqzCCApOgAwIBAgIRAMcoRwmzuGxFjB36JPU2TukwDQYJKoZIhvcNAQEFBQAwPDEbMBkGA1UE +AxMSQ29tU2lnbiBTZWN1cmVkIENBMRAwDgYDVQQKEwdDb21TaWduMQswCQYDVQQGEwJJTDAeFw0w +NDAzMjQxMTM3MjBaFw0yOTAzMTYxNTA0NTZaMDwxGzAZBgNVBAMTEkNvbVNpZ24gU2VjdXJlZCBD 
+QTEQMA4GA1UEChMHQ29tU2lnbjELMAkGA1UEBhMCSUwwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw +ggEKAoIBAQDGtWhfHZQVw6QIVS3joFd67+l0Kru5fFdJGhFeTymHDEjWaueP1H5XJLkGieQcPOqs +49ohgHMhCu95mGwfCP+hUH3ymBvJVG8+pSjsIQQPRbsHPaHA+iqYHU4Gk/v1iDurX8sWv+bznkqH +7Rnqwp9D5PGBpX8QTz7RSmKtUxvLg/8HZaWSLWapW7ha9B20IZFKF3ueMv5WJDmyVIRD9YTC2LxB +kMyd1mja6YJQqTtoz7VdApRgFrFD2UNd3V2Hbuq7s8lr9gOUCXDeFhF6K+h2j0kQmHe5Y1yLM5d1 +9guMsqtb3nQgJT/j8xH5h2iGNXHDHYwt6+UarA9z1YJZQIDTAgMBAAGjgacwgaQwDAYDVR0TBAUw +AwEB/zBEBgNVHR8EPTA7MDmgN6A1hjNodHRwOi8vZmVkaXIuY29tc2lnbi5jby5pbC9jcmwvQ29t +U2lnblNlY3VyZWRDQS5jcmwwDgYDVR0PAQH/BAQDAgGGMB8GA1UdIwQYMBaAFMFL7XC29z58ADsA +j8c+DkWfHl3sMB0GA1UdDgQWBBTBS+1wtvc+fAA7AI/HPg5Fnx5d7DANBgkqhkiG9w0BAQUFAAOC +AQEAFs/ukhNQq3sUnjO2QiBq1BW9Cav8cujvR3qQrFHBZE7piL1DRYHjZiM/EoZNGeQFsOY3wo3a +BijJD4mkU6l1P7CW+6tMM1X5eCZGbxs2mPtCdsGCuY7e+0X5YxtiOzkGynd6qDwJz2w2PQ8KRUtp +FhpFfTMDZflScZAmlaxMDPWLkz/MdXSFmLr/YnpNH4n+rr2UAJm/EaXc4HnFFgt9AmEd6oX5AhVP +51qJThRv4zdLhfXBPGHg/QVBspJ/wx2g0K5SZGBrGMYmnNj1ZOQ2GmKfig8+/21OGVZOIJFsnzQz +OjRXUDpvgV4GxvU+fE6OK85lBi5d0ipTdF7Tbieejw== +-----END CERTIFICATE----- + +Cybertrust Global Root +====================== +-----BEGIN CERTIFICATE----- +MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYGA1UEChMPQ3li +ZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBSb290MB4XDTA2MTIxNTA4 +MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQD +ExZDeWJlcnRydXN0IEdsb2JhbCBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA ++Mi8vRRQZhP/8NN57CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW +0ozSJ8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2yHLtgwEZL +AfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iPt3sMpTjr3kfb1V05/Iin +89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNzFtApD0mpSPCzqrdsxacwOUBdrsTiXSZT +8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAYXSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2 +MDSgMqAwhi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3JsMB8G +A1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUAA4IBAQBW7wojoFRO +lZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMjWqd8BfP9IjsO0QbE2zZMcwSO5bAi +5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUxXOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2 +hO0j9n0Hq0V+09+zv+mKts2oomcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+T +X3EJIrduPuocA06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW +WL1WMRJOEcgh4LMRkWXbtKaIOM5V +-----END CERTIFICATE----- + +ePKI Root Certification Authority +================================= +-----BEGIN CERTIFICATE----- +MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBeMQswCQYDVQQG +EwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0ZC4xKjAoBgNVBAsMIWVQS0kg +Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMx +MjdaMF4xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEq +MCgGA1UECwwhZVBLSSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0B +AQEFAAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAHSyZbCUNs +IZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAhijHyl3SJCRImHJ7K2RKi +lTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3XDZoTM1PRYfl61dd4s5oz9wCGzh1NlDiv +qOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX +12ruOzjjK9SXDrkb5wdJfzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0O +WQqraffAsgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uUWH1+ +ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLSnT0IFaUQAS2zMnao 
+lQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pHdmX2Os+PYhcZewoozRrSgx4hxyy/ +vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJipNiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXi +Zo1jDiVN1Rmy5nk3pyKdVDECAwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/Qkqi +MAwGA1UdEwQFMAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH +ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGBuvl2ICO1J2B0 +1GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6YlPwZpVnPDimZI+ymBV3QGypzq +KOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkPJXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdV +xrsStZf0X4OFunHB2WyBEXYKCrC/gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEP +NXubrjlpC2JgQCA2j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+r +GNm65ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUBo2M3IUxE +xJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS/jQ6fbjpKdx2qcgw+BRx +gMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2zGp1iro2C6pSe3VkQw63d4k3jMdXH7Ojy +sP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTEW9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmOD +BCEIZ43ygknQW/2xzQ+DhNQ+IIX3Sj0rnP0qCglN6oH4EZw= +-----END CERTIFICATE----- + +T\xc3\x9c\x42\xC4\xB0TAK UEKAE K\xC3\xB6k Sertifika Hizmet Sa\xC4\x9Flay\xc4\xb1\x63\xc4\xb1s\xc4\xb1 - S\xC3\xBCr\xC3\xBCm 3 +============================================================================================================================= +-----BEGIN CERTIFICATE----- +MIIFFzCCA/+gAwIBAgIBETANBgkqhkiG9w0BAQUFADCCASsxCzAJBgNVBAYTAlRSMRgwFgYDVQQH +DA9HZWJ6ZSAtIEtvY2FlbGkxRzBFBgNVBAoMPlTDvHJraXllIEJpbGltc2VsIHZlIFRla25vbG9q +aWsgQXJhxZ90xLFybWEgS3VydW11IC0gVMOcQsSwVEFLMUgwRgYDVQQLDD9VbHVzYWwgRWxla3Ry +b25payB2ZSBLcmlwdG9sb2ppIEFyYcWfdMSxcm1hIEVuc3RpdMO8c8O8IC0gVUVLQUUxIzAhBgNV +BAsMGkthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppMUowSAYDVQQDDEFUw5xCxLBUQUsgVUVLQUUg +S8O2ayBTZXJ0aWZpa2EgSGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSAtIFPDvHLDvG0gMzAeFw0wNzA4 +MjQxMTM3MDdaFw0xNzA4MjExMTM3MDdaMIIBKzELMAkGA1UEBhMCVFIxGDAWBgNVBAcMD0dlYnpl +IC0gS29jYWVsaTFHMEUGA1UECgw+VMO8cmtpeWUgQmlsaW1zZWwgdmUgVGVrbm9sb2ppayBBcmHF +n3TEsXJtYSBLdXJ1bXUgLSBUw5xCxLBUQUsxSDBGBgNVBAsMP1VsdXNhbCBFbGVrdHJvbmlrIHZl +IEtyaXB0b2xvamkgQXJhxZ90xLFybWEgRW5zdGl0w7xzw7wgLSBVRUtBRTEjMCEGA1UECwwaS2Ft +dSBTZXJ0aWZpa2FzeW9uIE1lcmtlemkxSjBIBgNVBAMMQVTDnELEsFRBSyBVRUtBRSBLw7ZrIFNl +cnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIC0gU8O8csO8bSAzMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEAim1L/xCIOsP2fpTo6iBkcK4hgb46ezzb8R1Sf1n68yJMlaCQvEhO +Eav7t7WNeoMojCZG2E6VQIdhn8WebYGHV2yKO7Rm6sxA/OOqbLLLAdsyv9Lrhc+hDVXDWzhXcLh1 +xnnRFDDtG1hba+818qEhTsXOfJlfbLm4IpNQp81McGq+agV/E5wrHur+R84EpW+sky58K5+eeROR +6Oqeyjh1jmKwlZMq5d/pXpduIF9fhHpEORlAHLpVK/swsoHvhOPc7Jg4OQOFCKlUAwUp8MmPi+oL +hmUZEdPpCSPeaJMDyTYcIW7OjGbxmTDY17PDHfiBLqi9ggtm/oLL4eAagsNAgQIDAQABo0IwQDAd +BgNVHQ4EFgQUvYiHyY/2pAoLquvF/pEjnatKijIwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF +MAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAB18+kmPNOm3JpIWmgV050vQbTlswyb2zrgxvMTfvCr4 +N5EY3ATIZJkrGG2AA1nJrvhY0D7twyOfaTyGOBye79oneNGEN3GKPEs5z35FBtYt2IpNeBLWrcLT +y9LQQfMmNkqblWwM7uXRQydmwYj3erMgbOqwaSvHIOgMA8RBBZniP+Rr+KCGgceExh/VS4ESshYh +LBOhgLJeDEoTniDYYkCrkOpkSi+sDQESeUWoL4cZaMjihccwsnX5OD+ywJO0a+IDRM5noN+J1q2M +dqMTw5RhK2vZbMEHCiIHhWyFJEapvj+LeISCfiQMnf2BN+MlqO02TpUsyZyQ2uypQjyttgI= +-----END CERTIFICATE----- + +Buypass Class 2 CA 1 +==================== +-----BEGIN CERTIFICATE----- +MIIDUzCCAjugAwIBAgIBATANBgkqhkiG9w0BAQUFADBLMQswCQYDVQQGEwJOTzEdMBsGA1UECgwU +QnV5cGFzcyBBUy05ODMxNjMzMjcxHTAbBgNVBAMMFEJ1eXBhc3MgQ2xhc3MgMiBDQSAxMB4XDTA2 +MTAxMzEwMjUwOVoXDTE2MTAxMzEwMjUwOVowSzELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBh 
+c3MgQVMtOTgzMTYzMzI3MR0wGwYDVQQDDBRCdXlwYXNzIENsYXNzIDIgQ0EgMTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAIs8B0XY9t/mx8q6jUPFR42wWsE425KEHK8T1A9vNkYgxC7M +cXA0ojTTNy7Y3Tp3L8DrKehc0rWpkTSHIln+zNvnma+WwajHQN2lFYxuyHyXA8vmIPLXl18xoS83 +0r7uvqmtqEyeIWZDO6i88wmjONVZJMHCR3axiFyCO7srpgTXjAePzdVBHfCuuCkslFJgNJQ72uA4 +0Z0zPhX0kzLFANq1KWYOOngPIVJfAuWSeyXTkh4vFZ2B5J2O6O+JzhRMVB0cgRJNcKi+EAUXfh/R +uFdV7c27UsKwHnjCTTZoy1YmwVLBvXb3WNVyfh9EdrsAiR0WnVE1703CVu9r4Iw7DekCAwEAAaNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUP42aWYv8e3uco684sDntkHGA1sgwDgYDVR0P +AQH/BAQDAgEGMA0GCSqGSIb3DQEBBQUAA4IBAQAVGn4TirnoB6NLJzKyQJHyIdFkhb5jatLPgcIV +1Xp+DCmsNx4cfHZSldq1fyOhKXdlyTKdqC5Wq2B2zha0jX94wNWZUYN/Xtm+DKhQ7SLHrQVMdvvt +7h5HZPb3J31cKA9FxVxiXqaakZG3Uxcu3K1gnZZkOb1naLKuBctN518fV4bVIJwo+28TOPX2EZL2 +fZleHwzoq0QkKXJAPTZSr4xYkHPB7GEseaHsh7U/2k3ZIQAw3pDaDtMaSKk+hQsUi4y8QZ5q9w5w +wDX3OaJdZtB7WZ+oRxKaJyOkLY4ng5IgodcVf/EuGO70SH8vf/GhGLWhC5SgYiAynB321O+/TIho +-----END CERTIFICATE----- + +Buypass Class 3 CA 1 +==================== +-----BEGIN CERTIFICATE----- +MIIDUzCCAjugAwIBAgIBAjANBgkqhkiG9w0BAQUFADBLMQswCQYDVQQGEwJOTzEdMBsGA1UECgwU +QnV5cGFzcyBBUy05ODMxNjMzMjcxHTAbBgNVBAMMFEJ1eXBhc3MgQ2xhc3MgMyBDQSAxMB4XDTA1 +MDUwOTE0MTMwM1oXDTE1MDUwOTE0MTMwM1owSzELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBh +c3MgQVMtOTgzMTYzMzI3MR0wGwYDVQQDDBRCdXlwYXNzIENsYXNzIDMgQ0EgMTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAKSO13TZKWTeXx+HgJHqTjnmGcZEC4DVC69TB4sSveZn8AKx +ifZgisRbsELRwCGoy+Gb72RRtqfPFfV0gGgEkKBYouZ0plNTVUhjP5JW3SROjvi6K//zNIqeKNc0 +n6wv1g/xpC+9UrJJhW05NfBEMJNGJPO251P7vGGvqaMU+8IXF4Rs4HyI+MkcVyzwPX6UvCWThOia +AJpFBUJXgPROztmuOfbIUxAMZTpHe2DC1vqRycZxbL2RhzyRhkmr8w+gbCZ2Xhysm3HljbybIR6c +1jh+JIAVMYKWsUnTYjdbiAwKYjT+p0h+mbEwi5A3lRyoH6UsjfRVyNvdWQrCrXig9IsCAwEAAaNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUOBTmyPCppAP0Tj4io1vy1uCtQHQwDgYDVR0P +AQH/BAQDAgEGMA0GCSqGSIb3DQEBBQUAA4IBAQABZ6OMySU9E2NdFm/soT4JXJEVKirZgCFPBdy7 +pYmrEzMqnji3jG8CcmPHc3ceCQa6Oyh7pEfJYWsICCD8igWKH7y6xsL+z27sEzNxZy5p+qksP2bA +EllNC1QCkoS72xLvg3BweMhT+t/Gxv/ciC8HwEmdMldg0/L2mSlf56oBzKwzqBwKu5HEA6BvtjT5 +htOzdlSY9EqBs1OdTUDs5XcTRa9bqh/YL0yCe/4qxFi7T/ye/QNlGioOw6UgFpRreaaiErS7GqQj +el/wroQk5PMr+4okoyeYZdowdXb8GZHo2+ubPzK/QJcHJrrM85SFSnonk8+QQtS4Wxam58tAA915 +-----END CERTIFICATE----- + +EBG Elektronik Sertifika Hizmet Sa\xC4\x9Flay\xc4\xb1\x63\xc4\xb1s\xc4\xb1 +========================================================================== +-----BEGIN CERTIFICATE----- +MIIF5zCCA8+gAwIBAgIITK9zQhyOdAIwDQYJKoZIhvcNAQEFBQAwgYAxODA2BgNVBAMML0VCRyBF +bGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxMTcwNQYDVQQKDC5FQkcg +QmlsacWfaW0gVGVrbm9sb2ppbGVyaSB2ZSBIaXptZXRsZXJpIEEuxZ4uMQswCQYDVQQGEwJUUjAe +Fw0wNjA4MTcwMDIxMDlaFw0xNjA4MTQwMDMxMDlaMIGAMTgwNgYDVQQDDC9FQkcgRWxla3Ryb25p +ayBTZXJ0aWZpa2EgSGl6bWV0IFNhxJ9sYXnEsWPEsXPEsTE3MDUGA1UECgwuRUJHIEJpbGnFn2lt +IFRla25vbG9qaWxlcmkgdmUgSGl6bWV0bGVyaSBBLsWeLjELMAkGA1UEBhMCVFIwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQDuoIRh0DpqZhAy2DE4f6en5f2h4fuXd7hxlugTlkaDT7by +X3JWbhNgpQGR4lvFzVcfd2NR/y8927k/qqk153nQ9dAktiHq6yOU/im/+4mRDGSaBUorzAzu8T2b +gmmkTPiab+ci2hC6X5L8GCcKqKpE+i4stPtGmggDg3KriORqcsnlZR9uKg+ds+g75AxuetpX/dfr +eYteIAbTdgtsApWjluTLdlHRKJ2hGvxEok3MenaoDT2/F08iiFD9rrbskFBKW5+VQarKD7JK/oCZ +TqNGFav4c0JqwmZ2sQomFd2TkuzbqV9UIlKRcF0T6kjsbgNs2d1s/OsNA/+mgxKb8amTD8UmTDGy +Y5lhcucqZJnSuOl14nypqZoaqsNW2xCaPINStnuWt6yHd6i58mcLlEOzrz5z+kI2sSXFCjEmN1Zn +uqMLfdb3ic1nobc6HmZP9qBVFCVMLDMNpkGMvQQxahByCp0OLna9XvNRiYuoP1Vzv9s6xiQFlpJI +qkuNKgPlV5EQ9GooFW5Hd4RcUXSfGenmHmMWOeMRFeNYGkS9y8RsZteEBt8w9DeiQyJ50hBs37vm 
+ExH8nYQKE3vwO9D8owrXieqWfo1IhR5kX9tUoqzVegJ5a9KK8GfaZXINFHDk6Y54jzJ0fFfy1tb0 +Nokb+Clsi7n2l9GkLqq+CxnCRelwXQIDAJ3Zo2MwYTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB +/wQEAwIBBjAdBgNVHQ4EFgQU587GT/wWZ5b6SqMHwQSny2re2kcwHwYDVR0jBBgwFoAU587GT/wW +Z5b6SqMHwQSny2re2kcwDQYJKoZIhvcNAQEFBQADggIBAJuYml2+8ygjdsZs93/mQJ7ANtyVDR2t +FcU22NU57/IeIl6zgrRdu0waypIN30ckHrMk2pGI6YNw3ZPX6bqz3xZaPt7gyPvT/Wwp+BVGoGgm +zJNSroIBk5DKd8pNSe/iWtkqvTDOTLKBtjDOWU/aWR1qeqRFsIImgYZ29fUQALjuswnoT4cCB64k +XPBfrAowzIpAoHMEwfuJJPaaHFy3PApnNgUIMbOv2AFoKuB4j3TeuFGkjGwgPaL7s9QJ/XvCgKqT +bCmYIai7FvOpEl90tYeY8pUm3zTvilORiF0alKM/fCL414i6poyWqD1SNGKfAB5UVUJnxk1Gj7sU +RT0KlhaOEKGXmdXTMIXM3rRyt7yKPBgpaP3ccQfuJDlq+u2lrDgv+R4QDgZxGhBM/nV+/x5XOULK +1+EVoVZVWRvRo68R2E7DpSvvkL/A7IITW43WciyTTo9qKd+FPNMN4KIYEsxVL0e3p5sC/kH2iExt +2qkBR4NkJ2IQgtYSe14DHzSpyZH+r11thie3I6p1GMog57AP14kOpmciY/SDQSsGS7tY1dHXt7kQ +Y9iJSrSq3RZj9W6+YKH47ejWkE8axsWgKdOnIaj1Wjz3x0miIZpKlVIglnKaZsv30oZDfCK+lvm9 +AahH3eU7QPl1K5srRmSGjR70j/sHd9DqSaIcjVIUpgqT +-----END CERTIFICATE----- + +certSIGN ROOT CA +================ +-----BEGIN CERTIFICATE----- +MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYTAlJPMREwDwYD +VQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTAeFw0wNjA3MDQxNzIwMDRa +Fw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UE +CxMQY2VydFNJR04gUk9PVCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7I +JUqOtdu0KBuqV5Do0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHH +rfAQUySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5dRdY4zTW2 +ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQOA7+j0xbm0bqQfWwCHTD +0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwvJoIQ4uNllAoEwF73XVv4EOLQunpL+943 +AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8B +Af8EBAMCAcYwHQYDVR0OBBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IB +AQA+0hyJLjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecYMnQ8 +SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ44gx+FkagQnIl6Z0 +x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6IJd1hJyMctTEHBDa0GpC9oHRxUIlt +vBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNwi/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7Nz +TogVZ96edhBiIL5VaZVDADlN9u6wWk5JRFRYX0KD +-----END CERTIFICATE----- + +CNNIC ROOT +========== +-----BEGIN CERTIFICATE----- +MIIDVTCCAj2gAwIBAgIESTMAATANBgkqhkiG9w0BAQUFADAyMQswCQYDVQQGEwJDTjEOMAwGA1UE +ChMFQ05OSUMxEzARBgNVBAMTCkNOTklDIFJPT1QwHhcNMDcwNDE2MDcwOTE0WhcNMjcwNDE2MDcw +OTE0WjAyMQswCQYDVQQGEwJDTjEOMAwGA1UEChMFQ05OSUMxEzARBgNVBAMTCkNOTklDIFJPT1Qw +ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDTNfc/c3et6FtzF8LRb+1VvG7q6KR5smzD +o+/hn7E7SIX1mlwhIhAsxYLO2uOabjfhhyzcuQxauohV3/2q2x8x6gHx3zkBwRP9SFIhxFXf2tiz +VHa6dLG3fdfA6PZZxU3Iva0fFNrfWEQlMhkqx35+jq44sDB7R3IJMfAw28Mbdim7aXZOV/kbZKKT +VrdvmW7bCgScEeOAH8tjlBAKqeFkgjH5jCftppkA9nCTGPihNIaj3XrCGHn2emU1z5DrvTOTn1Or +czvmmzQgLx3vqR1jGqCA2wMv+SYahtKNu6m+UjqHZ0gNv7Sg2Ca+I19zN38m5pIEo3/PIKe38zrK +y5nLAgMBAAGjczBxMBEGCWCGSAGG+EIBAQQEAwIABzAfBgNVHSMEGDAWgBRl8jGtKvf33VKWCscC +wQ7vptU7ETAPBgNVHRMBAf8EBTADAQH/MAsGA1UdDwQEAwIB/jAdBgNVHQ4EFgQUZfIxrSr3991S +lgrHAsEO76bVOxEwDQYJKoZIhvcNAQEFBQADggEBAEs17szkrr/Dbq2flTtLP1se31cpolnKOOK5 +Gv+e5m4y3R6u6jW39ZORTtpC4cMXYFDy0VwmuYK36m3knITnA3kXr5g9lNvHugDnuL8BV8F3RTIM +O/G0HAiw/VGgod2aHRM2mm23xzy54cXZF/qD1T0VoDy7HgviyJA/qIYM/PmLXoXLT1tLYhFHxUV8 +BS9BsZ4QaRuZluBVeftOhpm4lNqGOGqTo+fLbuXf6iFViZx9fX+Y9QCJ7uOEwFyWtcVG6kbghVW2 +G8kS1sHNzYDzAgE8yGnLRUhj2JTQ7IUOO04RZfSCjKY9ri4ilAnIXOo8gV0WKgOXFlUJ24pBgp5m +mxE= +-----END CERTIFICATE----- + +ApplicationCA - Japanese Government 
+=================================== +-----BEGIN CERTIFICATE----- +MIIDoDCCAoigAwIBAgIBMTANBgkqhkiG9w0BAQUFADBDMQswCQYDVQQGEwJKUDEcMBoGA1UEChMT +SmFwYW5lc2UgR292ZXJubWVudDEWMBQGA1UECxMNQXBwbGljYXRpb25DQTAeFw0wNzEyMTIxNTAw +MDBaFw0xNzEyMTIxNTAwMDBaMEMxCzAJBgNVBAYTAkpQMRwwGgYDVQQKExNKYXBhbmVzZSBHb3Zl +cm5tZW50MRYwFAYDVQQLEw1BcHBsaWNhdGlvbkNBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB +CgKCAQEAp23gdE6Hj6UG3mii24aZS2QNcfAKBZuOquHMLtJqO8F6tJdhjYq+xpqcBrSGUeQ3DnR4 +fl+Kf5Sk10cI/VBaVuRorChzoHvpfxiSQE8tnfWuREhzNgaeZCw7NCPbXCbkcXmP1G55IrmTwcrN +wVbtiGrXoDkhBFcsovW8R0FPXjQilbUfKW1eSvNNcr5BViCH/OlQR9cwFO5cjFW6WY2H/CPek9AE +jP3vbb3QesmlOmpyM8ZKDQUXKi17safY1vC+9D/qDihtQWEjdnjDuGWk81quzMKq2edY3rZ+nYVu +nyoKb58DKTCXKB28t89UKU5RMfkntigm/qJj5kEW8DOYRwIDAQABo4GeMIGbMB0GA1UdDgQWBBRU +WssmP3HMlEYNllPqa0jQk/5CdTAOBgNVHQ8BAf8EBAMCAQYwWQYDVR0RBFIwUKROMEwxCzAJBgNV +BAYTAkpQMRgwFgYDVQQKDA/ml6XmnKzlm73mlL/lupwxIzAhBgNVBAsMGuOCouODl+ODquOCseOD +vOOCt+ODp+ODs0NBMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBADlqRHZ3ODrs +o2dGD/mLBqj7apAxzn7s2tGJfHrrLgy9mTLnsCTWw//1sogJhyzjVOGjprIIC8CFqMjSnHH2HZ9g +/DgzE+Ge3Atf2hZQKXsvcJEPmbo0NI2VdMV+eKlmXb3KIXdCEKxmJj3ekav9FfBv7WxfEPjzFvYD +io+nEhEMy/0/ecGc/WLuo89UDNErXxc+4z6/wCs+CZv+iKZ+tJIX/COUgb1up8WMwusRRdv4QcmW +dupwX3kSa+SjB1oF7ydJzyGfikwJcGapJsErEU4z0g781mzSDjJkaP+tBXhfAx2o45CsJOAPQKdL +rosot4LKGAfmt1t06SAZf7IbiVQ= +-----END CERTIFICATE----- + +GeoTrust Primary Certification Authority - G3 +============================================= +-----BEGIN CERTIFICATE----- +MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UE +BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChjKSAyMDA4IEdlb1RydXN0 +IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFy +eSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIz +NTk1OVowgZgxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAo +YykgMjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNVBAMT +LUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBHMzCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz+uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5j +K/BGvESyiaHAKAxJcCGVn2TAppMSAmUmhsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdE +c5IiaacDiGydY8hS2pgn5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3C +IShwiP/WJmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exALDmKu +dlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZChuOl1UcCAwEAAaNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMR5yo6hTgMdHNxr +2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IBAQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9 +cr5HqQ6XErhK8WTTOd8lNNTBzU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbE +Ap7aDHdlDkQNkv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD +AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUHSJsMC8tJP33s +t/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2Gspki4cErx5z481+oghLrGREt +-----END CERTIFICATE----- + +thawte Primary Root CA - G2 +=========================== +-----BEGIN CERTIFICATE----- +MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDELMAkGA1UEBhMC +VVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMpIDIwMDcgdGhhd3RlLCBJbmMu +IC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3Qg +Q0EgLSBHMjAeFw0wNzExMDUwMDAwMDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEV +MBMGA1UEChMMdGhhd3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBG +b3IgYXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAt 
+IEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/BebfowJPDQfGAFG6DAJS +LSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6papu+7qzcMBniKI11KOasf2twu8x+qi5 +8/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQU +mtgAMADna3+FGO6Lts6KDPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUN +G4k8VIZ3KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41oxXZ3K +rr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg== +-----END CERTIFICATE----- + +thawte Primary Root CA - G3 +=========================== +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCBrjELMAkGA1UE +BhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2 +aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIwMDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhv +cml6ZWQgdXNlIG9ubHkxJDAiBgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0w +ODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh +d3RlLCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9uMTgwNgYD +VQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTEkMCIG +A1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEczMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAsr8nLPvb2FvdeHsbnndmgcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2At +P0LMqmsywCPLLEHd5N/8YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC ++BsUa0Lfb1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS99irY +7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2SzhkGcuYMXDhpxwTW +vGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUkOQIDAQABo0IwQDAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJ +KoZIhvcNAQELBQADggEBABpA2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweK +A3rD6z8KLFIWoCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu +t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7cKUGRIjxpp7sC +8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fMm7v/OeZWYdMKp8RcTGB7BXcm +er/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZuMdRAGmI0Nj81Aa6sY6A= +-----END CERTIFICATE----- + +GeoTrust Primary Certification Authority - G2 +============================================= +-----BEGIN CERTIFICATE----- +MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChjKSAyMDA3IEdlb1RydXN0IElu +Yy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBD +ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1 +OVowgZgxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg +MjAwNyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNVBAMTLUdl +b1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBHMjB2MBAGByqGSM49AgEG +BSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcLSo17VDs6bl8VAsBQps8lL33KSLjHUGMc +KiEIfJo22Av+0SbFWDEwKCXzXV2juLaltJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYD +VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+ +EVXVMAoGCCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGTqQ7m +ndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBuczrD6ogRLQy7rQkgu2 +npaqBA+K +-----END CERTIFICATE----- + +VeriSign Universal Root Certification Authority +=============================================== +-----BEGIN CERTIFICATE----- +MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCBvTELMAkGA1UE +BhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBO +ZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVk 
+IHVzZSBvbmx5MTgwNgYDVQQDEy9WZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9u +IEF1dGhvcml0eTAeFw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJV +UzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0IE5ldHdv +cmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl +IG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNhbCBSb290IENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj +1mCOkdeQmIN65lgZOIzF9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGP +MiJhgsWHH26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+HLL72 +9fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN/BMReYTtXlT2NJ8I +AfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPTrJ9VAMf2CGqUuV/c4DPxhGD5WycR +tPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0G +CCsGAQUFBwEMBGEwX6FdoFswWTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2O +a8PPgGrUSBgsexkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud +DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4sAPmLGd75JR3 +Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+seQxIcaBlVZaDrHC1LGmWazx +Y8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTx +P/jgdFcrGJ2BtMQo2pSXpXDrrB2+BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+P +wGZsY6rp2aQW9IHRlRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4 +mJO37M2CYfE45k+XmCpajQ== +-----END CERTIFICATE----- + +VeriSign Class 3 Public Primary Certification Authority - G4 +============================================================ +-----BEGIN CERTIFICATE----- +MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjELMAkGA1UEBhMC +VVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3 +b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVz +ZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjEL +MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJpU2lnbiBU +cnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRo +b3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5 +IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8 +Utpkmw4tXNherJI9/gHmGUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGz +rl0Bp3vefLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEw +HzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVyaXNpZ24u +Y29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMWkf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMD +A2gAMGUCMGYhDBgmYFo4e1ZC4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIx +AJw9SDkjOVgaFRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA== +-----END CERTIFICATE----- + +NetLock Arany (Class Gold) Főtanúsítvány +============================================ +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQGEwJIVTERMA8G +A1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3MDUGA1UECwwuVGFuw7pzw610 +dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNlcnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBB +cmFueSAoQ2xhc3MgR29sZCkgRsWRdGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgx +MjA2MTUwODIxWjCBpzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxO +ZXRMb2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlmaWNhdGlv +biBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNzIEdvbGQpIEbFkXRhbsO6 
+c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxCRec75LbRTDofTjl5Bu +0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrTlF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw +/HpYzY6b7cNGbIRwXdrzAZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAk +H3B5r9s5VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRGILdw +fzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2BJtr+UBdADTHLpl1 +neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAGAQH/AgEEMA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2MU9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwW +qZw8UQCgwBEIBaeZ5m8BiFRhbvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTta +YtOUZcTh5m2C+C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC +bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2FuLjbvrW5Kfna +NwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2XjG4Kvte9nHfRCaexOYNkbQu +dZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= +-----END CERTIFICATE----- + +Staat der Nederlanden Root CA - G2 +================================== +-----BEGIN CERTIFICATE----- +MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJOTDEeMBwGA1UE +CgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFhdCBkZXIgTmVkZXJsYW5kZW4g +Um9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oXDTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMC +TkwxHjAcBgNVBAoMFVN0YWF0IGRlciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5l +ZGVybGFuZGVuIFJvb3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ +5291qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8SpuOUfiUtn +vWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPUZ5uW6M7XxgpT0GtJlvOj +CwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvEpMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiil +e7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCR +OME4HYYEhLoaJXhena/MUGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpI +CT0ugpTNGmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy5V65 +48r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv6q012iDTiIJh8BIi +trzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEKeN5KzlW/HdXZt1bv8Hb/C3m1r737 +qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMB +AAGjgZcwgZQwDwYDVR0TAQH/BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcC +ARYxaHR0cDovL3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV +HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqGSIb3DQEBCwUA +A4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLySCZa59sCrI2AGeYwRTlHSeYAz ++51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwj +f/ST7ZwaUb7dRUG/kSS0H4zpX897IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaN +kqbG9AclVMwWVxJKgnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfk +CpYL+63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxLvJxxcypF +URmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkmbEgeqmiSBeGCc1qb3Adb +CG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvkN1trSt8sV4pAWja63XVECDdCcAz+3F4h +oKOKwJCcaNpQ5kUQR3i2TtJlycM33+FCY7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoV +IPVVYpbtbZNQvOSqeK3Zywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm +66+KAQ== +-----END CERTIFICATE----- + +CA Disig +======== +-----BEGIN CERTIFICATE----- +MIIEDzCCAvegAwIBAgIBATANBgkqhkiG9w0BAQUFADBKMQswCQYDVQQGEwJTSzETMBEGA1UEBxMK +QnJhdGlzbGF2YTETMBEGA1UEChMKRGlzaWcgYS5zLjERMA8GA1UEAxMIQ0EgRGlzaWcwHhcNMDYw +MzIyMDEzOTM0WhcNMTYwMzIyMDEzOTM0WjBKMQswCQYDVQQGEwJTSzETMBEGA1UEBxMKQnJhdGlz +bGF2YTETMBEGA1UEChMKRGlzaWcgYS5zLjERMA8GA1UEAxMIQ0EgRGlzaWcwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQCS9jHBfYj9mQGp2HvycXXxMcbzdWb6UShGhJd4NLxs/LxFWYgm 
+GErENx+hSkS943EE9UQX4j/8SFhvXJ56CbpRNyIjZkMhsDxkovhqFQ4/61HhVKndBpnXmjxUizkD +Pw/Fzsbrg3ICqB9x8y34dQjbYkzo+s7552oftms1grrijxaSfQUMbEYDXcDtab86wYqg6I7ZuUUo +hwjstMoVvoLdtUSLLa2GDGhibYVW8qwUYzrG0ZmsNHhWS8+2rT+MitcE5eN4TPWGqvWP+j1scaMt +ymfraHtuM6kMgiioTGohQBUgDCZbg8KpFhXAJIJdKxatymP2dACw30PEEGBWZ2NFAgMBAAGjgf8w +gfwwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUjbJJaJ1yCCW5wCf1UJNWSEZx+Y8wDgYDVR0P +AQH/BAQDAgEGMDYGA1UdEQQvMC2BE2Nhb3BlcmF0b3JAZGlzaWcuc2uGFmh0dHA6Ly93d3cuZGlz +aWcuc2svY2EwZgYDVR0fBF8wXTAtoCugKYYnaHR0cDovL3d3dy5kaXNpZy5zay9jYS9jcmwvY2Ff +ZGlzaWcuY3JsMCygKqAohiZodHRwOi8vY2EuZGlzaWcuc2svY2EvY3JsL2NhX2Rpc2lnLmNybDAa +BgNVHSAEEzARMA8GDSuBHpGT5goAAAABAQEwDQYJKoZIhvcNAQEFBQADggEBAF00dGFMrzvY/59t +WDYcPQuBDRIrRhCA/ec8J9B6yKm2fnQwM6M6int0wHl5QpNt/7EpFIKrIYwvF/k/Ji/1WcbvgAa3 +mkkp7M5+cTxqEEHA9tOasnxakZzArFvITV734VP/Q3f8nktnbNfzg9Gg4H8l37iYC5oyOGwwoPP/ +CBUz91BKez6jPiCp3C9WgArtQVCwyfTssuMmRAAOb54GvCKWU3BlxFAKRmukLyeBEicTXxChds6K +ezfqwzlhA5WYOudsiCUI/HloDYd9Yvi0X/vF2Ey9WLw/Q1vUHgFNPGO+I++MzVpQuGhU+QqZMxEA +4Z7CRneC9VkGjCFMhwnN5ag= +-----END CERTIFICATE----- + +Juur-SK +======= +-----BEGIN CERTIFICATE----- +MIIE5jCCA86gAwIBAgIEO45L/DANBgkqhkiG9w0BAQUFADBdMRgwFgYJKoZIhvcNAQkBFglwa2lA +c2suZWUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKExlBUyBTZXJ0aWZpdHNlZXJpbWlza2Vza3VzMRAw +DgYDVQQDEwdKdXVyLVNLMB4XDTAxMDgzMDE0MjMwMVoXDTE2MDgyNjE0MjMwMVowXTEYMBYGCSqG +SIb3DQEJARYJcGtpQHNrLmVlMQswCQYDVQQGEwJFRTEiMCAGA1UEChMZQVMgU2VydGlmaXRzZWVy +aW1pc2tlc2t1czEQMA4GA1UEAxMHSnV1ci1TSzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAIFxNj4zB9bjMI0TfncyRsvPGbJgMUaXhvSYRqTCZUXP00B841oiqBB4M8yIsdOBSvZiF3tf +TQou0M+LI+5PAk676w7KvRhj6IAcjeEcjT3g/1tf6mTll+g/mX8MCgkzABpTpyHhOEvWgxutr2TC ++Rx6jGZITWYfGAriPrsfB2WThbkasLnE+w0R9vXW+RvHLCu3GFH+4Hv2qEivbDtPL+/40UceJlfw +UR0zlv/vWT3aTdEVNMfqPxZIe5EcgEMPPbgFPtGzlc3Yyg/CQ2fbt5PgIoIuvvVoKIO5wTtpeyDa +Tpxt4brNj3pssAki14sL2xzVWiZbDcDq5WDQn/413z8CAwEAAaOCAawwggGoMA8GA1UdEwEB/wQF +MAMBAf8wggEWBgNVHSAEggENMIIBCTCCAQUGCisGAQQBzh8BAQEwgfYwgdAGCCsGAQUFBwICMIHD +HoHAAFMAZQBlACAAcwBlAHIAdABpAGYAaQBrAGEAYQB0ACAAbwBuACAAdgDkAGwAagBhAHMAdABh +AHQAdQBkACAAQQBTAC0AaQBzACAAUwBlAHIAdABpAGYAaQB0AHMAZQBlAHIAaQBtAGkAcwBrAGUA +cwBrAHUAcwAgAGEAbABhAG0ALQBTAEsAIABzAGUAcgB0AGkAZgBpAGsAYQBhAHQAaQBkAGUAIABr +AGkAbgBuAGkAdABhAG0AaQBzAGUAawBzMCEGCCsGAQUFBwIBFhVodHRwOi8vd3d3LnNrLmVlL2Nw +cy8wKwYDVR0fBCQwIjAgoB6gHIYaaHR0cDovL3d3dy5zay5lZS9qdXVyL2NybC8wHQYDVR0OBBYE +FASqekej5ImvGs8KQKcYP2/v6X2+MB8GA1UdIwQYMBaAFASqekej5ImvGs8KQKcYP2/v6X2+MA4G +A1UdDwEB/wQEAwIB5jANBgkqhkiG9w0BAQUFAAOCAQEAe8EYlFOiCfP+JmeaUOTDBS8rNXiRTHyo +ERF5TElZrMj3hWVcRrs7EKACr81Ptcw2Kuxd/u+gkcm2k298gFTsxwhwDY77guwqYHhpNjbRxZyL +abVAyJRld/JXIWY7zoVAtjNjGr95HvxcHdMdkxuLDF2FvZkwMhgJkVLpfKG6/2SSmuz+Ne6ML678 +IIbsSt4beDI3poHSna9aEhbKmVv8b20OxaAehsmR0FyYgl9jDIpaq9iVpszLita/ZEuOyoqysOkh +Mp6qqIWYNIE5ITuoOlIyPfZrN4YGWhWY3PARZv40ILcD9EEQfTmEeZZyY7aWAuVrua0ZTbvGRNs2 +yyqcjg== +-----END CERTIFICATE----- + +Hongkong Post Root CA 1 +======================= +-----BEGIN CERTIFICATE----- +MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsxFjAUBgNVBAoT +DUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3QgUm9vdCBDQSAxMB4XDTAzMDUx +NTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkGA1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25n +IFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEF +AAOCAQ8AMIIBCgKCAQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1 +ApzQjVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEnPzlTCeqr +auh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjhZY4bXSNmO7ilMlHIhqqh 
+qZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9nnV0ttgCXjqQesBCNnLsak3c78QA3xMY +V18meMjWCnl3v/evt3a5pQuEF10Q6m/hq5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNV +HRMBAf8ECDAGAQH/AgEDMA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7i +h9legYsCmEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI37pio +l7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clBoiMBdDhViw+5Lmei +IAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJsEhTkYY2sEJCehFC78JZvRZ+K88ps +T/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpOfMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilT +c4afU9hDDl3WY4JxHYB0yvbiAmvZWg== +-----END CERTIFICATE----- + +SecureSign RootCA11 +=================== +-----BEGIN CERTIFICATE----- +MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDErMCkGA1UEChMi +SmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoGA1UEAxMTU2VjdXJlU2lnbiBS +b290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSsw +KQYDVQQKEyJKYXBhbiBDZXJ0aWZpY2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1 +cmVTaWduIFJvb3RDQTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvL +TJszi1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8h9uuywGO +wvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOVMdrAG/LuYpmGYz+/3ZMq +g6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rP +O7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitA +bpSACW22s293bzUIUPsCh8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZX +t94wDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAKCh +OBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xmKbabfSVSSUOrTC4r +bnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQX5Ucv+2rIrVls4W6ng+4reV6G4pQ +Oh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWrQbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01 +y8hSyn+B/tlr0/cR7SXf+Of5pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061 +lgeLKBObjBmNQSdJQO7e5iNEOdyhIta6A/I= +-----END CERTIFICATE----- + +ACEDICOM Root +============= +-----BEGIN CERTIFICATE----- +MIIFtTCCA52gAwIBAgIIYY3HhjsBggUwDQYJKoZIhvcNAQEFBQAwRDEWMBQGA1UEAwwNQUNFRElD +T00gUm9vdDEMMAoGA1UECwwDUEtJMQ8wDQYDVQQKDAZFRElDT00xCzAJBgNVBAYTAkVTMB4XDTA4 +MDQxODE2MjQyMloXDTI4MDQxMzE2MjQyMlowRDEWMBQGA1UEAwwNQUNFRElDT00gUm9vdDEMMAoG +A1UECwwDUEtJMQ8wDQYDVQQKDAZFRElDT00xCzAJBgNVBAYTAkVTMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA/5KV4WgGdrQsyFhIyv2AVClVYyT/kGWbEHV7w2rbYgIB8hiGtXxaOLHk +WLn709gtn70yN78sFW2+tfQh0hOR2QetAQXW8713zl9CgQr5auODAKgrLlUTY4HKRxx7XBZXehuD +YAQ6PmXDzQHe3qTWDLqO3tkE7hdWIpuPY/1NFgu3e3eM+SW10W2ZEi5PGrjm6gSSrj0RuVFCPYew +MYWveVqc/udOXpJPQ/yrOq2lEiZmueIM15jO1FillUAKt0SdE3QrwqXrIhWYENiLxQSfHY9g5QYb +m8+5eaA9oiM/Qj9r+hwDezCNzmzAv+YbX79nuIQZ1RXve8uQNjFiybwCq0Zfm/4aaJQ0PZCOrfbk +HQl/Sog4P75n/TSW9R28MHTLOO7VbKvU/PQAtwBbhTIWdjPp2KOZnQUAqhbm84F9b32qhm2tFXTT +xKJxqvQUfecyuB+81fFOvW8XAjnXDpVCOscAPukmYxHqC9FK/xidstd7LzrZlvvoHpKuE1XI2Sf2 +3EgbsCTBheN3nZqk8wwRHQ3ItBTutYJXCb8gWH8vIiPYcMt5bMlL8qkqyPyHK9caUPgn6C9D4zq9 +2Fdx/c6mUlv53U3t5fZvie27k5x2IXXwkkwp9y+cAS7+UEaeZAwUswdbxcJzbPEHXEUkFDWug/Fq +TYl6+rPYLWbwNof1K1MCAwEAAaOBqjCBpzAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKaz +4SsrSbbXc6GqlPUB53NlTKxQMA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUprPhKytJttdzoaqU +9QHnc2VMrFAwRAYDVR0gBD0wOzA5BgRVHSAAMDEwLwYIKwYBBQUHAgEWI2h0dHA6Ly9hY2VkaWNv +bS5lZGljb21ncm91cC5jb20vZG9jMA0GCSqGSIb3DQEBBQUAA4ICAQDOLAtSUWImfQwng4/F9tqg +aHtPkl7qpHMyEVNEskTLnewPeUKzEKbHDZ3Ltvo/Onzqv4hTGzz3gvoFNTPhNahXwOf9jU8/kzJP +eGYDdwdY6ZXIfj7QeQCM8htRM5u8lOk6e25SLTKeI6RF+7YuE7CLGLHdztUdp0J/Vb77W7tH1Pwk +zQSulgUV1qzOMPPKC8W64iLgpq0i5ALudBF/TP94HTXa5gI06xgSYXcGCRZj6hitoocf8seACQl1 
+ThCojz2GuHURwCRiipZ7SkXp7FnFvmuD5uHorLUwHv4FB4D54SMNUI8FmP8sX+g7tq3PgbUhh8oI +KiMnMCArz+2UW6yyetLHKKGKC5tNSixthT8Jcjxn4tncB7rrZXtaAWPWkFtPF2Y9fwsZo5NjEFIq +nxQWWOLcpfShFosOkYuByptZ+thrkQdlVV9SH686+5DdaaVbnG0OLLb6zqylfDJKZ0DcMDQj3dcE +I2bw/FWAp/tmGYI1Z2JwOV5vx+qQQEQIHriy1tvuWacNGHk0vFQYXlPKNFHtRQrmjseCNj6nOGOp +MCwXEGCSn1WHElkQwg9naRHMTh5+Spqtr0CodaxWkHS4oJyleW/c6RrIaQXpuvoDs3zk4E7Czp3o +tkYNbn5XOmeUwssfnHdKZ05phkOTOPu220+DkdRgfks+KzgHVZhepA== +-----END CERTIFICATE----- + +Verisign Class 3 Public Primary Certification Authority +======================================================= +-----BEGIN CERTIFICATE----- +MIICPDCCAaUCEDyRMcsf9tAbDpq40ES/Er4wDQYJKoZIhvcNAQEFBQAwXzELMAkGA1UEBhMCVVMx +FzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmltYXJ5 +IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2MDEyOTAwMDAwMFoXDTI4MDgwMjIzNTk1OVow +XzELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAz +IFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUA +A4GNADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhEBarsAx94 +f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/isI19wKTakyYbnsZogy1Ol +hec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBABByUqkFFBky +CEHwxWsKzH4PIRnN5GfcX6kb5sroc50i2JhucwNhkcV8sEVAbkSdjbCxlnRhLQ2pRdKkkirWmnWX +bj9T/UWZYB2oK0z5XqcJ2HUw19JlYD1n1khVdWk/kfVIC0dpImmClr7JyDiGSnoscxlIaU5rfGW/ +D/xwzoiQ +-----END CERTIFICATE----- + +Microsec e-Szigno Root CA 2009 +============================== +-----BEGIN CERTIFICATE----- +MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYDVQQGEwJIVTER +MA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jv +c2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o +dTAeFw0wOTA2MTYxMTMwMThaFw0yOTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UE +BwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUt +U3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvPkd6mJviZpWNwrZuuyjNA +fW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tccbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG +0IMZfcChEhyVbUr02MelTTMuhTlAdX4UfIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKA +pxn1ntxVUwOXewdI/5n7N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm +1HxdrtbCxkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1+rUC +AwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTLD8bf +QkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAbBgNVHREE +FDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqGSIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0o +lZMEyL/azXm4Q5DwpL7v8u8hmLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfX +I/OMn74dseGkddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 +tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c2Pm2G2JwCz02 +yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5tHMN1Rq41Bab2XD0h7lbwyYIi +LXpUq3DDfSJlgnCW +-----END CERTIFICATE----- + +E-Guven Kok Elektronik Sertifika Hizmet Saglayicisi +=================================================== +-----BEGIN CERTIFICATE----- +MIIDtjCCAp6gAwIBAgIQRJmNPMADJ72cdpW56tustTANBgkqhkiG9w0BAQUFADB1MQswCQYDVQQG +EwJUUjEoMCYGA1UEChMfRWxla3Ryb25payBCaWxnaSBHdXZlbmxpZ2kgQS5TLjE8MDoGA1UEAxMz +ZS1HdXZlbiBLb2sgRWxla3Ryb25payBTZXJ0aWZpa2EgSGl6bWV0IFNhZ2xheWljaXNpMB4XDTA3 +MDEwNDExMzI0OFoXDTE3MDEwNDExMzI0OFowdTELMAkGA1UEBhMCVFIxKDAmBgNVBAoTH0VsZWt0 +cm9uaWsgQmlsZ2kgR3V2ZW5saWdpIEEuUy4xPDA6BgNVBAMTM2UtR3V2ZW4gS29rIEVsZWt0cm9u 
+aWsgU2VydGlmaWthIEhpem1ldCBTYWdsYXlpY2lzaTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC +AQoCggEBAMMSIJ6wXgBljU5Gu4Bc6SwGl9XzcslwuedLZYDBS75+PNdUMZTe1RK6UxYC6lhj71vY +8+0qGqpxSKPcEC1fX+tcS5yWCEIlKBHMilpiAVDV6wlTL/jDj/6z/P2douNffb7tC+Bg62nsM+3Y +jfsSSYMAyYuXjDtzKjKzEve5TfL0TW3H5tYmNwjy2f1rXKPlSFxYvEK+A1qBuhw1DADT9SN+cTAI +JjjcJRFHLfO6IxClv7wC90Nex/6wN1CZew+TzuZDLMN+DfIcQ2Zgy2ExR4ejT669VmxMvLz4Bcpk +9Ok0oSy1c+HCPujIyTQlCFzz7abHlJ+tiEMl1+E5YP6sOVkCAwEAAaNCMEAwDgYDVR0PAQH/BAQD +AgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFJ/uRLOU1fqRTy7ZVZoEVtstxNulMA0GCSqG +SIb3DQEBBQUAA4IBAQB/X7lTW2M9dTLn+sR0GstG30ZpHFLPqk/CaOv/gKlR6D1id4k9CnU58W5d +F4dvaAXBlGzZXd/aslnLpRCKysw5zZ/rTt5S/wzw9JKp8mxTq5vSR6AfdPebmvEvFZ96ZDAYBzwq +D2fK/A+JYZ1lpTzlvBNbCNvj/+27BrtqBrF6T2XGgv0enIu1De5Iu7i9qgi0+6N8y5/NkHZchpZ4 +Vwpm+Vganf2XKWDeEaaQHBkc7gGWIjQ0LpH5t8Qn0Xvmv/uARFoW5evg1Ao4vOSR49XrXMGs3xtq +fJ7lddK2l4fbzIcrQzqECK+rPNv3PGYxhrCdU3nt+CPeQuMtgvEP5fqX +-----END CERTIFICATE----- + +GlobalSign Root CA - R3 +======================= +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4GA1UECxMXR2xv +YmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2Jh +bFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxT +aWduIFJvb3QgQ0EgLSBSMzETMBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2ln +bjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWt +iHL8RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsTgHeMCOFJ +0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmmKPZpO/bLyCiR5Z2KYVc3 +rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zdQQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjl +OCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZXriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2 +xmmFghcCAwEAAaNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE +FI/wS3+oLkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZURUm7 +lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMpjjM5RcOO5LlXbKr8 +EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK6fBdRoyV3XpYKBovHd7NADdBj+1E +bddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQXmcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18 +YIvDQVETI53O9zJrlAGomecsMx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7r +kpeDMdmztcpHWD9f +-----END CERTIFICATE----- + +TC TrustCenter Universal CA III +=============================== +-----BEGIN CERTIFICATE----- +MIID4TCCAsmgAwIBAgIOYyUAAQACFI0zFQLkbPQwDQYJKoZIhvcNAQEFBQAwezELMAkGA1UEBhMC +REUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNVBAsTG1RDIFRydXN0Q2VudGVy +IFVuaXZlcnNhbCBDQTEoMCYGA1UEAxMfVEMgVHJ1c3RDZW50ZXIgVW5pdmVyc2FsIENBIElJSTAe +Fw0wOTA5MDkwODE1MjdaFw0yOTEyMzEyMzU5NTlaMHsxCzAJBgNVBAYTAkRFMRwwGgYDVQQKExNU +QyBUcnVzdENlbnRlciBHbWJIMSQwIgYDVQQLExtUQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0Ex +KDAmBgNVBAMTH1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQSBJSUkwggEiMA0GCSqGSIb3DQEB +AQUAA4IBDwAwggEKAoIBAQDC2pxisLlxErALyBpXsq6DFJmzNEubkKLF5+cvAqBNLaT6hdqbJYUt +QCggbergvbFIgyIpRJ9Og+41URNzdNW88jBmlFPAQDYvDIRlzg9uwliT6CwLOunBjvvya8o84pxO +juT5fdMnnxvVZ3iHLX8LR7PH6MlIfK8vzArZQe+f/prhsq75U7Xl6UafYOPfjdN/+5Z+s7Vy+Eut +CHnNaYlAJ/Uqwa1D7KRTyGG299J5KmcYdkhtWyUB0SbFt1dpIxVbYYqt8Bst2a9c8SaQaanVDED1 +M4BDj5yjdipFtK+/fz6HP3bFzSreIMUWWMv5G/UPyw0RUmS40nZid4PxWJ//AgMBAAGjYzBhMB8G +A1UdIwQYMBaAFFbn4VslQ4Dg9ozhcbyO5YAvxEjiMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgEGMB0GA1UdDgQWBBRW5+FbJUOA4PaM4XG8juWAL8RI4jANBgkqhkiG9w0BAQUFAAOCAQEA +g8ev6n9NCjw5sWi+e22JLumzCecYV42FmhfzdkJQEw/HkG8zrcVJYCtsSVgZ1OK+t7+rSbyUyKu+ +KGwWaODIl0YgoGhnYIg5IFHYaAERzqf2EQf27OysGh+yZm5WZ2B6dF7AbZc2rrUNXWZzwCUyRdhK 
+BgePxLcHsU0GDeGl6/R1yrqc0L2z0zIkTO5+4nYES0lT2PLpVDP85XEfPRRclkvxOvIAu2y0+pZV +CIgJwcyRGSmwIC3/yzikQOEXvnlhgP8HA4ZMTnsGnxGGjYnuJ8Tb4rwZjgvDwxPHLQNjO9Po5KIq +woIIlBZU8O8fJ5AluA0OKBtHd0e9HKgl8ZS0Zg== +-----END CERTIFICATE----- + +Autoridad de Certificacion Firmaprofesional CIF A62634068 +========================================================= +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UEBhMCRVMxQjBA +BgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1hcHJvZmVzaW9uYWwgQ0lGIEE2 +MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEyMzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIw +QAYDVQQDDDlBdXRvcmlkYWQgZGUgQ2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBB +NjI2MzQwNjgwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDD +Utd9thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQMcas9UX4P +B99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefGL9ItWY16Ck6WaVICqjaY +7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15iNA9wBj4gGFrO93IbJWyTdBSTo3OxDqqH +ECNZXyAFGUftaI6SEspd/NYrspI8IM/hX68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyI +plD9amML9ZMWGxmPsu2bm8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctX +MbScyJCyZ/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirjaEbsX +LZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/TKI8xWVvTyQKmtFLK +bpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF6NkBiDkal4ZkQdU7hwxu+g/GvUgU +vzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVhOSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1Ud +EwEB/wQIMAYBAf8CAQEwDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNH +DhpkLzCBpgYDVR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp +cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBvACAAZABlACAA +bABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBlAGwAbwBuAGEAIAAwADgAMAAx +ADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx +51tkljYyGOylMnfX40S2wBEqgLk9am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qk +R71kMrv2JYSiJ0L1ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaP +T481PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS3a/DTg4f +Jl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5kSeTy36LssUzAKh3ntLFl +osS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF3dvd6qJ2gHN99ZwExEWN57kci57q13XR +crHedUTnQn3iV2t93Jm8PYMo6oCTjcVMZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoR +saS8I8nkvof/uZS2+F0gStRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTD +KCOM/iczQ0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQBjLMi +6Et8Vcad+qMUu2WFbm5PEn4KPJ2V +-----END CERTIFICATE----- + +Izenpe.com +========== +-----BEGIN CERTIFICATE----- +MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4MQswCQYDVQQG +EwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5jb20wHhcNMDcxMjEz +MTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMu +QS4xEzARBgNVBAMMCkl6ZW5wZS5jb20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ +03rKDx6sp4boFmVqscIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAK +ClaOxdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6HLmYRY2xU ++zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFXuaOKmMPsOzTFlUFpfnXC +PCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQDyCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxT +OTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbK +F7jJeodWLBoBHmy+E60QrLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK +0GqfvEyNBjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8Lhij+ +0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIBQFqNeb+Lz0vPqhbB 
+leStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+HMh3/1uaD7euBUbl8agW7EekFwID +AQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2luZm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+ +SVpFTlBFIFMuQS4gLSBDSUYgQTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBG +NjIgUzgxQzBBBgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx +MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0O +BBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUAA4ICAQB4pgwWSp9MiDrAyw6l +Fn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWblaQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbga +kEyrkgPH7UIBzg/YsfqikuFgba56awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8q +hT/AQKM6WfxZSzwoJNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Cs +g1lwLDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCTVyvehQP5 +aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGkLhObNA5me0mrZJfQRsN5 +nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJbUjWumDqtujWTI6cfSN01RpiyEGjkpTHC +ClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZo +Q0iy2+tzJOeRf1SktoA+naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1Z +WrOZyGlsQyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== +-----END CERTIFICATE----- + +Chambers of Commerce Root - 2008 +================================ +-----BEGIN CERTIFICATE----- +MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYDVQQGEwJFVTFD +MEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNv +bS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMu +QS4xKTAnBgNVBAMTIENoYW1iZXJzIG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEy +Mjk1MFoXDTM4MDczMTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNl +ZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIwEAYDVQQF +EwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJl +cnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW928sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKA +XuFixrYp4YFs8r/lfTJqVKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorj +h40G072QDuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR5gN/ +ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfLZEFHcpOrUMPrCXZk +NNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05aSd+pZgvMPMZ4fKecHePOjlO+Bd5g +D2vlGts/4+EhySnB8esHnFIbAURRPHsl18TlUlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331 +lubKgdaX8ZSD6e2wsWsSaR6s+12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ +0wlf2eOKNcx5Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj +ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAxhduub+84Mxh2 +EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNVHQ4EFgQU+SSsD7K1+HnA+mCI +G8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1+HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJ +BgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNh +bWVyZmlybWEuY29tL2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENh +bWVyZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDiC +CQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCowKAYIKwYBBQUH +AgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZIhvcNAQEFBQADggIBAJASryI1 +wqM58C7e6bXpeHxIvj99RZJe6dqxGfwWPJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH +3qLPaYRgM+gQDROpI9CF5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbU +RWpGqOt1glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaHFoI6 +M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2pSB7+R5KBWIBpih1 +YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MDxvbxrN8y8NmBGuScvfaAFPDRLLmF 
+9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QGtjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcK +zBIKinmwPQN/aUv0NCB9szTqjktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvG +nrDQWzilm1DefhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg +OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZd0jQ +-----END CERTIFICATE----- + +Global Chambersign Root - 2008 +============================== +-----BEGIN CERTIFICATE----- +MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYDVQQGEwJFVTFD +MEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNv +bS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMu +QS4xJzAlBgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMx +NDBaFw0zODA3MzExMjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUg +Y3VycmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJ +QTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD +aGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDf +VtPkOpt2RbQT2//BthmLN0EYlVJH6xedKYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXf +XjaOcNFccUMd2drvXNL7G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0 +ZJJ0YPP2zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4ddPB +/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyGHoiMvvKRhI9lNNgA +TH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2Id3UwD2ln58fQ1DJu7xsepeY7s2M +H/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3VyJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfe +Ox2YItaswTXbo6Al/3K1dh3ebeksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSF +HTynyQbehP9r6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh +wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsogzCtLkykPAgMB +AAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQWBBS5CcqcHtvTbDprru1U8VuT +BjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDprru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UE +BhMCRVUxQzBBBgNVBAcTOk1hZHJpZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJm +aXJtYS5jb20vYWRkcmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJm +aXJtYSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiCCQDJzdPp +1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCowKAYIKwYBBQUHAgEWHGh0 +dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZIhvcNAQEFBQADggIBAICIf3DekijZBZRG +/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZUohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6 +ReAJ3spED8IXDneRRXozX1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/s +dZ7LoR/xfxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVza2Mg +9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yydYhz2rXzdpjEetrHH +foUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMdSqlapskD7+3056huirRXhOukP9Du +qqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9OAP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETr +P3iZ8ntxPjzxmKfFGBI/5rsoM0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVq +c5iJWzouE4gev8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z +09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B +-----END CERTIFICATE----- + +Go Daddy Root Certificate Authority - G2 +======================================== +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMxEDAOBgNVBAgT +B0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoTEUdvRGFkZHkuY29tLCBJbmMu +MTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8G 
+A1UEAxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKDE6bFIEMBO4Tx5oVJnyfq +9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD ++qK+ihVqf94Lw7YZFAXK6sOoBJQ7RnwyDfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutd +fMh8+7ArU6SSYmlRJQVhGkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMl +NAJWJwGRtDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEAAaNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFDqahQcQZyi27/a9 +BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmXWWcDYfF+OwYxdS2hII5PZYe096ac +vNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r +5N9ss4UXnT3ZJE95kTXWXwTrgIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYV +N8Gb5DKj7Tjo2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI4uJEvlz36hz1 +-----END CERTIFICATE----- + +Starfield Root Certificate Authority - G2 +========================================= +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMxEDAOBgNVBAgT +B0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoTHFN0YXJmaWVsZCBUZWNobm9s +b2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVsZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0 +eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAw +DgYDVQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQg +VGVjaG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZpY2F0ZSBB +dXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL3twQP89o/8ArFv +W59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMgnLRJdzIpVv257IzdIvpy3Cdhl+72WoTs +bhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNk +N3mSwOxGXn/hbVNMYq/NHwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7Nf +ZTD4p7dNdloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0HZbU +JtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0GCSqGSIb3DQEBCwUAA4IBAQARWfol +TwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjUsHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx +4mcujJUDJi5DnUox9g61DLu34jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUw +F5okxBDgBPfg8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1mMpYjn0q7pBZ +c2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +Starfield Services Root Certificate Authority - G2 +================================================== +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMxEDAOBgNVBAgT +B0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoTHFN0YXJmaWVsZCBUZWNobm9s +b2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVsZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRl +IEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNV +BAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxT +dGFyZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2VydmljZXMg +Um9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC +AQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20pOsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2 +h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm28xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4Pa +hHQUw2eeBGg6345AWh1KTs9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLP +LJGmpufehRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk6mFB +rMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAwDwYDVR0TAQH/BAUw 
+AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+qAdcwKziIorhtSpzyEZGDMA0GCSqG +SIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMIbw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPP +E95Dz+I0swSdHynVv/heyNXBve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTy +xQGjhdByPq1zqwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn0q23KXB56jza +YyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCNsSi6 +-----END CERTIFICATE----- + +AffirmTrust Commercial +====================== +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UEBhMCVVMxFDAS +BgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVzdCBDb21tZXJjaWFsMB4XDTEw +MDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDELMAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmly +bVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEF +AAOCAQ8AMIIBCgKCAQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6Eqdb +DuKPHx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yrba0F8PrV +C8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPALMeIrJmqbTFeurCA+ukV6 +BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1yHp52UKqK39c/s4mT6NmgTWvRLpUHhww +MmWd5jyTXlBOeuM61G7MGvv50jeuJCqrVwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNV +HQ4EFgQUnZPGU4teyq8/nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwDQYJKoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYGXUPG +hi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNjvbz4YYCanrHOQnDi +qX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivtZ8SOyUOyXGsViQK8YvxO8rUzqrJv +0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9gN53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0kh +sUlHRUe072o0EclNmsxZt9YCnlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= +-----END CERTIFICATE----- + +AffirmTrust Networking +====================== +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UEBhMCVVMxFDAS +BgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVzdCBOZXR3b3JraW5nMB4XDTEw +MDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDELMAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmly +bVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEF +AAOCAQ8AMIIBCgKCAQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SE +Hi3yYJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbuakCNrmreI +dIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRLQESxG9fhwoXA3hA/Pe24 +/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gb +h+0t+nvujArjqWaJGctB+d1ENmHP4ndGyH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNV +HQ4EFgQUBx/S55zawm6iQLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwDQYJKoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfOtDIu +UFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzuQY0x2+c06lkh1QF6 +12S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZLgo/bNjR9eUJtGxUAArgFU2HdW23 +WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4uolu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9 +/ZFvgrG+CJPbFEfxojfHRZ48x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= +-----END CERTIFICATE----- + +AffirmTrust Premium +=================== +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UEBhMCVVMxFDAS +BgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVzdCBQcmVtaXVtMB4XDTEwMDEy +OTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRy +dXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A +MIICCgKCAgEAxBLfqV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtn +BKAQJG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ+jjeRFcV 
+5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrSs8PhaJyJ+HoAVt70VZVs ++7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmd +GPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d770O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5R +p9EixAqnOEhss/n/fauGV+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NI +S+LI+H+SqHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S5u04 +6uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4IaC1nEWTJ3s7xgaVY5 +/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TXOwF0lkLgAOIua+rF7nKsu7/+6qqo ++Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYEFJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByv +MiPIs0laUZx2KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg +Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B8OWycvpEgjNC +6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQMKSOyARiqcTtNd56l+0OOF6S +L5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK ++4w1IX2COPKpVJEZNZOUbWo6xbLQu4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmV +BtWVyuEklut89pMFu+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFg +IxpHYoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8GKa1qF60 +g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaORtGdFNrHF+QFlozEJLUb +zxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6eKeC2uAloGRwYQw== +-----END CERTIFICATE----- + +AffirmTrust Premium ECC +======================= +-----BEGIN CERTIFICATE----- +MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMCVVMxFDASBgNV +BAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQcmVtaXVtIEVDQzAeFw0xMDAx +MjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJBgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1U +cnVzdDEgMB4GA1UEAwwXQWZmaXJtVHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAQNMF4bFZ0D0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQ +N8O9ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0GA1UdDgQW +BBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAK +BggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/VsaobgxCd05DhT1wV/GzTjxi+zygk8N53X +57hG8f2h4nECMEJZh0PUUd+60wkyWs6Iflc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKM +eQ== +-----END CERTIFICATE----- + +Certum Trusted Network CA +========================= +-----BEGIN CERTIFICATE----- +MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBMMSIwIAYDVQQK +ExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBUcnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIy +MTIwNzM3WhcNMjkxMjMxMTIwNzM3WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBU +ZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 +MSIwIAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0BAQEFAAOC +AQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rHUV+rpDKmYYe2bg+G0jAC +l/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LMTXPb865Px1bVWqeWifrzq2jUI4ZZJ88J +J7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVUBBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4 +fOQtf/WsX+sWn7Et0brMkUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0 +cvW0QM8xAcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNVHRMB +Af8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNVHQ8BAf8EBAMCAQYw +DQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15ysHhE49wcrwn9I0j6vSrEuVUEtRCj +jSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfLI9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1 +mS1FhIrlQgnXdAIv94nYmem8J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5aj 
+Zt3hrvJBW8qYVoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI +03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= +-----END CERTIFICATE----- + +Certinomis - Autorité Racine +============================= +-----BEGIN CERTIFICATE----- +MIIFnDCCA4SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJGUjETMBEGA1UEChMK +Q2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxJjAkBgNVBAMMHUNlcnRpbm9taXMg +LSBBdXRvcml0w6kgUmFjaW5lMB4XDTA4MDkxNzA4Mjg1OVoXDTI4MDkxNzA4Mjg1OVowYzELMAkG +A1UEBhMCRlIxEzARBgNVBAoTCkNlcnRpbm9taXMxFzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMSYw +JAYDVQQDDB1DZXJ0aW5vbWlzIC0gQXV0b3JpdMOpIFJhY2luZTCCAiIwDQYJKoZIhvcNAQEBBQAD +ggIPADCCAgoCggIBAJ2Fn4bT46/HsmtuM+Cet0I0VZ35gb5j2CN2DpdUzZlMGvE5x4jYF1AMnmHa +wE5V3udauHpOd4cN5bjr+p5eex7Ezyh0x5P1FMYiKAT5kcOrJ3NqDi5N8y4oH3DfVS9O7cdxbwly +Lu3VMpfQ8Vh30WC8Tl7bmoT2R2FFK/ZQpn9qcSdIhDWerP5pqZ56XjUl+rSnSTV3lqc2W+HN3yNw +2F1MpQiD8aYkOBOo7C+ooWfHpi2GR+6K/OybDnT0K0kCe5B1jPyZOQE51kqJ5Z52qz6WKDgmi92N +jMD2AR5vpTESOH2VwnHu7XSu5DaiQ3XV8QCb4uTXzEIDS3h65X27uK4uIJPT5GHfceF2Z5c/tt9q +c1pkIuVC28+BA5PY9OMQ4HL2AHCs8MF6DwV/zzRpRbWT5BnbUhYjBYkOjUjkJW+zeL9i9Qf6lSTC +lrLooyPCXQP8w9PlfMl1I9f09bze5N/NgL+RiH2nE7Q5uiy6vdFrzPOlKO1Enn1So2+WLhl+HPNb +xxaOu2B9d2ZHVIIAEWBsMsGoOBvrbpgT1u449fCfDu/+MYHB0iSVL1N6aaLwD4ZFjliCK0wi1F6g +530mJ0jfJUaNSih8hp75mxpZuWW/Bd22Ql095gBIgl4g9xGC3srYn+Y3RyYe63j3YcNBZFgCQfna +4NH4+ej9Uji29YnfAgMBAAGjWzBZMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBQNjLZh2kS40RR9w759XkjwzspqsDAXBgNVHSAEEDAOMAwGCiqBegFWAgIAAQEwDQYJ +KoZIhvcNAQEFBQADggIBACQ+YAZ+He86PtvqrxyaLAEL9MW12Ukx9F1BjYkMTv9sov3/4gbIOZ/x +WqndIlgVqIrTseYyCYIDbNc/CMf4uboAbbnW/FIyXaR/pDGUu7ZMOH8oMDX/nyNTt7buFHAAQCva +R6s0fl6nVjBhK4tDrP22iCj1a7Y+YEq6QpA0Z43q619FVDsXrIvkxmUP7tCMXWY5zjKn2BCXwH40 +nJ+U8/aGH88bc62UeYdocMMzpXDn2NU4lG9jeeu/Cg4I58UvD0KgKxRA/yHgBcUn4YQRE7rWhh1B +CxMjidPJC+iKunqjo3M3NYB9Ergzd0A4wPpeMNLytqOx1qKVl4GbUu1pTP+A5FPbVFsDbVRfsbjv +JL1vnxHDx2TCDyhihWZeGnuyt++uNckZM6i4J9szVb9o4XVIRFb7zdNIu0eJOqxp9YDG5ERQL1TE +qkPFMTFYvZbF6nVsmnWxTfj3l/+WFvKXTej28xH5On2KOG4Ey+HTRRWqpdEdnV1j6CTmNhTih60b +WfVEm/vXd3wfAXBioSAaosUaKPQhA+4u2cGA6rnZgtZbdsLLO7XSAPCjDuGtbkD326C00EauFddE +wk01+dIL8hf2rGbVJLJP0RyZwG71fet0BLj5TXcJ17TPBzAJ8bgAVtkXFhYKK4bfjwEZGuW7gmP/ +vgt2Fl43N+bYdJeimUV5 +-----END CERTIFICATE----- + +Root CA Generalitat Valenciana +============================== +-----BEGIN CERTIFICATE----- +MIIGizCCBXOgAwIBAgIEO0XlaDANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJFUzEfMB0GA1UE +ChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0GA1UECxMGUEtJR1ZBMScwJQYDVQQDEx5Sb290 +IENBIEdlbmVyYWxpdGF0IFZhbGVuY2lhbmEwHhcNMDEwNzA2MTYyMjQ3WhcNMjEwNzAxMTUyMjQ3 +WjBoMQswCQYDVQQGEwJFUzEfMB0GA1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0GA1UE +CxMGUEtJR1ZBMScwJQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVuY2lhbmEwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDGKqtXETcvIorKA3Qdyu0togu8M1JAJke+WmmmO3I2 +F0zo37i7L3bhQEZ0ZQKQUgi0/6iMweDHiVYQOTPvaLRfX9ptI6GJXiKjSgbwJ/BXufjpTjJ3Cj9B +ZPPrZe52/lSqfR0grvPXdMIKX/UIKFIIzFVd0g/bmoGlu6GzwZTNVOAydTGRGmKy3nXiz0+J2ZGQ +D0EbtFpKd71ng+CT516nDOeB0/RSrFOyA8dEJvt55cs0YFAQexvba9dHq198aMpunUEDEO5rmXte +JajCq+TA81yc477OMUxkHl6AovWDfgzWyoxVjr7gvkkHD6MkQXpYHYTqWBLI4bft75PelAgxAgMB +AAGjggM7MIIDNzAyBggrBgEFBQcBAQQmMCQwIgYIKwYBBQUHMAGGFmh0dHA6Ly9vY3NwLnBraS5n +dmEuZXMwEgYDVR0TAQH/BAgwBgEB/wIBAjCCAjQGA1UdIASCAiswggInMIICIwYKKwYBBAG/VQIB +ADCCAhMwggHoBggrBgEFBQcCAjCCAdoeggHWAEEAdQB0AG8AcgBpAGQAYQBkACAAZABlACAAQwBl +AHIAdABpAGYAaQBjAGEAYwBpAPMAbgAgAFIAYQDtAHoAIABkAGUAIABsAGEAIABHAGUAbgBlAHIA +YQBsAGkAdABhAHQAIABWAGEAbABlAG4AYwBpAGEAbgBhAC4ADQAKAEwAYQAgAEQAZQBjAGwAYQBy 
+AGEAYwBpAPMAbgAgAGQAZQAgAFAAcgDhAGMAdABpAGMAYQBzACAAZABlACAAQwBlAHIAdABpAGYA +aQBjAGEAYwBpAPMAbgAgAHEAdQBlACAAcgBpAGcAZQAgAGUAbAAgAGYAdQBuAGMAaQBvAG4AYQBt +AGkAZQBuAHQAbwAgAGQAZQAgAGwAYQAgAHAAcgBlAHMAZQBuAHQAZQAgAEEAdQB0AG8AcgBpAGQA +YQBkACAAZABlACAAQwBlAHIAdABpAGYAaQBjAGEAYwBpAPMAbgAgAHMAZQAgAGUAbgBjAHUAZQBu +AHQAcgBhACAAZQBuACAAbABhACAAZABpAHIAZQBjAGMAaQDzAG4AIAB3AGUAYgAgAGgAdAB0AHAA +OgAvAC8AdwB3AHcALgBwAGsAaQAuAGcAdgBhAC4AZQBzAC8AYwBwAHMwJQYIKwYBBQUHAgEWGWh0 +dHA6Ly93d3cucGtpLmd2YS5lcy9jcHMwHQYDVR0OBBYEFHs100DSHHgZZu90ECjcPk+yeAT8MIGV +BgNVHSMEgY0wgYqAFHs100DSHHgZZu90ECjcPk+yeAT8oWykajBoMQswCQYDVQQGEwJFUzEfMB0G +A1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0GA1UECxMGUEtJR1ZBMScwJQYDVQQDEx5S +b290IENBIEdlbmVyYWxpdGF0IFZhbGVuY2lhbmGCBDtF5WgwDQYJKoZIhvcNAQEFBQADggEBACRh +TvW1yEICKrNcda3FbcrnlD+laJWIwVTAEGmiEi8YPyVQqHxK6sYJ2fR1xkDar1CdPaUWu20xxsdz +Ckj+IHLtb8zog2EWRpABlUt9jppSCS/2bxzkoXHPjCpaF3ODR00PNvsETUlR4hTJZGH71BTg9J63 +NI8KJr2XXPR5OkowGcytT6CYirQxlyric21+eLj4iIlPsSKRZEv1UN4D2+XFducTZnV+ZfsBn5OH +iJ35Rld8TWCvmHMTI6QgkYH60GFmuH3Rr9ZvHmw96RH9qfmCIoaZM3Fa6hlXPZHNqcCjbgcTpsnt ++GijnsNacgmHKNHEc8RzGF9QdRYxn7fofMM= +-----END CERTIFICATE----- + +A-Trust-nQual-03 +================ +-----BEGIN CERTIFICATE----- +MIIDzzCCAregAwIBAgIDAWweMA0GCSqGSIb3DQEBBQUAMIGNMQswCQYDVQQGEwJBVDFIMEYGA1UE +Cgw/QS1UcnVzdCBHZXMuIGYuIFNpY2hlcmhlaXRzc3lzdGVtZSBpbSBlbGVrdHIuIERhdGVudmVy +a2VociBHbWJIMRkwFwYDVQQLDBBBLVRydXN0LW5RdWFsLTAzMRkwFwYDVQQDDBBBLVRydXN0LW5R +dWFsLTAzMB4XDTA1MDgxNzIyMDAwMFoXDTE1MDgxNzIyMDAwMFowgY0xCzAJBgNVBAYTAkFUMUgw +RgYDVQQKDD9BLVRydXN0IEdlcy4gZi4gU2ljaGVyaGVpdHNzeXN0ZW1lIGltIGVsZWt0ci4gRGF0 +ZW52ZXJrZWhyIEdtYkgxGTAXBgNVBAsMEEEtVHJ1c3QtblF1YWwtMDMxGTAXBgNVBAMMEEEtVHJ1 +c3QtblF1YWwtMDMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtPWFuA/OQO8BBC4SA +zewqo51ru27CQoT3URThoKgtUaNR8t4j8DRE/5TrzAUjlUC5B3ilJfYKvUWG6Nm9wASOhURh73+n +yfrBJcyFLGM/BWBzSQXgYHiVEEvc+RFZznF/QJuKqiTfC0Li21a8StKlDJu3Qz7dg9MmEALP6iPE +SU7l0+m0iKsMrmKS1GWH2WrX9IWf5DMiJaXlyDO6w8dB3F/GaswADm0yqLaHNgBid5seHzTLkDx4 +iHQF63n1k3Flyp3HaxgtPVxO59X4PzF9j4fsCiIvI+n+u33J4PTs63zEsMMtYrWacdaxaujs2e3V +cuy+VwHOBVWf3tFgiBCzAgMBAAGjNjA0MA8GA1UdEwEB/wQFMAMBAf8wEQYDVR0OBAoECERqlWdV +eRFPMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAVdRU0VlIXLOThaq/Yy/kgM40 +ozRiPvbY7meIMQQDbwvUB/tOdQ/TLtPAF8fGKOwGDREkDg6lXb+MshOWcdzUzg4NCmgybLlBMRmr +sQd7TZjTXLDR8KdCoLXEjq/+8T/0709GAHbrAvv5ndJAlseIOrifEXnzgGWovR/TeIGgUUw3tKZd +JXDRZslo+S4RFGjxVJgIrCaSD96JntT6s3kr0qN51OyLrIdTaEJMUVF0HhsnLuP1Hyl0Te2v9+GS +mYHovjrHF1D2t8b8m7CKa9aIA5GPBnc6hQLdmNVDeD/GMBWsm2vLV7eJUYs66MmEDNuxUCAKGkq6 +ahq97BvIxYSazQ== +-----END CERTIFICATE----- + +TWCA Root Certification Authority +================================= +-----BEGIN CERTIFICATE----- +MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJ +VEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMzWhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQG +EwJUVzESMBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NB +IFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFEAcK0HMMx +QhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HHK3XLfJ+utdGdIzdjp9xC +oi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeXRfwZVzsrb+RH9JlF/h3x+JejiB03HFyP +4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/zrX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1r +y+UPizgN7gr8/g+YnzAx3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIB +BjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkqhkiG 
+9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeCMErJk/9q56YAf4lC +mtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdlsXebQ79NqZp4VKIV66IIArB6nCWlW +QtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62Dlhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVY +T0bf+215WfKEIlKuD8z7fDvnaspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocny +Yh0igzyXxfkZYiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== +-----END CERTIFICATE----- + +Security Communication RootCA2 +============================== +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDElMCMGA1UEChMc +U0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMeU2VjdXJpdHkgQ29tbXVuaWNh +dGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoXDTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMC +SlAxJTAjBgNVBAoTHFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3Vy +aXR5IENvbW11bmljYXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB +ANAVOVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGrzbl+dp++ ++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVMVAX3NuRFg3sUZdbcDE3R +3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQhNBqyjoGADdH5H5XTz+L62e4iKrFvlNV +spHEfbmwhRkGeC7bYRr6hfVKkaHnFtWOojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1K +EOtOghY6rCcMU/Gt1SSwawNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8 +QIH4D5csOPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEB +CwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpFcoJxDjrSzG+ntKEj +u/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXcokgfGT+Ok+vx+hfuzU7jBBJV1uXk +3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6q +tnRGEmyR7jTV7JqR50S+kDFy1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29 +mvVXIwAHIRc/SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 +-----END CERTIFICATE----- + +EC-ACC +====== +-----BEGIN CERTIFICATE----- +MIIFVjCCBD6gAwIBAgIQ7is969Qh3hSoYqwE893EATANBgkqhkiG9w0BAQUFADCB8zELMAkGA1UE +BhMCRVMxOzA5BgNVBAoTMkFnZW5jaWEgQ2F0YWxhbmEgZGUgQ2VydGlmaWNhY2lvIChOSUYgUS0w +ODAxMTc2LUkpMSgwJgYDVQQLEx9TZXJ2ZWlzIFB1YmxpY3MgZGUgQ2VydGlmaWNhY2lvMTUwMwYD +VQQLEyxWZWdldSBodHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAoYykwMzE1MDMGA1UE +CxMsSmVyYXJxdWlhIEVudGl0YXRzIGRlIENlcnRpZmljYWNpbyBDYXRhbGFuZXMxDzANBgNVBAMT +BkVDLUFDQzAeFw0wMzAxMDcyMzAwMDBaFw0zMTAxMDcyMjU5NTlaMIHzMQswCQYDVQQGEwJFUzE7 +MDkGA1UEChMyQWdlbmNpYSBDYXRhbGFuYSBkZSBDZXJ0aWZpY2FjaW8gKE5JRiBRLTA4MDExNzYt +SSkxKDAmBgNVBAsTH1NlcnZlaXMgUHVibGljcyBkZSBDZXJ0aWZpY2FjaW8xNTAzBgNVBAsTLFZl +Z2V1IGh0dHBzOi8vd3d3LmNhdGNlcnQubmV0L3ZlcmFycmVsIChjKTAzMTUwMwYDVQQLEyxKZXJh +cnF1aWEgRW50aXRhdHMgZGUgQ2VydGlmaWNhY2lvIENhdGFsYW5lczEPMA0GA1UEAxMGRUMtQUND +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsyLHT+KXQpWIR4NA9h0X84NzJB5R85iK +w5K4/0CQBXCHYMkAqbWUZRkiFRfCQ2xmRJoNBD45b6VLeqpjt4pEndljkYRm4CgPukLjbo73FCeT +ae6RDqNfDrHrZqJyTxIThmV6PttPB/SnCWDaOkKZx7J/sxaVHMf5NLWUhdWZXqBIoH7nF2W4onW4 +HvPlQn2v7fOKSGRdghST2MDk/7NQcvJ29rNdQlB50JQ+awwAvthrDk4q7D7SzIKiGGUzE3eeml0a +E9jD2z3Il3rucO2n5nzbcc8tlGLfbdb1OL4/pYUKGbio2Al1QnDE6u/LDsg0qBIimAy4E5S2S+zw +0JDnJwIDAQABo4HjMIHgMB0GA1UdEQQWMBSBEmVjX2FjY0BjYXRjZXJ0Lm5ldDAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUoMOLRKo3pUW/l4Ba0fF4opvpXY0wfwYD +VR0gBHgwdjB0BgsrBgEEAfV4AQMBCjBlMCwGCCsGAQUFBwIBFiBodHRwczovL3d3dy5jYXRjZXJ0 +Lm5ldC92ZXJhcnJlbDA1BggrBgEFBQcCAjApGidWZWdldSBodHRwczovL3d3dy5jYXRjZXJ0Lm5l +dC92ZXJhcnJlbCAwDQYJKoZIhvcNAQEFBQADggEBAKBIW4IB9k1IuDlVNZyAelOZ1Vr/sXE7zDkJ +lF7W2u++AVtd0x7Y/X1PzaBB4DSTv8vihpw3kpBWHNzrKQXlxJ7HNd+KDM3FIUPpqojlNcAZQmNa +Al6kSBg6hW/cnbw/nZzBh7h6YQjpdwt/cKt63dmXLGQehb+8dJahw3oS7AwaboMMPOhyRp/7SNVe 
+l+axofjk70YllJyJ22k4vuxcDlbHZVHlUIiIv0LVKz3l+bqeLrPK9HOSAgu+TGbrIP65y7WZf+a2 +E/rKS03Z7lNGBjvGTq2TWoF+bCpLagVFjPIhpDGQh2xlnJ2lYJU6Un/10asIbvPuW/mIPX64b24D +5EI= +-----END CERTIFICATE----- + +Hellenic Academic and Research Institutions RootCA 2011 +======================================================= +-----BEGIN CERTIFICATE----- +MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1IxRDBCBgNVBAoT +O0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9y +aXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25z +IFJvb3RDQSAyMDExMB4XDTExMTIwNjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYT +AkdSMUQwQgYDVQQKEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25z +IENlcnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2VhcmNo +IEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB +AKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPzdYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI +1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJfel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa +71HFK9+WXesyHgLacEnsbgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u +8yBRQlqD75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSPFEDH +3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNVHRMBAf8EBTADAQH/ +MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp5dgTBCPuQSUwRwYDVR0eBEAwPqA8 +MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQub3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQu +b3JnMA0GCSqGSIb3DQEBBQUAA4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVt +XdMiKahsog2p6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8 +TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7dIsXRSZMFpGD +/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8AcysNnq/onN694/BtZqhFLKPM58N +7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXIl7WdmplNsDz4SgCbZN2fOUvRJ9e4 +-----END CERTIFICATE----- diff --git a/awx/lib/site-packages/requests/certs.py b/awx/lib/site-packages/requests/certs.py new file mode 100644 index 0000000000..bc00826191 --- /dev/null +++ b/awx/lib/site-packages/requests/certs.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +""" +certs.py +~~~~~~~~ + +This module returns the preferred default CA certificate bundle. + +If you are packaging Requests, e.g., for a Linux distribution or a managed +environment, you can change the definition of where() to return a separately +packaged CA bundle. +""" + +import os.path + + +def where(): + """Return the preferred certificate bundle.""" + # vendored bundle inside Requests + return os.path.join(os.path.dirname(__file__), 'cacert.pem') + +if __name__ == '__main__': + print(where()) diff --git a/awx/lib/site-packages/requests/compat.py b/awx/lib/site-packages/requests/compat.py new file mode 100644 index 0000000000..bcf94b0067 --- /dev/null +++ b/awx/lib/site-packages/requests/compat.py @@ -0,0 +1,113 @@ +# -*- coding: utf-8 -*- + +""" +pythoncompat +""" + +from .packages import charade as chardet + +import sys + +# ------- +# Pythons +# ------- + +# Syntax sugar. +_ver = sys.version_info + +#: Python 2.x? +is_py2 = (_ver[0] == 2) + +#: Python 3.x? 
+is_py3 = (_ver[0] == 3) + +#: Python 3.0.x +is_py30 = (is_py3 and _ver[1] == 0) + +#: Python 3.1.x +is_py31 = (is_py3 and _ver[1] == 1) + +#: Python 3.2.x +is_py32 = (is_py3 and _ver[1] == 2) + +#: Python 3.3.x +is_py33 = (is_py3 and _ver[1] == 3) + +#: Python 3.4.x +is_py34 = (is_py3 and _ver[1] == 4) + +#: Python 2.7.x +is_py27 = (is_py2 and _ver[1] == 7) + +#: Python 2.6.x +is_py26 = (is_py2 and _ver[1] == 6) + +#: Python 2.5.x +is_py25 = (is_py2 and _ver[1] == 5) + +#: Python 2.4.x +is_py24 = (is_py2 and _ver[1] == 4) # I'm assuming this is not by choice. + + +# --------- +# Platforms +# --------- + + +# Syntax sugar. +_ver = sys.version.lower() + +is_pypy = ('pypy' in _ver) +is_jython = ('jython' in _ver) +is_ironpython = ('iron' in _ver) + +# Assume CPython, if nothing else. +is_cpython = not any((is_pypy, is_jython, is_ironpython)) + +# Windows-based system. +is_windows = 'win32' in str(sys.platform).lower() + +# Standard Linux 2+ system. +is_linux = ('linux' in str(sys.platform).lower()) +is_osx = ('darwin' in str(sys.platform).lower()) +is_hpux = ('hpux' in str(sys.platform).lower()) # Complete guess. +is_solaris = ('solar==' in str(sys.platform).lower()) # Complete guess. + +try: + import simplejson as json +except ImportError: + import json + +# --------- +# Specifics +# --------- + +if is_py2: + from urllib import quote, unquote, quote_plus, unquote_plus, urlencode + from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag + from urllib2 import parse_http_list + import cookielib + from Cookie import Morsel + from StringIO import StringIO + from .packages.urllib3.packages.ordered_dict import OrderedDict + + builtin_str = str + bytes = str + str = unicode + basestring = basestring + numeric_types = (int, long, float) + + +elif is_py3: + from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag + from urllib.request import parse_http_list + from http import cookiejar as cookielib + from http.cookies import Morsel + from io import StringIO + from collections import OrderedDict + + builtin_str = str + str = str + bytes = bytes + basestring = (str, bytes) + numeric_types = (int, float) diff --git a/awx/lib/site-packages/requests/cookies.py b/awx/lib/site-packages/requests/cookies.py new file mode 100644 index 0000000000..d759d0a977 --- /dev/null +++ b/awx/lib/site-packages/requests/cookies.py @@ -0,0 +1,389 @@ +# -*- coding: utf-8 -*- + +""" +Compatibility code to be able to use `cookielib.CookieJar` with requests. + +requests.utils imports from here, so be careful with imports. +""" + +import collections +from .compat import cookielib, urlparse, Morsel + +try: + import threading + # grr, pyflakes: this fixes "redefinition of unused 'threading'" + threading +except ImportError: + import dummy_threading as threading + + +class MockRequest(object): + """Wraps a `requests.Request` to mimic a `urllib2.Request`. + + The code in `cookielib.CookieJar` expects this interface in order to correctly + manage cookie policies, i.e., determine whether a cookie can be set, given the + domains of the request and the cookie. + + The original request object is read-only. The client is responsible for collecting + the new headers via `get_new_headers()` and interpreting them appropriately. You + probably want `get_cookie_header`, defined below. 
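+
+    (Illustrative note: `cookielib.CookieJar.add_cookie_header()` drives this
+    wrapper through `get_full_url()`, `get_host()` and
+    `add_unredirected_header()`, all defined below.)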
+    """
+
+    def __init__(self, request):
+        self._r = request
+        self._new_headers = {}
+        self.type = urlparse(self._r.url).scheme
+
+    def get_type(self):
+        return self.type
+
+    def get_host(self):
+        return urlparse(self._r.url).netloc
+
+    def get_origin_req_host(self):
+        return self.get_host()
+
+    def get_full_url(self):
+        return self._r.url
+
+    def is_unverifiable(self):
+        return True
+
+    def has_header(self, name):
+        return name in self._r.headers or name in self._new_headers
+
+    def get_header(self, name, default=None):
+        return self._r.headers.get(name, self._new_headers.get(name, default))
+
+    def add_header(self, key, val):
+        """cookielib has no legitimate use for this method; add it back if you find one."""
+        raise NotImplementedError("Cookie headers should be added with add_unredirected_header()")
+
+    def add_unredirected_header(self, name, value):
+        self._new_headers[name] = value
+
+    def get_new_headers(self):
+        return self._new_headers
+
+    @property
+    def unverifiable(self):
+        return self.is_unverifiable()
+
+    @property
+    def origin_req_host(self):
+        return self.get_origin_req_host()
+
+
+class MockResponse(object):
+    """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.
+
+    ...what? Basically, expose the parsed HTTP headers from the server response
+    the way `cookielib` expects to see them.
+    """
+
+    def __init__(self, headers):
+        """Make a MockResponse for `cookielib` to read.
+
+        :param headers: a httplib.HTTPMessage or analogous carrying the headers
+        """
+        self._headers = headers
+
+    def info(self):
+        return self._headers
+
+    def getheaders(self, name):
+        return self._headers.getheaders(name)
+
+
+def extract_cookies_to_jar(jar, request, response):
+    """Extract the cookies from the response into a CookieJar.
+
+    :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)
+    :param request: our own requests.Request object
+    :param response: urllib3.HTTPResponse object
+    """
+    # the _original_response field is the wrapped httplib.HTTPResponse object.
+    req = MockRequest(request)
+    # pull out the HTTPMessage with the headers and put it in the mock:
+    res = MockResponse(response._original_response.msg)
+    jar.extract_cookies(res, req)
+
+
+def get_cookie_header(jar, request):
+    """Produce an appropriate Cookie header string to be sent with `request`, or None."""
+    r = MockRequest(request)
+    jar.add_cookie_header(r)
+    return r.get_new_headers().get('Cookie')
+
+
+def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
+    """Unsets a cookie by name, by default over all domains and paths.
+
+    Wraps CookieJar.clear(), is O(n).
+    """
+    clearables = []
+    for cookie in cookiejar:
+        if cookie.name == name:
+            if domain is None or domain == cookie.domain:
+                if path is None or path == cookie.path:
+                    clearables.append((cookie.domain, cookie.path, cookie.name))
+
+    for domain, path, name in clearables:
+        cookiejar.clear(domain, path, name)
+
+
+class CookieConflictError(RuntimeError):
+    """There are two cookies that meet the criteria specified in the cookie jar.
+    Use .get and .set and include domain and path args in order to be more specific."""
+
+
+class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
+    """Compatibility class; is a cookielib.CookieJar, but exposes a dict interface.
+
+    This is the CookieJar we create by default for requests and sessions that
+    don't specify one, since some clients may expect response.cookies and
+    session.cookies to support dict operations.
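+
+    A minimal illustrative sketch of that dict interface::
+
+        jar = RequestsCookieJar()
+        jar['token'] = 'abc123'   # dict-style set
+        jar.get('token')          # 'abc123'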
+
+    Don't use the dict interface internally; it's just for compatibility
+    with external client code. All `requests` code should work out of the box
+    with externally provided instances of CookieJar, e.g., LWPCookieJar and
+    FileCookieJar.
+
+    Caution: dictionary operations that are normally O(1) may be O(n).
+
+    Unlike a regular CookieJar, this class is pickleable.
+    """
+
+    def get(self, name, default=None, domain=None, path=None):
+        """Dict-like get() that also supports optional domain and path args in
+        order to resolve naming collisions from using one cookie jar over
+        multiple domains. Caution: operation is O(n), not O(1)."""
+        try:
+            return self._find_no_duplicates(name, domain, path)
+        except KeyError:
+            return default
+
+    def set(self, name, value, **kwargs):
+        """Dict-like set() that also supports optional domain and path args in
+        order to resolve naming collisions from using one cookie jar over
+        multiple domains."""
+        # support client code that unsets cookies by assignment of a None value:
+        if value is None:
+            remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path'))
+            return
+
+        if isinstance(value, Morsel):
+            c = morsel_to_cookie(value)
+        else:
+            c = create_cookie(name, value, **kwargs)
+        self.set_cookie(c)
+        return c
+
+    def keys(self):
+        """Dict-like keys() that returns a list of names of cookies from the jar.
+        See values() and items()."""
+        keys = []
+        for cookie in iter(self):
+            keys.append(cookie.name)
+        return keys
+
+    def values(self):
+        """Dict-like values() that returns a list of values of cookies from the jar.
+        See keys() and items()."""
+        values = []
+        for cookie in iter(self):
+            values.append(cookie.value)
+        return values
+
+    def items(self):
+        """Dict-like items() that returns a list of name-value tuples from the jar.
+        See keys() and values(). Allows client code to call ``dict(RequestsCookieJar)``
+        and get a vanilla Python dict of key/value pairs."""
+        items = []
+        for cookie in iter(self):
+            items.append((cookie.name, cookie.value))
+        return items
+
+    def list_domains(self):
+        """Utility method to list all the domains in the jar."""
+        domains = []
+        for cookie in iter(self):
+            if cookie.domain not in domains:
+                domains.append(cookie.domain)
+        return domains
+
+    def list_paths(self):
+        """Utility method to list all the paths in the jar."""
+        paths = []
+        for cookie in iter(self):
+            if cookie.path not in paths:
+                paths.append(cookie.path)
+        return paths
+
+    def multiple_domains(self):
+        """Returns True if there are multiple domains in the jar.
+        Returns False otherwise."""
+        domains = []
+        for cookie in iter(self):
+            if cookie.domain is not None and cookie.domain in domains:
+                return True
+            domains.append(cookie.domain)
+        return False  # there is only one domain in jar
+
+    def get_dict(self, domain=None, path=None):
+        """Takes as an argument an optional domain and path and returns a plain old
+        Python dict of name-value pairs of cookies that meet the requirements."""
+        dictionary = {}
+        for cookie in iter(self):
+            if (domain is None or cookie.domain == domain) and (path is None
+                                                                or cookie.path == path):
+                dictionary[cookie.name] = cookie.value
+        return dictionary
+
+    def __getitem__(self, name):
+        """Dict-like __getitem__() for compatibility with client code. Throws an
+        exception if there is more than one cookie with that name. In that case, use the more
+        explicit get() method instead. Caution: operation is O(n), not O(1)."""
+
+        return self._find_no_duplicates(name)
+
+    def __setitem__(self, name, value):
+        """Dict-like __setitem__ for compatibility with client code. Throws an
+        exception if there is already a cookie of that name in the jar. In that
+        case, use the more explicit set() method instead."""
+
+        self.set(name, value)
+
+    def __delitem__(self, name):
+        """Deletes a cookie given a name. Wraps cookielib.CookieJar's remove_cookie_by_name()."""
+        remove_cookie_by_name(self, name)
+
+    def update(self, other):
+        """Updates this jar with cookies from another CookieJar or dict-like."""
+        if isinstance(other, cookielib.CookieJar):
+            for cookie in other:
+                self.set_cookie(cookie)
+        else:
+            super(RequestsCookieJar, self).update(other)
+
+    def _find(self, name, domain=None, path=None):
+        """Requests uses this method internally to get cookie values. Takes as args name
+        and optional domain and path. Returns a cookie.value. If there are conflicting cookies,
+        _find arbitrarily chooses one. See _find_no_duplicates if you want an exception thrown
+        if there are conflicting cookies."""
+        for cookie in iter(self):
+            if cookie.name == name:
+                if domain is None or cookie.domain == domain:
+                    if path is None or cookie.path == path:
+                        return cookie.value
+
+        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
+
+    def _find_no_duplicates(self, name, domain=None, path=None):
+        """__getitem__() and get() call _find_no_duplicates() -- it is never used
+        elsewhere in Requests internally (internal code uses _find()).
+        Takes as args name and optional domain and path. Returns a cookie.value.
+        Throws KeyError if the cookie is not found and CookieConflictError if there are
+        multiple cookies that match name and optionally domain and path."""
+        toReturn = None
+        for cookie in iter(self):
+            if cookie.name == name:
+                if domain is None or cookie.domain == domain:
+                    if path is None or cookie.path == path:
+                        if toReturn is not None:  # if there are multiple cookies that meet passed in criteria
+                            raise CookieConflictError('There are multiple cookies with name %r' % (name))
+                        toReturn = cookie.value  # we will eventually return this as long as no cookie conflict
+
+        if toReturn:
+            return toReturn
+        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
+
+    def __getstate__(self):
+        """Unlike a normal CookieJar, this class is pickleable."""
+        state = self.__dict__.copy()
+        # remove the unpickleable RLock object
+        state.pop('_cookies_lock')
+        return state
+
+    def __setstate__(self, state):
+        """Unlike a normal CookieJar, this class is pickleable."""
+        self.__dict__.update(state)
+        if '_cookies_lock' not in self.__dict__:
+            self._cookies_lock = threading.RLock()
+
+    def copy(self):
+        """Return a copy of this RequestsCookieJar."""
+        new_cj = RequestsCookieJar()
+        new_cj.update(self)
+        return new_cj
+
+
+def create_cookie(name, value, **kwargs):
+    """Make a cookie from underspecified parameters.
+
+    By default, the pair of `name` and `value` will be set for the domain ''
+    and sent on every request (this is sometimes called a "supercookie").
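+
+    An illustrative call (`domain` and `path` are among the accepted kwargs;
+    anything outside the attribute set below raises TypeError)::
+
+        c = create_cookie('k', 'v', domain='.example.org', path='/')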
+ """ + result = dict( + version=0, + name=name, + value=value, + port=None, + domain='', + path='/', + secure=False, + expires=None, + discard=True, + comment=None, + comment_url=None, + rest={'HttpOnly': None}, + rfc2109=False,) + + badargs = set(kwargs) - set(result) + if badargs: + err = 'create_cookie() got unexpected keyword arguments: %s' + raise TypeError(err % list(badargs)) + + result.update(kwargs) + result['port_specified'] = bool(result['port']) + result['domain_specified'] = bool(result['domain']) + result['domain_initial_dot'] = result['domain'].startswith('.') + result['path_specified'] = bool(result['path']) + + return cookielib.Cookie(**result) + + +def morsel_to_cookie(morsel): + """Convert a Morsel object into a Cookie containing the one k/v pair.""" + c = create_cookie( + name=morsel.key, + value=morsel.value, + version=morsel['version'] or 0, + port=None, + port_specified=False, + domain=morsel['domain'], + domain_specified=bool(morsel['domain']), + domain_initial_dot=morsel['domain'].startswith('.'), + path=morsel['path'], + path_specified=bool(morsel['path']), + secure=bool(morsel['secure']), + expires=morsel['max-age'] or morsel['expires'], + discard=False, + comment=morsel['comment'], + comment_url=bool(morsel['comment']), + rest={'HttpOnly': morsel['httponly']}, + rfc2109=False,) + return c + + +def cookiejar_from_dict(cookie_dict, cookiejar=None): + """Returns a CookieJar from a key/value dictionary. + + :param cookie_dict: Dict of key/values to insert into CookieJar. + """ + if cookiejar is None: + cookiejar = RequestsCookieJar() + + if cookie_dict is not None: + for name in cookie_dict: + cookiejar.set_cookie(create_cookie(name, cookie_dict[name])) + return cookiejar diff --git a/awx/lib/site-packages/requests/exceptions.py b/awx/lib/site-packages/requests/exceptions.py new file mode 100644 index 0000000000..c0588f6aed --- /dev/null +++ b/awx/lib/site-packages/requests/exceptions.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- + +""" +requests.exceptions +~~~~~~~~~~~~~~~~~~~ + +This module contains the set of Requests' exceptions. + +""" + + +class RequestException(RuntimeError): + """There was an ambiguous exception that occurred while handling your + request.""" + + +class HTTPError(RequestException): + """An HTTP error occurred.""" + + def __init__(self, *args, **kwargs): + """ Initializes HTTPError with optional `response` object. """ + self.response = kwargs.pop('response', None) + super(HTTPError, self).__init__(*args, **kwargs) + + +class ConnectionError(RequestException): + """A Connection error occurred.""" + + +class SSLError(ConnectionError): + """An SSL error occurred.""" + + +class Timeout(RequestException): + """The request timed out.""" + + +class URLRequired(RequestException): + """A valid URL is required to make a request.""" + + +class TooManyRedirects(RequestException): + """Too many redirects.""" + + +class MissingSchema(RequestException, ValueError): + """The URL schema (e.g. http or https) is missing.""" + + +class InvalidSchema(RequestException, ValueError): + """See defaults.py for valid schemas.""" + + +class InvalidURL(RequestException, ValueError): + """ The URL provided was somehow invalid. 
""" diff --git a/awx/lib/site-packages/requests/hooks.py b/awx/lib/site-packages/requests/hooks.py new file mode 100644 index 0000000000..5dfaf6b680 --- /dev/null +++ b/awx/lib/site-packages/requests/hooks.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- + +""" +requests.hooks +~~~~~~~~~~~~~~ + +This module provides the capabilities for the Requests hooks system. + +Available hooks: + +``response``: + The response generated from a Request. + +""" + + +HOOKS = ['response'] + + +def default_hooks(): + hooks = {} + for event in HOOKS: + hooks[event] = [] + return hooks + +# TODO: response is the only one + + +def dispatch_hook(key, hooks, hook_data, **kwargs): + """Dispatches a hook dictionary on a given piece of data.""" + + hooks = hooks or dict() + + if key in hooks: + hooks = hooks.get(key) + + if hasattr(hooks, '__call__'): + hooks = [hooks] + + for hook in hooks: + _hook_data = hook(hook_data, **kwargs) + if _hook_data is not None: + hook_data = _hook_data + + return hook_data diff --git a/awx/lib/site-packages/requests/models.py b/awx/lib/site-packages/requests/models.py new file mode 100644 index 0000000000..6cf2aaa1a6 --- /dev/null +++ b/awx/lib/site-packages/requests/models.py @@ -0,0 +1,686 @@ +# -*- coding: utf-8 -*- + +""" +requests.models +~~~~~~~~~~~~~~~ + +This module contains the primary objects that power Requests. +""" + +import collections +import logging +import datetime + +from io import BytesIO +from .hooks import default_hooks +from .structures import CaseInsensitiveDict + +from .auth import HTTPBasicAuth +from .cookies import cookiejar_from_dict, get_cookie_header +from .packages.urllib3.filepost import encode_multipart_formdata +from .packages.urllib3.util import parse_url +from .exceptions import HTTPError, RequestException, MissingSchema, InvalidURL +from .utils import ( + guess_filename, get_auth_from_url, requote_uri, + stream_decode_response_unicode, to_key_val_list, parse_header_links, + iter_slices, guess_json_utf, super_len) +from .compat import ( + cookielib, urlparse, urlunparse, urlsplit, urlencode, str, bytes, StringIO, + is_py2, chardet, json, builtin_str, basestring) + +CONTENT_CHUNK_SIZE = 10 * 1024 +ITER_CHUNK_SIZE = 512 + +log = logging.getLogger(__name__) + + +class RequestEncodingMixin(object): + @property + def path_url(self): + """Build the path URL to use.""" + + url = [] + + p = urlsplit(self.url) + + path = p.path + if not path: + path = '/' + + url.append(path) + + query = p.query + if query: + url.append('?') + url.append(query) + + return ''.join(url) + + @staticmethod + def _encode_params(data): + """Encode parameters in a piece of data. + + Will successfully encode parameters when passed as a dict or a list of + 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary + if parameters are supplied as a dict. + """ + + if isinstance(data, (str, bytes)): + return data + elif hasattr(data, 'read'): + return data + elif hasattr(data, '__iter__'): + result = [] + for k, vs in to_key_val_list(data): + if isinstance(vs, basestring) or not hasattr(vs, '__iter__'): + vs = [vs] + for v in vs: + if v is not None: + result.append( + (k.encode('utf-8') if isinstance(k, str) else k, + v.encode('utf-8') if isinstance(v, str) else v)) + return urlencode(result, doseq=True) + else: + return data + + @staticmethod + def _encode_files(files, data): + """Build the body for a multipart/form-data request. + + Will successfully encode files when passed as a dict or a list of + 2-tuples. 
+        Order is retained if data is a list of 2-tuples but arbitrary
+        if parameters are supplied as a dict.
+
+        """
+        if (not files) or isinstance(data, str):
+            return None
+
+        new_fields = []
+        fields = to_key_val_list(data or {})
+        files = to_key_val_list(files or {})
+
+        for field, val in fields:
+            if isinstance(val, basestring) or not hasattr(val, '__iter__'):
+                val = [val]
+            for v in val:
+                if v is not None:
+                    new_fields.append(
+                        (field.decode('utf-8') if isinstance(field, bytes) else field,
+                         v.encode('utf-8') if isinstance(v, str) else v))
+
+        for (k, v) in files:
+            # support for explicit filename
+            ft = None
+            if isinstance(v, (tuple, list)):
+                if len(v) == 2:
+                    fn, fp = v
+                else:
+                    fn, fp, ft = v
+            else:
+                fn = guess_filename(v) or k
+                fp = v
+            if isinstance(fp, str):
+                fp = StringIO(fp)
+            if isinstance(fp, bytes):
+                fp = BytesIO(fp)
+
+            if ft:
+                new_v = (fn, fp.read(), ft)
+            else:
+                new_v = (fn, fp.read())
+            new_fields.append((k, new_v))
+
+        body, content_type = encode_multipart_formdata(new_fields)
+
+        return body, content_type
+
+
+class RequestHooksMixin(object):
+    def register_hook(self, event, hook):
+        """Properly register a hook."""
+
+        if isinstance(hook, collections.Callable):
+            self.hooks[event].append(hook)
+        elif hasattr(hook, '__iter__'):
+            self.hooks[event].extend(h for h in hook if isinstance(h, collections.Callable))
+
+    def deregister_hook(self, event, hook):
+        """Deregister a previously registered hook.
+        Returns True if the hook existed, False if not.
+        """
+
+        try:
+            self.hooks[event].remove(hook)
+            return True
+        except ValueError:
+            return False
+
+
+class Request(RequestHooksMixin):
+    """A user-created :class:`Request <Request>` object.
+
+    Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.
+
+    :param method: HTTP method to use.
+    :param url: URL to send.
+    :param headers: dictionary of headers to send.
+    :param files: dictionary of {filename: fileobject} files to multipart upload.
+    :param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place.
+    :param params: dictionary of URL parameters to append to the URL.
+    :param auth: Auth handler or (user, pass) tuple.
+    :param cookies: dictionary or CookieJar of cookies to attach to this request.
+    :param hooks: dictionary of callback hooks, for internal usage.
+
+    Usage::
+
+      >>> import requests
+      >>> req = requests.Request('GET', 'http://httpbin.org/get')
+      >>> req.prepare()
+      <PreparedRequest [GET]>
+
+    """
+    def __init__(self,
+        method=None,
+        url=None,
+        headers=None,
+        files=None,
+        data=dict(),
+        params=dict(),
+        auth=None,
+        cookies=None,
+        hooks=None):
+
+        # Default empty containers for the mutable params.
+ data = [] if data is None else data + files = [] if files is None else files + headers = {} if headers is None else headers + params = {} if params is None else params + hooks = {} if hooks is None else hooks + + self.hooks = default_hooks() + for (k, v) in list(hooks.items()): + self.register_hook(event=k, hook=v) + + self.method = method + self.url = url + self.headers = headers + self.files = files + self.data = data + self.params = params + self.auth = auth + self.cookies = cookies + self.hooks = hooks + + def __repr__(self): + return '<Request [%s]>' % (self.method) + + def prepare(self): + """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it.""" + p = PreparedRequest() + + p.prepare_method(self.method) + p.prepare_url(self.url, self.params) + p.prepare_headers(self.headers) + p.prepare_cookies(self.cookies) + p.prepare_body(self.data, self.files) + p.prepare_auth(self.auth, self.url) + # Note that prepare_auth must be last to enable authentication schemes + # such as OAuth to work on a fully prepared request. + + # This MUST go after prepare_auth. Authenticators could add a hook + p.prepare_hooks(self.hooks) + + return p + + +class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): + """The fully mutable :class:`PreparedRequest <PreparedRequest>` object, + containing the exact bytes that will be sent to the server. + + Generated from either a :class:`Request <Request>` object or manually. + + Usage:: + + >>> import requests + >>> req = requests.Request('GET', 'http://httpbin.org/get') + >>> r = req.prepare() + <PreparedRequest [GET]> + + >>> s = requests.Session() + >>> s.send(r) + <Response [200]> + + """ + + def __init__(self): + #: HTTP verb to send to the server. + self.method = None + #: HTTP URL to send the request to. + self.url = None + #: dictionary of HTTP headers. + self.headers = None + #: request body to send to the server. + self.body = None + #: dictionary of callback hooks, for internal usage. + self.hooks = default_hooks() + + def __repr__(self): + return '<PreparedRequest [%s]>' % (self.method) + + def prepare_method(self, method): + """Prepares the given HTTP method.""" + self.method = method + if self.method is not None: + self.method = self.method.upper() + + def prepare_url(self, url, params): + """Prepares the given HTTP URL.""" + #: Accept objects that have string representations. + try: + url = unicode(url) + except NameError: + # We're on Python 3. + url = str(url) + except UnicodeDecodeError: + pass + + # Support for unicode domain names and paths. + scheme, auth, host, port, path, query, fragment = parse_url(url) + + if not scheme: + raise MissingSchema("Invalid URL %r: No schema supplied" % url) + + if not host: + raise InvalidURL("Invalid URL %r: No host supplied" % url) + + # Only want to apply IDNA to the hostname + try: + host = host.encode('idna').decode('utf-8') + except UnicodeError: + raise InvalidURL('URL has an invalid label.') + + # Carefully reconstruct the network location + netloc = auth or '' + if netloc: + netloc += '@' + netloc += host + if port: + netloc += ':' + str(port) + + # Bare domains aren't valid URLs. 
+ if not path: + path = '/' + + if is_py2: + if isinstance(scheme, str): + scheme = scheme.encode('utf-8') + if isinstance(netloc, str): + netloc = netloc.encode('utf-8') + if isinstance(path, str): + path = path.encode('utf-8') + if isinstance(query, str): + query = query.encode('utf-8') + if isinstance(fragment, str): + fragment = fragment.encode('utf-8') + + enc_params = self._encode_params(params) + if enc_params: + if query: + query = '%s&%s' % (query, enc_params) + else: + query = enc_params + + url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment])) + self.url = url + + def prepare_headers(self, headers): + """Prepares the given HTTP headers.""" + + if headers: + headers = dict((name.encode('ascii'), value) for name, value in headers.items()) + self.headers = CaseInsensitiveDict(headers) + else: + self.headers = CaseInsensitiveDict() + + def prepare_body(self, data, files): + """Prepares the given HTTP body data.""" + + # Check if file, fo, generator, iterator. + # If not, run through normal process. + + # Nottin' on you. + body = None + content_type = None + length = None + is_stream = False + + is_stream = all([ + hasattr(data, '__iter__'), + not isinstance(data, basestring), + not isinstance(data, list), + not isinstance(data, dict) + ]) + + try: + length = super_len(data) + except (TypeError, AttributeError): + length = False + + if is_stream: + body = data + + if files: + raise NotImplementedError('Streamed bodies and files are mutually exclusive.') + + if length: + self.headers['Content-Length'] = str(length) + else: + self.headers['Transfer-Encoding'] = 'chunked' + # Check if file, fo, generator, iterator. + # If not, run through normal process. + + else: + # Multi-part file uploads. + if files: + (body, content_type) = self._encode_files(files, data) + else: + if data: + body = self._encode_params(data) + if isinstance(data, str) or isinstance(data, builtin_str) or hasattr(data, 'read'): + content_type = None + else: + content_type = 'application/x-www-form-urlencoded' + + self.prepare_content_length(body) + + # Add content-type if it wasn't explicitly provided. + if (content_type) and (not 'content-type' in self.headers): + self.headers['Content-Type'] = content_type + + self.body = body + + def prepare_content_length(self, body): + if hasattr(body, 'seek') and hasattr(body, 'tell'): + body.seek(0, 2) + self.headers['Content-Length'] = str(body.tell()) + body.seek(0, 0) + elif body is not None: + l = super_len(body) + if l: + self.headers['Content-Length'] = str(l) + elif self.method not in ('GET', 'HEAD'): + self.headers['Content-Length'] = '0' + + def prepare_auth(self, auth, url=''): + """Prepares the given HTTP auth data.""" + + # If no Auth is explicitly provided, extract it from the URL first. + if auth is None: + url_auth = get_auth_from_url(self.url) + auth = url_auth if any(url_auth) else None + + if auth: + if isinstance(auth, tuple) and len(auth) == 2: + # special-case basic HTTP auth + auth = HTTPBasicAuth(*auth) + + # Allow auth to make its changes. + r = auth(self) + + # Update self to reflect the auth changes. 
+        self.__dict__.update(r.__dict__)
+
+        # Recompute Content-Length
+        self.prepare_content_length(self.body)
+
+    def prepare_cookies(self, cookies):
+        """Prepares the given HTTP cookie data."""
+
+        if isinstance(cookies, cookielib.CookieJar):
+            cookies = cookies
+        else:
+            cookies = cookiejar_from_dict(cookies)
+
+        if 'cookie' not in self.headers:
+            cookie_header = get_cookie_header(cookies, self)
+            if cookie_header is not None:
+                self.headers['Cookie'] = cookie_header
+
+    def prepare_hooks(self, hooks):
+        """Prepares the given hooks."""
+        for event in hooks:
+            self.register_hook(event, hooks[event])
+
+
+class Response(object):
+    """The :class:`Response <Response>` object, which contains a
+    server's response to an HTTP request.
+    """
+
+    def __init__(self):
+        super(Response, self).__init__()
+
+        self._content = False
+        self._content_consumed = False
+
+        #: Integer Code of responded HTTP Status.
+        self.status_code = None
+
+        #: Case-insensitive Dictionary of Response Headers.
+        #: For example, ``headers['content-encoding']`` will return the
+        #: value of a ``'Content-Encoding'`` response header.
+        self.headers = CaseInsensitiveDict()
+
+        #: File-like object representation of response (for advanced usage).
+        #: Requires that ``stream=True`` be set on the request.
+        # This requirement does not apply for use internally to Requests.
+        self.raw = None
+
+        #: Final URL location of Response.
+        self.url = None
+
+        #: Encoding to decode with when accessing r.text.
+        self.encoding = None
+
+        #: A list of :class:`Response <Response>` objects from
+        #: the history of the Request. Any redirect responses will end
+        #: up here. The list is sorted from the oldest to the most recent request.
+        self.history = []
+
+        self.reason = None
+
+        #: A CookieJar of Cookies the server sent back.
+        self.cookies = cookiejar_from_dict({})
+
+        #: The amount of time elapsed between sending the request
+        #: and the arrival of the response (as a timedelta)
+        self.elapsed = datetime.timedelta(0)
+
+    def __repr__(self):
+        return '<Response [%s]>' % (self.status_code)
+
+    def __bool__(self):
+        """Returns true if :attr:`status_code` is 'OK'."""
+        return self.ok
+
+    def __nonzero__(self):
+        """Returns true if :attr:`status_code` is 'OK'."""
+        return self.ok
+
+    def __iter__(self):
+        """Allows you to use a response as an iterator."""
+        return self.iter_content(128)
+
+    @property
+    def ok(self):
+        try:
+            self.raise_for_status()
+        except RequestException:
+            return False
+        return True
+
+    @property
+    def apparent_encoding(self):
+        """The apparent encoding, provided by the lovely Charade library
+        (Thanks, Ian!)."""
+        return chardet.detect(self.content)['encoding']
+
+    def iter_content(self, chunk_size=1, decode_unicode=False):
+        """Iterates over the response data. When stream=True is set on the
+        request, this avoids reading the content at once into memory for
+        large responses. The chunk size is the number of bytes it should
+        read into memory. This is not necessarily the length of each item
+        returned as decoding can take place.
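+
+        A typical, illustrative use, assuming the request was made with
+        ``stream=True`` and ``fd`` is an open file::
+
+            for chunk in r.iter_content(chunk_size=1024):
+                fd.write(chunk)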
+ """ + if self._content_consumed: + # simulate reading small chunks of the content + return iter_slices(self._content, chunk_size) + + def generate(): + while 1: + chunk = self.raw.read(chunk_size, decode_content=True) + if not chunk: + break + yield chunk + self._content_consumed = True + + gen = generate() + + if decode_unicode: + gen = stream_decode_response_unicode(gen, self) + + return gen + + def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None): + """Iterates over the response data, one line at a time. When + stream=True is set on the request, this avoids reading the + content at once into memory for large responses. + """ + + pending = None + + for chunk in self.iter_content(chunk_size=chunk_size, + decode_unicode=decode_unicode): + + if pending is not None: + chunk = pending + chunk + lines = chunk.splitlines() + + if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]: + pending = lines.pop() + else: + pending = None + + for line in lines: + yield line + + if pending is not None: + yield pending + + @property + def content(self): + """Content of the response, in bytes.""" + + if self._content is False: + # Read the contents. + try: + if self._content_consumed: + raise RuntimeError( + 'The content for this response was already consumed') + + if self.status_code == 0: + self._content = None + else: + self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes() + + except AttributeError: + self._content = None + + self._content_consumed = True + # don't need to release the connection; that's been handled by urllib3 + # since we exhausted the data. + return self._content + + @property + def text(self): + """Content of the response, in unicode. + + if Response.encoding is None and chardet module is available, encoding + will be guessed. + """ + + # Try charset from content-type + content = None + encoding = self.encoding + + if not self.content: + return str('') + + # Fallback to auto-detected encoding. + if self.encoding is None: + encoding = self.apparent_encoding + + # Decode unicode from given encoding. + try: + content = str(self.content, encoding, errors='replace') + except (LookupError, TypeError): + # A LookupError is raised if the encoding was not found which could + # indicate a misspelling or similar mistake. + # + # A TypeError can be raised if encoding is None + # + # So we try blindly encoding. + content = str(self.content, errors='replace') + + return content + + def json(self, **kwargs): + """Returns the json-encoded content of a response, if any. + + :param \*\*kwargs: Optional arguments that ``json.loads`` takes. + """ + + if not self.encoding and len(self.content) > 3: + # No encoding set. JSON RFC 4627 section 3 states we should expect + # UTF-8, -16 or -32. Detect which one to use; If the detection or + # decoding fails, fall back to `self.text` (using chardet to make + # a best guess). 
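+            # (For reference: guess_json_utf() -- imported from .utils above --
+            # looks at BOMs and NUL-byte positions in the first few bytes to
+            # choose among those encodings.)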
+ encoding = guess_json_utf(self.content) + if encoding is not None: + return json.loads(self.content.decode(encoding), **kwargs) + return json.loads(self.text or self.content, **kwargs) + + @property + def links(self): + """Returns the parsed header links of the response, if any.""" + + header = self.headers.get('link') + + # l = MultiDict() + l = {} + + if header: + links = parse_header_links(header) + + for link in links: + key = link.get('rel') or link.get('url') + l[key] = link + + return l + + def raise_for_status(self): + """Raises stored :class:`HTTPError`, if one occurred.""" + + http_error_msg = '' + + if 400 <= self.status_code < 500: + http_error_msg = '%s Client Error: %s' % (self.status_code, self.reason) + + elif 500 <= self.status_code < 600: + http_error_msg = '%s Server Error: %s' % (self.status_code, self.reason) + + if http_error_msg: + raise HTTPError(http_error_msg, response=self) + + def close(self): + return self.raw.release_conn() diff --git a/awx/lib/site-packages/requests/packages/__init__.py b/awx/lib/site-packages/requests/packages/__init__.py new file mode 100644 index 0000000000..d62c4b7111 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/__init__.py @@ -0,0 +1,3 @@ +from __future__ import absolute_import + +from . import urllib3 diff --git a/awx/lib/site-packages/requests/packages/charade/__init__.py b/awx/lib/site-packages/requests/packages/charade/__init__.py new file mode 100644 index 0000000000..1aadf3e74d --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/__init__.py @@ -0,0 +1,32 @@ +######################## BEGIN LICENSE BLOCK ######################## +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +__version__ = "1.0.3" +from sys import version_info + + +def detect(aBuf): + if ((version_info < (3, 0) and isinstance(aBuf, unicode)) or + (version_info >= (3, 0) and not isinstance(aBuf, bytes))): + raise ValueError('Expected a bytes object, not a unicode object') + + from . import universaldetector + u = universaldetector.UniversalDetector() + u.reset() + u.feed(aBuf) + u.close() + return u.result diff --git a/awx/lib/site-packages/requests/packages/charade/big5freq.py b/awx/lib/site-packages/requests/packages/charade/big5freq.py new file mode 100644 index 0000000000..65bffc04b0 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/big5freq.py @@ -0,0 +1,925 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# Big5 frequency table +# by Taiwan's Mandarin Promotion Council +# <http://www.edu.tw:81/mandr/> +# +# 128 --> 0.42261 +# 256 --> 0.57851 +# 512 --> 0.74851 +# 1024 --> 0.89384 +# 2048 --> 0.97583 +# +# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98 +# Random Distribution Ration = 512/(5401-512)=0.105 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR + +BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75 + +#Char to FreqOrder table +BIG5_TABLE_SIZE = 5376 + +Big5CharToFreqOrder = ( + 1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16 +3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32 +1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48 + 63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64 +3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80 +4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96 +5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112 + 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128 + 179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144 + 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160 +2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176 +1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192 +3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208 + 706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224 +1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240 +3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256 +2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272 + 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288 +3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304 +1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320 +5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336 + 266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352 +5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368 +1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384 + 32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400 + 188, 109, 
499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416 +3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432 +3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448 + 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464 +2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480 +2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496 + 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512 + 287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528 +3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544 +1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560 +1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576 +1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592 +2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608 + 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624 +4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640 +1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656 +5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672 +2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688 + 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704 + 98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720 + 523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736 + 710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752 +5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768 + 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784 +1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800 + 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816 + 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832 +5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848 +1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864 + 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880 +3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896 +4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912 +3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928 + 279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944 + 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960 +1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976 +4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992 +3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008 +3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024 +2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040 +5101, 233,4233,3697,1819,4550,4551,5102, 
96,1777,1315,2083,5103, 257,5104,1810, # 1056 +3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072 +5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088 +1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104 +2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120 +1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136 + 78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152 +1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168 +4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184 +3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200 + 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216 + 165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232 + 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248 +2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264 +5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280 +1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296 +2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312 +1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328 +1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344 +5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360 +5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376 +5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392 +3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408 +4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424 +4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440 +2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456 +5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472 +3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488 + 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504 +5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520 +5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536 +1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552 +2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568 +3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584 +4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600 +5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616 +3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632 +4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648 +1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664 +1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680 +4580, 294,3582,2955,1693, 477, 979, 
281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696 +1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712 + 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728 +1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744 +1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760 +3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776 + 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792 +5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808 +2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824 +1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840 +1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856 +5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872 + 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888 +4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904 + 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920 +2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936 + 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952 +1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968 +1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984 + 730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000 +4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016 +4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032 +1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048 +3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064 +5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080 +5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096 +1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112 +2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128 +1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144 +3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160 +2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176 +3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192 +2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208 +4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224 +4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240 +3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256 + 97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272 +3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288 + 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304 +3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320 +4032,3375,5271, 
513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336 +3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352 +1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368 +5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384 + 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400 +5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416 +1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432 + 391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448 +4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464 +4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480 + 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496 +2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512 +2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528 +3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544 +1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560 +4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576 +2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592 +1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608 +1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624 +2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640 +3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656 +1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672 +5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688 +1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704 +4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720 +1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736 + 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752 +1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768 +4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784 +4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800 +2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816 +1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832 +4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848 + 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864 +5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880 +2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896 +3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912 +4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928 + 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944 +5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960 +5359, 
535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976 +1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992 +4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008 +4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024 +2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040 +3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056 +3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072 +2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088 +1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104 +4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120 +3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136 +3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152 +2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168 +4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184 +5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200 +3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216 +2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232 +3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248 +1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264 +2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280 +3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296 +4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312 +2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328 +2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344 +5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360 +1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376 +2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392 +1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408 +3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424 +4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440 +2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456 +3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472 +3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488 +2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504 +4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520 +2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536 +3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552 +4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568 +5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584 +3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600 + 
194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616 +1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632 +4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648 +1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664 +4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680 +5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696 + 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712 +5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728 +5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744 +2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760 +3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776 +2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792 +2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808 + 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824 +1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840 +4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856 +3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872 +3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888 + 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904 +2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920 + 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936 +2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952 +4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968 +1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984 +4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000 +1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016 +3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032 + 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048 +3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064 +5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080 +5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096 +3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112 +3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128 +1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144 +2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160 +5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176 +1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192 +1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208 +3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224 + 919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240 
+1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256 +4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272 +5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288 +2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304 +3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320 + 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336 +1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352 +2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368 +2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384 +5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400 +5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416 +5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432 +2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448 +2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464 +1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480 +4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496 +3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512 +3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528 +4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544 +4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560 +2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576 +2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592 +5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608 +4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624 +5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640 +4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656 + 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672 + 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688 +1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704 +3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720 +4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736 +1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752 +5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768 +2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784 +2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800 +3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816 +5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832 +1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848 +3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864 +5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 
4880 +1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896 +5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912 +2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928 +3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944 +2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960 +3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976 +3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992 +3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008 +4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024 + 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040 +2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056 +4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072 +3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088 +5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104 +1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120 +5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136 + 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152 +1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168 + 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184 +4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200 +1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216 +4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232 +1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248 + 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264 +3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280 +4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296 +5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312 + 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328 +3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344 + 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360 +2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 #last 512 +#Everything below is of no interest for detection purpose +2522,1613,4812,5799,3345,3945,2523,5800,4162,5801,1637,4163,2471,4813,3946,5802, # 5392 +2500,3034,3800,5803,5804,2195,4814,5805,2163,5806,5807,5808,5809,5810,5811,5812, # 5408 +5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828, # 5424 +5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844, # 5440 +5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856,5857,5858,5859,5860, # 5456 +5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872,5873,5874,5875,5876, # 5472 +5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888,5889,5890,5891,5892, # 5488 +5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905,5906,5907,5908, # 5504 
+5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920,5921,5922,5923,5924, # 5520 +5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936,5937,5938,5939,5940, # 5536 +5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952,5953,5954,5955,5956, # 5552 +5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968,5969,5970,5971,5972, # 5568 +5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984,5985,5986,5987,5988, # 5584 +5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004, # 5600 +6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020, # 5616 +6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032,6033,6034,6035,6036, # 5632 +6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052, # 5648 +6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068, # 5664 +6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084, # 5680 +6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100, # 5696 +6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116, # 5712 +6117,6118,6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,6132, # 5728 +6133,6134,6135,6136,6137,6138,6139,6140,6141,6142,6143,6144,6145,6146,6147,6148, # 5744 +6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163,6164, # 5760 +6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179,6180, # 5776 +6181,6182,6183,6184,6185,6186,6187,6188,6189,6190,6191,6192,6193,6194,6195,6196, # 5792 +6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210,6211,6212, # 5808 +6213,6214,6215,6216,6217,6218,6219,6220,6221,6222,6223,3670,6224,6225,6226,6227, # 5824 +6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241,6242,6243, # 5840 +6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,6254,6255,6256,6257,6258,6259, # 5856 +6260,6261,6262,6263,6264,6265,6266,6267,6268,6269,6270,6271,6272,6273,6274,6275, # 5872 +6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,4815,6286,6287,6288,6289,6290, # 5888 +6291,6292,4816,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,6303,6304,6305, # 5904 +6306,6307,6308,6309,6310,6311,4817,4818,6312,6313,6314,6315,6316,6317,6318,4819, # 5920 +6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333,6334, # 5936 +6335,6336,6337,4820,6338,6339,6340,6341,6342,6343,6344,6345,6346,6347,6348,6349, # 5952 +6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363,6364,6365, # 5968 +6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379,6380,6381, # 5984 +6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395,6396,6397, # 6000 +6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,3441,6411,6412, # 6016 +6413,6414,6415,6416,6417,6418,6419,6420,6421,6422,6423,6424,6425,4440,6426,6427, # 6032 +6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,6439,6440,6441,6442,6443, # 6048 +6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,4821,6455,6456,6457,6458, # 6064 +6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472,6473,6474, # 6080 +6475,6476,6477,3947,3948,6478,6479,6480,6481,3272,4441,6482,6483,6484,6485,4442, # 6096 +6486,6487,6488,6489,6490,6491,6492,6493,6494,6495,6496,4822,6497,6498,6499,6500, # 6112 +6501,6502,6503,6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516, # 6128 +6517,6518,6519,6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532, # 
6144 +6533,6534,6535,6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548, # 6160 +6549,6550,6551,6552,6553,6554,6555,6556,2784,6557,4823,6558,6559,6560,6561,6562, # 6176 +6563,6564,6565,6566,6567,6568,6569,3949,6570,6571,6572,4824,6573,6574,6575,6576, # 6192 +6577,6578,6579,6580,6581,6582,6583,4825,6584,6585,6586,3950,2785,6587,6588,6589, # 6208 +6590,6591,6592,6593,6594,6595,6596,6597,6598,6599,6600,6601,6602,6603,6604,6605, # 6224 +6606,6607,6608,6609,6610,6611,6612,4826,6613,6614,6615,4827,6616,6617,6618,6619, # 6240 +6620,6621,6622,6623,6624,6625,4164,6626,6627,6628,6629,6630,6631,6632,6633,6634, # 6256 +3547,6635,4828,6636,6637,6638,6639,6640,6641,6642,3951,2984,6643,6644,6645,6646, # 6272 +6647,6648,6649,4165,6650,4829,6651,6652,4830,6653,6654,6655,6656,6657,6658,6659, # 6288 +6660,6661,6662,4831,6663,6664,6665,6666,6667,6668,6669,6670,6671,4166,6672,4832, # 6304 +3952,6673,6674,6675,6676,4833,6677,6678,6679,4167,6680,6681,6682,3198,6683,6684, # 6320 +6685,6686,6687,6688,6689,6690,6691,6692,6693,6694,6695,6696,6697,4834,6698,6699, # 6336 +6700,6701,6702,6703,6704,6705,6706,6707,6708,6709,6710,6711,6712,6713,6714,6715, # 6352 +6716,6717,6718,6719,6720,6721,6722,6723,6724,6725,6726,6727,6728,6729,6730,6731, # 6368 +6732,6733,6734,4443,6735,6736,6737,6738,6739,6740,6741,6742,6743,6744,6745,4444, # 6384 +6746,6747,6748,6749,6750,6751,6752,6753,6754,6755,6756,6757,6758,6759,6760,6761, # 6400 +6762,6763,6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777, # 6416 +6778,6779,6780,6781,4168,6782,6783,3442,6784,6785,6786,6787,6788,6789,6790,6791, # 6432 +4169,6792,6793,6794,6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806, # 6448 +6807,6808,6809,6810,6811,4835,6812,6813,6814,4445,6815,6816,4446,6817,6818,6819, # 6464 +6820,6821,6822,6823,6824,6825,6826,6827,6828,6829,6830,6831,6832,6833,6834,6835, # 6480 +3548,6836,6837,6838,6839,6840,6841,6842,6843,6844,6845,6846,4836,6847,6848,6849, # 6496 +6850,6851,6852,6853,6854,3953,6855,6856,6857,6858,6859,6860,6861,6862,6863,6864, # 6512 +6865,6866,6867,6868,6869,6870,6871,6872,6873,6874,6875,6876,6877,3199,6878,6879, # 6528 +6880,6881,6882,4447,6883,6884,6885,6886,6887,6888,6889,6890,6891,6892,6893,6894, # 6544 +6895,6896,6897,6898,6899,6900,6901,6902,6903,6904,4170,6905,6906,6907,6908,6909, # 6560 +6910,6911,6912,6913,6914,6915,6916,6917,6918,6919,6920,6921,6922,6923,6924,6925, # 6576 +6926,6927,4837,6928,6929,6930,6931,6932,6933,6934,6935,6936,3346,6937,6938,4838, # 6592 +6939,6940,6941,4448,6942,6943,6944,6945,6946,4449,6947,6948,6949,6950,6951,6952, # 6608 +6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966,6967,6968, # 6624 +6969,6970,6971,6972,6973,6974,6975,6976,6977,6978,6979,6980,6981,6982,6983,6984, # 6640 +6985,6986,6987,6988,6989,6990,6991,6992,6993,6994,3671,6995,6996,6997,6998,4839, # 6656 +6999,7000,7001,7002,3549,7003,7004,7005,7006,7007,7008,7009,7010,7011,7012,7013, # 6672 +7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027,7028,7029, # 6688 +7030,4840,7031,7032,7033,7034,7035,7036,7037,7038,4841,7039,7040,7041,7042,7043, # 6704 +7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058,7059, # 6720 +7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,2985,7071,7072,7073,7074, # 6736 +7075,7076,7077,7078,7079,7080,4842,7081,7082,7083,7084,7085,7086,7087,7088,7089, # 6752 +7090,7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105, # 6768 
+7106,7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,4450,7119,7120, # 6784 +7121,7122,7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136, # 6800 +7137,7138,7139,7140,7141,7142,7143,4843,7144,7145,7146,7147,7148,7149,7150,7151, # 6816 +7152,7153,7154,7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167, # 6832 +7168,7169,7170,7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183, # 6848 +7184,7185,7186,7187,7188,4171,4172,7189,7190,7191,7192,7193,7194,7195,7196,7197, # 6864 +7198,7199,7200,7201,7202,7203,7204,7205,7206,7207,7208,7209,7210,7211,7212,7213, # 6880 +7214,7215,7216,7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229, # 6896 +7230,7231,7232,7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245, # 6912 +7246,7247,7248,7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261, # 6928 +7262,7263,7264,7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277, # 6944 +7278,7279,7280,7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293, # 6960 +7294,7295,7296,4844,7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308, # 6976 +7309,7310,7311,7312,7313,7314,7315,7316,4451,7317,7318,7319,7320,7321,7322,7323, # 6992 +7324,7325,7326,7327,7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339, # 7008 +7340,7341,7342,7343,7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,4173,7354, # 7024 +7355,4845,7356,7357,7358,7359,7360,7361,7362,7363,7364,7365,7366,7367,7368,7369, # 7040 +7370,7371,7372,7373,7374,7375,7376,7377,7378,7379,7380,7381,7382,7383,7384,7385, # 7056 +7386,7387,7388,4846,7389,7390,7391,7392,7393,7394,7395,7396,7397,7398,7399,7400, # 7072 +7401,7402,7403,7404,7405,3672,7406,7407,7408,7409,7410,7411,7412,7413,7414,7415, # 7088 +7416,7417,7418,7419,7420,7421,7422,7423,7424,7425,7426,7427,7428,7429,7430,7431, # 7104 +7432,7433,7434,7435,7436,7437,7438,7439,7440,7441,7442,7443,7444,7445,7446,7447, # 7120 +7448,7449,7450,7451,7452,7453,4452,7454,3200,7455,7456,7457,7458,7459,7460,7461, # 7136 +7462,7463,7464,7465,7466,7467,7468,7469,7470,7471,7472,7473,7474,4847,7475,7476, # 7152 +7477,3133,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487,7488,7489,7490,7491, # 7168 +7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,3347,7503,7504,7505,7506, # 7184 +7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519,7520,7521,4848, # 7200 +7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535,7536,7537, # 7216 +7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,3801,4849,7550,7551, # 7232 +7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567, # 7248 +7568,7569,3035,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582, # 7264 +7583,7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598, # 7280 +7599,7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614, # 7296 +7615,7616,4850,7617,7618,3802,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628, # 7312 +7629,7630,7631,7632,4851,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643, # 7328 +7644,7645,7646,7647,7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659, # 7344 +7660,7661,7662,7663,7664,7665,7666,7667,7668,7669,7670,4453,7671,7672,7673,7674, # 7360 +7675,7676,7677,7678,7679,7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690, # 7376 +7691,7692,7693,7694,7695,7696,7697,3443,7698,7699,7700,7701,7702,4454,7703,7704, # 7392 +7705,7706,7707,7708,7709,7710,7711,7712,7713,2472,7714,7715,7716,7717,7718,7719, # 
7408 +7720,7721,7722,7723,7724,7725,7726,7727,7728,7729,7730,7731,3954,7732,7733,7734, # 7424 +7735,7736,7737,7738,7739,7740,7741,7742,7743,7744,7745,7746,7747,7748,7749,7750, # 7440 +3134,7751,7752,4852,7753,7754,7755,4853,7756,7757,7758,7759,7760,4174,7761,7762, # 7456 +7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,7777,7778, # 7472 +7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791,7792,7793,7794, # 7488 +7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,4854,7806,7807,7808,7809, # 7504 +7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824,7825, # 7520 +4855,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,7840, # 7536 +7841,7842,7843,7844,7845,7846,7847,3955,7848,7849,7850,7851,7852,7853,7854,7855, # 7552 +7856,7857,7858,7859,7860,3444,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870, # 7568 +7871,7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886, # 7584 +7887,7888,7889,7890,7891,4175,7892,7893,7894,7895,7896,4856,4857,7897,7898,7899, # 7600 +7900,2598,7901,7902,7903,7904,7905,7906,7907,7908,4455,7909,7910,7911,7912,7913, # 7616 +7914,3201,7915,7916,7917,7918,7919,7920,7921,4858,7922,7923,7924,7925,7926,7927, # 7632 +7928,7929,7930,7931,7932,7933,7934,7935,7936,7937,7938,7939,7940,7941,7942,7943, # 7648 +7944,7945,7946,7947,7948,7949,7950,7951,7952,7953,7954,7955,7956,7957,7958,7959, # 7664 +7960,7961,7962,7963,7964,7965,7966,7967,7968,7969,7970,7971,7972,7973,7974,7975, # 7680 +7976,7977,7978,7979,7980,7981,4859,7982,7983,7984,7985,7986,7987,7988,7989,7990, # 7696 +7991,7992,7993,7994,7995,7996,4860,7997,7998,7999,8000,8001,8002,8003,8004,8005, # 7712 +8006,8007,8008,8009,8010,8011,8012,8013,8014,8015,8016,4176,8017,8018,8019,8020, # 7728 +8021,8022,8023,4861,8024,8025,8026,8027,8028,8029,8030,8031,8032,8033,8034,8035, # 7744 +8036,4862,4456,8037,8038,8039,8040,4863,8041,8042,8043,8044,8045,8046,8047,8048, # 7760 +8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063,8064, # 7776 +8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079,8080, # 7792 +8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095,8096, # 7808 +8097,8098,8099,4864,4177,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110, # 7824 +8111,8112,8113,8114,8115,8116,8117,8118,8119,8120,4178,8121,8122,8123,8124,8125, # 7840 +8126,8127,8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141, # 7856 +8142,8143,8144,8145,4865,4866,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155, # 7872 +8156,8157,8158,8159,8160,8161,8162,8163,8164,8165,4179,8166,8167,8168,8169,8170, # 7888 +8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181,4457,8182,8183,8184,8185, # 7904 +8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197,8198,8199,8200,8201, # 7920 +8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213,8214,8215,8216,8217, # 7936 +8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229,8230,8231,8232,8233, # 7952 +8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245,8246,8247,8248,8249, # 7968 +8250,8251,8252,8253,8254,8255,8256,3445,8257,8258,8259,8260,8261,8262,4458,8263, # 7984 +8264,8265,8266,8267,8268,8269,8270,8271,8272,4459,8273,8274,8275,8276,3550,8277, # 8000 +8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,4460,8290,8291,8292, # 8016 +8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,4867, # 8032 
+8308,8309,8310,8311,8312,3551,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322, # 8048 +8323,8324,8325,8326,4868,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337, # 8064 +8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353, # 8080 +8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,4869,4461,8364,8365,8366,8367, # 8096 +8368,8369,8370,4870,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382, # 8112 +8383,8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398, # 8128 +8399,8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,4871,8411,8412,8413, # 8144 +8414,8415,8416,8417,8418,8419,8420,8421,8422,4462,8423,8424,8425,8426,8427,8428, # 8160 +8429,8430,8431,8432,8433,2986,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443, # 8176 +8444,8445,8446,8447,8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459, # 8192 +8460,8461,8462,8463,8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475, # 8208 +8476,8477,8478,4180,8479,8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490, # 8224 +8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506, # 8240 +8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522, # 8256 +8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538, # 8272 +8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554, # 8288 +8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,4872,8565,8566,8567,8568,8569, # 8304 +8570,8571,8572,8573,4873,8574,8575,8576,8577,8578,8579,8580,8581,8582,8583,8584, # 8320 +8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597,8598,8599,8600, # 8336 +8601,8602,8603,8604,8605,3803,8606,8607,8608,8609,8610,8611,8612,8613,4874,3804, # 8352 +8614,8615,8616,8617,8618,8619,8620,8621,3956,8622,8623,8624,8625,8626,8627,8628, # 8368 +8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,2865,8639,8640,8641,8642,8643, # 8384 +8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,4463,8657,8658, # 8400 +8659,4875,4876,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672, # 8416 +8673,8674,8675,8676,8677,8678,8679,8680,8681,4464,8682,8683,8684,8685,8686,8687, # 8432 +8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703, # 8448 +8704,8705,8706,8707,8708,8709,2261,8710,8711,8712,8713,8714,8715,8716,8717,8718, # 8464 +8719,8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,4181, # 8480 +8734,8735,8736,8737,8738,8739,8740,8741,8742,8743,8744,8745,8746,8747,8748,8749, # 8496 +8750,8751,8752,8753,8754,8755,8756,8757,8758,8759,8760,8761,8762,8763,4877,8764, # 8512 +8765,8766,8767,8768,8769,8770,8771,8772,8773,8774,8775,8776,8777,8778,8779,8780, # 8528 +8781,8782,8783,8784,8785,8786,8787,8788,4878,8789,4879,8790,8791,8792,4880,8793, # 8544 +8794,8795,8796,8797,8798,8799,8800,8801,4881,8802,8803,8804,8805,8806,8807,8808, # 8560 +8809,8810,8811,8812,8813,8814,8815,3957,8816,8817,8818,8819,8820,8821,8822,8823, # 8576 +8824,8825,8826,8827,8828,8829,8830,8831,8832,8833,8834,8835,8836,8837,8838,8839, # 8592 +8840,8841,8842,8843,8844,8845,8846,8847,4882,8848,8849,8850,8851,8852,8853,8854, # 8608 +8855,8856,8857,8858,8859,8860,8861,8862,8863,8864,8865,8866,8867,8868,8869,8870, # 8624 +8871,8872,8873,8874,8875,8876,8877,8878,8879,8880,8881,8882,8883,8884,3202,8885, # 8640 +8886,8887,8888,8889,8890,8891,8892,8893,8894,8895,8896,8897,8898,8899,8900,8901, # 8656 +8902,8903,8904,8905,8906,8907,8908,8909,8910,8911,8912,8913,8914,8915,8916,8917, # 
8672 +8918,8919,8920,8921,8922,8923,8924,4465,8925,8926,8927,8928,8929,8930,8931,8932, # 8688 +4883,8933,8934,8935,8936,8937,8938,8939,8940,8941,8942,8943,2214,8944,8945,8946, # 8704 +8947,8948,8949,8950,8951,8952,8953,8954,8955,8956,8957,8958,8959,8960,8961,8962, # 8720 +8963,8964,8965,4884,8966,8967,8968,8969,8970,8971,8972,8973,8974,8975,8976,8977, # 8736 +8978,8979,8980,8981,8982,8983,8984,8985,8986,8987,8988,8989,8990,8991,8992,4885, # 8752 +8993,8994,8995,8996,8997,8998,8999,9000,9001,9002,9003,9004,9005,9006,9007,9008, # 8768 +9009,9010,9011,9012,9013,9014,9015,9016,9017,9018,9019,9020,9021,4182,9022,9023, # 8784 +9024,9025,9026,9027,9028,9029,9030,9031,9032,9033,9034,9035,9036,9037,9038,9039, # 8800 +9040,9041,9042,9043,9044,9045,9046,9047,9048,9049,9050,9051,9052,9053,9054,9055, # 8816 +9056,9057,9058,9059,9060,9061,9062,9063,4886,9064,9065,9066,9067,9068,9069,4887, # 8832 +9070,9071,9072,9073,9074,9075,9076,9077,9078,9079,9080,9081,9082,9083,9084,9085, # 8848 +9086,9087,9088,9089,9090,9091,9092,9093,9094,9095,9096,9097,9098,9099,9100,9101, # 8864 +9102,9103,9104,9105,9106,9107,9108,9109,9110,9111,9112,9113,9114,9115,9116,9117, # 8880 +9118,9119,9120,9121,9122,9123,9124,9125,9126,9127,9128,9129,9130,9131,9132,9133, # 8896 +9134,9135,9136,9137,9138,9139,9140,9141,3958,9142,9143,9144,9145,9146,9147,9148, # 8912 +9149,9150,9151,4888,9152,9153,9154,9155,9156,9157,9158,9159,9160,9161,9162,9163, # 8928 +9164,9165,9166,9167,9168,9169,9170,9171,9172,9173,9174,9175,4889,9176,9177,9178, # 8944 +9179,9180,9181,9182,9183,9184,9185,9186,9187,9188,9189,9190,9191,9192,9193,9194, # 8960 +9195,9196,9197,9198,9199,9200,9201,9202,9203,4890,9204,9205,9206,9207,9208,9209, # 8976 +9210,9211,9212,9213,9214,9215,9216,9217,9218,9219,9220,9221,9222,4466,9223,9224, # 8992 +9225,9226,9227,9228,9229,9230,9231,9232,9233,9234,9235,9236,9237,9238,9239,9240, # 9008 +9241,9242,9243,9244,9245,4891,9246,9247,9248,9249,9250,9251,9252,9253,9254,9255, # 9024 +9256,9257,4892,9258,9259,9260,9261,4893,4894,9262,9263,9264,9265,9266,9267,9268, # 9040 +9269,9270,9271,9272,9273,4467,9274,9275,9276,9277,9278,9279,9280,9281,9282,9283, # 9056 +9284,9285,3673,9286,9287,9288,9289,9290,9291,9292,9293,9294,9295,9296,9297,9298, # 9072 +9299,9300,9301,9302,9303,9304,9305,9306,9307,9308,9309,9310,9311,9312,9313,9314, # 9088 +9315,9316,9317,9318,9319,9320,9321,9322,4895,9323,9324,9325,9326,9327,9328,9329, # 9104 +9330,9331,9332,9333,9334,9335,9336,9337,9338,9339,9340,9341,9342,9343,9344,9345, # 9120 +9346,9347,4468,9348,9349,9350,9351,9352,9353,9354,9355,9356,9357,9358,9359,9360, # 9136 +9361,9362,9363,9364,9365,9366,9367,9368,9369,9370,9371,9372,9373,4896,9374,4469, # 9152 +9375,9376,9377,9378,9379,4897,9380,9381,9382,9383,9384,9385,9386,9387,9388,9389, # 9168 +9390,9391,9392,9393,9394,9395,9396,9397,9398,9399,9400,9401,9402,9403,9404,9405, # 9184 +9406,4470,9407,2751,9408,9409,3674,3552,9410,9411,9412,9413,9414,9415,9416,9417, # 9200 +9418,9419,9420,9421,4898,9422,9423,9424,9425,9426,9427,9428,9429,3959,9430,9431, # 9216 +9432,9433,9434,9435,9436,4471,9437,9438,9439,9440,9441,9442,9443,9444,9445,9446, # 9232 +9447,9448,9449,9450,3348,9451,9452,9453,9454,9455,9456,9457,9458,9459,9460,9461, # 9248 +9462,9463,9464,9465,9466,9467,9468,9469,9470,9471,9472,4899,9473,9474,9475,9476, # 9264 +9477,4900,9478,9479,9480,9481,9482,9483,9484,9485,9486,9487,9488,3349,9489,9490, # 9280 +9491,9492,9493,9494,9495,9496,9497,9498,9499,9500,9501,9502,9503,9504,9505,9506, # 9296 
+9507,9508,9509,9510,9511,9512,9513,9514,9515,9516,9517,9518,9519,9520,4901,9521, # 9312 +9522,9523,9524,9525,9526,4902,9527,9528,9529,9530,9531,9532,9533,9534,9535,9536, # 9328 +9537,9538,9539,9540,9541,9542,9543,9544,9545,9546,9547,9548,9549,9550,9551,9552, # 9344 +9553,9554,9555,9556,9557,9558,9559,9560,9561,9562,9563,9564,9565,9566,9567,9568, # 9360 +9569,9570,9571,9572,9573,9574,9575,9576,9577,9578,9579,9580,9581,9582,9583,9584, # 9376 +3805,9585,9586,9587,9588,9589,9590,9591,9592,9593,9594,9595,9596,9597,9598,9599, # 9392 +9600,9601,9602,4903,9603,9604,9605,9606,9607,4904,9608,9609,9610,9611,9612,9613, # 9408 +9614,4905,9615,9616,9617,9618,9619,9620,9621,9622,9623,9624,9625,9626,9627,9628, # 9424 +9629,9630,9631,9632,4906,9633,9634,9635,9636,9637,9638,9639,9640,9641,9642,9643, # 9440 +4907,9644,9645,9646,9647,9648,9649,9650,9651,9652,9653,9654,9655,9656,9657,9658, # 9456 +9659,9660,9661,9662,9663,9664,9665,9666,9667,9668,9669,9670,9671,9672,4183,9673, # 9472 +9674,9675,9676,9677,4908,9678,9679,9680,9681,4909,9682,9683,9684,9685,9686,9687, # 9488 +9688,9689,9690,4910,9691,9692,9693,3675,9694,9695,9696,2945,9697,9698,9699,9700, # 9504 +9701,9702,9703,9704,9705,4911,9706,9707,9708,9709,9710,9711,9712,9713,9714,9715, # 9520 +9716,9717,9718,9719,9720,9721,9722,9723,9724,9725,9726,9727,9728,9729,9730,9731, # 9536 +9732,9733,9734,9735,4912,9736,9737,9738,9739,9740,4913,9741,9742,9743,9744,9745, # 9552 +9746,9747,9748,9749,9750,9751,9752,9753,9754,9755,9756,9757,9758,4914,9759,9760, # 9568 +9761,9762,9763,9764,9765,9766,9767,9768,9769,9770,9771,9772,9773,9774,9775,9776, # 9584 +9777,9778,9779,9780,9781,9782,4915,9783,9784,9785,9786,9787,9788,9789,9790,9791, # 9600 +9792,9793,4916,9794,9795,9796,9797,9798,9799,9800,9801,9802,9803,9804,9805,9806, # 9616 +9807,9808,9809,9810,9811,9812,9813,9814,9815,9816,9817,9818,9819,9820,9821,9822, # 9632 +9823,9824,9825,9826,9827,9828,9829,9830,9831,9832,9833,9834,9835,9836,9837,9838, # 9648 +9839,9840,9841,9842,9843,9844,9845,9846,9847,9848,9849,9850,9851,9852,9853,9854, # 9664 +9855,9856,9857,9858,9859,9860,9861,9862,9863,9864,9865,9866,9867,9868,4917,9869, # 9680 +9870,9871,9872,9873,9874,9875,9876,9877,9878,9879,9880,9881,9882,9883,9884,9885, # 9696 +9886,9887,9888,9889,9890,9891,9892,4472,9893,9894,9895,9896,9897,3806,9898,9899, # 9712 +9900,9901,9902,9903,9904,9905,9906,9907,9908,9909,9910,9911,9912,9913,9914,4918, # 9728 +9915,9916,9917,4919,9918,9919,9920,9921,4184,9922,9923,9924,9925,9926,9927,9928, # 9744 +9929,9930,9931,9932,9933,9934,9935,9936,9937,9938,9939,9940,9941,9942,9943,9944, # 9760 +9945,9946,4920,9947,9948,9949,9950,9951,9952,9953,9954,9955,4185,9956,9957,9958, # 9776 +9959,9960,9961,9962,9963,9964,9965,4921,9966,9967,9968,4473,9969,9970,9971,9972, # 9792 +9973,9974,9975,9976,9977,4474,9978,9979,9980,9981,9982,9983,9984,9985,9986,9987, # 9808 +9988,9989,9990,9991,9992,9993,9994,9995,9996,9997,9998,9999,10000,10001,10002,10003, # 9824 +10004,10005,10006,10007,10008,10009,10010,10011,10012,10013,10014,10015,10016,10017,10018,10019, # 9840 +10020,10021,4922,10022,4923,10023,10024,10025,10026,10027,10028,10029,10030,10031,10032,10033, # 9856 +10034,10035,10036,10037,10038,10039,10040,10041,10042,10043,10044,10045,10046,10047,10048,4924, # 9872 +10049,10050,10051,10052,10053,10054,10055,10056,10057,10058,10059,10060,10061,10062,10063,10064, # 9888 +10065,10066,10067,10068,10069,10070,10071,10072,10073,10074,10075,10076,10077,10078,10079,10080, # 9904 
+10081,10082,10083,10084,10085,10086,10087,4475,10088,10089,10090,10091,10092,10093,10094,10095, # 9920 +10096,10097,4476,10098,10099,10100,10101,10102,10103,10104,10105,10106,10107,10108,10109,10110, # 9936 +10111,2174,10112,10113,10114,10115,10116,10117,10118,10119,10120,10121,10122,10123,10124,10125, # 9952 +10126,10127,10128,10129,10130,10131,10132,10133,10134,10135,10136,10137,10138,10139,10140,3807, # 9968 +4186,4925,10141,10142,10143,10144,10145,10146,10147,4477,4187,10148,10149,10150,10151,10152, # 9984 +10153,4188,10154,10155,10156,10157,10158,10159,10160,10161,4926,10162,10163,10164,10165,10166, #10000 +10167,10168,10169,10170,10171,10172,10173,10174,10175,10176,10177,10178,10179,10180,10181,10182, #10016 +10183,10184,10185,10186,10187,10188,10189,10190,10191,10192,3203,10193,10194,10195,10196,10197, #10032 +10198,10199,10200,4478,10201,10202,10203,10204,4479,10205,10206,10207,10208,10209,10210,10211, #10048 +10212,10213,10214,10215,10216,10217,10218,10219,10220,10221,10222,10223,10224,10225,10226,10227, #10064 +10228,10229,10230,10231,10232,10233,10234,4927,10235,10236,10237,10238,10239,10240,10241,10242, #10080 +10243,10244,10245,10246,10247,10248,10249,10250,10251,10252,10253,10254,10255,10256,10257,10258, #10096 +10259,10260,10261,10262,10263,10264,10265,10266,10267,10268,10269,10270,10271,10272,10273,4480, #10112 +4928,4929,10274,10275,10276,10277,10278,10279,10280,10281,10282,10283,10284,10285,10286,10287, #10128 +10288,10289,10290,10291,10292,10293,10294,10295,10296,10297,10298,10299,10300,10301,10302,10303, #10144 +10304,10305,10306,10307,10308,10309,10310,10311,10312,10313,10314,10315,10316,10317,10318,10319, #10160 +10320,10321,10322,10323,10324,10325,10326,10327,10328,10329,10330,10331,10332,10333,10334,4930, #10176 +10335,10336,10337,10338,10339,10340,10341,10342,4931,10343,10344,10345,10346,10347,10348,10349, #10192 +10350,10351,10352,10353,10354,10355,3088,10356,2786,10357,10358,10359,10360,4189,10361,10362, #10208 +10363,10364,10365,10366,10367,10368,10369,10370,10371,10372,10373,10374,10375,4932,10376,10377, #10224 +10378,10379,10380,10381,10382,10383,10384,10385,10386,10387,10388,10389,10390,10391,10392,4933, #10240 +10393,10394,10395,4934,10396,10397,10398,10399,10400,10401,10402,10403,10404,10405,10406,10407, #10256 +10408,10409,10410,10411,10412,3446,10413,10414,10415,10416,10417,10418,10419,10420,10421,10422, #10272 +10423,4935,10424,10425,10426,10427,10428,10429,10430,4936,10431,10432,10433,10434,10435,10436, #10288 +10437,10438,10439,10440,10441,10442,10443,4937,10444,10445,10446,10447,4481,10448,10449,10450, #10304 +10451,10452,10453,10454,10455,10456,10457,10458,10459,10460,10461,10462,10463,10464,10465,10466, #10320 +10467,10468,10469,10470,10471,10472,10473,10474,10475,10476,10477,10478,10479,10480,10481,10482, #10336 +10483,10484,10485,10486,10487,10488,10489,10490,10491,10492,10493,10494,10495,10496,10497,10498, #10352 +10499,10500,10501,10502,10503,10504,10505,4938,10506,10507,10508,10509,10510,2552,10511,10512, #10368 +10513,10514,10515,10516,3447,10517,10518,10519,10520,10521,10522,10523,10524,10525,10526,10527, #10384 +10528,10529,10530,10531,10532,10533,10534,10535,10536,10537,10538,10539,10540,10541,10542,10543, #10400 +4482,10544,4939,10545,10546,10547,10548,10549,10550,10551,10552,10553,10554,10555,10556,10557, #10416 +10558,10559,10560,10561,10562,10563,10564,10565,10566,10567,3676,4483,10568,10569,10570,10571, #10432 +10572,3448,10573,10574,10575,10576,10577,10578,10579,10580,10581,10582,10583,10584,10585,10586, #10448 
+10587,10588,10589,10590,10591,10592,10593,10594,10595,10596,10597,10598,10599,10600,10601,10602, #10464 +10603,10604,10605,10606,10607,10608,10609,10610,10611,10612,10613,10614,10615,10616,10617,10618, #10480 +10619,10620,10621,10622,10623,10624,10625,10626,10627,4484,10628,10629,10630,10631,10632,4940, #10496 +10633,10634,10635,10636,10637,10638,10639,10640,10641,10642,10643,10644,10645,10646,10647,10648, #10512 +10649,10650,10651,10652,10653,10654,10655,10656,4941,10657,10658,10659,2599,10660,10661,10662, #10528 +10663,10664,10665,10666,3089,10667,10668,10669,10670,10671,10672,10673,10674,10675,10676,10677, #10544 +10678,10679,10680,4942,10681,10682,10683,10684,10685,10686,10687,10688,10689,10690,10691,10692, #10560 +10693,10694,10695,10696,10697,4485,10698,10699,10700,10701,10702,10703,10704,4943,10705,3677, #10576 +10706,10707,10708,10709,10710,10711,10712,4944,10713,10714,10715,10716,10717,10718,10719,10720, #10592 +10721,10722,10723,10724,10725,10726,10727,10728,4945,10729,10730,10731,10732,10733,10734,10735, #10608 +10736,10737,10738,10739,10740,10741,10742,10743,10744,10745,10746,10747,10748,10749,10750,10751, #10624 +10752,10753,10754,10755,10756,10757,10758,10759,10760,10761,4946,10762,10763,10764,10765,10766, #10640 +10767,4947,4948,10768,10769,10770,10771,10772,10773,10774,10775,10776,10777,10778,10779,10780, #10656 +10781,10782,10783,10784,10785,10786,10787,10788,10789,10790,10791,10792,10793,10794,10795,10796, #10672 +10797,10798,10799,10800,10801,10802,10803,10804,10805,10806,10807,10808,10809,10810,10811,10812, #10688 +10813,10814,10815,10816,10817,10818,10819,10820,10821,10822,10823,10824,10825,10826,10827,10828, #10704 +10829,10830,10831,10832,10833,10834,10835,10836,10837,10838,10839,10840,10841,10842,10843,10844, #10720 +10845,10846,10847,10848,10849,10850,10851,10852,10853,10854,10855,10856,10857,10858,10859,10860, #10736 +10861,10862,10863,10864,10865,10866,10867,10868,10869,10870,10871,10872,10873,10874,10875,10876, #10752 +10877,10878,4486,10879,10880,10881,10882,10883,10884,10885,4949,10886,10887,10888,10889,10890, #10768 +10891,10892,10893,10894,10895,10896,10897,10898,10899,10900,10901,10902,10903,10904,10905,10906, #10784 +10907,10908,10909,10910,10911,10912,10913,10914,10915,10916,10917,10918,10919,4487,10920,10921, #10800 +10922,10923,10924,10925,10926,10927,10928,10929,10930,10931,10932,4950,10933,10934,10935,10936, #10816 +10937,10938,10939,10940,10941,10942,10943,10944,10945,10946,10947,10948,10949,4488,10950,10951, #10832 +10952,10953,10954,10955,10956,10957,10958,10959,4190,10960,10961,10962,10963,10964,10965,10966, #10848 +10967,10968,10969,10970,10971,10972,10973,10974,10975,10976,10977,10978,10979,10980,10981,10982, #10864 +10983,10984,10985,10986,10987,10988,10989,10990,10991,10992,10993,10994,10995,10996,10997,10998, #10880 +10999,11000,11001,11002,11003,11004,11005,11006,3960,11007,11008,11009,11010,11011,11012,11013, #10896 +11014,11015,11016,11017,11018,11019,11020,11021,11022,11023,11024,11025,11026,11027,11028,11029, #10912 +11030,11031,11032,4951,11033,11034,11035,11036,11037,11038,11039,11040,11041,11042,11043,11044, #10928 +11045,11046,11047,4489,11048,11049,11050,11051,4952,11052,11053,11054,11055,11056,11057,11058, #10944 +4953,11059,11060,11061,11062,11063,11064,11065,11066,11067,11068,11069,11070,11071,4954,11072, #10960 +11073,11074,11075,11076,11077,11078,11079,11080,11081,11082,11083,11084,11085,11086,11087,11088, #10976 +11089,11090,11091,11092,11093,11094,11095,11096,11097,11098,11099,11100,11101,11102,11103,11104, #10992 
+11105,11106,11107,11108,11109,11110,11111,11112,11113,11114,11115,3808,11116,11117,11118,11119, #11008 +11120,11121,11122,11123,11124,11125,11126,11127,11128,11129,11130,11131,11132,11133,11134,4955, #11024 +11135,11136,11137,11138,11139,11140,11141,11142,11143,11144,11145,11146,11147,11148,11149,11150, #11040 +11151,11152,11153,11154,11155,11156,11157,11158,11159,11160,11161,4956,11162,11163,11164,11165, #11056 +11166,11167,11168,11169,11170,11171,11172,11173,11174,11175,11176,11177,11178,11179,11180,4957, #11072 +11181,11182,11183,11184,11185,11186,4958,11187,11188,11189,11190,11191,11192,11193,11194,11195, #11088 +11196,11197,11198,11199,11200,3678,11201,11202,11203,11204,11205,11206,4191,11207,11208,11209, #11104 +11210,11211,11212,11213,11214,11215,11216,11217,11218,11219,11220,11221,11222,11223,11224,11225, #11120 +11226,11227,11228,11229,11230,11231,11232,11233,11234,11235,11236,11237,11238,11239,11240,11241, #11136 +11242,11243,11244,11245,11246,11247,11248,11249,11250,11251,4959,11252,11253,11254,11255,11256, #11152 +11257,11258,11259,11260,11261,11262,11263,11264,11265,11266,11267,11268,11269,11270,11271,11272, #11168 +11273,11274,11275,11276,11277,11278,11279,11280,11281,11282,11283,11284,11285,11286,11287,11288, #11184 +11289,11290,11291,11292,11293,11294,11295,11296,11297,11298,11299,11300,11301,11302,11303,11304, #11200 +11305,11306,11307,11308,11309,11310,11311,11312,11313,11314,3679,11315,11316,11317,11318,4490, #11216 +11319,11320,11321,11322,11323,11324,11325,11326,11327,11328,11329,11330,11331,11332,11333,11334, #11232 +11335,11336,11337,11338,11339,11340,11341,11342,11343,11344,11345,11346,11347,4960,11348,11349, #11248 +11350,11351,11352,11353,11354,11355,11356,11357,11358,11359,11360,11361,11362,11363,11364,11365, #11264 +11366,11367,11368,11369,11370,11371,11372,11373,11374,11375,11376,11377,3961,4961,11378,11379, #11280 +11380,11381,11382,11383,11384,11385,11386,11387,11388,11389,11390,11391,11392,11393,11394,11395, #11296 +11396,11397,4192,11398,11399,11400,11401,11402,11403,11404,11405,11406,11407,11408,11409,11410, #11312 +11411,4962,11412,11413,11414,11415,11416,11417,11418,11419,11420,11421,11422,11423,11424,11425, #11328 +11426,11427,11428,11429,11430,11431,11432,11433,11434,11435,11436,11437,11438,11439,11440,11441, #11344 +11442,11443,11444,11445,11446,11447,11448,11449,11450,11451,11452,11453,11454,11455,11456,11457, #11360 +11458,11459,11460,11461,11462,11463,11464,11465,11466,11467,11468,11469,4963,11470,11471,4491, #11376 +11472,11473,11474,11475,4964,11476,11477,11478,11479,11480,11481,11482,11483,11484,11485,11486, #11392 +11487,11488,11489,11490,11491,11492,4965,11493,11494,11495,11496,11497,11498,11499,11500,11501, #11408 +11502,11503,11504,11505,11506,11507,11508,11509,11510,11511,11512,11513,11514,11515,11516,11517, #11424 +11518,11519,11520,11521,11522,11523,11524,11525,11526,11527,11528,11529,3962,11530,11531,11532, #11440 +11533,11534,11535,11536,11537,11538,11539,11540,11541,11542,11543,11544,11545,11546,11547,11548, #11456 +11549,11550,11551,11552,11553,11554,11555,11556,11557,11558,11559,11560,11561,11562,11563,11564, #11472 +4193,4194,11565,11566,11567,11568,11569,11570,11571,11572,11573,11574,11575,11576,11577,11578, #11488 +11579,11580,11581,11582,11583,11584,11585,11586,11587,11588,11589,11590,11591,4966,4195,11592, #11504 +11593,11594,11595,11596,11597,11598,11599,11600,11601,11602,11603,11604,3090,11605,11606,11607, #11520 +11608,11609,11610,4967,11611,11612,11613,11614,11615,11616,11617,11618,11619,11620,11621,11622, #11536 
+11623,11624,11625,11626,11627,11628,11629,11630,11631,11632,11633,11634,11635,11636,11637,11638, #11552 +11639,11640,11641,11642,11643,11644,11645,11646,11647,11648,11649,11650,11651,11652,11653,11654, #11568 +11655,11656,11657,11658,11659,11660,11661,11662,11663,11664,11665,11666,11667,11668,11669,11670, #11584 +11671,11672,11673,11674,4968,11675,11676,11677,11678,11679,11680,11681,11682,11683,11684,11685, #11600 +11686,11687,11688,11689,11690,11691,11692,11693,3809,11694,11695,11696,11697,11698,11699,11700, #11616 +11701,11702,11703,11704,11705,11706,11707,11708,11709,11710,11711,11712,11713,11714,11715,11716, #11632 +11717,11718,3553,11719,11720,11721,11722,11723,11724,11725,11726,11727,11728,11729,11730,4969, #11648 +11731,11732,11733,11734,11735,11736,11737,11738,11739,11740,4492,11741,11742,11743,11744,11745, #11664 +11746,11747,11748,11749,11750,11751,11752,4970,11753,11754,11755,11756,11757,11758,11759,11760, #11680 +11761,11762,11763,11764,11765,11766,11767,11768,11769,11770,11771,11772,11773,11774,11775,11776, #11696 +11777,11778,11779,11780,11781,11782,11783,11784,11785,11786,11787,11788,11789,11790,4971,11791, #11712 +11792,11793,11794,11795,11796,11797,4972,11798,11799,11800,11801,11802,11803,11804,11805,11806, #11728 +11807,11808,11809,11810,4973,11811,11812,11813,11814,11815,11816,11817,11818,11819,11820,11821, #11744 +11822,11823,11824,11825,11826,11827,11828,11829,11830,11831,11832,11833,11834,3680,3810,11835, #11760 +11836,4974,11837,11838,11839,11840,11841,11842,11843,11844,11845,11846,11847,11848,11849,11850, #11776 +11851,11852,11853,11854,11855,11856,11857,11858,11859,11860,11861,11862,11863,11864,11865,11866, #11792 +11867,11868,11869,11870,11871,11872,11873,11874,11875,11876,11877,11878,11879,11880,11881,11882, #11808 +11883,11884,4493,11885,11886,11887,11888,11889,11890,11891,11892,11893,11894,11895,11896,11897, #11824 +11898,11899,11900,11901,11902,11903,11904,11905,11906,11907,11908,11909,11910,11911,11912,11913, #11840 +11914,11915,4975,11916,11917,11918,11919,11920,11921,11922,11923,11924,11925,11926,11927,11928, #11856 +11929,11930,11931,11932,11933,11934,11935,11936,11937,11938,11939,11940,11941,11942,11943,11944, #11872 +11945,11946,11947,11948,11949,4976,11950,11951,11952,11953,11954,11955,11956,11957,11958,11959, #11888 +11960,11961,11962,11963,11964,11965,11966,11967,11968,11969,11970,11971,11972,11973,11974,11975, #11904 +11976,11977,11978,11979,11980,11981,11982,11983,11984,11985,11986,11987,4196,11988,11989,11990, #11920 +11991,11992,4977,11993,11994,11995,11996,11997,11998,11999,12000,12001,12002,12003,12004,12005, #11936 +12006,12007,12008,12009,12010,12011,12012,12013,12014,12015,12016,12017,12018,12019,12020,12021, #11952 +12022,12023,12024,12025,12026,12027,12028,12029,12030,12031,12032,12033,12034,12035,12036,12037, #11968 +12038,12039,12040,12041,12042,12043,12044,12045,12046,12047,12048,12049,12050,12051,12052,12053, #11984 +12054,12055,12056,12057,12058,12059,12060,12061,4978,12062,12063,12064,12065,12066,12067,12068, #12000 +12069,12070,12071,12072,12073,12074,12075,12076,12077,12078,12079,12080,12081,12082,12083,12084, #12016 +12085,12086,12087,12088,12089,12090,12091,12092,12093,12094,12095,12096,12097,12098,12099,12100, #12032 +12101,12102,12103,12104,12105,12106,12107,12108,12109,12110,12111,12112,12113,12114,12115,12116, #12048 +12117,12118,12119,12120,12121,12122,12123,4979,12124,12125,12126,12127,12128,4197,12129,12130, #12064 +12131,12132,12133,12134,12135,12136,12137,12138,12139,12140,12141,12142,12143,12144,12145,12146, #12080 
+12147,12148,12149,12150,12151,12152,12153,12154,4980,12155,12156,12157,12158,12159,12160,4494, #12096 +12161,12162,12163,12164,3811,12165,12166,12167,12168,12169,4495,12170,12171,4496,12172,12173, #12112 +12174,12175,12176,3812,12177,12178,12179,12180,12181,12182,12183,12184,12185,12186,12187,12188, #12128 +12189,12190,12191,12192,12193,12194,12195,12196,12197,12198,12199,12200,12201,12202,12203,12204, #12144 +12205,12206,12207,12208,12209,12210,12211,12212,12213,12214,12215,12216,12217,12218,12219,12220, #12160 +12221,4981,12222,12223,12224,12225,12226,12227,12228,12229,12230,12231,12232,12233,12234,12235, #12176 +4982,12236,12237,12238,12239,12240,12241,12242,12243,12244,12245,4983,12246,12247,12248,12249, #12192 +4984,12250,12251,12252,12253,12254,12255,12256,12257,12258,12259,12260,12261,12262,12263,12264, #12208 +4985,12265,4497,12266,12267,12268,12269,12270,12271,12272,12273,12274,12275,12276,12277,12278, #12224 +12279,12280,12281,12282,12283,12284,12285,12286,12287,4986,12288,12289,12290,12291,12292,12293, #12240 +12294,12295,12296,2473,12297,12298,12299,12300,12301,12302,12303,12304,12305,12306,12307,12308, #12256 +12309,12310,12311,12312,12313,12314,12315,12316,12317,12318,12319,3963,12320,12321,12322,12323, #12272 +12324,12325,12326,12327,12328,12329,12330,12331,12332,4987,12333,12334,12335,12336,12337,12338, #12288 +12339,12340,12341,12342,12343,12344,12345,12346,12347,12348,12349,12350,12351,12352,12353,12354, #12304 +12355,12356,12357,12358,12359,3964,12360,12361,12362,12363,12364,12365,12366,12367,12368,12369, #12320 +12370,3965,12371,12372,12373,12374,12375,12376,12377,12378,12379,12380,12381,12382,12383,12384, #12336 +12385,12386,12387,12388,12389,12390,12391,12392,12393,12394,12395,12396,12397,12398,12399,12400, #12352 +12401,12402,12403,12404,12405,12406,12407,12408,4988,12409,12410,12411,12412,12413,12414,12415, #12368 +12416,12417,12418,12419,12420,12421,12422,12423,12424,12425,12426,12427,12428,12429,12430,12431, #12384 +12432,12433,12434,12435,12436,12437,12438,3554,12439,12440,12441,12442,12443,12444,12445,12446, #12400 +12447,12448,12449,12450,12451,12452,12453,12454,12455,12456,12457,12458,12459,12460,12461,12462, #12416 +12463,12464,4989,12465,12466,12467,12468,12469,12470,12471,12472,12473,12474,12475,12476,12477, #12432 +12478,12479,12480,4990,12481,12482,12483,12484,12485,12486,12487,12488,12489,4498,12490,12491, #12448 +12492,12493,12494,12495,12496,12497,12498,12499,12500,12501,12502,12503,12504,12505,12506,12507, #12464 +12508,12509,12510,12511,12512,12513,12514,12515,12516,12517,12518,12519,12520,12521,12522,12523, #12480 +12524,12525,12526,12527,12528,12529,12530,12531,12532,12533,12534,12535,12536,12537,12538,12539, #12496 +12540,12541,12542,12543,12544,12545,12546,12547,12548,12549,12550,12551,4991,12552,12553,12554, #12512 +12555,12556,12557,12558,12559,12560,12561,12562,12563,12564,12565,12566,12567,12568,12569,12570, #12528 +12571,12572,12573,12574,12575,12576,12577,12578,3036,12579,12580,12581,12582,12583,3966,12584, #12544 +12585,12586,12587,12588,12589,12590,12591,12592,12593,12594,12595,12596,12597,12598,12599,12600, #12560 +12601,12602,12603,12604,12605,12606,12607,12608,12609,12610,12611,12612,12613,12614,12615,12616, #12576 +12617,12618,12619,12620,12621,12622,12623,12624,12625,12626,12627,12628,12629,12630,12631,12632, #12592 +12633,12634,12635,12636,12637,12638,12639,12640,12641,12642,12643,12644,12645,12646,4499,12647, #12608 +12648,12649,12650,12651,12652,12653,12654,12655,12656,12657,12658,12659,12660,12661,12662,12663, #12624 
+12664,12665,12666,12667,12668,12669,12670,12671,12672,12673,12674,12675,12676,12677,12678,12679, #12640 +12680,12681,12682,12683,12684,12685,12686,12687,12688,12689,12690,12691,12692,12693,12694,12695, #12656 +12696,12697,12698,4992,12699,12700,12701,12702,12703,12704,12705,12706,12707,12708,12709,12710, #12672 +12711,12712,12713,12714,12715,12716,12717,12718,12719,12720,12721,12722,12723,12724,12725,12726, #12688 +12727,12728,12729,12730,12731,12732,12733,12734,12735,12736,12737,12738,12739,12740,12741,12742, #12704 +12743,12744,12745,12746,12747,12748,12749,12750,12751,12752,12753,12754,12755,12756,12757,12758, #12720 +12759,12760,12761,12762,12763,12764,12765,12766,12767,12768,12769,12770,12771,12772,12773,12774, #12736 +12775,12776,12777,12778,4993,2175,12779,12780,12781,12782,12783,12784,12785,12786,4500,12787, #12752 +12788,12789,12790,12791,12792,12793,12794,12795,12796,12797,12798,12799,12800,12801,12802,12803, #12768 +12804,12805,12806,12807,12808,12809,12810,12811,12812,12813,12814,12815,12816,12817,12818,12819, #12784 +12820,12821,12822,12823,12824,12825,12826,4198,3967,12827,12828,12829,12830,12831,12832,12833, #12800 +12834,12835,12836,12837,12838,12839,12840,12841,12842,12843,12844,12845,12846,12847,12848,12849, #12816 +12850,12851,12852,12853,12854,12855,12856,12857,12858,12859,12860,12861,4199,12862,12863,12864, #12832 +12865,12866,12867,12868,12869,12870,12871,12872,12873,12874,12875,12876,12877,12878,12879,12880, #12848 +12881,12882,12883,12884,12885,12886,12887,4501,12888,12889,12890,12891,12892,12893,12894,12895, #12864 +12896,12897,12898,12899,12900,12901,12902,12903,12904,12905,12906,12907,12908,12909,12910,12911, #12880 +12912,4994,12913,12914,12915,12916,12917,12918,12919,12920,12921,12922,12923,12924,12925,12926, #12896 +12927,12928,12929,12930,12931,12932,12933,12934,12935,12936,12937,12938,12939,12940,12941,12942, #12912 +12943,12944,12945,12946,12947,12948,12949,12950,12951,12952,12953,12954,12955,12956,1772,12957, #12928 +12958,12959,12960,12961,12962,12963,12964,12965,12966,12967,12968,12969,12970,12971,12972,12973, #12944 +12974,12975,12976,12977,12978,12979,12980,12981,12982,12983,12984,12985,12986,12987,12988,12989, #12960 +12990,12991,12992,12993,12994,12995,12996,12997,4502,12998,4503,12999,13000,13001,13002,13003, #12976 +4504,13004,13005,13006,13007,13008,13009,13010,13011,13012,13013,13014,13015,13016,13017,13018, #12992 +13019,13020,13021,13022,13023,13024,13025,13026,13027,13028,13029,3449,13030,13031,13032,13033, #13008 +13034,13035,13036,13037,13038,13039,13040,13041,13042,13043,13044,13045,13046,13047,13048,13049, #13024 +13050,13051,13052,13053,13054,13055,13056,13057,13058,13059,13060,13061,13062,13063,13064,13065, #13040 +13066,13067,13068,13069,13070,13071,13072,13073,13074,13075,13076,13077,13078,13079,13080,13081, #13056 +13082,13083,13084,13085,13086,13087,13088,13089,13090,13091,13092,13093,13094,13095,13096,13097, #13072 +13098,13099,13100,13101,13102,13103,13104,13105,13106,13107,13108,13109,13110,13111,13112,13113, #13088 +13114,13115,13116,13117,13118,3968,13119,4995,13120,13121,13122,13123,13124,13125,13126,13127, #13104 +4505,13128,13129,13130,13131,13132,13133,13134,4996,4506,13135,13136,13137,13138,13139,4997, #13120 +13140,13141,13142,13143,13144,13145,13146,13147,13148,13149,13150,13151,13152,13153,13154,13155, #13136 +13156,13157,13158,13159,4998,13160,13161,13162,13163,13164,13165,13166,13167,13168,13169,13170, #13152 +13171,13172,13173,13174,13175,13176,4999,13177,13178,13179,13180,13181,13182,13183,13184,13185, #13168 
+13186,13187,13188,13189,13190,13191,13192,13193,13194,13195,13196,13197,13198,13199,13200,13201, #13184 +13202,13203,13204,13205,13206,5000,13207,13208,13209,13210,13211,13212,13213,13214,13215,13216, #13200 +13217,13218,13219,13220,13221,13222,13223,13224,13225,13226,13227,4200,5001,13228,13229,13230, #13216 +13231,13232,13233,13234,13235,13236,13237,13238,13239,13240,3969,13241,13242,13243,13244,3970, #13232 +13245,13246,13247,13248,13249,13250,13251,13252,13253,13254,13255,13256,13257,13258,13259,13260, #13248 +13261,13262,13263,13264,13265,13266,13267,13268,3450,13269,13270,13271,13272,13273,13274,13275, #13264 +13276,5002,13277,13278,13279,13280,13281,13282,13283,13284,13285,13286,13287,13288,13289,13290, #13280 +13291,13292,13293,13294,13295,13296,13297,13298,13299,13300,13301,13302,3813,13303,13304,13305, #13296 +13306,13307,13308,13309,13310,13311,13312,13313,13314,13315,13316,13317,13318,13319,13320,13321, #13312 +13322,13323,13324,13325,13326,13327,13328,4507,13329,13330,13331,13332,13333,13334,13335,13336, #13328 +13337,13338,13339,13340,13341,5003,13342,13343,13344,13345,13346,13347,13348,13349,13350,13351, #13344 +13352,13353,13354,13355,13356,13357,13358,13359,13360,13361,13362,13363,13364,13365,13366,13367, #13360 +5004,13368,13369,13370,13371,13372,13373,13374,13375,13376,13377,13378,13379,13380,13381,13382, #13376 +13383,13384,13385,13386,13387,13388,13389,13390,13391,13392,13393,13394,13395,13396,13397,13398, #13392 +13399,13400,13401,13402,13403,13404,13405,13406,13407,13408,13409,13410,13411,13412,13413,13414, #13408 +13415,13416,13417,13418,13419,13420,13421,13422,13423,13424,13425,13426,13427,13428,13429,13430, #13424 +13431,13432,4508,13433,13434,13435,4201,13436,13437,13438,13439,13440,13441,13442,13443,13444, #13440 +13445,13446,13447,13448,13449,13450,13451,13452,13453,13454,13455,13456,13457,5005,13458,13459, #13456 +13460,13461,13462,13463,13464,13465,13466,13467,13468,13469,13470,4509,13471,13472,13473,13474, #13472 +13475,13476,13477,13478,13479,13480,13481,13482,13483,13484,13485,13486,13487,13488,13489,13490, #13488 +13491,13492,13493,13494,13495,13496,13497,13498,13499,13500,13501,13502,13503,13504,13505,13506, #13504 +13507,13508,13509,13510,13511,13512,13513,13514,13515,13516,13517,13518,13519,13520,13521,13522, #13520 +13523,13524,13525,13526,13527,13528,13529,13530,13531,13532,13533,13534,13535,13536,13537,13538, #13536 +13539,13540,13541,13542,13543,13544,13545,13546,13547,13548,13549,13550,13551,13552,13553,13554, #13552 +13555,13556,13557,13558,13559,13560,13561,13562,13563,13564,13565,13566,13567,13568,13569,13570, #13568 +13571,13572,13573,13574,13575,13576,13577,13578,13579,13580,13581,13582,13583,13584,13585,13586, #13584 +13587,13588,13589,13590,13591,13592,13593,13594,13595,13596,13597,13598,13599,13600,13601,13602, #13600 +13603,13604,13605,13606,13607,13608,13609,13610,13611,13612,13613,13614,13615,13616,13617,13618, #13616 +13619,13620,13621,13622,13623,13624,13625,13626,13627,13628,13629,13630,13631,13632,13633,13634, #13632 +13635,13636,13637,13638,13639,13640,13641,13642,5006,13643,13644,13645,13646,13647,13648,13649, #13648 +13650,13651,5007,13652,13653,13654,13655,13656,13657,13658,13659,13660,13661,13662,13663,13664, #13664 +13665,13666,13667,13668,13669,13670,13671,13672,13673,13674,13675,13676,13677,13678,13679,13680, #13680 +13681,13682,13683,13684,13685,13686,13687,13688,13689,13690,13691,13692,13693,13694,13695,13696, #13696 +13697,13698,13699,13700,13701,13702,13703,13704,13705,13706,13707,13708,13709,13710,13711,13712, #13712 
+13713,13714,13715,13716,13717,13718,13719,13720,13721,13722,13723,13724,13725,13726,13727,13728, #13728 +13729,13730,13731,13732,13733,13734,13735,13736,13737,13738,13739,13740,13741,13742,13743,13744, #13744 +13745,13746,13747,13748,13749,13750,13751,13752,13753,13754,13755,13756,13757,13758,13759,13760, #13760 +13761,13762,13763,13764,13765,13766,13767,13768,13769,13770,13771,13772,13773,13774,3273,13775, #13776 +13776,13777,13778,13779,13780,13781,13782,13783,13784,13785,13786,13787,13788,13789,13790,13791, #13792 +13792,13793,13794,13795,13796,13797,13798,13799,13800,13801,13802,13803,13804,13805,13806,13807, #13808 +13808,13809,13810,13811,13812,13813,13814,13815,13816,13817,13818,13819,13820,13821,13822,13823, #13824 +13824,13825,13826,13827,13828,13829,13830,13831,13832,13833,13834,13835,13836,13837,13838,13839, #13840 +13840,13841,13842,13843,13844,13845,13846,13847,13848,13849,13850,13851,13852,13853,13854,13855, #13856 +13856,13857,13858,13859,13860,13861,13862,13863,13864,13865,13866,13867,13868,13869,13870,13871, #13872 +13872,13873,13874,13875,13876,13877,13878,13879,13880,13881,13882,13883,13884,13885,13886,13887, #13888 +13888,13889,13890,13891,13892,13893,13894,13895,13896,13897,13898,13899,13900,13901,13902,13903, #13904 +13904,13905,13906,13907,13908,13909,13910,13911,13912,13913,13914,13915,13916,13917,13918,13919, #13920 +13920,13921,13922,13923,13924,13925,13926,13927,13928,13929,13930,13931,13932,13933,13934,13935, #13936 +13936,13937,13938,13939,13940,13941,13942,13943,13944,13945,13946,13947,13948,13949,13950,13951, #13952 +13952,13953,13954,13955,13956,13957,13958,13959,13960,13961,13962,13963,13964,13965,13966,13967, #13968 +13968,13969,13970,13971,13972) #13973 + +# flake8: noqa diff --git a/awx/lib/site-packages/requests/packages/charade/big5prober.py b/awx/lib/site-packages/requests/packages/charade/big5prober.py new file mode 100644 index 0000000000..7382f7c5d4 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/big5prober.py @@ -0,0 +1,42 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import Big5DistributionAnalysis +from .mbcssm import Big5SMModel + + +class Big5Prober(MultiByteCharSetProber): + def __init__(self): + MultiByteCharSetProber.__init__(self) + self._mCodingSM = CodingStateMachine(Big5SMModel) + self._mDistributionAnalyzer = Big5DistributionAnalysis() + self.reset() + + def get_charset_name(self): + return "Big5" diff --git a/awx/lib/site-packages/requests/packages/charade/chardistribution.py b/awx/lib/site-packages/requests/packages/charade/chardistribution.py new file mode 100644 index 0000000000..dfd3355e91 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/chardistribution.py @@ -0,0 +1,231 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .euctwfreq import (EUCTWCharToFreqOrder, EUCTW_TABLE_SIZE, + EUCTW_TYPICAL_DISTRIBUTION_RATIO) +from .euckrfreq import (EUCKRCharToFreqOrder, EUCKR_TABLE_SIZE, + EUCKR_TYPICAL_DISTRIBUTION_RATIO) +from .gb2312freq import (GB2312CharToFreqOrder, GB2312_TABLE_SIZE, + GB2312_TYPICAL_DISTRIBUTION_RATIO) +from .big5freq import (Big5CharToFreqOrder, BIG5_TABLE_SIZE, + BIG5_TYPICAL_DISTRIBUTION_RATIO) +from .jisfreq import (JISCharToFreqOrder, JIS_TABLE_SIZE, + JIS_TYPICAL_DISTRIBUTION_RATIO) +from .compat import wrap_ord + +ENOUGH_DATA_THRESHOLD = 1024 +SURE_YES = 0.99 +SURE_NO = 0.01 +MINIMUM_DATA_THRESHOLD = 3 + + +class CharDistributionAnalysis: + def __init__(self): + # Mapping table to get frequency order from char order (get from + # GetOrder()) + self._mCharToFreqOrder = None + self._mTableSize = None # Size of above table + # This is a constant value which varies from language to language, + # used in calculating confidence. See + # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html + # for further detail. 
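        # A worked example of that calculation (editorial sketch, not part
        # of this patch): get_confidence() below computes
        #     r = freq_chars / ((total_chars - freq_chars) * ratio)
        # With EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0, observing 900
        # high-frequency characters out of 1000 gives
        #     r = 900 / (100 * 6.0) = 1.5  ->  clamped to SURE_YES (0.99)
        # while a flatter sample of 600 out of 1000 gives
        #     r = 600 / (400 * 6.0) = 0.25.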
+ self._mTypicalDistributionRatio = None + self.reset() + + def reset(self): + """reset analyser, clear any state""" + # If this flag is set to True, detection is done and conclusion has + # been made + self._mDone = False + self._mTotalChars = 0 # Total characters encountered + # The number of characters whose frequency order is less than 512 + self._mFreqChars = 0 + + def feed(self, aBuf, aCharLen): + """feed a character with known length""" + if aCharLen == 2: + # we only care about 2-bytes character in our distribution analysis + order = self.get_order(aBuf) + else: + order = -1 + if order >= 0: + self._mTotalChars += 1 + # order is valid + if order < self._mTableSize: + if 512 > self._mCharToFreqOrder[order]: + self._mFreqChars += 1 + + def get_confidence(self): + """return confidence based on existing data""" + # if we didn't receive any character in our consideration range, + # return negative answer + if self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD: + return SURE_NO + + if self._mTotalChars != self._mFreqChars: + r = (self._mFreqChars / ((self._mTotalChars - self._mFreqChars) + * self._mTypicalDistributionRatio)) + if r < SURE_YES: + return r + + # normalize confidence (we don't want to be 100% sure) + return SURE_YES + + def got_enough_data(self): + # It is not necessary to receive all data to draw conclusion. + # For charset detection, certain amount of data is enough + return self._mTotalChars > ENOUGH_DATA_THRESHOLD + + def get_order(self, aBuf): + # We do not handle characters based on the original encoding string, + # but convert this encoding string to a number, here called order. + # This allows multiple encodings of a language to share one frequency + # table. + return -1 + + +class EUCTWDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + CharDistributionAnalysis.__init__(self) + self._mCharToFreqOrder = EUCTWCharToFreqOrder + self._mTableSize = EUCTW_TABLE_SIZE + self._mTypicalDistributionRatio = EUCTW_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, aBuf): + # for euc-TW encoding, we are interested + # first byte range: 0xc4 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char = wrap_ord(aBuf[0]) + if first_char >= 0xC4: + return 94 * (first_char - 0xC4) + wrap_ord(aBuf[1]) - 0xA1 + else: + return -1 + + +class EUCKRDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + CharDistributionAnalysis.__init__(self) + self._mCharToFreqOrder = EUCKRCharToFreqOrder + self._mTableSize = EUCKR_TABLE_SIZE + self._mTypicalDistributionRatio = EUCKR_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, aBuf): + # for euc-KR encoding, we are interested + # first byte range: 0xb0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char = wrap_ord(aBuf[0]) + if first_char >= 0xB0: + return 94 * (first_char - 0xB0) + wrap_ord(aBuf[1]) - 0xA1 + else: + return -1 + + +class GB2312DistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + CharDistributionAnalysis.__init__(self) + self._mCharToFreqOrder = GB2312CharToFreqOrder + self._mTableSize = GB2312_TABLE_SIZE + self._mTypicalDistributionRatio = GB2312_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, aBuf): + # for GB2312 encoding, we are interested + # first byte range: 0xb0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. 
State machine has done that + first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1]) + if (first_char >= 0xB0) and (second_char >= 0xA1): + return 94 * (first_char - 0xB0) + second_char - 0xA1 + else: + return -1 + + +class Big5DistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + CharDistributionAnalysis.__init__(self) + self._mCharToFreqOrder = Big5CharToFreqOrder + self._mTableSize = BIG5_TABLE_SIZE + self._mTypicalDistributionRatio = BIG5_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, aBuf): + # for big5 encoding, we are interested + # first byte range: 0xa4 -- 0xfe + # second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1]) + if first_char >= 0xA4: + if second_char >= 0xA1: + return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63 + else: + return 157 * (first_char - 0xA4) + second_char - 0x40 + else: + return -1 + + +class SJISDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + CharDistributionAnalysis.__init__(self) + self._mCharToFreqOrder = JISCharToFreqOrder + self._mTableSize = JIS_TABLE_SIZE + self._mTypicalDistributionRatio = JIS_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, aBuf): + # for sjis encoding, we are interested + # first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe + # second byte range: 0x40 -- 0x7e, 0x81 -- oxfe + # no validation needed here. State machine has done that + first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1]) + if (first_char >= 0x81) and (first_char <= 0x9F): + order = 188 * (first_char - 0x81) + elif (first_char >= 0xE0) and (first_char <= 0xEF): + order = 188 * (first_char - 0xE0 + 31) + else: + return -1 + order = order + second_char - 0x40 + if second_char > 0x7F: + order = -1 + return order + + +class EUCJPDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + CharDistributionAnalysis.__init__(self) + self._mCharToFreqOrder = JISCharToFreqOrder + self._mTableSize = JIS_TABLE_SIZE + self._mTypicalDistributionRatio = JIS_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, aBuf): + # for euc-JP encoding, we are interested + # first byte range: 0xa0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + char = wrap_ord(aBuf[0]) + if char >= 0xA0: + return 94 * (char - 0xA1) + wrap_ord(aBuf[1]) - 0xa1 + else: + return -1 diff --git a/awx/lib/site-packages/requests/packages/charade/charsetgroupprober.py b/awx/lib/site-packages/requests/packages/charade/charsetgroupprober.py new file mode 100644 index 0000000000..2959654748 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/charsetgroupprober.py @@ -0,0 +1,106 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
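# Editorial aside on the get_order() arithmetic above (a sketch based on
# the byte ranges stated in the comments, not part of this patch): each
# Big5 lead byte spans 157 cells -- 63 for trail bytes 0x40-0x7E plus 94
# for trail bytes 0xA1-0xFE.

def big5_order(first, second):
    # Mirrors Big5DistributionAnalysis.get_order() above.
    if first >= 0xA4:
        if second >= 0xA1:
            return 157 * (first - 0xA4) + second - 0xA1 + 63
        return 157 * (first - 0xA4) + second - 0x40
    return -1

assert big5_order(0xA4, 0x40) == 0    # first cell of the low trail range
assert big5_order(0xA4, 0xA1) == 63   # high trail range starts after 63 cells
assert big5_order(0xA5, 0x40) == 157  # next lead byte, one full row later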
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from . import constants +import sys +from .charsetprober import CharSetProber + + +class CharSetGroupProber(CharSetProber): + def __init__(self): + CharSetProber.__init__(self) + self._mActiveNum = 0 + self._mProbers = [] + self._mBestGuessProber = None + + def reset(self): + CharSetProber.reset(self) + self._mActiveNum = 0 + for prober in self._mProbers: + if prober: + prober.reset() + prober.active = True + self._mActiveNum += 1 + self._mBestGuessProber = None + + def get_charset_name(self): + if not self._mBestGuessProber: + self.get_confidence() + if not self._mBestGuessProber: + return None +# self._mBestGuessProber = self._mProbers[0] + return self._mBestGuessProber.get_charset_name() + + def feed(self, aBuf): + for prober in self._mProbers: + if not prober: + continue + if not prober.active: + continue + st = prober.feed(aBuf) + if not st: + continue + if st == constants.eFoundIt: + self._mBestGuessProber = prober + return self.get_state() + elif st == constants.eNotMe: + prober.active = False + self._mActiveNum -= 1 + if self._mActiveNum <= 0: + self._mState = constants.eNotMe + return self.get_state() + return self.get_state() + + def get_confidence(self): + st = self.get_state() + if st == constants.eFoundIt: + return 0.99 + elif st == constants.eNotMe: + return 0.01 + bestConf = 0.0 + self._mBestGuessProber = None + for prober in self._mProbers: + if not prober: + continue + if not prober.active: + if constants._debug: + sys.stderr.write(prober.get_charset_name() + + ' not active\n') + continue + cf = prober.get_confidence() + if constants._debug: + sys.stderr.write('%s confidence = %s\n' % + (prober.get_charset_name(), cf)) + if bestConf < cf: + bestConf = cf + self._mBestGuessProber = prober + if not self._mBestGuessProber: + return 0.0 + return bestConf +# else: +# self._mBestGuessProber = self._mProbers[0] +# return self._mBestGuessProber.get_confidence() diff --git a/awx/lib/site-packages/requests/packages/charade/charsetprober.py b/awx/lib/site-packages/requests/packages/charade/charsetprober.py new file mode 100644 index 0000000000..97581712c1 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/charsetprober.py @@ -0,0 +1,62 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
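# How the group prober above selects a winner, as a toy (editorial sketch;
# FixedProber is a made-up stand-in, real members are probers like
# Big5Prober):

class FixedProber:
    """Hypothetical prober that always reports one fixed confidence."""
    def __init__(self, name, confidence):
        self.active = True
        self._name = name
        self._confidence = confidence

    def get_charset_name(self):
        return self._name

    def get_confidence(self):
        return self._confidence

members = [FixedProber('Big5', 0.40), FixedProber('EUC-KR', 0.72)]
# CharSetGroupProber.get_confidence() keeps the highest-scoring active
# member as _mBestGuessProber; the same selection in one line:
best = max((p for p in members if p.active), key=lambda p: p.get_confidence())
assert best.get_charset_name() == 'EUC-KR'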
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from . import constants +import re + + +class CharSetProber: + def __init__(self): + pass + + def reset(self): + self._mState = constants.eDetecting + + def get_charset_name(self): + return None + + def feed(self, aBuf): + pass + + def get_state(self): + return self._mState + + def get_confidence(self): + return 0.0 + + def filter_high_bit_only(self, aBuf): + aBuf = re.sub(b'([\x00-\x7F])+', b' ', aBuf) + return aBuf + + def filter_without_english_letters(self, aBuf): + aBuf = re.sub(b'([A-Za-z])+', b' ', aBuf) + return aBuf + + def filter_with_english_letters(self, aBuf): + # TODO + return aBuf diff --git a/awx/lib/site-packages/requests/packages/charade/codingstatemachine.py b/awx/lib/site-packages/requests/packages/charade/codingstatemachine.py new file mode 100644 index 0000000000..1bda9ff162 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/codingstatemachine.py @@ -0,0 +1,61 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
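# What CharSetProber.filter_high_bit_only() above does to a buffer
# (editorial sketch, not part of this patch): runs of ASCII bytes collapse
# to single spaces, leaving only high-bit sequences for analysis.

import re

buf = b'abc \xa4\xa1 def \xb0\xb5'
assert re.sub(b'([\x00-\x7F])+', b' ', buf) == b' \xa4\xa1 \xb0\xb5'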
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .constants import eStart +from .compat import wrap_ord + + +class CodingStateMachine: + def __init__(self, sm): + self._mModel = sm + self._mCurrentBytePos = 0 + self._mCurrentCharLen = 0 + self.reset() + + def reset(self): + self._mCurrentState = eStart + + def next_state(self, c): + # for each byte we get its class + # if it is first byte, we also get byte length + # PY3K: aBuf is a byte stream, so c is an int, not a byte + byteCls = self._mModel['classTable'][wrap_ord(c)] + if self._mCurrentState == eStart: + self._mCurrentBytePos = 0 + self._mCurrentCharLen = self._mModel['charLenTable'][byteCls] + # from byte's class and stateTable, we get its next state + curr_state = (self._mCurrentState * self._mModel['classFactor'] + + byteCls) + self._mCurrentState = self._mModel['stateTable'][curr_state] + self._mCurrentBytePos += 1 + return self._mCurrentState + + def get_current_charlen(self): + return self._mCurrentCharLen + + def get_coding_state_machine(self): + return self._mModel['name'] diff --git a/awx/lib/site-packages/requests/packages/charade/compat.py b/awx/lib/site-packages/requests/packages/charade/compat.py new file mode 100644 index 0000000000..d9e30addf9 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/compat.py @@ -0,0 +1,34 @@ +######################## BEGIN LICENSE BLOCK ######################## +# Contributor(s): +# Ian Cordasco - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import sys + + +if sys.version_info < (3, 0): + base_str = (str, unicode) +else: + base_str = (bytes, str) + + +def wrap_ord(a): + if sys.version_info < (3, 0) and isinstance(a, base_str): + return ord(a) + else: + return a diff --git a/awx/lib/site-packages/requests/packages/charade/constants.py b/awx/lib/site-packages/requests/packages/charade/constants.py new file mode 100644 index 0000000000..a3d27de250 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/constants.py @@ -0,0 +1,39 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. 
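# Why compat.wrap_ord() above exists (editorial note, not part of this
# patch): indexing a byte string differs across Python versions --
#     Python 2: b'\xa4'[0] -> '\xa4' (a str), so ord() is needed
#     Python 3: b'\xa4'[0] -> 164   (already an int)
# wrap_ord() returns 164 in both cases, which is why the state machines
# and distribution analyzers index their tables through it.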
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +_debug = 0 + +eDetecting = 0 +eFoundIt = 1 +eNotMe = 2 + +eStart = 0 +eError = 1 +eItsMe = 2 + +SHORTCUT_THRESHOLD = 0.95 diff --git a/awx/lib/site-packages/requests/packages/charade/cp949prober.py b/awx/lib/site-packages/requests/packages/charade/cp949prober.py new file mode 100644 index 0000000000..543501fe09 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/cp949prober.py @@ -0,0 +1,44 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCKRDistributionAnalysis +from .mbcssm import CP949SMModel + + +class CP949Prober(MultiByteCharSetProber): + def __init__(self): + MultiByteCharSetProber.__init__(self) + self._mCodingSM = CodingStateMachine(CP949SMModel) + # NOTE: CP949 is a superset of EUC-KR, so the distribution should be + # not different. + self._mDistributionAnalyzer = EUCKRDistributionAnalysis() + self.reset() + + def get_charset_name(self): + return "CP949" diff --git a/awx/lib/site-packages/requests/packages/charade/escprober.py b/awx/lib/site-packages/requests/packages/charade/escprober.py new file mode 100644 index 0000000000..0063935ce6 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/escprober.py @@ -0,0 +1,86 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. 
+# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from . import constants +from .escsm import (HZSMModel, ISO2022CNSMModel, ISO2022JPSMModel, + ISO2022KRSMModel) +from .charsetprober import CharSetProber +from .codingstatemachine import CodingStateMachine +from .compat import wrap_ord + + +class EscCharSetProber(CharSetProber): + def __init__(self): + CharSetProber.__init__(self) + self._mCodingSM = [ + CodingStateMachine(HZSMModel), + CodingStateMachine(ISO2022CNSMModel), + CodingStateMachine(ISO2022JPSMModel), + CodingStateMachine(ISO2022KRSMModel) + ] + self.reset() + + def reset(self): + CharSetProber.reset(self) + for codingSM in self._mCodingSM: + if not codingSM: + continue + codingSM.active = True + codingSM.reset() + self._mActiveSM = len(self._mCodingSM) + self._mDetectedCharset = None + + def get_charset_name(self): + return self._mDetectedCharset + + def get_confidence(self): + if self._mDetectedCharset: + return 0.99 + else: + return 0.00 + + def feed(self, aBuf): + for c in aBuf: + # PY3K: aBuf is a byte array, so c is an int, not a byte + for codingSM in self._mCodingSM: + if not codingSM: + continue + if not codingSM.active: + continue + codingState = codingSM.next_state(wrap_ord(c)) + if codingState == constants.eError: + codingSM.active = False + self._mActiveSM -= 1 + if self._mActiveSM <= 0: + self._mState = constants.eNotMe + return self.get_state() + elif codingState == constants.eItsMe: + self._mState = constants.eFoundIt + self._mDetectedCharset = codingSM.get_coding_state_machine() # nopep8 + return self.get_state() + + return self.get_state() diff --git a/awx/lib/site-packages/requests/packages/charade/escsm.py b/awx/lib/site-packages/requests/packages/charade/escsm.py new file mode 100644 index 0000000000..1cf3aa6db6 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/escsm.py @@ -0,0 +1,242 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
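# What the feed() loop above keys on (editorial note, not part of this
# patch): ISO-2022 encodings announce themselves with escape sequences,
# e.g. b'\x1b$B' designates JIS X 0208 in ISO-2022-JP. Feeding bytes that
# start with that sequence drives the ISO2022JP state machine (defined
# below) to eItsMe, so the prober records that machine's model name and
# get_charset_name() returns 'ISO-2022-JP'.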
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .constants import eStart, eError, eItsMe + +HZ_cls = ( +1,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,0,0,0,0, # 20 - 27 +0,0,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,0,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,4,0,5,2,0, # 78 - 7f +1,1,1,1,1,1,1,1, # 80 - 87 +1,1,1,1,1,1,1,1, # 88 - 8f +1,1,1,1,1,1,1,1, # 90 - 97 +1,1,1,1,1,1,1,1, # 98 - 9f +1,1,1,1,1,1,1,1, # a0 - a7 +1,1,1,1,1,1,1,1, # a8 - af +1,1,1,1,1,1,1,1, # b0 - b7 +1,1,1,1,1,1,1,1, # b8 - bf +1,1,1,1,1,1,1,1, # c0 - c7 +1,1,1,1,1,1,1,1, # c8 - cf +1,1,1,1,1,1,1,1, # d0 - d7 +1,1,1,1,1,1,1,1, # d8 - df +1,1,1,1,1,1,1,1, # e0 - e7 +1,1,1,1,1,1,1,1, # e8 - ef +1,1,1,1,1,1,1,1, # f0 - f7 +1,1,1,1,1,1,1,1, # f8 - ff +) + +HZ_st = ( +eStart,eError, 3,eStart,eStart,eStart,eError,eError,# 00-07 +eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 08-0f +eItsMe,eItsMe,eError,eError,eStart,eStart, 4,eError,# 10-17 + 5,eError, 6,eError, 5, 5, 4,eError,# 18-1f + 4,eError, 4, 4, 4,eError, 4,eError,# 20-27 + 4,eItsMe,eStart,eStart,eStart,eStart,eStart,eStart,# 28-2f +) + +HZCharLenTable = (0, 0, 0, 0, 0, 0) + +HZSMModel = {'classTable': HZ_cls, + 'classFactor': 6, + 'stateTable': HZ_st, + 'charLenTable': HZCharLenTable, + 'name': "HZ-GB-2312"} + +ISO2022CN_cls = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,0,0,0,0, # 20 - 27 +0,3,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,4,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f +2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022CN_st = ( +eStart, 3,eError,eStart,eStart,eStart,eStart,eStart,# 00-07 +eStart,eError,eError,eError,eError,eError,eError,eError,# 08-0f +eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,# 10-17 +eItsMe,eItsMe,eItsMe,eError,eError,eError, 4,eError,# 18-1f +eError,eError,eError,eItsMe,eError,eError,eError,eError,# 20-27 + 5, 6,eError,eError,eError,eError,eError,eError,# 28-2f +eError,eError,eError,eItsMe,eError,eError,eError,eError,# 30-37 +eError,eError,eError,eError,eError,eItsMe,eError,eStart,# 38-3f +) + +ISO2022CNCharLenTable = (0, 0, 0, 0, 0, 0, 0, 0, 0) + 
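# A hand-trace of how CodingStateMachine.next_state() walks these tables
# (editorial sketch, not part of this patch; assumes Python 3, where
# iterating bytes yields ints, and the HZ tables plus eStart defined
# above). HZ-GB-2312 enters GB mode with the two ASCII bytes b'~{':

state = eStart
for byte in b'~{':
    byte_class = HZ_cls[byte]              # '~' -> class 2, '{' -> class 4
    state = HZ_st[state * 6 + byte_class]  # classFactor for HZ is 6
assert state == 4  # an intermediate GB-mode state, not eError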
+ISO2022CNSMModel = {'classTable': ISO2022CN_cls, + 'classFactor': 9, + 'stateTable': ISO2022CN_st, + 'charLenTable': ISO2022CNCharLenTable, + 'name': "ISO-2022-CN"} + +ISO2022JP_cls = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,2,2, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,7,0,0,0, # 20 - 27 +3,0,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +6,0,4,0,8,0,0,0, # 40 - 47 +0,9,5,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f +2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022JP_st = ( +eStart, 3,eError,eStart,eStart,eStart,eStart,eStart,# 00-07 +eStart,eStart,eError,eError,eError,eError,eError,eError,# 08-0f +eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 10-17 +eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,# 18-1f +eError, 5,eError,eError,eError, 4,eError,eError,# 20-27 +eError,eError,eError, 6,eItsMe,eError,eItsMe,eError,# 28-2f +eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,# 30-37 +eError,eError,eError,eItsMe,eError,eError,eError,eError,# 38-3f +eError,eError,eError,eError,eItsMe,eError,eStart,eStart,# 40-47 +) + +ISO2022JPCharLenTable = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0) + +ISO2022JPSMModel = {'classTable': ISO2022JP_cls, + 'classFactor': 10, + 'stateTable': ISO2022JP_st, + 'charLenTable': ISO2022JPCharLenTable, + 'name': "ISO-2022-JP"} + +ISO2022KR_cls = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,3,0,0,0, # 20 - 27 +0,4,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,5,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f +2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022KR_st = ( +eStart, 3,eError,eStart,eStart,eStart,eError,eError,# 00-07 +eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 08-0f +eItsMe,eItsMe,eError,eError,eError, 4,eError,eError,# 10-17 +eError,eError,eError,eError, 5,eError,eError,eError,# 18-1f +eError,eError,eError,eItsMe,eStart,eStart,eStart,eStart,# 20-27 +) + +ISO2022KRCharLenTable = (0, 0, 0, 0, 0, 0) + +ISO2022KRSMModel = {'classTable': ISO2022KR_cls, + 'classFactor': 6, + 'stateTable': ISO2022KR_st, + 'charLenTable': ISO2022KRCharLenTable, + 'name': "ISO-2022-KR"} + +# flake8: noqa diff --git a/awx/lib/site-packages/requests/packages/charade/eucjpprober.py b/awx/lib/site-packages/requests/packages/charade/eucjpprober.py new file mode 100644 
index 0000000000..d70cfbbb01 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/eucjpprober.py @@ -0,0 +1,90 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import sys +from . import constants +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCJPDistributionAnalysis +from .jpcntx import EUCJPContextAnalysis +from .mbcssm import EUCJPSMModel + + +class EUCJPProber(MultiByteCharSetProber): + def __init__(self): + MultiByteCharSetProber.__init__(self) + self._mCodingSM = CodingStateMachine(EUCJPSMModel) + self._mDistributionAnalyzer = EUCJPDistributionAnalysis() + self._mContextAnalyzer = EUCJPContextAnalysis() + self.reset() + + def reset(self): + MultiByteCharSetProber.reset(self) + self._mContextAnalyzer.reset() + + def get_charset_name(self): + return "EUC-JP" + + def feed(self, aBuf): + aLen = len(aBuf) + for i in range(0, aLen): + # PY3K: aBuf is a byte array, so aBuf[i] is an int, not a byte + codingState = self._mCodingSM.next_state(aBuf[i]) + if codingState == constants.eError: + if constants._debug: + sys.stderr.write(self.get_charset_name() + + ' prober hit error at byte ' + str(i) + + '\n') + self._mState = constants.eNotMe + break + elif codingState == constants.eItsMe: + self._mState = constants.eFoundIt + break + elif codingState == constants.eStart: + charLen = self._mCodingSM.get_current_charlen() + if i == 0: + self._mLastChar[1] = aBuf[0] + self._mContextAnalyzer.feed(self._mLastChar, charLen) + self._mDistributionAnalyzer.feed(self._mLastChar, charLen) + else: + self._mContextAnalyzer.feed(aBuf[i - 1:i + 1], charLen) + self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1], + charLen) + + self._mLastChar[0] = aBuf[aLen - 1] + + if self.get_state() == constants.eDetecting: + if (self._mContextAnalyzer.got_enough_data() and + (self.get_confidence() > constants.SHORTCUT_THRESHOLD)): + self._mState = constants.eFoundIt + + return self.get_state() + + def get_confidence(self): + contxtCf = self._mContextAnalyzer.get_confidence() + distribCf = self._mDistributionAnalyzer.get_confidence() + return max(contxtCf, distribCf) diff --git a/awx/lib/site-packages/requests/packages/charade/euckrfreq.py b/awx/lib/site-packages/requests/packages/charade/euckrfreq.py new file mode 100644 index 0000000000..a179e4c21c --- /dev/null +++ 
b/awx/lib/site-packages/requests/packages/charade/euckrfreq.py @@ -0,0 +1,596 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# Sampling from about 20M text materials include literature and computer technology + +# 128 --> 0.79 +# 256 --> 0.92 +# 512 --> 0.986 +# 1024 --> 0.99944 +# 2048 --> 0.99999 +# +# Idea Distribution Ratio = 0.98653 / (1-0.98653) = 73.24 +# Random Distribution Ration = 512 / (2350-512) = 0.279. +# +# Typical Distribution Ratio + +EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0 + +EUCKR_TABLE_SIZE = 2352 + +# Char to FreqOrder table , +EUCKRCharToFreqOrder = ( \ + 13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87, +1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398, +1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734, + 945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739, + 116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622, + 708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750, +1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856, + 344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205, + 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779, +1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19, +1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567, +1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797, +1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802, +1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899, + 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818, +1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409, +1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697, +1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770, +1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723, + 544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416, +1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300, + 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083, + 
893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857, +1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871, + 282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420, +1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885, + 127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889, + 0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893, +1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317, +1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841, +1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910, +1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610, + 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375, +1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939, + 887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870, + 217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934, +1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888, +1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950, +1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065, +1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002, +1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965, +1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467, + 50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285, + 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7, + 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979, +1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985, + 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994, +1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250, + 423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824, + 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003, +2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745, + 619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61, + 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023, +2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032, +2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912, +2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224, + 719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012, + 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050, +2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681, + 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414, +1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068, +2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075, +1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850, +2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606, +2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449, +1450,2100, 
596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452, + 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112, +2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121, +2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130, + 22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274, + 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139, +2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721, +1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298, +2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463, +2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747, +2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285, +2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187, +2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10, +2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350, +1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201, +2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972, +2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219, +2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233, +2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242, +2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247, +1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178, +1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255, +2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259, +1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262, +2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702, +1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273, + 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541, +2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117, + 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187, +2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800, + 808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312, +2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229, +2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315, + 501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484, +2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170, +1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335, + 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601, +1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395, +2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354, +1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476, +2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035, + 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498, +2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310, +1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389, 
+2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504, +1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505, +2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145, +1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624, + 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700, +2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221, +2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377, + 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448, + 915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485, +1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705, +1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465, + 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471, +2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997, +2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486, + 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494, + 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771, + 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323, +2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491, + 95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510, + 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519, +2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532, +2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199, + 704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544, +2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247, +1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441, + 249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562, +2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362, +2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583, +2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465, + 3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431, + 202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151, + 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596, +2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406, +2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611, +2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619, +1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628, +2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042, + 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256 +#Everything below is of no interest for detection purpose +2643,2644,2645,2646,2647,2648,2649,2650,2651,2652,2653,2654,2655,2656,2657,2658, +2659,2660,2661,2662,2663,2664,2665,2666,2667,2668,2669,2670,2671,2672,2673,2674, +2675,2676,2677,2678,2679,2680,2681,2682,2683,2684,2685,2686,2687,2688,2689,2690, +2691,2692,2693,2694,2695,2696,2697,2698,2699,1542, 880,2700,2701,2702,2703,2704, 
+2705,2706,2707,2708,2709,2710,2711,2712,2713,2714,2715,2716,2717,2718,2719,2720, +2721,2722,2723,2724,2725,1543,2726,2727,2728,2729,2730,2731,2732,1544,2733,2734, +2735,2736,2737,2738,2739,2740,2741,2742,2743,2744,2745,2746,2747,2748,2749,2750, +2751,2752,2753,2754,1545,2755,2756,2757,2758,2759,2760,2761,2762,2763,2764,2765, +2766,1546,2767,1547,2768,2769,2770,2771,2772,2773,2774,2775,2776,2777,2778,2779, +2780,2781,2782,2783,2784,2785,2786,1548,2787,2788,2789,1109,2790,2791,2792,2793, +2794,2795,2796,2797,2798,2799,2800,2801,2802,2803,2804,2805,2806,2807,2808,2809, +2810,2811,2812,1329,2813,2814,2815,2816,2817,2818,2819,2820,2821,2822,2823,2824, +2825,2826,2827,2828,2829,2830,2831,2832,2833,2834,2835,2836,2837,2838,2839,2840, +2841,2842,2843,2844,2845,2846,2847,2848,2849,2850,2851,2852,2853,2854,2855,2856, +1549,2857,2858,2859,2860,1550,2861,2862,1551,2863,2864,2865,2866,2867,2868,2869, +2870,2871,2872,2873,2874,1110,1330,2875,2876,2877,2878,2879,2880,2881,2882,2883, +2884,2885,2886,2887,2888,2889,2890,2891,2892,2893,2894,2895,2896,2897,2898,2899, +2900,2901,2902,2903,2904,2905,2906,2907,2908,2909,2910,2911,2912,2913,2914,2915, +2916,2917,2918,2919,2920,2921,2922,2923,2924,2925,2926,2927,2928,2929,2930,1331, +2931,2932,2933,2934,2935,2936,2937,2938,2939,2940,2941,2942,2943,1552,2944,2945, +2946,2947,2948,2949,2950,2951,2952,2953,2954,2955,2956,2957,2958,2959,2960,2961, +2962,2963,2964,1252,2965,2966,2967,2968,2969,2970,2971,2972,2973,2974,2975,2976, +2977,2978,2979,2980,2981,2982,2983,2984,2985,2986,2987,2988,2989,2990,2991,2992, +2993,2994,2995,2996,2997,2998,2999,3000,3001,3002,3003,3004,3005,3006,3007,3008, +3009,3010,3011,3012,1553,3013,3014,3015,3016,3017,1554,3018,1332,3019,3020,3021, +3022,3023,3024,3025,3026,3027,3028,3029,3030,3031,3032,3033,3034,3035,3036,3037, +3038,3039,3040,3041,3042,3043,3044,3045,3046,3047,3048,3049,3050,1555,3051,3052, +3053,1556,1557,3054,3055,3056,3057,3058,3059,3060,3061,3062,3063,3064,3065,3066, +3067,1558,3068,3069,3070,3071,3072,3073,3074,3075,3076,1559,3077,3078,3079,3080, +3081,3082,3083,1253,3084,3085,3086,3087,3088,3089,3090,3091,3092,3093,3094,3095, +3096,3097,3098,3099,3100,3101,3102,3103,3104,3105,3106,3107,3108,1152,3109,3110, +3111,3112,3113,1560,3114,3115,3116,3117,1111,3118,3119,3120,3121,3122,3123,3124, +3125,3126,3127,3128,3129,3130,3131,3132,3133,3134,3135,3136,3137,3138,3139,3140, +3141,3142,3143,3144,3145,3146,3147,3148,3149,3150,3151,3152,3153,3154,3155,3156, +3157,3158,3159,3160,3161,3162,3163,3164,3165,3166,3167,3168,3169,3170,3171,3172, +3173,3174,3175,3176,1333,3177,3178,3179,3180,3181,3182,3183,3184,3185,3186,3187, +3188,3189,1561,3190,3191,1334,3192,3193,3194,3195,3196,3197,3198,3199,3200,3201, +3202,3203,3204,3205,3206,3207,3208,3209,3210,3211,3212,3213,3214,3215,3216,3217, +3218,3219,3220,3221,3222,3223,3224,3225,3226,3227,3228,3229,3230,3231,3232,3233, +3234,1562,3235,3236,3237,3238,3239,3240,3241,3242,3243,3244,3245,3246,3247,3248, +3249,3250,3251,3252,3253,3254,3255,3256,3257,3258,3259,3260,3261,3262,3263,3264, +3265,3266,3267,3268,3269,3270,3271,3272,3273,3274,3275,3276,3277,1563,3278,3279, +3280,3281,3282,3283,3284,3285,3286,3287,3288,3289,3290,3291,3292,3293,3294,3295, +3296,3297,3298,3299,3300,3301,3302,3303,3304,3305,3306,3307,3308,3309,3310,3311, +3312,3313,3314,3315,3316,3317,3318,3319,3320,3321,3322,3323,3324,3325,3326,3327, +3328,3329,3330,3331,3332,3333,3334,3335,3336,3337,3338,3339,3340,3341,3342,3343, +3344,3345,3346,3347,3348,3349,3350,3351,3352,3353,3354,3355,3356,3357,3358,3359, 
+3360,3361,3362,3363,3364,1335,3365,3366,3367,3368,3369,3370,3371,3372,3373,3374, +3375,3376,3377,3378,3379,3380,3381,3382,3383,3384,3385,3386,3387,1336,3388,3389, +3390,3391,3392,3393,3394,3395,3396,3397,3398,3399,3400,3401,3402,3403,3404,3405, +3406,3407,3408,3409,3410,3411,3412,3413,3414,1337,3415,3416,3417,3418,3419,1338, +3420,3421,3422,1564,1565,3423,3424,3425,3426,3427,3428,3429,3430,3431,1254,3432, +3433,3434,1339,3435,3436,3437,3438,3439,1566,3440,3441,3442,3443,3444,3445,3446, +3447,3448,3449,3450,3451,3452,3453,3454,1255,3455,3456,3457,3458,3459,1567,1191, +3460,1568,1569,3461,3462,3463,1570,3464,3465,3466,3467,3468,1571,3469,3470,3471, +3472,3473,1572,3474,3475,3476,3477,3478,3479,3480,3481,3482,3483,3484,3485,3486, +1340,3487,3488,3489,3490,3491,3492,1021,3493,3494,3495,3496,3497,3498,1573,3499, +1341,3500,3501,3502,3503,3504,3505,3506,3507,3508,3509,3510,3511,1342,3512,3513, +3514,3515,3516,1574,1343,3517,3518,3519,1575,3520,1576,3521,3522,3523,3524,3525, +3526,3527,3528,3529,3530,3531,3532,3533,3534,3535,3536,3537,3538,3539,3540,3541, +3542,3543,3544,3545,3546,3547,3548,3549,3550,3551,3552,3553,3554,3555,3556,3557, +3558,3559,3560,3561,3562,3563,3564,3565,3566,3567,3568,3569,3570,3571,3572,3573, +3574,3575,3576,3577,3578,3579,3580,1577,3581,3582,1578,3583,3584,3585,3586,3587, +3588,3589,3590,3591,3592,3593,3594,3595,3596,3597,3598,3599,3600,3601,3602,3603, +3604,1579,3605,3606,3607,3608,3609,3610,3611,3612,3613,3614,3615,3616,3617,3618, +3619,3620,3621,3622,3623,3624,3625,3626,3627,3628,3629,1580,3630,3631,1581,3632, +3633,3634,3635,3636,3637,3638,3639,3640,3641,3642,3643,3644,3645,3646,3647,3648, +3649,3650,3651,3652,3653,3654,3655,3656,1582,3657,3658,3659,3660,3661,3662,3663, +3664,3665,3666,3667,3668,3669,3670,3671,3672,3673,3674,3675,3676,3677,3678,3679, +3680,3681,3682,3683,3684,3685,3686,3687,3688,3689,3690,3691,3692,3693,3694,3695, +3696,3697,3698,3699,3700,1192,3701,3702,3703,3704,1256,3705,3706,3707,3708,1583, +1257,3709,3710,3711,3712,3713,3714,3715,3716,1584,3717,3718,3719,3720,3721,3722, +3723,3724,3725,3726,3727,3728,3729,3730,3731,3732,3733,3734,3735,3736,3737,3738, +3739,3740,3741,3742,3743,3744,3745,1344,3746,3747,3748,3749,3750,3751,3752,3753, +3754,3755,3756,1585,3757,3758,3759,3760,3761,3762,3763,3764,3765,3766,1586,3767, +3768,3769,3770,3771,3772,3773,3774,3775,3776,3777,3778,1345,3779,3780,3781,3782, +3783,3784,3785,3786,3787,3788,3789,3790,3791,3792,3793,3794,3795,1346,1587,3796, +3797,1588,3798,3799,3800,3801,3802,3803,3804,3805,3806,1347,3807,3808,3809,3810, +3811,1589,3812,3813,3814,3815,3816,3817,3818,3819,3820,3821,1590,3822,3823,1591, +1348,3824,3825,3826,3827,3828,3829,3830,1592,3831,3832,1593,3833,3834,3835,3836, +3837,3838,3839,3840,3841,3842,3843,3844,1349,3845,3846,3847,3848,3849,3850,3851, +3852,3853,3854,3855,3856,3857,3858,1594,3859,3860,3861,3862,3863,3864,3865,3866, +3867,3868,3869,1595,3870,3871,3872,3873,1596,3874,3875,3876,3877,3878,3879,3880, +3881,3882,3883,3884,3885,3886,1597,3887,3888,3889,3890,3891,3892,3893,3894,3895, +1598,3896,3897,3898,1599,1600,3899,1350,3900,1351,3901,3902,1352,3903,3904,3905, +3906,3907,3908,3909,3910,3911,3912,3913,3914,3915,3916,3917,3918,3919,3920,3921, +3922,3923,3924,1258,3925,3926,3927,3928,3929,3930,3931,1193,3932,1601,3933,3934, +3935,3936,3937,3938,3939,3940,3941,3942,3943,1602,3944,3945,3946,3947,3948,1603, +3949,3950,3951,3952,3953,3954,3955,3956,3957,3958,3959,3960,3961,3962,3963,3964, +3965,1604,3966,3967,3968,3969,3970,3971,3972,3973,3974,3975,3976,3977,1353,3978, 
+3979,3980,3981,3982,3983,3984,3985,3986,3987,3988,3989,3990,3991,1354,3992,3993, +3994,3995,3996,3997,3998,3999,4000,4001,4002,4003,4004,4005,4006,4007,4008,4009, +4010,4011,4012,4013,4014,4015,4016,4017,4018,4019,4020,4021,4022,4023,1355,4024, +4025,4026,4027,4028,4029,4030,4031,4032,4033,4034,4035,4036,4037,4038,4039,4040, +1605,4041,4042,4043,4044,4045,4046,4047,4048,4049,4050,4051,4052,4053,4054,4055, +4056,4057,4058,4059,4060,1606,4061,4062,4063,4064,1607,4065,4066,4067,4068,4069, +4070,4071,4072,4073,4074,4075,4076,1194,4077,4078,1608,4079,4080,4081,4082,4083, +4084,4085,4086,4087,1609,4088,4089,4090,4091,4092,4093,4094,4095,4096,4097,4098, +4099,4100,4101,4102,4103,4104,4105,4106,4107,4108,1259,4109,4110,4111,4112,4113, +4114,4115,4116,4117,4118,4119,4120,4121,4122,4123,4124,1195,4125,4126,4127,1610, +4128,4129,4130,4131,4132,4133,4134,4135,4136,4137,1356,4138,4139,4140,4141,4142, +4143,4144,1611,4145,4146,4147,4148,4149,4150,4151,4152,4153,4154,4155,4156,4157, +4158,4159,4160,4161,4162,4163,4164,4165,4166,4167,4168,4169,4170,4171,4172,4173, +4174,4175,4176,4177,4178,4179,4180,4181,4182,4183,4184,4185,4186,4187,4188,4189, +4190,4191,4192,4193,4194,4195,4196,4197,4198,4199,4200,4201,4202,4203,4204,4205, +4206,4207,4208,4209,4210,4211,4212,4213,4214,4215,4216,4217,4218,4219,1612,4220, +4221,4222,4223,4224,4225,4226,4227,1357,4228,1613,4229,4230,4231,4232,4233,4234, +4235,4236,4237,4238,4239,4240,4241,4242,4243,1614,4244,4245,4246,4247,4248,4249, +4250,4251,4252,4253,4254,4255,4256,4257,4258,4259,4260,4261,4262,4263,4264,4265, +4266,4267,4268,4269,4270,1196,1358,4271,4272,4273,4274,4275,4276,4277,4278,4279, +4280,4281,4282,4283,4284,4285,4286,4287,1615,4288,4289,4290,4291,4292,4293,4294, +4295,4296,4297,4298,4299,4300,4301,4302,4303,4304,4305,4306,4307,4308,4309,4310, +4311,4312,4313,4314,4315,4316,4317,4318,4319,4320,4321,4322,4323,4324,4325,4326, +4327,4328,4329,4330,4331,4332,4333,4334,1616,4335,4336,4337,4338,4339,4340,4341, +4342,4343,4344,4345,4346,4347,4348,4349,4350,4351,4352,4353,4354,4355,4356,4357, +4358,4359,4360,1617,4361,4362,4363,4364,4365,1618,4366,4367,4368,4369,4370,4371, +4372,4373,4374,4375,4376,4377,4378,4379,4380,4381,4382,4383,4384,4385,4386,4387, +4388,4389,4390,4391,4392,4393,4394,4395,4396,4397,4398,4399,4400,4401,4402,4403, +4404,4405,4406,4407,4408,4409,4410,4411,4412,4413,4414,4415,4416,1619,4417,4418, +4419,4420,4421,4422,4423,4424,4425,1112,4426,4427,4428,4429,4430,1620,4431,4432, +4433,4434,4435,4436,4437,4438,4439,4440,4441,4442,1260,1261,4443,4444,4445,4446, +4447,4448,4449,4450,4451,4452,4453,4454,4455,1359,4456,4457,4458,4459,4460,4461, +4462,4463,4464,4465,1621,4466,4467,4468,4469,4470,4471,4472,4473,4474,4475,4476, +4477,4478,4479,4480,4481,4482,4483,4484,4485,4486,4487,4488,4489,1055,4490,4491, +4492,4493,4494,4495,4496,4497,4498,4499,4500,4501,4502,4503,4504,4505,4506,4507, +4508,4509,4510,4511,4512,4513,4514,4515,4516,4517,4518,1622,4519,4520,4521,1623, +4522,4523,4524,4525,4526,4527,4528,4529,4530,4531,4532,4533,4534,4535,1360,4536, +4537,4538,4539,4540,4541,4542,4543, 975,4544,4545,4546,4547,4548,4549,4550,4551, +4552,4553,4554,4555,4556,4557,4558,4559,4560,4561,4562,4563,4564,4565,4566,4567, +4568,4569,4570,4571,1624,4572,4573,4574,4575,4576,1625,4577,4578,4579,4580,4581, +4582,4583,4584,1626,4585,4586,4587,4588,4589,4590,4591,4592,4593,4594,4595,1627, +4596,4597,4598,4599,4600,4601,4602,4603,4604,4605,4606,4607,4608,4609,4610,4611, +4612,4613,4614,4615,1628,4616,4617,4618,4619,4620,4621,4622,4623,4624,4625,4626, 
+4627,4628,4629,4630,4631,4632,4633,4634,4635,4636,4637,4638,4639,4640,4641,4642, +4643,4644,4645,4646,4647,4648,4649,1361,4650,4651,4652,4653,4654,4655,4656,4657, +4658,4659,4660,4661,1362,4662,4663,4664,4665,4666,4667,4668,4669,4670,4671,4672, +4673,4674,4675,4676,4677,4678,4679,4680,4681,4682,1629,4683,4684,4685,4686,4687, +1630,4688,4689,4690,4691,1153,4692,4693,4694,1113,4695,4696,4697,4698,4699,4700, +4701,4702,4703,4704,4705,4706,4707,4708,4709,4710,4711,1197,4712,4713,4714,4715, +4716,4717,4718,4719,4720,4721,4722,4723,4724,4725,4726,4727,4728,4729,4730,4731, +4732,4733,4734,4735,1631,4736,1632,4737,4738,4739,4740,4741,4742,4743,4744,1633, +4745,4746,4747,4748,4749,1262,4750,4751,4752,4753,4754,1363,4755,4756,4757,4758, +4759,4760,4761,4762,4763,4764,4765,4766,4767,4768,1634,4769,4770,4771,4772,4773, +4774,4775,4776,4777,4778,1635,4779,4780,4781,4782,4783,4784,4785,4786,4787,4788, +4789,1636,4790,4791,4792,4793,4794,4795,4796,4797,4798,4799,4800,4801,4802,4803, +4804,4805,4806,1637,4807,4808,4809,1638,4810,4811,4812,4813,4814,4815,4816,4817, +4818,1639,4819,4820,4821,4822,4823,4824,4825,4826,4827,4828,4829,4830,4831,4832, +4833,1077,4834,4835,4836,4837,4838,4839,4840,4841,4842,4843,4844,4845,4846,4847, +4848,4849,4850,4851,4852,4853,4854,4855,4856,4857,4858,4859,4860,4861,4862,4863, +4864,4865,4866,4867,4868,4869,4870,4871,4872,4873,4874,4875,4876,4877,4878,4879, +4880,4881,4882,4883,1640,4884,4885,1641,4886,4887,4888,4889,4890,4891,4892,4893, +4894,4895,4896,4897,4898,4899,4900,4901,4902,4903,4904,4905,4906,4907,4908,4909, +4910,4911,1642,4912,4913,4914,1364,4915,4916,4917,4918,4919,4920,4921,4922,4923, +4924,4925,4926,4927,4928,4929,4930,4931,1643,4932,4933,4934,4935,4936,4937,4938, +4939,4940,4941,4942,4943,4944,4945,4946,4947,4948,4949,4950,4951,4952,4953,4954, +4955,4956,4957,4958,4959,4960,4961,4962,4963,4964,4965,4966,4967,4968,4969,4970, +4971,4972,4973,4974,4975,4976,4977,4978,4979,4980,1644,4981,4982,4983,4984,1645, +4985,4986,1646,4987,4988,4989,4990,4991,4992,4993,4994,4995,4996,4997,4998,4999, +5000,5001,5002,5003,5004,5005,1647,5006,1648,5007,5008,5009,5010,5011,5012,1078, +5013,5014,5015,5016,5017,5018,5019,5020,5021,5022,5023,5024,5025,5026,5027,5028, +1365,5029,5030,5031,5032,5033,5034,5035,5036,5037,5038,5039,1649,5040,5041,5042, +5043,5044,5045,1366,5046,5047,5048,5049,5050,5051,5052,5053,5054,5055,1650,5056, +5057,5058,5059,5060,5061,5062,5063,5064,5065,5066,5067,5068,5069,5070,5071,5072, +5073,5074,5075,5076,5077,1651,5078,5079,5080,5081,5082,5083,5084,5085,5086,5087, +5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102,5103, +5104,5105,5106,5107,5108,5109,5110,1652,5111,5112,5113,5114,5115,5116,5117,5118, +1367,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,1653,5130,5131,5132, +5133,5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148, +5149,1368,5150,1654,5151,1369,5152,5153,5154,5155,5156,5157,5158,5159,5160,5161, +5162,5163,5164,5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,5176,5177, +5178,1370,5179,5180,5181,5182,5183,5184,5185,5186,5187,5188,5189,5190,5191,5192, +5193,5194,5195,5196,5197,5198,1655,5199,5200,5201,5202,1656,5203,5204,5205,5206, +1371,5207,1372,5208,5209,5210,5211,1373,5212,5213,1374,5214,5215,5216,5217,5218, +5219,5220,5221,5222,5223,5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234, +5235,5236,5237,5238,5239,5240,5241,5242,5243,5244,5245,5246,5247,1657,5248,5249, +5250,5251,1658,1263,5252,5253,5254,5255,5256,1375,5257,5258,5259,5260,5261,5262, 
+5263,5264,5265,5266,5267,5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278, +5279,5280,5281,5282,5283,1659,5284,5285,5286,5287,5288,5289,5290,5291,5292,5293, +5294,5295,5296,5297,5298,5299,5300,1660,5301,5302,5303,5304,5305,5306,5307,5308, +5309,5310,5311,5312,5313,5314,5315,5316,5317,5318,5319,5320,5321,1376,5322,5323, +5324,5325,5326,5327,5328,5329,5330,5331,5332,5333,1198,5334,5335,5336,5337,5338, +5339,5340,5341,5342,5343,1661,5344,5345,5346,5347,5348,5349,5350,5351,5352,5353, +5354,5355,5356,5357,5358,5359,5360,5361,5362,5363,5364,5365,5366,5367,5368,5369, +5370,5371,5372,5373,5374,5375,5376,5377,5378,5379,5380,5381,5382,5383,5384,5385, +5386,5387,5388,5389,5390,5391,5392,5393,5394,5395,5396,5397,5398,1264,5399,5400, +5401,5402,5403,5404,5405,5406,5407,5408,5409,5410,5411,5412,1662,5413,5414,5415, +5416,1663,5417,5418,5419,5420,5421,5422,5423,5424,5425,5426,5427,5428,5429,5430, +5431,5432,5433,5434,5435,5436,5437,5438,1664,5439,5440,5441,5442,5443,5444,5445, +5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456,5457,5458,5459,5460,5461, +5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472,5473,5474,5475,5476,5477, +5478,1154,5479,5480,5481,5482,5483,5484,5485,1665,5486,5487,5488,5489,5490,5491, +5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504,5505,5506,5507, +5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520,5521,5522,5523, +5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536,5537,5538,5539, +5540,5541,5542,5543,5544,5545,5546,5547,5548,1377,5549,5550,5551,5552,5553,5554, +5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568,5569,5570, +1114,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584,5585, +5586,5587,5588,5589,5590,5591,5592,1378,5593,5594,5595,5596,5597,5598,5599,5600, +5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,1379,5615, +5616,5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631, +5632,5633,5634,1380,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646, +5647,5648,5649,1381,1056,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660, +1666,5661,5662,5663,5664,5665,5666,5667,5668,1667,5669,1668,5670,5671,5672,5673, +5674,5675,5676,5677,5678,1155,5679,5680,5681,5682,5683,5684,5685,5686,5687,5688, +5689,5690,5691,5692,5693,5694,5695,5696,5697,5698,1669,5699,5700,5701,5702,5703, +5704,5705,1670,5706,5707,5708,5709,5710,1671,5711,5712,5713,5714,1382,5715,5716, +5717,5718,5719,5720,5721,5722,5723,5724,5725,1672,5726,5727,1673,1674,5728,5729, +5730,5731,5732,5733,5734,5735,5736,1675,5737,5738,5739,5740,5741,5742,5743,5744, +1676,5745,5746,5747,5748,5749,5750,5751,1383,5752,5753,5754,5755,5756,5757,5758, +5759,5760,5761,5762,5763,5764,5765,5766,5767,5768,1677,5769,5770,5771,5772,5773, +1678,5774,5775,5776, 998,5777,5778,5779,5780,5781,5782,5783,5784,5785,1384,5786, +5787,5788,5789,5790,5791,5792,5793,5794,5795,5796,5797,5798,5799,5800,1679,5801, +5802,5803,1115,1116,5804,5805,5806,5807,5808,5809,5810,5811,5812,5813,5814,5815, +5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828,5829,5830,5831, +5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844,5845,5846,5847, +5848,5849,5850,5851,5852,5853,5854,5855,1680,5856,5857,5858,5859,5860,5861,5862, +5863,5864,1681,5865,5866,5867,1682,5868,5869,5870,5871,5872,5873,5874,5875,5876, +5877,5878,5879,1683,5880,1684,5881,5882,5883,5884,1685,5885,5886,5887,5888,5889, +5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905, 
+5906,5907,1686,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, +5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,1687, +5936,5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951, +5952,1688,1689,5953,1199,5954,5955,5956,5957,5958,5959,5960,5961,1690,5962,5963, +5964,5965,5966,5967,5968,5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979, +5980,5981,1385,5982,1386,5983,5984,5985,5986,5987,5988,5989,5990,5991,5992,5993, +5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004,6005,6006,6007,6008,6009, +6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020,6021,6022,6023,6024,6025, +6026,6027,1265,6028,6029,1691,6030,6031,6032,6033,6034,6035,6036,6037,6038,6039, +6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052,6053,6054,6055, +6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068,6069,6070,6071, +6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084,1692,6085,6086, +6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100,6101,6102, +6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116,6117,6118, +6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,1693,6132,6133, +6134,6135,6136,1694,6137,6138,6139,6140,6141,1695,6142,6143,6144,6145,6146,6147, +6148,6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163, +6164,6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179, +6180,6181,6182,6183,6184,6185,1696,6186,6187,6188,6189,6190,6191,6192,6193,6194, +6195,6196,6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210, +6211,6212,6213,6214,6215,6216,6217,6218,6219,1697,6220,6221,6222,6223,6224,6225, +6226,6227,6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241, +6242,6243,6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,1698,6254,6255,6256, +6257,6258,6259,6260,6261,6262,6263,1200,6264,6265,6266,6267,6268,6269,6270,6271, #1024 +6272,6273,6274,6275,6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,6286,6287, +6288,6289,6290,6291,6292,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,1699, +6303,6304,1700,6305,6306,6307,6308,6309,6310,6311,6312,6313,6314,6315,6316,6317, +6318,6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333, +6334,6335,6336,6337,6338,6339,1701,6340,6341,6342,6343,6344,1387,6345,6346,6347, +6348,6349,6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363, +6364,6365,6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379, +6380,6381,6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395, +6396,6397,6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,6411, +6412,6413,1702,6414,6415,6416,6417,6418,6419,6420,6421,6422,1703,6423,6424,6425, +6426,6427,6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,1704,6439,6440, +6441,6442,6443,6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,6455,6456, +6457,6458,6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472, +6473,6474,6475,6476,6477,6478,6479,6480,6481,6482,6483,6484,6485,6486,6487,6488, +6489,6490,6491,6492,6493,6494,6495,6496,6497,6498,6499,6500,6501,6502,6503,1266, +6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516,6517,6518,6519, +6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532,6533,6534,6535, +6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548,6549,6550,6551, +1705,1706,6552,6553,6554,6555,6556,6557,6558,6559,6560,6561,6562,6563,6564,6565, 
+6566,6567,6568,6569,6570,6571,6572,6573,6574,6575,6576,6577,6578,6579,6580,6581, +6582,6583,6584,6585,6586,6587,6588,6589,6590,6591,6592,6593,6594,6595,6596,6597, +6598,6599,6600,6601,6602,6603,6604,6605,6606,6607,6608,6609,6610,6611,6612,6613, +6614,6615,6616,6617,6618,6619,6620,6621,6622,6623,6624,6625,6626,6627,6628,6629, +6630,6631,6632,6633,6634,6635,6636,6637,1388,6638,6639,6640,6641,6642,6643,6644, +1707,6645,6646,6647,6648,6649,6650,6651,6652,6653,6654,6655,6656,6657,6658,6659, +6660,6661,6662,6663,1708,6664,6665,6666,6667,6668,6669,6670,6671,6672,6673,6674, +1201,6675,6676,6677,6678,6679,6680,6681,6682,6683,6684,6685,6686,6687,6688,6689, +6690,6691,6692,6693,6694,6695,6696,6697,6698,6699,6700,6701,6702,6703,6704,6705, +6706,6707,6708,6709,6710,6711,6712,6713,6714,6715,6716,6717,6718,6719,6720,6721, +6722,6723,6724,6725,1389,6726,6727,6728,6729,6730,6731,6732,6733,6734,6735,6736, +1390,1709,6737,6738,6739,6740,6741,6742,1710,6743,6744,6745,6746,1391,6747,6748, +6749,6750,6751,6752,6753,6754,6755,6756,6757,1392,6758,6759,6760,6761,6762,6763, +6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777,6778,6779, +6780,1202,6781,6782,6783,6784,6785,6786,6787,6788,6789,6790,6791,6792,6793,6794, +6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806,6807,6808,6809,1711, +6810,6811,6812,6813,6814,6815,6816,6817,6818,6819,6820,6821,6822,6823,6824,6825, +6826,6827,6828,6829,6830,6831,6832,6833,6834,6835,6836,1393,6837,6838,6839,6840, +6841,6842,6843,6844,6845,6846,6847,6848,6849,6850,6851,6852,6853,6854,6855,6856, +6857,6858,6859,6860,6861,6862,6863,6864,6865,6866,6867,6868,6869,6870,6871,6872, +6873,6874,6875,6876,6877,6878,6879,6880,6881,6882,6883,6884,6885,6886,6887,6888, +6889,6890,6891,6892,6893,6894,6895,6896,6897,6898,6899,6900,6901,6902,1712,6903, +6904,6905,6906,6907,6908,6909,6910,1713,6911,6912,6913,6914,6915,6916,6917,6918, +6919,6920,6921,6922,6923,6924,6925,6926,6927,6928,6929,6930,6931,6932,6933,6934, +6935,6936,6937,6938,6939,6940,6941,6942,6943,6944,6945,6946,6947,6948,6949,6950, +6951,6952,6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966, +6967,6968,6969,6970,6971,6972,6973,6974,1714,6975,6976,6977,6978,6979,6980,6981, +6982,6983,6984,6985,6986,6987,6988,1394,6989,6990,6991,6992,6993,6994,6995,6996, +6997,6998,6999,7000,1715,7001,7002,7003,7004,7005,7006,7007,7008,7009,7010,7011, +7012,7013,7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027, +7028,1716,7029,7030,7031,7032,7033,7034,7035,7036,7037,7038,7039,7040,7041,7042, +7043,7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058, +7059,7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,7071,7072,7073,7074, +7075,7076,7077,7078,7079,7080,7081,7082,7083,7084,7085,7086,7087,7088,7089,7090, +7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105,7106, +7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,7119,7120,7121,7122, +7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136,7137,7138, +7139,7140,7141,7142,7143,7144,7145,7146,7147,7148,7149,7150,7151,7152,7153,7154, +7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167,7168,7169,7170, +7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183,7184,7185,7186, +7187,7188,7189,7190,7191,7192,7193,7194,7195,7196,7197,7198,7199,7200,7201,7202, +7203,7204,7205,7206,7207,1395,7208,7209,7210,7211,7212,7213,1717,7214,7215,7216, +7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229,7230,7231,7232, 
+7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245,7246,7247,7248, +7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261,7262,7263,7264, +7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277,7278,7279,7280, +7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293,7294,7295,7296, +7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308,7309,7310,7311,7312, +7313,1718,7314,7315,7316,7317,7318,7319,7320,7321,7322,7323,7324,7325,7326,7327, +7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339,7340,7341,7342,7343, +7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,7354,7355,7356,7357,7358,7359, +7360,7361,7362,7363,7364,7365,7366,7367,7368,7369,7370,7371,7372,7373,7374,7375, +7376,7377,7378,7379,7380,7381,7382,7383,7384,7385,7386,7387,7388,7389,7390,7391, +7392,7393,7394,7395,7396,7397,7398,7399,7400,7401,7402,7403,7404,7405,7406,7407, +7408,7409,7410,7411,7412,7413,7414,7415,7416,7417,7418,7419,7420,7421,7422,7423, +7424,7425,7426,7427,7428,7429,7430,7431,7432,7433,7434,7435,7436,7437,7438,7439, +7440,7441,7442,7443,7444,7445,7446,7447,7448,7449,7450,7451,7452,7453,7454,7455, +7456,7457,7458,7459,7460,7461,7462,7463,7464,7465,7466,7467,7468,7469,7470,7471, +7472,7473,7474,7475,7476,7477,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487, +7488,7489,7490,7491,7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,7503, +7504,7505,7506,7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519, +7520,7521,7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535, +7536,7537,7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,7550,7551, +7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567, +7568,7569,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582,7583, +7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598,7599, +7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614,7615, +7616,7617,7618,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628,7629,7630,7631, +7632,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643,7644,7645,7646,7647, +7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659,7660,7661,7662,7663, +7664,7665,7666,7667,7668,7669,7670,7671,7672,7673,7674,7675,7676,7677,7678,7679, +7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690,7691,7692,7693,7694,7695, +7696,7697,7698,7699,7700,7701,7702,7703,7704,7705,7706,7707,7708,7709,7710,7711, +7712,7713,7714,7715,7716,7717,7718,7719,7720,7721,7722,7723,7724,7725,7726,7727, +7728,7729,7730,7731,7732,7733,7734,7735,7736,7737,7738,7739,7740,7741,7742,7743, +7744,7745,7746,7747,7748,7749,7750,7751,7752,7753,7754,7755,7756,7757,7758,7759, +7760,7761,7762,7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775, +7776,7777,7778,7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791, +7792,7793,7794,7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,7806,7807, +7808,7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823, +7824,7825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839, +7840,7841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855, +7856,7857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871, +7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887, +7888,7889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903, +7904,7905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919, 
+7920,7921,7922,7923,7924,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935, +7936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951, +7952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967, +7968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983, +7984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999, +8000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015, +8016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031, +8032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047, +8048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063, +8064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079, +8080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095, +8096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111, +8112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127, +8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143, +8144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159, +8160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175, +8176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191, +8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207, +8208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223, +8224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,8239, +8240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255, +8256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271, +8272,8273,8274,8275,8276,8277,8278,8279,8280,8281,8282,8283,8284,8285,8286,8287, +8288,8289,8290,8291,8292,8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303, +8304,8305,8306,8307,8308,8309,8310,8311,8312,8313,8314,8315,8316,8317,8318,8319, +8320,8321,8322,8323,8324,8325,8326,8327,8328,8329,8330,8331,8332,8333,8334,8335, +8336,8337,8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351, +8352,8353,8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,8364,8365,8366,8367, +8368,8369,8370,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382,8383, +8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398,8399, +8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,8411,8412,8413,8414,8415, +8416,8417,8418,8419,8420,8421,8422,8423,8424,8425,8426,8427,8428,8429,8430,8431, +8432,8433,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443,8444,8445,8446,8447, +8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459,8460,8461,8462,8463, +8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475,8476,8477,8478,8479, +8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490,8491,8492,8493,8494,8495, +8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506,8507,8508,8509,8510,8511, +8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522,8523,8524,8525,8526,8527, +8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538,8539,8540,8541,8542,8543, +8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554,8555,8556,8557,8558,8559, +8560,8561,8562,8563,8564,8565,8566,8567,8568,8569,8570,8571,8572,8573,8574,8575, +8576,8577,8578,8579,8580,8581,8582,8583,8584,8585,8586,8587,8588,8589,8590,8591, +8592,8593,8594,8595,8596,8597,8598,8599,8600,8601,8602,8603,8604,8605,8606,8607, 
+8608,8609,8610,8611,8612,8613,8614,8615,8616,8617,8618,8619,8620,8621,8622,8623, +8624,8625,8626,8627,8628,8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,8639, +8640,8641,8642,8643,8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655, +8656,8657,8658,8659,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671, +8672,8673,8674,8675,8676,8677,8678,8679,8680,8681,8682,8683,8684,8685,8686,8687, +8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703, +8704,8705,8706,8707,8708,8709,8710,8711,8712,8713,8714,8715,8716,8717,8718,8719, +8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,8734,8735, +8736,8737,8738,8739,8740,8741) + +# flake8: noqa diff --git a/awx/lib/site-packages/requests/packages/charade/euckrprober.py b/awx/lib/site-packages/requests/packages/charade/euckrprober.py new file mode 100644 index 0000000000..def3e42902 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/euckrprober.py @@ -0,0 +1,42 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCKRDistributionAnalysis +from .mbcssm import EUCKRSMModel + + +class EUCKRProber(MultiByteCharSetProber): + def __init__(self): + MultiByteCharSetProber.__init__(self) + self._mCodingSM = CodingStateMachine(EUCKRSMModel) + self._mDistributionAnalyzer = EUCKRDistributionAnalysis() + self.reset() + + def get_charset_name(self): + return "EUC-KR" diff --git a/awx/lib/site-packages/requests/packages/charade/euctwfreq.py b/awx/lib/site-packages/requests/packages/charade/euctwfreq.py new file mode 100644 index 0000000000..576e7504dc --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/euctwfreq.py @@ -0,0 +1,428 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# EUCTW frequency table
+# Converted from big5 work
+# by Taiwan's Mandarin Promotion Council
+# <http://www.edu.tw:81/mandr/>
+
+# 128 --> 0.42261
+# 256 --> 0.57851
+# 512 --> 0.74851
+# 1024 --> 0.89384
+# 2048 --> 0.97583
+#
+# Ideal Distribution Ratio = 0.74851/(1-0.74851) = 2.98
+# Random Distribution Ratio = 512/(5401-512) = 0.105
+#
+# Typical Distribution Ratio is about 25% of the ideal one, still much higher than RDR
+
+EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75
+
+# Char to FreqOrder table
+EUCTW_TABLE_SIZE = 8102
+
+EUCTWCharToFreqOrder = (
+ 1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742
+3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758
+1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774
+ 63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790
+3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806
+4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822
+7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838
+ 630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854
+ 179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870
+ 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886
+2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902
+1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918
+3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934
+ 706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950
+1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966
+3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982
+2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998
+ 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014
+3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030
+1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046
+7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062
+ 266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078
+7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094
+1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110
+ 32,1408,2811,1312, 329, 487,2355,2247,2708, 
784,2674, 4,3019,3314,1427,1788, # 3126 + 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142 +3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158 +3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174 + 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190 +2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206 +2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222 + 314,2615,2775,4308,2330,2331, 569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238 + 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254 +3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270 +1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286 +1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302 +1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318 +2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334 + 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350 +4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366 +1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382 +7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398 +2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414 + 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430 + 98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446 + 523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462 + 710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478 +7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494 + 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510 +1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526 + 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542 + 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558 +7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574 +1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590 + 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606 +3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622 +4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638 +3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654 + 279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670 + 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686 +1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702 +4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718 +3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734 +3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750 +2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 
301,3319,4335,2380, # 3766 +7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782 +3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798 +7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814 +1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830 +2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846 +1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862 + 78,3750,3751, 267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878 +1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894 +4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910 +3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926 + 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942 + 165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958 + 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974 +2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990 +7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006 +1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022 +2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038 +1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054 +1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070 +7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086 +7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102 +7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118 +3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134 +4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150 +1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166 +7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182 +2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198 +7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214 +3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230 +3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246 +7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262 +2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278 +7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294 + 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310 +4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326 +2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342 +7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358 +3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374 +2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390 +2752,2986,7490, 435,7491, 343,1108, 596, 
17,1751,4365,2235,3430,3643,7492,4366, # 4406 + 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422 +2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438 +1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454 +1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470 +2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486 +1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502 +7505,3129,3261, 215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518 +7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534 +2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550 +4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566 +1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582 +7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598 + 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614 +4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630 + 375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646 +2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662 + 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678 +1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694 +1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710 + 730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726 +3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742 +3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758 +1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774 +3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790 +7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806 +7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822 +1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838 +2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854 +1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870 +3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886 +2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902 +3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918 +2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934 +4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950 +4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966 +3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982 + 97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998 +3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014 + 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030 +3337,3270, 220,2557,1009,7572,3776, 
670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046 +3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062 +3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078 +1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094 +7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110 + 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126 +7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142 +1702,1226, 102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158 + 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174 +4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190 +3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206 + 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222 +2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238 +2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254 +3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270 +1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286 +4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302 +2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318 +1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334 +1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350 +2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366 +3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382 +1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398 +7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414 +1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430 +4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446 +1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462 + 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478 +1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494 +3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510 +3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526 +2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542 +1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558 +4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574 + 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590 +7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606 +2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622 +3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638 +4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654 + 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670 +7662,3349,3041,3451, 
511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686 +7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702 +1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718 +4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734 +3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750 +2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766 +3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782 +3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798 +2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814 +1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830 +4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846 +3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862 +3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878 +2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894 +4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910 +7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926 +3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942 +2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958 +3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974 +1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990 +2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006 +3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022 +4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038 +2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054 +2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070 +7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086 +1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102 +2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118 +1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134 +3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150 +4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166 +2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182 +3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198 +3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214 +2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230 +4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246 +2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262 +3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278 +4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294 +7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310 
+3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326 + 194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342 +1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358 +4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374 +1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390 +4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406 +7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422 + 510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438 +7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454 +2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470 +1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486 +1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502 +3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518 + 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534 + 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550 + 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566 +3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582 +2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598 + 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614 +7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630 +1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646 +3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662 +7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678 +1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694 +7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710 +4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726 +1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742 +2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758 +2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774 +4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790 + 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806 + 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822 +3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838 +3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854 +1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870 +2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886 +7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902 +1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918 +1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934 +3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 
6950 + 919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966 +1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982 +4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998 +7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014 +2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030 +3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046 + 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 7062 +1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078 +2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094 +2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110 +7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126 +7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142 +7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158 +2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174 +2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190 +1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206 +4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222 +3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238 +3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254 +4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270 +4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286 +2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302 +2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318 +7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334 +4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350 +7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366 +2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382 +1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398 +3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414 +4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430 +2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446 + 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462 +2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478 +1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494 +2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510 +2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526 +4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542 +7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558 +1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574 +3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 
538,3065,1012, # 7590 +7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606 +1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622 +8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638 +2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654 +8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670 +2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686 +2328,3852, 533,4273,3605,2181, 617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702 +8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718 +8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734 +8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750 + 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766 +8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782 +4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798 +3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814 +8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830 +1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846 +8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862 + 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878 +1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894 + 479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910 +4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926 +1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942 +4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958 +1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974 + 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990 +3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006 +4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022 +8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038 + 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054 +3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070 + 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086 +2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102 +#Everything below is of no interest for detection purpose +2515,1613,4582,8119,3312,3866,2516,8120,4058,8121,1637,4059,2466,4583,3867,8122, # 8118 +2493,3016,3734,8123,8124,2192,8125,8126,2162,8127,8128,8129,8130,8131,8132,8133, # 8134 +8134,8135,8136,8137,8138,8139,8140,8141,8142,8143,8144,8145,8146,8147,8148,8149, # 8150 +8150,8151,8152,8153,8154,8155,8156,8157,8158,8159,8160,8161,8162,8163,8164,8165, # 8166 +8166,8167,8168,8169,8170,8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181, # 8182 +8182,8183,8184,8185,8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197, # 8198 +8198,8199,8200,8201,8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213, # 8214 
+8214,8215,8216,8217,8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229, # 8230 +8230,8231,8232,8233,8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245, # 8246 +8246,8247,8248,8249,8250,8251,8252,8253,8254,8255,8256,8257,8258,8259,8260,8261, # 8262 +8262,8263,8264,8265,8266,8267,8268,8269,8270,8271,8272,8273,8274,8275,8276,8277, # 8278 +8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,8290,8291,8292,8293, # 8294 +8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,8308,8309, # 8310 +8310,8311,8312,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322,8323,8324,8325, # 8326 +8326,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337,8338,8339,8340,8341, # 8342 +8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353,8354,8355,8356,8357, # 8358 +8358,8359,8360,8361,8362,8363,8364,8365,8366,8367,8368,8369,8370,8371,8372,8373, # 8374 +8374,8375,8376,8377,8378,8379,8380,8381,8382,8383,8384,8385,8386,8387,8388,8389, # 8390 +8390,8391,8392,8393,8394,8395,8396,8397,8398,8399,8400,8401,8402,8403,8404,8405, # 8406 +8406,8407,8408,8409,8410,8411,8412,8413,8414,8415,8416,8417,8418,8419,8420,8421, # 8422 +8422,8423,8424,8425,8426,8427,8428,8429,8430,8431,8432,8433,8434,8435,8436,8437, # 8438 +8438,8439,8440,8441,8442,8443,8444,8445,8446,8447,8448,8449,8450,8451,8452,8453, # 8454 +8454,8455,8456,8457,8458,8459,8460,8461,8462,8463,8464,8465,8466,8467,8468,8469, # 8470 +8470,8471,8472,8473,8474,8475,8476,8477,8478,8479,8480,8481,8482,8483,8484,8485, # 8486 +8486,8487,8488,8489,8490,8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501, # 8502 +8502,8503,8504,8505,8506,8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517, # 8518 +8518,8519,8520,8521,8522,8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533, # 8534 +8534,8535,8536,8537,8538,8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549, # 8550 +8550,8551,8552,8553,8554,8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,8565, # 8566 +8566,8567,8568,8569,8570,8571,8572,8573,8574,8575,8576,8577,8578,8579,8580,8581, # 8582 +8582,8583,8584,8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597, # 8598 +8598,8599,8600,8601,8602,8603,8604,8605,8606,8607,8608,8609,8610,8611,8612,8613, # 8614 +8614,8615,8616,8617,8618,8619,8620,8621,8622,8623,8624,8625,8626,8627,8628,8629, # 8630 +8630,8631,8632,8633,8634,8635,8636,8637,8638,8639,8640,8641,8642,8643,8644,8645, # 8646 +8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,8657,8658,8659,8660,8661, # 8662 +8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672,8673,8674,8675,8676,8677, # 8678 +8678,8679,8680,8681,8682,8683,8684,8685,8686,8687,8688,8689,8690,8691,8692,8693, # 8694 +8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,8704,8705,8706,8707,8708,8709, # 8710 +8710,8711,8712,8713,8714,8715,8716,8717,8718,8719,8720,8721,8722,8723,8724,8725, # 8726 +8726,8727,8728,8729,8730,8731,8732,8733,8734,8735,8736,8737,8738,8739,8740,8741) # 8742 + +# flake8: noqa diff --git a/awx/lib/site-packages/requests/packages/charade/euctwprober.py b/awx/lib/site-packages/requests/packages/charade/euctwprober.py new file mode 100644 index 0000000000..e601adfdc6 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/euctwprober.py @@ -0,0 +1,41 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. 
All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCTWDistributionAnalysis +from .mbcssm import EUCTWSMModel + +class EUCTWProber(MultiByteCharSetProber): + def __init__(self): + MultiByteCharSetProber.__init__(self) + self._mCodingSM = CodingStateMachine(EUCTWSMModel) + self._mDistributionAnalyzer = EUCTWDistributionAnalysis() + self.reset() + + def get_charset_name(self): + return "EUC-TW" diff --git a/awx/lib/site-packages/requests/packages/charade/gb2312freq.py b/awx/lib/site-packages/requests/packages/charade/gb2312freq.py new file mode 100644 index 0000000000..1238f510fc --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/gb2312freq.py @@ -0,0 +1,472 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# GB2312 most frequently used character table +# +# Char to FreqOrder table , from hz6763 + +# 512 --> 0.79 -- 0.79 +# 1024 --> 0.92 -- 0.13 +# 2048 --> 0.98 -- 0.06 +# 6768 --> 1.00 -- 0.02 +# +# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79 +# Random Distribution Ration = 512 / (3755 - 512) = 0.157 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher that RDR + +GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9 + +GB2312_TABLE_SIZE = 3760 + +GB2312CharToFreqOrder = ( +1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205, +2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842, +2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409, + 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670, +1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820, +1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585, + 152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566, +1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575, +2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853, +3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061, + 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155, +1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406, + 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816, +2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606, + 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023, +2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414, +1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513, +3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052, + 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570, +1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575, + 253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250, +2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506, +1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26, +3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835, +1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686, +2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054, +1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894, + 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105, +3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403, +3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694, + 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873, +3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940, + 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121, +1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 
110,4549,2066, 648, +3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992, +2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233, +1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157, + 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807, +1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094, +4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258, + 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478, +3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152, +3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909, + 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272, +1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221, +2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252, +1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301, +1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254, + 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070, +3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461, +3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360, +4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124, + 296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535, +3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243, +1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713, +1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071, +4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442, + 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946, + 814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257, +3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180, +1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427, + 602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781, +1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724, +2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937, + 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943, + 432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789, + 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552, +3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246, +4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451, +3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310, + 750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860, +2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297, +2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780, +2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745, + 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936, +2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032, + 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657, + 163,2167, 290,1209,1622,3378, 
550, 634,2508,2510, 695,2634,2384,2512,1476,1414, + 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976, +3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436, +2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254, +2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536, +1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238, + 18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059, +2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741, + 90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447, + 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601, +1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269, +1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894, + 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173, + 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994, +1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956, +2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437, +3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154, +2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240, +2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143, +2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634, +3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472, +1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541, +1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143, +2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312, +1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414, +3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754, +1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424, +1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302, +3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739, + 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004, +2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484, +1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739, +4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535, +1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641, +1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307, +3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573, +1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533, + 47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965, + 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99, +1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280, + 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505, +1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012, +1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039, + 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 
892,2481,1623,4077, 982, +3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530, +4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392, +3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656, +2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220, +2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766, +1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535, +3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728, +2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338, +1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627, +1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885, + 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411, +2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671, +2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162, +3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774, +4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524, +3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346, + 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040, +3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188, +2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280, +1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131, + 259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947, + 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970, +3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814, +4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557, +2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997, +1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972, +1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369, + 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376, +1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480, +3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610, + 955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128, + 642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769, +1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207, + 57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392, +1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623, + 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782, +2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650, + 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478, +2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773, +2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007, +1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323, +1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598, +2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961, + 819,1541, 142,2284, 
44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302, +1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409, +1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683, +2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191, +2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616, +3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302, +1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774, +4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147, + 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731, + 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464, +3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377, +1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315, + 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557, +3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903, +1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060, +4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261, +1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092, +2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810, +1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708, + 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658, +1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871, +3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503, + 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229, +2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112, + 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504, +1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389, +1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27, +1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542, +3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861, +2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845, +3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700, +3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469, +3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582, + 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999, +2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274, + 786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020, +2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601, + 12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628, +1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31, + 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668, + 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778, +1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169, +3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667, +3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 
63,2076, 314,1881, +1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276, +1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320, +3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751, +2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432, +2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772, +1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843, +3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116, + 451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904, +4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652, +1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664, +2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770, +3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283, +3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626, +1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713, + 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333, + 391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062, +2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555, + 931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014, +1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510, + 386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015, +1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459, +1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390, +1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238, +1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232, +1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624, + 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189, + 852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, # last 512 +#Everything below is of no interest for detection purpose +5508,6484,3900,3414,3974,4441,4024,3537,4037,5628,5099,3633,6485,3148,6486,3636, +5509,3257,5510,5973,5445,5872,4941,4403,3174,4627,5873,6276,2286,4230,5446,5874, +5122,6102,6103,4162,5447,5123,5323,4849,6277,3980,3851,5066,4246,5774,5067,6278, +3001,2807,5695,3346,5775,5974,5158,5448,6487,5975,5976,5776,3598,6279,5696,4806, +4211,4154,6280,6488,6489,6490,6281,4212,5037,3374,4171,6491,4562,4807,4722,4827, +5977,6104,4532,4079,5159,5324,5160,4404,3858,5359,5875,3975,4288,4610,3486,4512, +5325,3893,5360,6282,6283,5560,2522,4231,5978,5186,5449,2569,3878,6284,5401,3578, +4415,6285,4656,5124,5979,2506,4247,4449,3219,3417,4334,4969,4329,6492,4576,4828, +4172,4416,4829,5402,6286,3927,3852,5361,4369,4830,4477,4867,5876,4173,6493,6105, +4657,6287,6106,5877,5450,6494,4155,4868,5451,3700,5629,4384,6288,6289,5878,3189, +4881,6107,6290,6495,4513,6496,4692,4515,4723,5100,3356,6497,6291,3810,4080,5561, +3570,4430,5980,6498,4355,5697,6499,4724,6108,6109,3764,4050,5038,5879,4093,3226, +6292,5068,5217,4693,3342,5630,3504,4831,4377,4466,4309,5698,4431,5777,6293,5778, +4272,3706,6110,5326,3752,4676,5327,4273,5403,4767,5631,6500,5699,5880,3475,5039, +6294,5562,5125,4348,4301,4482,4068,5126,4593,5700,3380,3462,5981,5563,3824,5404, 
+4970,5511,3825,4738,6295,6501,5452,4516,6111,5881,5564,6502,6296,5982,6503,4213, +4163,3454,6504,6112,4009,4450,6113,4658,6297,6114,3035,6505,6115,3995,4904,4739, +4563,4942,4110,5040,3661,3928,5362,3674,6506,5292,3612,4791,5565,4149,5983,5328, +5259,5021,4725,4577,4564,4517,4364,6298,5405,4578,5260,4594,4156,4157,5453,3592, +3491,6507,5127,5512,4709,4922,5984,5701,4726,4289,6508,4015,6116,5128,4628,3424, +4241,5779,6299,4905,6509,6510,5454,5702,5780,6300,4365,4923,3971,6511,5161,3270, +3158,5985,4100, 867,5129,5703,6117,5363,3695,3301,5513,4467,6118,6512,5455,4232, +4242,4629,6513,3959,4478,6514,5514,5329,5986,4850,5162,5566,3846,4694,6119,5456, +4869,5781,3779,6301,5704,5987,5515,4710,6302,5882,6120,4392,5364,5705,6515,6121, +6516,6517,3736,5988,5457,5989,4695,2457,5883,4551,5782,6303,6304,6305,5130,4971, +6122,5163,6123,4870,3263,5365,3150,4871,6518,6306,5783,5069,5706,3513,3498,4409, +5330,5632,5366,5458,5459,3991,5990,4502,3324,5991,5784,3696,4518,5633,4119,6519, +4630,5634,4417,5707,4832,5992,3418,6124,5993,5567,4768,5218,6520,4595,3458,5367, +6125,5635,6126,4202,6521,4740,4924,6307,3981,4069,4385,6308,3883,2675,4051,3834, +4302,4483,5568,5994,4972,4101,5368,6309,5164,5884,3922,6127,6522,6523,5261,5460, +5187,4164,5219,3538,5516,4111,3524,5995,6310,6311,5369,3181,3386,2484,5188,3464, +5569,3627,5708,6524,5406,5165,4677,4492,6312,4872,4851,5885,4468,5996,6313,5709, +5710,6128,2470,5886,6314,5293,4882,5785,3325,5461,5101,6129,5711,5786,6525,4906, +6526,6527,4418,5887,5712,4808,2907,3701,5713,5888,6528,3765,5636,5331,6529,6530, +3593,5889,3637,4943,3692,5714,5787,4925,6315,6130,5462,4405,6131,6132,6316,5262, +6531,6532,5715,3859,5716,5070,4696,5102,3929,5788,3987,4792,5997,6533,6534,3920, +4809,5000,5998,6535,2974,5370,6317,5189,5263,5717,3826,6536,3953,5001,4883,3190, +5463,5890,4973,5999,4741,6133,6134,3607,5570,6000,4711,3362,3630,4552,5041,6318, +6001,2950,2953,5637,4646,5371,4944,6002,2044,4120,3429,6319,6537,5103,4833,6538, +6539,4884,4647,3884,6003,6004,4758,3835,5220,5789,4565,5407,6540,6135,5294,4697, +4852,6320,6321,3206,4907,6541,6322,4945,6542,6136,6543,6323,6005,4631,3519,6544, +5891,6545,5464,3784,5221,6546,5571,4659,6547,6324,6137,5190,6548,3853,6549,4016, +4834,3954,6138,5332,3827,4017,3210,3546,4469,5408,5718,3505,4648,5790,5131,5638, +5791,5465,4727,4318,6325,6326,5792,4553,4010,4698,3439,4974,3638,4335,3085,6006, +5104,5042,5166,5892,5572,6327,4356,4519,5222,5573,5333,5793,5043,6550,5639,5071, +4503,6328,6139,6551,6140,3914,3901,5372,6007,5640,4728,4793,3976,3836,4885,6552, +4127,6553,4451,4102,5002,6554,3686,5105,6555,5191,5072,5295,4611,5794,5296,6556, +5893,5264,5894,4975,5466,5265,4699,4976,4370,4056,3492,5044,4886,6557,5795,4432, +4769,4357,5467,3940,4660,4290,6141,4484,4770,4661,3992,6329,4025,4662,5022,4632, +4835,4070,5297,4663,4596,5574,5132,5409,5895,6142,4504,5192,4664,5796,5896,3885, +5575,5797,5023,4810,5798,3732,5223,4712,5298,4084,5334,5468,6143,4052,4053,4336, +4977,4794,6558,5335,4908,5576,5224,4233,5024,4128,5469,5225,4873,6008,5045,4729, +4742,4633,3675,4597,6559,5897,5133,5577,5003,5641,5719,6330,6560,3017,2382,3854, +4406,4811,6331,4393,3964,4946,6561,2420,3722,6562,4926,4378,3247,1736,4442,6332, +5134,6333,5226,3996,2918,5470,4319,4003,4598,4743,4744,4485,3785,3902,5167,5004, +5373,4394,5898,6144,4874,1793,3997,6334,4085,4214,5106,5642,4909,5799,6009,4419, +4189,3330,5899,4165,4420,5299,5720,5227,3347,6145,4081,6335,2876,3930,6146,3293, +3786,3910,3998,5900,5300,5578,2840,6563,5901,5579,6147,3531,5374,6564,6565,5580, 
+4759,5375,6566,6148,3559,5643,6336,6010,5517,6337,6338,5721,5902,3873,6011,6339, +6567,5518,3868,3649,5722,6568,4771,4947,6569,6149,4812,6570,2853,5471,6340,6341, +5644,4795,6342,6012,5723,6343,5724,6013,4349,6344,3160,6150,5193,4599,4514,4493, +5168,4320,6345,4927,3666,4745,5169,5903,5005,4928,6346,5725,6014,4730,4203,5046, +4948,3395,5170,6015,4150,6016,5726,5519,6347,5047,3550,6151,6348,4197,4310,5904, +6571,5581,2965,6152,4978,3960,4291,5135,6572,5301,5727,4129,4026,5905,4853,5728, +5472,6153,6349,4533,2700,4505,5336,4678,3583,5073,2994,4486,3043,4554,5520,6350, +6017,5800,4487,6351,3931,4103,5376,6352,4011,4321,4311,4190,5136,6018,3988,3233, +4350,5906,5645,4198,6573,5107,3432,4191,3435,5582,6574,4139,5410,6353,5411,3944, +5583,5074,3198,6575,6354,4358,6576,5302,4600,5584,5194,5412,6577,6578,5585,5413, +5303,4248,5414,3879,4433,6579,4479,5025,4854,5415,6355,4760,4772,3683,2978,4700, +3797,4452,3965,3932,3721,4910,5801,6580,5195,3551,5907,3221,3471,3029,6019,3999, +5908,5909,5266,5267,3444,3023,3828,3170,4796,5646,4979,4259,6356,5647,5337,3694, +6357,5648,5338,4520,4322,5802,3031,3759,4071,6020,5586,4836,4386,5048,6581,3571, +4679,4174,4949,6154,4813,3787,3402,3822,3958,3215,3552,5268,4387,3933,4950,4359, +6021,5910,5075,3579,6358,4234,4566,5521,6359,3613,5049,6022,5911,3375,3702,3178, +4911,5339,4521,6582,6583,4395,3087,3811,5377,6023,6360,6155,4027,5171,5649,4421, +4249,2804,6584,2270,6585,4000,4235,3045,6156,5137,5729,4140,4312,3886,6361,4330, +6157,4215,6158,3500,3676,4929,4331,3713,4930,5912,4265,3776,3368,5587,4470,4855, +3038,4980,3631,6159,6160,4132,4680,6161,6362,3923,4379,5588,4255,6586,4121,6587, +6363,4649,6364,3288,4773,4774,6162,6024,6365,3543,6588,4274,3107,3737,5050,5803, +4797,4522,5589,5051,5730,3714,4887,5378,4001,4523,6163,5026,5522,4701,4175,2791, +3760,6589,5473,4224,4133,3847,4814,4815,4775,3259,5416,6590,2738,6164,6025,5304, +3733,5076,5650,4816,5590,6591,6165,6592,3934,5269,6593,3396,5340,6594,5804,3445, +3602,4042,4488,5731,5732,3525,5591,4601,5196,6166,6026,5172,3642,4612,3202,4506, +4798,6366,3818,5108,4303,5138,5139,4776,3332,4304,2915,3415,4434,5077,5109,4856, +2879,5305,4817,6595,5913,3104,3144,3903,4634,5341,3133,5110,5651,5805,6167,4057, +5592,2945,4371,5593,6596,3474,4182,6367,6597,6168,4507,4279,6598,2822,6599,4777, +4713,5594,3829,6169,3887,5417,6170,3653,5474,6368,4216,2971,5228,3790,4579,6369, +5733,6600,6601,4951,4746,4555,6602,5418,5475,6027,3400,4665,5806,6171,4799,6028, +5052,6172,3343,4800,4747,5006,6370,4556,4217,5476,4396,5229,5379,5477,3839,5914, +5652,5807,4714,3068,4635,5808,6173,5342,4192,5078,5419,5523,5734,6174,4557,6175, +4602,6371,6176,6603,5809,6372,5735,4260,3869,5111,5230,6029,5112,6177,3126,4681, +5524,5915,2706,3563,4748,3130,6178,4018,5525,6604,6605,5478,4012,4837,6606,4534, +4193,5810,4857,3615,5479,6030,4082,3697,3539,4086,5270,3662,4508,4931,5916,4912, +5811,5027,3888,6607,4397,3527,3302,3798,2775,2921,2637,3966,4122,4388,4028,4054, +1633,4858,5079,3024,5007,3982,3412,5736,6608,3426,3236,5595,3030,6179,3427,3336, +3279,3110,6373,3874,3039,5080,5917,5140,4489,3119,6374,5812,3405,4494,6031,4666, +4141,6180,4166,6032,5813,4981,6609,5081,4422,4982,4112,3915,5653,3296,3983,6375, +4266,4410,5654,6610,6181,3436,5082,6611,5380,6033,3819,5596,4535,5231,5306,5113, +6612,4952,5918,4275,3113,6613,6376,6182,6183,5814,3073,4731,4838,5008,3831,6614, +4888,3090,3848,4280,5526,5232,3014,5655,5009,5737,5420,5527,6615,5815,5343,5173, +5381,4818,6616,3151,4953,6617,5738,2796,3204,4360,2989,4281,5739,5174,5421,5197, 
+3132,5141,3849,5142,5528,5083,3799,3904,4839,5480,2880,4495,3448,6377,6184,5271, +5919,3771,3193,6034,6035,5920,5010,6036,5597,6037,6378,6038,3106,5422,6618,5423, +5424,4142,6619,4889,5084,4890,4313,5740,6620,3437,5175,5307,5816,4199,5198,5529, +5817,5199,5656,4913,5028,5344,3850,6185,2955,5272,5011,5818,4567,4580,5029,5921, +3616,5233,6621,6622,6186,4176,6039,6379,6380,3352,5200,5273,2908,5598,5234,3837, +5308,6623,6624,5819,4496,4323,5309,5201,6625,6626,4983,3194,3838,4167,5530,5922, +5274,6381,6382,3860,3861,5599,3333,4292,4509,6383,3553,5481,5820,5531,4778,6187, +3955,3956,4324,4389,4218,3945,4325,3397,2681,5923,4779,5085,4019,5482,4891,5382, +5383,6040,4682,3425,5275,4094,6627,5310,3015,5483,5657,4398,5924,3168,4819,6628, +5925,6629,5532,4932,4613,6041,6630,4636,6384,4780,4204,5658,4423,5821,3989,4683, +5822,6385,4954,6631,5345,6188,5425,5012,5384,3894,6386,4490,4104,6632,5741,5053, +6633,5823,5926,5659,5660,5927,6634,5235,5742,5824,4840,4933,4820,6387,4859,5928, +4955,6388,4143,3584,5825,5346,5013,6635,5661,6389,5014,5484,5743,4337,5176,5662, +6390,2836,6391,3268,6392,6636,6042,5236,6637,4158,6638,5744,5663,4471,5347,3663, +4123,5143,4293,3895,6639,6640,5311,5929,5826,3800,6189,6393,6190,5664,5348,3554, +3594,4749,4603,6641,5385,4801,6043,5827,4183,6642,5312,5426,4761,6394,5665,6191, +4715,2669,6643,6644,5533,3185,5427,5086,5930,5931,5386,6192,6044,6645,4781,4013, +5745,4282,4435,5534,4390,4267,6045,5746,4984,6046,2743,6193,3501,4087,5485,5932, +5428,4184,4095,5747,4061,5054,3058,3862,5933,5600,6646,5144,3618,6395,3131,5055, +5313,6396,4650,4956,3855,6194,3896,5202,4985,4029,4225,6195,6647,5828,5486,5829, +3589,3002,6648,6397,4782,5276,6649,6196,6650,4105,3803,4043,5237,5830,6398,4096, +3643,6399,3528,6651,4453,3315,4637,6652,3984,6197,5535,3182,3339,6653,3096,2660, +6400,6654,3449,5934,4250,4236,6047,6401,5831,6655,5487,3753,4062,5832,6198,6199, +6656,3766,6657,3403,4667,6048,6658,4338,2897,5833,3880,2797,3780,4326,6659,5748, +5015,6660,5387,4351,5601,4411,6661,3654,4424,5935,4339,4072,5277,4568,5536,6402, +6662,5238,6663,5349,5203,6200,5204,6201,5145,4536,5016,5056,4762,5834,4399,4957, +6202,6403,5666,5749,6664,4340,6665,5936,5177,5667,6666,6667,3459,4668,6404,6668, +6669,4543,6203,6670,4276,6405,4480,5537,6671,4614,5205,5668,6672,3348,2193,4763, +6406,6204,5937,5602,4177,5669,3419,6673,4020,6205,4443,4569,5388,3715,3639,6407, +6049,4058,6206,6674,5938,4544,6050,4185,4294,4841,4651,4615,5488,6207,6408,6051, +5178,3241,3509,5835,6208,4958,5836,4341,5489,5278,6209,2823,5538,5350,5206,5429, +6675,4638,4875,4073,3516,4684,4914,4860,5939,5603,5389,6052,5057,3237,5490,3791, +6676,6409,6677,4821,4915,4106,5351,5058,4243,5539,4244,5604,4842,4916,5239,3028, +3716,5837,5114,5605,5390,5940,5430,6210,4332,6678,5540,4732,3667,3840,6053,4305, +3408,5670,5541,6410,2744,5240,5750,6679,3234,5606,6680,5607,5671,3608,4283,4159, +4400,5352,4783,6681,6411,6682,4491,4802,6211,6412,5941,6413,6414,5542,5751,6683, +4669,3734,5942,6684,6415,5943,5059,3328,4670,4144,4268,6685,6686,6687,6688,4372, +3603,6689,5944,5491,4373,3440,6416,5543,4784,4822,5608,3792,4616,5838,5672,3514, +5391,6417,4892,6690,4639,6691,6054,5673,5839,6055,6692,6056,5392,6212,4038,5544, +5674,4497,6057,6693,5840,4284,5675,4021,4545,5609,6418,4454,6419,6213,4113,4472, +5314,3738,5087,5279,4074,5610,4959,4063,3179,4750,6058,6420,6214,3476,4498,4716, +5431,4960,4685,6215,5241,6694,6421,6216,6695,5841,5945,6422,3748,5946,5179,3905, +5752,5545,5947,4374,6217,4455,6423,4412,6218,4803,5353,6696,3832,5280,6219,4327, 
+4702,6220,6221,6059,4652,5432,6424,3749,4751,6425,5753,4986,5393,4917,5948,5030, +5754,4861,4733,6426,4703,6697,6222,4671,5949,4546,4961,5180,6223,5031,3316,5281, +6698,4862,4295,4934,5207,3644,6427,5842,5950,6428,6429,4570,5843,5282,6430,6224, +5088,3239,6060,6699,5844,5755,6061,6431,2701,5546,6432,5115,5676,4039,3993,3327, +4752,4425,5315,6433,3941,6434,5677,4617,4604,3074,4581,6225,5433,6435,6226,6062, +4823,5756,5116,6227,3717,5678,4717,5845,6436,5679,5846,6063,5847,6064,3977,3354, +6437,3863,5117,6228,5547,5394,4499,4524,6229,4605,6230,4306,4500,6700,5951,6065, +3693,5952,5089,4366,4918,6701,6231,5548,6232,6702,6438,4704,5434,6703,6704,5953, +4168,6705,5680,3420,6706,5242,4407,6066,3812,5757,5090,5954,4672,4525,3481,5681, +4618,5395,5354,5316,5955,6439,4962,6707,4526,6440,3465,4673,6067,6441,5682,6708, +5435,5492,5758,5683,4619,4571,4674,4804,4893,4686,5493,4753,6233,6068,4269,6442, +6234,5032,4705,5146,5243,5208,5848,6235,6443,4963,5033,4640,4226,6236,5849,3387, +6444,6445,4436,4437,5850,4843,5494,4785,4894,6709,4361,6710,5091,5956,3331,6237, +4987,5549,6069,6711,4342,3517,4473,5317,6070,6712,6071,4706,6446,5017,5355,6713, +6714,4988,5436,6447,4734,5759,6715,4735,4547,4456,4754,6448,5851,6449,6450,3547, +5852,5318,6451,6452,5092,4205,6716,6238,4620,4219,5611,6239,6072,4481,5760,5957, +5958,4059,6240,6453,4227,4537,6241,5761,4030,4186,5244,5209,3761,4457,4876,3337, +5495,5181,6242,5959,5319,5612,5684,5853,3493,5854,6073,4169,5613,5147,4895,6074, +5210,6717,5182,6718,3830,6243,2798,3841,6075,6244,5855,5614,3604,4606,5496,5685, +5118,5356,6719,6454,5960,5357,5961,6720,4145,3935,4621,5119,5962,4261,6721,6455, +4786,5963,4375,4582,6245,6246,6247,6076,5437,4877,5856,3376,4380,6248,4160,6722, +5148,6456,5211,6457,6723,4718,6458,6724,6249,5358,4044,3297,6459,6250,5857,5615, +5497,5245,6460,5498,6725,6251,6252,5550,3793,5499,2959,5396,6461,6462,4572,5093, +5500,5964,3806,4146,6463,4426,5762,5858,6077,6253,4755,3967,4220,5965,6254,4989, +5501,6464,4352,6726,6078,4764,2290,5246,3906,5438,5283,3767,4964,2861,5763,5094, +6255,6256,4622,5616,5859,5860,4707,6727,4285,4708,4824,5617,6257,5551,4787,5212, +4965,4935,4687,6465,6728,6466,5686,6079,3494,4413,2995,5247,5966,5618,6729,5967, +5764,5765,5687,5502,6730,6731,6080,5397,6467,4990,6258,6732,4538,5060,5619,6733, +4719,5688,5439,5018,5149,5284,5503,6734,6081,4607,6259,5120,3645,5861,4583,6260, +4584,4675,5620,4098,5440,6261,4863,2379,3306,4585,5552,5689,4586,5285,6735,4864, +6736,5286,6082,6737,4623,3010,4788,4381,4558,5621,4587,4896,3698,3161,5248,4353, +4045,6262,3754,5183,4588,6738,6263,6739,6740,5622,3936,6741,6468,6742,6264,5095, +6469,4991,5968,6743,4992,6744,6083,4897,6745,4256,5766,4307,3108,3968,4444,5287, +3889,4343,6084,4510,6085,4559,6086,4898,5969,6746,5623,5061,4919,5249,5250,5504, +5441,6265,5320,4878,3242,5862,5251,3428,6087,6747,4237,5624,5442,6266,5553,4539, +6748,2585,3533,5398,4262,6088,5150,4736,4438,6089,6267,5505,4966,6749,6268,6750, +6269,5288,5554,3650,6090,6091,4624,6092,5690,6751,5863,4270,5691,4277,5555,5864, +6752,5692,4720,4865,6470,5151,4688,4825,6753,3094,6754,6471,3235,4653,6755,5213, +5399,6756,3201,4589,5865,4967,6472,5866,6473,5019,3016,6757,5321,4756,3957,4573, +6093,4993,5767,4721,6474,6758,5625,6759,4458,6475,6270,6760,5556,4994,5214,5252, +6271,3875,5768,6094,5034,5506,4376,5769,6761,2120,6476,5253,5770,6762,5771,5970, +3990,5971,5557,5558,5772,6477,6095,2787,4641,5972,5121,6096,6097,6272,6763,3703, +5867,5507,6273,4206,6274,4789,6098,6764,3619,3646,3833,3804,2394,3788,4936,3978, 
+4866,4899,6099,6100,5559,6478,6765,3599,5868,6101,5869,5870,6275,6766,4527,6767) + +# flake8: noqa diff --git a/awx/lib/site-packages/requests/packages/charade/gb2312prober.py b/awx/lib/site-packages/requests/packages/charade/gb2312prober.py new file mode 100644 index 0000000000..643fe2519e --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/gb2312prober.py @@ -0,0 +1,41 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import GB2312DistributionAnalysis +from .mbcssm import GB2312SMModel + +class GB2312Prober(MultiByteCharSetProber): + def __init__(self): + MultiByteCharSetProber.__init__(self) + self._mCodingSM = CodingStateMachine(GB2312SMModel) + self._mDistributionAnalyzer = GB2312DistributionAnalysis() + self.reset() + + def get_charset_name(self): + return "GB2312" diff --git a/awx/lib/site-packages/requests/packages/charade/hebrewprober.py b/awx/lib/site-packages/requests/packages/charade/hebrewprober.py new file mode 100644 index 0000000000..90d171f302 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/hebrewprober.py @@ -0,0 +1,283 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Shy Shalom +# Portions created by the Initial Developer are Copyright (C) 2005 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .charsetprober import CharSetProber
+from .constants import eNotMe, eDetecting
+from .compat import wrap_ord
+
+# This prober doesn't actually recognize a language or a charset.
+# It is a helper prober for use by the Hebrew model probers.
+
+### General ideas of the Hebrew charset recognition ###
+#
+# Four main charsets exist in Hebrew:
+# "ISO-8859-8" - Visual Hebrew
+# "windows-1255" - Logical Hebrew
+# "ISO-8859-8-I" - Logical Hebrew
+# "x-mac-hebrew" - ?? Logical Hebrew ??
+#
+# Both "ISO" charsets use a completely identical set of code points, whereas
+# "windows-1255" and "x-mac-hebrew" are two different proper supersets of
+# these code points. windows-1255 defines additional characters in the range
+# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific
+# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6.
+# x-mac-hebrew defines similar additional code points but with a different
+# mapping.
+#
+# As far as an average Hebrew text with no diacritics is concerned, all four
+# charsets are identical with respect to code points, meaning that for the
+# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters
+# (including final letters).
+#
+# The dominant difference between these charsets is their directionality.
+# "Visual" directionality means that the text is ordered as if the renderer is
+# not aware of a BIDI rendering algorithm. The renderer sees the text and
+# draws it from left to right. The text itself when ordered naturally is read
+# backwards. A buffer of Visual Hebrew generally looks like so:
+# "[last word of first line spelled backwards] [whole line ordered backwards
+# and spelled backwards] [first word of first line spelled backwards]
+# [end of line] [last word of second line] ... etc' "
+# Adding punctuation marks, numbers and English text to visual text is
+# naturally also "visual" and from left to right.
+#
+# "Logical" directionality means the text is ordered "naturally" according to
+# the order it is read. It is the responsibility of the renderer to display
+# the text from right to left. A BIDI algorithm is used to place general
+# punctuation marks, numbers and English text in the text.
+#
+# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
+# what little evidence I could find, it seems that its general directionality
+# is Logical.
+#
+# To sum up all of the above, the Hebrew probing mechanism knows about two
+# charsets:
+# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
+#    backwards while line order is natural. For charset recognition purposes
+#    the line order is unimportant (in fact, for this implementation, even
+#    word order is unimportant).
+# Logical Hebrew - "windows-1255" - normal, naturally ordered text.
+#
+# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
+# specifically identified.
+# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
+# that contains special punctuation marks or diacritics is displayed with
+# some unconverted characters showing as question marks. This problem might
+# be corrected using another model prober for x-mac-hebrew. Due to the fact
+# that x-mac-hebrew texts are so rare, writing another model prober isn't
+# worth the effort and performance hit.
+#
+#### The Prober ####
+#
+# The prober is divided between two SBCharSetProbers and a HebrewProber,
+# all of which are managed, created, fed data, queried and deleted by the
+# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
+# fact some kind of Hebrew, Logical or Visual. The final decision about which
+# one it is is made by the HebrewProber by combining final-letter scores
+# with the scores of the two SBCharSetProbers to produce a final answer.
+#
+# The SBCSGroupProber is responsible for stripping the original text of HTML
+# tags, English characters, numbers, low-ASCII punctuation characters, spaces
+# and new lines. It reduces any sequence of such characters to a single space.
+# The buffer fed to each prober in the SBCS group prober is pure text in
+# high-ASCII.
+# The two SBCharSetProbers (model probers) share the same language model:
+# Win1255Model.
+# The first SBCharSetProber uses the model normally as any other
+# SBCharSetProber does, to recognize windows-1255, upon which this model was
+# built. The second SBCharSetProber is told to make the pair-of-letter
+# lookup in the language model backwards. This in practice exactly simulates
+# a visual Hebrew model using the windows-1255 logical Hebrew model.
+#
+# The HebrewProber does not use any language model. All it does is look for
+# final-letter evidence suggesting the text is either logical Hebrew or
+# visual Hebrew. Detached from the model probers, the results of the
+# HebrewProber alone are meaningless. HebrewProber always returns 0.00 as
+# confidence since it never identifies a charset by itself. Instead, the
+# pointer to the HebrewProber is passed to the model probers as a helper
+# "Name Prober".
+# When the Group prober receives a positive identification from any prober,
+# it asks for the name of the charset identified. If the prober queried is a
+# Hebrew model prober, the model prober forwards the call to the
+# HebrewProber to make the final decision. In the HebrewProber, the
+# decision is made according to the final-letter scores maintained and both
+# model probers' scores. The answer is returned in the form of the name of
+# the charset identified, either "windows-1255" or "ISO-8859-8".
+
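+# A rough sketch of the wiring described above (illustrative only; the
+# authoritative setup lives in sbcsgroupprober.py, and the exact names used
+# here, SingleByteCharSetProber and Win1255HebrewModel, are assumptions,
+# not definitions made in this file):
+#
+#     hebrew_prober = HebrewProber()
+#     # The same windows-1255 model is used twice; the second prober is
+#     # told to do its pair-of-letter lookups in reverse, which simulates
+#     # a visual Hebrew model.
+#     logical_prober = SingleByteCharSetProber(Win1255HebrewModel,
+#                                              False, hebrew_prober)
+#     visual_prober = SingleByteCharSetProber(Win1255HebrewModel,
+#                                             True, hebrew_prober)
+#     hebrew_prober.set_model_probers(logical_prober, visual_prober)
+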
+# windows-1255 / ISO-8859-8 code points of interest
+FINAL_KAF = 0xea
+NORMAL_KAF = 0xeb
+FINAL_MEM = 0xed
+NORMAL_MEM = 0xee
+FINAL_NUN = 0xef
+NORMAL_NUN = 0xf0
+FINAL_PE = 0xf3
+NORMAL_PE = 0xf4
+FINAL_TSADI = 0xf5
+NORMAL_TSADI = 0xf6
+
+# Minimum Visual vs Logical final letter score difference.
+# If the difference is below this, don't rely solely on the final letter score
+# distance.
+MIN_FINAL_CHAR_DISTANCE = 5
+
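+# A worked example of this threshold (illustrative numbers only): with a
+# logical final-letter score of 9 and a visual score of 2, the distance is
+# 7 >= 5, so the final-letter evidence alone decides "windows-1255". With
+# scores of 3 and 2, the distance is only 1, and get_charset_name() falls
+# back to the model score distance defined next.
+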
+MIN_MODEL_DISTANCE = 0.01

+VISUAL_HEBREW_NAME = "ISO-8859-8"
+LOGICAL_HEBREW_NAME = "windows-1255"


+class HebrewProber(CharSetProber):
+    def __init__(self):
+        CharSetProber.__init__(self)
+        self._mLogicalProber = None
+        self._mVisualProber = None
+        self.reset()

+    def reset(self):
+        self._mFinalCharLogicalScore = 0
+        self._mFinalCharVisualScore = 0
+        # mPrev and mBeforePrev hold the last two characters seen in the
+        # previous buffer; they are initialized to a space in order to
+        # simulate a word delimiter at the beginning of the data.
+        self._mPrev = ' '
+        self._mBeforePrev = ' '
+        # These probers are owned by the group prober.

+    def set_model_probers(self, logicalProber, visualProber):
+        self._mLogicalProber = logicalProber
+        self._mVisualProber = visualProber

+    def is_final(self, c):
+        return wrap_ord(c) in [FINAL_KAF, FINAL_MEM, FINAL_NUN, FINAL_PE,
+                               FINAL_TSADI]

+    def is_non_final(self, c):
+        # The normal Tsadi is not a good Non-Final letter due to words like
+        # 'lechotet' (to chat) containing an apostrophe after the tsadi. This
+        # apostrophe is converted to a space in filter_without_english_letters,
+        # causing the Non-Final tsadi to appear at the end of a word even
+        # though this is not the case in the original text.
+        # The letters Pe and Kaf rarely display a similar behavior of not
+        # being a good Non-Final letter. Words like 'Pop', 'Winamp' and
+        # 'Mubarak', for example, legally end with a Non-Final Pe or Kaf.
+        # However, the benefit of these letters as Non-Final letters
+        # outweighs the damage since such words are quite rare.
+        return wrap_ord(c) in [NORMAL_KAF, NORMAL_MEM, NORMAL_NUN, NORMAL_PE]

+    def feed(self, aBuf):
+        # Final letter analysis for logical-visual decision.
+        # Look for evidence that the received buffer is either logical Hebrew
+        # or visual Hebrew.
+        # The following cases are checked:
+        # 1) A word longer than 1 letter, ending with a final letter. This is
+        #    an indication that the text is laid out "naturally" since the
+        #    final letter really appears at the end. +1 for logical score.
+        # 2) A word longer than 1 letter, ending with a Non-Final letter. In
+        #    normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi
+        #    should not end with the Non-Final form of that letter. Exceptions
+        #    to this rule are mentioned above in is_non_final(). This is an
+        #    indication that the text is laid out backwards. +1 for visual
+        #    score.
+        # 3) A word longer than 1 letter, starting with a final letter. Final
+        #    letters should not appear at the beginning of a word. This is an
+        #    indication that the text is laid out backwards. +1 for visual
+        #    score.
+        #
+        # The visual score and logical score are accumulated throughout the
+        # text and are finally checked against each other in
+        # get_charset_name().
+        # No checking for final letters in the middle of words is done since
+        # that case is not an indication for either Logical or Visual text.
+        #
+        # We automatically filter out all 7-bit characters (replace them with
+        # spaces) so the word boundary detection works properly. [MAP]

+        if self.get_state() == eNotMe:
+            # Both model probers say it's not them. No reason to continue.
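+            # (get_state() at the bottom of this class delegates to the two
+            # model probers; it reports eNotMe only once both the logical
+            # and the visual prober have ruled themselves out.)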
+            return eNotMe

+        aBuf = self.filter_high_bit_only(aBuf)

+        for cur in aBuf:
+            if cur == ' ':
+                # We stand on a space - a word just ended
+                if self._mBeforePrev != ' ':
+                    # The next-to-last char was not a space, so self._mPrev
+                    # is not a one-letter word
+                    if self.is_final(self._mPrev):
+                        # case (1) [-2:not space][-1:final letter][cur:space]
+                        self._mFinalCharLogicalScore += 1
+                    elif self.is_non_final(self._mPrev):
+                        # case (2) [-2:not space][-1:Non-Final letter]
+                        # [cur:space]
+                        self._mFinalCharVisualScore += 1
+            else:
+                # Not standing on a space
+                if ((self._mBeforePrev == ' ') and
+                        (self.is_final(self._mPrev)) and (cur != ' ')):
+                    # case (3) [-2:space][-1:final letter][cur:not space]
+                    self._mFinalCharVisualScore += 1
+            self._mBeforePrev = self._mPrev
+            self._mPrev = cur

+        # Keep detecting until the end, or until both model probers return
+        # eNotMe (handled above).
+        return eDetecting

+    def get_charset_name(self):
+        # Make the decision: is it Logical or Visual?
+        # If the final letter score distance is dominant enough, rely on it.
+        finalsub = self._mFinalCharLogicalScore - self._mFinalCharVisualScore
+        if finalsub >= MIN_FINAL_CHAR_DISTANCE:
+            return LOGICAL_HEBREW_NAME
+        if finalsub <= -MIN_FINAL_CHAR_DISTANCE:
+            return VISUAL_HEBREW_NAME

+        # It's not dominant enough; try to rely on the model scores instead.
+        modelsub = (self._mLogicalProber.get_confidence()
+                    - self._mVisualProber.get_confidence())
+        if modelsub > MIN_MODEL_DISTANCE:
+            return LOGICAL_HEBREW_NAME
+        if modelsub < -MIN_MODEL_DISTANCE:
+            return VISUAL_HEBREW_NAME

+        # Still no good; fall back to the final letter distance, maybe it'll
+        # save the day.
+        if finalsub < 0.0:
+            return VISUAL_HEBREW_NAME

+        # Either finalsub > 0 (Logical), or we don't know what to do;
+        # default to Logical.
+        return LOGICAL_HEBREW_NAME

+    def get_state(self):
+        # Remain active as long as either of the model probers is active.
+        if (self._mLogicalProber.get_state() == eNotMe) and \
+           (self._mVisualProber.get_state() == eNotMe):
+            return eNotMe
+        return eDetecting
diff --git a/awx/lib/site-packages/requests/packages/charade/jisfreq.py b/awx/lib/site-packages/requests/packages/charade/jisfreq.py
new file mode 100644
index 0000000000..064345b086
--- /dev/null
+++ b/awx/lib/site-packages/requests/packages/charade/jisfreq.py
@@ -0,0 +1,569 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details. 
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################

+# Sampled from about 20M of text material, including literature and computer
+# technology.
+#
+# Japanese frequency table, applied to both S-JIS and EUC-JP.
+# The characters are sorted in order of frequency.

+# 128 --> 0.77094
+# 256 --> 0.85710
+# 512 --> 0.92635
+# 1024 --> 0.97130
+# 2048 --> 0.99431
+#
+# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58
+# Random Distribution Ratio = 512 / (2965+62+83+86-512) = 0.191
+#
+# Typical Distribution Ratio, 25% of IDR

+JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0

+# Char to FreqOrder table
+JIS_TABLE_SIZE = 4368

+JISCharToFreqOrder = (
+ 40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16
+3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32
+1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48
+2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64
+2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80
+5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96
+1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112
+5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128
+5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144
+5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160
+5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176
+5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192
+5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208
+1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224
+1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240
+1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256
+2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272
+3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288
+3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304
+ 4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320
+ 12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336
+1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352
+ 109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368
+5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384
+ 271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400
+ 32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416
+ 43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432
+ 280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448
+ 54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464
+5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480
+5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496 
+5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512 +4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528 +5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544 +5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560 +5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576 +5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592 +5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608 +5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624 +5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640 +5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656 +5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672 +3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688 +5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704 +5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720 +5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736 +5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752 +5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768 +5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784 +5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800 +5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816 +5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832 +5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848 +5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864 +5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880 +5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896 +5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912 +5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928 +5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944 +5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960 +5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976 +5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992 +5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008 +5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024 +5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040 +5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056 +5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072 +5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088 +5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104 +5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120 +5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136 
+5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152 +5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168 +5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184 +5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200 +5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216 +5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232 +5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248 +5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264 +5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280 +5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296 +6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312 +6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328 +6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344 +6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360 +6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376 +6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392 +6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408 +6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424 +4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440 + 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456 + 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472 +1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488 +1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504 + 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520 +3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536 +3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552 + 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568 +3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584 +3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600 + 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616 +2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632 + 277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648 +3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664 +1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680 + 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696 +1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712 + 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728 +2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744 +2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760 +2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 
1776 +2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792 +1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808 +1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824 +1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840 +1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856 +2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 1872 +1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888 +2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904 +1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920 +1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936 +1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952 +1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968 +1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984 +1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000 + 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016 + 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032 +1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048 +2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064 +2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080 +2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096 +3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112 +3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128 + 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144 +3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160 +1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176 + 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192 +2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208 +1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224 + 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240 +3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256 +4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272 +2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288 +1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304 +2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320 +1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336 + 385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352 + 178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368 +1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384 +2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400 +2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, 
# 2416 +2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432 +3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448 +1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464 +2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480 + 359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496 + 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, # 2512 + 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528 +1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544 +2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560 + 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576 +1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592 +1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608 + 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624 +1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640 +1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656 +1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672 + 764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688 +2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704 + 278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720 +2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736 +3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752 +2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768 +1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784 +6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800 +1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816 +2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832 +1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848 + 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864 + 72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880 +3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896 +3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912 +1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928 +1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944 +1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960 +1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976 + 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992 + 913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008 +2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024 + 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040 +3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 
3056 +2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072 + 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088 +1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104 +2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120 + 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136 +1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, # 3152 + 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168 +4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184 +2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200 +1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216 + 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232 +1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248 +2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264 + 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280 +6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296 +1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312 +1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328 +2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344 +3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360 + 914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376 +3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392 +1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408 + 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424 +1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440 + 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456 +3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472 + 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488 +2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504 + 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520 +4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536 +2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552 +1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568 +1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584 +1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600 + 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616 +1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632 +3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648 +1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664 +3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680 + 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 
532, # 3696 + 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712 + 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728 +2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744 +1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760 + 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776 +1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 904,3618,3537, # 3792 + 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808 +1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824 + 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840 + 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856 + 480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872 +1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888 +1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904 +2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920 +4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936 + 227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952 +1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968 + 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984 +1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000 +3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016 +1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032 +2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048 +2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064 +1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080 +1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096 +2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112 + 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128 +2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144 +1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160 +1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176 +1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192 +1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208 +3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224 +2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240 +2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256 + 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272 +3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288 +3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304 +1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320 +2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 
891,4363, # 4336 +1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352 +2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512 +#Everything below is of no interest for detection purpose +2138,2122,3730,2888,1995,1820,1044,6190,6191,6192,6193,6194,6195,6196,6197,6198, # 4384 +6199,6200,6201,6202,6203,6204,6205,4670,6206,6207,6208,6209,6210,6211,6212,6213, # 4400 +6214,6215,6216,6217,6218,6219,6220,6221,6222,6223,6224,6225,6226,6227,6228,6229, # 4416 +6230,6231,6232,6233,6234,6235,6236,6237,3187,6238,6239,3969,6240,6241,6242,6243, # 4432 +6244,4671,6245,6246,4672,6247,6248,4133,6249,6250,4364,6251,2923,2556,2613,4673, # 4448 +4365,3970,6252,6253,6254,6255,4674,6256,6257,6258,2768,2353,4366,4675,4676,3188, # 4464 +4367,3463,6259,4134,4677,4678,6260,2267,6261,3842,3332,4368,3543,6262,6263,6264, # 4480 +3013,1954,1928,4135,4679,6265,6266,2478,3091,6267,4680,4369,6268,6269,1699,6270, # 4496 +3544,4136,4681,6271,4137,6272,4370,2804,6273,6274,2593,3971,3972,4682,6275,2236, # 4512 +4683,6276,6277,4684,6278,6279,4138,3973,4685,6280,6281,3258,6282,6283,6284,6285, # 4528 +3974,4686,2841,3975,6286,6287,3545,6288,6289,4139,4687,4140,6290,4141,6291,4142, # 4544 +6292,6293,3333,6294,6295,6296,4371,6297,3399,6298,6299,4372,3976,6300,6301,6302, # 4560 +4373,6303,6304,3843,3731,6305,4688,4374,6306,6307,3259,2294,6308,3732,2530,4143, # 4576 +6309,4689,6310,6311,6312,3048,6313,6314,4690,3733,2237,6315,6316,2282,3334,6317, # 4592 +6318,3844,6319,6320,4691,6321,3400,4692,6322,4693,6323,3049,6324,4375,6325,3977, # 4608 +6326,6327,6328,3546,6329,4694,3335,6330,4695,4696,6331,6332,6333,6334,4376,3978, # 4624 +6335,4697,3979,4144,6336,3980,4698,6337,6338,6339,6340,6341,4699,4700,4701,6342, # 4640 +6343,4702,6344,6345,4703,6346,6347,4704,6348,4705,4706,3135,6349,4707,6350,4708, # 4656 +6351,4377,6352,4709,3734,4145,6353,2506,4710,3189,6354,3050,4711,3981,6355,3547, # 4672 +3014,4146,4378,3735,2651,3845,3260,3136,2224,1986,6356,3401,6357,4712,2594,3627, # 4688 +3137,2573,3736,3982,4713,3628,4714,4715,2682,3629,4716,6358,3630,4379,3631,6359, # 4704 +6360,6361,3983,6362,6363,6364,6365,4147,3846,4717,6366,6367,3737,2842,6368,4718, # 4720 +2628,6369,3261,6370,2386,6371,6372,3738,3984,4719,3464,4720,3402,6373,2924,3336, # 4736 +4148,2866,6374,2805,3262,4380,2704,2069,2531,3138,2806,2984,6375,2769,6376,4721, # 4752 +4722,3403,6377,6378,3548,6379,6380,2705,3092,1979,4149,2629,3337,2889,6381,3338, # 4768 +4150,2557,3339,4381,6382,3190,3263,3739,6383,4151,4723,4152,2558,2574,3404,3191, # 4784 +6384,6385,4153,6386,4724,4382,6387,6388,4383,6389,6390,4154,6391,4725,3985,6392, # 4800 +3847,4155,6393,6394,6395,6396,6397,3465,6398,4384,6399,6400,6401,6402,6403,6404, # 4816 +4156,6405,6406,6407,6408,2123,6409,6410,2326,3192,4726,6411,6412,6413,6414,4385, # 4832 +4157,6415,6416,4158,6417,3093,3848,6418,3986,6419,6420,3849,6421,6422,6423,4159, # 4848 +6424,6425,4160,6426,3740,6427,6428,6429,6430,3987,6431,4727,6432,2238,6433,6434, # 4864 +4386,3988,6435,6436,3632,6437,6438,2843,6439,6440,6441,6442,3633,6443,2958,6444, # 4880 +6445,3466,6446,2364,4387,3850,6447,4388,2959,3340,6448,3851,6449,4728,6450,6451, # 4896 +3264,4729,6452,3193,6453,4389,4390,2706,3341,4730,6454,3139,6455,3194,6456,3051, # 4912 +2124,3852,1602,4391,4161,3853,1158,3854,4162,3989,4392,3990,4731,4732,4393,2040, # 4928 +4163,4394,3265,6457,2807,3467,3855,6458,6459,6460,3991,3468,4733,4734,6461,3140, # 4944 +2960,6462,4735,6463,6464,6465,6466,4736,4737,4738,4739,6467,6468,4164,2403,3856, # 
4960 +6469,6470,2770,2844,6471,4740,6472,6473,6474,6475,6476,6477,6478,3195,6479,4741, # 4976 +4395,6480,2867,6481,4742,2808,6482,2493,4165,6483,6484,6485,6486,2295,4743,6487, # 4992 +6488,6489,3634,6490,6491,6492,6493,6494,6495,6496,2985,4744,6497,6498,4745,6499, # 5008 +6500,2925,3141,4166,6501,6502,4746,6503,6504,4747,6505,6506,6507,2890,6508,6509, # 5024 +6510,6511,6512,6513,6514,6515,6516,6517,6518,6519,3469,4167,6520,6521,6522,4748, # 5040 +4396,3741,4397,4749,4398,3342,2125,4750,6523,4751,4752,4753,3052,6524,2961,4168, # 5056 +6525,4754,6526,4755,4399,2926,4169,6527,3857,6528,4400,4170,6529,4171,6530,6531, # 5072 +2595,6532,6533,6534,6535,3635,6536,6537,6538,6539,6540,6541,6542,4756,6543,6544, # 5088 +6545,6546,6547,6548,4401,6549,6550,6551,6552,4402,3405,4757,4403,6553,6554,6555, # 5104 +4172,3742,6556,6557,6558,3992,3636,6559,6560,3053,2726,6561,3549,4173,3054,4404, # 5120 +6562,6563,3993,4405,3266,3550,2809,4406,6564,6565,6566,4758,4759,6567,3743,6568, # 5136 +4760,3744,4761,3470,6569,6570,6571,4407,6572,3745,4174,6573,4175,2810,4176,3196, # 5152 +4762,6574,4177,6575,6576,2494,2891,3551,6577,6578,3471,6579,4408,6580,3015,3197, # 5168 +6581,3343,2532,3994,3858,6582,3094,3406,4409,6583,2892,4178,4763,4410,3016,4411, # 5184 +6584,3995,3142,3017,2683,6585,4179,6586,6587,4764,4412,6588,6589,4413,6590,2986, # 5200 +6591,2962,3552,6592,2963,3472,6593,6594,4180,4765,6595,6596,2225,3267,4414,6597, # 5216 +3407,3637,4766,6598,6599,3198,6600,4415,6601,3859,3199,6602,3473,4767,2811,4416, # 5232 +1856,3268,3200,2575,3996,3997,3201,4417,6603,3095,2927,6604,3143,6605,2268,6606, # 5248 +3998,3860,3096,2771,6607,6608,3638,2495,4768,6609,3861,6610,3269,2745,4769,4181, # 5264 +3553,6611,2845,3270,6612,6613,6614,3862,6615,6616,4770,4771,6617,3474,3999,4418, # 5280 +4419,6618,3639,3344,6619,4772,4182,6620,2126,6621,6622,6623,4420,4773,6624,3018, # 5296 +6625,4774,3554,6626,4183,2025,3746,6627,4184,2707,6628,4421,4422,3097,1775,4185, # 5312 +3555,6629,6630,2868,6631,6632,4423,6633,6634,4424,2414,2533,2928,6635,4186,2387, # 5328 +6636,4775,6637,4187,6638,1891,4425,3202,3203,6639,6640,4776,6641,3345,6642,6643, # 5344 +3640,6644,3475,3346,3641,4000,6645,3144,6646,3098,2812,4188,3642,3204,6647,3863, # 5360 +3476,6648,3864,6649,4426,4001,6650,6651,6652,2576,6653,4189,4777,6654,6655,6656, # 5376 +2846,6657,3477,3205,4002,6658,4003,6659,3347,2252,6660,6661,6662,4778,6663,6664, # 5392 +6665,6666,6667,6668,6669,4779,4780,2048,6670,3478,3099,6671,3556,3747,4004,6672, # 5408 +6673,6674,3145,4005,3748,6675,6676,6677,6678,6679,3408,6680,6681,6682,6683,3206, # 5424 +3207,6684,6685,4781,4427,6686,4782,4783,4784,6687,6688,6689,4190,6690,6691,3479, # 5440 +6692,2746,6693,4428,6694,6695,6696,6697,6698,6699,4785,6700,6701,3208,2727,6702, # 5456 +3146,6703,6704,3409,2196,6705,4429,6706,6707,6708,2534,1996,6709,6710,6711,2747, # 5472 +6712,6713,6714,4786,3643,6715,4430,4431,6716,3557,6717,4432,4433,6718,6719,6720, # 5488 +6721,3749,6722,4006,4787,6723,6724,3644,4788,4434,6725,6726,4789,2772,6727,6728, # 5504 +6729,6730,6731,2708,3865,2813,4435,6732,6733,4790,4791,3480,6734,6735,6736,6737, # 5520 +4436,3348,6738,3410,4007,6739,6740,4008,6741,6742,4792,3411,4191,6743,6744,6745, # 5536 +6746,6747,3866,6748,3750,6749,6750,6751,6752,6753,6754,6755,3867,6756,4009,6757, # 5552 +4793,4794,6758,2814,2987,6759,6760,6761,4437,6762,6763,6764,6765,3645,6766,6767, # 5568 +3481,4192,6768,3751,6769,6770,2174,6771,3868,3752,6772,6773,6774,4193,4795,4438, # 5584 
+3558,4796,4439,6775,4797,6776,6777,4798,6778,4799,3559,4800,6779,6780,6781,3482, # 5600 +6782,2893,6783,6784,4194,4801,4010,6785,6786,4440,6787,4011,6788,6789,6790,6791, # 5616 +6792,6793,4802,6794,6795,6796,4012,6797,6798,6799,6800,3349,4803,3483,6801,4804, # 5632 +4195,6802,4013,6803,6804,4196,6805,4014,4015,6806,2847,3271,2848,6807,3484,6808, # 5648 +6809,6810,4441,6811,4442,4197,4443,3272,4805,6812,3412,4016,1579,6813,6814,4017, # 5664 +6815,3869,6816,2964,6817,4806,6818,6819,4018,3646,6820,6821,4807,4019,4020,6822, # 5680 +6823,3560,6824,6825,4021,4444,6826,4198,6827,6828,4445,6829,6830,4199,4808,6831, # 5696 +6832,6833,3870,3019,2458,6834,3753,3413,3350,6835,4809,3871,4810,3561,4446,6836, # 5712 +6837,4447,4811,4812,6838,2459,4448,6839,4449,6840,6841,4022,3872,6842,4813,4814, # 5728 +6843,6844,4815,4200,4201,4202,6845,4023,6846,6847,4450,3562,3873,6848,6849,4816, # 5744 +4817,6850,4451,4818,2139,6851,3563,6852,6853,3351,6854,6855,3352,4024,2709,3414, # 5760 +4203,4452,6856,4204,6857,6858,3874,3875,6859,6860,4819,6861,6862,6863,6864,4453, # 5776 +3647,6865,6866,4820,6867,6868,6869,6870,4454,6871,2869,6872,6873,4821,6874,3754, # 5792 +6875,4822,4205,6876,6877,6878,3648,4206,4455,6879,4823,6880,4824,3876,6881,3055, # 5808 +4207,6882,3415,6883,6884,6885,4208,4209,6886,4210,3353,6887,3354,3564,3209,3485, # 5824 +2652,6888,2728,6889,3210,3755,6890,4025,4456,6891,4825,6892,6893,6894,6895,4211, # 5840 +6896,6897,6898,4826,6899,6900,4212,6901,4827,6902,2773,3565,6903,4828,6904,6905, # 5856 +6906,6907,3649,3650,6908,2849,3566,6909,3567,3100,6910,6911,6912,6913,6914,6915, # 5872 +4026,6916,3355,4829,3056,4457,3756,6917,3651,6918,4213,3652,2870,6919,4458,6920, # 5888 +2438,6921,6922,3757,2774,4830,6923,3356,4831,4832,6924,4833,4459,3653,2507,6925, # 5904 +4834,2535,6926,6927,3273,4027,3147,6928,3568,6929,6930,6931,4460,6932,3877,4461, # 5920 +2729,3654,6933,6934,6935,6936,2175,4835,2630,4214,4028,4462,4836,4215,6937,3148, # 5936 +4216,4463,4837,4838,4217,6938,6939,2850,4839,6940,4464,6941,6942,6943,4840,6944, # 5952 +4218,3274,4465,6945,6946,2710,6947,4841,4466,6948,6949,2894,6950,6951,4842,6952, # 5968 +4219,3057,2871,6953,6954,6955,6956,4467,6957,2711,6958,6959,6960,3275,3101,4843, # 5984 +6961,3357,3569,6962,4844,6963,6964,4468,4845,3570,6965,3102,4846,3758,6966,4847, # 6000 +3878,4848,4849,4029,6967,2929,3879,4850,4851,6968,6969,1733,6970,4220,6971,6972, # 6016 +6973,6974,6975,6976,4852,6977,6978,6979,6980,6981,6982,3759,6983,6984,6985,3486, # 6032 +3487,6986,3488,3416,6987,6988,6989,6990,6991,6992,6993,6994,6995,6996,6997,4853, # 6048 +6998,6999,4030,7000,7001,3211,7002,7003,4221,7004,7005,3571,4031,7006,3572,7007, # 6064 +2614,4854,2577,7008,7009,2965,3655,3656,4855,2775,3489,3880,4222,4856,3881,4032, # 6080 +3882,3657,2730,3490,4857,7010,3149,7011,4469,4858,2496,3491,4859,2283,7012,7013, # 6096 +7014,2365,4860,4470,7015,7016,3760,7017,7018,4223,1917,7019,7020,7021,4471,7022, # 6112 +2776,4472,7023,7024,7025,7026,4033,7027,3573,4224,4861,4034,4862,7028,7029,1929, # 6128 +3883,4035,7030,4473,3058,7031,2536,3761,3884,7032,4036,7033,2966,2895,1968,4474, # 6144 +3276,4225,3417,3492,4226,2105,7034,7035,1754,2596,3762,4227,4863,4475,3763,4864, # 6160 +3764,2615,2777,3103,3765,3658,3418,4865,2296,3766,2815,7036,7037,7038,3574,2872, # 6176 +3277,4476,7039,4037,4477,7040,7041,4038,7042,7043,7044,7045,7046,7047,2537,7048, # 6192 +7049,7050,7051,7052,7053,7054,4478,7055,7056,3767,3659,4228,3575,7057,7058,4229, # 6208 +7059,7060,7061,3660,7062,3212,7063,3885,4039,2460,7064,7065,7066,7067,7068,7069, # 
6224 +7070,7071,7072,7073,7074,4866,3768,4867,7075,7076,7077,7078,4868,3358,3278,2653, # 6240 +7079,7080,4479,3886,7081,7082,4869,7083,7084,7085,7086,7087,7088,2538,7089,7090, # 6256 +7091,4040,3150,3769,4870,4041,2896,3359,4230,2930,7092,3279,7093,2967,4480,3213, # 6272 +4481,3661,7094,7095,7096,7097,7098,7099,7100,7101,7102,2461,3770,7103,7104,4231, # 6288 +3151,7105,7106,7107,4042,3662,7108,7109,4871,3663,4872,4043,3059,7110,7111,7112, # 6304 +3493,2988,7113,4873,7114,7115,7116,3771,4874,7117,7118,4232,4875,7119,3576,2336, # 6320 +4876,7120,4233,3419,4044,4877,4878,4482,4483,4879,4484,4234,7121,3772,4880,1045, # 6336 +3280,3664,4881,4882,7122,7123,7124,7125,4883,7126,2778,7127,4485,4486,7128,4884, # 6352 +3214,3887,7129,7130,3215,7131,4885,4045,7132,7133,4046,7134,7135,7136,7137,7138, # 6368 +7139,7140,7141,7142,7143,4235,7144,4886,7145,7146,7147,4887,7148,7149,7150,4487, # 6384 +4047,4488,7151,7152,4888,4048,2989,3888,7153,3665,7154,4049,7155,7156,7157,7158, # 6400 +7159,7160,2931,4889,4890,4489,7161,2631,3889,4236,2779,7162,7163,4891,7164,3060, # 6416 +7165,1672,4892,7166,4893,4237,3281,4894,7167,7168,3666,7169,3494,7170,7171,4050, # 6432 +7172,7173,3104,3360,3420,4490,4051,2684,4052,7174,4053,7175,7176,7177,2253,4054, # 6448 +7178,7179,4895,7180,3152,3890,3153,4491,3216,7181,7182,7183,2968,4238,4492,4055, # 6464 +7184,2990,7185,2479,7186,7187,4493,7188,7189,7190,7191,7192,4896,7193,4897,2969, # 6480 +4494,4898,7194,3495,7195,7196,4899,4495,7197,3105,2731,7198,4900,7199,7200,7201, # 6496 +4056,7202,3361,7203,7204,4496,4901,4902,7205,4497,7206,7207,2315,4903,7208,4904, # 6512 +7209,4905,2851,7210,7211,3577,7212,3578,4906,7213,4057,3667,4907,7214,4058,2354, # 6528 +3891,2376,3217,3773,7215,7216,7217,7218,7219,4498,7220,4908,3282,2685,7221,3496, # 6544 +4909,2632,3154,4910,7222,2337,7223,4911,7224,7225,7226,4912,4913,3283,4239,4499, # 6560 +7227,2816,7228,7229,7230,7231,7232,7233,7234,4914,4500,4501,7235,7236,7237,2686, # 6576 +7238,4915,7239,2897,4502,7240,4503,7241,2516,7242,4504,3362,3218,7243,7244,7245, # 6592 +4916,7246,7247,4505,3363,7248,7249,7250,7251,3774,4506,7252,7253,4917,7254,7255, # 6608 +3284,2991,4918,4919,3219,3892,4920,3106,3497,4921,7256,7257,7258,4922,7259,4923, # 6624 +3364,4507,4508,4059,7260,4240,3498,7261,7262,4924,7263,2992,3893,4060,3220,7264, # 6640 +7265,7266,7267,7268,7269,4509,3775,7270,2817,7271,4061,4925,4510,3776,7272,4241, # 6656 +4511,3285,7273,7274,3499,7275,7276,7277,4062,4512,4926,7278,3107,3894,7279,7280, # 6672 +4927,7281,4513,7282,7283,3668,7284,7285,4242,4514,4243,7286,2058,4515,4928,4929, # 6688 +4516,7287,3286,4244,7288,4517,7289,7290,7291,3669,7292,7293,4930,4931,4932,2355, # 6704 +4933,7294,2633,4518,7295,4245,7296,7297,4519,7298,7299,4520,4521,4934,7300,4246, # 6720 +4522,7301,7302,7303,3579,7304,4247,4935,7305,4936,7306,7307,7308,7309,3777,7310, # 6736 +4523,7311,7312,7313,4248,3580,7314,4524,3778,4249,7315,3581,7316,3287,7317,3221, # 6752 +7318,4937,7319,7320,7321,7322,7323,7324,4938,4939,7325,4525,7326,7327,7328,4063, # 6768 +7329,7330,4940,7331,7332,4941,7333,4526,7334,3500,2780,1741,4942,2026,1742,7335, # 6784 +7336,3582,4527,2388,7337,7338,7339,4528,7340,4250,4943,7341,7342,7343,4944,7344, # 6800 +7345,7346,3020,7347,4945,7348,7349,7350,7351,3895,7352,3896,4064,3897,7353,7354, # 6816 +7355,4251,7356,7357,3898,7358,3779,7359,3780,3288,7360,7361,4529,7362,4946,4530, # 6832 +2027,7363,3899,4531,4947,3222,3583,7364,4948,7365,7366,7367,7368,4949,3501,4950, # 6848 
+3781,4951,4532,7369,2517,4952,4252,4953,3155,7370,4954,4955,4253,2518,4533,7371, # 6864 +7372,2712,4254,7373,7374,7375,3670,4956,3671,7376,2389,3502,4065,7377,2338,7378, # 6880 +7379,7380,7381,3061,7382,4957,7383,7384,7385,7386,4958,4534,7387,7388,2993,7389, # 6896 +3062,7390,4959,7391,7392,7393,4960,3108,4961,7394,4535,7395,4962,3421,4536,7396, # 6912 +4963,7397,4964,1857,7398,4965,7399,7400,2176,3584,4966,7401,7402,3422,4537,3900, # 6928 +3585,7403,3782,7404,2852,7405,7406,7407,4538,3783,2654,3423,4967,4539,7408,3784, # 6944 +3586,2853,4540,4541,7409,3901,7410,3902,7411,7412,3785,3109,2327,3903,7413,7414, # 6960 +2970,4066,2932,7415,7416,7417,3904,3672,3424,7418,4542,4543,4544,7419,4968,7420, # 6976 +7421,4255,7422,7423,7424,7425,7426,4067,7427,3673,3365,4545,7428,3110,2559,3674, # 6992 +7429,7430,3156,7431,7432,3503,7433,3425,4546,7434,3063,2873,7435,3223,4969,4547, # 7008 +4548,2898,4256,4068,7436,4069,3587,3786,2933,3787,4257,4970,4971,3788,7437,4972, # 7024 +3064,7438,4549,7439,7440,7441,7442,7443,4973,3905,7444,2874,7445,7446,7447,7448, # 7040 +3021,7449,4550,3906,3588,4974,7450,7451,3789,3675,7452,2578,7453,4070,7454,7455, # 7056 +7456,4258,3676,7457,4975,7458,4976,4259,3790,3504,2634,4977,3677,4551,4260,7459, # 7072 +7460,7461,7462,3907,4261,4978,7463,7464,7465,7466,4979,4980,7467,7468,2213,4262, # 7088 +7469,7470,7471,3678,4981,7472,2439,7473,4263,3224,3289,7474,3908,2415,4982,7475, # 7104 +4264,7476,4983,2655,7477,7478,2732,4552,2854,2875,7479,7480,4265,7481,4553,4984, # 7120 +7482,7483,4266,7484,3679,3366,3680,2818,2781,2782,3367,3589,4554,3065,7485,4071, # 7136 +2899,7486,7487,3157,2462,4072,4555,4073,4985,4986,3111,4267,2687,3368,4556,4074, # 7152 +3791,4268,7488,3909,2783,7489,2656,1962,3158,4557,4987,1963,3159,3160,7490,3112, # 7168 +4988,4989,3022,4990,4991,3792,2855,7491,7492,2971,4558,7493,7494,4992,7495,7496, # 7184 +7497,7498,4993,7499,3426,4559,4994,7500,3681,4560,4269,4270,3910,7501,4075,4995, # 7200 +4271,7502,7503,4076,7504,4996,7505,3225,4997,4272,4077,2819,3023,7506,7507,2733, # 7216 +4561,7508,4562,7509,3369,3793,7510,3590,2508,7511,7512,4273,3113,2994,2616,7513, # 7232 +7514,7515,7516,7517,7518,2820,3911,4078,2748,7519,7520,4563,4998,7521,7522,7523, # 7248 +7524,4999,4274,7525,4564,3682,2239,4079,4565,7526,7527,7528,7529,5000,7530,7531, # 7264 +5001,4275,3794,7532,7533,7534,3066,5002,4566,3161,7535,7536,4080,7537,3162,7538, # 7280 +7539,4567,7540,7541,7542,7543,7544,7545,5003,7546,4568,7547,7548,7549,7550,7551, # 7296 +7552,7553,7554,7555,7556,5004,7557,7558,7559,5005,7560,3795,7561,4569,7562,7563, # 7312 +7564,2821,3796,4276,4277,4081,7565,2876,7566,5006,7567,7568,2900,7569,3797,3912, # 7328 +7570,7571,7572,4278,7573,7574,7575,5007,7576,7577,5008,7578,7579,4279,2934,7580, # 7344 +7581,5009,7582,4570,7583,4280,7584,7585,7586,4571,4572,3913,7587,4573,3505,7588, # 7360 +5010,7589,7590,7591,7592,3798,4574,7593,7594,5011,7595,4281,7596,7597,7598,4282, # 7376 +5012,7599,7600,5013,3163,7601,5014,7602,3914,7603,7604,2734,4575,4576,4577,7605, # 7392 +7606,7607,7608,7609,3506,5015,4578,7610,4082,7611,2822,2901,2579,3683,3024,4579, # 7408 +3507,7612,4580,7613,3226,3799,5016,7614,7615,7616,7617,7618,7619,7620,2995,3290, # 7424 +7621,4083,7622,5017,7623,7624,7625,7626,7627,4581,3915,7628,3291,7629,5018,7630, # 7440 +7631,7632,7633,4084,7634,7635,3427,3800,7636,7637,4582,7638,5019,4583,5020,7639, # 7456 +3916,7640,3801,5021,4584,4283,7641,7642,3428,3591,2269,7643,2617,7644,4585,3592, # 7472 +7645,4586,2902,7646,7647,3227,5022,7648,4587,7649,4284,7650,7651,7652,4588,2284, # 
7488 +7653,5023,7654,7655,7656,4589,5024,3802,7657,7658,5025,3508,4590,7659,7660,7661, # 7504 +1969,5026,7662,7663,3684,1821,2688,7664,2028,2509,4285,7665,2823,1841,7666,2689, # 7520 +3114,7667,3917,4085,2160,5027,5028,2972,7668,5029,7669,7670,7671,3593,4086,7672, # 7536 +4591,4087,5030,3803,7673,7674,7675,7676,7677,7678,7679,4286,2366,4592,4593,3067, # 7552 +2328,7680,7681,4594,3594,3918,2029,4287,7682,5031,3919,3370,4288,4595,2856,7683, # 7568 +3509,7684,7685,5032,5033,7686,7687,3804,2784,7688,7689,7690,7691,3371,7692,7693, # 7584 +2877,5034,7694,7695,3920,4289,4088,7696,7697,7698,5035,7699,5036,4290,5037,5038, # 7600 +5039,7700,7701,7702,5040,5041,3228,7703,1760,7704,5042,3229,4596,2106,4089,7705, # 7616 +4597,2824,5043,2107,3372,7706,4291,4090,5044,7707,4091,7708,5045,3025,3805,4598, # 7632 +4292,4293,4294,3373,7709,4599,7710,5046,7711,7712,5047,5048,3806,7713,7714,7715, # 7648 +5049,7716,7717,7718,7719,4600,5050,7720,7721,7722,5051,7723,4295,3429,7724,7725, # 7664 +7726,7727,3921,7728,3292,5052,4092,7729,7730,7731,7732,7733,7734,7735,5053,5054, # 7680 +7736,7737,7738,7739,3922,3685,7740,7741,7742,7743,2635,5055,7744,5056,4601,7745, # 7696 +7746,2560,7747,7748,7749,7750,3923,7751,7752,7753,7754,7755,4296,2903,7756,7757, # 7712 +7758,7759,7760,3924,7761,5057,4297,7762,7763,5058,4298,7764,4093,7765,7766,5059, # 7728 +3925,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,3595,7777,4299,5060,4094, # 7744 +7778,3293,5061,7779,7780,4300,7781,7782,4602,7783,3596,7784,7785,3430,2367,7786, # 7760 +3164,5062,5063,4301,7787,7788,4095,5064,5065,7789,3374,3115,7790,7791,7792,7793, # 7776 +7794,7795,7796,3597,4603,7797,7798,3686,3116,3807,5066,7799,7800,5067,7801,7802, # 7792 +4604,4302,5068,4303,4096,7803,7804,3294,7805,7806,5069,4605,2690,7807,3026,7808, # 7808 +7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824, # 7824 +7825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,7840, # 7840 +7841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855,7856, # 7856 +7857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871,7872, # 7872 +7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887,7888, # 7888 +7889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903,7904, # 7904 +7905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919,7920, # 7920 +7921,7922,7923,7924,3926,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935, # 7936 +7936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951, # 7952 +7952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967, # 7968 +7968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983, # 7984 +7984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999, # 8000 +8000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015, # 8016 +8016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031, # 8032 +8032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047, # 8048 +8048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063, # 8064 +8064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079, # 8080 +8080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095, # 8096 +8096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111, # 8112 
+8112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127, # 8128 +8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143, # 8144 +8144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159, # 8160 +8160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175, # 8176 +8176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191, # 8192 +8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207, # 8208 +8208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223, # 8224 +8224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,8239, # 8240 +8240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255, # 8256 +8256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271) # 8272 + +# flake8: noqa diff --git a/awx/lib/site-packages/requests/packages/charade/jpcntx.py b/awx/lib/site-packages/requests/packages/charade/jpcntx.py new file mode 100644 index 0000000000..b4e6af44a9 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/jpcntx.py @@ -0,0 +1,219 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .compat import wrap_ord + +NUM_OF_CATEGORY = 6 +DONT_KNOW = -1 +ENOUGH_REL_THRESHOLD = 100 +MAX_REL_THRESHOLD = 1000 +MINIMUM_DATA_THRESHOLD = 4 + +# This is hiragana 2-char sequence table, the number in each cell represents its frequency category +jp2CharContext = ( +(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1), +(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4), +(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2), +(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4), +(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4), +(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3), +(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3), +(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3), +(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4), +(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3), +(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4), 
+(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3), +(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5), +(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3), +(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5), +(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4), +(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4), +(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3), +(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3), +(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3), +(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5), +(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4), +(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5), +(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3), +(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4), +(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4), +(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4), +(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1), +(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0), +(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3), +(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0), 
+(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3), +(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3), +(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5), +(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4), +(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5), +(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3), +(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3), +(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3), +(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3), +(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4), +(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4), +(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2), +(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3), +(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3), +(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3), +(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3), +(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4), +(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3), +(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4), +(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3), 
+(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3), +(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4), +(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4), +(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3), +(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4), +(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4), +(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3), +(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4), +(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4), +(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4), +(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3), +(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2), +(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2), +(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3), +(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3), +(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5), +(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3), +(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4), +(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4), +(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4), 
+(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
+(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3),
+(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1),
+(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2),
+(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3),
+(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1),
+)
+
+class JapaneseContextAnalysis:
+    def __init__(self):
+        self.reset()
+
+    def reset(self):
+        self._mTotalRel = 0  # total sequences received
+        # category counters; each integer counts the sequences in its category
+        self._mRelSample = [0] * NUM_OF_CATEGORY
+        # if the last byte in the current buffer is not the last byte of a
+        # character, we need to know how many bytes to skip in the next buffer
+        self._mNeedToSkipCharNum = 0
+        self._mLastCharOrder = -1  # the order of the previous char
+        # If this flag is set to True, detection is done and a conclusion has
+        # been made
+        self._mDone = False
+
+    def feed(self, aBuf, aLen):
+        if self._mDone:
+            return
+
+        # The buffer we got is byte oriented, and a character may span more
+        # than one buffer. In case the last one or two bytes of the previous
+        # buffer were incomplete, we recorded how many bytes are needed to
+        # complete that character, and we skip those bytes here. We could
+        # record those bytes as well and analyse the character once it is
+        # complete, but since a single character will not make much
+        # difference, simply skipping it simplifies our logic and improves
+        # performance.
+        i = self._mNeedToSkipCharNum
+        while i < aLen:
+            order, charLen = self.get_order(aBuf[i:i + 2])
+            i += charLen
+            if i > aLen:
+                self._mNeedToSkipCharNum = i - aLen
+                self._mLastCharOrder = -1
+            else:
+                if (order != -1) and (self._mLastCharOrder != -1):
+                    self._mTotalRel += 1
+                    if self._mTotalRel > MAX_REL_THRESHOLD:
+                        self._mDone = True
+                        break
+                    self._mRelSample[jp2CharContext[self._mLastCharOrder][order]] += 1
+                self._mLastCharOrder = order
+
+    def got_enough_data(self):
+        return self._mTotalRel > ENOUGH_REL_THRESHOLD
+
+    def get_confidence(self):
+        # This is just one way to calculate confidence. It works well for me.
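+        # In other words: the confidence is the fraction of sampled hiragana
+        # pairs whose jp2CharContext category is non-zero (category 0
+        # presumably marking pairs with no recorded frequency), i.e.
+        # (total - category-0 count) / total. For example, if 90 of 100
+        # sampled pairs landed in categories 1-5, the confidence would
+        # be 0.9.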
+        if self._mTotalRel > MINIMUM_DATA_THRESHOLD:
+            # float() guards against integer division under Python 2, which
+            # would otherwise floor this ratio
+            return float(self._mTotalRel - self._mRelSample[0]) / self._mTotalRel
+        else:
+            return DONT_KNOW
+
+    def get_order(self, aBuf):
+        return -1, 1
+
+class SJISContextAnalysis(JapaneseContextAnalysis):
+    def get_order(self, aBuf):
+        if not aBuf:
+            return -1, 1
+        # find out the current char's byte length
+        first_char = wrap_ord(aBuf[0])
+        if ((0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC)):
+            charLen = 2
+        else:
+            charLen = 1
+
+        # return its order if it is hiragana
+        if len(aBuf) > 1:
+            second_char = wrap_ord(aBuf[1])
+            if (first_char == 202) and (0x9F <= second_char <= 0xF1):
+                return second_char - 0x9F, charLen
+
+        return -1, charLen
+
+class EUCJPContextAnalysis(JapaneseContextAnalysis):
+    def get_order(self, aBuf):
+        if not aBuf:
+            return -1, 1
+        # find out the current char's byte length
+        first_char = wrap_ord(aBuf[0])
+        if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE):
+            charLen = 2
+        elif first_char == 0x8F:
+            charLen = 3
+        else:
+            charLen = 1
+
+        # return its order if it is hiragana
+        if len(aBuf) > 1:
+            second_char = wrap_ord(aBuf[1])
+            if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3):
+                return second_char - 0xA1, charLen
+
+        return -1, charLen
+
+# flake8: noqa
diff --git a/awx/lib/site-packages/requests/packages/charade/langbulgarianmodel.py b/awx/lib/site-packages/requests/packages/charade/langbulgarianmodel.py
new file mode 100644
index 0000000000..ea5a60ba04
--- /dev/null
+++ b/awx/lib/site-packages/requests/packages/charade/langbulgarianmodel.py
@@ -0,0 +1,229 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +# this table is modified base on win1251BulgarianCharToOrderMap, so +# only number <64 is sure valid + +Latin5_BulgarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 +110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 +253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 +116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 +194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80 +210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90 + 81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0 + 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0 + 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0 + 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d0 + 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0 + 62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0 +) + +win1251BulgarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 +110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 +253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 +116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 +206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80 +221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90 + 88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0 + 73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0 + 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0 + 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0 + 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e0 + 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0 +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 96.9392% +# first 1024 sequences:3.0618% +# rest sequences: 0.2992% +# negative sequences: 0.0020% +BulgarianLangModel = ( +0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2, +3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1, +0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0, +0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 
+3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0, +0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0, +0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0, +0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3, +2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1, +3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0, 
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2, +1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0, +3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1, +1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0, +2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2, +2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0, +3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2, +1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0, +2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2, +2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0, +3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2, +1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0, +2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2, +2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0, +2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2, +1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0, +2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2, +1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0, +3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2, +1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0, +3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1, +1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0, +2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1, +1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0, +2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2, +1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0, +2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1, +1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, +1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2, +1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1, +2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2, +1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0, +2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2, +1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1, +0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2, +1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1, +1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0, +1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1, +0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1, +0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, +0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 
+2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0, +1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, +0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, +1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1, +1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, +1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +) + +Latin5BulgarianModel = { + 'charToOrderMap': Latin5_BulgarianCharToOrderMap, + 'precedenceMatrix': BulgarianLangModel, + 'mTypicalPositiveRatio': 0.969392, + 'keepEnglishLetter': False, + 'charsetName': "ISO-8859-5" +} + +Win1251BulgarianModel = { + 'charToOrderMap': win1251BulgarianCharToOrderMap, + 'precedenceMatrix': BulgarianLangModel, + 'mTypicalPositiveRatio': 0.969392, + 'keepEnglishLetter': False, + 'charsetName': "windows-1251" +} + + +# flake8: noqa diff --git a/awx/lib/site-packages/requests/packages/charade/langcyrillicmodel.py b/awx/lib/site-packages/requests/packages/charade/langcyrillicmodel.py new file mode 100644 index 0000000000..15e338fc11 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/langcyrillicmodel.py @@ -0,0 +1,329 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# KOI8-R language model +# Character Mapping Table: +KOI8R_CharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80 +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90 +223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # a0 +238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0 + 27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0 + 15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0 + 59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0 + 35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0 +) + +win1251_CharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, +239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253, + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, +) + +latin5_CharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, + 3, 
21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, +239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, +) + +macCyrillic_CharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, +239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255, +) + +IBM855_CharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205, +206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70, + 3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219, +220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229, +230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243, + 8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248, + 43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249, +250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255, +) + +IBM866_CharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, +239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, +) + +# 
Model Table: +# total sequences: 100% +# first 512 sequences: 97.6601% +# first 1024 sequences: 2.3389% +# rest sequences: 0.1237% +# negative sequences: 0.0009% +RussianLangModel = ( +0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2, +3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, +0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, +0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0, 
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1, +1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1, +1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0, +2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1, +1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0, +3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1, +1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0, +2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2, +1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1, +1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1, +1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, +2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1, +1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0, +3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2, +1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1, +2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1, +1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0, +2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1, +1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0, +1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1, +1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0, +3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1, +2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1, +3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1, +1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1, +1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1, +0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0, +2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1, +1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0, +1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1, +0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1, +1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2, +2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1, +1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0, +1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0, +2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0, +1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1, +0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, 
+2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1, +1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1, +1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0, +0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1, +0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1, +0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1, +0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0, +0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, +1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1, +0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1, +2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0, +0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, +) + +Koi8rModel = { + 'charToOrderMap': KOI8R_CharToOrderMap, + 'precedenceMatrix': RussianLangModel, + 'mTypicalPositiveRatio': 0.976601, + 'keepEnglishLetter': False, + 'charsetName': "KOI8-R" +} + +Win1251CyrillicModel = { + 'charToOrderMap': win1251_CharToOrderMap, + 'precedenceMatrix': RussianLangModel, + 'mTypicalPositiveRatio': 0.976601, + 'keepEnglishLetter': False, + 'charsetName': "windows-1251" +} + +Latin5CyrillicModel = { + 'charToOrderMap': latin5_CharToOrderMap, + 'precedenceMatrix': RussianLangModel, + 'mTypicalPositiveRatio': 0.976601, + 'keepEnglishLetter': False, + 'charsetName': "ISO-8859-5" +} + +MacCyrillicModel = { + 'charToOrderMap': macCyrillic_CharToOrderMap, + 'precedenceMatrix': RussianLangModel, + 'mTypicalPositiveRatio': 0.976601, + 'keepEnglishLetter': False, + 'charsetName': "MacCyrillic" +}; + +Ibm866Model = { + 'charToOrderMap': IBM866_CharToOrderMap, + 'precedenceMatrix': RussianLangModel, + 'mTypicalPositiveRatio': 0.976601, + 'keepEnglishLetter': False, + 'charsetName': "IBM866" +} + +Ibm855Model = { + 'charToOrderMap': IBM855_CharToOrderMap, + 'precedenceMatrix': RussianLangModel, + 'mTypicalPositiveRatio': 0.976601, + 'keepEnglishLetter': False, + 'charsetName': "IBM855" +} + +# flake8: noqa diff --git a/awx/lib/site-packages/requests/packages/charade/langgreekmodel.py b/awx/lib/site-packages/requests/packages/charade/langgreekmodel.py new file mode 100644 index 0000000000..93241ce26b --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/langgreekmodel.py @@ -0,0 +1,225 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +Latin7_CharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 + 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 +253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 + 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 +253,233, 90,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 +253,253,253,253,247,248, 61, 36, 46, 71, 73,253, 54,253,108,123, # b0 +110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 + 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 +124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 + 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0 +) + +win1253_CharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 + 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 +253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 + 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 +253,233, 61,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 +253,253,253,253,247,253,253, 36, 46, 71, 73,253, 54,253,108,123, # b0 +110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 + 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 +124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 + 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0 +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 98.2851% +# first 1024 sequences:1.7001% +# rest sequences: 0.0359% +# negative sequences: 0.0148% 
+GreekLangModel = ( +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,2,2,3,3,3,3,3,3,3,3,1,3,3,3,0,2,2,3,3,0,3,0,3,2,0,3,3,3,0, +3,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,0,3,3,0,3,2,3,3,0,3,2,3,3,3,0,0,3,0,3,0,3,3,2,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +0,2,3,2,2,3,3,3,3,3,3,3,3,0,3,3,3,3,0,2,3,3,0,3,3,3,3,2,3,3,3,0, +2,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,2,1,3,3,3,3,2,3,3,2,3,3,2,0, +0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,2,3,3,0, +2,0,1,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,3,0,0,0,0,3,3,0,3,1,3,3,3,0,3,3,0,3,3,3,3,0,0,0,0, +2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,0,3,0,3,3,3,3,3,0,3,2,2,2,3,0,2,3,3,3,3,3,2,3,3,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,3,2,2,2,3,3,3,3,0,3,1,3,3,3,3,2,3,3,3,3,3,3,3,2,2,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,0,3,0,0,0,3,3,2,3,3,3,3,3,0,0,3,2,3,0,2,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,3,0,0,3,3,0,2,3,0,3,0,3,3,3,0,0,3,0,3,0,2,2,3,3,0,0, +0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,0,3,2,3,3,3,3,0,3,3,3,3,3,0,3,3,2,3,2,3,3,2,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,2,3,2,3,3,3,3,3,3,0,2,3,2,3,2,2,2,3,2,3,3,2,3,0,2,2,2,3,0, +2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,0,3,3,3,2,3,3,0,0,3,0,3,0,0,0,3,2,0,3,0,3,0,0,2,0,2,0, +0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,0,0,0,3,3,0,3,3,3,0,0,1,2,3,0, +3,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,0,0,3,2,2,3,3,0,3,3,3,3,3,2,1,3,0,3,2,3,3,2,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,3,0,2,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,3,0,3,2,3,0,0,3,3,3,0, +3,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,0,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,2,0,3,2,3,0,0,3,2,3,0, +2,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,1,2,2,3,3,3,3,3,3,0,2,3,0,3,0,0,0,3,3,0,3,0,2,0,0,2,3,1,0, +2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,3,0,3,0,3,3,2,3,0,3,3,3,3,3,3,0,3,3,3,0,2,3,0,0,3,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,0,0,3,0,0,0,3,3,0,3,0,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,0,3,3,3,3,3,3,0,0,3,0,2,0,0,0,3,3,0,3,0,3,0,0,2,0,2,0, +0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,3,0,3,0,2,0,3,2,0,3,2,3,2,3,0,0,3,2,3,2,3,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,2,3,3,3,3,3,0,0,0,3,0,2,1,0,0,3,2,2,2,0,3,0,0,2,2,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,2,0,3,0,3,0,3,3,0,2,1,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,3,3,3,0,3,3,3,3,3,3,0,2,3,0,3,0,0,0,2,1,0,2,2,3,0,0,2,2,2,0, 
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,2,3,3,3,2,3,0,0,1,3,0,2,0,0,0,0,3,0,1,0,2,0,0,1,1,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,1,0,3,0,0,0,3,2,0,3,2,3,3,3,0,0,3,0,3,2,2,2,1,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,0,0,3,0,0,0,0,2,0,2,3,3,2,2,2,2,3,0,2,0,2,2,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,2,0,0,0,0,0,0,2,3,0,2,0,2,3,2,0,0,3,0,3,0,3,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,3,2,3,3,2,2,3,0,2,0,3,0,0,0,2,0,0,0,0,1,2,0,2,0,2,0, +0,2,0,2,0,2,2,0,0,1,0,2,2,2,0,2,2,2,0,2,2,2,0,0,2,0,0,1,0,0,0,0, +0,2,0,3,3,2,0,0,0,0,0,0,1,3,0,2,0,2,2,2,0,0,2,0,3,0,0,2,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,2,3,2,0,2,2,0,2,0,2,2,0,2,0,2,2,2,0,0,0,0,0,0,2,3,0,0,0,2, +0,1,2,0,0,0,0,2,2,0,0,0,2,1,0,2,2,0,0,0,0,0,0,1,0,2,0,0,0,0,0,0, +0,0,2,1,0,2,3,2,2,3,2,3,2,0,0,3,3,3,0,0,3,2,0,0,0,1,1,0,2,0,2,2, +0,2,0,2,0,2,2,0,0,2,0,2,2,2,0,2,2,2,2,0,0,2,0,0,0,2,0,1,0,0,0,0, +0,3,0,3,3,2,2,0,3,0,0,0,2,2,0,2,2,2,1,2,0,0,1,2,2,0,0,3,0,0,0,2, +0,1,2,0,0,0,1,2,0,0,0,0,0,0,0,2,2,0,1,0,0,2,0,0,0,2,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,3,3,2,2,0,0,0,2,0,2,3,3,0,2,0,0,0,0,0,0,2,2,2,0,2,2,0,2,0,2, +0,2,2,0,0,2,2,2,2,1,0,0,2,2,0,2,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0, +0,2,0,3,2,3,0,0,0,3,0,0,2,2,0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,0,2, +0,0,2,2,0,0,2,2,2,0,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,2,0,0,3,2,0,2,2,2,2,2,0,0,0,2,0,0,0,0,2,0,1,0,0,2,0,1,0,0,0, +0,2,2,2,0,2,2,0,1,2,0,2,2,2,0,2,2,2,2,1,2,2,0,0,2,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,2,0,2,0,2,2,0,0,0,0,1,2,1,0,0,2,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,3,2,3,0,0,2,0,0,0,2,2,0,2,0,0,0,1,0,0,2,0,2,0,2,2,0,0,0,0, +0,0,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0, +0,2,2,3,2,2,0,0,0,0,0,0,1,3,0,2,0,2,2,0,0,0,1,0,2,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,0,2,0,3,2,0,2,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +0,0,2,0,0,0,0,1,1,0,0,2,1,2,0,2,2,0,1,0,0,1,0,0,0,2,0,0,0,0,0,0, +0,3,0,2,2,2,0,0,2,0,0,0,2,0,0,0,2,3,0,2,0,0,0,0,0,0,2,2,0,0,0,2, +0,1,2,0,0,0,1,2,2,1,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,1,2,0,2,2,0,2,0,0,2,0,0,0,0,1,2,1,0,2,1,0,0,0,0,0,0,0,0,0,0, +0,0,2,0,0,0,3,1,2,2,0,2,0,0,0,0,2,0,0,0,2,0,0,3,0,0,0,0,2,2,2,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,1,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,2, +0,2,2,0,0,2,2,2,2,2,0,1,2,0,0,0,2,2,0,1,0,2,0,0,2,2,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,0,0,0,0,2,0,2,0,0,0,0,2, +0,1,2,0,0,0,0,2,2,1,0,1,0,1,0,2,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,2,0,0,2,2,0,0,0,0,1,0,0,0,0,0,0,2, +0,2,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0, +0,2,2,2,2,0,0,0,3,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,1, +0,0,2,0,0,0,0,1,2,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,2,2,2,0,0,0,2,0,0,0,0,0,0,0,0,2, +0,0,1,0,0,0,0,2,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, +0,3,0,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,2, +0,0,2,0,0,0,0,2,2,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+0,2,0,2,2,1,0,0,0,0,0,0,2,0,0,2,0,2,2,2,0,0,0,0,0,0,2,0,0,0,0,2, +0,0,2,0,0,2,0,2,2,0,0,0,0,2,0,2,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0, +0,0,3,0,0,0,2,2,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0, +0,2,2,2,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1, +0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,2,0,0,0,2,0,0,0,0,0,1,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,2,0,0,0, +0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,2,0,2,0,0,0, +0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,1,2,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +) + +Latin7GreekModel = { + 'charToOrderMap': Latin7_CharToOrderMap, + 'precedenceMatrix': GreekLangModel, + 'mTypicalPositiveRatio': 0.982851, + 'keepEnglishLetter': False, + 'charsetName': "ISO-8859-7" +} + +Win1253GreekModel = { + 'charToOrderMap': win1253_CharToOrderMap, + 'precedenceMatrix': GreekLangModel, + 'mTypicalPositiveRatio': 0.982851, + 'keepEnglishLetter': False, + 'charsetName': "windows-1253" +} + +# flake8: noqa diff --git a/awx/lib/site-packages/requests/packages/charade/langhebrewmodel.py b/awx/lib/site-packages/requests/packages/charade/langhebrewmodel.py new file mode 100644 index 0000000000..d87132446d --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/langhebrewmodel.py @@ -0,0 +1,201 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Simon Montagu +# Portions created by the Initial Developer are Copyright (C) 2005 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Shoshannah Forbes - original C code (?) +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Windows-1255 language model +# Character Mapping Table: +win1255_CharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85, # 40 + 78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253, # 50 +253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49, # 60 + 66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253, # 70 +124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214, +215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221, + 34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227, +106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234, + 30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237, +238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250, + 9, 8, 20, 16, 3, 2, 24, 14, 22, 1, 25, 15, 4, 11, 6, 23, + 12, 19, 13, 26, 18, 27, 21, 17, 7, 10, 5,251,252,128, 96,253, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 98.4004% +# first 1024 sequences: 1.5981% +# rest sequences: 0.087% +# negative sequences: 0.0015% +HebrewLangModel = ( +0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0, +3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2, +1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2, +1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3, +1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2, +1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2, +1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2, +0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2, +0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2, +1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0, +3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2, +0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1, +0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0, +0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2, +0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, 
+3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2, +0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0, +3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2, +0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2, +0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2, +0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2, +0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1, +0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2, +0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0, +3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2, +0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2, +0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2, +0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0, +1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2, +0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0, +0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3, +0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0, +0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0, +0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, +0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0, +2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0, +0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2, +0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0, 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0, +0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1, +1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1, +0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1, +2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1, +1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1, +2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1, +1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1, +2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0, +0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1, +1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1, +0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0, +) + +Win1255HebrewModel = { + 'charToOrderMap': win1255_CharToOrderMap, + 'precedenceMatrix': HebrewLangModel, + 'mTypicalPositiveRatio': 0.984004, + 'keepEnglishLetter': False, + 'charsetName': "windows-1255" +} + +# flake8: noqa diff --git a/awx/lib/site-packages/requests/packages/charade/langhungarianmodel.py 
b/awx/lib/site-packages/requests/packages/charade/langhungarianmodel.py new file mode 100644 index 0000000000..6f59c61260 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/langhungarianmodel.py @@ -0,0 +1,225 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +Latin2_HungarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, + 46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, +253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, + 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, +159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174, +175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190, +191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205, + 79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, +221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231, +232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241, + 82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85, +245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253, +) + +win1250HungarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, + 46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, +253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, + 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, +161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176, +177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190, +191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205, + 
81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, +221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231, +232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241, + 84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87, +245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 94.7368% +# first 1024 sequences:5.2623% +# rest sequences: 0.8894% +# negative sequences: 0.0009% +HungarianLangModel = ( +0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3, +3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2, +3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0, +3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3, +0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2, +0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3, 
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0, +1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0, +1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0, +1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1, +3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1, +2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1, +2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1, +2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1, +2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0, +2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, +3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1, +2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1, +2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1, +2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1, +1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1, +1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1, +3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0, +1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1, +1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1, +2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1, +2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0, +2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1, +3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1, +2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1, +1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0, +1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0, +2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1, +2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1, +1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0, +1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1, +2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0, +1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0, +1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0, +2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1, 
+2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1, +2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, +1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1, +1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1, +1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0, +0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0, +2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1, +2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1, +1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1, +2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1, +1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0, +1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0, +2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0, +2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1, +2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0, +1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0, +2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0, +0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0, +0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, +0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, +2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0, +0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0, +) + +Latin2HungarianModel = { + 'charToOrderMap': Latin2_HungarianCharToOrderMap, + 'precedenceMatrix': HungarianLangModel, + 'mTypicalPositiveRatio': 0.947368, + 'keepEnglishLetter': True, + 'charsetName': "ISO-8859-2" +} + +Win1250HungarianModel = { + 'charToOrderMap': win1250HungarianCharToOrderMap, + 'precedenceMatrix': HungarianLangModel, + 'mTypicalPositiveRatio': 0.947368, + 'keepEnglishLetter': True, + 'charsetName': "windows-1250" +} + +# flake8: noqa diff --git a/awx/lib/site-packages/requests/packages/charade/langthaimodel.py b/awx/lib/site-packages/requests/packages/charade/langthaimodel.py new file mode 100644 index 0000000000..df343a7473 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/langthaimodel.py @@ -0,0 +1,200 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# The following result for thai was collected from a limited sample (1M). + +# Character Mapping Table: +TIS620CharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40 +188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50 +253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60 + 96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70 +209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222, +223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235, +236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57, + 49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54, + 45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63, + 22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244, + 11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247, + 68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 92.6386% +# first 1024 sequences:7.3177% +# rest sequences: 1.0230% +# negative sequences: 0.0436% +ThaiLangModel = ( +0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3, +0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2, +3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3, +0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1, +3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2, +3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1, +3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2, +3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1, +3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1, +3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0, +3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1, +2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1, +3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1, +0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1, +0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0, +3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2, +1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0, +3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3, +3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0, +1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2, 
+0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0, +2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3, +0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0, +3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1, +2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0, +3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2, +0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2, +3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, +3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0, +2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2, +3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1, +2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1, +3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0, +3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1, +3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1, +3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1, +1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2, +0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3, +0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1, +3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0, +3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1, +1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0, +3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1, +3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2, +0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0, +0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0, +1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1, +1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1, +3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1, +0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0, +3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0, +0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1, +0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0, +0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1, +0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1, +0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0, +0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1, +0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0, +3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0, +0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0, +0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0, +3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1, +2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1, +0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0, +3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0, +1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0, +1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0, +1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +) + +TIS620ThaiModel = { + 'charToOrderMap': TIS620CharToOrderMap, + 'precedenceMatrix': ThaiLangModel, + 'mTypicalPositiveRatio': 0.926386, + 'keepEnglishLetter': False, + 'charsetName': "TIS-620" +} + +# flake8: noqa diff --git a/awx/lib/site-packages/requests/packages/charade/latin1prober.py b/awx/lib/site-packages/requests/packages/charade/latin1prober.py new file mode 100644 index 0000000000..bebe1bcb02 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/latin1prober.py @@ -0,0 +1,139 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .constants import eNotMe +from .compat import wrap_ord + +FREQ_CAT_NUM = 4 + +UDF = 0 # undefined +OTH = 1 # other +ASC = 2 # ascii capital letter +ASS = 3 # ascii small letter +ACV = 4 # accent capital vowel +ACO = 5 # accent capital other +ASV = 6 # accent small vowel +ASO = 7 # accent small other +CLASS_NUM = 8 # total classes + +Latin1_CharToClass = ( + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F + OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47 + ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F + ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57 + ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F + OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67 + ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F + ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77 + ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F + OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87 + OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F + UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97 + OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF + ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7 + ACV, ACV, ACV, ACV, 
ACV, ACV, ACV, ACV, # C8 - CF + ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7 + ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF + ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7 + ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF + ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7 + ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF +) + +# 0 : illegal +# 1 : very unlikely +# 2 : normal +# 3 : very likely +Latin1ClassModel = ( + # UDF OTH ASC ASS ACV ACO ASV ASO + 0, 0, 0, 0, 0, 0, 0, 0, # UDF + 0, 3, 3, 3, 3, 3, 3, 3, # OTH + 0, 3, 3, 3, 3, 3, 3, 3, # ASC + 0, 3, 3, 3, 1, 1, 3, 3, # ASS + 0, 3, 3, 3, 1, 2, 1, 2, # ACV + 0, 3, 3, 3, 3, 3, 3, 3, # ACO + 0, 3, 1, 3, 1, 1, 1, 3, # ASV + 0, 3, 1, 3, 1, 1, 3, 3, # ASO +) + + +class Latin1Prober(CharSetProber): + def __init__(self): + CharSetProber.__init__(self) + self.reset() + + def reset(self): + self._mLastCharClass = OTH + self._mFreqCounter = [0] * FREQ_CAT_NUM + CharSetProber.reset(self) + + def get_charset_name(self): + return "windows-1252" + + def feed(self, aBuf): + aBuf = self.filter_with_english_letters(aBuf) + for c in aBuf: + charClass = Latin1_CharToClass[wrap_ord(c)] + freq = Latin1ClassModel[(self._mLastCharClass * CLASS_NUM) + + charClass] + if freq == 0: + self._mState = eNotMe + break + self._mFreqCounter[freq] += 1 + self._mLastCharClass = charClass + + return self.get_state() + + def get_confidence(self): + if self.get_state() == eNotMe: + return 0.01 + + total = sum(self._mFreqCounter) + if total < 0.01: + confidence = 0.0 + else: + confidence = ((self._mFreqCounter[3] / total) + - (self._mFreqCounter[1] * 20.0 / total)) + if confidence < 0.0: + confidence = 0.0 + # lower the confidence of latin1 so that other more accurate + # detector can take priority. + confidence = confidence * 0.5 + return confidence diff --git a/awx/lib/site-packages/requests/packages/charade/mbcharsetprober.py b/awx/lib/site-packages/requests/packages/charade/mbcharsetprober.py new file mode 100644 index 0000000000..1eee253c04 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/mbcharsetprober.py @@ -0,0 +1,86 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Proofpoint, Inc. +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import sys +from . 
import constants +from .charsetprober import CharSetProber + + +class MultiByteCharSetProber(CharSetProber): + def __init__(self): + CharSetProber.__init__(self) + self._mDistributionAnalyzer = None + self._mCodingSM = None + self._mLastChar = [0, 0] + + def reset(self): + CharSetProber.reset(self) + if self._mCodingSM: + self._mCodingSM.reset() + if self._mDistributionAnalyzer: + self._mDistributionAnalyzer.reset() + self._mLastChar = [0, 0] + + def get_charset_name(self): + pass + + def feed(self, aBuf): + aLen = len(aBuf) + for i in range(0, aLen): + codingState = self._mCodingSM.next_state(aBuf[i]) + if codingState == constants.eError: + if constants._debug: + sys.stderr.write(self.get_charset_name() + + ' prober hit error at byte ' + str(i) + + '\n') + self._mState = constants.eNotMe + break + elif codingState == constants.eItsMe: + self._mState = constants.eFoundIt + break + elif codingState == constants.eStart: + charLen = self._mCodingSM.get_current_charlen() + if i == 0: + self._mLastChar[1] = aBuf[0] + self._mDistributionAnalyzer.feed(self._mLastChar, charLen) + else: + self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1], + charLen) + + self._mLastChar[0] = aBuf[aLen - 1] + + if self.get_state() == constants.eDetecting: + if (self._mDistributionAnalyzer.got_enough_data() and + (self.get_confidence() > constants.SHORTCUT_THRESHOLD)): + self._mState = constants.eFoundIt + + return self.get_state() + + def get_confidence(self): + return self._mDistributionAnalyzer.get_confidence() diff --git a/awx/lib/site-packages/requests/packages/charade/mbcsgroupprober.py b/awx/lib/site-packages/requests/packages/charade/mbcsgroupprober.py new file mode 100644 index 0000000000..2f6f5e897f --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/mbcsgroupprober.py @@ -0,0 +1,54 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Proofpoint, Inc. +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetgroupprober import CharSetGroupProber +from .utf8prober import UTF8Prober +from .sjisprober import SJISProber +from .eucjpprober import EUCJPProber +from .gb2312prober import GB2312Prober +from .euckrprober import EUCKRProber +from .cp949prober import CP949Prober +from .big5prober import Big5Prober +from .euctwprober import EUCTWProber + + +class MBCSGroupProber(CharSetGroupProber): + def __init__(self): + CharSetGroupProber.__init__(self) + self._mProbers = [ + UTF8Prober(), + SJISProber(), + EUCJPProber(), + GB2312Prober(), + EUCKRProber(), + CP949Prober(), + Big5Prober(), + EUCTWProber() + ] + self.reset() diff --git a/awx/lib/site-packages/requests/packages/charade/mbcssm.py b/awx/lib/site-packages/requests/packages/charade/mbcssm.py new file mode 100644 index 0000000000..55c02f0a06 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/mbcssm.py @@ -0,0 +1,575 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .constants import eStart, eError, eItsMe + +# BIG5 + +BIG5_cls = ( + 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,1, # 78 - 7f + 4,4,4,4,4,4,4,4, # 80 - 87 + 4,4,4,4,4,4,4,4, # 88 - 8f + 4,4,4,4,4,4,4,4, # 90 - 97 + 4,4,4,4,4,4,4,4, # 98 - 9f + 4,3,3,3,3,3,3,3, # a0 - a7 + 3,3,3,3,3,3,3,3, # a8 - af + 3,3,3,3,3,3,3,3, # b0 - b7 + 3,3,3,3,3,3,3,3, # b8 - bf + 3,3,3,3,3,3,3,3, # c0 - c7 + 3,3,3,3,3,3,3,3, # c8 - cf + 3,3,3,3,3,3,3,3, # d0 - d7 + 3,3,3,3,3,3,3,3, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,3,3,3, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,3,3,0 # f8 - ff +) + +BIG5_st = ( + eError,eStart,eStart, 3,eError,eError,eError,eError,#00-07 + eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,#08-0f + eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart#10-17 +) + +Big5CharLenTable = (0, 1, 1, 2, 0) + +Big5SMModel = {'classTable': BIG5_cls, + 'classFactor': 5, + 'stateTable': BIG5_st, + 'charLenTable': Big5CharLenTable, + 'name': 'Big5'} + +# CP949 + +CP949_cls = ( + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f + 1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f + 1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f + 4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f + 1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f + 5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f + 0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f + 6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f + 6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af + 7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf + 7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff +) + +CP949_st = ( +#cls= 0 1 2 3 4 5 6 7 8 9 # previous state = + eError,eStart, 3,eError,eStart,eStart, 4, 5,eError, 6, # eStart + eError,eError,eError,eError,eError,eError,eError,eError,eError,eError, # eError + eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe, # eItsMe + eError,eError,eStart,eStart,eError,eError,eError,eStart,eStart,eStart, # 3 + eError,eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart, # 4 + eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart, # 5 + eError,eStart,eStart,eStart,eStart,eError,eError,eStart,eStart,eStart, # 6 +) + +CP949CharLenTable = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2) + +CP949SMModel = {'classTable': CP949_cls, + 'classFactor': 10, + 'stateTable': CP949_st, + 'charLenTable': CP949CharLenTable, + 'name': 'CP949'} + +# EUC-JP + +EUCJP_cls = ( + 4,4,4,4,4,4,4,4, # 00 - 07 + 4,4,4,4,4,4,5,5, # 08 - 0f + 4,4,4,4,4,4,4,4, # 10 - 17 + 4,4,4,5,4,4,4,4, # 18 - 1f + 4,4,4,4,4,4,4,4, # 20 - 27 + 4,4,4,4,4,4,4,4, # 28 - 2f + 4,4,4,4,4,4,4,4, # 30 - 37 + 4,4,4,4,4,4,4,4, # 38 - 3f + 
4,4,4,4,4,4,4,4, # 40 - 47 + 4,4,4,4,4,4,4,4, # 48 - 4f + 4,4,4,4,4,4,4,4, # 50 - 57 + 4,4,4,4,4,4,4,4, # 58 - 5f + 4,4,4,4,4,4,4,4, # 60 - 67 + 4,4,4,4,4,4,4,4, # 68 - 6f + 4,4,4,4,4,4,4,4, # 70 - 77 + 4,4,4,4,4,4,4,4, # 78 - 7f + 5,5,5,5,5,5,5,5, # 80 - 87 + 5,5,5,5,5,5,1,3, # 88 - 8f + 5,5,5,5,5,5,5,5, # 90 - 97 + 5,5,5,5,5,5,5,5, # 98 - 9f + 5,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,2,2,2, # a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,2,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,0,5 # f8 - ff +) + +EUCJP_st = ( + 3, 4, 3, 5,eStart,eError,eError,eError,#00-07 + eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f + eItsMe,eItsMe,eStart,eError,eStart,eError,eError,eError,#10-17 + eError,eError,eStart,eError,eError,eError, 3,eError,#18-1f + 3,eError,eError,eError,eStart,eStart,eStart,eStart#20-27 +) + +EUCJPCharLenTable = (2, 2, 2, 3, 1, 0) + +EUCJPSMModel = {'classTable': EUCJP_cls, + 'classFactor': 6, + 'stateTable': EUCJP_st, + 'charLenTable': EUCJPCharLenTable, + 'name': 'EUC-JP'} + +# EUC-KR + +EUCKR_cls = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 1,1,1,1,1,1,1,1, # 40 - 47 + 1,1,1,1,1,1,1,1, # 48 - 4f + 1,1,1,1,1,1,1,1, # 50 - 57 + 1,1,1,1,1,1,1,1, # 58 - 5f + 1,1,1,1,1,1,1,1, # 60 - 67 + 1,1,1,1,1,1,1,1, # 68 - 6f + 1,1,1,1,1,1,1,1, # 70 - 77 + 1,1,1,1,1,1,1,1, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,3,3,3, # a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,3,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 2,2,2,2,2,2,2,2, # e0 - e7 + 2,2,2,2,2,2,2,2, # e8 - ef + 2,2,2,2,2,2,2,2, # f0 - f7 + 2,2,2,2,2,2,2,0 # f8 - ff +) + +EUCKR_st = ( + eError,eStart, 3,eError,eError,eError,eError,eError,#00-07 + eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,eStart,eStart #08-0f +) + +EUCKRCharLenTable = (0, 1, 2, 0) + +EUCKRSMModel = {'classTable': EUCKR_cls, + 'classFactor': 4, + 'stateTable': EUCKR_st, + 'charLenTable': EUCKRCharLenTable, + 'name': 'EUC-KR'} + +# EUC-TW + +EUCTW_cls = ( + 2,2,2,2,2,2,2,2, # 00 - 07 + 2,2,2,2,2,2,0,0, # 08 - 0f + 2,2,2,2,2,2,2,2, # 10 - 17 + 2,2,2,0,2,2,2,2, # 18 - 1f + 2,2,2,2,2,2,2,2, # 20 - 27 + 2,2,2,2,2,2,2,2, # 28 - 2f + 2,2,2,2,2,2,2,2, # 30 - 37 + 2,2,2,2,2,2,2,2, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,2, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,6,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,3,4,4,4,4,4,4, # a0 - a7 + 5,5,1,1,1,1,1,1, # a8 - af + 1,1,1,1,1,1,1,1, # b0 - b7 + 1,1,1,1,1,1,1,1, # b8 - bf + 1,1,3,1,3,3,3,3, # c0 - c7 + 3,3,3,3,3,3,3,3, # c8 - cf + 3,3,3,3,3,3,3,3, # d0 - d7 + 3,3,3,3,3,3,3,3, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,3,3,3, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,3,3,0 # f8 - ff +) + +EUCTW_st = ( + eError,eError,eStart, 3, 3, 3, 4,eError,#00-07 + 
eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,#08-0f
+    eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eStart,eError,#10-17
+    eStart,eStart,eStart,eError,eError,eError,eError,eError,#18-1f
+         5,eError,eError,eError,eStart,eError,eStart,eStart,#20-27
+    eStart,eError,eStart,eStart,eStart,eStart,eStart,eStart #28-2f
+)
+
+EUCTWCharLenTable = (0, 0, 1, 2, 2, 2, 3)
+
+EUCTWSMModel = {'classTable': EUCTW_cls,
+                'classFactor': 7,
+                'stateTable': EUCTW_st,
+                'charLenTable': EUCTWCharLenTable,
+                'name': 'x-euc-tw'}
+
+# GB2312
+
+GB2312_cls = (
+    1,1,1,1,1,1,1,1, # 00 - 07
+    1,1,1,1,1,1,0,0, # 08 - 0f
+    1,1,1,1,1,1,1,1, # 10 - 17
+    1,1,1,0,1,1,1,1, # 18 - 1f
+    1,1,1,1,1,1,1,1, # 20 - 27
+    1,1,1,1,1,1,1,1, # 28 - 2f
+    3,3,3,3,3,3,3,3, # 30 - 37
+    3,3,1,1,1,1,1,1, # 38 - 3f
+    2,2,2,2,2,2,2,2, # 40 - 47
+    2,2,2,2,2,2,2,2, # 48 - 4f
+    2,2,2,2,2,2,2,2, # 50 - 57
+    2,2,2,2,2,2,2,2, # 58 - 5f
+    2,2,2,2,2,2,2,2, # 60 - 67
+    2,2,2,2,2,2,2,2, # 68 - 6f
+    2,2,2,2,2,2,2,2, # 70 - 77
+    2,2,2,2,2,2,2,4, # 78 - 7f
+    5,6,6,6,6,6,6,6, # 80 - 87
+    6,6,6,6,6,6,6,6, # 88 - 8f
+    6,6,6,6,6,6,6,6, # 90 - 97
+    6,6,6,6,6,6,6,6, # 98 - 9f
+    6,6,6,6,6,6,6,6, # a0 - a7
+    6,6,6,6,6,6,6,6, # a8 - af
+    6,6,6,6,6,6,6,6, # b0 - b7
+    6,6,6,6,6,6,6,6, # b8 - bf
+    6,6,6,6,6,6,6,6, # c0 - c7
+    6,6,6,6,6,6,6,6, # c8 - cf
+    6,6,6,6,6,6,6,6, # d0 - d7
+    6,6,6,6,6,6,6,6, # d8 - df
+    6,6,6,6,6,6,6,6, # e0 - e7
+    6,6,6,6,6,6,6,6, # e8 - ef
+    6,6,6,6,6,6,6,6, # f0 - f7
+    6,6,6,6,6,6,6,0 # f8 - ff
+)
+
+GB2312_st = (
+    eError,eStart,eStart,eStart,eStart,eStart, 3,eError,#00-07
+    eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,#08-0f
+    eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,eStart,#10-17
+         4,eError,eStart,eStart,eError,eError,eError,eError,#18-1f
+    eError,eError, 5,eError,eError,eError,eItsMe,eError,#20-27
+    eError,eError,eStart,eStart,eStart,eStart,eStart,eStart #28-2f
+)
+
+# To be accurate, the length of class 6 can be either 2 or 4.
+# But it is not necessary to discriminate between the two since
+# it is used for frequency analysis only, and we are validating
+# each code range there as well. So it is safe to set it to 2 here.
+GB2312CharLenTable = (0, 1, 1, 1, 1, 1, 2)
+
+GB2312SMModel = {'classTable': GB2312_cls,
+                 'classFactor': 7,
+                 'stateTable': GB2312_st,
+                 'charLenTable': GB2312CharLenTable,
+                 'name': 'GB2312'}
+
+# Shift_JIS
+
+SJIS_cls = (
+    1,1,1,1,1,1,1,1, # 00 - 07
+    1,1,1,1,1,1,0,0, # 08 - 0f
+    1,1,1,1,1,1,1,1, # 10 - 17
+    1,1,1,0,1,1,1,1, # 18 - 1f
+    1,1,1,1,1,1,1,1, # 20 - 27
+    1,1,1,1,1,1,1,1, # 28 - 2f
+    1,1,1,1,1,1,1,1, # 30 - 37
+    1,1,1,1,1,1,1,1, # 38 - 3f
+    2,2,2,2,2,2,2,2, # 40 - 47
+    2,2,2,2,2,2,2,2, # 48 - 4f
+    2,2,2,2,2,2,2,2, # 50 - 57
+    2,2,2,2,2,2,2,2, # 58 - 5f
+    2,2,2,2,2,2,2,2, # 60 - 67
+    2,2,2,2,2,2,2,2, # 68 - 6f
+    2,2,2,2,2,2,2,2, # 70 - 77
+    2,2,2,2,2,2,2,1, # 78 - 7f
+    3,3,3,3,3,3,3,3, # 80 - 87
+    3,3,3,3,3,3,3,3, # 88 - 8f
+    3,3,3,3,3,3,3,3, # 90 - 97
+    3,3,3,3,3,3,3,3, # 98 - 9f
+    #0xa0 is illegal in sjis encoding, but some pages do
+    #contain such bytes. We need to be more error-forgiving.
+ 2,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,2,2,2, # a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,2,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,4,4,4, # e8 - ef + 4,4,4,4,4,4,4,4, # f0 - f7 + 4,4,4,4,4,0,0,0 # f8 - ff +) + + +SJIS_st = ( + eError,eStart,eStart, 3,eError,eError,eError,eError,#00-07 + eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f + eItsMe,eItsMe,eError,eError,eStart,eStart,eStart,eStart #10-17 +) + +SJISCharLenTable = (0, 1, 1, 2, 0, 0) + +SJISSMModel = {'classTable': SJIS_cls, + 'classFactor': 6, + 'stateTable': SJIS_st, + 'charLenTable': SJISCharLenTable, + 'name': 'Shift_JIS'} + +# UCS2-BE + +UCS2BE_cls = ( + 0,0,0,0,0,0,0,0, # 00 - 07 + 0,0,1,0,0,2,0,0, # 08 - 0f + 0,0,0,0,0,0,0,0, # 10 - 17 + 0,0,0,3,0,0,0,0, # 18 - 1f + 0,0,0,0,0,0,0,0, # 20 - 27 + 0,3,3,3,3,3,0,0, # 28 - 2f + 0,0,0,0,0,0,0,0, # 30 - 37 + 0,0,0,0,0,0,0,0, # 38 - 3f + 0,0,0,0,0,0,0,0, # 40 - 47 + 0,0,0,0,0,0,0,0, # 48 - 4f + 0,0,0,0,0,0,0,0, # 50 - 57 + 0,0,0,0,0,0,0,0, # 58 - 5f + 0,0,0,0,0,0,0,0, # 60 - 67 + 0,0,0,0,0,0,0,0, # 68 - 6f + 0,0,0,0,0,0,0,0, # 70 - 77 + 0,0,0,0,0,0,0,0, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,0,0,0,0,0,0,0, # a0 - a7 + 0,0,0,0,0,0,0,0, # a8 - af + 0,0,0,0,0,0,0,0, # b0 - b7 + 0,0,0,0,0,0,0,0, # b8 - bf + 0,0,0,0,0,0,0,0, # c0 - c7 + 0,0,0,0,0,0,0,0, # c8 - cf + 0,0,0,0,0,0,0,0, # d0 - d7 + 0,0,0,0,0,0,0,0, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,4,5 # f8 - ff +) + +UCS2BE_st = ( + 5, 7, 7,eError, 4, 3,eError,eError,#00-07 + eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f + eItsMe,eItsMe, 6, 6, 6, 6,eError,eError,#10-17 + 6, 6, 6, 6, 6,eItsMe, 6, 6,#18-1f + 6, 6, 6, 6, 5, 7, 7,eError,#20-27 + 5, 8, 6, 6,eError, 6, 6, 6,#28-2f + 6, 6, 6, 6,eError,eError,eStart,eStart #30-37 +) + +UCS2BECharLenTable = (2, 2, 2, 0, 2, 2) + +UCS2BESMModel = {'classTable': UCS2BE_cls, + 'classFactor': 6, + 'stateTable': UCS2BE_st, + 'charLenTable': UCS2BECharLenTable, + 'name': 'UTF-16BE'} + +# UCS2-LE + +UCS2LE_cls = ( + 0,0,0,0,0,0,0,0, # 00 - 07 + 0,0,1,0,0,2,0,0, # 08 - 0f + 0,0,0,0,0,0,0,0, # 10 - 17 + 0,0,0,3,0,0,0,0, # 18 - 1f + 0,0,0,0,0,0,0,0, # 20 - 27 + 0,3,3,3,3,3,0,0, # 28 - 2f + 0,0,0,0,0,0,0,0, # 30 - 37 + 0,0,0,0,0,0,0,0, # 38 - 3f + 0,0,0,0,0,0,0,0, # 40 - 47 + 0,0,0,0,0,0,0,0, # 48 - 4f + 0,0,0,0,0,0,0,0, # 50 - 57 + 0,0,0,0,0,0,0,0, # 58 - 5f + 0,0,0,0,0,0,0,0, # 60 - 67 + 0,0,0,0,0,0,0,0, # 68 - 6f + 0,0,0,0,0,0,0,0, # 70 - 77 + 0,0,0,0,0,0,0,0, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,0,0,0,0,0,0,0, # a0 - a7 + 0,0,0,0,0,0,0,0, # a8 - af + 0,0,0,0,0,0,0,0, # b0 - b7 + 0,0,0,0,0,0,0,0, # b8 - bf + 0,0,0,0,0,0,0,0, # c0 - c7 + 0,0,0,0,0,0,0,0, # c8 - cf + 0,0,0,0,0,0,0,0, # d0 - d7 + 0,0,0,0,0,0,0,0, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,4,5 # f8 - ff +) + +UCS2LE_st = ( + 6, 6, 7, 6, 4, 3,eError,eError,#00-07 + eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f + eItsMe,eItsMe, 5, 5, 5,eError,eItsMe,eError,#10-17 + 5, 5, 5,eError, 5,eError, 6, 6,#18-1f + 7, 6, 8, 8, 5, 5, 5,eError,#20-27 + 5, 5, 5,eError,eError,eError, 5, 5,#28-2f + 5, 5, 5,eError, 5,eError,eStart,eStart #30-37 +) + 
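Each *SMModel dict in this module bundles a byte-class map (classTable), the class count (classFactor), a state-transition table (stateTable), and the byte length of each character class (charLenTable). For orientation, here is a minimal sketch of how such a model is consumed by charade's CodingStateMachine (vendored elsewhere in this patch and used in exactly this way by sjisprober.py further down); the looks_like_sjis helper is illustrative only and not part of the vendored code:

    # Illustrative only -- mirrors the probe loop in sjisprober.py below.
    # Assumes the vendored charade package is importable; `data` is a
    # byte string to probe.
    from charade.codingstatemachine import CodingStateMachine
    from charade.mbcssm import SJISSMModel
    from charade import constants

    def looks_like_sjis(data):
        sm = CodingStateMachine(SJISSMModel)
        for byte in data:
            state = sm.next_state(byte)
            if state == constants.eError:
                # The byte sequence can never be valid Shift_JIS.
                return False
            if state == constants.eItsMe:
                # The sequence is unambiguously Shift_JIS.
                return True
            # constants.eStart: one complete character was consumed;
            # sm.get_current_charlen() reports how many bytes it used.
        return True  # no contradiction found in the sample

The real probers keep the machine's verdict separate from the statistical analyzers: the state machine only rules encodings in or out, while the distribution/context analyzers supply the confidence score.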
+UCS2LECharLenTable = (2, 2, 2, 2, 2, 2) + +UCS2LESMModel = {'classTable': UCS2LE_cls, + 'classFactor': 6, + 'stateTable': UCS2LE_st, + 'charLenTable': UCS2LECharLenTable, + 'name': 'UTF-16LE'} + +# UTF-8 + +UTF8_cls = ( + 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 1,1,1,1,1,1,1,1, # 40 - 47 + 1,1,1,1,1,1,1,1, # 48 - 4f + 1,1,1,1,1,1,1,1, # 50 - 57 + 1,1,1,1,1,1,1,1, # 58 - 5f + 1,1,1,1,1,1,1,1, # 60 - 67 + 1,1,1,1,1,1,1,1, # 68 - 6f + 1,1,1,1,1,1,1,1, # 70 - 77 + 1,1,1,1,1,1,1,1, # 78 - 7f + 2,2,2,2,3,3,3,3, # 80 - 87 + 4,4,4,4,4,4,4,4, # 88 - 8f + 4,4,4,4,4,4,4,4, # 90 - 97 + 4,4,4,4,4,4,4,4, # 98 - 9f + 5,5,5,5,5,5,5,5, # a0 - a7 + 5,5,5,5,5,5,5,5, # a8 - af + 5,5,5,5,5,5,5,5, # b0 - b7 + 5,5,5,5,5,5,5,5, # b8 - bf + 0,0,6,6,6,6,6,6, # c0 - c7 + 6,6,6,6,6,6,6,6, # c8 - cf + 6,6,6,6,6,6,6,6, # d0 - d7 + 6,6,6,6,6,6,6,6, # d8 - df + 7,8,8,8,8,8,8,8, # e0 - e7 + 8,8,8,8,8,9,8,8, # e8 - ef + 10,11,11,11,11,11,11,11, # f0 - f7 + 12,13,13,13,14,15,0,0 # f8 - ff +) + +UTF8_st = ( + eError,eStart,eError,eError,eError,eError, 12, 10,#00-07 + 9, 11, 8, 7, 6, 5, 4, 3,#08-0f + eError,eError,eError,eError,eError,eError,eError,eError,#10-17 + eError,eError,eError,eError,eError,eError,eError,eError,#18-1f + eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,#20-27 + eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,#28-2f + eError,eError, 5, 5, 5, 5,eError,eError,#30-37 + eError,eError,eError,eError,eError,eError,eError,eError,#38-3f + eError,eError,eError, 5, 5, 5,eError,eError,#40-47 + eError,eError,eError,eError,eError,eError,eError,eError,#48-4f + eError,eError, 7, 7, 7, 7,eError,eError,#50-57 + eError,eError,eError,eError,eError,eError,eError,eError,#58-5f + eError,eError,eError,eError, 7, 7,eError,eError,#60-67 + eError,eError,eError,eError,eError,eError,eError,eError,#68-6f + eError,eError, 9, 9, 9, 9,eError,eError,#70-77 + eError,eError,eError,eError,eError,eError,eError,eError,#78-7f + eError,eError,eError,eError,eError, 9,eError,eError,#80-87 + eError,eError,eError,eError,eError,eError,eError,eError,#88-8f + eError,eError, 12, 12, 12, 12,eError,eError,#90-97 + eError,eError,eError,eError,eError,eError,eError,eError,#98-9f + eError,eError,eError,eError,eError, 12,eError,eError,#a0-a7 + eError,eError,eError,eError,eError,eError,eError,eError,#a8-af + eError,eError, 12, 12, 12,eError,eError,eError,#b0-b7 + eError,eError,eError,eError,eError,eError,eError,eError,#b8-bf + eError,eError,eStart,eStart,eStart,eStart,eError,eError,#c0-c7 + eError,eError,eError,eError,eError,eError,eError,eError #c8-cf +) + +UTF8CharLenTable = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6) + +UTF8SMModel = {'classTable': UTF8_cls, + 'classFactor': 16, + 'stateTable': UTF8_st, + 'charLenTable': UTF8CharLenTable, + 'name': 'UTF-8'} + +# flake8: noqa diff --git a/awx/lib/site-packages/requests/packages/charade/sbcharsetprober.py b/awx/lib/site-packages/requests/packages/charade/sbcharsetprober.py new file mode 100644 index 0000000000..da26715cfc --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/sbcharsetprober.py @@ -0,0 +1,120 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. 
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#   Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+import sys
+from . import constants
+from .charsetprober import CharSetProber
+from .compat import wrap_ord
+
+SAMPLE_SIZE = 64
+SB_ENOUGH_REL_THRESHOLD = 1024
+POSITIVE_SHORTCUT_THRESHOLD = 0.95
+NEGATIVE_SHORTCUT_THRESHOLD = 0.05
+SYMBOL_CAT_ORDER = 250
+NUMBER_OF_SEQ_CAT = 4
+POSITIVE_CAT = NUMBER_OF_SEQ_CAT - 1
+#NEGATIVE_CAT = 0
+
+
+class SingleByteCharSetProber(CharSetProber):
+    def __init__(self, model, reversed=False, nameProber=None):
+        CharSetProber.__init__(self)
+        self._mModel = model
+        # TRUE if we need to reverse every pair in the model lookup
+        self._mReversed = reversed
+        # Optional auxiliary prober for name decision
+        self._mNameProber = nameProber
+        self.reset()
+
+    def reset(self):
+        CharSetProber.reset(self)
+        # char order of last character
+        self._mLastOrder = 255
+        self._mSeqCounters = [0] * NUMBER_OF_SEQ_CAT
+        self._mTotalSeqs = 0
+        self._mTotalChar = 0
+        # characters that fall in our sampling range
+        self._mFreqChar = 0
+
+    def get_charset_name(self):
+        if self._mNameProber:
+            return self._mNameProber.get_charset_name()
+        else:
+            return self._mModel['charsetName']
+
+    def feed(self, aBuf):
+        if not self._mModel['keepEnglishLetter']:
+            aBuf = self.filter_without_english_letters(aBuf)
+        aLen = len(aBuf)
+        if not aLen:
+            return self.get_state()
+        for c in aBuf:
+            order = self._mModel['charToOrderMap'][wrap_ord(c)]
+            if order < SYMBOL_CAT_ORDER:
+                self._mTotalChar += 1
+            if order < SAMPLE_SIZE:
+                self._mFreqChar += 1
+                if self._mLastOrder < SAMPLE_SIZE:
+                    self._mTotalSeqs += 1
+                    if not self._mReversed:
+                        i = (self._mLastOrder * SAMPLE_SIZE) + order
+                        model = self._mModel['precedenceMatrix'][i]
+                    else:  # reverse the order of the letters in the lookup
+                        i = (order * SAMPLE_SIZE) + self._mLastOrder
+                        model = self._mModel['precedenceMatrix'][i]
+                    self._mSeqCounters[model] += 1
+            self._mLastOrder = order
+
+        if self.get_state() == constants.eDetecting:
+            if self._mTotalSeqs > SB_ENOUGH_REL_THRESHOLD:
+                cf = self.get_confidence()
+                if cf > POSITIVE_SHORTCUT_THRESHOLD:
+                    if constants._debug:
+                        sys.stderr.write('%s confidence = %s, we have a '
+                                         'winner\n' %
+                                         (self._mModel['charsetName'], cf))
+                    self._mState = constants.eFoundIt
+                elif cf < NEGATIVE_SHORTCUT_THRESHOLD:
+                    if constants._debug:
+                        sys.stderr.write('%s confidence = %s, below negative '
+                                         'shortcut threshold %s\n' %
+                                         (self._mModel['charsetName'], cf,
+                                          NEGATIVE_SHORTCUT_THRESHOLD))
+                    self._mState = constants.eNotMe
+
+        return self.get_state()
+
+    def get_confidence(self):
+        r = 0.01
+        if self._mTotalSeqs > 0:
+            r =
((1.0 * self._mSeqCounters[POSITIVE_CAT]) / self._mTotalSeqs + / self._mModel['mTypicalPositiveRatio']) + r = r * self._mFreqChar / self._mTotalChar + if r >= 1.0: + r = 0.99 + return r diff --git a/awx/lib/site-packages/requests/packages/charade/sbcsgroupprober.py b/awx/lib/site-packages/requests/packages/charade/sbcsgroupprober.py new file mode 100644 index 0000000000..b224814568 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/sbcsgroupprober.py @@ -0,0 +1,69 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetgroupprober import CharSetGroupProber +from .sbcharsetprober import SingleByteCharSetProber +from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel, + Latin5CyrillicModel, MacCyrillicModel, + Ibm866Model, Ibm855Model) +from .langgreekmodel import Latin7GreekModel, Win1253GreekModel +from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel +from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel +from .langthaimodel import TIS620ThaiModel +from .langhebrewmodel import Win1255HebrewModel +from .hebrewprober import HebrewProber + + +class SBCSGroupProber(CharSetGroupProber): + def __init__(self): + CharSetGroupProber.__init__(self) + self._mProbers = [ + SingleByteCharSetProber(Win1251CyrillicModel), + SingleByteCharSetProber(Koi8rModel), + SingleByteCharSetProber(Latin5CyrillicModel), + SingleByteCharSetProber(MacCyrillicModel), + SingleByteCharSetProber(Ibm866Model), + SingleByteCharSetProber(Ibm855Model), + SingleByteCharSetProber(Latin7GreekModel), + SingleByteCharSetProber(Win1253GreekModel), + SingleByteCharSetProber(Latin5BulgarianModel), + SingleByteCharSetProber(Win1251BulgarianModel), + SingleByteCharSetProber(Latin2HungarianModel), + SingleByteCharSetProber(Win1250HungarianModel), + SingleByteCharSetProber(TIS620ThaiModel), + ] + hebrewProber = HebrewProber() + logicalHebrewProber = SingleByteCharSetProber(Win1255HebrewModel, + False, hebrewProber) + visualHebrewProber = SingleByteCharSetProber(Win1255HebrewModel, True, + hebrewProber) + hebrewProber.set_model_probers(logicalHebrewProber, visualHebrewProber) + self._mProbers.extend([hebrewProber, logicalHebrewProber, + visualHebrewProber]) + + self.reset() diff --git a/awx/lib/site-packages/requests/packages/charade/sjisprober.py 
b/awx/lib/site-packages/requests/packages/charade/sjisprober.py new file mode 100644 index 0000000000..9bb0cdcf1f --- /dev/null +++ b/awx/lib/site-packages/requests/packages/charade/sjisprober.py @@ -0,0 +1,91 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import sys +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import SJISDistributionAnalysis +from .jpcntx import SJISContextAnalysis +from .mbcssm import SJISSMModel +from . import constants + + +class SJISProber(MultiByteCharSetProber): + def __init__(self): + MultiByteCharSetProber.__init__(self) + self._mCodingSM = CodingStateMachine(SJISSMModel) + self._mDistributionAnalyzer = SJISDistributionAnalysis() + self._mContextAnalyzer = SJISContextAnalysis() + self.reset() + + def reset(self): + MultiByteCharSetProber.reset(self) + self._mContextAnalyzer.reset() + + def get_charset_name(self): + return "SHIFT_JIS" + + def feed(self, aBuf): + aLen = len(aBuf) + for i in range(0, aLen): + codingState = self._mCodingSM.next_state(aBuf[i]) + if codingState == constants.eError: + if constants._debug: + sys.stderr.write(self.get_charset_name() + + ' prober hit error at byte ' + str(i) + + '\n') + self._mState = constants.eNotMe + break + elif codingState == constants.eItsMe: + self._mState = constants.eFoundIt + break + elif codingState == constants.eStart: + charLen = self._mCodingSM.get_current_charlen() + if i == 0: + self._mLastChar[1] = aBuf[0] + self._mContextAnalyzer.feed(self._mLastChar[2 - charLen:], + charLen) + self._mDistributionAnalyzer.feed(self._mLastChar, charLen) + else: + self._mContextAnalyzer.feed(aBuf[i + 1 - charLen:i + 3 + - charLen], charLen) + self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1], + charLen) + + self._mLastChar[0] = aBuf[aLen - 1] + + if self.get_state() == constants.eDetecting: + if (self._mContextAnalyzer.got_enough_data() and + (self.get_confidence() > constants.SHORTCUT_THRESHOLD)): + self._mState = constants.eFoundIt + + return self.get_state() + + def get_confidence(self): + contxtCf = self._mContextAnalyzer.get_confidence() + distribCf = self._mDistributionAnalyzer.get_confidence() + return max(contxtCf, distribCf) diff --git a/awx/lib/site-packages/requests/packages/charade/universaldetector.py b/awx/lib/site-packages/requests/packages/charade/universaldetector.py new file mode 100644 index 
0000000000..6175bfbc33
--- /dev/null
+++ b/awx/lib/site-packages/requests/packages/charade/universaldetector.py
@@ -0,0 +1,172 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#   Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from . import constants
+import sys
+import codecs
+from .latin1prober import Latin1Prober  # windows-1252
+from .mbcsgroupprober import MBCSGroupProber  # multi-byte character sets
+from .sbcsgroupprober import SBCSGroupProber  # single-byte character sets
+from .escprober import EscCharSetProber  # ISO-2022, etc.
+import re
+
+MINIMUM_THRESHOLD = 0.20
+ePureAscii = 0
+eEscAscii = 1
+eHighbyte = 2
+
+
+class UniversalDetector:
+    def __init__(self):
+        self._highBitDetector = re.compile(b'[\x80-\xFF]')
+        self._escDetector = re.compile(b'(\033|~{)')
+        self._mEscCharSetProber = None
+        self._mCharSetProbers = []
+        self.reset()
+
+    def reset(self):
+        self.result = {'encoding': None, 'confidence': 0.0}
+        self.done = False
+        self._mStart = True
+        self._mGotData = False
+        self._mInputState = ePureAscii
+        self._mLastChar = b''
+        if self._mEscCharSetProber:
+            self._mEscCharSetProber.reset()
+        for prober in self._mCharSetProbers:
+            prober.reset()
+
+    def feed(self, aBuf):
+        if self.done:
+            return
+
+        aLen = len(aBuf)
+        if not aLen:
+            return
+
+        if not self._mGotData:
+            # If the data starts with BOM, we know it is UTF
+            if aBuf[:3] == codecs.BOM_UTF8:
+                # EF BB BF  UTF-8 with BOM
+                self.result = {'encoding': "UTF-8", 'confidence': 1.0}
+            elif aBuf[:4] == codecs.BOM_UTF32_LE:
+                # FF FE 00 00  UTF-32, little-endian BOM
+                self.result = {'encoding': "UTF-32LE", 'confidence': 1.0}
+            elif aBuf[:4] == codecs.BOM_UTF32_BE:
+                # 00 00 FE FF  UTF-32, big-endian BOM
+                self.result = {'encoding': "UTF-32BE", 'confidence': 1.0}
+            elif aBuf[:4] == b'\xFE\xFF\x00\x00':
+                # FE FF 00 00  UCS-4, unusual octet order BOM (3412)
+                self.result = {
+                    'encoding': "X-ISO-10646-UCS-4-3412",
+                    'confidence': 1.0
+                }
+            elif aBuf[:4] == b'\x00\x00\xFF\xFE':
+                # 00 00 FF FE  UCS-4, unusual octet order BOM (2143)
+                self.result = {
+                    'encoding': "X-ISO-10646-UCS-4-2143",
+                    'confidence': 1.0
+                }
+            elif aBuf[:2] == codecs.BOM_LE:
+                # FF FE  UTF-16, little endian BOM
+                self.result = {'encoding': "UTF-16LE", 'confidence': 1.0}
+            elif aBuf[:2] == codecs.BOM_BE:
+                # FE FF  UTF-16, big endian BOM
+                self.result = {'encoding': "UTF-16BE", 'confidence': 1.0}
+
+        self._mGotData = True
+        if self.result['encoding'] and (self.result['confidence'] > 0.0):
+            self.done = True
+            return
+
+        if self._mInputState == ePureAscii:
+            if self._highBitDetector.search(aBuf):
+                self._mInputState = eHighbyte
+            elif ((self._mInputState == ePureAscii) and
+                    self._escDetector.search(self._mLastChar + aBuf)):
+                self._mInputState = eEscAscii
+
+        self._mLastChar = aBuf[-1:]
+
+        if self._mInputState == eEscAscii:
+            if not self._mEscCharSetProber:
+                self._mEscCharSetProber = EscCharSetProber()
+            if self._mEscCharSetProber.feed(aBuf) == constants.eFoundIt:
+                self.result = {
+                    'encoding': self._mEscCharSetProber.get_charset_name(),
+                    'confidence': self._mEscCharSetProber.get_confidence()
+                }
+                self.done = True
+        elif self._mInputState == eHighbyte:
+            if not self._mCharSetProbers:
+                self._mCharSetProbers = [MBCSGroupProber(), SBCSGroupProber(),
+                                         Latin1Prober()]
+            for prober in self._mCharSetProbers:
+                if prober.feed(aBuf) == constants.eFoundIt:
+                    self.result = {'encoding': prober.get_charset_name(),
+                                   'confidence': prober.get_confidence()}
+                    self.done = True
+                    break
+
+    def close(self):
+        if self.done:
+            return
+        if not self._mGotData:
+            if constants._debug:
+                sys.stderr.write('no data received!\n')
+            return
+        self.done = True
+
+        if self._mInputState == ePureAscii:
+            self.result = {'encoding': 'ascii', 'confidence': 1.0}
+            return self.result
+
+        if self._mInputState == eHighbyte:
+            proberConfidence = None
+            maxProberConfidence = 0.0
+            maxProber = None
+            for prober in self._mCharSetProbers:
+                if not prober:
+                    continue
+                proberConfidence = prober.get_confidence()
+                if proberConfidence > maxProberConfidence:
+                    maxProberConfidence = proberConfidence
+                    maxProber = prober
+            if maxProber and (maxProberConfidence > MINIMUM_THRESHOLD):
+                self.result = {'encoding': maxProber.get_charset_name(),
+                               'confidence': maxProber.get_confidence()}
+                return self.result
+
+        if constants._debug:
+            sys.stderr.write('no probers hit minimum threshold\n')
+            for prober in self._mCharSetProbers[0]._mProbers:
+                if not prober:
+                    continue
+                sys.stderr.write('%s confidence = %s\n' %
+                                 (prober.get_charset_name(),
+                                  prober.get_confidence()))
diff --git a/awx/lib/site-packages/requests/packages/charade/utf8prober.py b/awx/lib/site-packages/requests/packages/charade/utf8prober.py
new file mode 100644
index 0000000000..72c8d3d6a9
--- /dev/null
+++ b/awx/lib/site-packages/requests/packages/charade/utf8prober.py
@@ -0,0 +1,76 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from . import constants +from .charsetprober import CharSetProber +from .codingstatemachine import CodingStateMachine +from .mbcssm import UTF8SMModel + +ONE_CHAR_PROB = 0.5 + + +class UTF8Prober(CharSetProber): + def __init__(self): + CharSetProber.__init__(self) + self._mCodingSM = CodingStateMachine(UTF8SMModel) + self.reset() + + def reset(self): + CharSetProber.reset(self) + self._mCodingSM.reset() + self._mNumOfMBChar = 0 + + def get_charset_name(self): + return "utf-8" + + def feed(self, aBuf): + for c in aBuf: + codingState = self._mCodingSM.next_state(c) + if codingState == constants.eError: + self._mState = constants.eNotMe + break + elif codingState == constants.eItsMe: + self._mState = constants.eFoundIt + break + elif codingState == constants.eStart: + if self._mCodingSM.get_current_charlen() >= 2: + self._mNumOfMBChar += 1 + + if self.get_state() == constants.eDetecting: + if self.get_confidence() > constants.SHORTCUT_THRESHOLD: + self._mState = constants.eFoundIt + + return self.get_state() + + def get_confidence(self): + unlike = 0.99 + if self._mNumOfMBChar < 6: + for i in range(0, self._mNumOfMBChar): + unlike = unlike * ONE_CHAR_PROB + return 1.0 - unlike + else: + return unlike diff --git a/awx/lib/site-packages/requests/packages/urllib3/__init__.py b/awx/lib/site-packages/requests/packages/urllib3/__init__.py new file mode 100644 index 0000000000..bff80b8eb4 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/urllib3/__init__.py @@ -0,0 +1,58 @@ +# urllib3/__init__.py +# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) +# +# This module is part of urllib3 and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" +urllib3 - Thread-safe connection pooling and re-using. +""" + +__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)' +__license__ = 'MIT' +__version__ = 'dev' + + +from .connectionpool import ( + HTTPConnectionPool, + HTTPSConnectionPool, + connection_from_url +) + +from . import exceptions +from .filepost import encode_multipart_formdata +from .poolmanager import PoolManager, ProxyManager, proxy_from_url +from .response import HTTPResponse +from .util import make_headers, get_host + + +# Set default logging handler to avoid "No handler found" warnings. +import logging +try: # Python 2.7+ + from logging import NullHandler +except ImportError: + class NullHandler(logging.Handler): + def emit(self, record): + pass + +logging.getLogger(__name__).addHandler(NullHandler()) + +def add_stderr_logger(level=logging.DEBUG): + """ + Helper for quickly adding a StreamHandler to the logger. Useful for + debugging. + + Returns the handler after adding it. + """ + # This method needs to be in this __init__.py to get the __name__ correct + # even if urllib3 is vendored within another package. + logger = logging.getLogger(__name__) + handler = logging.StreamHandler() + handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s')) + logger.addHandler(handler) + logger.setLevel(level) + logger.debug('Added an stderr logging handler to logger: %s' % __name__) + return handler + +# ... Clean up. 
+del NullHandler
diff --git a/awx/lib/site-packages/requests/packages/urllib3/_collections.py b/awx/lib/site-packages/requests/packages/urllib3/_collections.py
new file mode 100644
index 0000000000..b35a73672e
--- /dev/null
+++ b/awx/lib/site-packages/requests/packages/urllib3/_collections.py
@@ -0,0 +1,94 @@
+# urllib3/_collections.py
+# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+#
+# This module is part of urllib3 and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+from collections import MutableMapping
+from threading import Lock
+
+try:  # Python 2.7+
+    from collections import OrderedDict
+except ImportError:
+    from .packages.ordered_dict import OrderedDict
+
+
+__all__ = ['RecentlyUsedContainer']
+
+
+_Null = object()
+
+
+class RecentlyUsedContainer(MutableMapping):
+    """
+    Provides a thread-safe dict-like container which maintains up to
+    ``maxsize`` keys while throwing away the least-recently-used keys beyond
+    ``maxsize``.
+
+    :param maxsize:
+        Maximum number of recent elements to retain.
+
+    :param dispose_func:
+        Every time an item is evicted from the container,
+        ``dispose_func(value)`` is called on the evicted value.
+    """
+
+    ContainerCls = OrderedDict
+
+    def __init__(self, maxsize=10, dispose_func=None):
+        self._maxsize = maxsize
+        self.dispose_func = dispose_func
+
+        self._container = self.ContainerCls()
+        self._lock = Lock()
+
+    def __getitem__(self, key):
+        # Re-insert the item, moving it to the end of the eviction line.
+        with self._lock:
+            item = self._container.pop(key)
+            self._container[key] = item
+            return item
+
+    def __setitem__(self, key, value):
+        evicted_value = _Null
+        with self._lock:
+            # Possibly evict the existing value of 'key'
+            evicted_value = self._container.get(key, _Null)
+            self._container[key] = value
+
+            # If we didn't evict an existing value, we might have to evict the
+            # least recently used item from the beginning of the container.
+ if len(self._container) > self._maxsize: + _key, evicted_value = self._container.popitem(last=False) + + if self.dispose_func and evicted_value is not _Null: + self.dispose_func(evicted_value) + + def __delitem__(self, key): + with self._lock: + value = self._container.pop(key) + + if self.dispose_func: + self.dispose_func(value) + + def __len__(self): + with self._lock: + return len(self._container) + + def __iter__(self): + raise NotImplementedError('Iteration over this class is unlikely to be threadsafe.') + + def clear(self): + with self._lock: + # Copy pointers to all values, then wipe the mapping + # under Python 2, this copies the list of values twice :-| + values = list(self._container.values()) + self._container.clear() + + if self.dispose_func: + for value in values: + self.dispose_func(value) + + def keys(self): + with self._lock: + return self._container.keys() diff --git a/awx/lib/site-packages/requests/packages/urllib3/connectionpool.py b/awx/lib/site-packages/requests/packages/urllib3/connectionpool.py new file mode 100644 index 0000000000..f3e926089f --- /dev/null +++ b/awx/lib/site-packages/requests/packages/urllib3/connectionpool.py @@ -0,0 +1,597 @@ +# urllib3/connectionpool.py +# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) +# +# This module is part of urllib3 and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +import logging +import socket +import errno + +from socket import error as SocketError, timeout as SocketTimeout +from .util import resolve_cert_reqs, resolve_ssl_version, assert_fingerprint + +try: # Python 3 + from http.client import HTTPConnection, HTTPException + from http.client import HTTP_PORT, HTTPS_PORT +except ImportError: + from httplib import HTTPConnection, HTTPException + from httplib import HTTP_PORT, HTTPS_PORT + +try: # Python 3 + from queue import LifoQueue, Empty, Full +except ImportError: + from Queue import LifoQueue, Empty, Full + + +try: # Compiled with SSL? + HTTPSConnection = object + BaseSSLError = None + ssl = None + + try: # Python 3 + from http.client import HTTPSConnection + except ImportError: + from httplib import HTTPSConnection + + import ssl + BaseSSLError = ssl.SSLError + +except (ImportError, AttributeError): # Platform-specific: No SSL. + pass + + +from .request import RequestMethods +from .response import HTTPResponse +from .util import get_host, is_connection_dropped, ssl_wrap_socket +from .exceptions import ( + ClosedPoolError, + EmptyPoolError, + HostChangedError, + MaxRetryError, + SSLError, + TimeoutError, +) + +from .packages.ssl_match_hostname import match_hostname, CertificateError +from .packages import six + + +xrange = six.moves.xrange + +log = logging.getLogger(__name__) + +_Default = object() + +port_by_scheme = { + 'http': HTTP_PORT, + 'https': HTTPS_PORT, +} + + +## Connection objects (extension of httplib) + +class VerifiedHTTPSConnection(HTTPSConnection): + """ + Based on httplib.HTTPSConnection but wraps the socket with + SSL certification. 
+ """ + cert_reqs = None + ca_certs = None + ssl_version = None + + def set_cert(self, key_file=None, cert_file=None, + cert_reqs=None, ca_certs=None, + assert_hostname=None, assert_fingerprint=None): + + self.key_file = key_file + self.cert_file = cert_file + self.cert_reqs = cert_reqs + self.ca_certs = ca_certs + self.assert_hostname = assert_hostname + self.assert_fingerprint = assert_fingerprint + + def connect(self): + # Add certificate verification + sock = socket.create_connection((self.host, self.port), self.timeout) + + resolved_cert_reqs = resolve_cert_reqs(self.cert_reqs) + resolved_ssl_version = resolve_ssl_version(self.ssl_version) + + # Wrap socket using verification with the root certs in + # trusted_root_certs + self.sock = ssl_wrap_socket(sock, self.key_file, self.cert_file, + cert_reqs=resolved_cert_reqs, + ca_certs=self.ca_certs, + server_hostname=self.host, + ssl_version=resolved_ssl_version) + + if resolved_cert_reqs != ssl.CERT_NONE: + if self.assert_fingerprint: + assert_fingerprint(self.sock.getpeercert(binary_form=True), + self.assert_fingerprint) + else: + match_hostname(self.sock.getpeercert(), + self.assert_hostname or self.host) + +## Pool objects + +class ConnectionPool(object): + """ + Base class for all connection pools, such as + :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`. + """ + + scheme = None + QueueCls = LifoQueue + + def __init__(self, host, port=None): + self.host = host + self.port = port + + def __str__(self): + return '%s(host=%r, port=%r)' % (type(self).__name__, + self.host, self.port) + + +class HTTPConnectionPool(ConnectionPool, RequestMethods): + """ + Thread-safe connection pool for one host. + + :param host: + Host used for this HTTP Connection (e.g. "localhost"), passed into + :class:`httplib.HTTPConnection`. + + :param port: + Port used for this HTTP Connection (None is equivalent to 80), passed + into :class:`httplib.HTTPConnection`. + + :param strict: + Causes BadStatusLine to be raised if the status line can't be parsed + as a valid HTTP/1.0 or 1.1 status line, passed into + :class:`httplib.HTTPConnection`. + + :param timeout: + Socket timeout for each individual connection, can be a float. None + disables timeout. + + :param maxsize: + Number of connections to save that can be reused. More than 1 is useful + in multithreaded situations. If ``block`` is set to false, more + connections will be created but they will not be saved once they've + been used. + + :param block: + If set to True, no more than ``maxsize`` connections will be used at + a time. When no free connections are available, the call will block + until a connection has been released. This is a useful side effect for + particular multithreaded situations where one does not want to use more + than maxsize connections per host to prevent flooding. + + :param headers: + Headers to include with all requests, unless other headers are given + explicitly. + """ + + scheme = 'http' + + def __init__(self, host, port=None, strict=False, timeout=None, maxsize=1, + block=False, headers=None): + ConnectionPool.__init__(self, host, port) + RequestMethods.__init__(self, headers) + + self.strict = strict + self.timeout = timeout + self.pool = self.QueueCls(maxsize) + self.block = block + + # Fill the queue up so that doing get() on it will block properly + for _ in xrange(maxsize): + self.pool.put(None) + + # These are mostly for testing and debugging purposes. 
+ self.num_connections = 0 + self.num_requests = 0 + + def _new_conn(self): + """ + Return a fresh :class:`httplib.HTTPConnection`. + """ + self.num_connections += 1 + log.info("Starting new HTTP connection (%d): %s" % + (self.num_connections, self.host)) + return HTTPConnection(host=self.host, + port=self.port, + strict=self.strict) + + def _get_conn(self, timeout=None): + """ + Get a connection. Will return a pooled connection if one is available. + + If no connections are available and :prop:`.block` is ``False``, then a + fresh connection is returned. + + :param timeout: + Seconds to wait before giving up and raising + :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and + :prop:`.block` is ``True``. + """ + conn = None + try: + conn = self.pool.get(block=self.block, timeout=timeout) + + except AttributeError: # self.pool is None + raise ClosedPoolError(self, "Pool is closed.") + + except Empty: + if self.block: + raise EmptyPoolError(self, + "Pool reached maximum size and no more " + "connections are allowed.") + pass # Oh well, we'll create a new connection then + + # If this is a persistent connection, check if it got disconnected + if conn and is_connection_dropped(conn): + log.info("Resetting dropped connection: %s" % self.host) + conn.close() + + return conn or self._new_conn() + + def _put_conn(self, conn): + """ + Put a connection back into the pool. + + :param conn: + Connection object for the current host and port as returned by + :meth:`._new_conn` or :meth:`._get_conn`. + + If the pool is already full, the connection is closed and discarded + because we exceeded maxsize. If connections are discarded frequently, + then maxsize should be increased. + + If the pool is closed, then the connection will be closed and discarded. + """ + try: + self.pool.put(conn, block=False) + return # Everything is dandy, done. + except AttributeError: + # self.pool is None. + pass + except Full: + # This should never happen if self.block == True + log.warning("HttpConnectionPool is full, discarding connection: %s" + % self.host) + + # Connection never got put back into the pool, close it. + conn.close() + + def _make_request(self, conn, method, url, timeout=_Default, + **httplib_request_kw): + """ + Perform a request on a given httplib connection object taken from our + pool. + """ + self.num_requests += 1 + + if timeout is _Default: + timeout = self.timeout + + conn.timeout = timeout # This only does anything in Py26+ + conn.request(method, url, **httplib_request_kw) + + # Set timeout + sock = getattr(conn, 'sock', False) # AppEngine doesn't have sock attr. + if sock: + sock.settimeout(timeout) + + try: # Python 2.7+, use buffering of HTTP responses + httplib_response = conn.getresponse(buffering=True) + except TypeError: # Python 2.6 and older + httplib_response = conn.getresponse() + + # AppEngine doesn't have a version attr. + http_version = getattr(conn, '_http_vsn_str', 'HTTP/?') + log.debug("\"%s %s %s\" %s %s" % (method, url, http_version, + httplib_response.status, + httplib_response.length)) + return httplib_response + + def close(self): + """ + Close all pooled connections and disable the pool. + """ + # Disable access to the pool + old_pool, self.pool = self.pool, None + + try: + while True: + conn = old_pool.get(block=False) + if conn: + conn.close() + + except Empty: + pass # Done. + + def is_same_host(self, url): + """ + Check if the given ``url`` is a member of the same host as this + connection pool. 
+ """ + if url.startswith('/'): + return True + + # TODO: Add optional support for socket.gethostbyname checking. + scheme, host, port = get_host(url) + + if self.port and not port: + # Use explicit default port for comparison when none is given. + port = port_by_scheme.get(scheme) + + return (scheme, host, port) == (self.scheme, self.host, self.port) + + def urlopen(self, method, url, body=None, headers=None, retries=3, + redirect=True, assert_same_host=True, timeout=_Default, + pool_timeout=None, release_conn=None, **response_kw): + """ + Get a connection from the pool and perform an HTTP request. This is the + lowest level call for making a request, so you'll need to specify all + the raw details. + + .. note:: + + More commonly, it's appropriate to use a convenience method provided + by :class:`.RequestMethods`, such as :meth:`request`. + + .. note:: + + `release_conn` will only behave as expected if + `preload_content=False` because we want to make + `preload_content=False` the default behaviour someday soon without + breaking backwards compatibility. + + :param method: + HTTP request method (such as GET, POST, PUT, etc.) + + :param body: + Data to send in the request body (useful for creating + POST requests, see HTTPConnectionPool.post_url for + more convenience). + + :param headers: + Dictionary of custom headers to send, such as User-Agent, + If-None-Match, etc. If None, pool headers are used. If provided, + these headers completely replace any pool-specific headers. + + :param retries: + Number of retries to allow before raising a MaxRetryError exception. + + :param redirect: + If True, automatically handle redirects (status codes 301, 302, + 303, 307). Each redirect counts as a retry. + + :param assert_same_host: + If ``True``, will make sure that the host of the pool requests is + consistent else will raise HostChangedError. When False, you can + use the pool on an HTTP proxy and request foreign hosts. + + :param timeout: + If specified, overrides the default timeout for this one request. + + :param pool_timeout: + If set and the pool is set to block=True, then this method will + block for ``pool_timeout`` seconds and raise EmptyPoolError if no + connection is available within the time period. + + :param release_conn: + If False, then the urlopen call will not release the connection + back into the pool once a response is received (but will release if + you read the entire contents of the response such as when + `preload_content=True`). This is useful if you're not preloading + the response's content immediately. You will need to call + ``r.release_conn()`` on the response ``r`` to return the connection + back into the pool. If None, it takes the value of + ``response_kw.get('preload_content', True)``. 
+ + :param \**response_kw: + Additional parameters are passed to + :meth:`urllib3.response.HTTPResponse.from_httplib` + """ + if headers is None: + headers = self.headers + + if retries < 0: + raise MaxRetryError(self, url) + + if timeout is _Default: + timeout = self.timeout + + if release_conn is None: + release_conn = response_kw.get('preload_content', True) + + # Check host + if assert_same_host and not self.is_same_host(url): + host = "%s://%s" % (self.scheme, self.host) + if self.port: + host = "%s:%d" % (host, self.port) + + raise HostChangedError(self, url, retries - 1) + + conn = None + + try: + # Request a connection from the queue + conn = self._get_conn(timeout=pool_timeout) + + # Make the request on the httplib connection object + httplib_response = self._make_request(conn, method, url, + timeout=timeout, + body=body, headers=headers) + + # If we're going to release the connection in ``finally:``, then + # the request doesn't need to know about the connection. Otherwise + # it will also try to release it and we'll have a double-release + # mess. + response_conn = not release_conn and conn + + # Import httplib's response into our own wrapper object + response = HTTPResponse.from_httplib(httplib_response, + pool=self, + connection=response_conn, + **response_kw) + + # else: + # The connection will be put back into the pool when + # ``response.release_conn()`` is called (implicitly by + # ``response.read()``) + + except Empty as e: + # Timed out by queue + raise TimeoutError(self, url, + "Request timed out. (pool_timeout=%s)" % + pool_timeout) + + except SocketTimeout as e: + # Timed out by socket + raise TimeoutError(self, url, + "Request timed out. (timeout=%s)" % + timeout) + + except BaseSSLError as e: + # SSL certificate error + raise SSLError(e) + + except CertificateError as e: + # Name mismatch + raise SSLError(e) + + except (HTTPException, SocketError) as e: + # Connection broken, discard. It will be replaced next _get_conn(). + conn = None + # This is necessary so we can access e below + err = e + + if retries == 0: + raise MaxRetryError(self, url, e) + + finally: + if release_conn: + # Put the connection back to be reused. If the connection is + # expired then it will be None, which will get replaced with a + # fresh connection during _get_conn. + self._put_conn(conn) + + if not conn: + # Try again + log.warn("Retrying (%d attempts remain) after connection " + "broken by '%r': %s" % (retries, err, url)) + return self.urlopen(method, url, body, headers, retries - 1, + redirect, assert_same_host, + timeout=timeout, pool_timeout=pool_timeout, + release_conn=release_conn, **response_kw) + + # Handle redirect? + redirect_location = redirect and response.get_redirect_location() + if redirect_location: + if response.status == 303: + method = 'GET' + log.info("Redirecting %s -> %s" % (url, redirect_location)) + return self.urlopen(method, redirect_location, body, headers, + retries - 1, redirect, assert_same_host, + timeout=timeout, pool_timeout=pool_timeout, + release_conn=release_conn, **response_kw) + + return response + + +class HTTPSConnectionPool(HTTPConnectionPool): + """ + Same as :class:`.HTTPConnectionPool`, but HTTPS. + + When Python is compiled with the :mod:`ssl` module, then + :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates, + instead of :class:`httplib.HTTPSConnection`. + + :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``, + ``assert_hostname`` and ``host`` in this order to verify connections. 
+ + The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs`` and + ``ssl_version`` are only used if :mod:`ssl` is available and are fed into + :meth:`urllib3.util.ssl_wrap_socket` to upgrade the connection socket + into an SSL socket. + """ + + scheme = 'https' + + def __init__(self, host, port=None, + strict=False, timeout=None, maxsize=1, + block=False, headers=None, + key_file=None, cert_file=None, cert_reqs=None, + ca_certs=None, ssl_version=None, + assert_hostname=None, assert_fingerprint=None): + + HTTPConnectionPool.__init__(self, host, port, + strict, timeout, maxsize, + block, headers) + self.key_file = key_file + self.cert_file = cert_file + self.cert_reqs = cert_reqs + self.ca_certs = ca_certs + self.ssl_version = ssl_version + self.assert_hostname = assert_hostname + self.assert_fingerprint = assert_fingerprint + + def _new_conn(self): + """ + Return a fresh :class:`httplib.HTTPSConnection`. + """ + self.num_connections += 1 + log.info("Starting new HTTPS connection (%d): %s" + % (self.num_connections, self.host)) + + if not ssl: # Platform-specific: Python compiled without +ssl + if not HTTPSConnection or HTTPSConnection is object: + raise SSLError("Can't connect to HTTPS URL because the SSL " + "module is not available.") + + return HTTPSConnection(host=self.host, + port=self.port, + strict=self.strict) + + connection = VerifiedHTTPSConnection(host=self.host, + port=self.port, + strict=self.strict) + connection.set_cert(key_file=self.key_file, cert_file=self.cert_file, + cert_reqs=self.cert_reqs, ca_certs=self.ca_certs, + assert_hostname=self.assert_hostname, + assert_fingerprint=self.assert_fingerprint) + + connection.ssl_version = self.ssl_version + + return connection + + +def connection_from_url(url, **kw): + """ + Given a url, return an :class:`.ConnectionPool` instance of its host. + + This is a shortcut for not having to parse out the scheme, host, and port + of the url before creating an :class:`.ConnectionPool` instance. + + :param url: + Absolute URL string that must include the scheme. Port is optional. + + :param \**kw: + Passes additional parameters to the constructor of the appropriate + :class:`.ConnectionPool`. Useful for specifying things like + timeout, maxsize, headers, etc. 
+ + Example: :: + + >>> conn = connection_from_url('http://google.com/') + >>> r = conn.request('GET', '/') + """ + scheme, host, port = get_host(url) + if scheme == 'https': + return HTTPSConnectionPool(host, port=port, **kw) + else: + return HTTPConnectionPool(host, port=port, **kw) diff --git a/awx/lib/site-packages/requests/packages/urllib3/contrib/__init__.py b/awx/lib/site-packages/requests/packages/urllib3/contrib/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/requests/packages/urllib3/contrib/ntlmpool.py b/awx/lib/site-packages/requests/packages/urllib3/contrib/ntlmpool.py new file mode 100644 index 0000000000..277ee0b2ab --- /dev/null +++ b/awx/lib/site-packages/requests/packages/urllib3/contrib/ntlmpool.py @@ -0,0 +1,120 @@ +# urllib3/contrib/ntlmpool.py +# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) +# +# This module is part of urllib3 and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" +NTLM authenticating pool, contributed by erikcederstran + +Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10 +""" + +try: + from http.client import HTTPSConnection +except ImportError: + from httplib import HTTPSConnection +from logging import getLogger +from ntlm import ntlm + +from urllib3 import HTTPSConnectionPool + + +log = getLogger(__name__) + + +class NTLMConnectionPool(HTTPSConnectionPool): + """ + Implements an NTLM authentication version of an urllib3 connection pool + """ + + scheme = 'https' + + def __init__(self, user, pw, authurl, *args, **kwargs): + """ + authurl is a random URL on the server that is protected by NTLM. + user is the Windows user, probably in the DOMAIN\username format. + pw is the password for the user. + """ + super(NTLMConnectionPool, self).__init__(*args, **kwargs) + self.authurl = authurl + self.rawuser = user + user_parts = user.split('\\', 1) + self.domain = user_parts[0].upper() + self.user = user_parts[1] + self.pw = pw + + def _new_conn(self): + # Performs the NTLM handshake that secures the connection. The socket + # must be kept open while requests are performed. + self.num_connections += 1 + log.debug('Starting NTLM HTTPS connection no. 
%d: https://%s%s' % + (self.num_connections, self.host, self.authurl)) + + headers = {} + headers['Connection'] = 'Keep-Alive' + req_header = 'Authorization' + resp_header = 'www-authenticate' + + conn = HTTPSConnection(host=self.host, port=self.port) + + # Send negotiation message + headers[req_header] = ( + 'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser)) + log.debug('Request headers: %s' % headers) + conn.request('GET', self.authurl, None, headers) + res = conn.getresponse() + reshdr = dict(res.getheaders()) + log.debug('Response status: %s %s' % (res.status, res.reason)) + log.debug('Response headers: %s' % reshdr) + log.debug('Response data: %s [...]' % res.read(100)) + + # Remove the reference to the socket, so that it can not be closed by + # the response object (we want to keep the socket open) + res.fp = None + + # Server should respond with a challenge message + auth_header_values = reshdr[resp_header].split(', ') + auth_header_value = None + for s in auth_header_values: + if s[:5] == 'NTLM ': + auth_header_value = s[5:] + if auth_header_value is None: + raise Exception('Unexpected %s response header: %s' % + (resp_header, reshdr[resp_header])) + + # Send authentication message + ServerChallenge, NegotiateFlags = \ + ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value) + auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge, + self.user, + self.domain, + self.pw, + NegotiateFlags) + headers[req_header] = 'NTLM %s' % auth_msg + log.debug('Request headers: %s' % headers) + conn.request('GET', self.authurl, None, headers) + res = conn.getresponse() + log.debug('Response status: %s %s' % (res.status, res.reason)) + log.debug('Response headers: %s' % dict(res.getheaders())) + log.debug('Response data: %s [...]' % res.read()[:100]) + if res.status != 200: + if res.status == 401: + raise Exception('Server rejected request: wrong ' + 'username or password') + raise Exception('Wrong server response: %s %s' % + (res.status, res.reason)) + + res.fp = None + log.debug('Connection established') + return conn + + def urlopen(self, method, url, body=None, headers=None, retries=3, + redirect=True, assert_same_host=True): + if headers is None: + headers = {} + headers['Connection'] = 'Keep-Alive' + return super(NTLMConnectionPool, self).urlopen(method, url, body, + headers, retries, + redirect, + assert_same_host) diff --git a/awx/lib/site-packages/requests/packages/urllib3/contrib/pyopenssl.py b/awx/lib/site-packages/requests/packages/urllib3/contrib/pyopenssl.py new file mode 100644 index 0000000000..5c4c6d8d31 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/urllib3/contrib/pyopenssl.py @@ -0,0 +1,167 @@ +'''SSL with SNI-support for Python 2. + +This needs the following packages installed: + +* pyOpenSSL (tested with 0.13) +* ndg-httpsclient (tested with 0.3.2) +* pyasn1 (tested with 0.1.6) + +To activate it call :func:`~urllib3.contrib.pyopenssl.inject_into_urllib3`. +This can be done in a ``sitecustomize`` module, or at any other time before +your application begins using ``urllib3``, like this:: + + try: + import urllib3.contrib.pyopenssl + urllib3.contrib.pyopenssl.inject_into_urllib3() + except ImportError: + pass + +Now you can use :mod:`urllib3` as you normally would, and it will support SNI +when the required modules are installed. 
+''' + +from ndg.httpsclient.ssl_peer_verification import (ServerSSLCertVerification, + SUBJ_ALT_NAME_SUPPORT) +from ndg.httpsclient.subj_alt_name import SubjectAltName +import OpenSSL.SSL +from pyasn1.codec.der import decoder as der_decoder +from socket import _fileobject +import ssl + +from .. import connectionpool +from .. import util + +__all__ = ['inject_into_urllib3', 'extract_from_urllib3'] + +# SNI only *really* works if we can read the subjectAltName of certificates. +HAS_SNI = SUBJ_ALT_NAME_SUPPORT + +# Map from urllib3 to PyOpenSSL compatible parameter-values. +_openssl_versions = { + ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD, + ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD, + ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD, +} +_openssl_verify = { + ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE, + ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER, + ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER + + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, +} + + +orig_util_HAS_SNI = util.HAS_SNI +orig_connectionpool_ssl_wrap_socket = connectionpool.ssl_wrap_socket + + +def inject_into_urllib3(): + 'Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.' + + connectionpool.ssl_wrap_socket = ssl_wrap_socket + util.HAS_SNI = HAS_SNI + + +def extract_from_urllib3(): + 'Undo monkey-patching by :func:`inject_into_urllib3`.' + + connectionpool.ssl_wrap_socket = orig_connectionpool_ssl_wrap_socket + util.HAS_SNI = orig_util_HAS_SNI + + +### Note: This is a slightly bug-fixed version of same from ndg-httpsclient. +def get_subj_alt_name(peer_cert): + # Search through extensions + dns_name = [] + if not SUBJ_ALT_NAME_SUPPORT: + return dns_name + + general_names = SubjectAltName() + for i in range(peer_cert.get_extension_count()): + ext = peer_cert.get_extension(i) + ext_name = ext.get_short_name() + if ext_name != 'subjectAltName': + continue + + # PyOpenSSL returns extension data in ASN.1 encoded form + ext_dat = ext.get_data() + decoded_dat = der_decoder.decode(ext_dat, + asn1Spec=general_names) + + for name in decoded_dat: + if not isinstance(name, SubjectAltName): + continue + for entry in range(len(name)): + component = name.getComponentByPosition(entry) + if component.getName() != 'dNSName': + continue + dns_name.append(str(component.getComponent())) + + return dns_name + + +class WrappedSocket(object): + '''API-compatibility wrapper for Python OpenSSL's Connection-class.''' + + def __init__(self, connection, socket): + self.connection = connection + self.socket = socket + + def makefile(self, mode, bufsize=-1): + return _fileobject(self.connection, mode, bufsize) + + def settimeout(self, timeout): + return self.socket.settimeout(timeout) + + def sendall(self, data): + return self.connection.sendall(data) + + def getpeercert(self, binary_form=False): + x509 = self.connection.get_peer_certificate() + if not x509: + raise ssl.SSLError('') + + if binary_form: + return OpenSSL.crypto.dump_certificate( + OpenSSL.crypto.FILETYPE_ASN1, + x509) + + return { + 'subject': ( + (('commonName', x509.get_subject().CN),), + ), + 'subjectAltName': [ + ('DNS', value) + for value in get_subj_alt_name(x509) + ] + } + + +def _verify_callback(cnx, x509, err_no, err_depth, return_code): + return err_no == 0 + + +def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, + ca_certs=None, server_hostname=None, + ssl_version=None): + ctx = OpenSSL.SSL.Context(_openssl_versions[ssl_version]) + if certfile: + ctx.use_certificate_file(certfile) + if keyfile: + ctx.use_privatekey_file(keyfile) + if cert_reqs != ssl.CERT_NONE: + 
ctx.set_verify(_openssl_verify[cert_reqs], _verify_callback) + if ca_certs: + try: + ctx.load_verify_locations(ca_certs, None) + except OpenSSL.SSL.Error as e: + raise ssl.SSLError('bad ca_certs: %r' % ca_certs, e) + + cnx = OpenSSL.SSL.Connection(ctx, sock) + cnx.set_tlsext_host_name(server_hostname) + cnx.set_connect_state() + try: + cnx.do_handshake() + except OpenSSL.SSL.Error as e: + raise ssl.SSLError('bad handshake', e) + + return WrappedSocket(cnx, sock) diff --git a/awx/lib/site-packages/requests/packages/urllib3/exceptions.py b/awx/lib/site-packages/requests/packages/urllib3/exceptions.py new file mode 100644 index 0000000000..2e2a259cd6 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/urllib3/exceptions.py @@ -0,0 +1,95 @@ +# urllib3/exceptions.py +# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) +# +# This module is part of urllib3 and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + + +## Base Exceptions + +class HTTPError(Exception): + "Base exception used by this module." + pass + + +class PoolError(HTTPError): + "Base exception for errors caused within a pool." + def __init__(self, pool, message): + self.pool = pool + HTTPError.__init__(self, "%s: %s" % (pool, message)) + + def __reduce__(self): + # For pickling purposes. + return self.__class__, (None, None) + + +class RequestError(PoolError): + "Base exception for PoolErrors that have associated URLs." + def __init__(self, pool, url, message): + self.url = url + PoolError.__init__(self, pool, message) + + def __reduce__(self): + # For pickling purposes. + return self.__class__, (None, self.url, None) + + +class SSLError(HTTPError): + "Raised when SSL certificate fails in an HTTPS connection." + pass + + +class DecodeError(HTTPError): + "Raised when automatic decoding based on Content-Type fails." + pass + + +## Leaf Exceptions + +class MaxRetryError(RequestError): + "Raised when the maximum number of retries is exceeded." + + def __init__(self, pool, url, reason=None): + self.reason = reason + + message = "Max retries exceeded with url: %s" % url + if reason: + message += " (Caused by %s: %s)" % (type(reason), reason) + else: + message += " (Caused by redirect)" + + RequestError.__init__(self, pool, url, message) + + +class HostChangedError(RequestError): + "Raised when an existing pool gets a request for a foreign host." + + def __init__(self, pool, url, retries=3): + message = "Tried to open a foreign host with url: %s" % url + RequestError.__init__(self, pool, url, message) + self.retries = retries + + +class TimeoutError(RequestError): + "Raised when a socket timeout occurs." + pass + + +class EmptyPoolError(PoolError): + "Raised when a pool runs out of connections and no more are allowed." + pass + + +class ClosedPoolError(PoolError): + "Raised when a request enters a pool after the pool has been closed." + pass + + +class LocationParseError(ValueError, HTTPError): + "Raised when get_host or similar fails to parse the URL input." 
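+
+    # A hedged handling sketch (``pool`` and ``log`` are hypothetical; any
+    # HTTPConnectionPool and logger would do). The leaf classes above let
+    # callers catch failures as coarsely or as finely as they need:
+    #
+    #     try:
+    #         response = pool.request('GET', '/')
+    #     except MaxRetryError as e:
+    #         log.warning('gave up after retries: %s', e.reason)
+    #     except HTTPError as e:
+    #         log.error('other urllib3 failure: %s', e)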
+ + def __init__(self, location): + message = "Failed to parse: %s" % location + HTTPError.__init__(self, message) + + self.location = location diff --git a/awx/lib/site-packages/requests/packages/urllib3/filepost.py b/awx/lib/site-packages/requests/packages/urllib3/filepost.py new file mode 100644 index 0000000000..470309a006 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/urllib3/filepost.py @@ -0,0 +1,98 @@ +# urllib3/filepost.py +# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt) +# +# This module is part of urllib3 and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +import codecs +import mimetypes + +from uuid import uuid4 +from io import BytesIO + +from .packages import six +from .packages.six import b + +writer = codecs.lookup('utf-8')[3] + + +def choose_boundary(): + """ + Our embarassingly-simple replacement for mimetools.choose_boundary. + """ + return uuid4().hex + + +def get_content_type(filename): + return mimetypes.guess_type(filename)[0] or 'application/octet-stream' + + +def iter_fields(fields): + """ + Iterate over fields. + + Supports list of (k, v) tuples and dicts. + """ + if isinstance(fields, dict): + return ((k, v) for k, v in six.iteritems(fields)) + + return ((k, v) for k, v in fields) + + +def encode_multipart_formdata(fields, boundary=None): + """ + Encode a dictionary of ``fields`` using the multipart/form-data MIME format. + + :param fields: + Dictionary of fields or list of (key, value) or (key, value, MIME type) + field tuples. The key is treated as the field name, and the value as + the body of the form-data bytes. If the value is a tuple of two + elements, then the first element is treated as the filename of the + form-data section and a suitable MIME type is guessed based on the + filename. If the value is a tuple of three elements, then the third + element is treated as an explicit MIME type of the form-data section. + + Field names and filenames must be unicode. + + :param boundary: + If not specified, then a random boundary will be generated using + :func:`mimetools.choose_boundary`. + """ + body = BytesIO() + if boundary is None: + boundary = choose_boundary() + + for fieldname, value in iter_fields(fields): + body.write(b('--%s\r\n' % (boundary))) + + if isinstance(value, tuple): + if len(value) == 3: + filename, data, content_type = value + else: + filename, data = value + content_type = get_content_type(filename) + writer(body).write('Content-Disposition: form-data; name="%s"; ' + 'filename="%s"\r\n' % (fieldname, filename)) + body.write(b('Content-Type: %s\r\n\r\n' % + (content_type,))) + else: + data = value + writer(body).write('Content-Disposition: form-data; name="%s"\r\n' + % (fieldname)) + body.write(b'\r\n') + + if isinstance(data, int): + data = str(data) # Backwards compatibility + + if isinstance(data, six.text_type): + writer(body).write(data) + else: + body.write(data) + + body.write(b'\r\n') + + body.write(b('--%s--\r\n' % (boundary))) + + content_type = str('multipart/form-data; boundary=%s' % boundary) + + return body.getvalue(), content_type diff --git a/awx/lib/site-packages/requests/packages/urllib3/packages/__init__.py b/awx/lib/site-packages/requests/packages/urllib3/packages/__init__.py new file mode 100644 index 0000000000..37e8351577 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/urllib3/packages/__init__.py @@ -0,0 +1,4 @@ +from __future__ import absolute_import + +from . 
import ssl_match_hostname + diff --git a/awx/lib/site-packages/requests/packages/urllib3/packages/ordered_dict.py b/awx/lib/site-packages/requests/packages/urllib3/packages/ordered_dict.py new file mode 100644 index 0000000000..7f8ee15436 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/urllib3/packages/ordered_dict.py @@ -0,0 +1,260 @@ +# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. +# Passes Python2.7's test suite and incorporates all the latest updates. +# Copyright 2009 Raymond Hettinger, released under the MIT License. +# http://code.activestate.com/recipes/576693/ + +try: + from thread import get_ident as _get_ident +except ImportError: + from dummy_thread import get_ident as _get_ident + +try: + from _abcoll import KeysView, ValuesView, ItemsView +except ImportError: + pass + + +class OrderedDict(dict): + 'Dictionary that remembers insertion order' + # An inherited dict maps keys to values. + # The inherited dict provides __getitem__, __len__, __contains__, and get. + # The remaining methods are order-aware. + # Big-O running times for all methods are the same as for regular dictionaries. + + # The internal self.__map dictionary maps keys to links in a doubly linked list. + # The circular doubly linked list starts and ends with a sentinel element. + # The sentinel element never gets deleted (this simplifies the algorithm). + # Each link is stored as a list of length three: [PREV, NEXT, KEY]. + + def __init__(self, *args, **kwds): + '''Initialize an ordered dictionary. Signature is the same as for + regular dictionaries, but keyword arguments are not recommended + because their insertion order is arbitrary. + + ''' + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__root + except AttributeError: + self.__root = root = [] # sentinel node + root[:] = [root, root, None] + self.__map = {} + self.__update(*args, **kwds) + + def __setitem__(self, key, value, dict_setitem=dict.__setitem__): + 'od.__setitem__(i, y) <==> od[i]=y' + # Setting a new item creates a new link which goes at the end of the linked + # list, and the inherited dictionary is updated with the new key/value pair. + if key not in self: + root = self.__root + last = root[0] + last[1] = root[0] = self.__map[key] = [last, root, key] + dict_setitem(self, key, value) + + def __delitem__(self, key, dict_delitem=dict.__delitem__): + 'od.__delitem__(y) <==> del od[y]' + # Deleting an existing item uses self.__map to find the link which is + # then removed by updating the links in the predecessor and successor nodes. + dict_delitem(self, key) + link_prev, link_next, key = self.__map.pop(key) + link_prev[1] = link_next + link_next[0] = link_prev + + def __iter__(self): + 'od.__iter__() <==> iter(od)' + root = self.__root + curr = root[1] + while curr is not root: + yield curr[2] + curr = curr[1] + + def __reversed__(self): + 'od.__reversed__() <==> reversed(od)' + root = self.__root + curr = root[0] + while curr is not root: + yield curr[2] + curr = curr[0] + + def clear(self): + 'od.clear() -> None. Remove all items from od.' + try: + for node in self.__map.itervalues(): + del node[:] + root = self.__root + root[:] = [root, root, None] + self.__map.clear() + except AttributeError: + pass + dict.clear(self) + + def popitem(self, last=True): + '''od.popitem() -> (k, v), return and remove a (key, value) pair. + Pairs are returned in LIFO order if last is true or FIFO order if false. 
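+        A short illustration of both orders (a sketch of standard behavior)::
+
+            >>> od = OrderedDict([('a', 1), ('b', 2), ('c', 3)])
+            >>> od.popitem()
+            ('c', 3)
+            >>> od.popitem(last=False)
+            ('a', 1)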
+ + ''' + if not self: + raise KeyError('dictionary is empty') + root = self.__root + if last: + link = root[0] + link_prev = link[0] + link_prev[1] = root + root[0] = link_prev + else: + link = root[1] + link_next = link[1] + root[1] = link_next + link_next[0] = root + key = link[2] + del self.__map[key] + value = dict.pop(self, key) + return key, value + + # -- the following methods do not depend on the internal structure -- + + def keys(self): + 'od.keys() -> list of keys in od' + return list(self) + + def values(self): + 'od.values() -> list of values in od' + return [self[key] for key in self] + + def items(self): + 'od.items() -> list of (key, value) pairs in od' + return [(key, self[key]) for key in self] + + def iterkeys(self): + 'od.iterkeys() -> an iterator over the keys in od' + return iter(self) + + def itervalues(self): + 'od.itervalues -> an iterator over the values in od' + for k in self: + yield self[k] + + def iteritems(self): + 'od.iteritems -> an iterator over the (key, value) items in od' + for k in self: + yield (k, self[k]) + + def update(*args, **kwds): + '''od.update(E, **F) -> None. Update od from dict/iterable E and F. + + If E is a dict instance, does: for k in E: od[k] = E[k] + If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] + Or if E is an iterable of items, does: for k, v in E: od[k] = v + In either case, this is followed by: for k, v in F.items(): od[k] = v + + ''' + if len(args) > 2: + raise TypeError('update() takes at most 2 positional ' + 'arguments (%d given)' % (len(args),)) + elif not args: + raise TypeError('update() takes at least 1 argument (0 given)') + self = args[0] + # Make progressively weaker assumptions about "other" + other = () + if len(args) == 2: + other = args[1] + if isinstance(other, dict): + for key in other: + self[key] = other[key] + elif hasattr(other, 'keys'): + for key in other.keys(): + self[key] = other[key] + else: + for key, value in other: + self[key] = value + for key, value in kwds.items(): + self[key] = value + + __update = update # let subclasses override update without breaking __init__ + + __marker = object() + + def pop(self, key, default=__marker): + '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. + If key is not found, d is returned if given, otherwise KeyError is raised. + + ''' + if key in self: + result = self[key] + del self[key] + return result + if default is self.__marker: + raise KeyError(key) + return default + + def setdefault(self, key, default=None): + 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' + if key in self: + return self[key] + self[key] = default + return default + + def __repr__(self, _repr_running={}): + 'od.__repr__() <==> repr(od)' + call_key = id(self), _get_ident() + if call_key in _repr_running: + return '...' 
+ _repr_running[call_key] = 1 + try: + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, self.items()) + finally: + del _repr_running[call_key] + + def __reduce__(self): + 'Return state information for pickling' + items = [[k, self[k]] for k in self] + inst_dict = vars(self).copy() + for k in vars(OrderedDict()): + inst_dict.pop(k, None) + if inst_dict: + return (self.__class__, (items,), inst_dict) + return self.__class__, (items,) + + def copy(self): + 'od.copy() -> a shallow copy of od' + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S + and values equal to v (which defaults to None). + + ''' + d = cls() + for key in iterable: + d[key] = value + return d + + def __eq__(self, other): + '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive + while comparison to a regular mapping is order-insensitive. + + ''' + if isinstance(other, OrderedDict): + return len(self)==len(other) and self.items() == other.items() + return dict.__eq__(self, other) + + def __ne__(self, other): + return not self == other + + # -- the following methods are only used in Python 2.7 -- + + def viewkeys(self): + "od.viewkeys() -> a set-like object providing a view on od's keys" + return KeysView(self) + + def viewvalues(self): + "od.viewvalues() -> an object providing a view on od's values" + return ValuesView(self) + + def viewitems(self): + "od.viewitems() -> a set-like object providing a view on od's items" + return ItemsView(self) diff --git a/awx/lib/site-packages/requests/packages/urllib3/packages/six.py b/awx/lib/site-packages/requests/packages/urllib3/packages/six.py new file mode 100644 index 0000000000..27d80112bf --- /dev/null +++ b/awx/lib/site-packages/requests/packages/urllib3/packages/six.py @@ -0,0 +1,385 @@ +"""Utilities for writing code that runs on Python 2 and 3""" + +#Copyright (c) 2010-2011 Benjamin Peterson + +#Permission is hereby granted, free of charge, to any person obtaining a copy of +#this software and associated documentation files (the "Software"), to deal in +#the Software without restriction, including without limitation the rights to +#use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +#the Software, and to permit persons to whom the Software is furnished to do so, +#subject to the following conditions: + +#The above copyright notice and this permission notice shall be included in all +#copies or substantial portions of the Software. + +#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +#FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +#COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +#IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +#CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +import operator +import sys +import types + +__author__ = "Benjamin Peterson <benjamin@python.org>" +__version__ = "1.2.0" # Revision 41c74fef2ded + + +# True if we are running on Python 3. 
+PY3 = sys.version_info[0] == 3 + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). + class X(object): + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) + # This is a bit ugly, but it avoids running this again. + delattr(tp, self.name) + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + + +class _MovedItems(types.ModuleType): + """Lazy loading of moved objects""" + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("reload_module", "__builtin__", "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("copyreg", "copy_reg"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("tkinter", "Tkinter"), + 
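# A usage sketch (comment only): each entry in this list becomes an
+ # attribute of ``six.moves``, so portable code can write, e.g.:
+ #     from six.moves import http_client   # httplib on py2, http.client on py3
+ #     from six.moves import queue         # Queue on py2, queue on py3
+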
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", + "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", + "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", + "tkinter.simpledialog"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("winreg", "_winreg"), +] +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) +del attr + +moves = sys.modules[__name__ + ".moves"] = _MovedItems("moves") + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_code = "__code__" + _func_defaults = "__defaults__" + + _iterkeys = "keys" + _itervalues = "values" + _iteritems = "items" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_code = "func_code" + _func_defaults = "func_defaults" + + _iterkeys = "iterkeys" + _itervalues = "itervalues" + _iteritems = "iteritems" + + +try: + advance_iterator = next +except NameError: + def advance_iterator(it): + return it.next() +next = advance_iterator + + +if PY3: + def get_unbound_function(unbound): + return unbound + + Iterator = object + + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) +else: + def get_unbound_function(unbound): + return unbound.im_func + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc(get_unbound_function, + """Get the function out of a possibly unbound function""") + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = operator.attrgetter(_func_defaults) + + +def iterkeys(d): + """Return an iterator over the keys of a dictionary.""" + return iter(getattr(d, _iterkeys)()) + +def itervalues(d): + """Return an iterator over the values of a dictionary.""" + return iter(getattr(d, _itervalues)()) + +def iteritems(d): + """Return an iterator over the (key, value) pairs of a dictionary.""" + return iter(getattr(d, _iteritems)()) + + +if PY3: + def b(s): + return s.encode("latin-1") + def u(s): + return s + if sys.version_info[1] <= 1: + def int2byte(i): + return bytes((i,)) + else: + # This is about 2x faster than the implementation above on 3.2+ + int2byte = operator.methodcaller("to_bytes", 1, "big") + import io + StringIO = io.StringIO + BytesIO = io.BytesIO +else: + def b(s): + return s + def u(s): + return unicode(s, "unicode_escape") + int2byte = chr + import 
StringIO + StringIO = BytesIO = StringIO.StringIO +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +if PY3: + import builtins + exec_ = getattr(builtins, "exec") + + + def reraise(tp, value, tb=None): + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + + + print_ = getattr(builtins, "print") + del builtins + +else: + def exec_(code, globs=None, locs=None): + """Execute code in a namespace.""" + if globs is None: + frame = sys._getframe(1) + globs = frame.f_globals + if locs is None: + locs = frame.f_locals + del frame + elif locs is None: + locs = globs + exec("""exec code in globs, locs""") + + + exec_("""def reraise(tp, value, tb=None): + raise tp, value, tb +""") + + + def print_(*args, **kwargs): + """The new-style print function.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + def write(data): + if not isinstance(data, basestring): + data = str(data) + fp.write(data) + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) + +_add_doc(reraise, """Reraise an exception.""") + + +def with_metaclass(meta, base=object): + """Create a base class with a metaclass.""" + return meta("NewBase", (base,), {}) diff --git a/awx/lib/site-packages/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py b/awx/lib/site-packages/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py new file mode 100644 index 0000000000..9560b04529 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py @@ -0,0 +1,61 @@ +"""The match_hostname() function from Python 3.2, essential when using SSL.""" + +import re + +__version__ = '3.2.2' + +class CertificateError(ValueError): + pass + +def _dnsname_to_pat(dn): + pats = [] + for frag in dn.split(r'.'): + if frag == '*': + # When '*' is a fragment by itself, it matches a non-empty dotless + # fragment. + pats.append('[^.]+') + else: + # Otherwise, '*' matches any dotless fragment. + frag = re.escape(frag) + pats.append(frag.replace(r'\*', '[^.]*')) + return re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) + +def match_hostname(cert, hostname): + """Verify that *cert* (in decoded format as returned by + SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 rules + are mostly followed, but IP addresses are not accepted for *hostname*. + + CertificateError is raised on failure. On success, the function + returns nothing. 
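+
+    A hedged sketch against a hand-built certificate dict::
+
+        >>> cert = {'subjectAltName': (('DNS', 'example.com'),)}
+        >>> match_hostname(cert, 'example.com')   # success: returns None
+        >>> match_hostname(cert, 'other.org')
+        Traceback (most recent call last):
+            ...
+        CertificateError: hostname 'other.org' doesn't match 'example.com'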
+ """ + if not cert: + raise ValueError("empty or no certificate") + dnsnames = [] + san = cert.get('subjectAltName', ()) + for key, value in san: + if key == 'DNS': + if _dnsname_to_pat(value).match(hostname): + return + dnsnames.append(value) + if not dnsnames: + # The subject is only checked when there is no dNSName entry + # in subjectAltName + for sub in cert.get('subject', ()): + for key, value in sub: + # XXX according to RFC 2818, the most specific Common Name + # must be used. + if key == 'commonName': + if _dnsname_to_pat(value).match(hostname): + return + dnsnames.append(value) + if len(dnsnames) > 1: + raise CertificateError("hostname %r " + "doesn't match either of %s" + % (hostname, ', '.join(map(repr, dnsnames)))) + elif len(dnsnames) == 1: + raise CertificateError("hostname %r " + "doesn't match %r" + % (hostname, dnsnames[0])) + else: + raise CertificateError("no appropriate commonName or " + "subjectAltName fields were found") diff --git a/awx/lib/site-packages/requests/packages/urllib3/poolmanager.py b/awx/lib/site-packages/requests/packages/urllib3/poolmanager.py new file mode 100644 index 0000000000..ce0c248ea8 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/urllib3/poolmanager.py @@ -0,0 +1,192 @@ +# urllib3/poolmanager.py +# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) +# +# This module is part of urllib3 and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +import logging + +from ._collections import RecentlyUsedContainer +from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool +from .connectionpool import connection_from_url, port_by_scheme +from .request import RequestMethods +from .util import parse_url + + +__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url'] + + +pool_classes_by_scheme = { + 'http': HTTPConnectionPool, + 'https': HTTPSConnectionPool, +} + +log = logging.getLogger(__name__) + +SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs', + 'ssl_version') + + +class PoolManager(RequestMethods): + """ + Allows for arbitrary requests while transparently keeping track of + necessary connection pools for you. + + :param num_pools: + Number of connection pools to cache before discarding the least + recently used pool. + + :param headers: + Headers to include with all requests, unless other headers are given + explicitly. + + :param \**connection_pool_kw: + Additional parameters are used to create fresh + :class:`urllib3.connectionpool.ConnectionPool` instances. + + Example: :: + + >>> manager = PoolManager(num_pools=2) + >>> r = manager.request('GET', 'http://google.com/') + >>> r = manager.request('GET', 'http://google.com/mail') + >>> r = manager.request('GET', 'http://yahoo.com/') + >>> len(manager.pools) + 2 + + """ + + def __init__(self, num_pools=10, headers=None, **connection_pool_kw): + RequestMethods.__init__(self, headers) + self.connection_pool_kw = connection_pool_kw + self.pools = RecentlyUsedContainer(num_pools, + dispose_func=lambda p: p.close()) + + def _new_pool(self, scheme, host, port): + """ + Create a new :class:`ConnectionPool` based on host, port and scheme. + + This method is used to actually create the connection pools handed out + by :meth:`connection_from_url` and companion methods. It is intended + to be overridden for customization. 
+ """ + pool_cls = pool_classes_by_scheme[scheme] + kwargs = self.connection_pool_kw + if scheme == 'http': + kwargs = self.connection_pool_kw.copy() + for kw in SSL_KEYWORDS: + kwargs.pop(kw, None) + + return pool_cls(host, port, **kwargs) + + def clear(self): + """ + Empty our store of pools and direct them all to close. + + This will not affect in-flight connections, but they will not be + re-used after completion. + """ + self.pools.clear() + + def connection_from_host(self, host, port=None, scheme='http'): + """ + Get a :class:`ConnectionPool` based on the host, port, and scheme. + + If ``port`` isn't given, it will be derived from the ``scheme`` using + ``urllib3.connectionpool.port_by_scheme``. + """ + scheme = scheme or 'http' + port = port or port_by_scheme.get(scheme, 80) + + pool_key = (scheme, host, port) + + # If the scheme, host, or port doesn't match existing open connections, + # open a new ConnectionPool. + pool = self.pools.get(pool_key) + if pool: + return pool + + # Make a fresh ConnectionPool of the desired type + pool = self._new_pool(scheme, host, port) + self.pools[pool_key] = pool + return pool + + def connection_from_url(self, url): + """ + Similar to :func:`urllib3.connectionpool.connection_from_url` but + doesn't pass any additional parameters to the + :class:`urllib3.connectionpool.ConnectionPool` constructor. + + Additional parameters are taken from the :class:`.PoolManager` + constructor. + """ + u = parse_url(url) + return self.connection_from_host(u.host, port=u.port, scheme=u.scheme) + + def urlopen(self, method, url, redirect=True, **kw): + """ + Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen` + with custom cross-host redirect logic and only sends the request-uri + portion of the ``url``. + + The given ``url`` parameter must be absolute, such that an appropriate + :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it. + """ + u = parse_url(url) + conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme) + + kw['assert_same_host'] = False + kw['redirect'] = False + if 'headers' not in kw: + kw['headers'] = self.headers + + response = conn.urlopen(method, u.request_uri, **kw) + + redirect_location = redirect and response.get_redirect_location() + if not redirect_location: + return response + + if response.status == 303: + method = 'GET' + + log.info("Redirecting %s -> %s" % (url, redirect_location)) + kw['retries'] = kw.get('retries', 3) - 1 # Persist retries countdown + kw['redirect'] = redirect + return self.urlopen(method, redirect_location, **kw) + + +class ProxyManager(RequestMethods): + """ + Given a ConnectionPool to a proxy, the ProxyManager's ``urlopen`` method + will make requests to any url through the defined proxy. The ProxyManager + class will automatically set the 'Host' header if it is not provided. + """ + + def __init__(self, proxy_pool): + self.proxy_pool = proxy_pool + + def _set_proxy_headers(self, url, headers=None): + """ + Sets headers needed by proxies: specifically, the Accept and Host + headers. Only sets headers not provided by the user. + """ + headers_ = {'Accept': '*/*'} + + host = parse_url(url).host + if host: + headers_['Host'] = host + + if headers: + headers_.update(headers) + + return headers_ + + def urlopen(self, method, url, **kw): + "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute." 
+ kw['assert_same_host'] = False + kw['headers'] = self._set_proxy_headers(url, headers=kw.get('headers')) + return self.proxy_pool.urlopen(method, url, **kw) + + +def proxy_from_url(url, **pool_kw): + proxy_pool = connection_from_url(url, **pool_kw) + return ProxyManager(proxy_pool) diff --git a/awx/lib/site-packages/requests/packages/urllib3/request.py b/awx/lib/site-packages/requests/packages/urllib3/request.py new file mode 100644 index 0000000000..bf0256e964 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/urllib3/request.py @@ -0,0 +1,142 @@ +# urllib3/request.py +# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) +# +# This module is part of urllib3 and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +try: + from urllib.parse import urlencode +except ImportError: + from urllib import urlencode + +from .filepost import encode_multipart_formdata + + +__all__ = ['RequestMethods'] + + +class RequestMethods(object): + """ + Convenience mixin for classes who implement a :meth:`urlopen` method, such + as :class:`~urllib3.connectionpool.HTTPConnectionPool` and + :class:`~urllib3.poolmanager.PoolManager`. + + Provides behavior for making common types of HTTP request methods and + decides which type of request field encoding to use. + + Specifically, + + :meth:`.request_encode_url` is for sending requests whose fields are encoded + in the URL (such as GET, HEAD, DELETE). + + :meth:`.request_encode_body` is for sending requests whose fields are + encoded in the *body* of the request using multipart or www-form-urlencoded + (such as for POST, PUT, PATCH). + + :meth:`.request` is for making any kind of request, it will look up the + appropriate encoding format and use one of the above two methods to make + the request. + + Initializer parameters: + + :param headers: + Headers to include with all requests, unless other headers are given + explicitly. + """ + + _encode_url_methods = set(['DELETE', 'GET', 'HEAD', 'OPTIONS']) + _encode_body_methods = set(['PATCH', 'POST', 'PUT', 'TRACE']) + + def __init__(self, headers=None): + self.headers = headers or {} + + def urlopen(self, method, url, body=None, headers=None, + encode_multipart=True, multipart_boundary=None, + **kw): # Abstract + raise NotImplemented("Classes extending RequestMethods must implement " + "their own ``urlopen`` method.") + + def request(self, method, url, fields=None, headers=None, **urlopen_kw): + """ + Make a request using :meth:`urlopen` with the appropriate encoding of + ``fields`` based on the ``method`` used. + + This is a convenience method that requires the least amount of manual + effort. It can be used in most situations, while still having the option + to drop down to more specific methods when necessary, such as + :meth:`request_encode_url`, :meth:`request_encode_body`, + or even the lowest level :meth:`urlopen`. + """ + method = method.upper() + + if method in self._encode_url_methods: + return self.request_encode_url(method, url, fields=fields, + headers=headers, + **urlopen_kw) + else: + return self.request_encode_body(method, url, fields=fields, + headers=headers, + **urlopen_kw) + + def request_encode_url(self, method, url, fields=None, **urlopen_kw): + """ + Make a request using :meth:`urlopen` with the ``fields`` encoded in + the url. This is useful for request methods like GET, HEAD, DELETE, etc. + """ + if fields: + url += '?'
+ urlencode(fields) + return self.urlopen(method, url, **urlopen_kw) + + def request_encode_body(self, method, url, fields=None, headers=None, + encode_multipart=True, multipart_boundary=None, + **urlopen_kw): + """ + Make a request using :meth:`urlopen` with the ``fields`` encoded in + the body. This is useful for request methods like POST, PUT, PATCH, etc. + + When ``encode_multipart=True`` (default), then + :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode the + payload with the appropriate content type. Otherwise + :meth:`urllib.urlencode` is used with the + 'application/x-www-form-urlencoded' content type. + + Multipart encoding must be used when posting files, and it's reasonably + safe to use it in other times too. However, it may break request signing, + such as with OAuth. + + Supports an optional ``fields`` parameter of key/value strings AND + key/filetuple. A filetuple is a (filename, data, MIME type) tuple where + the MIME type is optional. For example: :: + + fields = { + 'foo': 'bar', + 'fakefile': ('foofile.txt', 'contents of foofile'), + 'realfile': ('barfile.txt', open('realfile').read()), + 'typedfile': ('bazfile.bin', open('bazfile').read(), + 'image/jpeg'), + 'nonamefile': 'contents of nonamefile field', + } + + When uploading a file, providing a filename (the first parameter of the + tuple) is optional but recommended to best mimick behavior of browsers. + + Note that if ``headers`` are supplied, the 'Content-Type' header will be + overwritten because it depends on the dynamic random boundary string + which is used to compose the body of the request. The random boundary + string can be explicitly set with the ``multipart_boundary`` parameter. + """ + if encode_multipart: + body, content_type = encode_multipart_formdata(fields or {}, + boundary=multipart_boundary) + else: + body, content_type = (urlencode(fields or {}), + 'application/x-www-form-urlencoded') + + if headers is None: + headers = self.headers + + headers_ = {'Content-Type': content_type} + headers_.update(headers) + + return self.urlopen(method, url, body=body, headers=headers_, + **urlopen_kw) diff --git a/awx/lib/site-packages/requests/packages/urllib3/response.py b/awx/lib/site-packages/requests/packages/urllib3/response.py new file mode 100644 index 0000000000..2fa407887d --- /dev/null +++ b/awx/lib/site-packages/requests/packages/urllib3/response.py @@ -0,0 +1,241 @@ +# urllib3/response.py +# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt) +# +# This module is part of urllib3 and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + + +import logging +import zlib + +from .exceptions import DecodeError +from .packages.six import string_types as basestring, binary_type + + +log = logging.getLogger(__name__) + + +class DeflateDecoder(object): + + def __init__(self): + self._first_try = True + self._data = binary_type() + self._obj = zlib.decompressobj() + + def __getattr__(self, name): + return getattr(self._obj, name) + + def decompress(self, data): + if not self._first_try: + return self._obj.decompress(data) + + self._data += data + try: + return self._obj.decompress(data) + except zlib.error: + self._first_try = False + self._obj = zlib.decompressobj(-zlib.MAX_WBITS) + try: + return self.decompress(self._data) + finally: + self._data = None + + +def _get_decoder(mode): + if mode == 'gzip': + return zlib.decompressobj(16 + zlib.MAX_WBITS) + + return DeflateDecoder() + + +class HTTPResponse(object): + """ + HTTP Response 
container. + + Backwards-compatible to httplib's HTTPResponse but the response ``body`` is + loaded and decoded on-demand when the ``data`` property is accessed. + + Extra parameters for behaviour not present in httplib.HTTPResponse: + + :param preload_content: + If True, the response's body will be preloaded during construction. + + :param decode_content: + If True, attempts to decode specific content-encoding's based on headers + (like 'gzip' and 'deflate') will be skipped and raw data will be used + instead. + + :param original_response: + When this HTTPResponse wrapper is generated from an httplib.HTTPResponse + object, it's convenient to include the original for debug purposes. It's + otherwise unused. + """ + + CONTENT_DECODERS = ['gzip', 'deflate'] + + def __init__(self, body='', headers=None, status=0, version=0, reason=None, + strict=0, preload_content=True, decode_content=True, + original_response=None, pool=None, connection=None): + self.headers = headers or {} + self.status = status + self.version = version + self.reason = reason + self.strict = strict + self.decode_content = decode_content + + self._decoder = None + self._body = body if body and isinstance(body, basestring) else None + self._fp = None + self._original_response = original_response + + self._pool = pool + self._connection = connection + + if hasattr(body, 'read'): + self._fp = body + + if preload_content and not self._body: + self._body = self.read(decode_content=decode_content) + + def get_redirect_location(self): + """ + Should we redirect and where to? + + :returns: Truthy redirect location string if we got a redirect status + code and valid location. ``None`` if redirect status and no + location. ``False`` if not a redirect status code. + """ + if self.status in [301, 302, 303, 307]: + return self.headers.get('location') + + return False + + def release_conn(self): + if not self._pool or not self._connection: + return + + self._pool._put_conn(self._connection) + self._connection = None + + @property + def data(self): + # For backwords-compat with earlier urllib3 0.4 and earlier. + if self._body: + return self._body + + if self._fp: + return self.read(cache_content=True) + + def read(self, amt=None, decode_content=None, cache_content=False): + """ + Similar to :meth:`httplib.HTTPResponse.read`, but with two additional + parameters: ``decode_content`` and ``cache_content``. + + :param amt: + How much of the content to read. If specified, caching is skipped + because it doesn't make sense to cache partial content as the full + response. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + + :param cache_content: + If True, will save the returned data such that the same result is + returned despite of the state of the underlying file object. This + is useful if you want the ``.data`` property to continue working + after having ``.read()`` the file object. (Overridden if ``amt`` is + set.) 
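+
+        A streaming sketch (``pool`` and ``handle`` are hypothetical)::
+
+            r = pool.urlopen('GET', '/big', preload_content=False)
+            while True:
+                chunk = r.read(1024)
+                if not chunk:
+                    break
+                handle(chunk)
+            r.release_conn()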
+ """ + # Note: content-encoding value should be case-insensitive, per RFC 2616 + # Section 3.5 + content_encoding = self.headers.get('content-encoding', '').lower() + if self._decoder is None: + if content_encoding in self.CONTENT_DECODERS: + self._decoder = _get_decoder(content_encoding) + if decode_content is None: + decode_content = self.decode_content + + if self._fp is None: + return + + flush_decoder = False + + try: + if amt is None: + # cStringIO doesn't like amt=None + data = self._fp.read() + flush_decoder = True + else: + cache_content = False + data = self._fp.read(amt) + if amt != 0 and not data: # Platform-specific: Buggy versions of Python. + # Close the connection when no data is returned + # + # This is redundant to what httplib/http.client _should_ + # already do. However, versions of python released before + # December 15, 2012 (http://bugs.python.org/issue16298) do not + # properly close the connection in all cases. There is no harm + # in redundantly calling close. + self._fp.close() + flush_decoder = True + + try: + if decode_content and self._decoder: + data = self._decoder.decompress(data) + except (IOError, zlib.error): + raise DecodeError("Received response with content-encoding: %s, but " + "failed to decode it." % content_encoding) + + if flush_decoder and self._decoder: + buf = self._decoder.decompress(binary_type()) + data += buf + self._decoder.flush() + + if cache_content: + self._body = data + + return data + + finally: + if self._original_response and self._original_response.isclosed(): + self.release_conn() + + @classmethod + def from_httplib(ResponseCls, r, **response_kw): + """ + Given an :class:`httplib.HTTPResponse` instance ``r``, return a + corresponding :class:`urllib3.response.HTTPResponse` object. + + Remaining parameters are passed to the HTTPResponse constructor, along + with ``original_response=r``. + """ + + # Normalize headers between different versions of Python + headers = {} + for k, v in r.getheaders(): + # Python 3: Header keys are returned capitalised + k = k.lower() + + has_value = headers.get(k) + if has_value: # Python 3: Repeating header keys are unmerged. 
+ v = ', '.join([has_value, v]) + + headers[k] = v + + # HTTPResponse objects in Python 3 don't have a .strict attribute + strict = getattr(r, 'strict', 0) + return ResponseCls(body=r, + headers=headers, + status=r.status, + version=r.version, + reason=r.reason, + strict=strict, + original_response=r, + **response_kw) + + # Backwards-compatibility methods for httplib.HTTPResponse + def getheaders(self): + return self.headers + + def getheader(self, name, default=None): + return self.headers.get(name, default) diff --git a/awx/lib/site-packages/requests/packages/urllib3/util.py b/awx/lib/site-packages/requests/packages/urllib3/util.py new file mode 100644 index 0000000000..544f9ed9d6 --- /dev/null +++ b/awx/lib/site-packages/requests/packages/urllib3/util.py @@ -0,0 +1,378 @@ +# urllib3/util.py +# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) +# +# This module is part of urllib3 and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + + +from base64 import b64encode +from collections import namedtuple +from socket import error as SocketError +from hashlib import md5, sha1 +from binascii import hexlify, unhexlify + +try: + from select import poll, POLLIN +except ImportError: # `poll` doesn't exist on OSX and other platforms + poll = False + try: + from select import select + except ImportError: # `select` doesn't exist on AppEngine. + select = False + +try: # Test for SSL features + SSLContext = None + HAS_SNI = False + + import ssl + from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23 + from ssl import SSLContext # Modern SSL? + from ssl import HAS_SNI # Has SNI? +except ImportError: + pass + + +from .packages import six +from .exceptions import LocationParseError, SSLError + + +class Url(namedtuple('Url', ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment'])): + """ + Datastructure for representing an HTTP URL. Used as a return value for + :func:`parse_url`. + """ + slots = () + + def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None, query=None, fragment=None): + return super(Url, cls).__new__(cls, scheme, auth, host, port, path, query, fragment) + + @property + def hostname(self): + """For backwards-compatibility with urlparse. We're nice like that.""" + return self.host + + @property + def request_uri(self): + """Absolute path including the query string.""" + uri = self.path or '/' + + if self.query is not None: + uri += '?' + self.query + + return uri + + +def split_first(s, delims): + """ + Given a string and an iterable of delimiters, split on the first found + delimiter. Return two split parts and the matched delimiter. + + If not found, then the first part is the full input string. + + Example: :: + + >>> split_first('foo/bar?baz', '?/=') + ('foo', 'bar?baz', '/') + >>> split_first('foo/bar?baz', '123') + ('foo/bar?baz', '', None) + + Scales linearly with number of delims. Not ideal for large number of delims. + """ + min_idx = None + min_delim = None + for d in delims: + idx = s.find(d) + if idx < 0: + continue + + if min_idx is None or idx < min_idx: + min_idx = idx + min_delim = d + + if min_idx is None or min_idx < 0: + return s, '', None + + return s[:min_idx], s[min_idx+1:], min_delim + + +def parse_url(url): + """ + Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is + performed to parse incomplete urls. Fields not provided will be None. + + Partly backwards-compatible with :mod:`urlparse`. 
+ + Example: :: + + >>> parse_url('http://google.com/mail/') + Url(scheme='http', host='google.com', port=None, path='/', ...) + >>> parse_url('google.com:80') + Url(scheme=None, host='google.com', port=80, path=None, ...) + >>> parse_url('/foo?bar') + Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...) + """ + + # While this code has overlap with stdlib's urlparse, it is much + # simplified for our needs and less annoying. + # Additionally, this imeplementations does silly things to be optimal + # on CPython. + + scheme = None + auth = None + host = None + port = None + path = None + fragment = None + query = None + + # Scheme + if '://' in url: + scheme, url = url.split('://', 1) + + # Find the earliest Authority Terminator + # (http://tools.ietf.org/html/rfc3986#section-3.2) + url, path_, delim = split_first(url, ['/', '?', '#']) + + if delim: + # Reassemble the path + path = delim + path_ + + # Auth + if '@' in url: + auth, url = url.split('@', 1) + + # IPv6 + if url and url[0] == '[': + host, url = url[1:].split(']', 1) + + # Port + if ':' in url: + _host, port = url.split(':', 1) + + if not host: + host = _host + + if not port.isdigit(): + raise LocationParseError("Failed to parse: %s" % url) + + port = int(port) + + elif not host and url: + host = url + + if not path: + return Url(scheme, auth, host, port, path, query, fragment) + + # Fragment + if '#' in path: + path, fragment = path.split('#', 1) + + # Query + if '?' in path: + path, query = path.split('?', 1) + + return Url(scheme, auth, host, port, path, query, fragment) + + +def get_host(url): + """ + Deprecated. Use :func:`.parse_url` instead. + """ + p = parse_url(url) + return p.scheme or 'http', p.hostname, p.port + + +def make_headers(keep_alive=None, accept_encoding=None, user_agent=None, + basic_auth=None): + """ + Shortcuts for generating request headers. + + :param keep_alive: + If ``True``, adds 'connection: keep-alive' header. + + :param accept_encoding: + Can be a boolean, list, or string. + ``True`` translates to 'gzip,deflate'. + List will get joined by comma. + String will be used as provided. + + :param user_agent: + String representing the user-agent you want, such as + "python-urllib3/0.6" + + :param basic_auth: + Colon-separated username:password string for 'authorization: basic ...' + auth header. + + Example: :: + + >>> make_headers(keep_alive=True, user_agent="Batman/1.0") + {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} + >>> make_headers(accept_encoding=True) + {'accept-encoding': 'gzip,deflate'} + """ + headers = {} + if accept_encoding: + if isinstance(accept_encoding, str): + pass + elif isinstance(accept_encoding, list): + accept_encoding = ','.join(accept_encoding) + else: + accept_encoding = 'gzip,deflate' + headers['accept-encoding'] = accept_encoding + + if user_agent: + headers['user-agent'] = user_agent + + if keep_alive: + headers['connection'] = 'keep-alive' + + if basic_auth: + headers['authorization'] = 'Basic ' + \ + b64encode(six.b(basic_auth)).decode('utf-8') + + return headers + + +def is_connection_dropped(conn): # Platform-specific + """ + Returns True if the connection is dropped and should be closed. + + :param conn: + :class:`httplib.HTTPConnection` object. + + Note: For platforms like AppEngine, this will always return ``False`` to + let the platform handle connection recycling transparently for us. 
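+
+    A typical call site, sketched (``conn`` would come from a pool)::
+
+        if is_connection_dropped(conn):
+            conn.close()  # discard it; the pool will open a fresh one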
+ """ + sock = getattr(conn, 'sock', False) + if not sock: # Platform-specific: AppEngine + return False + + if not poll: + if not select: # Platform-specific: AppEngine + return False + + try: + return select([sock], [], [], 0.0)[0] + except SocketError: + return True + + # This version is better on platforms that support it. + p = poll() + p.register(sock, POLLIN) + for (fno, ev) in p.poll(0.0): + if fno == sock.fileno(): + # Either data is buffered (bad), or the connection is dropped. + return True + + +def resolve_cert_reqs(candidate): + """ + Resolves the argument to a numeric constant, which can be passed to + the wrap_socket function/method from the ssl module. + Defaults to :data:`ssl.CERT_NONE`. + If given a string it is assumed to be the name of the constant in the + :mod:`ssl` module or its abbrevation. + (So you can specify `REQUIRED` instead of `CERT_REQUIRED`. + If it's neither `None` nor a string we assume it is already the numeric + constant which can directly be passed to wrap_socket. + """ + if candidate is None: + return CERT_NONE + + if isinstance(candidate, str): + res = getattr(ssl, candidate, None) + if res is None: + res = getattr(ssl, 'CERT_' + candidate) + return res + + return candidate + + +def resolve_ssl_version(candidate): + """ + like resolve_cert_reqs + """ + if candidate is None: + return PROTOCOL_SSLv23 + + if isinstance(candidate, str): + res = getattr(ssl, candidate, None) + if res is None: + res = getattr(ssl, 'PROTOCOL_' + candidate) + return res + + return candidate + + +def assert_fingerprint(cert, fingerprint): + """ + Checks if given fingerprint matches the supplied certificate. + + :param cert: + Certificate as bytes object. + :param fingerprint: + Fingerprint as string of hexdigits, can be interspersed by colons. + """ + + # Maps the length of a digest to a possible hash function producing + # this digest. + hashfunc_map = { + 16: md5, + 20: sha1 + } + + fingerprint = fingerprint.replace(':', '').lower() + + digest_length, rest = divmod(len(fingerprint), 2) + + if rest or digest_length not in hashfunc_map: + raise SSLError('Fingerprint is of invalid length.') + + # We need encode() here for py32; works on py2 and p33. + fingerprint_bytes = unhexlify(fingerprint.encode()) + + hashfunc = hashfunc_map[digest_length] + + cert_digest = hashfunc(cert).digest() + + if not cert_digest == fingerprint_bytes: + raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".' + .format(hexlify(fingerprint_bytes), + hexlify(cert_digest))) + + +if SSLContext is not None: # Python 3.2+ + def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, + ca_certs=None, server_hostname=None, + ssl_version=None): + """ + All arguments except `server_hostname` have the same meaning as for + :func:`ssl.wrap_socket` + + :param server_hostname: + Hostname of the expected certificate + """ + context = SSLContext(ssl_version) + context.verify_mode = cert_reqs + if ca_certs: + try: + context.load_verify_locations(ca_certs) + # Py32 raises IOError + # Py33 raises FileNotFoundError + except Exception as e: # Reraise as SSLError + raise SSLError(e) + if certfile: + # FIXME: This block needs a test. 
+ context.load_cert_chain(certfile, keyfile) + if HAS_SNI: # Platform-specific: OpenSSL with enabled SNI + return context.wrap_socket(sock, server_hostname=server_hostname) + return context.wrap_socket(sock) + +else: # Python 3.1 and earlier + def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, + ca_certs=None, server_hostname=None, + ssl_version=None): + return wrap_socket(sock, keyfile=keyfile, certfile=certfile, + ca_certs=ca_certs, cert_reqs=cert_reqs, + ssl_version=ssl_version) diff --git a/awx/lib/site-packages/requests/sessions.py b/awx/lib/site-packages/requests/sessions.py new file mode 100644 index 0000000000..f4aeeee6df --- /dev/null +++ b/awx/lib/site-packages/requests/sessions.py @@ -0,0 +1,501 @@ +# -*- coding: utf-8 -*- + +""" +requests.session +~~~~~~~~~~~~~~~~ + +This module provides a Session object to manage and persist settings across +requests (cookies, auth, proxies). + +""" +import os +from collections import Mapping +from datetime import datetime + +from .compat import cookielib, OrderedDict, urljoin, urlparse +from .cookies import cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar +from .models import Request, PreparedRequest +from .hooks import default_hooks, dispatch_hook +from .utils import to_key_val_list, default_headers +from .exceptions import TooManyRedirects, InvalidSchema +from .structures import CaseInsensitiveDict + +from .adapters import HTTPAdapter + +from .utils import requote_uri, get_environ_proxies, get_netrc_auth + +from .status_codes import codes +REDIRECT_STATI = ( + codes.moved, # 301 + codes.found, # 302 + codes.other, # 303 + codes.temporary_moved, # 307 +) +DEFAULT_REDIRECT_LIMIT = 30 + + +def merge_setting(request_setting, session_setting, dict_class=OrderedDict): + """ + Determines appropriate setting for a given request, taking into account the + explicit setting on that request, and the setting in the session. If a + setting is a dictionary, they will be merged together using `dict_class` + """ + + if session_setting is None: + return request_setting + + if request_setting is None: + return session_setting + + # Bypass if not a dictionary (e.g. verify) + if not ( + isinstance(session_setting, Mapping) and + isinstance(request_setting, Mapping) + ): + return request_setting + + merged_setting = dict_class(to_key_val_list(session_setting)) + merged_setting.update(to_key_val_list(request_setting)) + + # Remove keys that are set to None. + for (k, v) in request_setting.items(): + if v is None: + del merged_setting[k] + + return merged_setting + + +class SessionRedirectMixin(object): + def resolve_redirects(self, resp, req, stream=False, timeout=None, + verify=True, cert=None, proxies=None): + """Receives a Response. Returns a generator of Responses.""" + + i = 0 + prepared_request = PreparedRequest() + prepared_request.body = req.body + prepared_request.headers = req.headers.copy() + prepared_request.hooks = req.hooks + prepared_request.method = req.method + prepared_request.url = req.url + + # ((resp.status_code is codes.see_other)) + while (('location' in resp.headers and resp.status_code in REDIRECT_STATI)): + + resp.content # Consume socket so it can be released + + if i >= self.max_redirects: + raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects) + + # Release the connection back into the pool. 
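+ # close() hands the connection back to the adapter's pool without
+ # reading any more of the body; resp.content above already
+ # drained the socket.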
+ resp.close() + + url = resp.headers['location'] + method = prepared_request.method + + # Handle redirection without scheme (see: RFC 1808 Section 4) + if url.startswith('//'): + parsed_rurl = urlparse(resp.url) + url = '%s:%s' % (parsed_rurl.scheme, url) + + # Facilitate non-RFC2616-compliant 'location' headers + # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') + # Compliant with RFC3986, we percent encode the url. + if not urlparse(url).netloc: + url = urljoin(resp.url, requote_uri(url)) + else: + url = requote_uri(url) + + prepared_request.url = url + + # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.4 + if (resp.status_code == codes.see_other and + prepared_request.method != 'HEAD'): + method = 'GET' + + # Do what the browsers do, despite standards... + if (resp.status_code in (codes.moved, codes.found) and + prepared_request.method not in ('GET', 'HEAD')): + method = 'GET' + + prepared_request.method = method + + # https://github.com/kennethreitz/requests/issues/1084 + if resp.status_code not in (codes.temporary, codes.resume): + if 'Content-Length' in prepared_request.headers: + del prepared_request.headers['Content-Length'] + + prepared_request.body = None + + headers = prepared_request.headers + try: + del headers['Cookie'] + except KeyError: + pass + + prepared_request.prepare_cookies(self.cookies) + + resp = self.send( + prepared_request, + stream=stream, + timeout=timeout, + verify=verify, + cert=cert, + proxies=proxies, + allow_redirects=False, + ) + + extract_cookies_to_jar(self.cookies, prepared_request, resp.raw) + + i += 1 + yield resp + + +class Session(SessionRedirectMixin): + """A Requests session. + + Provides cookie persistience, connection-pooling, and configuration. + + Basic Usage:: + + >>> import requests + >>> s = requests.Session() + >>> s.get('http://httpbin.org/get') + 200 + """ + + __attrs__ = [ + 'headers', 'cookies', 'auth', 'timeout', 'proxies', 'hooks', + 'params', 'verify', 'cert', 'prefetch', 'adapters', 'stream', + 'trust_env', 'max_redirects'] + + def __init__(self): + + #: A case-insensitive dictionary of headers to be sent on each + #: :class:`Request <Request>` sent from this + #: :class:`Session <Session>`. + self.headers = default_headers() + + #: Default Authentication tuple or object to attach to + #: :class:`Request <Request>`. + self.auth = None + + #: Dictionary mapping protocol to the URL of the proxy (e.g. + #: {'http': 'foo.bar:3128'}) to be used on each + #: :class:`Request <Request>`. + self.proxies = {} + + #: Event-handling hooks. + self.hooks = default_hooks() + + #: Dictionary of querystring data to attach to each + #: :class:`Request <Request>`. The dictionary values may be lists for + #: representing multivalued query parameters. + self.params = {} + + #: Stream response content default. + self.stream = False + + #: SSL Verification default. + self.verify = True + + #: SSL certificate default. + self.cert = None + + #: Maximum number of redirects allowed. If the request exceeds this + #: limit, a :class:`TooManyRedirects` exception is raised. + self.max_redirects = DEFAULT_REDIRECT_LIMIT + + #: Should we trust the environment? + self.trust_env = True + + # Set up a CookieJar to be used by default + self.cookies = cookiejar_from_dict({}) + + # Default connection adapters. 
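+ # mount() keeps this mapping ordered so that the longest (most
+ # specific) prefix is tried first by get_adapter().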
+ self.adapters = OrderedDict() + self.mount('https://', HTTPAdapter()) + self.mount('http://', HTTPAdapter()) + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def request(self, method, url, + params=None, + data=None, + headers=None, + cookies=None, + files=None, + auth=None, + timeout=None, + allow_redirects=True, + proxies=None, + hooks=None, + stream=None, + verify=None, + cert=None): + """Constructs a :class:`Request <Request>`, prepares it and sends it. + Returns :class:`Response <Response>` object. + + :param method: method for the new :class:`Request` object. + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary or bytes to be sent in the query + string for the :class:`Request`. + :param data: (optional) Dictionary or bytes to send in the body of the + :class:`Request`. + :param headers: (optional) Dictionary of HTTP Headers to send with the + :class:`Request`. + :param cookies: (optional) Dict or CookieJar object to send with the + :class:`Request`. + :param files: (optional) Dictionary of 'filename': file-like-objects + for multipart encoding upload. + :param auth: (optional) Auth tuple or callable to enable + Basic/Digest/Custom HTTP Auth. + :param timeout: (optional) Float describing the timeout of the + request. + :param allow_redirects: (optional) Boolean. Set to True by default. + :param proxies: (optional) Dictionary mapping protocol to the URL of + the proxy. + :param stream: (optional) whether to immediately download the response + content. Defaults to ``False``. + :param verify: (optional) if ``True``, the SSL cert will be verified. + A CA_BUNDLE path can also be provided. + :param cert: (optional) if String, path to ssl client cert file (.pem). + If Tuple, ('cert', 'key') pair. + """ + + cookies = cookies or {} + proxies = proxies or {} + + # Bootstrap CookieJar. + if not isinstance(cookies, cookielib.CookieJar): + cookies = cookiejar_from_dict(cookies) + + # Merge with session cookies + merged_cookies = RequestsCookieJar() + merged_cookies.update(self.cookies) + merged_cookies.update(cookies) + cookies = merged_cookies + + # Gather clues from the surrounding environment. + if self.trust_env: + # Set environment's proxies. + env_proxies = get_environ_proxies(url) or {} + for (k, v) in env_proxies.items(): + proxies.setdefault(k, v) + + # Set environment's basic authentication. + if not auth: + auth = get_netrc_auth(url) + + # Look for configuration. + if not verify and verify is not False: + verify = os.environ.get('REQUESTS_CA_BUNDLE') + + # Curl compatibility. + if not verify and verify is not False: + verify = os.environ.get('CURL_CA_BUNDLE') + + # Merge all the kwargs. + params = merge_setting(params, self.params) + headers = merge_setting(headers, self.headers, dict_class=CaseInsensitiveDict) + auth = merge_setting(auth, self.auth) + proxies = merge_setting(proxies, self.proxies) + hooks = merge_setting(hooks, self.hooks) + stream = merge_setting(stream, self.stream) + verify = merge_setting(verify, self.verify) + cert = merge_setting(cert, self.cert) + + # Create the Request. + req = Request() + req.method = method.upper() + req.url = url + req.headers = headers + req.files = files + req.data = data + req.params = params + req.auth = auth + req.cookies = cookies + req.hooks = hooks + + # Prepare the Request. + prep = req.prepare() + + # Send the request. 
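+ # Everything the transport adapter needs beyond the
+ # PreparedRequest itself travels as keyword arguments.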
+ send_kwargs = { + 'stream': stream, + 'timeout': timeout, + 'verify': verify, + 'cert': cert, + 'proxies': proxies, + 'allow_redirects': allow_redirects, + } + resp = self.send(prep, **send_kwargs) + + return resp + + def get(self, url, **kwargs): + """Sends a GET request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + """ + + kwargs.setdefault('allow_redirects', True) + return self.request('GET', url, **kwargs) + + def options(self, url, **kwargs): + """Sends a OPTIONS request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + """ + + kwargs.setdefault('allow_redirects', True) + return self.request('OPTIONS', url, **kwargs) + + def head(self, url, **kwargs): + """Sends a HEAD request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + """ + + kwargs.setdefault('allow_redirects', False) + return self.request('HEAD', url, **kwargs) + + def post(self, url, data=None, **kwargs): + """Sends a POST request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + """ + + return self.request('POST', url, data=data, **kwargs) + + def put(self, url, data=None, **kwargs): + """Sends a PUT request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + """ + + return self.request('PUT', url, data=data, **kwargs) + + def patch(self, url, data=None, **kwargs): + """Sends a PATCH request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + """ + + return self.request('PATCH', url, data=data, **kwargs) + + def delete(self, url, **kwargs): + """Sends a DELETE request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + """ + + return self.request('DELETE', url, **kwargs) + + def send(self, request, **kwargs): + """Send a given PreparedRequest.""" + # Set defaults that the hooks can utilize to ensure they always have + # the correct parameters to reproduce the previous request. + kwargs.setdefault('stream', self.stream) + kwargs.setdefault('verify', self.verify) + kwargs.setdefault('cert', self.cert) + kwargs.setdefault('proxies', self.proxies) + + # It's possible that users might accidentally send a Request object. + # Guard against that specific failure case. 
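+ # Only an unprepared Request exposes a prepare() method here, so
+ # its presence flags the wrong type.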
+ if getattr(request, 'prepare', None): + raise ValueError('You can only send PreparedRequests.') + + # Set up variables needed for resolve_redirects and dispatching of + # hooks + allow_redirects = kwargs.pop('allow_redirects', True) + stream = kwargs.get('stream') + timeout = kwargs.get('timeout') + verify = kwargs.get('verify') + cert = kwargs.get('cert') + proxies = kwargs.get('proxies') + hooks = request.hooks + + # Get the appropriate adapter to use + adapter = self.get_adapter(url=request.url) + + # Start time (approximately) of the request + start = datetime.utcnow() + # Send the request + r = adapter.send(request, **kwargs) + # Total elapsed time of the request (approximately) + r.elapsed = datetime.utcnow() - start + + # Response manipulation hooks + r = dispatch_hook('response', hooks, r, **kwargs) + + # Persist cookies + extract_cookies_to_jar(self.cookies, request, r.raw) + + # Redirect resolving generator. + gen = self.resolve_redirects(r, request, stream=stream, + timeout=timeout, verify=verify, cert=cert, + proxies=proxies) + + # Resolve redirects if allowed. + history = [resp for resp in gen] if allow_redirects else [] + + # Shuffle things around if there's history. + if history: + # Insert the first (original) request at the start + history.insert(0, r) + # Get the last request made + r = history.pop() + r.history = tuple(history) + + return r + + def get_adapter(self, url): + """Returns the appropriate connnection adapter for the given URL.""" + for (prefix, adapter) in self.adapters.items(): + + if url.startswith(prefix): + return adapter + + # Nothing matches :-/ + raise InvalidSchema("No connection adapters were found for '%s'" % url) + + def close(self): + """Closes all adapters and as such the session""" + for _, v in self.adapters.items(): + v.close() + + def mount(self, prefix, adapter): + """Registers a connection adapter to a prefix. + + Adapters are sorted in descending order by key length.""" + self.adapters[prefix] = adapter + keys_to_move = [k for k in self.adapters if len(k) < len(prefix)] + for key in keys_to_move: + self.adapters[key] = self.adapters.pop(key) + + def __getstate__(self): + return dict((attr, getattr(self, attr, None)) for attr in self.__attrs__) + + def __setstate__(self, state): + for attr, value in state.items(): + setattr(self, attr, value) + + +def session(): + """Returns a :class:`Session` for context-management.""" + + return Session() diff --git a/awx/lib/site-packages/requests/status_codes.py b/awx/lib/site-packages/requests/status_codes.py new file mode 100644 index 0000000000..de384865fc --- /dev/null +++ b/awx/lib/site-packages/requests/status_codes.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- + +from .structures import LookupDict + +_codes = { + + # Informational. + 100: ('continue',), + 101: ('switching_protocols',), + 102: ('processing',), + 103: ('checkpoint',), + 122: ('uri_too_long', 'request_uri_too_long'), + 200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'), + 201: ('created',), + 202: ('accepted',), + 203: ('non_authoritative_info', 'non_authoritative_information'), + 204: ('no_content',), + 205: ('reset_content', 'reset'), + 206: ('partial_content', 'partial'), + 207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'), + 208: ('im_used',), + + # Redirection. 
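+ # Each numeric status maps to the aliases it can be looked up
+ # under, e.g. codes.moved_permanently == codes.moved == 301.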
+ 300: ('multiple_choices',), + 301: ('moved_permanently', 'moved', '\\o-'), + 302: ('found',), + 303: ('see_other', 'other'), + 304: ('not_modified',), + 305: ('use_proxy',), + 306: ('switch_proxy',), + 307: ('temporary_redirect', 'temporary_moved', 'temporary'), + 308: ('resume_incomplete', 'resume'), + + # Client Error. + 400: ('bad_request', 'bad'), + 401: ('unauthorized',), + 402: ('payment_required', 'payment'), + 403: ('forbidden',), + 404: ('not_found', '-o-'), + 405: ('method_not_allowed', 'not_allowed'), + 406: ('not_acceptable',), + 407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'), + 408: ('request_timeout', 'timeout'), + 409: ('conflict',), + 410: ('gone',), + 411: ('length_required',), + 412: ('precondition_failed', 'precondition'), + 413: ('request_entity_too_large',), + 414: ('request_uri_too_large',), + 415: ('unsupported_media_type', 'unsupported_media', 'media_type'), + 416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'), + 417: ('expectation_failed',), + 418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'), + 422: ('unprocessable_entity', 'unprocessable'), + 423: ('locked',), + 424: ('failed_dependency', 'dependency'), + 425: ('unordered_collection', 'unordered'), + 426: ('upgrade_required', 'upgrade'), + 428: ('precondition_required', 'precondition'), + 429: ('too_many_requests', 'too_many'), + 431: ('header_fields_too_large', 'fields_too_large'), + 444: ('no_response', 'none'), + 449: ('retry_with', 'retry'), + 450: ('blocked_by_windows_parental_controls', 'parental_controls'), + 451: ('unavailable_for_legal_reasons', 'legal_reasons'), + 499: ('client_closed_request',), + + # Server Error. + 500: ('internal_server_error', 'server_error', '/o\\', '✗'), + 501: ('not_implemented',), + 502: ('bad_gateway',), + 503: ('service_unavailable', 'unavailable'), + 504: ('gateway_timeout',), + 505: ('http_version_not_supported', 'http_version'), + 506: ('variant_also_negotiates',), + 507: ('insufficient_storage',), + 509: ('bandwidth_limit_exceeded', 'bandwidth'), + 510: ('not_extended',), +} + +codes = LookupDict(name='status_codes') + +for (code, titles) in list(_codes.items()): + for title in titles: + setattr(codes, title, code) + if not title.startswith('\\'): + setattr(codes, title.upper(), code) diff --git a/awx/lib/site-packages/requests/structures.py b/awx/lib/site-packages/requests/structures.py new file mode 100644 index 0000000000..8d02ea67b6 --- /dev/null +++ b/awx/lib/site-packages/requests/structures.py @@ -0,0 +1,128 @@ +# -*- coding: utf-8 -*- + +""" +requests.structures +~~~~~~~~~~~~~~~~~~~ + +Data structures that power Requests. + +""" + +import os +import collections +from itertools import islice + + +class IteratorProxy(object): + """docstring for IteratorProxy""" + def __init__(self, i): + self.i = i + # self.i = chain.from_iterable(i) + + def __iter__(self): + return self.i + + def __len__(self): + if hasattr(self.i, '__len__'): + return len(self.i) + if hasattr(self.i, 'len'): + return self.i.len + if hasattr(self.i, 'fileno'): + return os.fstat(self.i.fileno()).st_size + + def read(self, n): + return "".join(islice(self.i, None, n)) + + +class CaseInsensitiveDict(collections.MutableMapping): + """ + A case-insensitive ``dict``-like object. + + Implements all methods and operations of + ``collections.MutableMapping`` as well as dict's ``copy``. Also + provides ``lower_items``. + + All keys are expected to be strings. 
The structure remembers the + case of the last key to be set, and ``iter(instance)``, + ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()`` + will contain case-sensitive keys. However, querying and contains + testing is case insensitive: + + cid = CaseInsensitiveDict() + cid['Accept'] = 'application/json' + cid['aCCEPT'] == 'application/json' # True + list(cid) == ['Accept'] # True + + For example, ``headers['content-encoding']`` will return the + value of a ``'Content-Encoding'`` response header, regardless + of how the header name was originally stored. + + If the constructor, ``.update``, or equality comparison + operations are given keys that have equal ``.lower()``s, the + behavior is undefined. + + """ + def __init__(self, data=None, **kwargs): + self._store = dict() + if data is None: + data = {} + self.update(data, **kwargs) + + def __setitem__(self, key, value): + # Use the lowercased key for lookups, but store the actual + # key alongside the value. + self._store[key.lower()] = (key, value) + + def __getitem__(self, key): + return self._store[key.lower()][1] + + def __delitem__(self, key): + del self._store[key.lower()] + + def __iter__(self): + return (casedkey for casedkey, mappedvalue in self._store.values()) + + def __len__(self): + return len(self._store) + + def lower_items(self): + """Like iteritems(), but with all lowercase keys.""" + return ( + (lowerkey, keyval[1]) + for (lowerkey, keyval) + in self._store.items() + ) + + def __eq__(self, other): + if isinstance(other, collections.Mapping): + other = CaseInsensitiveDict(other) + else: + return NotImplemented + # Compare insensitively + return dict(self.lower_items()) == dict(other.lower_items()) + + # Copy is required + def copy(self): + return CaseInsensitiveDict(self._store.values()) + + def __repr__(self): + return '%s(%r)' % (self.__class__.__name__, dict(self.items())) + + +class LookupDict(dict): + """Dictionary lookup object.""" + + def __init__(self, name=None): + self.name = name + super(LookupDict, self).__init__() + + def __repr__(self): + return '<lookup \'%s\'>' % (self.name) + + def __getitem__(self, key): + # We allow fall-through here, so values default to None + + return self.__dict__.get(key, None) + + def get(self, key, default=None): + return self.__dict__.get(key, default) diff --git a/awx/lib/site-packages/requests/utils.py b/awx/lib/site-packages/requests/utils.py new file mode 100644 index 0000000000..b21bf8fb76 --- /dev/null +++ b/awx/lib/site-packages/requests/utils.py @@ -0,0 +1,550 @@ +# -*- coding: utf-8 -*- + +""" +requests.utils +~~~~~~~~~~~~~~ + +This module provides utility functions that are used within Requests +that are also useful for external consumption. + +""" + +import cgi +import codecs +import collections +import os +import platform +import re +import sys +from netrc import netrc, NetrcParseError + +from . import __version__ +from . 
import certs +from .compat import parse_http_list as _parse_list_header +from .compat import quote, urlparse, bytes, str, OrderedDict, urlunparse +from .cookies import RequestsCookieJar, cookiejar_from_dict +from .structures import CaseInsensitiveDict + +_hush_pyflakes = (RequestsCookieJar,) + +NETRC_FILES = ('.netrc', '_netrc') + +DEFAULT_CA_BUNDLE_PATH = certs.where() + + +def dict_to_sequence(d): + """Returns an internal sequence dictionary update.""" + + if hasattr(d, 'items'): + d = d.items() + + return d + + +def super_len(o): + if hasattr(o, '__len__'): + return len(o) + if hasattr(o, 'len'): + return o.len + if hasattr(o, 'fileno'): + return os.fstat(o.fileno()).st_size + + +def get_netrc_auth(url): + """Returns the Requests tuple auth for a given url from netrc.""" + + try: + locations = (os.path.expanduser('~/{0}'.format(f)) for f in NETRC_FILES) + netrc_path = None + + for loc in locations: + if os.path.exists(loc) and not netrc_path: + netrc_path = loc + + # Abort early if there isn't one. + if netrc_path is None: + return netrc_path + + ri = urlparse(url) + + # Strip port numbers from netloc + host = ri.netloc.split(':')[0] + + try: + _netrc = netrc(netrc_path).authenticators(host) + if _netrc: + # Return with login / password + login_i = (0 if _netrc[0] else 1) + return (_netrc[login_i], _netrc[2]) + except (NetrcParseError, IOError): + # If there was a parsing error or a permissions issue reading the file, + # we'll just skip netrc auth + pass + + # AppEngine hackiness. + except (ImportError, AttributeError): + pass + + +def guess_filename(obj): + """Tries to guess the filename of the given object.""" + name = getattr(obj, 'name', None) + if name and name[0] != '<' and name[-1] != '>': + return os.path.basename(name) + + +def from_key_val_list(value): + """Take an object and test to see if it can be represented as a + dictionary. Unless it can not be represented as such, return an + OrderedDict, e.g., + + :: + + >>> from_key_val_list([('key', 'val')]) + OrderedDict([('key', 'val')]) + >>> from_key_val_list('string') + ValueError: need more than 1 value to unpack + >>> from_key_val_list({'key': 'val'}) + OrderedDict([('key', 'val')]) + """ + if value is None: + return None + + if isinstance(value, (str, bytes, bool, int)): + raise ValueError('cannot encode objects that are not 2-tuples') + + return OrderedDict(value) + + +def to_key_val_list(value): + """Take an object and test to see if it can be represented as a + dictionary. If it can be, return a list of tuples, e.g., + + :: + + >>> to_key_val_list([('key', 'val')]) + [('key', 'val')] + >>> to_key_val_list({'key': 'val'}) + [('key', 'val')] + >>> to_key_val_list('string') + ValueError: cannot encode objects that are not 2-tuples. + """ + if value is None: + return None + + if isinstance(value, (str, bytes, bool, int)): + raise ValueError('cannot encode objects that are not 2-tuples') + + if isinstance(value, collections.Mapping): + value = value.items() + + return list(value) + + +# From mitsuhiko/werkzeug (used with permission). +def parse_list_header(value): + """Parse lists as described by RFC 2068 Section 2. + + In particular, parse comma-separated lists where the elements of + the list may include quoted-strings. A quoted-string could + contain a comma. A non-quoted string could have quotes in the + middle. Quotes are removed automatically after parsing. + + It basically works like :func:`parse_set_header` just that items + may appear multiple times and case sensitivity is preserved. 
+ + The return value is a standard :class:`list`: + + >>> parse_list_header('token, "quoted value"') + ['token', 'quoted value'] + + To create a header from the :class:`list` again, use the + :func:`dump_header` function. + + :param value: a string with a list header. + :return: :class:`list` + """ + result = [] + for item in _parse_list_header(value): + if item[:1] == item[-1:] == '"': + item = unquote_header_value(item[1:-1]) + result.append(item) + return result + + +# From mitsuhiko/werkzeug (used with permission). +def parse_dict_header(value): + """Parse lists of key, value pairs as described by RFC 2068 Section 2 and + convert them into a python dict: + + >>> d = parse_dict_header('foo="is a fish", bar="as well"') + >>> type(d) is dict + True + >>> sorted(d.items()) + [('bar', 'as well'), ('foo', 'is a fish')] + + If there is no value for a key it will be `None`: + + >>> parse_dict_header('key_without_value') + {'key_without_value': None} + + To create a header from the :class:`dict` again, use the + :func:`dump_header` function. + + :param value: a string with a dict header. + :return: :class:`dict` + """ + result = {} + for item in _parse_list_header(value): + if '=' not in item: + result[item] = None + continue + name, value = item.split('=', 1) + if value[:1] == value[-1:] == '"': + value = unquote_header_value(value[1:-1]) + result[name] = value + return result + + +# From mitsuhiko/werkzeug (used with permission). +def unquote_header_value(value, is_filename=False): + r"""Unquotes a header value. (Reversal of :func:`quote_header_value`). + This does not use the real unquoting but what browsers are actually + using for quoting. + + :param value: the header value to unquote. + """ + if value and value[0] == value[-1] == '"': + # this is not the real unquoting, but fixing this so that the + # RFC is met will result in bugs with internet explorer and + # probably some other browsers as well. IE for example is + # uploading files with "C:\foo\bar.txt" as filename + value = value[1:-1] + + # if this is a filename and the starting characters look like + # a UNC path, then just return the value without quotes. Using the + # replace sequence below on a UNC path has the effect of turning + # the leading double slash into a single slash and then + # _fix_ie_filename() doesn't work correctly. See #458. + if not is_filename or value[:2] != '\\\\': + return value.replace('\\\\', '\\').replace('\\"', '"') + return value + + +def dict_from_cookiejar(cj): + """Returns a key/value dictionary from a CookieJar. + + :param cj: CookieJar object to extract cookies from. + """ + + cookie_dict = {} + + for cookie in cj: + cookie_dict[cookie.name] = cookie.value + + return cookie_dict + + +def add_dict_to_cookiejar(cj, cookie_dict): + """Returns a CookieJar from a key/value dictionary. + + :param cj: CookieJar to insert cookies into. + :param cookie_dict: Dict of key/values to insert into CookieJar. + """ + + cj2 = cookiejar_from_dict(cookie_dict) + cj.update(cj2) + return cj + + +def get_encodings_from_content(content): + """Returns encodings from given content string. + + :param content: bytestring to extract encodings from. + """ + + charset_re = re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I) + + return charset_re.findall(content) + + +def get_encoding_from_headers(headers): + """Returns encodings from given HTTP Header Dict. + + :param headers: dictionary to extract encoding from. 
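+
+ Example (a sketch; a plain dict stands in for the header mapping)::
+
+ >>> get_encoding_from_headers({'content-type': 'text/html; charset=utf-8'})
+ 'utf-8'
+ >>> get_encoding_from_headers({'content-type': 'text/html'})
+ 'ISO-8859-1'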
+ """ + + content_type = headers.get('content-type') + + if not content_type: + return None + + content_type, params = cgi.parse_header(content_type) + + if 'charset' in params: + return params['charset'].strip("'\"") + + if 'text' in content_type: + return 'ISO-8859-1' + + +def stream_decode_response_unicode(iterator, r): + """Stream decodes a iterator.""" + + if r.encoding is None: + for item in iterator: + yield item + return + + decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace') + for chunk in iterator: + rv = decoder.decode(chunk) + if rv: + yield rv + rv = decoder.decode('', final=True) + if rv: + yield rv + + +def iter_slices(string, slice_length): + """Iterate over slices of a string.""" + pos = 0 + while pos < len(string): + yield string[pos:pos + slice_length] + pos += slice_length + + +def get_unicode_from_response(r): + """Returns the requested content back in unicode. + + :param r: Response object to get unicode content from. + + Tried: + + 1. charset from content-type + + 2. every encodings from ``<meta ... charset=XXX>`` + + 3. fall back and replace all unicode characters + + """ + + tried_encodings = [] + + # Try charset from content-type + encoding = get_encoding_from_headers(r.headers) + + if encoding: + try: + return str(r.content, encoding) + except UnicodeError: + tried_encodings.append(encoding) + + # Fall back: + try: + return str(r.content, encoding, errors='replace') + except TypeError: + return r.content + + +# The unreserved URI characters (RFC 3986) +UNRESERVED_SET = frozenset( + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + + "0123456789-._~") + + +def unquote_unreserved(uri): + """Un-escape any percent-escape sequences in a URI that are unreserved + characters. This leaves all reserved, illegal and non-ASCII bytes encoded. + """ + parts = uri.split('%') + for i in range(1, len(parts)): + h = parts[i][0:2] + if len(h) == 2 and h.isalnum(): + c = chr(int(h, 16)) + if c in UNRESERVED_SET: + parts[i] = c + parts[i][2:] + else: + parts[i] = '%' + parts[i] + else: + parts[i] = '%' + parts[i] + return ''.join(parts) + + +def requote_uri(uri): + """Re-quote the given URI. + + This function passes the given URI through an unquote/quote cycle to + ensure that it is fully and consistently quoted. + """ + # Unquote only the unreserved characters + # Then quote only illegal characters (do not quote reserved, unreserved, + # or '%') + return quote(unquote_unreserved(uri), safe="!#$%&'()*+,/:;=?@[]~") + + +def get_environ_proxies(url): + """Return a dict of environment proxies.""" + + proxy_keys = [ + 'all', + 'http', + 'https', + 'ftp', + 'socks' + ] + + get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper()) + + # First check whether no_proxy is defined. If it is, check that the URL + # we're getting isn't in the no_proxy list. + no_proxy = get_proxy('no_proxy') + + if no_proxy: + # We need to check whether we match here. We need to see if we match + # the end of the netloc, both with and without the port. + no_proxy = no_proxy.split(',') + netloc = urlparse(url).netloc + + for host in no_proxy: + if netloc.endswith(host) or netloc.split(':')[0].endswith(host): + # The URL does match something in no_proxy, so we don't want + # to apply the proxies on this URL. + return {} + + # If we get here, we either didn't have no_proxy set or we're not going + # anywhere that no_proxy applies to. 
+ proxies = [(key, get_proxy(key + '_proxy')) for key in proxy_keys] + return dict([(key, val) for (key, val) in proxies if val]) + + +def default_user_agent(): + """Return a string representing the default user agent.""" + _implementation = platform.python_implementation() + + if _implementation == 'CPython': + _implementation_version = platform.python_version() + elif _implementation == 'PyPy': + _implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major, + sys.pypy_version_info.minor, + sys.pypy_version_info.micro) + if sys.pypy_version_info.releaselevel != 'final': + _implementation_version = ''.join([_implementation_version, sys.pypy_version_info.releaselevel]) + elif _implementation == 'Jython': + _implementation_version = platform.python_version() # Complete Guess + elif _implementation == 'IronPython': + _implementation_version = platform.python_version() # Complete Guess + else: + _implementation_version = 'Unknown' + + try: + p_system = platform.system() + p_release = platform.release() + except IOError: + p_system = 'Unknown' + p_release = 'Unknown' + + return " ".join(['python-requests/%s' % __version__, + '%s/%s' % (_implementation, _implementation_version), + '%s/%s' % (p_system, p_release)]) + + +def default_headers(): + return CaseInsensitiveDict({ + 'User-Agent': default_user_agent(), + 'Accept-Encoding': ', '.join(('gzip', 'deflate', 'compress')), + 'Accept': '*/*' + }) + + +def parse_header_links(value): + """Return a dict of parsed link headers proxies. + + i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg" + + """ + + links = [] + + replace_chars = " '\"" + + for val in value.split(","): + try: + url, params = val.split(";", 1) + except ValueError: + url, params = val, '' + + link = {} + + link["url"] = url.strip("<> '\"") + + for param in params.split(";"): + try: + key, value = param.split("=") + except ValueError: + break + + link[key.strip(replace_chars)] = value.strip(replace_chars) + + links.append(link) + + return links + + +# Null bytes; no need to recreate these on each call to guess_json_utf +_null = '\x00'.encode('ascii') # encoding to ASCII for Python 3 +_null2 = _null * 2 +_null3 = _null * 3 + + +def guess_json_utf(data): + # JSON always starts with two ASCII characters, so detection is as + # easy as counting the nulls and from their location and count + # determine the encoding. Also detect a BOM, if present. + sample = data[:4] + if sample in (codecs.BOM_UTF32_LE, codecs.BOM32_BE): + return 'utf-32' # BOM included + if sample[:3] == codecs.BOM_UTF8: + return 'utf-8-sig' # BOM included, MS style (discouraged) + if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE): + return 'utf-16' # BOM included + nullcount = sample.count(_null) + if nullcount == 0: + return 'utf-8' + if nullcount == 2: + if sample[::2] == _null2: # 1st and 3rd are null + return 'utf-16-be' + if sample[1::2] == _null2: # 2nd and 4th are null + return 'utf-16-le' + # Did not detect 2 valid UTF-16 ascii-range characters + if nullcount == 3: + if sample[:3] == _null3: + return 'utf-32-be' + if sample[1:] == _null3: + return 'utf-32-le' + # Did not detect a valid UTF-32 ascii-range character + return None + + +def prepend_scheme_if_needed(url, new_scheme): + '''Given a URL that may or may not have a scheme, prepend the given scheme. 
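+ For example, prepend_scheme_if_needed('example.com/pub', 'http')
+ returns 'http://example.com/pub' (an illustrative input).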
+ Does not replace a present scheme with the one provided as an argument.''' + scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme) + + # urlparse is a finicky beast, and sometimes decides that there isn't a + # netloc present. Assume that it's being over-cautious, and switch netloc + # and path if urlparse decided there was no netloc. + if not netloc: + netloc, path = path, netloc + + return urlunparse((scheme, netloc, path, params, query, fragment)) + + +def get_auth_from_url(url): + """Given a url with authentication components, extract them into a tuple of + username,password.""" + if url: + parsed = urlparse(url) + return (parsed.username, parsed.password) + else: + return ('', '') diff --git a/awx/lib/site-packages/rest_framework/__init__.py b/awx/lib/site-packages/rest_framework/__init__.py new file mode 100644 index 0000000000..0a21018634 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/__init__.py @@ -0,0 +1,9 @@ +__version__ = '2.3.5' + +VERSION = __version__ # synonym + +# Header encoding (see RFC5987) +HTTP_HEADER_ENCODING = 'iso-8859-1' + +# Default datetime input and output formats +ISO_8601 = 'iso-8601' diff --git a/awx/lib/site-packages/rest_framework/authentication.py b/awx/lib/site-packages/rest_framework/authentication.py new file mode 100644 index 0000000000..9caca78894 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/authentication.py @@ -0,0 +1,342 @@ +""" +Provides various authentication policies. +""" +from __future__ import unicode_literals +import base64 +from datetime import datetime + +from django.contrib.auth import authenticate +from django.core.exceptions import ImproperlyConfigured +from rest_framework import exceptions, HTTP_HEADER_ENCODING +from rest_framework.compat import CsrfViewMiddleware +from rest_framework.compat import oauth, oauth_provider, oauth_provider_store +from rest_framework.compat import oauth2_provider +from rest_framework.authtoken.models import Token + + +def get_authorization_header(request): + """ + Return request's 'Authorization:' header, as a bytestring. + + Hide some test client ickyness where the header can be unicode. + """ + auth = request.META.get('HTTP_AUTHORIZATION', b'') + if type(auth) == type(''): + # Work around django test client oddness + auth = auth.encode(HTTP_HEADER_ENCODING) + return auth + + +class BaseAuthentication(object): + """ + All authentication classes should extend BaseAuthentication. + """ + + def authenticate(self, request): + """ + Authenticate the request and return a two-tuple of (user, token). + """ + raise NotImplementedError(".authenticate() must be overridden.") + + def authenticate_header(self, request): + """ + Return a string to be used as the value of the `WWW-Authenticate` + header in a `401 Unauthenticated` response, or `None` if the + authentication scheme should return `403 Permission Denied` responses. + """ + pass + + +class BasicAuthentication(BaseAuthentication): + """ + HTTP Basic authentication against username/password. + """ + www_authenticate_realm = 'api' + + def authenticate(self, request): + """ + Returns a `User` if a correct username and password have been supplied + using HTTP Basic authentication. Otherwise returns `None`. + """ + auth = get_authorization_header(request).split() + + if not auth or auth[0].lower() != b'basic': + return None + + if len(auth) == 1: + msg = 'Invalid basic header. No credentials provided.' + raise exceptions.AuthenticationFailed(msg) + elif len(auth) > 2: + msg = 'Invalid basic header. 
Credentials string should not contain spaces.' + raise exceptions.AuthenticationFailed(msg) + + try: + auth_parts = base64.b64decode(auth[1]).decode(HTTP_HEADER_ENCODING).partition(':') + except (TypeError, UnicodeDecodeError): + msg = 'Invalid basic header. Credentials not correctly base64 encoded' + raise exceptions.AuthenticationFailed(msg) + + userid, password = auth_parts[0], auth_parts[2] + return self.authenticate_credentials(userid, password) + + def authenticate_credentials(self, userid, password): + """ + Authenticate the userid and password against username and password. + """ + user = authenticate(username=userid, password=password) + if user is None or not user.is_active: + raise exceptions.AuthenticationFailed('Invalid username/password') + return (user, None) + + def authenticate_header(self, request): + return 'Basic realm="%s"' % self.www_authenticate_realm + + +class SessionAuthentication(BaseAuthentication): + """ + Use Django's session framework for authentication. + """ + + def authenticate(self, request): + """ + Returns a `User` if the request session currently has a logged in user. + Otherwise returns `None`. + """ + + # Get the underlying HttpRequest object + http_request = request._request + user = getattr(http_request, 'user', None) + + # Unauthenticated, CSRF validation not required + if not user or not user.is_active: + return None + + # Enforce CSRF validation for session based authentication. + class CSRFCheck(CsrfViewMiddleware): + def _reject(self, request, reason): + # Return the failure reason instead of an HttpResponse + return reason + + reason = CSRFCheck().process_view(http_request, None, (), {}) + if reason: + # CSRF failed, bail with explicit error message + raise exceptions.AuthenticationFailed('CSRF Failed: %s' % reason) + + # CSRF passed with authenticated user + return (user, None) + + +class TokenAuthentication(BaseAuthentication): + """ + Simple token based authentication. + + Clients should authenticate by passing the token key in the "Authorization" + HTTP header, prepended with the string "Token ". For example: + + Authorization: Token 401f7ac837da42b97f613d789819ff93537bee6a + """ + + model = Token + """ + A custom token model may be used, but must have the following properties. + + * key -- The string identifying the token + * user -- The user to which the token belongs + """ + + def authenticate(self, request): + auth = get_authorization_header(request).split() + + if not auth or auth[0].lower() != b'token': + return None + + if len(auth) == 1: + msg = 'Invalid token header. No credentials provided.' + raise exceptions.AuthenticationFailed(msg) + elif len(auth) > 2: + msg = 'Invalid token header. Token string should not contain spaces.' + raise exceptions.AuthenticationFailed(msg) + + return self.authenticate_credentials(auth[1]) + + def authenticate_credentials(self, key): + try: + token = self.model.objects.get(key=key) + except self.model.DoesNotExist: + raise exceptions.AuthenticationFailed('Invalid token') + + if not token.user.is_active: + raise exceptions.AuthenticationFailed('User inactive or deleted') + + return (token.user, token) + + def authenticate_header(self, request): + return 'Token' + + +class OAuthAuthentication(BaseAuthentication): + """ + OAuth 1.0a authentication backend using `django-oauth-plus` and `oauth2`. + + Note: The `oauth2` package actually provides oauth1.0a support. Urg. + We import it from the `compat` module as `oauth`. 
+ """ + www_authenticate_realm = 'api' + + def __init__(self, *args, **kwargs): + super(OAuthAuthentication, self).__init__(*args, **kwargs) + + if oauth is None: + raise ImproperlyConfigured( + "The 'oauth2' package could not be imported." + "It is required for use with the 'OAuthAuthentication' class.") + + if oauth_provider is None: + raise ImproperlyConfigured( + "The 'django-oauth-plus' package could not be imported." + "It is required for use with the 'OAuthAuthentication' class.") + + def authenticate(self, request): + """ + Returns two-tuple of (user, token) if authentication succeeds, + or None otherwise. + """ + try: + oauth_request = oauth_provider.utils.get_oauth_request(request) + except oauth.Error as err: + raise exceptions.AuthenticationFailed(err.message) + + if not oauth_request: + return None + + oauth_params = oauth_provider.consts.OAUTH_PARAMETERS_NAMES + + found = any(param for param in oauth_params if param in oauth_request) + missing = list(param for param in oauth_params if param not in oauth_request) + + if not found: + # OAuth authentication was not attempted. + return None + + if missing: + # OAuth was attempted but missing parameters. + msg = 'Missing parameters: %s' % (', '.join(missing)) + raise exceptions.AuthenticationFailed(msg) + + if not self.check_nonce(request, oauth_request): + msg = 'Nonce check failed' + raise exceptions.AuthenticationFailed(msg) + + try: + consumer_key = oauth_request.get_parameter('oauth_consumer_key') + consumer = oauth_provider_store.get_consumer(request, oauth_request, consumer_key) + except oauth_provider.store.InvalidConsumerError as err: + raise exceptions.AuthenticationFailed(err) + + if consumer.status != oauth_provider.consts.ACCEPTED: + msg = 'Invalid consumer key status: %s' % consumer.get_status_display() + raise exceptions.AuthenticationFailed(msg) + + try: + token_param = oauth_request.get_parameter('oauth_token') + token = oauth_provider_store.get_access_token(request, oauth_request, consumer, token_param) + except oauth_provider.store.InvalidTokenError: + msg = 'Invalid access token: %s' % oauth_request.get_parameter('oauth_token') + raise exceptions.AuthenticationFailed(msg) + + try: + self.validate_token(request, consumer, token) + except oauth.Error as err: + raise exceptions.AuthenticationFailed(err.message) + + user = token.user + + if not user.is_active: + msg = 'User inactive or deleted: %s' % user.username + raise exceptions.AuthenticationFailed(msg) + + return (token.user, token) + + def authenticate_header(self, request): + """ + If permission is denied, return a '401 Unauthorized' response, + with an appropraite 'WWW-Authenticate' header. + """ + return 'OAuth realm="%s"' % self.www_authenticate_realm + + def validate_token(self, request, consumer, token): + """ + Check the token and raise an `oauth.Error` exception if invalid. + """ + oauth_server, oauth_request = oauth_provider.utils.initialize_server_request(request) + oauth_server.verify_request(oauth_request, consumer, token) + + def check_nonce(self, request, oauth_request): + """ + Checks nonce of request, and return True if valid. 
+ """ + return oauth_provider_store.check_nonce(request, oauth_request, oauth_request['oauth_nonce']) + + +class OAuth2Authentication(BaseAuthentication): + """ + OAuth 2 authentication backend using `django-oauth2-provider` + """ + www_authenticate_realm = 'api' + + def __init__(self, *args, **kwargs): + super(OAuth2Authentication, self).__init__(*args, **kwargs) + + if oauth2_provider is None: + raise ImproperlyConfigured( + "The 'django-oauth2-provider' package could not be imported. " + "It is required for use with the 'OAuth2Authentication' class.") + + def authenticate(self, request): + """ + Returns two-tuple of (user, token) if authentication succeeds, + or None otherwise. + """ + + auth = get_authorization_header(request).split() + + if not auth or auth[0].lower() != b'bearer': + return None + + if len(auth) == 1: + msg = 'Invalid bearer header. No credentials provided.' + raise exceptions.AuthenticationFailed(msg) + elif len(auth) > 2: + msg = 'Invalid bearer header. Token string should not contain spaces.' + raise exceptions.AuthenticationFailed(msg) + + return self.authenticate_credentials(request, auth[1]) + + def authenticate_credentials(self, request, access_token): + """ + Authenticate the request, given the access token. + """ + + try: + token = oauth2_provider.models.AccessToken.objects.select_related('user') + # TODO: Change to timezone aware datetime when oauth2_provider add + # support to it. + token = token.get(token=access_token, expires__gt=datetime.now()) + except oauth2_provider.models.AccessToken.DoesNotExist: + raise exceptions.AuthenticationFailed('Invalid token') + + user = token.user + + if not user.is_active: + msg = 'User inactive or deleted: %s' % user.username + raise exceptions.AuthenticationFailed(msg) + + return (user, token) + + def authenticate_header(self, request): + """ + Bearer is the only finalized type currently + + Check details on the `OAuth2Authentication.authenticate` method + """ + return 'Bearer realm="%s"' % self.www_authenticate_realm diff --git a/awx/lib/site-packages/rest_framework/authtoken/__init__.py b/awx/lib/site-packages/rest_framework/authtoken/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/rest_framework/authtoken/migrations/0001_initial.py b/awx/lib/site-packages/rest_framework/authtoken/migrations/0001_initial.py new file mode 100644 index 0000000000..d5965e4042 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/authtoken/migrations/0001_initial.py @@ -0,0 +1,67 @@ +# -*- coding: utf-8 -*- +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + +from rest_framework.settings import api_settings + + +try: + from django.contrib.auth import get_user_model +except ImportError: # django < 1.5 + from django.contrib.auth.models import User +else: + User = get_user_model() + + +class Migration(SchemaMigration): + + def forwards(self, orm): + # Adding model 'Token' + db.create_table('authtoken_token', ( + ('key', self.gf('django.db.models.fields.CharField')(max_length=40, primary_key=True)), + ('user', self.gf('django.db.models.fields.related.OneToOneField')(related_name='auth_token', unique=True, to=orm['%s.%s' % (User._meta.app_label, User._meta.object_name)])), + ('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)), + )) + db.send_create_signal('authtoken', ['Token']) + + + def backwards(self, orm): + # Deleting model 'Token' + db.delete_table('authtoken_token') + + + models = { + 
'auth.group': { + 'Meta': {'object_name': 'Group'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) + }, + 'auth.permission': { + 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, + 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) + }, + "%s.%s" % (User._meta.app_label, User._meta.module_name): { + 'Meta': {'object_name': User._meta.module_name}, + }, + 'authtoken.token': { + 'Meta': {'object_name': 'Token'}, + 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), + 'key': ('django.db.models.fields.CharField', [], {'max_length': '40', 'primary_key': 'True'}), + 'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'auth_token'", 'unique': 'True', 'to': "orm['%s.%s']" % (User._meta.app_label, User._meta.object_name)}) + }, + 'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + } + } + + complete_apps = ['authtoken'] diff --git a/awx/lib/site-packages/rest_framework/authtoken/migrations/__init__.py b/awx/lib/site-packages/rest_framework/authtoken/migrations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/rest_framework/authtoken/models.py b/awx/lib/site-packages/rest_framework/authtoken/models.py new file mode 100644 index 0000000000..52c45ad11f --- /dev/null +++ b/awx/lib/site-packages/rest_framework/authtoken/models.py @@ -0,0 +1,35 @@ +import uuid +import hmac +from hashlib import sha1 +from rest_framework.compat import User +from django.conf import settings +from django.db import models + + +class Token(models.Model): + """ + The default authorization token model. 
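+
+ A minimal usage sketch (``some_user`` is an assumed existing
+ ``User`` instance)::
+
+ token = Token.objects.create(user=some_user)
+ token.key # 40-character hex digest filled in by save()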
+ """ + key = models.CharField(max_length=40, primary_key=True) + user = models.OneToOneField(User, related_name='auth_token') + created = models.DateTimeField(auto_now_add=True) + + class Meta: + # Work around for a bug in Django: + # https://code.djangoproject.com/ticket/19422 + # + # Also see corresponding ticket: + # https://github.com/tomchristie/django-rest-framework/issues/705 + abstract = 'rest_framework.authtoken' not in settings.INSTALLED_APPS + + def save(self, *args, **kwargs): + if not self.key: + self.key = self.generate_key() + return super(Token, self).save(*args, **kwargs) + + def generate_key(self): + unique = uuid.uuid4() + return hmac.new(unique.bytes, digestmod=sha1).hexdigest() + + def __unicode__(self): + return self.key diff --git a/awx/lib/site-packages/rest_framework/authtoken/serializers.py b/awx/lib/site-packages/rest_framework/authtoken/serializers.py new file mode 100644 index 0000000000..60a3740e76 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/authtoken/serializers.py @@ -0,0 +1,24 @@ +from django.contrib.auth import authenticate +from rest_framework import serializers + + +class AuthTokenSerializer(serializers.Serializer): + username = serializers.CharField() + password = serializers.CharField() + + def validate(self, attrs): + username = attrs.get('username') + password = attrs.get('password') + + if username and password: + user = authenticate(username=username, password=password) + + if user: + if not user.is_active: + raise serializers.ValidationError('User account is disabled.') + attrs['user'] = user + return attrs + else: + raise serializers.ValidationError('Unable to login with provided credentials.') + else: + raise serializers.ValidationError('Must include "username" and "password"') diff --git a/awx/lib/site-packages/rest_framework/authtoken/views.py b/awx/lib/site-packages/rest_framework/authtoken/views.py new file mode 100644 index 0000000000..7c03cb7666 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/authtoken/views.py @@ -0,0 +1,26 @@ +from rest_framework.views import APIView +from rest_framework import status +from rest_framework import parsers +from rest_framework import renderers +from rest_framework.response import Response +from rest_framework.authtoken.models import Token +from rest_framework.authtoken.serializers import AuthTokenSerializer + + +class ObtainAuthToken(APIView): + throttle_classes = () + permission_classes = () + parser_classes = (parsers.FormParser, parsers.MultiPartParser, parsers.JSONParser,) + renderer_classes = (renderers.JSONRenderer,) + serializer_class = AuthTokenSerializer + model = Token + + def post(self, request): + serializer = self.serializer_class(data=request.DATA) + if serializer.is_valid(): + token, created = Token.objects.get_or_create(user=serializer.object['user']) + return Response({'token': token.key}) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + +obtain_auth_token = ObtainAuthToken.as_view() diff --git a/awx/lib/site-packages/rest_framework/compat.py b/awx/lib/site-packages/rest_framework/compat.py new file mode 100644 index 0000000000..76dc00526c --- /dev/null +++ b/awx/lib/site-packages/rest_framework/compat.py @@ -0,0 +1,510 @@ +""" +The `compat` module provides support for backwards compatibility with older +versions of django/python, and compatibility wrappers around optional packages. 
+""" +# flake8: noqa +from __future__ import unicode_literals + +import django +from django.core.exceptions import ImproperlyConfigured + +# Try to import six from Django, fallback to included `six`. +try: + from django.utils import six +except ImportError: + from rest_framework import six + +# location of patterns, url, include changes in 1.4 onwards +try: + from django.conf.urls import patterns, url, include +except ImportError: + from django.conf.urls.defaults import patterns, url, include + +# Handle django.utils.encoding rename: +# smart_unicode -> smart_text +# force_unicode -> force_text +try: + from django.utils.encoding import smart_text +except ImportError: + from django.utils.encoding import smart_unicode as smart_text +try: + from django.utils.encoding import force_text +except ImportError: + from django.utils.encoding import force_unicode as force_text + + +# django-filter is optional +try: + import django_filters +except ImportError: + django_filters = None + + +# cStringIO only if it's available, otherwise StringIO +try: + import cStringIO.StringIO as StringIO +except ImportError: + StringIO = six.StringIO + +BytesIO = six.BytesIO + + +# urlparse compat import (Required because it changed in python 3.x) +try: + from urllib import parse as urlparse +except ImportError: + import urlparse + + +# Try to import PIL in either of the two ways it can end up installed. +try: + from PIL import Image +except ImportError: + try: + import Image + except ImportError: + Image = None + + +def get_concrete_model(model_cls): + try: + return model_cls._meta.concrete_model + except AttributeError: + # 1.3 does not include concrete model + return model_cls + + +# Django 1.5 add support for custom auth user model +if django.VERSION >= (1, 5): + from django.conf import settings + if hasattr(settings, 'AUTH_USER_MODEL'): + User = settings.AUTH_USER_MODEL + else: + from django.contrib.auth.models import User +else: + try: + from django.contrib.auth.models import User + except ImportError: + raise ImportError("User model is not to be found.") + + +if django.VERSION >= (1, 5): + from django.views.generic import View +else: + from django.views.generic import View as _View + from django.utils.decorators import classonlymethod + from django.utils.functional import update_wrapper + + class View(_View): + # 1.3 does not include head method in base View class + # See: https://code.djangoproject.com/ticket/15668 + @classonlymethod + def as_view(cls, **initkwargs): + """ + Main entry point for a request-response process. + """ + # sanitize keyword arguments + for key in initkwargs: + if key in cls.http_method_names: + raise TypeError("You tried to pass in the %s method name as a " + "keyword argument to %s(). Don't do that." 
+ % (key, cls.__name__)) + if not hasattr(cls, key): + raise TypeError("%s() received an invalid keyword %r" % ( + cls.__name__, key)) + + def view(request, *args, **kwargs): + self = cls(**initkwargs) + if hasattr(self, 'get') and not hasattr(self, 'head'): + self.head = self.get + return self.dispatch(request, *args, **kwargs) + + # take name and docstring from class + update_wrapper(view, cls, updated=()) + + # and possible attributes set by decorators + # like csrf_exempt from dispatch + update_wrapper(view, cls.dispatch, assigned=()) + return view + + # _allowed_methods only present from 1.5 onwards + def _allowed_methods(self): + return [m.upper() for m in self.http_method_names if hasattr(self, m)] + + +# PATCH method is not implemented by Django +if 'patch' not in View.http_method_names: + View.http_method_names = View.http_method_names + ['patch'] + + +# PUT, DELETE do not require CSRF until 1.4. They should. Make it better. +if django.VERSION >= (1, 4): + from django.middleware.csrf import CsrfViewMiddleware +else: + import hashlib + import re + import random + import logging + + from django.conf import settings + from django.core.urlresolvers import get_callable + + try: + from logging import NullHandler + except ImportError: + class NullHandler(logging.Handler): + def emit(self, record): + pass + + logger = logging.getLogger('django.request') + + if not logger.handlers: + logger.addHandler(NullHandler()) + + def same_origin(url1, url2): + """ + Checks if two URLs are 'same-origin' + """ + p1, p2 = urlparse.urlparse(url1), urlparse.urlparse(url2) + return p1[0:2] == p2[0:2] + + def constant_time_compare(val1, val2): + """ + Returns True if the two strings are equal, False otherwise. + + The time taken is independent of the number of characters that match. + """ + if len(val1) != len(val2): + return False + result = 0 + for x, y in zip(val1, val2): + result |= ord(x) ^ ord(y) + return result == 0 + + # Use the system (hardware-based) random number generator if it exists. + if hasattr(random, 'SystemRandom'): + randrange = random.SystemRandom().randrange + else: + randrange = random.randrange + + _MAX_CSRF_KEY = 18446744073709551616 # 2 << 63 + + REASON_NO_REFERER = "Referer checking failed - no Referer." + REASON_BAD_REFERER = "Referer checking failed - %s does not match %s." + REASON_NO_CSRF_COOKIE = "CSRF cookie not set." + REASON_BAD_TOKEN = "CSRF token missing or incorrect." + + def _get_failure_view(): + """ + Returns the view to be used for CSRF rejections + """ + return get_callable(settings.CSRF_FAILURE_VIEW) + + def _get_new_csrf_key(): + return hashlib.md5("%s%s" % (randrange(0, _MAX_CSRF_KEY), settings.SECRET_KEY)).hexdigest() + + def get_token(request): + """ + Returns the the CSRF token required for a POST form. The token is an + alphanumeric value. + + A side effect of calling this function is to make the the csrf_protect + decorator and the CsrfViewMiddleware add a CSRF cookie and a 'Vary: Cookie' + header to the outgoing response. For this reason, you may need to use this + function lazily, as is done by the csrf context processor. + """ + request.META["CSRF_COOKIE_USED"] = True + return request.META.get("CSRF_COOKIE", None) + + def _sanitize_token(token): + # Allow only alphanum, and ensure we return a 'str' for the sake of the post + # processing middleware. + token = re.sub('[^a-zA-Z0-9]', '', str(token.decode('ascii', 'ignore'))) + if token == "": + # In case the cookie has been truncated to nothing at some point. 
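
`constant_time_compare` above is the security-critical primitive here: a plain `==` can return as soon as the first byte differs, leaking how much of a token an attacker has guessed. A standalone check of its behaviour (the body is copied from above so the snippet runs on its own; on Python 2.7.7+/3.3+ the stdlib `hmac.compare_digest` serves the same purpose):

def constant_time_compare(val1, val2):
    if len(val1) != len(val2):
        return False
    result = 0
    for x, y in zip(val1, val2):
        result |= ord(x) ^ ord(y)   # accumulate differences; no early exit
    return result == 0

assert constant_time_compare('abc123', 'abc123')
assert not constant_time_compare('abc123', 'abc124')
assert not constant_time_compare('abc', 'abc123')   # length mismatch
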
+            return _get_new_csrf_key()
+        else:
+            return token
+
+    class CsrfViewMiddleware(object):
+        """
+        Middleware that requires a present and correct csrfmiddlewaretoken
+        for POST requests that have a CSRF cookie, and sets an outgoing
+        CSRF cookie.
+
+        This middleware should be used in conjunction with the csrf_token template
+        tag.
+        """
+        # The _accept and _reject methods currently only exist for the sake of the
+        # requires_csrf_token decorator.
+        def _accept(self, request):
+            # Avoid checking the request twice by adding a custom attribute to
+            # request. This will be relevant when both decorator and middleware
+            # are used.
+            request.csrf_processing_done = True
+            return None
+
+        def _reject(self, request, reason):
+            return _get_failure_view()(request, reason=reason)
+
+        def process_view(self, request, callback, callback_args, callback_kwargs):
+
+            if getattr(request, 'csrf_processing_done', False):
+                return None
+
+            try:
+                csrf_token = _sanitize_token(request.COOKIES[settings.CSRF_COOKIE_NAME])
+                # Use same token next time
+                request.META['CSRF_COOKIE'] = csrf_token
+            except KeyError:
+                csrf_token = None
+                # Generate token and store it in the request, so it's available to the view.
+                request.META["CSRF_COOKIE"] = _get_new_csrf_key()
+
+            # Wait until request.META["CSRF_COOKIE"] has been manipulated before
+            # bailing out, so that get_token still works
+            if getattr(callback, 'csrf_exempt', False):
+                return None
+
+            # Assume that anything not defined as 'safe' by RFC 2616 needs protection.
+            if request.method not in ('GET', 'HEAD', 'OPTIONS', 'TRACE'):
+                if getattr(request, '_dont_enforce_csrf_checks', False):
+                    # Mechanism to turn off CSRF checks for test suite. It comes after
+                    # the creation of CSRF cookies, so that everything else continues to
+                    # work exactly the same (e.g. cookies are sent etc), but before any
+                    # of the branches that call reject()
+                    return self._accept(request)
+
+                if request.is_secure():
+                    # Suppose user visits http://example.com/
+                    # An active network attacker (man-in-the-middle, MITM) sends a
+                    # POST form which targets https://example.com/detonate-bomb/ and
+                    # submits it via javascript.
+                    #
+                    # The attacker will need to provide a CSRF cookie and token, but
+                    # that is no problem for a MITM and the session independent
+                    # nonce we are using. So the MITM can circumvent the CSRF
+                    # protection. This is true for any HTTP connection, but anyone
+                    # using HTTPS expects better! For this reason, for
+                    # https://example.com/ we need additional protection that treats
+                    # http://example.com/ as completely untrusted. Under HTTPS,
+                    # Barth et al. found that the Referer header is missing for
+                    # same-domain requests in only about 0.2% of cases or less, so
+                    # we can use strict Referer checking.
+                    referer = request.META.get('HTTP_REFERER')
+                    if referer is None:
+                        logger.warning('Forbidden (%s): %s' % (REASON_NO_REFERER, request.path),
+                            extra={
+                                'status_code': 403,
+                                'request': request,
+                            }
+                        )
+                        return self._reject(request, REASON_NO_REFERER)
+
+                    # Note that request.get_host() includes the port
+                    good_referer = 'https://%s/' % request.get_host()
+                    if not same_origin(referer, good_referer):
+                        reason = REASON_BAD_REFERER % (referer, good_referer)
+                        logger.warning('Forbidden (%s): %s' % (reason, request.path),
+                            extra={
+                                'status_code': 403,
+                                'request': request,
+                            }
+                        )
+                        return self._reject(request, reason)
+
+                if csrf_token is None:
+                    # No CSRF cookie. For POST requests, we insist on a CSRF cookie,
+                    # and in this way we can avoid all CSRF attacks, including login
+                    # CSRF.
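
The strict referer check in the middleware above ultimately reduces to `same_origin`, defined earlier: scheme and netloc (including any explicit port) must both match, while the path is ignored. A standalone illustration with the function body copied from above:

import urlparse

def same_origin(url1, url2):
    p1, p2 = urlparse.urlparse(url1), urlparse.urlparse(url2)
    return p1[0:2] == p2[0:2]   # compare (scheme, netloc) only

assert same_origin('https://example.com/a/b', 'https://example.com/')
assert not same_origin('http://example.com/', 'https://example.com/')        # scheme differs
assert not same_origin('https://example.com:8443/', 'https://example.com/')  # port differs
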
+ logger.warning('Forbidden (%s): %s' % (REASON_NO_CSRF_COOKIE, request.path), + extra={ + 'status_code': 403, + 'request': request, + } + ) + return self._reject(request, REASON_NO_CSRF_COOKIE) + + # check non-cookie token for match + request_csrf_token = "" + if request.method == "POST": + request_csrf_token = request.POST.get('csrfmiddlewaretoken', '') + + if request_csrf_token == "": + # Fall back to X-CSRFToken, to make things easier for AJAX, + # and possible for PUT/DELETE + request_csrf_token = request.META.get('HTTP_X_CSRFTOKEN', '') + + if not constant_time_compare(request_csrf_token, csrf_token): + logger.warning('Forbidden (%s): %s' % (REASON_BAD_TOKEN, request.path), + extra={ + 'status_code': 403, + 'request': request, + } + ) + return self._reject(request, REASON_BAD_TOKEN) + + return self._accept(request) + +# timezone support is new in Django 1.4 +try: + from django.utils import timezone +except ImportError: + timezone = None + +# dateparse is ALSO new in Django 1.4 +try: + from django.utils.dateparse import parse_date, parse_datetime, parse_time +except ImportError: + import datetime + import re + + date_re = re.compile( + r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})$' + ) + + datetime_re = re.compile( + r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})' + r'[T ](?P<hour>\d{1,2}):(?P<minute>\d{1,2})' + r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?' + r'(?P<tzinfo>Z|[+-]\d{1,2}:\d{1,2})?$' + ) + + time_re = re.compile( + r'(?P<hour>\d{1,2}):(?P<minute>\d{1,2})' + r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?' + ) + + def parse_date(value): + match = date_re.match(value) + if match: + kw = dict((k, int(v)) for k, v in match.groupdict().iteritems()) + return datetime.date(**kw) + + def parse_time(value): + match = time_re.match(value) + if match: + kw = match.groupdict() + if kw['microsecond']: + kw['microsecond'] = kw['microsecond'].ljust(6, '0') + kw = dict((k, int(v)) for k, v in kw.iteritems() if v is not None) + return datetime.time(**kw) + + def parse_datetime(value): + """Parse datetime, but w/o the timezone awareness in 1.4""" + match = datetime_re.match(value) + if match: + kw = match.groupdict() + if kw['microsecond']: + kw['microsecond'] = kw['microsecond'].ljust(6, '0') + kw = dict((k, int(v)) for k, v in kw.iteritems() if v is not None) + return datetime.datetime(**kw) + + +# smart_urlquote is new on Django 1.4 +try: + from django.utils.html import smart_urlquote +except ImportError: + import re + from django.utils.encoding import smart_str + try: + from urllib.parse import quote, urlsplit, urlunsplit + except ImportError: # Python 2 + from urllib import quote + from urlparse import urlsplit, urlunsplit + + unquoted_percents_re = re.compile(r'%(?![0-9A-Fa-f]{2})') + + def smart_urlquote(url): + "Quotes a URL if it isn't already quoted." + # Handle IDN before quoting. + scheme, netloc, path, query, fragment = urlsplit(url) + try: + netloc = netloc.encode('idna').decode('ascii') # IDN -> ACE + except UnicodeError: # invalid domain part + pass + else: + url = urlunsplit((scheme, netloc, path, query, fragment)) + + # An URL is considered unquoted if it contains no % characters or + # contains a % not followed by two hexadecimal digits. See #9655. 
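
The regex-based `dateparse` fallbacks above accept the same ISO 8601 subset as Django 1.4's versions, minus the timezone awareness noted in the docstring. A few illustrative calls, assuming these fallback definitions are the ones in scope:

print parse_date('2013-07-04')               # datetime.date(2013, 7, 4)
print parse_time('12:30:45.5')               # 12:30:45.500000 (microseconds padded)
print parse_datetime('2013-07-04 12:30:45')  # naive datetime.datetime
print parse_date('2013/07/04')               # None -- separator does not match
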
+ if '%' not in url or unquoted_percents_re.search(url): + # See http://bugs.python.org/issue2637 + url = quote(smart_str(url), safe=b'!*\'();:@&=+$,/?#[]~') + + return force_text(url) + + +# Markdown is optional +try: + import markdown + + def apply_markdown(text): + """ + Simple wrapper around :func:`markdown.markdown` to set the base level + of '#' style headers to <h2>. + """ + + extensions = ['headerid(level=2)'] + safe_mode = False + md = markdown.Markdown(extensions=extensions, safe_mode=safe_mode) + return md.convert(text) + +except ImportError: + apply_markdown = None + + +# Yaml is optional +try: + import yaml +except ImportError: + yaml = None + + +# XML is optional +try: + import defusedxml.ElementTree as etree +except ImportError: + etree = None + +# OAuth is optional +try: + # Note: The `oauth2` package actually provides oauth1.0a support. Urg. + import oauth2 as oauth +except ImportError: + oauth = None + +# OAuth is optional +try: + import oauth_provider + from oauth_provider.store import store as oauth_provider_store +except (ImportError, ImproperlyConfigured): + oauth_provider = None + oauth_provider_store = None + +# OAuth 2 support is optional +try: + import provider.oauth2 as oauth2_provider + from provider.oauth2 import models as oauth2_provider_models + from provider.oauth2 import forms as oauth2_provider_forms + from provider import scope as oauth2_provider_scope + from provider import constants as oauth2_constants +except ImportError: + oauth2_provider = None + oauth2_provider_models = None + oauth2_provider_forms = None + oauth2_provider_scope = None + oauth2_constants = None + +# Handle lazy strings +from django.utils.functional import Promise + +if six.PY3: + def is_non_str_iterable(obj): + if (isinstance(obj, str) or + (isinstance(obj, Promise) and obj._delegate_text)): + return False + return hasattr(obj, '__iter__') +else: + def is_non_str_iterable(obj): + return hasattr(obj, '__iter__') diff --git a/awx/lib/site-packages/rest_framework/decorators.py b/awx/lib/site-packages/rest_framework/decorators.py new file mode 100644 index 0000000000..c69756a430 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/decorators.py @@ -0,0 +1,129 @@ +""" +The most important decorator in this module is `@api_view`, which is used +for writing function-based views with REST framework. + +There are also various decorators for setting the API policies on function +based views, as well as the `@action` and `@link` decorators, which are +used to annotate methods on viewsets that should be included by routers. +""" +from __future__ import unicode_literals +from rest_framework.compat import six +from rest_framework.views import APIView +import types + + +def api_view(http_method_names): + + """ + Decorator that converts a function-based view into an APIView subclass. + Takes a list of allowed methods for the view as an argument. + """ + + def decorator(func): + + WrappedAPIView = type( + six.PY3 and 'WrappedAPIView' or b'WrappedAPIView', + (APIView,), + {'__doc__': func.__doc__} + ) + + # Note, the above allows us to set the docstring. + # It is the equivalent of: + # + # class WrappedAPIView(APIView): + # pass + # WrappedAPIView.__doc__ = func.doc <--- Not possible to do this + + # api_view applied without (method_names) + assert not(isinstance(http_method_names, types.FunctionType)), \ + '@api_view missing list of allowed HTTP methods' + + # api_view applied with eg. 
string instead of list of strings + assert isinstance(http_method_names, (list, tuple)), \ + '@api_view expected a list of strings, received %s' % type(http_method_names).__name__ + + allowed_methods = set(http_method_names) | set(('options',)) + WrappedAPIView.http_method_names = [method.lower() for method in allowed_methods] + + def handler(self, *args, **kwargs): + return func(*args, **kwargs) + + for method in http_method_names: + setattr(WrappedAPIView, method.lower(), handler) + + WrappedAPIView.__name__ = func.__name__ + + WrappedAPIView.renderer_classes = getattr(func, 'renderer_classes', + APIView.renderer_classes) + + WrappedAPIView.parser_classes = getattr(func, 'parser_classes', + APIView.parser_classes) + + WrappedAPIView.authentication_classes = getattr(func, 'authentication_classes', + APIView.authentication_classes) + + WrappedAPIView.throttle_classes = getattr(func, 'throttle_classes', + APIView.throttle_classes) + + WrappedAPIView.permission_classes = getattr(func, 'permission_classes', + APIView.permission_classes) + + return WrappedAPIView.as_view() + return decorator + + +def renderer_classes(renderer_classes): + def decorator(func): + func.renderer_classes = renderer_classes + return func + return decorator + + +def parser_classes(parser_classes): + def decorator(func): + func.parser_classes = parser_classes + return func + return decorator + + +def authentication_classes(authentication_classes): + def decorator(func): + func.authentication_classes = authentication_classes + return func + return decorator + + +def throttle_classes(throttle_classes): + def decorator(func): + func.throttle_classes = throttle_classes + return func + return decorator + + +def permission_classes(permission_classes): + def decorator(func): + func.permission_classes = permission_classes + return func + return decorator + + +def link(**kwargs): + """ + Used to mark a method on a ViewSet that should be routed for GET requests. + """ + def decorator(func): + func.bind_to_methods = ['get'] + func.kwargs = kwargs + return func + return decorator + + +def action(methods=['post'], **kwargs): + """ + Used to mark a method on a ViewSet that should be routed for POST requests. + """ + def decorator(func): + func.bind_to_methods = methods + func.kwargs = kwargs + return func + return decorator diff --git a/awx/lib/site-packages/rest_framework/exceptions.py b/awx/lib/site-packages/rest_framework/exceptions.py new file mode 100644 index 0000000000..0c96ecdd52 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/exceptions.py @@ -0,0 +1,95 @@ +""" +Handled exceptions raised by REST framework. + +In addition Django's built in 403 and 404 exceptions are handled. +(`django.http.Http404` and `django.core.exceptions.PermissionDenied`) +""" +from __future__ import unicode_literals +from rest_framework import status + + +class APIException(Exception): + """ + Base class for REST framework exceptions. + Subclasses should provide `.status_code` and `.detail` properties. + """ + pass + + +class ParseError(APIException): + status_code = status.HTTP_400_BAD_REQUEST + default_detail = 'Malformed request.' + + def __init__(self, detail=None): + self.detail = detail or self.default_detail + + +class AuthenticationFailed(APIException): + status_code = status.HTTP_401_UNAUTHORIZED + default_detail = 'Incorrect authentication credentials.' 
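
Typical use of the decorators defined in decorators.py above, as a sketch that assumes a configured Django project with this `rest_framework` package on the path:

from rest_framework.decorators import api_view
from rest_framework.response import Response

@api_view(['GET', 'POST'])
def echo(request):
    # request.DATA is REST framework's parsed request body;
    # OPTIONS is added to the allowed methods automatically.
    return Response({'method': request.method, 'data': request.DATA})
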
+ + def __init__(self, detail=None): + self.detail = detail or self.default_detail + + +class NotAuthenticated(APIException): + status_code = status.HTTP_401_UNAUTHORIZED + default_detail = 'Authentication credentials were not provided.' + + def __init__(self, detail=None): + self.detail = detail or self.default_detail + + +class PermissionDenied(APIException): + status_code = status.HTTP_403_FORBIDDEN + default_detail = 'You do not have permission to perform this action.' + + def __init__(self, detail=None): + self.detail = detail or self.default_detail + + +class MethodNotAllowed(APIException): + status_code = status.HTTP_405_METHOD_NOT_ALLOWED + default_detail = "Method '%s' not allowed." + + def __init__(self, method, detail=None): + self.detail = (detail or self.default_detail) % method + + +class NotAcceptable(APIException): + status_code = status.HTTP_406_NOT_ACCEPTABLE + default_detail = "Could not satisfy the request's Accept header" + + def __init__(self, detail=None, available_renderers=None): + self.detail = detail or self.default_detail + self.available_renderers = available_renderers + + +class UnsupportedMediaType(APIException): + status_code = status.HTTP_415_UNSUPPORTED_MEDIA_TYPE + default_detail = "Unsupported media type '%s' in request." + + def __init__(self, media_type, detail=None): + self.detail = (detail or self.default_detail) % media_type + + +class Throttled(APIException): + status_code = status.HTTP_429_TOO_MANY_REQUESTS + default_detail = "Request was throttled." + extra_detail = "Expected available in %d second%s." + + def __init__(self, wait=None, detail=None): + import math + self.wait = wait and math.ceil(wait) or None + if wait is not None: + format = detail or self.default_detail + self.extra_detail + self.detail = format % (self.wait, self.wait != 1 and 's' or '') + else: + self.detail = detail or self.default_detail + + +class ConfigurationError(Exception): + """ + Indicates an internal server error. + """ + pass diff --git a/awx/lib/site-packages/rest_framework/fields.py b/awx/lib/site-packages/rest_framework/fields.py new file mode 100644 index 0000000000..535aa2ac8e --- /dev/null +++ b/awx/lib/site-packages/rest_framework/fields.py @@ -0,0 +1,962 @@ +""" +Serializer fields perform validation on incoming data. + +They are very similar to Django's form fields. +""" +from __future__ import unicode_literals + +import copy +import datetime +from decimal import Decimal, DecimalException +import inspect +import re +import warnings +from django.core import validators +from django.core.exceptions import ValidationError +from django.conf import settings +from django.db.models.fields import BLANK_CHOICE_DASH +from django import forms +from django.forms import widgets +from django.utils.encoding import is_protected_type +from django.utils.translation import ugettext_lazy as _ +from django.utils.datastructures import SortedDict +from rest_framework import ISO_8601 +from rest_framework.compat import (timezone, parse_date, parse_datetime, + parse_time) +from rest_framework.compat import BytesIO +from rest_framework.compat import six +from rest_framework.compat import smart_text, force_text, is_non_str_iterable +from rest_framework.settings import api_settings + + +def is_simple_callable(obj): + """ + True if the object is a callable that takes no arguments. 
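
Subclassing `APIException` (exceptions.py above) is how new error types are added: each subclass supplies a `status_code` and `default_detail`, mirroring the built-in classes. A hypothetical 503 variant, not part of this diff:

from rest_framework import status
from rest_framework.exceptions import APIException

class ServiceUnavailable(APIException):
    status_code = status.HTTP_503_SERVICE_UNAVAILABLE
    default_detail = 'Service temporarily unavailable, try again later.'

    def __init__(self, detail=None):
        self.detail = detail or self.default_detail
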
+ """ + function = inspect.isfunction(obj) + method = inspect.ismethod(obj) + + if not (function or method): + return False + + args, _, _, defaults = inspect.getargspec(obj) + len_args = len(args) if function else len(args) - 1 + len_defaults = len(defaults) if defaults else 0 + return len_args <= len_defaults + + +def get_component(obj, attr_name): + """ + Given an object, and an attribute name, + return that attribute on the object. + """ + if isinstance(obj, dict): + val = obj.get(attr_name) + else: + val = getattr(obj, attr_name) + + if is_simple_callable(val): + return val() + return val + + +def readable_datetime_formats(formats): + format = ', '.join(formats).replace(ISO_8601, + 'YYYY-MM-DDThh:mm[:ss[.uuuuuu]][+HHMM|-HHMM|Z]') + return humanize_strptime(format) + + +def readable_date_formats(formats): + format = ', '.join(formats).replace(ISO_8601, 'YYYY[-MM[-DD]]') + return humanize_strptime(format) + + +def readable_time_formats(formats): + format = ', '.join(formats).replace(ISO_8601, 'hh:mm[:ss[.uuuuuu]]') + return humanize_strptime(format) + + +def humanize_strptime(format_string): + # Note that we're missing some of the locale specific mappings that + # don't really make sense. + mapping = { + "%Y": "YYYY", + "%y": "YY", + "%m": "MM", + "%b": "[Jan-Dec]", + "%B": "[January-December]", + "%d": "DD", + "%H": "hh", + "%I": "hh", # Requires '%p' to differentiate from '%H'. + "%M": "mm", + "%S": "ss", + "%f": "uuuuuu", + "%a": "[Mon-Sun]", + "%A": "[Monday-Sunday]", + "%p": "[AM|PM]", + "%z": "[+HHMM|-HHMM]" + } + for key, val in mapping.items(): + format_string = format_string.replace(key, val) + return format_string + + +class Field(object): + read_only = True + creation_counter = 0 + empty = '' + type_name = None + partial = False + use_files = False + form_field_class = forms.CharField + type_label = 'field' + + def __init__(self, source=None, label=None, help_text=None): + self.parent = None + + self.creation_counter = Field.creation_counter + Field.creation_counter += 1 + + self.source = source + + if label is not None: + self.label = smart_text(label) + + if help_text is not None: + self.help_text = smart_text(help_text) + + def initialize(self, parent, field_name): + """ + Called to set up a field prior to field_to_native or field_from_native. + + parent - The parent serializer. + model_field - The model field this field corresponds to, if one exists. + """ + self.parent = parent + self.root = parent.root or parent + self.context = self.root.context + self.partial = self.root.partial + if self.partial: + self.required = False + + def field_from_native(self, data, files, field_name, into): + """ + Given a dictionary and a field name, updates the dictionary `into`, + with the field and it's deserialized value. + """ + return + + def field_to_native(self, obj, field_name): + """ + Given and object and a field name, returns the value that should be + serialized for that field. + """ + if obj is None: + return self.empty + + if self.source == '*': + return self.to_native(obj) + + source = self.source or field_name + value = obj + + for component in source.split('.'): + value = get_component(value, component) + if value is None: + break + + return self.to_native(value) + + def to_native(self, value): + """ + Converts the field's value into it's simple representation. 
+ """ + if is_simple_callable(value): + value = value() + + if is_protected_type(value): + return value + elif (is_non_str_iterable(value) and + not isinstance(value, (dict, six.string_types))): + return [self.to_native(item) for item in value] + elif isinstance(value, dict): + # Make sure we preserve field ordering, if it exists + ret = SortedDict() + for key, val in value.items(): + ret[key] = self.to_native(val) + return ret + return force_text(value) + + def attributes(self): + """ + Returns a dictionary of attributes to be used when serializing to xml. + """ + if self.type_name: + return {'type': self.type_name} + return {} + + def metadata(self): + metadata = SortedDict() + metadata['type'] = self.type_label + metadata['required'] = getattr(self, 'required', False) + optional_attrs = ['read_only', 'label', 'help_text', + 'min_length', 'max_length'] + for attr in optional_attrs: + value = getattr(self, attr, None) + if value is not None and value != '': + metadata[attr] = force_text(value, strings_only=True) + return metadata + + +class WritableField(Field): + """ + Base for read/write fields. + """ + default_validators = [] + default_error_messages = { + 'required': _('This field is required.'), + 'invalid': _('Invalid value.'), + } + widget = widgets.TextInput + default = None + + def __init__(self, source=None, label=None, help_text=None, + read_only=False, required=None, + validators=[], error_messages=None, widget=None, + default=None, blank=None): + + # 'blank' is to be deprecated in favor of 'required' + if blank is not None: + warnings.warn('The `blank` keyword argument is deprecated. ' + 'Use the `required` keyword argument instead.', + DeprecationWarning, stacklevel=2) + required = not(blank) + + super(WritableField, self).__init__(source=source, label=label, help_text=help_text) + + self.read_only = read_only + if required is None: + self.required = not(read_only) + else: + assert not (read_only and required), "Cannot set required=True and read_only=True" + self.required = required + + messages = {} + for c in reversed(self.__class__.__mro__): + messages.update(getattr(c, 'default_error_messages', {})) + messages.update(error_messages or {}) + self.error_messages = messages + + self.validators = self.default_validators + validators + self.default = default if default is not None else self.default + + # Widgets are ony used for HTML forms. + widget = widget or self.widget + if isinstance(widget, type): + widget = widget() + self.widget = widget + + def validate(self, value): + if value in validators.EMPTY_VALUES and self.required: + raise ValidationError(self.error_messages['required']) + + def run_validators(self, value): + if value in validators.EMPTY_VALUES: + return + errors = [] + for v in self.validators: + try: + v(value) + except ValidationError as e: + if hasattr(e, 'code') and e.code in self.error_messages: + message = self.error_messages[e.code] + if e.params: + message = message % e.params + errors.append(message) + else: + errors.extend(e.messages) + if errors: + raise ValidationError(errors) + + def field_from_native(self, data, files, field_name, into): + """ + Given a dictionary and a field name, updates the dictionary `into`, + with the field and it's deserialized value. 
+ """ + if self.read_only: + return + + try: + if self.use_files: + files = files or {} + native = files[field_name] + else: + native = data[field_name] + except KeyError: + if self.default is not None and not self.partial: + # Note: partial updates shouldn't set defaults + if is_simple_callable(self.default): + native = self.default() + else: + native = self.default + else: + if self.required: + raise ValidationError(self.error_messages['required']) + return + + value = self.from_native(native) + if self.source == '*': + if value: + into.update(value) + else: + self.validate(value) + self.run_validators(value) + into[self.source or field_name] = value + + def from_native(self, value): + """ + Reverts a simple representation back to the field's value. + """ + return value + + +class ModelField(WritableField): + """ + A generic field that can be used against an arbitrary model field. + """ + def __init__(self, *args, **kwargs): + try: + self.model_field = kwargs.pop('model_field') + except KeyError: + raise ValueError("ModelField requires 'model_field' kwarg") + + self.min_length = kwargs.pop('min_length', + getattr(self.model_field, 'min_length', None)) + self.max_length = kwargs.pop('max_length', + getattr(self.model_field, 'max_length', None)) + + super(ModelField, self).__init__(*args, **kwargs) + + if self.min_length is not None: + self.validators.append(validators.MinLengthValidator(self.min_length)) + if self.max_length is not None: + self.validators.append(validators.MaxLengthValidator(self.max_length)) + + def from_native(self, value): + rel = getattr(self.model_field, "rel", None) + if rel is not None: + return rel.to._meta.get_field(rel.field_name).to_python(value) + else: + return self.model_field.to_python(value) + + def field_to_native(self, obj, field_name): + value = self.model_field._get_val_from_obj(obj) + if is_protected_type(value): + return value + return self.model_field.value_to_string(obj) + + def attributes(self): + return { + "type": self.model_field.get_internal_type() + } + + +##### Typed Fields ##### + +class BooleanField(WritableField): + type_name = 'BooleanField' + type_label = 'boolean' + form_field_class = forms.BooleanField + widget = widgets.CheckboxInput + default_error_messages = { + 'invalid': _("'%s' value must be either True or False."), + } + empty = False + + # Note: we set default to `False` in order to fill in missing value not + # supplied by html form. TODO: Fix so that only html form input gets + # this behavior. 
+ default = False + + def from_native(self, value): + if value in ('true', 't', 'True', '1'): + return True + if value in ('false', 'f', 'False', '0'): + return False + return bool(value) + + +class CharField(WritableField): + type_name = 'CharField' + type_label = 'string' + form_field_class = forms.CharField + + def __init__(self, max_length=None, min_length=None, *args, **kwargs): + self.max_length, self.min_length = max_length, min_length + super(CharField, self).__init__(*args, **kwargs) + if min_length is not None: + self.validators.append(validators.MinLengthValidator(min_length)) + if max_length is not None: + self.validators.append(validators.MaxLengthValidator(max_length)) + + def from_native(self, value): + if isinstance(value, six.string_types) or value is None: + return value + return smart_text(value) + + +class URLField(CharField): + type_name = 'URLField' + type_label = 'url' + + def __init__(self, **kwargs): + kwargs['validators'] = [validators.URLValidator()] + super(URLField, self).__init__(**kwargs) + + +class SlugField(CharField): + type_name = 'SlugField' + type_label = 'slug' + form_field_class = forms.SlugField + + default_error_messages = { + 'invalid': _("Enter a valid 'slug' consisting of letters, numbers," + " underscores or hyphens."), + } + default_validators = [validators.validate_slug] + + def __init__(self, *args, **kwargs): + super(SlugField, self).__init__(*args, **kwargs) + + def __deepcopy__(self, memo): + result = copy.copy(self) + memo[id(self)] = result + #result.widget = copy.deepcopy(self.widget, memo) + result.validators = self.validators[:] + return result + + +class ChoiceField(WritableField): + type_name = 'ChoiceField' + type_label = 'multiple choice' + form_field_class = forms.ChoiceField + widget = widgets.Select + default_error_messages = { + 'invalid_choice': _('Select a valid choice. %(value)s is not one of ' + 'the available choices.'), + } + + def __init__(self, choices=(), *args, **kwargs): + super(ChoiceField, self).__init__(*args, **kwargs) + self.choices = choices + if not self.required: + self.choices = BLANK_CHOICE_DASH + self.choices + + def _get_choices(self): + return self._choices + + def _set_choices(self, value): + # Setting choices also sets the choices on the widget. + # choices can be any iterable, but we call list() on it because + # it will be consumed more than once. + self._choices = self.widget.choices = list(value) + + choices = property(_get_choices, _set_choices) + + def validate(self, value): + """ + Validates that the input is in self.choices. + """ + super(ChoiceField, self).validate(value) + if value and not self.valid_value(value): + raise ValidationError(self.error_messages['invalid_choice'] % {'value': value}) + + def valid_value(self, value): + """ + Check to see if the provided value is a valid choice. 
+ """ + for k, v in self.choices: + if isinstance(v, (list, tuple)): + # This is an optgroup, so look inside the group for options + for k2, v2 in v: + if value == smart_text(k2): + return True + else: + if value == smart_text(k) or value == k: + return True + return False + + +class EmailField(CharField): + type_name = 'EmailField' + type_label = 'email' + form_field_class = forms.EmailField + + default_error_messages = { + 'invalid': _('Enter a valid e-mail address.'), + } + default_validators = [validators.validate_email] + + def from_native(self, value): + ret = super(EmailField, self).from_native(value) + if ret is None: + return None + return ret.strip() + + def __deepcopy__(self, memo): + result = copy.copy(self) + memo[id(self)] = result + #result.widget = copy.deepcopy(self.widget, memo) + result.validators = self.validators[:] + return result + + +class RegexField(CharField): + type_name = 'RegexField' + type_label = 'regex' + form_field_class = forms.RegexField + + def __init__(self, regex, max_length=None, min_length=None, *args, **kwargs): + super(RegexField, self).__init__(max_length, min_length, *args, **kwargs) + self.regex = regex + + def _get_regex(self): + return self._regex + + def _set_regex(self, regex): + if isinstance(regex, six.string_types): + regex = re.compile(regex) + self._regex = regex + if hasattr(self, '_regex_validator') and self._regex_validator in self.validators: + self.validators.remove(self._regex_validator) + self._regex_validator = validators.RegexValidator(regex=regex) + self.validators.append(self._regex_validator) + + regex = property(_get_regex, _set_regex) + + def __deepcopy__(self, memo): + result = copy.copy(self) + memo[id(self)] = result + result.validators = self.validators[:] + return result + + +class DateField(WritableField): + type_name = 'DateField' + type_label = 'date' + widget = widgets.DateInput + form_field_class = forms.DateField + + default_error_messages = { + 'invalid': _("Date has wrong format. Use one of these formats instead: %s"), + } + empty = None + input_formats = api_settings.DATE_INPUT_FORMATS + format = api_settings.DATE_FORMAT + + def __init__(self, input_formats=None, format=None, *args, **kwargs): + self.input_formats = input_formats if input_formats is not None else self.input_formats + self.format = format if format is not None else self.format + super(DateField, self).__init__(*args, **kwargs) + + def from_native(self, value): + if value in validators.EMPTY_VALUES: + return None + + if isinstance(value, datetime.datetime): + if timezone and settings.USE_TZ and timezone.is_aware(value): + # Convert aware datetimes to the default time zone + # before casting them to dates (#17742). 
+ default_timezone = timezone.get_default_timezone() + value = timezone.make_naive(value, default_timezone) + return value.date() + if isinstance(value, datetime.date): + return value + + for format in self.input_formats: + if format.lower() == ISO_8601: + try: + parsed = parse_date(value) + except (ValueError, TypeError): + pass + else: + if parsed is not None: + return parsed + else: + try: + parsed = datetime.datetime.strptime(value, format) + except (ValueError, TypeError): + pass + else: + return parsed.date() + + msg = self.error_messages['invalid'] % readable_date_formats(self.input_formats) + raise ValidationError(msg) + + def to_native(self, value): + if value is None or self.format is None: + return value + + if isinstance(value, datetime.datetime): + value = value.date() + + if self.format.lower() == ISO_8601: + return value.isoformat() + return value.strftime(self.format) + + +class DateTimeField(WritableField): + type_name = 'DateTimeField' + type_label = 'datetime' + widget = widgets.DateTimeInput + form_field_class = forms.DateTimeField + + default_error_messages = { + 'invalid': _("Datetime has wrong format. Use one of these formats instead: %s"), + } + empty = None + input_formats = api_settings.DATETIME_INPUT_FORMATS + format = api_settings.DATETIME_FORMAT + + def __init__(self, input_formats=None, format=None, *args, **kwargs): + self.input_formats = input_formats if input_formats is not None else self.input_formats + self.format = format if format is not None else self.format + super(DateTimeField, self).__init__(*args, **kwargs) + + def from_native(self, value): + if value in validators.EMPTY_VALUES: + return None + + if isinstance(value, datetime.datetime): + return value + if isinstance(value, datetime.date): + value = datetime.datetime(value.year, value.month, value.day) + if settings.USE_TZ: + # For backwards compatibility, interpret naive datetimes in + # local time. This won't work during DST change, but we can't + # do much about it, so we let the exceptions percolate up the + # call stack. + warnings.warn("DateTimeField received a naive datetime (%s)" + " while time zone support is active." % value, + RuntimeWarning) + default_timezone = timezone.get_default_timezone() + value = timezone.make_aware(value, default_timezone) + return value + + for format in self.input_formats: + if format.lower() == ISO_8601: + try: + parsed = parse_datetime(value) + except (ValueError, TypeError): + pass + else: + if parsed is not None: + return parsed + else: + try: + parsed = datetime.datetime.strptime(value, format) + except (ValueError, TypeError): + pass + else: + return parsed + + msg = self.error_messages['invalid'] % readable_datetime_formats(self.input_formats) + raise ValidationError(msg) + + def to_native(self, value): + if value is None or self.format is None: + return value + + if self.format.lower() == ISO_8601: + ret = value.isoformat() + if ret.endswith('+00:00'): + ret = ret[:-6] + 'Z' + return ret + return value.strftime(self.format) + + +class TimeField(WritableField): + type_name = 'TimeField' + type_label = 'time' + widget = widgets.TimeInput + form_field_class = forms.TimeField + + default_error_messages = { + 'invalid': _("Time has wrong format. 
Use one of these formats instead: %s"), + } + empty = None + input_formats = api_settings.TIME_INPUT_FORMATS + format = api_settings.TIME_FORMAT + + def __init__(self, input_formats=None, format=None, *args, **kwargs): + self.input_formats = input_formats if input_formats is not None else self.input_formats + self.format = format if format is not None else self.format + super(TimeField, self).__init__(*args, **kwargs) + + def from_native(self, value): + if value in validators.EMPTY_VALUES: + return None + + if isinstance(value, datetime.time): + return value + + for format in self.input_formats: + if format.lower() == ISO_8601: + try: + parsed = parse_time(value) + except (ValueError, TypeError): + pass + else: + if parsed is not None: + return parsed + else: + try: + parsed = datetime.datetime.strptime(value, format) + except (ValueError, TypeError): + pass + else: + return parsed.time() + + msg = self.error_messages['invalid'] % readable_time_formats(self.input_formats) + raise ValidationError(msg) + + def to_native(self, value): + if value is None or self.format is None: + return value + + if isinstance(value, datetime.datetime): + value = value.time() + + if self.format.lower() == ISO_8601: + return value.isoformat() + return value.strftime(self.format) + + +class IntegerField(WritableField): + type_name = 'IntegerField' + type_label = 'integer' + form_field_class = forms.IntegerField + + default_error_messages = { + 'invalid': _('Enter a whole number.'), + 'max_value': _('Ensure this value is less than or equal to %(limit_value)s.'), + 'min_value': _('Ensure this value is greater than or equal to %(limit_value)s.'), + } + + def __init__(self, max_value=None, min_value=None, *args, **kwargs): + self.max_value, self.min_value = max_value, min_value + super(IntegerField, self).__init__(*args, **kwargs) + + if max_value is not None: + self.validators.append(validators.MaxValueValidator(max_value)) + if min_value is not None: + self.validators.append(validators.MinValueValidator(min_value)) + + def from_native(self, value): + if value in validators.EMPTY_VALUES: + return None + + try: + value = int(str(value)) + except (ValueError, TypeError): + raise ValidationError(self.error_messages['invalid']) + return value + + +class FloatField(WritableField): + type_name = 'FloatField' + type_label = 'float' + form_field_class = forms.FloatField + + default_error_messages = { + 'invalid': _("'%s' value must be a float."), + } + + def from_native(self, value): + if value in validators.EMPTY_VALUES: + return None + + try: + return float(value) + except (TypeError, ValueError): + msg = self.error_messages['invalid'] % value + raise ValidationError(msg) + + +class DecimalField(WritableField): + type_name = 'DecimalField' + type_label = 'decimal' + form_field_class = forms.DecimalField + + default_error_messages = { + 'invalid': _('Enter a number.'), + 'max_value': _('Ensure this value is less than or equal to %(limit_value)s.'), + 'min_value': _('Ensure this value is greater than or equal to %(limit_value)s.'), + 'max_digits': _('Ensure that there are no more than %s digits in total.'), + 'max_decimal_places': _('Ensure that there are no more than %s decimal places.'), + 'max_whole_digits': _('Ensure that there are no more than %s digits before the decimal point.') + } + + def __init__(self, max_value=None, min_value=None, max_digits=None, decimal_places=None, *args, **kwargs): + self.max_value, self.min_value = max_value, min_value + self.max_digits, self.decimal_places = max_digits, decimal_places + 
super(DecimalField, self).__init__(*args, **kwargs) + + if max_value is not None: + self.validators.append(validators.MaxValueValidator(max_value)) + if min_value is not None: + self.validators.append(validators.MinValueValidator(min_value)) + + def from_native(self, value): + """ + Validates that the input is a decimal number. Returns a Decimal + instance. Returns None for empty values. Ensures that there are no more + than max_digits in the number, and no more than decimal_places digits + after the decimal point. + """ + if value in validators.EMPTY_VALUES: + return None + value = smart_text(value).strip() + try: + value = Decimal(value) + except DecimalException: + raise ValidationError(self.error_messages['invalid']) + return value + + def validate(self, value): + super(DecimalField, self).validate(value) + if value in validators.EMPTY_VALUES: + return + # Check for NaN, Inf and -Inf values. We can't compare directly for NaN, + # since it is never equal to itself. However, NaN is the only value that + # isn't equal to itself, so we can use this to identify NaN + if value != value or value == Decimal("Inf") or value == Decimal("-Inf"): + raise ValidationError(self.error_messages['invalid']) + sign, digittuple, exponent = value.as_tuple() + decimals = abs(exponent) + # digittuple doesn't include any leading zeros. + digits = len(digittuple) + if decimals > digits: + # We have leading zeros up to or past the decimal point. Count + # everything past the decimal point as a digit. We do not count + # 0 before the decimal point as a digit since that would mean + # we would not allow max_digits = decimal_places. + digits = decimals + whole_digits = digits - decimals + + if self.max_digits is not None and digits > self.max_digits: + raise ValidationError(self.error_messages['max_digits'] % self.max_digits) + if self.decimal_places is not None and decimals > self.decimal_places: + raise ValidationError(self.error_messages['max_decimal_places'] % self.decimal_places) + if self.max_digits is not None and self.decimal_places is not None and whole_digits > (self.max_digits - self.decimal_places): + raise ValidationError(self.error_messages['max_whole_digits'] % (self.max_digits - self.decimal_places)) + return value + + +class FileField(WritableField): + use_files = True + type_name = 'FileField' + type_label = 'file upload' + form_field_class = forms.FileField + widget = widgets.FileInput + + default_error_messages = { + 'invalid': _("No file was submitted. Check the encoding type on the form."), + 'missing': _("No file was submitted."), + 'empty': _("The submitted file is empty."), + 'max_length': _('Ensure this filename has at most %(max)d characters (it has %(length)d).'), + 'contradiction': _('Please either submit a file or check the clear checkbox, not both.') + } + + def __init__(self, *args, **kwargs): + self.max_length = kwargs.pop('max_length', None) + self.allow_empty_file = kwargs.pop('allow_empty_file', False) + super(FileField, self).__init__(*args, **kwargs) + + def from_native(self, data): + if data in validators.EMPTY_VALUES: + return None + + # UploadedFile objects should have name and size attributes. 
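
The digit accounting in `DecimalField.validate` above is subtle because `Decimal.as_tuple()` stores no leading zeros. Tracing it by hand for `Decimal('0.042')`:

from decimal import Decimal

sign, digittuple, exponent = Decimal('0.042').as_tuple()
decimals = abs(exponent)            # 3 -- digits after the decimal point
digits = len(digittuple)            # 2 -- only (4, 2) are stored
if decimals > digits:
    digits = decimals               # count the zeros past the point: 3
whole_digits = digits - decimals    # 0 -- nothing before the point
print digits, decimals, whole_digits   # 3 3 0
# So max_digits=3, decimal_places=3 accepts this value.
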
+        try:
+            file_name = data.name
+            file_size = data.size
+        except AttributeError:
+            raise ValidationError(self.error_messages['invalid'])
+
+        if self.max_length is not None and len(file_name) > self.max_length:
+            error_values = {'max': self.max_length, 'length': len(file_name)}
+            raise ValidationError(self.error_messages['max_length'] % error_values)
+        if not file_name:
+            raise ValidationError(self.error_messages['invalid'])
+        if not self.allow_empty_file and not file_size:
+            raise ValidationError(self.error_messages['empty'])
+
+        return data
+
+    def to_native(self, value):
+        return value.name
+
+
+class ImageField(FileField):
+    use_files = True
+    type_name = 'ImageField'
+    type_label = 'image upload'
+    form_field_class = forms.ImageField
+
+    default_error_messages = {
+        'invalid_image': _("Upload a valid image. The file you uploaded was "
+                           "either not an image or a corrupted image."),
+    }
+
+    def from_native(self, data):
+        """
+        Checks that the file-upload field data contains a valid image (GIF, JPG,
+        PNG, possibly others -- whatever the Python Imaging Library supports).
+        """
+        f = super(ImageField, self).from_native(data)
+        if f is None:
+            return None
+
+        from rest_framework.compat import Image
+        assert Image is not None, 'PIL must be installed for ImageField support'
+
+        # We need to get a file object for PIL. We might have a path or we might
+        # have to read the data into memory.
+        if hasattr(data, 'temporary_file_path'):
+            file = data.temporary_file_path()
+        else:
+            if hasattr(data, 'read'):
+                file = BytesIO(data.read())
+            else:
+                file = BytesIO(data['content'])
+
+        try:
+            # load() could spot a truncated JPEG, but it loads the entire
+            # image in memory, which is a DoS vector. See #3848 and #18520.
+            # verify() must be called immediately after the constructor.
+            Image.open(file).verify()
+        except ImportError:
+            # Under PyPy, it is possible to import PIL. However, the underlying
+            # _imaging C module isn't available, so an ImportError will be
+            # raised. Catch and re-raise.
+            raise
+        except Exception:  # Python Imaging Library doesn't recognize it as an image
+            raise ValidationError(self.error_messages['invalid_image'])
+        if hasattr(f, 'seek') and callable(f.seek):
+            f.seek(0)
+        return f
+
+
+class SerializerMethodField(Field):
+    """
+    A field that gets its value by calling a method on the serializer it's attached to.
+    """
+
+    def __init__(self, method_name):
+        self.method_name = method_name
+        super(SerializerMethodField, self).__init__()
+
+    def field_to_native(self, obj, field_name):
+        value = getattr(self.parent, self.method_name)(obj)
+        return self.to_native(value)
diff --git a/awx/lib/site-packages/rest_framework/filters.py b/awx/lib/site-packages/rest_framework/filters.py
new file mode 100644
index 0000000000..c058bc715e
--- /dev/null
+++ b/awx/lib/site-packages/rest_framework/filters.py
@@ -0,0 +1,143 @@
+"""
+Provides generic filtering backends that can be used to filter the results
+returned by list views.
+"""
+from __future__ import unicode_literals
+from django.db import models
+from rest_framework.compat import django_filters, six
+from functools import reduce
+import operator
+
+FilterSet = django_filters and django_filters.FilterSet or None
+
+
+class BaseFilterBackend(object):
+    """
+    A base class from which all filter backend classes should inherit.
+    """
+
+    def filter_queryset(self, request, queryset, view):
+        """
+        Return a filtered queryset.
+ """ + raise NotImplementedError(".filter_queryset() must be overridden.") + + +class DjangoFilterBackend(BaseFilterBackend): + """ + A filter backend that uses django-filter. + """ + default_filter_set = FilterSet + + def __init__(self): + assert django_filters, 'Using DjangoFilterBackend, but django-filter is not installed' + + def get_filter_class(self, view, queryset=None): + """ + Return the django-filters `FilterSet` used to filter the queryset. + """ + filter_class = getattr(view, 'filter_class', None) + filter_fields = getattr(view, 'filter_fields', None) + + if filter_class: + filter_model = filter_class.Meta.model + + assert issubclass(filter_model, queryset.model), \ + 'FilterSet model %s does not match queryset model %s' % \ + (filter_model, queryset.model) + + return filter_class + + if filter_fields: + class AutoFilterSet(self.default_filter_set): + class Meta: + model = queryset.model + fields = filter_fields + return AutoFilterSet + + return None + + def filter_queryset(self, request, queryset, view): + filter_class = self.get_filter_class(view, queryset) + + if filter_class: + return filter_class(request.QUERY_PARAMS, queryset=queryset).qs + + return queryset + + +class SearchFilter(BaseFilterBackend): + search_param = 'search' # The URL query parameter used for the search. + + def get_search_terms(self, request): + """ + Search terms are set by a ?search=... query parameter, + and may be comma and/or whitespace delimited. + """ + params = request.QUERY_PARAMS.get(self.search_param, '') + return params.replace(',', ' ').split() + + def construct_search(self, field_name): + if field_name.startswith('^'): + return "%s__istartswith" % field_name[1:] + elif field_name.startswith('='): + return "%s__iexact" % field_name[1:] + elif field_name.startswith('@'): + return "%s__search" % field_name[1:] + else: + return "%s__icontains" % field_name + + def filter_queryset(self, request, queryset, view): + search_fields = getattr(view, 'search_fields', None) + + if not search_fields: + return queryset + + orm_lookups = [self.construct_search(str(search_field)) + for search_field in search_fields] + + for search_term in self.get_search_terms(request): + or_queries = [models.Q(**{orm_lookup: search_term}) + for orm_lookup in orm_lookups] + queryset = queryset.filter(reduce(operator.or_, or_queries)) + + return queryset + + +class OrderingFilter(BaseFilterBackend): + ordering_param = 'ordering' # The URL query parameter used for the ordering. + + def get_ordering(self, request): + """ + Search terms are set by a ?search=... query parameter, + and may be comma and/or whitespace delimited. 
+ """ + params = request.QUERY_PARAMS.get(self.ordering_param) + if params: + return [param.strip() for param in params.split(',')] + + def get_default_ordering(self, view): + ordering = getattr(view, 'ordering', None) + if isinstance(ordering, six.string_types): + return (ordering,) + return ordering + + def remove_invalid_fields(self, queryset, ordering): + field_names = [field.name for field in queryset.model._meta.fields] + return [term for term in ordering if term.lstrip('-') in field_names] + + def filter_queryset(self, request, queryset, view): + ordering = self.get_ordering(request) + + if ordering: + # Skip any incorrect parameters + ordering = self.remove_invalid_fields(queryset, ordering) + + if not ordering: + # Use 'ordering' attribtue by default + ordering = self.get_default_ordering(view) + + if ordering: + return queryset.order_by(*ordering) + + return queryset diff --git a/awx/lib/site-packages/rest_framework/generics.py b/awx/lib/site-packages/rest_framework/generics.py new file mode 100644 index 0000000000..9ccc789805 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/generics.py @@ -0,0 +1,499 @@ +""" +Generic views that provide commonly needed behaviour. +""" +from __future__ import unicode_literals + +from django.core.exceptions import ImproperlyConfigured, PermissionDenied +from django.core.paginator import Paginator, InvalidPage +from django.http import Http404 +from django.shortcuts import get_object_or_404 as _get_object_or_404 +from django.utils.translation import ugettext as _ +from rest_framework import views, mixins, exceptions +from rest_framework.request import clone_request +from rest_framework.settings import api_settings +import warnings + + +def get_object_or_404(queryset, **filter_kwargs): + """ + Same as Django's standard shortcut, but make sure to raise 404 + if the filter_kwargs don't match the required types. + """ + try: + return _get_object_or_404(queryset, **filter_kwargs) + except (TypeError, ValueError): + raise Http404 + + +class GenericAPIView(views.APIView): + """ + Base class for all other generic views. + """ + + # You'll need to either set these attributes, + # or override `get_queryset()`/`get_serializer_class()`. + queryset = None + serializer_class = None + + # This shortcut may be used instead of setting either or both + # of the `queryset`/`serializer_class` attributes, although using + # the explicit style is generally preferred. + model = None + + # If you want to use object lookups other than pk, set this attribute. + # For more complex lookup requirements override `get_object()`. + lookup_field = 'pk' + + # Pagination settings + paginate_by = api_settings.PAGINATE_BY + paginate_by_param = api_settings.PAGINATE_BY_PARAM + pagination_serializer_class = api_settings.DEFAULT_PAGINATION_SERIALIZER_CLASS + page_kwarg = 'page' + + # The filter backend classes to use for queryset filtering + filter_backends = api_settings.DEFAULT_FILTER_BACKENDS + + # The following attributes may be subject to change, + # and should be considered private API. + model_serializer_class = api_settings.DEFAULT_MODEL_SERIALIZER_CLASS + paginator_class = Paginator + + ###################################### + # These are pending deprecation... + + pk_url_kwarg = 'pk' + slug_url_kwarg = 'slug' + slug_field = 'slug' + allow_empty = True + filter_backend = api_settings.FILTER_BACKEND + + def get_serializer_context(self): + """ + Extra context provided to the serializer class. 
+ """ + return { + 'request': self.request, + 'format': self.format_kwarg, + 'view': self + } + + def get_serializer(self, instance=None, data=None, + files=None, many=False, partial=False): + """ + Return the serializer instance that should be used for validating and + deserializing input, and for serializing output. + """ + serializer_class = self.get_serializer_class() + context = self.get_serializer_context() + return serializer_class(instance, data=data, files=files, + many=many, partial=partial, context=context) + + def get_pagination_serializer(self, page): + """ + Return a serializer instance to use with paginated data. + """ + class SerializerClass(self.pagination_serializer_class): + class Meta: + object_serializer_class = self.get_serializer_class() + + pagination_serializer_class = SerializerClass + context = self.get_serializer_context() + return pagination_serializer_class(instance=page, context=context) + + def paginate_queryset(self, queryset, page_size=None): + """ + Paginate a queryset if required, either returning a page object, + or `None` if pagination is not configured for this view. + """ + deprecated_style = False + if page_size is not None: + warnings.warn('The `page_size` parameter to `paginate_queryset()` ' + 'is due to be deprecated. ' + 'Note that the return style of this method is also ' + 'changed, and will simply return a page object ' + 'when called without a `page_size` argument.', + PendingDeprecationWarning, stacklevel=2) + deprecated_style = True + else: + # Determine the required page size. + # If pagination is not configured, simply return None. + page_size = self.get_paginate_by() + if not page_size: + return None + + if not self.allow_empty: + warnings.warn( + 'The `allow_empty` parameter is due to be deprecated. ' + 'To use `allow_empty=False` style behavior, You should override ' + '`get_queryset()` and explicitly raise a 404 on empty querysets.', + PendingDeprecationWarning, stacklevel=2 + ) + + paginator = self.paginator_class(queryset, page_size, + allow_empty_first_page=self.allow_empty) + page_kwarg = self.kwargs.get(self.page_kwarg) + page_query_param = self.request.QUERY_PARAMS.get(self.page_kwarg) + page = page_kwarg or page_query_param or 1 + try: + page_number = int(page) + except ValueError: + if page == 'last': + page_number = paginator.num_pages + else: + raise Http404(_("Page is not 'last', nor can it be converted to an int.")) + try: + page = paginator.page(page_number) + except InvalidPage as e: + raise Http404(_('Invalid page (%(page_number)s): %(message)s') % { + 'page_number': page_number, + 'message': str(e) + }) + + if deprecated_style: + return (paginator, page, page.object_list, page.has_other_pages()) + return page + + def filter_queryset(self, queryset): + """ + Given a queryset, filter it with whichever filter backend is in use. + + You are unlikely to want to override this method, although you may need + to call it either from a list view, or from a custom `get_object` + method if you want to apply the configured filtering backend to the + default queryset. 
+ """ + filter_backends = self.filter_backends or [] + if not filter_backends and self.filter_backend: + warnings.warn( + 'The `filter_backend` attribute and `FILTER_BACKEND` setting ' + 'are due to be deprecated in favor of a `filter_backends` ' + 'attribute and `DEFAULT_FILTER_BACKENDS` setting, that take ' + 'a *list* of filter backend classes.', + PendingDeprecationWarning, stacklevel=2 + ) + filter_backends = [self.filter_backend] + + for backend in filter_backends: + queryset = backend().filter_queryset(self.request, queryset, self) + return queryset + + ######################## + ### The following methods provide default implementations + ### that you may want to override for more complex cases. + + def get_paginate_by(self, queryset=None): + """ + Return the size of pages to use with pagination. + + If `PAGINATE_BY_PARAM` is set it will attempt to get the page size + from a named query parameter in the url, eg. ?page_size=100 + + Otherwise defaults to using `self.paginate_by`. + """ + if queryset is not None: + warnings.warn('The `queryset` parameter to `get_paginate_by()` ' + 'is due to be deprecated.', + PendingDeprecationWarning, stacklevel=2) + + if self.paginate_by_param: + query_params = self.request.QUERY_PARAMS + try: + return int(query_params[self.paginate_by_param]) + except (KeyError, ValueError): + pass + + return self.paginate_by + + def get_serializer_class(self): + """ + Return the class to use for the serializer. + Defaults to using `self.serializer_class`. + + You may want to override this if you need to provide different + serializations depending on the incoming request. + + (Eg. admins get full serialization, others get basic serilization) + """ + serializer_class = self.serializer_class + if serializer_class is not None: + return serializer_class + + assert self.model is not None, \ + "'%s' should either include a 'serializer_class' attribute, " \ + "or use the 'model' attribute as a shortcut for " \ + "automatically generating a serializer class." \ + % self.__class__.__name__ + + class DefaultSerializer(self.model_serializer_class): + class Meta: + model = self.model + return DefaultSerializer + + def get_queryset(self): + """ + Get the list of items for this view. + This must be an iterable, and may be a queryset. + Defaults to using `self.queryset`. + + You may want to override this if you need to provide different + querysets depending on the incoming request. + + (Eg. return a list of items that is specific to the user) + """ + if self.queryset is not None: + return self.queryset._clone() + + if self.model is not None: + return self.model._default_manager.all() + + raise ImproperlyConfigured("'%s' must define 'queryset' or 'model'" + % self.__class__.__name__) + + def get_object(self, queryset=None): + """ + Returns the object the view is displaying. + + You may want to override this if you need to provide non-standard + queryset lookups. Eg if objects are referenced using multiple + keyword arguments in the url conf. + """ + # Determine the base queryset to use. + if queryset is None: + queryset = self.filter_queryset(self.get_queryset()) + else: + pass # Deprecation warning + + # Perform the lookup filtering. 
+ pk = self.kwargs.get(self.pk_url_kwarg, None) + slug = self.kwargs.get(self.slug_url_kwarg, None) + lookup = self.kwargs.get(self.lookup_field, None) + + if lookup is not None: + filter_kwargs = {self.lookup_field: lookup} + elif pk is not None and self.lookup_field == 'pk': + warnings.warn( + 'The `pk_url_kwarg` attribute is due to be deprecated. ' + 'Use the `lookup_field` attribute instead', + PendingDeprecationWarning + ) + filter_kwargs = {'pk': pk} + elif slug is not None and self.lookup_field == 'pk': + warnings.warn( + 'The `slug_url_kwarg` attribute is due to be deprecated. ' + 'Use the `lookup_field` attribute instead', + PendingDeprecationWarning + ) + filter_kwargs = {self.slug_field: slug} + else: + raise exceptions.ConfigurationError( + 'Expected view %s to be called with a URL keyword argument ' + 'named "%s". Fix your URL conf, or set the `.lookup_field` ' + 'attribute on the view correctly.' % + (self.__class__.__name__, self.lookup_field) + ) + + obj = get_object_or_404(queryset, **filter_kwargs) + + # May raise a permission denied + self.check_object_permissions(self.request, obj) + + return obj + + ######################## + ### The following are placeholder methods, + ### and are intended to be overridden. + ### + ### These are not called by GenericAPIView directly, + ### but are used by the mixin methods. + + def pre_save(self, obj): + """ + Placeholder method for calling before saving an object. + + May be used to set attributes on the object that are implicit + in either the request, or the url. + """ + pass + + def post_save(self, obj, created=False): + """ + Placeholder method for calling after saving an object. + """ + pass + + def metadata(self, request): + """ + Return a dictionary of metadata about the view. + Used to return responses for OPTIONS requests. + + We override the default behavior, and add some extra information + about the required request body for POST and PUT operations. + """ + ret = super(GenericAPIView, self).metadata(request) + + actions = {} + for method in ('PUT', 'POST'): + if method not in self.allowed_methods: + continue + + cloned_request = clone_request(request, method) + try: + # Test global permissions + self.check_permissions(cloned_request) + # Test object permissions + if method == 'PUT': + self.get_object() + except (exceptions.APIException, PermissionDenied, Http404): + pass + else: + # If user has appropriate permissions for the view, include + # appropriate metadata about the fields that should be supplied. + serializer = self.get_serializer() + actions[method] = serializer.metadata() + + if actions: + ret['actions'] = actions + + return ret + + +########################################################## +### Concrete view classes that provide method handlers ### +### by composing the mixin classes with the base view. ### +########################################################## + +class CreateAPIView(mixins.CreateModelMixin, + GenericAPIView): + + """ + Concrete view for creating a model instance. + """ + def post(self, request, *args, **kwargs): + return self.create(request, *args, **kwargs) + + +class ListAPIView(mixins.ListModelMixin, + GenericAPIView): + """ + Concrete view for listing a queryset. + """ + def get(self, request, *args, **kwargs): + return self.list(request, *args, **kwargs) + + +class RetrieveAPIView(mixins.RetrieveModelMixin, + GenericAPIView): + """ + Concrete view for retrieving a model instance.
+ """ + def get(self, request, *args, **kwargs): + return self.retrieve(request, *args, **kwargs) + + +class DestroyAPIView(mixins.DestroyModelMixin, + GenericAPIView): + + """ + Concrete view for deleting a model instance. + """ + def delete(self, request, *args, **kwargs): + return self.destroy(request, *args, **kwargs) + + +class UpdateAPIView(mixins.UpdateModelMixin, + GenericAPIView): + + """ + Concrete view for updating a model instance. + """ + def put(self, request, *args, **kwargs): + return self.update(request, *args, **kwargs) + + def patch(self, request, *args, **kwargs): + return self.partial_update(request, *args, **kwargs) + + +class ListCreateAPIView(mixins.ListModelMixin, + mixins.CreateModelMixin, + GenericAPIView): + """ + Concrete view for listing a queryset or creating a model instance. + """ + def get(self, request, *args, **kwargs): + return self.list(request, *args, **kwargs) + + def post(self, request, *args, **kwargs): + return self.create(request, *args, **kwargs) + + +class RetrieveUpdateAPIView(mixins.RetrieveModelMixin, + mixins.UpdateModelMixin, + GenericAPIView): + """ + Concrete view for retrieving, updating a model instance. + """ + def get(self, request, *args, **kwargs): + return self.retrieve(request, *args, **kwargs) + + def put(self, request, *args, **kwargs): + return self.update(request, *args, **kwargs) + + def patch(self, request, *args, **kwargs): + return self.partial_update(request, *args, **kwargs) + + +class RetrieveDestroyAPIView(mixins.RetrieveModelMixin, + mixins.DestroyModelMixin, + GenericAPIView): + """ + Concrete view for retrieving or deleting a model instance. + """ + def get(self, request, *args, **kwargs): + return self.retrieve(request, *args, **kwargs) + + def delete(self, request, *args, **kwargs): + return self.destroy(request, *args, **kwargs) + + +class RetrieveUpdateDestroyAPIView(mixins.RetrieveModelMixin, + mixins.UpdateModelMixin, + mixins.DestroyModelMixin, + GenericAPIView): + """ + Concrete view for retrieving, updating or deleting a model instance. + """ + def get(self, request, *args, **kwargs): + return self.retrieve(request, *args, **kwargs) + + def put(self, request, *args, **kwargs): + return self.update(request, *args, **kwargs) + + def patch(self, request, *args, **kwargs): + return self.partial_update(request, *args, **kwargs) + + def delete(self, request, *args, **kwargs): + return self.destroy(request, *args, **kwargs) + + +########################## +### Deprecated classes ### +########################## + +class MultipleObjectAPIView(GenericAPIView): + def __init__(self, *args, **kwargs): + warnings.warn( + 'Subclassing `MultipleObjectAPIView` is due to be deprecated. ' + 'You should simply subclass `GenericAPIView` instead.', + PendingDeprecationWarning, stacklevel=2 + ) + super(MultipleObjectAPIView, self).__init__(*args, **kwargs) + + +class SingleObjectAPIView(GenericAPIView): + def __init__(self, *args, **kwargs): + warnings.warn( + 'Subclassing `SingleObjectAPIView` is due to be deprecated. ' + 'You should simply subclass `GenericAPIView` instead.', + PendingDeprecationWarning, stacklevel=2 + ) + super(SingleObjectAPIView, self).__init__(*args, **kwargs) diff --git a/awx/lib/site-packages/rest_framework/mixins.py b/awx/lib/site-packages/rest_framework/mixins.py new file mode 100644 index 0000000000..f11def6d41 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/mixins.py @@ -0,0 +1,184 @@ +""" +Basic building blocks for generic class based views. 
+ +We don't bind behaviour to http method handlers yet, +which allows mixin classes to be composed in interesting ways. +""" +from __future__ import unicode_literals + +from django.http import Http404 +from rest_framework import status +from rest_framework.response import Response +from rest_framework.request import clone_request +import warnings + + +def _get_validation_exclusions(obj, pk=None, slug_field=None, lookup_field=None): + """ + Given a model instance, and an optional pk and slug field, + return the full list of all other field names on that model. + + For use when performing full_clean on a model instance, + so we only clean the required fields. + """ + include = [] + + if pk: + # Pending deprecation + pk_field = obj._meta.pk + while pk_field.rel: + pk_field = pk_field.rel.to._meta.pk + include.append(pk_field.name) + + if slug_field: + # Pending deprecation + include.append(slug_field) + + if lookup_field and lookup_field != 'pk': + include.append(lookup_field) + + return [field.name for field in obj._meta.fields if field.name not in include] + + +class CreateModelMixin(object): + """ + Create a model instance. + """ + def create(self, request, *args, **kwargs): + serializer = self.get_serializer(data=request.DATA, files=request.FILES) + + if serializer.is_valid(): + self.pre_save(serializer.object) + self.object = serializer.save(force_insert=True) + self.post_save(self.object, created=True) + headers = self.get_success_headers(serializer.data) + return Response(serializer.data, status=status.HTTP_201_CREATED, + headers=headers) + + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def get_success_headers(self, data): + try: + return {'Location': data['url']} + except (TypeError, KeyError): + return {} + + +class ListModelMixin(object): + """ + List a queryset. + """ + empty_error = "Empty list and '%(class_name)s.allow_empty' is False." + + def list(self, request, *args, **kwargs): + self.object_list = self.filter_queryset(self.get_queryset()) + + # Default is to allow empty querysets. This can be altered by setting + # `.allow_empty = False`, to raise 404 errors on empty querysets. + if not self.allow_empty and not self.object_list: + warnings.warn( + 'The `allow_empty` parameter is due to be deprecated. ' + 'To use `allow_empty=False` style behavior, you should override ' + '`get_queryset()` and explicitly raise a 404 on empty querysets.', + PendingDeprecationWarning + ) + class_name = self.__class__.__name__ + error_msg = self.empty_error % {'class_name': class_name} + raise Http404(error_msg) + + # Switch between paginated or standard style responses + page = self.paginate_queryset(self.object_list) + if page is not None: + serializer = self.get_pagination_serializer(page) + else: + serializer = self.get_serializer(self.object_list, many=True) + + return Response(serializer.data) + + +class RetrieveModelMixin(object): + """ + Retrieve a model instance. + """ + def retrieve(self, request, *args, **kwargs): + self.object = self.get_object() + serializer = self.get_serializer(self.object) + return Response(serializer.data) + + +class UpdateModelMixin(object): + """ + Update a model instance.
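The mixins above compose with `GenericAPIView` exactly as the concrete generics earlier in this patch do; a hand-rolled equivalent of `ListCreateAPIView` might look like this sketch (illustrative, not part of the vendored source):

    from django.contrib.auth.models import User
    from rest_framework import generics, mixins

    class UserListCreate(mixins.ListModelMixin,
                         mixins.CreateModelMixin,
                         generics.GenericAPIView):
        model = User

        def get(self, request, *args, **kwargs):
            return self.list(request, *args, **kwargs)

        def post(self, request, *args, **kwargs):
            return self.create(request, *args, **kwargs)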
+ """ + def update(self, request, *args, **kwargs): + partial = kwargs.pop('partial', False) + self.object = self.get_object_or_none() + + if self.object is None: + created = True + save_kwargs = {'force_insert': True} + success_status_code = status.HTTP_201_CREATED + else: + created = False + save_kwargs = {'force_update': True} + success_status_code = status.HTTP_200_OK + + serializer = self.get_serializer(self.object, data=request.DATA, + files=request.FILES, partial=partial) + + if serializer.is_valid(): + self.pre_save(serializer.object) + self.object = serializer.save(**save_kwargs) + self.post_save(self.object, created=created) + return Response(serializer.data, status=success_status_code) + + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def partial_update(self, request, *args, **kwargs): + kwargs['partial'] = True + return self.update(request, *args, **kwargs) + + def get_object_or_none(self): + try: + return self.get_object() + except Http404: + # If this is a PUT-as-create operation, we need to ensure that + # we have relevant permissions, as if this was a POST request. + # This will either raise a PermissionDenied exception, + # or simply return None + self.check_permissions(clone_request(self.request, 'POST')) + + def pre_save(self, obj): + """ + Set any attributes on the object that are implicit in the request. + """ + # pk and/or slug attributes are implicit in the URL. + lookup = self.kwargs.get(self.lookup_field, None) + pk = self.kwargs.get(self.pk_url_kwarg, None) + slug = self.kwargs.get(self.slug_url_kwarg, None) + slug_field = slug and self.slug_field or None + + if lookup: + setattr(obj, self.lookup_field, lookup) + + if pk: + setattr(obj, 'pk', pk) + + if slug: + setattr(obj, slug_field, slug) + + # Ensure we clean the attributes so that we don't eg return integer + # pk using a string representation, as provided by the url conf kwarg. + if hasattr(obj, 'full_clean'): + exclude = _get_validation_exclusions(obj, pk, slug_field, self.lookup_field) + obj.full_clean(exclude) + + +class DestroyModelMixin(object): + """ + Destroy a model instance. + """ + def destroy(self, request, *args, **kwargs): + obj = self.get_object() + obj.delete() + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/awx/lib/site-packages/rest_framework/models.py b/awx/lib/site-packages/rest_framework/models.py new file mode 100644 index 0000000000..5b53a52641 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/models.py @@ -0,0 +1 @@ +# Just to keep things like ./manage.py test happy diff --git a/awx/lib/site-packages/rest_framework/negotiation.py b/awx/lib/site-packages/rest_framework/negotiation.py new file mode 100644 index 0000000000..4d205c0e85 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/negotiation.py @@ -0,0 +1,89 @@ +""" +Content negotiation deals with selecting an appropriate renderer given the +incoming request. Typically this will be based on the request's Accept header. 
+""" +from __future__ import unicode_literals +from django.http import Http404 +from rest_framework import exceptions +from rest_framework.settings import api_settings +from rest_framework.utils.mediatypes import order_by_precedence, media_type_matches +from rest_framework.utils.mediatypes import _MediaType + + +class BaseContentNegotiation(object): + def select_parser(self, request, parsers): + raise NotImplementedError('.select_parser() must be implemented') + + def select_renderer(self, request, renderers, format_suffix=None): + raise NotImplementedError('.select_renderer() must be implemented') + + +class DefaultContentNegotiation(BaseContentNegotiation): + settings = api_settings + + def select_parser(self, request, parsers): + """ + Given a list of parsers and a media type, return the appropriate + parser to handle the incoming request. + """ + for parser in parsers: + if media_type_matches(parser.media_type, request.content_type): + return parser + return None + + def select_renderer(self, request, renderers, format_suffix=None): + """ + Given a request and a list of renderers, return a two-tuple of: + (renderer, media type). + """ + # Allow URL style format override. eg. "?format=json + format_query_param = self.settings.URL_FORMAT_OVERRIDE + format = format_suffix or request.QUERY_PARAMS.get(format_query_param) + + if format: + renderers = self.filter_renderers(renderers, format) + + accepts = self.get_accept_list(request) + + # Check the acceptable media types against each renderer, + # attempting more specific media types first + # NB. The inner loop here isn't as bad as it first looks :) + # Worst case is we're looping over len(accept_list) * len(self.renderers) + for media_type_set in order_by_precedence(accepts): + for renderer in renderers: + for media_type in media_type_set: + if media_type_matches(renderer.media_type, media_type): + # Return the most specific media type as accepted. + if (_MediaType(renderer.media_type).precedence > + _MediaType(media_type).precedence): + # Eg client requests '*/*' + # Accepted media type is 'application/json' + return renderer, renderer.media_type + else: + # Eg client requests 'application/json; indent=8' + # Accepted media type is 'application/json; indent=8' + return renderer, media_type + + raise exceptions.NotAcceptable(available_renderers=renderers) + + def filter_renderers(self, renderers, format): + """ + If there is a '.json' style format suffix, filter the renderers + so that we only negotiation against those that accept that format. + """ + renderers = [renderer for renderer in renderers + if renderer.format == format] + if not renderers: + raise Http404 + return renderers + + def get_accept_list(self, request): + """ + Given the incoming request, return a tokenised list of media + type strings. + + Allows URL style accept override. eg. "?accept=application/json" + """ + header = request.META.get('HTTP_ACCEPT', '*/*') + header = request.QUERY_PARAMS.get(self.settings.URL_ACCEPT_OVERRIDE, header) + return [token.strip() for token in header.split(',')] diff --git a/awx/lib/site-packages/rest_framework/pagination.py b/awx/lib/site-packages/rest_framework/pagination.py new file mode 100644 index 0000000000..d51ea929be --- /dev/null +++ b/awx/lib/site-packages/rest_framework/pagination.py @@ -0,0 +1,94 @@ +""" +Pagination serializers determine the structure of the output that should +be used for paginated responses. 
+""" +from __future__ import unicode_literals +from rest_framework import serializers +from rest_framework.templatetags.rest_framework import replace_query_param + + +class NextPageField(serializers.Field): + """ + Field that returns a link to the next page in paginated results. + """ + page_field = 'page' + + def to_native(self, value): + if not value.has_next(): + return None + page = value.next_page_number() + request = self.context.get('request') + url = request and request.build_absolute_uri() or '' + return replace_query_param(url, self.page_field, page) + + +class PreviousPageField(serializers.Field): + """ + Field that returns a link to the previous page in paginated results. + """ + page_field = 'page' + + def to_native(self, value): + if not value.has_previous(): + return None + page = value.previous_page_number() + request = self.context.get('request') + url = request and request.build_absolute_uri() or '' + return replace_query_param(url, self.page_field, page) + + +class DefaultObjectSerializer(serializers.Field): + """ + If no object serializer is specified, then this serializer will be applied + as the default. + """ + + def __init__(self, source=None, context=None): + # Note: Swallow context kwarg - only required for eg. ModelSerializer. + super(DefaultObjectSerializer, self).__init__(source=source) + + +class PaginationSerializerOptions(serializers.SerializerOptions): + """ + An object that stores the options that may be provided to a + pagination serializer by using the inner `Meta` class. + + Accessible on the instance as `serializer.opts`. + """ + def __init__(self, meta): + super(PaginationSerializerOptions, self).__init__(meta) + self.object_serializer_class = getattr(meta, 'object_serializer_class', + DefaultObjectSerializer) + + +class BasePaginationSerializer(serializers.Serializer): + """ + A base class for pagination serializers to inherit from, + to make implementing custom serializers more easy. + """ + _options_class = PaginationSerializerOptions + results_field = 'results' + + def __init__(self, *args, **kwargs): + """ + Override init to add in the object serializer field on-the-fly. + """ + super(BasePaginationSerializer, self).__init__(*args, **kwargs) + results_field = self.results_field + object_serializer = self.opts.object_serializer_class + + if 'context' in kwargs: + context_kwarg = {'context': kwargs['context']} + else: + context_kwarg = {} + + self.fields[results_field] = object_serializer(source='object_list', **context_kwarg) + + +class PaginationSerializer(BasePaginationSerializer): + """ + A default implementation of a pagination serializer. + """ + count = serializers.Field(source='paginator.count') + next = NextPageField(source='*') + previous = PreviousPageField(source='*') diff --git a/awx/lib/site-packages/rest_framework/parsers.py b/awx/lib/site-packages/rest_framework/parsers.py new file mode 100644 index 0000000000..25be2e6abc --- /dev/null +++ b/awx/lib/site-packages/rest_framework/parsers.py @@ -0,0 +1,295 @@ +""" +Parsers are used to parse the content of incoming HTTP requests. + +They give us a generic way of being able to handle various media types +on the request, such as form content or json encoded data. 
+""" +from __future__ import unicode_literals +from django.conf import settings +from django.core.files.uploadhandler import StopFutureHandlers +from django.http import QueryDict +from django.http.multipartparser import MultiPartParser as DjangoMultiPartParser +from django.http.multipartparser import MultiPartParserError, parse_header, ChunkIter +from rest_framework.compat import yaml, etree +from rest_framework.exceptions import ParseError +from rest_framework.compat import six +import json +import datetime +import decimal + + +class DataAndFiles(object): + def __init__(self, data, files): + self.data = data + self.files = files + + +class BaseParser(object): + """ + All parsers should extend `BaseParser`, specifying a `media_type` + attribute, and overriding the `.parse()` method. + """ + + media_type = None + + def parse(self, stream, media_type=None, parser_context=None): + """ + Given a stream to read from, return the parsed representation. + Should return parsed data, or a `DataAndFiles` object consisting of the + parsed data and files. + """ + raise NotImplementedError(".parse() must be overridden.") + + +class JSONParser(BaseParser): + """ + Parses JSON-serialized data. + """ + + media_type = 'application/json' + + def parse(self, stream, media_type=None, parser_context=None): + """ + Returns a 2-tuple of `(data, files)`. + + `data` will be an object which is the parsed content of the response. + `files` will always be `None`. + """ + parser_context = parser_context or {} + encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET) + + try: + data = stream.read().decode(encoding) + return json.loads(data) + except ValueError as exc: + raise ParseError('JSON parse error - %s' % six.text_type(exc)) + + +class YAMLParser(BaseParser): + """ + Parses YAML-serialized data. + """ + + media_type = 'application/yaml' + + def parse(self, stream, media_type=None, parser_context=None): + """ + Returns a 2-tuple of `(data, files)`. + + `data` will be an object which is the parsed content of the response. + `files` will always be `None`. + """ + assert yaml, 'YAMLParser requires pyyaml to be installed' + + parser_context = parser_context or {} + encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET) + + try: + data = stream.read().decode(encoding) + return yaml.safe_load(data) + except (ValueError, yaml.parser.ParserError) as exc: + raise ParseError('YAML parse error - %s' % six.u(exc)) + + +class FormParser(BaseParser): + """ + Parser for form data. + """ + + media_type = 'application/x-www-form-urlencoded' + + def parse(self, stream, media_type=None, parser_context=None): + """ + Returns a 2-tuple of `(data, files)`. + + `data` will be a :class:`QueryDict` containing all the form parameters. + `files` will always be :const:`None`. + """ + parser_context = parser_context or {} + encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET) + data = QueryDict(stream.read(), encoding=encoding) + return data + + +class MultiPartParser(BaseParser): + """ + Parser for multipart form data, which may include file data. + """ + + media_type = 'multipart/form-data' + + def parse(self, stream, media_type=None, parser_context=None): + """ + Returns a DataAndFiles object. + + `.data` will be a `QueryDict` containing all the form parameters. + `.files` will be a `QueryDict` containing all the form files. 
+ """ + parser_context = parser_context or {} + request = parser_context['request'] + encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET) + meta = request.META + upload_handlers = request.upload_handlers + + try: + parser = DjangoMultiPartParser(meta, stream, upload_handlers, encoding) + data, files = parser.parse() + return DataAndFiles(data, files) + except MultiPartParserError as exc: + raise ParseError('Multipart form parse error - %s' % six.u(exc)) + + +class XMLParser(BaseParser): + """ + XML parser. + """ + + media_type = 'application/xml' + + def parse(self, stream, media_type=None, parser_context=None): + assert etree, 'XMLParser requires defusedxml to be installed' + + parser_context = parser_context or {} + encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET) + parser = etree.DefusedXMLParser(encoding=encoding) + try: + tree = etree.parse(stream, parser=parser, forbid_dtd=True) + except (etree.ParseError, ValueError) as exc: + raise ParseError('XML parse error - %s' % six.u(exc)) + data = self._xml_convert(tree.getroot()) + + return data + + def _xml_convert(self, element): + """ + convert the xml `element` into the corresponding python object + """ + + children = list(element) + + if len(children) == 0: + return self._type_convert(element.text) + else: + # if the fist child tag is list-item means all children are list-item + if children[0].tag == "list-item": + data = [] + for child in children: + data.append(self._xml_convert(child)) + else: + data = {} + for child in children: + data[child.tag] = self._xml_convert(child) + + return data + + def _type_convert(self, value): + """ + Converts the value returned by the XMl parse into the equivalent + Python type + """ + if value is None: + return value + + try: + return datetime.datetime.strptime(value, '%Y-%m-%d %H:%M:%S') + except ValueError: + pass + + try: + return int(value) + except ValueError: + pass + + try: + return decimal.Decimal(value) + except decimal.InvalidOperation: + pass + + return value + + +class FileUploadParser(BaseParser): + """ + Parser for file upload data. + """ + media_type = '*/*' + + def parse(self, stream, media_type=None, parser_context=None): + """ + Returns a DataAndFiles object. + + `.data` will be None (we expect request body to be a file content). + `.files` will be a `QueryDict` containing one 'file' element. + """ + + parser_context = parser_context or {} + request = parser_context['request'] + encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET) + meta = request.META + upload_handlers = request.upload_handlers + filename = self.get_filename(stream, media_type, parser_context) + + # Note that this code is extracted from Django's handling of + # file uploads in MultiPartParser. + content_type = meta.get('HTTP_CONTENT_TYPE', + meta.get('CONTENT_TYPE', '')) + try: + content_length = int(meta.get('HTTP_CONTENT_LENGTH', + meta.get('CONTENT_LENGTH', 0))) + except (ValueError, TypeError): + content_length = None + + # See if the handler will want to take care of the parsing. + for handler in upload_handlers: + result = handler.handle_raw_input(None, + meta, + content_length, + None, + encoding) + if result is not None: + return DataAndFiles(None, {'file': result[1]}) + + # This is the standard case. 
+ possible_sizes = [x.chunk_size for x in upload_handlers if x.chunk_size] + chunk_size = min([2 ** 31 - 4] + possible_sizes) + chunks = ChunkIter(stream, chunk_size) + counters = [0] * len(upload_handlers) + + for handler in upload_handlers: + try: + handler.new_file(None, filename, content_type, + content_length, encoding) + except StopFutureHandlers: + break + + for chunk in chunks: + for i, handler in enumerate(upload_handlers): + chunk_length = len(chunk) + chunk = handler.receive_data_chunk(chunk, counters[i]) + counters[i] += chunk_length + if chunk is None: + break + + for i, handler in enumerate(upload_handlers): + file_obj = handler.file_complete(counters[i]) + if file_obj: + return DataAndFiles(None, {'file': file_obj}) + raise ParseError("FileUpload parse error - " + "none of the upload handlers can handle the stream") + + def get_filename(self, stream, media_type, parser_context): + """ + Detects the uploaded file name. First searches a 'filename' url kwarg. + Then tries to parse the Content-Disposition header. + """ + try: + return parser_context['kwargs']['filename'] + except KeyError: + pass + + try: + meta = parser_context['request'].META + disposition = parse_header(meta['HTTP_CONTENT_DISPOSITION']) + return disposition[1]['filename'] + except (AttributeError, KeyError): + pass diff --git a/awx/lib/site-packages/rest_framework/permissions.py b/awx/lib/site-packages/rest_framework/permissions.py new file mode 100644 index 0000000000..45fcfd6658 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/permissions.py @@ -0,0 +1,174 @@ +""" +Provides a set of pluggable permission policies. +""" +from __future__ import unicode_literals +import inspect +import warnings + +SAFE_METHODS = ['GET', 'HEAD', 'OPTIONS'] + +from rest_framework.compat import oauth2_provider_scope, oauth2_constants + + +class BasePermission(object): + """ + A base class from which all permission classes should inherit. + """ + + def has_permission(self, request, view): + """ + Return `True` if permission is granted, `False` otherwise. + """ + return True + + def has_object_permission(self, request, view, obj): + """ + Return `True` if permission is granted, `False` otherwise. + """ + if len(inspect.getargspec(self.has_permission).args) == 4: + warnings.warn( + 'The `obj` argument in `has_permission` is deprecated. ' + 'Use `has_object_permission()` instead for object permissions.', + DeprecationWarning, stacklevel=2 + ) + return self.has_permission(request, view, obj) + return True + + +class AllowAny(BasePermission): + """ + Allow any access. + This isn't strictly required, since you could use an empty + permission_classes list, but it's useful because it makes the intention + more explicit. + """ + def has_permission(self, request, view): + return True + + +class IsAuthenticated(BasePermission): + """ + Allows access only to authenticated users. + """ + + def has_permission(self, request, view): + if request.user and request.user.is_authenticated(): + return True + return False + + +class IsAdminUser(BasePermission): + """ + Allows access only to admin users. + """ + + def has_permission(self, request, view): + if request.user and request.user.is_staff: + return True + return False + + +class IsAuthenticatedOrReadOnly(BasePermission): + """ + The request is authenticated as a user, or is a read-only request.
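A sketch of wiring up `FileUploadParser`: a `filename` URL kwarg satisfies `get_filename()` before the Content-Disposition fallback is tried (URL conf not shown; names are illustrative, not part of the patch):

    from rest_framework.parsers import FileUploadParser
    from rest_framework.response import Response
    from rest_framework.views import APIView

    class FileUploadView(APIView):
        parser_classes = (FileUploadParser,)

        def put(self, request, filename, format=None):
            file_obj = request.FILES['file']  # the single 'file' element above
            return Response({'name': filename, 'size': file_obj.size})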
+ """ + + def has_permission(self, request, view): + if (request.method in SAFE_METHODS or + request.user and + request.user.is_authenticated()): + return True + return False + + +class DjangoModelPermissions(BasePermission): + """ + The request is authenticated using `django.contrib.auth` permissions. + See: https://docs.djangoproject.com/en/dev/topics/auth/#permissions + + It ensures that the user is authenticated, and has the appropriate + `add`/`change`/`delete` permissions on the model. + + This permission can only be applied against view classes that + provide a `.model` or `.queryset` attribute. + """ + + # Map methods into required permission codes. + # Override this if you need to also provide 'view' permissions, + # or if you want to provide custom permission codes. + perms_map = { + 'GET': [], + 'OPTIONS': [], + 'HEAD': [], + 'POST': ['%(app_label)s.add_%(model_name)s'], + 'PUT': ['%(app_label)s.change_%(model_name)s'], + 'PATCH': ['%(app_label)s.change_%(model_name)s'], + 'DELETE': ['%(app_label)s.delete_%(model_name)s'], + } + + authenticated_users_only = True + + def get_required_permissions(self, method, model_cls): + """ + Given a model and an HTTP method, return the list of permission + codes that the user is required to have. + """ + kwargs = { + 'app_label': model_cls._meta.app_label, + 'model_name': model_cls._meta.module_name + } + return [perm % kwargs for perm in self.perms_map[method]] + + def has_permission(self, request, view): + model_cls = getattr(view, 'model', None) + queryset = getattr(view, 'queryset', None) + + if model_cls is None and queryset is not None: + model_cls = queryset.model + + # Workaround to ensure DjangoModelPermissions are not applied + # to the root view when using DefaultRouter. + if model_cls is None and getattr(view, '_ignore_model_permissions'): + return True + + assert model_cls, ('Cannot apply DjangoModelPermissions on a view that' + ' does not have `.model` or `.queryset` property.') + + perms = self.get_required_permissions(request.method, model_cls) + + if (request.user and + (request.user.is_authenticated() or not self.authenticated_users_only) and + request.user.has_perms(perms)): + return True + return False + + +class DjangoModelPermissionsOrAnonReadOnly(DjangoModelPermissions): + """ + Similar to DjangoModelPermissions, except that anonymous users are + allowed read-only access. + """ + authenticated_users_only = False + + +class TokenHasReadWriteScope(BasePermission): + """ + The request is authenticated as a user and the token used has the right scope + """ + + def has_permission(self, request, view): + token = request.auth + read_only = request.method in SAFE_METHODS + + if not token: + return False + + if hasattr(token, 'resource'): # OAuth 1 + return read_only or not request.auth.resource.is_readonly + elif hasattr(token, 'scope'): # OAuth 2 + required = oauth2_constants.READ if read_only else oauth2_constants.WRITE + return oauth2_provider_scope.check(required, request.auth.scope) + + assert False, ('TokenHasReadWriteScope requires either the' + '`OAuthAuthentication` or `OAuth2Authentication` authentication ' + 'class to be used.') diff --git a/awx/lib/site-packages/rest_framework/relations.py b/awx/lib/site-packages/rest_framework/relations.py new file mode 100644 index 0000000000..e3675b5124 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/relations.py @@ -0,0 +1,623 @@ +""" +Serializer fields that deal with relationships. 
+ +These fields allow you to specify the style that should be used to represent +model relationships, including hyperlinks, primary keys, or slugs. +""" +from __future__ import unicode_literals +from django.core.exceptions import ObjectDoesNotExist, ValidationError +from django.core.urlresolvers import resolve, get_script_prefix, NoReverseMatch +from django import forms +from django.db.models.fields import BLANK_CHOICE_DASH +from django.forms import widgets +from django.forms.models import ModelChoiceIterator +from django.utils.translation import ugettext_lazy as _ +from rest_framework.fields import Field, WritableField, get_component +from rest_framework.reverse import reverse +from rest_framework.compat import urlparse +from rest_framework.compat import smart_text +import warnings + + +##### Relational fields ##### + + +# Not actually Writable, but subclasses may need to be. +class RelatedField(WritableField): + """ + Base class for related model fields. + + This represents a relationship using the unicode representation of the target. + """ + widget = widgets.Select + many_widget = widgets.SelectMultiple + form_field_class = forms.ChoiceField + many_form_field_class = forms.MultipleChoiceField + + cache_choices = False + empty_label = None + read_only = True + many = False + + def __init__(self, *args, **kwargs): + + # 'null' is to be deprecated in favor of 'required' + if 'null' in kwargs: + warnings.warn('The `null` keyword argument is deprecated. ' + 'Use the `required` keyword argument instead.', + DeprecationWarning, stacklevel=2) + kwargs['required'] = not kwargs.pop('null') + + queryset = kwargs.pop('queryset', None) + self.many = kwargs.pop('many', self.many) + if self.many: + self.widget = self.many_widget + self.form_field_class = self.many_form_field_class + + kwargs['read_only'] = kwargs.pop('read_only', self.read_only) + super(RelatedField, self).__init__(*args, **kwargs) + + if not self.required: + self.empty_label = BLANK_CHOICE_DASH[0][1] + + self.queryset = queryset + + def initialize(self, parent, field_name): + super(RelatedField, self).initialize(parent, field_name) + if self.queryset is None and not self.read_only: + try: + manager = getattr(self.parent.opts.model, self.source or field_name) + if hasattr(manager, 'related'): # Forward + self.queryset = manager.related.model._default_manager.all() + else: # Reverse + self.queryset = manager.field.rel.to._default_manager.all() + except Exception: + msg = ('Serializer related fields must include a `queryset`' + + ' argument or set `read_only=True`') + raise Exception(msg) + + ### We need this stuff to make form choices work... + + def prepare_value(self, obj): + return self.to_native(obj) + + def label_from_instance(self, obj): + """ + Return a readable representation for use with eg. select widgets. + """ + desc = smart_text(obj) + ident = smart_text(self.to_native(obj)) + if desc == ident: + return desc + return "%s - %s" % (desc, ident) + + def _get_queryset(self): + return self._queryset + + def _set_queryset(self, queryset): + self._queryset = queryset + self.widget.choices = self.choices + + queryset = property(_get_queryset, _set_queryset) + + def _get_choices(self): + # If self._choices is set, then somebody must have manually set + # the property self.choices. In this case, just return self._choices. + if hasattr(self, '_choices'): + return self._choices + + # Otherwise, execute the QuerySet in self.queryset to determine the + # choices dynamically.
Return a fresh ModelChoiceIterator that has not been + # consumed. Note that we're instantiating a new ModelChoiceIterator *each* + # time _get_choices() is called (and, thus, each time self.choices is + # accessed) so that we can ensure the QuerySet has not been consumed. This + # construct might look complicated but it allows for lazy evaluation of + # the queryset. + return ModelChoiceIterator(self) + + def _set_choices(self, value): + # Setting choices also sets the choices on the widget. + # choices can be any iterable, but we call list() on it because + # it will be consumed more than once. + self._choices = self.widget.choices = list(value) + + choices = property(_get_choices, _set_choices) + + ### Regular serializer stuff... + + def field_to_native(self, obj, field_name): + try: + if self.source == '*': + return self.to_native(obj) + + source = self.source or field_name + value = obj + + for component in source.split('.'): + value = get_component(value, component) + if value is None: + break + except ObjectDoesNotExist: + return None + + if value is None: + return None + + if self.many: + return [self.to_native(item) for item in value.all()] + return self.to_native(value) + + def field_from_native(self, data, files, field_name, into): + if self.read_only: + return + + try: + if self.many: + try: + # Form data + value = data.getlist(field_name) + if value == [''] or value == []: + raise KeyError + except AttributeError: + # Non-form data + value = data[field_name] + else: + value = data[field_name] + except KeyError: + if self.partial: + return + value = [] if self.many else None + + if value in (None, '') and self.required: + raise ValidationError(self.error_messages['required']) + elif value in (None, ''): + into[(self.source or field_name)] = None + elif self.many: + into[(self.source or field_name)] = [self.from_native(item) for item in value] + else: + into[(self.source or field_name)] = self.from_native(value) + + +### PrimaryKey relationships + +class PrimaryKeyRelatedField(RelatedField): + """ + Represents a relationship as a pk value. + """ + read_only = False + + default_error_messages = { + 'does_not_exist': _("Invalid pk '%s' - object does not exist."), + 'incorrect_type': _('Incorrect type. Expected pk value, received %s.'), + } + + # TODO: Remove these field hacks... + def prepare_value(self, obj): + return self.to_native(obj.pk) + + def label_from_instance(self, obj): + """ + Return a readable representation for use with eg. select widgets. 
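In serializer terms, the pk field above is used like this sketch (not part of the patch; the `Album`/`tracks` relationship is hypothetical):

    from rest_framework import serializers

    class AlbumSerializer(serializers.ModelSerializer):
        # Renders the reverse 'tracks' relation as a list of pk values.
        tracks = serializers.PrimaryKeyRelatedField(many=True, read_only=True)

        class Meta:
            model = Album  # assumed to exist, with a related 'tracks' manager
            fields = ('title', 'tracks')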
+ """ + desc = smart_text(obj) + ident = smart_text(self.to_native(obj.pk)) + if desc == ident: + return desc + return "%s - %s" % (desc, ident) + + # TODO: Possibly change this to just take `obj`, through prob less performant + def to_native(self, pk): + return pk + + def from_native(self, data): + if self.queryset is None: + raise Exception('Writable related fields must include a `queryset` argument') + + try: + return self.queryset.get(pk=data) + except ObjectDoesNotExist: + msg = self.error_messages['does_not_exist'] % smart_text(data) + raise ValidationError(msg) + except (TypeError, ValueError): + received = type(data).__name__ + msg = self.error_messages['incorrect_type'] % received + raise ValidationError(msg) + + def field_to_native(self, obj, field_name): + if self.many: + # To-many relationship + + queryset = None + if not self.source: + # Prefer obj.serializable_value for performance reasons + try: + queryset = obj.serializable_value(field_name) + except AttributeError: + pass + if queryset is None: + # RelatedManager (reverse relationship) + source = self.source or field_name + queryset = obj + for component in source.split('.'): + queryset = get_component(queryset, component) + + # Forward relationship + return [self.to_native(item.pk) for item in queryset.all()] + + # To-one relationship + try: + # Prefer obj.serializable_value for performance reasons + pk = obj.serializable_value(self.source or field_name) + except AttributeError: + # RelatedObject (reverse relationship) + try: + pk = getattr(obj, self.source or field_name).pk + except ObjectDoesNotExist: + return None + + # Forward relationship + return self.to_native(pk) + + +### Slug relationships + + +class SlugRelatedField(RelatedField): + """ + Represents a relationship using a unique field on the target. + """ + read_only = False + + default_error_messages = { + 'does_not_exist': _("Object with %s=%s does not exist."), + 'invalid': _('Invalid value.'), + } + + def __init__(self, *args, **kwargs): + self.slug_field = kwargs.pop('slug_field', None) + assert self.slug_field, 'slug_field is required' + super(SlugRelatedField, self).__init__(*args, **kwargs) + + def to_native(self, obj): + return getattr(obj, self.slug_field) + + def from_native(self, data): + if self.queryset is None: + raise Exception('Writable related fields must include a `queryset` argument') + + try: + return self.queryset.get(**{self.slug_field: data}) + except ObjectDoesNotExist: + raise ValidationError(self.error_messages['does_not_exist'] % + (self.slug_field, smart_text(data))) + except (TypeError, ValueError): + msg = self.error_messages['invalid'] + raise ValidationError(msg) + + +### Hyperlinked relationships + +class HyperlinkedRelatedField(RelatedField): + """ + Represents a relationship using hyperlinking. + """ + read_only = False + lookup_field = 'pk' + + default_error_messages = { + 'no_match': _('Invalid hyperlink - No URL match'), + 'incorrect_match': _('Invalid hyperlink - Incorrect URL match'), + 'configuration_error': _('Invalid hyperlink due to configuration error'), + 'does_not_exist': _("Invalid hyperlink - object does not exist."), + 'incorrect_type': _('Incorrect type. 
Expected url string, received %s.'), + } + + # These are all pending deprecation + pk_url_kwarg = 'pk' + slug_field = 'slug' + slug_url_kwarg = None # Defaults to same as `slug_field` unless overridden + + def __init__(self, *args, **kwargs): + try: + self.view_name = kwargs.pop('view_name') + except KeyError: + raise ValueError("Hyperlinked field requires 'view_name' kwarg") + + self.lookup_field = kwargs.pop('lookup_field', self.lookup_field) + self.format = kwargs.pop('format', None) + + # These are pending deprecation + if 'pk_url_kwarg' in kwargs: + msg = 'pk_url_kwarg is pending deprecation. Use lookup_field instead.' + warnings.warn(msg, PendingDeprecationWarning, stacklevel=2) + if 'slug_url_kwarg' in kwargs: + msg = 'slug_url_kwarg is pending deprecation. Use lookup_field instead.' + warnings.warn(msg, PendingDeprecationWarning, stacklevel=2) + if 'slug_field' in kwargs: + msg = 'slug_field is pending deprecation. Use lookup_field instead.' + warnings.warn(msg, PendingDeprecationWarning, stacklevel=2) + + self.pk_url_kwarg = kwargs.pop('pk_url_kwarg', self.pk_url_kwarg) + self.slug_field = kwargs.pop('slug_field', self.slug_field) + default_slug_kwarg = self.slug_url_kwarg or self.slug_field + self.slug_url_kwarg = kwargs.pop('slug_url_kwarg', default_slug_kwarg) + + super(HyperlinkedRelatedField, self).__init__(*args, **kwargs) + + def get_url(self, obj, view_name, request, format): + """ + Given an object, return the URL that hyperlinks to the object. + + May raise a `NoReverseMatch` if the `view_name` and `lookup_field` + attributes are not configured to correctly match the URL conf. + """ + lookup_field = getattr(obj, self.lookup_field) + kwargs = {self.lookup_field: lookup_field} + try: + return reverse(view_name, kwargs=kwargs, request=request, format=format) + except NoReverseMatch: + pass + + if self.pk_url_kwarg != 'pk': + # Only try pk if it has been explicitly set. + # Otherwise, the default `lookup_field = 'pk'` has us covered. + pk = obj.pk + kwargs = {self.pk_url_kwarg: pk} + try: + return reverse(view_name, kwargs=kwargs, request=request, format=format) + except NoReverseMatch: + pass + + slug = getattr(obj, self.slug_field, None) + if slug is not None: + # Only try slug if it corresponds to an attribute on the object. + kwargs = {self.slug_url_kwarg: slug} + try: + ret = reverse(view_name, kwargs=kwargs, request=request, format=format) + if self.slug_field == 'slug' and self.slug_url_kwarg == 'slug': + # If the lookup succeeds using the default slug params, + # then `slug_field` is being used implicitly, and we + # need to warn about the pending deprecation. + msg = 'Implicit slug field hyperlinked fields are pending deprecation. ' \ + 'You should set `lookup_field=slug` on the HyperlinkedRelatedField.' + warnings.warn(msg, PendingDeprecationWarning, stacklevel=2) + return ret + except NoReverseMatch: + pass + + raise NoReverseMatch() + + def get_object(self, queryset, view_name, view_args, view_kwargs): + """ + Return the object corresponding to a matched URL. + + Takes the matched URL conf arguments, and the queryset, and should + return an object instance, or raise an `ObjectDoesNotExist` exception.
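Usage of the hyperlinked field above, per its `view_name`/`lookup_field` contract (a sketch; the 'user-detail' route name is an assumption, and the request must be passed in the serializer context, as the warning in `to_native()` below notes):

    from rest_framework import serializers

    class BookmarkSerializer(serializers.Serializer):
        url = serializers.CharField()
        owner = serializers.HyperlinkedRelatedField(view_name='user-detail',
                                                    lookup_field='username',
                                                    read_only=True)

    # serializer = BookmarkSerializer(bookmark, context={'request': request})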
+ """ + lookup = view_kwargs.get(self.lookup_field, None) + pk = view_kwargs.get(self.pk_url_kwarg, None) + slug = view_kwargs.get(self.slug_url_kwarg, None) + + if lookup is not None: + filter_kwargs = {self.lookup_field: lookup} + elif pk is not None: + filter_kwargs = {'pk': pk} + elif slug is not None: + filter_kwargs = {self.slug_field: slug} + else: + raise ObjectDoesNotExist() + + return queryset.get(**filter_kwargs) + + def to_native(self, obj): + view_name = self.view_name + request = self.context.get('request', None) + format = self.format or self.context.get('format', None) + + if request is None: + msg = ( + "Using `HyperlinkedRelatedField` without including the request " + "in the serializer context is deprecated. " + "Add `context={'request': request}` when instantiating " + "the serializer." + ) + warnings.warn(msg, DeprecationWarning, stacklevel=4) + + # If the object has not yet been saved then we cannot hyperlink to it. + if getattr(obj, 'pk', None) is None: + return + + # Return the hyperlink, or error if incorrectly configured. + try: + return self.get_url(obj, view_name, request, format) + except NoReverseMatch: + msg = ( + 'Could not resolve URL for hyperlinked relationship using ' + 'view name "%s". You may have failed to include the related ' + 'model in your API, or incorrectly configured the ' + '`lookup_field` attribute on this field.' + ) + raise Exception(msg % view_name) + + def from_native(self, value): + # Convert URL -> model instance pk + # TODO: Use values_list + queryset = self.queryset + if queryset is None: + raise Exception('Writable related fields must include a `queryset` argument') + + try: + http_prefix = value.startswith(('http:', 'https:')) + except AttributeError: + msg = self.error_messages['incorrect_type'] + raise ValidationError(msg % type(value).__name__) + + if http_prefix: + # If needed convert absolute URLs to relative path + value = urlparse.urlparse(value).path + prefix = get_script_prefix() + if value.startswith(prefix): + value = '/' + value[len(prefix):] + + try: + match = resolve(value) + except Exception: + raise ValidationError(self.error_messages['no_match']) + + if match.view_name != self.view_name: + raise ValidationError(self.error_messages['incorrect_match']) + + try: + return self.get_object(queryset, match.view_name, + match.args, match.kwargs) + except (ObjectDoesNotExist, TypeError, ValueError): + raise ValidationError(self.error_messages['does_not_exist']) + + +class HyperlinkedIdentityField(Field): + """ + Represents the instance, or a property on the instance, using hyperlinking. + """ + lookup_field = 'pk' + read_only = True + + # These are all pending deprecation + pk_url_kwarg = 'pk' + slug_field = 'slug' + slug_url_kwarg = None # Defaults to same as `slug_field` unless overridden + + def __init__(self, *args, **kwargs): + try: + self.view_name = kwargs.pop('view_name') + except KeyError: + msg = "HyperlinkedIdentityField requires 'view_name' argument" + raise ValueError(msg) + + self.format = kwargs.pop('format', None) + lookup_field = kwargs.pop('lookup_field', None) + self.lookup_field = lookup_field or self.lookup_field + + # These are pending deprecation + if 'pk_url_kwarg' in kwargs: + msg = 'pk_url_kwarg is pending deprecation. Use lookup_field instead.' + warnings.warn(msg, PendingDeprecationWarning, stacklevel=2) + if 'slug_url_kwarg' in kwargs: + msg = 'slug_url_kwarg is pending deprecation. Use lookup_field instead.' 
+ warnings.warn(msg, PendingDeprecationWarning, stacklevel=2) + if 'slug_field' in kwargs: + msg = 'slug_field is pending deprecation. Use lookup_field instead.' + warnings.warn(msg, PendingDeprecationWarning, stacklevel=2) + + self.slug_field = kwargs.pop('slug_field', self.slug_field) + default_slug_kwarg = self.slug_url_kwarg or self.slug_field + self.pk_url_kwarg = kwargs.pop('pk_url_kwarg', self.pk_url_kwarg) + self.slug_url_kwarg = kwargs.pop('slug_url_kwarg', default_slug_kwarg) + + super(HyperlinkedIdentityField, self).__init__(*args, **kwargs) + + def field_to_native(self, obj, field_name): + request = self.context.get('request', None) + format = self.context.get('format', None) + view_name = self.view_name + + if request is None: + warnings.warn("Using `HyperlinkedIdentityField` without including the " + "request in the serializer context is deprecated. " + "Add `context={'request': request}` when instantiating the serializer.", + DeprecationWarning, stacklevel=4) + + # By default use whatever format is given for the current context + # unless the target is a different type to the source. + # + # Eg. Consider a HyperlinkedIdentityField pointing from a json + # representation to an html property of that representation... + # + # '/snippets/1/' should link to '/snippets/1/highlight/' + # ...but... + # '/snippets/1/.json' should link to '/snippets/1/highlight/.html' + if format and self.format and self.format != format: + format = self.format + + # Return the hyperlink, or error if incorrectly configured. + try: + return self.get_url(obj, view_name, request, format) + except NoReverseMatch: + msg = ( + 'Could not resolve URL for hyperlinked relationship using ' + 'view name "%s". You may have failed to include the related ' + 'model in your API, or incorrectly configured the ' + '`lookup_field` attribute on this field.' + ) + raise Exception(msg % view_name) + + def get_url(self, obj, view_name, request, format): + """ + Given an object, return the URL that hyperlinks to the object. + + May raise a `NoReverseMatch` if the `view_name` and `lookup_field` + attributes are not configured to correctly match the URL conf. + """ + lookup_field = getattr(obj, self.lookup_field) + kwargs = {self.lookup_field: lookup_field} + try: + return reverse(view_name, kwargs=kwargs, request=request, format=format) + except NoReverseMatch: + pass + + if self.pk_url_kwarg != 'pk': + # Only try pk lookup if it has been explicitly set. + # Otherwise, the default `lookup_field = 'pk'` has us covered. + kwargs = {self.pk_url_kwarg: obj.pk} + try: + return reverse(view_name, kwargs=kwargs, request=request, format=format) + except NoReverseMatch: + pass + + slug = getattr(obj, self.slug_field, None) + if slug: + # Only use slug lookup if a slug field exists on the model + kwargs = {self.slug_url_kwarg: slug} + try: + return reverse(view_name, kwargs=kwargs, request=request, format=format) + except NoReverseMatch: + pass + + raise NoReverseMatch() + + +### Old-style many classes for backwards compat + +class ManyRelatedField(RelatedField): + def __init__(self, *args, **kwargs): + warnings.warn('`ManyRelatedField()` is deprecated. ' + 'Use `RelatedField(many=True)` instead.', + DeprecationWarning, stacklevel=2) + kwargs['many'] = True + super(ManyRelatedField, self).__init__(*args, **kwargs) + + +class ManyPrimaryKeyRelatedField(PrimaryKeyRelatedField): + def __init__(self, *args, **kwargs): + warnings.warn('`ManyPrimaryKeyRelatedField()` is deprecated. 
' + 'Use `PrimaryKeyRelatedField(many=True)` instead.', + DeprecationWarning, stacklevel=2) + kwargs['many'] = True + super(ManyPrimaryKeyRelatedField, self).__init__(*args, **kwargs) + + +class ManySlugRelatedField(SlugRelatedField): + def __init__(self, *args, **kwargs): + warnings.warn('`ManySlugRelatedField()` is deprecated. ' + 'Use `SlugRelatedField(many=True)` instead.', + DeprecationWarning, stacklevel=2) + kwargs['many'] = True + super(ManySlugRelatedField, self).__init__(*args, **kwargs) + + +class ManyHyperlinkedRelatedField(HyperlinkedRelatedField): + def __init__(self, *args, **kwargs): + warnings.warn('`ManyHyperlinkedRelatedField()` is deprecated. ' + 'Use `HyperlinkedRelatedField(many=True)` instead.', + DeprecationWarning, stacklevel=2) + kwargs['many'] = True + super(ManyHyperlinkedRelatedField, self).__init__(*args, **kwargs) diff --git a/awx/lib/site-packages/rest_framework/renderers.py b/awx/lib/site-packages/rest_framework/renderers.py new file mode 100644 index 0000000000..b2fe43eac2 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/renderers.py @@ -0,0 +1,573 @@ +""" +Renderers are used to serialize a response into specific media types. + +They give us a generic way of being able to handle various media types +on the response, such as JSON encoded data or HTML output. + +REST framework also provides an HTML renderer that renders the browsable API. +""" +from __future__ import unicode_literals + +import copy +import json +from django import forms +from django.http.multipartparser import parse_header +from django.template import RequestContext, loader, Template +from django.utils.xmlutils import SimplerXMLGenerator +from rest_framework.compat import StringIO +from rest_framework.compat import six +from rest_framework.compat import smart_text +from rest_framework.compat import yaml +from rest_framework.exceptions import ConfigurationError +from rest_framework.settings import api_settings +from rest_framework.request import clone_request +from rest_framework.utils import encoders +from rest_framework.utils.breadcrumbs import get_breadcrumbs +from rest_framework.utils.formatting import get_view_name, get_view_description +from rest_framework import exceptions, parsers, status, VERSION + + +class BaseRenderer(object): + """ + All renderers should extend this class, setting the `media_type` + and `format` attributes, and override the `.render()` method. + """ + + media_type = None + format = None + charset = 'utf-8' + + def render(self, data, accepted_media_type=None, renderer_context=None): + raise NotImplementedError('Renderer class requires .render() to be implemented') + + +class JSONRenderer(BaseRenderer): + """ + Renderer which serializes to JSON. + Applies JSON's backslash-u character escaping for non-ascii characters. + """ + + media_type = 'application/json' + format = 'json' + encoder_class = encoders.JSONEncoder + ensure_ascii = True + charset = 'utf-8' + # Note that JSON encodings must be utf-8, utf-16 or utf-32. + # See: http://www.ietf.org/rfc/rfc4627.txt + + def render(self, data, accepted_media_type=None, renderer_context=None): + """ + Render `data` into JSON. + """ + if data is None: + return '' + + # If 'indent' is provided in the context, then pretty print the result. + # E.g. If we're being called by the BrowsableAPIRenderer. + renderer_context = renderer_context or {} + indent = renderer_context.get('indent', None) + + if accepted_media_type: + # If the media type looks like 'application/json; indent=4', + # then pretty print the result.
+ base_media_type, params = parse_header(accepted_media_type.encode('ascii')) + indent = params.get('indent', indent) + try: + indent = max(min(int(indent), 8), 0) + except (ValueError, TypeError): + indent = None + + ret = json.dumps(data, cls=self.encoder_class, + indent=indent, ensure_ascii=self.ensure_ascii) + + # On python 2.x json.dumps() returns bytestrings if ensure_ascii=True, + # but if ensure_ascii=False, the return type is underspecified, + # and may (or may not) be unicode. + # On python 3.x json.dumps() returns unicode strings. + if isinstance(ret, six.text_type): + return bytes(ret.encode(self.charset)) + return ret + + +class UnicodeJSONRenderer(JSONRenderer): + ensure_ascii = False + charset = 'utf-8' + """ + Renderer which serializes to JSON. + Does *not* apply JSON's character escaping for non-ascii characters. + """ + + +class JSONPRenderer(JSONRenderer): + """ + Renderer which serializes to json, + wrapping the json output in a callback function. + """ + + media_type = 'application/javascript' + format = 'jsonp' + callback_parameter = 'callback' + default_callback = 'callback' + + def get_callback(self, renderer_context): + """ + Determine the name of the callback to wrap around the json output. + """ + request = renderer_context.get('request', None) + params = request and request.QUERY_PARAMS or {} + return params.get(self.callback_parameter, self.default_callback) + + def render(self, data, accepted_media_type=None, renderer_context=None): + """ + Renders into jsonp, wrapping the json output in a callback function. + + Clients may set the callback function name using a query parameter + on the URL, for example: ?callback=exampleCallbackName + """ + renderer_context = renderer_context or {} + callback = self.get_callback(renderer_context) + json = super(JSONPRenderer, self).render(data, accepted_media_type, + renderer_context) + return callback.encode(self.charset) + b'(' + json + b');' + + +class XMLRenderer(BaseRenderer): + """ + Renderer which serializes to XML. + """ + + media_type = 'application/xml' + format = 'xml' + charset = 'utf-8' + + def render(self, data, accepted_media_type=None, renderer_context=None): + """ + Renders *obj* into serialized XML. + """ + if data is None: + return '' + + stream = StringIO() + + xml = SimplerXMLGenerator(stream, self.charset) + xml.startDocument() + xml.startElement("root", {}) + + self._to_xml(xml, data) + + xml.endElement("root") + xml.endDocument() + return stream.getvalue() + + def _to_xml(self, xml, data): + if isinstance(data, (list, tuple)): + for item in data: + xml.startElement("list-item", {}) + self._to_xml(xml, item) + xml.endElement("list-item") + + elif isinstance(data, dict): + for key, value in six.iteritems(data): + xml.startElement(key, {}) + self._to_xml(xml, value) + xml.endElement(key) + + elif data is None: + # Don't output any value + pass + + else: + xml.characters(smart_text(data)) + + +class YAMLRenderer(BaseRenderer): + """ + Renderer which serializes to YAML. + """ + + media_type = 'application/yaml' + format = 'yaml' + encoder = encoders.SafeDumper + charset = 'utf-8' + + def render(self, data, accepted_media_type=None, renderer_context=None): + """ + Renders *obj* into serialized YAML. + """ + assert yaml, 'YAMLRenderer requires pyyaml to be installed' + + if data is None: + return '' + + return yaml.dump(data, stream=None, encoding=self.charset, Dumper=self.encoder) + + +class TemplateHTMLRenderer(BaseRenderer): + """ + An HTML renderer for use with templates. 
+ + The data supplied to the Response object should be a dictionary that will + be used as context for the template. + + The template name is determined by (in order of preference): + + 1. An explicit `.template_name` attribute set on the response. + 2. An explicit `.template_name` attribute set on this class. + 3. The return result of calling `view.get_template_names()`. + + For example: + data = {'users': User.objects.all()} + return Response(data, template_name='users.html') + + For pre-rendered HTML, see StaticHTMLRenderer. + """ + + media_type = 'text/html' + format = 'html' + template_name = None + exception_template_names = [ + '%(status_code)s.html', + 'api_exception.html' + ] + charset = 'utf-8' + + def render(self, data, accepted_media_type=None, renderer_context=None): + """ + Renders data to HTML, using Django's standard template rendering. + + The template name is determined by (in order of preference): + + 1. An explicit .template_name set on the response. + 2. An explicit .template_name set on this class. + 3. The return result of calling view.get_template_names(). + """ + renderer_context = renderer_context or {} + view = renderer_context['view'] + request = renderer_context['request'] + response = renderer_context['response'] + + if response.exception: + template = self.get_exception_template(response) + else: + template_names = self.get_template_names(response, view) + template = self.resolve_template(template_names) + + context = self.resolve_context(data, request, response) + return template.render(context) + + def resolve_template(self, template_names): + return loader.select_template(template_names) + + def resolve_context(self, data, request, response): + if response.exception: + data['status_code'] = response.status_code + return RequestContext(request, data) + + def get_template_names(self, response, view): + if response.template_name: + return [response.template_name] + elif self.template_name: + return [self.template_name] + elif hasattr(view, 'get_template_names'): + return view.get_template_names() + raise ConfigurationError('Returned a template response with no template_name') + + def get_exception_template(self, response): + template_names = [name % {'status_code': response.status_code} + for name in self.exception_template_names] + + try: + # Try to find an appropriate error template + return self.resolve_template(template_names) + except Exception: + # Fall back to using eg '404 Not Found' + return Template('%d %s' % (response.status_code, + response.status_text.title())) + + +# Note, subclass TemplateHTMLRenderer simply for the exception behavior +class StaticHTMLRenderer(TemplateHTMLRenderer): + """ + An HTML renderer class that simply returns pre-rendered HTML. + + The data supplied to the Response object should be a string representing + the pre-rendered HTML content. + + For example: + data = '<html><body>example</body></html>' + return Response(data) + + For template rendered HTML, see TemplateHTMLRenderer. 
+ """ + media_type = 'text/html' + format = 'html' + charset = 'utf-8' + + def render(self, data, accepted_media_type=None, renderer_context=None): + renderer_context = renderer_context or {} + response = renderer_context['response'] + + if response and response.exception: + request = renderer_context['request'] + template = self.get_exception_template(response) + context = self.resolve_context(data, request, response) + return template.render(context) + + return data + + +class BrowsableAPIRenderer(BaseRenderer): + """ + HTML renderer used to self-document the API. + """ + media_type = 'text/html' + format = 'api' + template = 'rest_framework/api.html' + charset = 'utf-8' + + def get_default_renderer(self, view): + """ + Return an instance of the first valid renderer. + (Don't use another documenting renderer.) + """ + renderers = [renderer for renderer in view.renderer_classes + if not issubclass(renderer, BrowsableAPIRenderer)] + if not renderers: + return None + return renderers[0]() + + def get_content(self, renderer, data, + accepted_media_type, renderer_context): + """ + Get the content as if it had been rendered by the default + non-documenting renderer. + """ + if not renderer: + return '[No renderers were found]' + + renderer_context['indent'] = 4 + content = renderer.render(data, accepted_media_type, renderer_context) + + if renderer.charset is None: + return '[%d bytes of binary content]' % len(content) + + return content + + def show_form_for_method(self, view, method, request, obj): + """ + Returns True if a form should be shown for this method. + """ + if not method in view.allowed_methods: + return # Not a valid method + + if not api_settings.FORM_METHOD_OVERRIDE: + return # Cannot use form overloading + + try: + view.check_permissions(request) + if obj is not None: + view.check_object_permissions(request, obj) + except exceptions.APIException: + return False # Doesn't have permissions + return True + + def serializer_to_form_fields(self, serializer): + fields = {} + for k, v in serializer.get_fields().items(): + if getattr(v, 'read_only', True): + continue + + kwargs = {} + kwargs['required'] = v.required + + #if getattr(v, 'queryset', None): + # kwargs['queryset'] = v.queryset + + if getattr(v, 'choices', None) is not None: + kwargs['choices'] = v.choices + + if getattr(v, 'regex', None) is not None: + kwargs['regex'] = v.regex + + if getattr(v, 'widget', None): + widget = copy.deepcopy(v.widget) + kwargs['widget'] = widget + + if getattr(v, 'default', None) is not None: + kwargs['initial'] = v.default + + if getattr(v, 'label', None) is not None: + kwargs['label'] = v.label + + if getattr(v, 'help_text', None) is not None: + kwargs['help_text'] = v.help_text + + fields[k] = v.form_field_class(**kwargs) + + return fields + + def _get_form(self, view, method, request): + # We need to impersonate a request with the correct method, + # so that eg. any dynamic get_serializer_class methods return the + # correct form for each method. + restore = view.request + request = clone_request(request, method) + view.request = request + try: + return self.get_form(view, method, request) + finally: + view.request = restore + + def _get_raw_data_form(self, view, method, request, media_types): + # We need to impersonate a request with the correct method, + # so that eg. any dynamic get_serializer_class methods return the + # correct form for each method. 
+        restore = view.request
+        request = clone_request(request, method)
+        view.request = request
+        try:
+            return self.get_raw_data_form(view, method, request, media_types)
+        finally:
+            view.request = restore
+
+    def get_form(self, view, method, request):
+        """
+        Get a form, possibly bound to either the input or output data.
+        In the absence of the Resource having an associated form then
+        provide a form that can be used to submit arbitrary content.
+        """
+        obj = getattr(view, 'object', None)
+        if not self.show_form_for_method(view, method, request, obj):
+            return
+
+        if method in ('DELETE', 'OPTIONS'):
+            return True  # Don't actually need to return a form
+
+        if not getattr(view, 'get_serializer', None) or not parsers.FormParser in view.parser_classes:
+            return
+
+        serializer = view.get_serializer(instance=obj)
+        fields = self.serializer_to_form_fields(serializer)
+
+        # Creating an on-the-fly form; see:
+        # http://stackoverflow.com/questions/3915024/dynamically-creating-classes-python
+        OnTheFlyForm = type(str("OnTheFlyForm"), (forms.Form,), fields)
+        data = (obj is not None) and serializer.data or None
+        form_instance = OnTheFlyForm(data)
+        return form_instance
+
+    def get_raw_data_form(self, view, method, request, media_types):
+        """
+        Returns a form that allows for arbitrary content types to be tunneled
+        via standard HTML forms.
+        (Which are typically application/x-www-form-urlencoded)
+        """
+
+        # If we're not using content overloading there's no point in supplying a generic form,
+        # as the view won't treat the form's value as the content of the request.
+        if not (api_settings.FORM_CONTENT_OVERRIDE
+                and api_settings.FORM_CONTENTTYPE_OVERRIDE):
+            return None
+
+        # Check permissions
+        obj = getattr(view, 'object', None)
+        if not self.show_form_for_method(view, method, request, obj):
+            return
+
+        content_type_field = api_settings.FORM_CONTENTTYPE_OVERRIDE
+        content_field = api_settings.FORM_CONTENT_OVERRIDE
+        choices = [(media_type, media_type) for media_type in media_types]
+        initial = media_types[0]
+
+        # NB. http://jacobian.org/writing/dynamic-form-generation/
+        class GenericContentForm(forms.Form):
+            def __init__(self):
+                super(GenericContentForm, self).__init__()
+
+                self.fields[content_type_field] = forms.ChoiceField(
+                    label='Media type',
+                    choices=choices,
+                    initial=initial
+                )
+                self.fields[content_field] = forms.CharField(
+                    label='Content',
+                    widget=forms.Textarea
+                )
+
+        return GenericContentForm()
+
+    def get_name(self, view):
+        return get_view_name(view.__class__, getattr(view, 'suffix', None))
+
+    def get_description(self, view):
+        return get_view_description(view.__class__, html=True)
+
+    def get_breadcrumbs(self, request):
+        return get_breadcrumbs(request.path)
+
+    def render(self, data, accepted_media_type=None, renderer_context=None):
+        """
+        Render the HTML for the browsable API representation.
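+
+        The context assembled below (rendered content, per-method forms,
+        raw data forms, breadcrumbs) is passed to the
+        'rest_framework/api.html' template.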
+ """ + accepted_media_type = accepted_media_type or '' + renderer_context = renderer_context or {} + + view = renderer_context['view'] + request = renderer_context['request'] + response = renderer_context['response'] + media_types = [parser.media_type for parser in view.parser_classes] + + renderer = self.get_default_renderer(view) + content = self.get_content(renderer, data, accepted_media_type, renderer_context) + + put_form = self._get_form(view, 'PUT', request) + post_form = self._get_form(view, 'POST', request) + patch_form = self._get_form(view, 'PATCH', request) + delete_form = self._get_form(view, 'DELETE', request) + options_form = self._get_form(view, 'OPTIONS', request) + + raw_data_put_form = self._get_raw_data_form(view, 'PUT', request, media_types) + raw_data_post_form = self._get_raw_data_form(view, 'POST', request, media_types) + raw_data_patch_form = self._get_raw_data_form(view, 'PATCH', request, media_types) + raw_data_put_or_patch_form = raw_data_put_form or raw_data_patch_form + + name = self.get_name(view) + description = self.get_description(view) + breadcrumb_list = self.get_breadcrumbs(request) + + template = loader.get_template(self.template) + context = RequestContext(request, { + 'content': content, + 'view': view, + 'request': request, + 'response': response, + 'description': description, + 'name': name, + 'version': VERSION, + 'breadcrumblist': breadcrumb_list, + 'allowed_methods': view.allowed_methods, + 'available_formats': [renderer.format for renderer in view.renderer_classes], + + 'put_form': put_form, + 'post_form': post_form, + 'patch_form': patch_form, + 'delete_form': delete_form, + 'options_form': options_form, + + 'raw_data_put_form': raw_data_put_form, + 'raw_data_post_form': raw_data_post_form, + 'raw_data_patch_form': raw_data_patch_form, + 'raw_data_put_or_patch_form': raw_data_put_or_patch_form, + + 'api_settings': api_settings + }) + + ret = template.render(context) + + # Munge DELETE Response code to allow us to return content + # (Do this *after* we've rendered the template so that we include + # the normal deletion response code in the output) + if response.status_code == status.HTTP_204_NO_CONTENT: + response.status_code = status.HTTP_200_OK + + return ret diff --git a/awx/lib/site-packages/rest_framework/request.py b/awx/lib/site-packages/rest_framework/request.py new file mode 100644 index 0000000000..0d88ebc7e4 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/request.py @@ -0,0 +1,369 @@ +""" +The Request class is used as a wrapper around the standard request object. + +The wrapped request then offers a richer API, in particular : + + - content automatically parsed according to `Content-Type` header, + and available as `request.DATA` + - full support of PUT method, including support for file uploads + - form overloading of HTTP method, content type and content +""" +from __future__ import unicode_literals +from django.conf import settings +from django.http import QueryDict +from django.http.multipartparser import parse_header +from django.utils.datastructures import MultiValueDict +from rest_framework import HTTP_HEADER_ENCODING +from rest_framework import exceptions +from rest_framework.compat import BytesIO +from rest_framework.settings import api_settings + + +def is_form_media_type(media_type): + """ + Return True if the media type is a valid form media type. 
+ """ + base_media_type, params = parse_header(media_type.encode(HTTP_HEADER_ENCODING)) + return (base_media_type == 'application/x-www-form-urlencoded' or + base_media_type == 'multipart/form-data') + + +class Empty(object): + """ + Placeholder for unset attributes. + Cannot use `None`, as that may be a valid value. + """ + pass + + +def _hasattr(obj, name): + return not getattr(obj, name) is Empty + + +def clone_request(request, method): + """ + Internal helper method to clone a request, replacing with a different + HTTP method. Used for checking permissions against other methods. + """ + ret = Request(request=request._request, + parsers=request.parsers, + authenticators=request.authenticators, + negotiator=request.negotiator, + parser_context=request.parser_context) + ret._data = request._data + ret._files = request._files + ret._content_type = request._content_type + ret._stream = request._stream + ret._method = method + if hasattr(request, '_user'): + ret._user = request._user + if hasattr(request, '_auth'): + ret._auth = request._auth + if hasattr(request, '_authenticator'): + ret._authenticator = request._authenticator + return ret + + +class Request(object): + """ + Wrapper allowing to enhance a standard `HttpRequest` instance. + + Kwargs: + - request(HttpRequest). The original request instance. + - parsers_classes(list/tuple). The parsers to use for parsing the + request content. + - authentication_classes(list/tuple). The authentications used to try + authenticating the request's user. + """ + + _METHOD_PARAM = api_settings.FORM_METHOD_OVERRIDE + _CONTENT_PARAM = api_settings.FORM_CONTENT_OVERRIDE + _CONTENTTYPE_PARAM = api_settings.FORM_CONTENTTYPE_OVERRIDE + + def __init__(self, request, parsers=None, authenticators=None, + negotiator=None, parser_context=None): + self._request = request + self.parsers = parsers or () + self.authenticators = authenticators or () + self.negotiator = negotiator or self._default_negotiator() + self.parser_context = parser_context + self._data = Empty + self._files = Empty + self._method = Empty + self._content_type = Empty + self._stream = Empty + + if self.parser_context is None: + self.parser_context = {} + self.parser_context['request'] = self + self.parser_context['encoding'] = request.encoding or settings.DEFAULT_CHARSET + + def _default_negotiator(self): + return api_settings.DEFAULT_CONTENT_NEGOTIATION_CLASS() + + @property + def method(self): + """ + Returns the HTTP method. + + This allows the `method` to be overridden by using a hidden `form` + field on a form POST request. + """ + if not _hasattr(self, '_method'): + self._load_method_and_content_type() + return self._method + + @property + def content_type(self): + """ + Returns the content type header. + + This should be used instead of `request.META.get('HTTP_CONTENT_TYPE')`, + as it allows the content type to be overridden by using a hidden form + field on a form POST request. + """ + if not _hasattr(self, '_content_type'): + self._load_method_and_content_type() + return self._content_type + + @property + def stream(self): + """ + Returns an object that may be used to stream the request content. + """ + if not _hasattr(self, '_stream'): + self._load_stream() + return self._stream + + @property + def QUERY_PARAMS(self): + """ + More semantically correct name for request.GET. + """ + return self._request.GET + + @property + def DATA(self): + """ + Parses the request body and returns the data. 
+
+        Similar to usual behaviour of `request.POST`, except that it handles
+        arbitrary parsers, and also works on methods other than POST (eg PUT).
+        """
+        if not _hasattr(self, '_data'):
+            self._load_data_and_files()
+        return self._data
+
+    @property
+    def FILES(self):
+        """
+        Parses the request body and returns any files uploaded in the request.
+
+        Similar to usual behaviour of `request.FILES`, except that it handles
+        arbitrary parsers, and also works on methods other than POST (eg PUT).
+        """
+        if not _hasattr(self, '_files'):
+            self._load_data_and_files()
+        return self._files
+
+    @property
+    def user(self):
+        """
+        Returns the user associated with the current request, as authenticated
+        by the authentication classes provided to the request.
+        """
+        if not hasattr(self, '_user'):
+            self._authenticate()
+        return self._user
+
+    @user.setter
+    def user(self, value):
+        """
+        Sets the user on the current request. This is necessary to maintain
+        compatibility with django.contrib.auth where the user property is
+        set in the login and logout functions.
+        """
+        self._user = value
+
+    @property
+    def auth(self):
+        """
+        Returns any non-user authentication information associated with the
+        request, such as an authentication token.
+        """
+        if not hasattr(self, '_auth'):
+            self._authenticate()
+        return self._auth
+
+    @auth.setter
+    def auth(self, value):
+        """
+        Sets any non-user authentication information associated with the
+        request, such as an authentication token.
+        """
+        self._auth = value
+
+    @property
+    def successful_authenticator(self):
+        """
+        Return the instance of the authentication instance class that was used
+        to authenticate the request, or `None`.
+        """
+        if not hasattr(self, '_authenticator'):
+            self._authenticate()
+        return self._authenticator
+
+    def _load_data_and_files(self):
+        """
+        Parses the request content into self.DATA and self.FILES.
+        """
+        if not _hasattr(self, '_content_type'):
+            self._load_method_and_content_type()
+
+        if not _hasattr(self, '_data'):
+            self._data, self._files = self._parse()
+
+    def _load_method_and_content_type(self):
+        """
+        Sets the method and content_type, and then checks if they've
+        been overridden.
+        """
+        self._content_type = self.META.get('HTTP_CONTENT_TYPE',
+                                           self.META.get('CONTENT_TYPE', ''))
+
+        self._perform_form_overloading()
+
+        if not _hasattr(self, '_method'):
+            self._method = self._request.method
+
+            if self._method == 'POST':
+                # Allow X-HTTP-METHOD-OVERRIDE header
+                self._method = self.META.get('HTTP_X_HTTP_METHOD_OVERRIDE',
+                                             self._method)
+
+    def _load_stream(self):
+        """
+        Return the content body of the request, as a stream.
+        """
+        try:
+            content_length = int(self.META.get('CONTENT_LENGTH',
+                                               self.META.get('HTTP_CONTENT_LENGTH')))
+        except (ValueError, TypeError):
+            content_length = 0
+
+        if content_length == 0:
+            self._stream = None
+        elif hasattr(self._request, 'read'):
+            self._stream = self._request
+        else:
+            self._stream = BytesIO(self.raw_post_data)
+
+    def _perform_form_overloading(self):
+        """
+        If this is a form POST request, then we need to check if the method and
+        content/content_type have been overridden by setting them in hidden
+        form fields or not.
+        """
+
+        USE_FORM_OVERLOADING = (
+            self._METHOD_PARAM or
+            (self._CONTENT_PARAM and self._CONTENTTYPE_PARAM)
+        )
+
+        # We only need to use form overloading on form POST requests.
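+        # For example (hypothetical field name): an HTML form POSTing
+        # _method=PUT is treated as a PUT request from this point on.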
+ if (not USE_FORM_OVERLOADING + or self._request.method != 'POST' + or not is_form_media_type(self._content_type)): + return + + # At this point we're committed to parsing the request as form data. + self._data = self._request.POST + self._files = self._request.FILES + + # Method overloading - change the method and remove the param from the content. + if (self._METHOD_PARAM and + self._METHOD_PARAM in self._data): + self._method = self._data[self._METHOD_PARAM].upper() + + # Content overloading - modify the content type, and force re-parse. + if (self._CONTENT_PARAM and + self._CONTENTTYPE_PARAM and + self._CONTENT_PARAM in self._data and + self._CONTENTTYPE_PARAM in self._data): + self._content_type = self._data[self._CONTENTTYPE_PARAM] + self._stream = BytesIO(self._data[self._CONTENT_PARAM].encode(HTTP_HEADER_ENCODING)) + self._data, self._files = (Empty, Empty) + + def _parse(self): + """ + Parse the request content, returning a two-tuple of (data, files) + + May raise an `UnsupportedMediaType`, or `ParseError` exception. + """ + stream = self.stream + media_type = self.content_type + + if stream is None or media_type is None: + empty_data = QueryDict('', self._request._encoding) + empty_files = MultiValueDict() + return (empty_data, empty_files) + + parser = self.negotiator.select_parser(self, self.parsers) + + if not parser: + raise exceptions.UnsupportedMediaType(media_type) + + parsed = parser.parse(stream, media_type, self.parser_context) + + # Parser classes may return the raw data, or a + # DataAndFiles object. Unpack the result as required. + try: + return (parsed.data, parsed.files) + except AttributeError: + empty_files = MultiValueDict() + return (parsed, empty_files) + + def _authenticate(self): + """ + Attempt to authenticate the request using each authentication instance + in turn. + Returns a three-tuple of (authenticator, user, authtoken). + """ + for authenticator in self.authenticators: + try: + user_auth_tuple = authenticator.authenticate(self) + except exceptions.APIException: + self._not_authenticated() + raise + + if not user_auth_tuple is None: + self._authenticator = authenticator + self._user, self._auth = user_auth_tuple + return + + self._not_authenticated() + + def _not_authenticated(self): + """ + Return a three-tuple of (authenticator, user, authtoken), representing + an unauthenticated request. + + By default this will be (None, AnonymousUser, None). + """ + self._authenticator = None + + if api_settings.UNAUTHENTICATED_USER: + self._user = api_settings.UNAUTHENTICATED_USER() + else: + self._user = None + + if api_settings.UNAUTHENTICATED_TOKEN: + self._auth = api_settings.UNAUTHENTICATED_TOKEN() + else: + self._auth = None + + def __getattr__(self, attr): + """ + Proxy other attributes to the underlying HttpRequest object. + """ + return getattr(self._request, attr) diff --git a/awx/lib/site-packages/rest_framework/response.py b/awx/lib/site-packages/rest_framework/response.py new file mode 100644 index 0000000000..3ee52ae01f --- /dev/null +++ b/awx/lib/site-packages/rest_framework/response.py @@ -0,0 +1,84 @@ +""" +The Response class in REST framework is similar to HTTPResponse, except that +it is initialized with unrendered data, instead of a pre-rendered string. + +The appropriate renderer is called during Django's template response rendering. 
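+
+For example (a minimal sketch):
+
+    data = {'users': ['alice', 'bob']}
+    return Response(data, status=200)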
+""" +from __future__ import unicode_literals +from django.core.handlers.wsgi import STATUS_CODE_TEXT +from django.template.response import SimpleTemplateResponse +from rest_framework.compat import six + + +class Response(SimpleTemplateResponse): + """ + An HttpResponse that allows it's data to be rendered into + arbitrary media types. + """ + + def __init__(self, data=None, status=200, + template_name=None, headers=None, + exception=False, content_type=None): + """ + Alters the init arguments slightly. + For example, drop 'template_name', and instead use 'data'. + + Setting 'renderer' and 'media_type' will typically be deferred, + For example being set automatically by the `APIView`. + """ + super(Response, self).__init__(None, status=status) + self.data = data + self.template_name = template_name + self.exception = exception + self.content_type = content_type + + if headers: + for name, value in six.iteritems(headers): + self[name] = value + + @property + def rendered_content(self): + renderer = getattr(self, 'accepted_renderer', None) + media_type = getattr(self, 'accepted_media_type', None) + context = getattr(self, 'renderer_context', None) + + assert renderer, ".accepted_renderer not set on Response" + assert media_type, ".accepted_media_type not set on Response" + assert context, ".renderer_context not set on Response" + context['response'] = self + + charset = renderer.charset + content_type = self.content_type + + if content_type is None and charset is not None: + content_type = "{0}; charset={1}".format(media_type, charset) + elif content_type is None: + content_type = media_type + self['Content-Type'] = content_type + + ret = renderer.render(self.data, media_type, context) + if isinstance(ret, six.text_type): + assert charset, 'renderer returned unicode, and did not specify ' \ + 'a charset value.' + return bytes(ret.encode(charset)) + return ret + + @property + def status_text(self): + """ + Returns reason text corresponding to our HTTP response status code. + Provided for convenience. + """ + # TODO: Deprecate and use a template tag instead + # TODO: Status code text for RFC 6585 status codes + return STATUS_CODE_TEXT.get(self.status_code, '') + + def __getstate__(self): + """ + Remove attributes from the response that shouldn't be cached + """ + state = super(Response, self).__getstate__() + for key in ('accepted_renderer', 'renderer_context', 'data'): + if key in state: + del state[key] + return state diff --git a/awx/lib/site-packages/rest_framework/reverse.py b/awx/lib/site-packages/rest_framework/reverse.py new file mode 100644 index 0000000000..a51b07f540 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/reverse.py @@ -0,0 +1,23 @@ +""" +Provide reverse functions that return fully qualified URLs +""" +from __future__ import unicode_literals +from django.core.urlresolvers import reverse as django_reverse +from django.utils.functional import lazy + + +def reverse(viewname, args=None, kwargs=None, request=None, format=None, **extra): + """ + Same as `django.core.urlresolvers.reverse`, but optionally takes a request + and returns a fully qualified URL, using the request to get the base URL. 
+ """ + if format is not None: + kwargs = kwargs or {} + kwargs['format'] = format + url = django_reverse(viewname, args=args, kwargs=kwargs, **extra) + if request: + return request.build_absolute_uri(url) + return url + + +reverse_lazy = lazy(reverse, str) diff --git a/awx/lib/site-packages/rest_framework/routers.py b/awx/lib/site-packages/rest_framework/routers.py new file mode 100644 index 0000000000..6c5fd00483 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/routers.py @@ -0,0 +1,249 @@ +""" +Routers provide a convenient and consistent way of automatically +determining the URL conf for your API. + +They are used by simply instantiating a Router class, and then registering +all the required ViewSets with that router. + +For example, you might have a `urls.py` that looks something like this: + + router = routers.DefaultRouter() + router.register('users', UserViewSet, 'user') + router.register('accounts', AccountViewSet, 'account') + + urlpatterns = router.urls +""" +from __future__ import unicode_literals + +from collections import namedtuple +from rest_framework import views +from rest_framework.compat import patterns, url +from rest_framework.decorators import api_view +from rest_framework.response import Response +from rest_framework.reverse import reverse +from rest_framework.urlpatterns import format_suffix_patterns + + +Route = namedtuple('Route', ['url', 'mapping', 'name', 'initkwargs']) + + +def replace_methodname(format_string, methodname): + """ + Partially format a format_string, swapping out any + '{methodname}' or '{methodnamehyphen}' components. + """ + methodnamehyphen = methodname.replace('_', '-') + ret = format_string + ret = ret.replace('{methodname}', methodname) + ret = ret.replace('{methodnamehyphen}', methodnamehyphen) + return ret + + +class BaseRouter(object): + def __init__(self): + self.registry = [] + + def register(self, prefix, viewset, base_name=None): + if base_name is None: + base_name = self.get_default_base_name(viewset) + self.registry.append((prefix, viewset, base_name)) + + def get_default_base_name(self, viewset): + """ + If `base_name` is not specified, attempt to automatically determine + it from the viewset. + """ + raise NotImplemented('get_default_base_name must be overridden') + + def get_urls(self): + """ + Return a list of URL patterns, given the registered viewsets. + """ + raise NotImplemented('get_urls must be overridden') + + @property + def urls(self): + if not hasattr(self, '_urls'): + self._urls = patterns('', *self.get_urls()) + return self._urls + + +class SimpleRouter(BaseRouter): + routes = [ + # List route. + Route( + url=r'^{prefix}/$', + mapping={ + 'get': 'list', + 'post': 'create' + }, + name='{basename}-list', + initkwargs={'suffix': 'List'} + ), + # Detail route. + Route( + url=r'^{prefix}/{lookup}/$', + mapping={ + 'get': 'retrieve', + 'put': 'update', + 'patch': 'partial_update', + 'delete': 'destroy' + }, + name='{basename}-detail', + initkwargs={'suffix': 'Instance'} + ), + # Dynamically generated routes. + # Generated using @action or @link decorators on methods of the viewset. + Route( + url=r'^{prefix}/{lookup}/{methodname}/$', + mapping={ + '{httpmethod}': '{methodname}', + }, + name='{basename}-{methodnamehyphen}', + initkwargs={} + ), + ] + + def get_default_base_name(self, viewset): + """ + If `base_name` is not specified, attempt to automatically determine + it from the viewset. 
+ """ + model_cls = getattr(viewset, 'model', None) + queryset = getattr(viewset, 'queryset', None) + if model_cls is None and queryset is not None: + model_cls = queryset.model + + assert model_cls, '`name` not argument not specified, and could ' \ + 'not automatically determine the name from the viewset, as ' \ + 'it does not have a `.model` or `.queryset` attribute.' + + return model_cls._meta.object_name.lower() + + def get_routes(self, viewset): + """ + Augment `self.routes` with any dynamically generated routes. + + Returns a list of the Route namedtuple. + """ + + # Determine any `@action` or `@link` decorated methods on the viewset + dynamic_routes = [] + for methodname in dir(viewset): + attr = getattr(viewset, methodname) + httpmethods = getattr(attr, 'bind_to_methods', None) + if httpmethods: + dynamic_routes.append((httpmethods, methodname)) + + ret = [] + for route in self.routes: + if route.mapping == {'{httpmethod}': '{methodname}'}: + # Dynamic routes (@link or @action decorator) + for httpmethods, methodname in dynamic_routes: + initkwargs = route.initkwargs.copy() + initkwargs.update(getattr(viewset, methodname).kwargs) + ret.append(Route( + url=replace_methodname(route.url, methodname), + mapping=dict((httpmethod, methodname) for httpmethod in httpmethods), + name=replace_methodname(route.name, methodname), + initkwargs=initkwargs, + )) + else: + # Standard route + ret.append(route) + + return ret + + def get_method_map(self, viewset, method_map): + """ + Given a viewset, and a mapping of http methods to actions, + return a new mapping which only includes any mappings that + are actually implemented by the viewset. + """ + bound_methods = {} + for method, action in method_map.items(): + if hasattr(viewset, action): + bound_methods[method] = action + return bound_methods + + def get_lookup_regex(self, viewset): + """ + Given a viewset, return the portion of URL regex that is used + to match against a single instance. + """ + base_regex = '(?P<{lookup_field}>[^/]+)' + lookup_field = getattr(viewset, 'lookup_field', 'pk') + return base_regex.format(lookup_field=lookup_field) + + def get_urls(self): + """ + Use the registered viewsets to generate a list of URL patterns. + """ + ret = [] + + for prefix, viewset, basename in self.registry: + lookup = self.get_lookup_regex(viewset) + routes = self.get_routes(viewset) + + for route in routes: + + # Only actions which actually exist on the viewset will be bound + mapping = self.get_method_map(viewset, route.mapping) + if not mapping: + continue + + # Build the url pattern + regex = route.url.format(prefix=prefix, lookup=lookup) + view = viewset.as_view(mapping, **route.initkwargs) + name = route.name.format(basename=basename) + ret.append(url(regex, view, name=name)) + + return ret + + +class DefaultRouter(SimpleRouter): + """ + The default router extends the SimpleRouter, but also adds in a default + API root view, and adds format suffix patterns to the URLs. + """ + include_root_view = True + include_format_suffixes = True + + def get_api_root_view(self): + """ + Return a view to use as the API root. 
+ """ + api_root_dict = {} + list_name = self.routes[0].name + for prefix, viewset, basename in self.registry: + api_root_dict[prefix] = list_name.format(basename=basename) + + class APIRoot(views.APIView): + _ignore_model_permissions = True + + def get(self, request, format=None): + ret = {} + for key, url_name in api_root_dict.items(): + ret[key] = reverse(url_name, request=request, format=format) + return Response(ret) + + return APIRoot.as_view() + + def get_urls(self): + """ + Generate the list of URL patterns, including a default root view + for the API, and appending `.json` style format suffixes. + """ + urls = [] + + if self.include_root_view: + root_url = url(r'^$', self.get_api_root_view(), name='api-root') + urls.append(root_url) + + default_urls = super(DefaultRouter, self).get_urls() + urls.extend(default_urls) + + if self.include_format_suffixes: + urls = format_suffix_patterns(urls) + + return urls diff --git a/awx/lib/site-packages/rest_framework/runtests/__init__.py b/awx/lib/site-packages/rest_framework/runtests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/rest_framework/runtests/runcoverage.py b/awx/lib/site-packages/rest_framework/runtests/runcoverage.py new file mode 100644 index 0000000000..ce11b213e5 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/runtests/runcoverage.py @@ -0,0 +1,78 @@ +#!/usr/bin/env python +""" +Useful tool to run the test suite for rest_framework and generate a coverage report. +""" + +# http://ericholscher.com/blog/2009/jun/29/enable-setuppy-test-your-django-apps/ +# http://www.travisswicegood.com/2010/01/17/django-virtualenv-pip-and-fabric/ +# http://code.djangoproject.com/svn/django/trunk/tests/runtests.py +import os +import sys + +# fix sys path so we don't need to setup PYTHONPATH +sys.path.append(os.path.join(os.path.dirname(__file__), "../..")) +os.environ['DJANGO_SETTINGS_MODULE'] = 'rest_framework.runtests.settings' + +from coverage import coverage + + +def main(): + """Run the tests for rest_framework and generate a coverage report.""" + + cov = coverage() + cov.erase() + cov.start() + + from django.conf import settings + from django.test.utils import get_runner + TestRunner = get_runner(settings) + + if hasattr(TestRunner, 'func_name'): + # Pre 1.2 test runners were just functions, + # and did not support the 'failfast' option. + import warnings + warnings.warn( + 'Function-based test runners are deprecated. Test runners should be classes with a run_tests() method.', + DeprecationWarning + ) + failures = TestRunner(['tests']) + else: + test_runner = TestRunner() + failures = test_runner.run_tests(['tests']) + cov.stop() + + # Discover the list of all modules that we should test coverage for + import rest_framework + + project_dir = os.path.dirname(rest_framework.__file__) + cov_files = [] + + for (path, dirs, files) in os.walk(project_dir): + # Drop tests and runtests directories from the test coverage report + if os.path.basename(path) in ['tests', 'runtests', 'migrations']: + continue + + # Drop the compat and six modules from coverage, since we're not interested in the coverage + # of modules which are specifically for resolving environment dependant imports. + # (Because we'll end up getting different coverage reports for it for each environment) + if 'compat.py' in files: + files.remove('compat.py') + + if 'six.py' in files: + files.remove('six.py') + + # Same applies to template tags module. 
+ # This module has to include branching on Django versions, + # so it's never possible for it to have full coverage. + if 'rest_framework.py' in files: + files.remove('rest_framework.py') + + cov_files.extend([os.path.join(path, file) for file in files if file.endswith('.py')]) + + cov.report(cov_files) + if '--html' in sys.argv: + cov.html_report(cov_files, directory='coverage') + sys.exit(failures) + +if __name__ == '__main__': + main() diff --git a/awx/lib/site-packages/rest_framework/runtests/runtests.py b/awx/lib/site-packages/rest_framework/runtests/runtests.py new file mode 100644 index 0000000000..da36d23fcc --- /dev/null +++ b/awx/lib/site-packages/rest_framework/runtests/runtests.py @@ -0,0 +1,48 @@ +#!/usr/bin/env python + +# http://ericholscher.com/blog/2009/jun/29/enable-setuppy-test-your-django-apps/ +# http://www.travisswicegood.com/2010/01/17/django-virtualenv-pip-and-fabric/ +# http://code.djangoproject.com/svn/django/trunk/tests/runtests.py +import os +import sys + +# fix sys path so we don't need to setup PYTHONPATH +sys.path.append(os.path.join(os.path.dirname(__file__), "../..")) +os.environ['DJANGO_SETTINGS_MODULE'] = 'rest_framework.runtests.settings' + +import django +from django.conf import settings +from django.test.utils import get_runner + + +def usage(): + return """ + Usage: python runtests.py [UnitTestClass].[method] + + You can pass the Class name of the `UnitTestClass` you want to test. + + Append a method name if you only want to test a specific method of that class. + """ + + +def main(): + TestRunner = get_runner(settings) + + test_runner = TestRunner() + if len(sys.argv) == 2: + test_case = '.' + sys.argv[1] + elif len(sys.argv) == 1: + test_case = '' + else: + print(usage()) + sys.exit(1) + test_module_name = 'rest_framework.tests' + if django.VERSION[0] == 1 and django.VERSION[1] < 6: + test_module_name = 'tests' + + failures = test_runner.run_tests([test_module_name + test_case]) + + sys.exit(failures) + +if __name__ == '__main__': + main() diff --git a/awx/lib/site-packages/rest_framework/runtests/settings.py b/awx/lib/site-packages/rest_framework/runtests/settings.py new file mode 100644 index 0000000000..9dd7b545e6 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/runtests/settings.py @@ -0,0 +1,149 @@ +# Django settings for testproject project. + +DEBUG = True +TEMPLATE_DEBUG = DEBUG +DEBUG_PROPAGATE_EXCEPTIONS = True + +ALLOWED_HOSTS = ['*'] + +ADMINS = ( + # ('Your Name', 'your_email@domain.com'), +) + +MANAGERS = ADMINS + +DATABASES = { + 'default': { + 'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'. + 'NAME': 'sqlite.db', # Or path to database file if using sqlite3. + 'USER': '', # Not used with sqlite3. + 'PASSWORD': '', # Not used with sqlite3. + 'HOST': '', # Set to empty string for localhost. Not used with sqlite3. + 'PORT': '', # Set to empty string for default. Not used with sqlite3. + } +} + +CACHES = { + 'default': { + 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', + } +} + +# Local time zone for this installation. Choices can be found here: +# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name +# although not all choices may be available on all operating systems. +# On Unix systems, a value of None will cause Django to use the same +# timezone as the operating system. +# If running in a Windows environment this must be set to the same as your +# system time zone. +TIME_ZONE = 'Europe/London' + +# Language code for this installation. 
All choices can be found here: +# http://www.i18nguy.com/unicode/language-identifiers.html +LANGUAGE_CODE = 'en-uk' + +SITE_ID = 1 + +# If you set this to False, Django will make some optimizations so as not +# to load the internationalization machinery. +USE_I18N = True + +# If you set this to False, Django will not format dates, numbers and +# calendars according to the current locale +USE_L10N = True + +# Absolute filesystem path to the directory that will hold user-uploaded files. +# Example: "/home/media/media.lawrence.com/" +MEDIA_ROOT = '' + +# URL that handles the media served from MEDIA_ROOT. Make sure to use a +# trailing slash if there is a path component (optional in other cases). +# Examples: "http://media.lawrence.com", "http://example.com/media/" +MEDIA_URL = '' + +# Make this unique, and don't share it with anybody. +SECRET_KEY = 'u@x-aj9(hoh#rb-^ymf#g2jx_hp0vj7u5#b@ag1n^seu9e!%cy' + +# List of callables that know how to import templates from various sources. +TEMPLATE_LOADERS = ( + 'django.template.loaders.filesystem.Loader', + 'django.template.loaders.app_directories.Loader', +# 'django.template.loaders.eggs.Loader', +) + +MIDDLEWARE_CLASSES = ( + 'django.middleware.common.CommonMiddleware', + 'django.contrib.sessions.middleware.SessionMiddleware', + 'django.middleware.csrf.CsrfViewMiddleware', + 'django.contrib.auth.middleware.AuthenticationMiddleware', + 'django.contrib.messages.middleware.MessageMiddleware', +) + +ROOT_URLCONF = 'urls' + +TEMPLATE_DIRS = ( + # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates". + # Always use forward slashes, even on Windows. + # Don't forget to use absolute paths, not relative paths. +) + +INSTALLED_APPS = ( + 'django.contrib.auth', + 'django.contrib.contenttypes', + 'django.contrib.sessions', + 'django.contrib.sites', + 'django.contrib.messages', + # Uncomment the next line to enable the admin: + # 'django.contrib.admin', + # Uncomment the next line to enable admin documentation: + # 'django.contrib.admindocs', + 'rest_framework', + 'rest_framework.authtoken', + 'rest_framework.tests', +) + +# OAuth is optional and won't work if there is no oauth_provider & oauth2 +try: + import oauth_provider + import oauth2 +except ImportError: + pass +else: + INSTALLED_APPS += ( + 'oauth_provider', + ) + +try: + import provider +except ImportError: + pass +else: + INSTALLED_APPS += ( + 'provider', + 'provider.oauth2', + ) + +STATIC_URL = '/static/' + +PASSWORD_HASHERS = ( + 'django.contrib.auth.hashers.SHA1PasswordHasher', + 'django.contrib.auth.hashers.PBKDF2PasswordHasher', + 'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher', + 'django.contrib.auth.hashers.BCryptPasswordHasher', + 'django.contrib.auth.hashers.MD5PasswordHasher', + 'django.contrib.auth.hashers.CryptPasswordHasher', +) + +import django + +if django.VERSION < (1, 3): + INSTALLED_APPS += ('staticfiles',) + + +# If we're running on the Jenkins server we want to archive the coverage reports as XML. +import os +if os.environ.get('HUDSON_URL', None): + TEST_RUNNER = 'xmlrunner.extra.djangotestrunner.XMLTestRunner' + TEST_OUTPUT_VERBOSE = True + TEST_OUTPUT_DESCRIPTIONS = True + TEST_OUTPUT_DIR = 'xmlrunner' diff --git a/awx/lib/site-packages/rest_framework/runtests/urls.py b/awx/lib/site-packages/rest_framework/runtests/urls.py new file mode 100644 index 0000000000..ed5baeae6a --- /dev/null +++ b/awx/lib/site-packages/rest_framework/runtests/urls.py @@ -0,0 +1,7 @@ +""" +Blank URLConf just to keep runtests.py happy. 
+""" +from rest_framework.compat import patterns + +urlpatterns = patterns('', +) diff --git a/awx/lib/site-packages/rest_framework/serializers.py b/awx/lib/site-packages/rest_framework/serializers.py new file mode 100644 index 0000000000..11ead02e4f --- /dev/null +++ b/awx/lib/site-packages/rest_framework/serializers.py @@ -0,0 +1,968 @@ +""" +Serializers and ModelSerializers are similar to Forms and ModelForms. +Unlike forms, they are not constrained to dealing with HTML output, and +form encoded input. + +Serialization in REST framework is a two-phase process: + +1. Serializers marshal between complex types like model instances, and +python primatives. +2. The process of marshalling between python primatives and request and +response content is handled by parsers and renderers. +""" +from __future__ import unicode_literals +import copy +import datetime +import types +from decimal import Decimal +from django.core.paginator import Page +from django.db import models +from django.forms import widgets +from django.utils.datastructures import SortedDict +from rest_framework.compat import get_concrete_model, six + +# Note: We do the following so that users of the framework can use this style: +# +# example_field = serializers.CharField(...) +# +# This helps keep the separation between model fields, form fields, and +# serializer fields more explicit. + +from rest_framework.relations import * +from rest_framework.fields import * + + +class NestedValidationError(ValidationError): + """ + The default ValidationError behavior is to stringify each item in the list + if the messages are a list of error messages. + + In the case of nested serializers, where the parent has many children, + then the child's `serializer.errors` will be a list of dicts. In the case + of a single child, the `serializer.errors` will be a dict. + + We need to override the default behavior to get properly nested error dicts. + """ + + def __init__(self, message): + if isinstance(message, dict): + self.messages = [message] + else: + self.messages = message + + +class DictWithMetadata(dict): + """ + A dict-like object, that can have additional properties attached. + """ + def __getstate__(self): + """ + Used by pickle (e.g., caching). + Overridden to remove the metadata from the dict, since it shouldn't be + pickled and may in some instances be unpickleable. + """ + return dict(self) + + +class SortedDictWithMetadata(SortedDict): + """ + A sorted dict-like object, that can have additional properties attached. + """ + def __getstate__(self): + """ + Used by pickle (e.g., caching). + Overriden to remove the metadata from the dict, since it shouldn't be + pickle and may in some instances be unpickleable. + """ + return SortedDict(self).__dict__ + + +def _is_protected_type(obj): + """ + True if the object is a native datatype that does not need to + be serialized further. + """ + return isinstance(obj, ( + types.NoneType, + int, long, + datetime.datetime, datetime.date, datetime.time, + float, Decimal, + basestring) + ) + + +def _get_declared_fields(bases, attrs): + """ + Create a list of serializer field instances from the passed in 'attrs', + plus any fields on the base classes (in 'bases'). + + Note that all fields from the base classes are used. + """ + fields = [(field_name, attrs.pop(field_name)) + for field_name, obj in list(six.iteritems(attrs)) + if isinstance(obj, Field)] + fields.sort(key=lambda x: x[1].creation_counter) + + # If this class is subclassing another Serializer, add that Serializer's + # fields. 
Note that we loop over the bases in *reverse*. This is necessary + # in order to maintain the correct order of fields. + for base in bases[::-1]: + if hasattr(base, 'base_fields'): + fields = list(base.base_fields.items()) + fields + + return SortedDict(fields) + + +class SerializerMetaclass(type): + def __new__(cls, name, bases, attrs): + attrs['base_fields'] = _get_declared_fields(bases, attrs) + return super(SerializerMetaclass, cls).__new__(cls, name, bases, attrs) + + +class SerializerOptions(object): + """ + Meta class options for Serializer + """ + def __init__(self, meta): + self.depth = getattr(meta, 'depth', 0) + self.fields = getattr(meta, 'fields', ()) + self.exclude = getattr(meta, 'exclude', ()) + + +class BaseSerializer(WritableField): + """ + This is the Serializer implementation. + We need to implement it as `BaseSerializer` due to metaclass magicks. + """ + class Meta(object): + pass + + _options_class = SerializerOptions + _dict_class = SortedDictWithMetadata + + def __init__(self, instance=None, data=None, files=None, + context=None, partial=False, many=None, + allow_add_remove=False, **kwargs): + super(BaseSerializer, self).__init__(**kwargs) + self.opts = self._options_class(self.Meta) + self.parent = None + self.root = None + self.partial = partial + self.many = many + self.allow_add_remove = allow_add_remove + + self.context = context or {} + + self.init_data = data + self.init_files = files + self.object = instance + self.fields = self.get_fields() + + self._data = None + self._files = None + self._errors = None + self._deleted = None + + if many and instance is not None and not hasattr(instance, '__iter__'): + raise ValueError('instance should be a queryset or other iterable with many=True') + + if allow_add_remove and not many: + raise ValueError('allow_add_remove should only be used for bulk updates, but you have not set many=True') + + ##### + # Methods to determine which fields to use when (de)serializing objects. + + def get_default_fields(self): + """ + Return the complete set of default fields for the object, as a dict. + """ + return {} + + def get_fields(self): + """ + Returns the complete set of fields for the object as a dict. + + This will be the set of any explicitly declared fields, + plus the set of fields returned by get_default_fields(). + """ + ret = SortedDict() + + # Get the explicitly declared fields + base_fields = copy.deepcopy(self.base_fields) + for key, field in base_fields.items(): + ret[key] = field + + # Add in the default fields + default_fields = self.get_default_fields() + for key, val in default_fields.items(): + if key not in ret: + ret[key] = val + + # If 'fields' is specified, use those fields, in that order. + if self.opts.fields: + assert isinstance(self.opts.fields, (list, tuple)), '`fields` must be a list or tuple' + new = SortedDict() + for key in self.opts.fields: + new[key] = ret[key] + ret = new + + # Remove anything in 'exclude' + if self.opts.exclude: + assert isinstance(self.opts.exclude, (list, tuple)), '`exclude` must be a list or tuple' + for key in self.opts.exclude: + ret.pop(key, None) + + for key, field in ret.items(): + field.initialize(parent=self, field_name=key) + + return ret + + ##### + # Methods to convert or revert from objects <--> primitive representations. + + def get_field_key(self, field_name): + """ + Return the key that should be used for a given field. + """ + return field_name + + def restore_fields(self, data, files): + """ + Core of deserialization, together with `restore_object`. 
+ Converts a dictionary of data into a dictionary of deserialized fields. + """ + reverted_data = {} + + if data is not None and not isinstance(data, dict): + self._errors['non_field_errors'] = ['Invalid data'] + return None + + for field_name, field in self.fields.items(): + field.initialize(parent=self, field_name=field_name) + try: + field.field_from_native(data, files, field_name, reverted_data) + except ValidationError as err: + self._errors[field_name] = list(err.messages) + + return reverted_data + + def perform_validation(self, attrs): + """ + Run `validate_<fieldname>()` and `validate()` methods on the serializer + """ + for field_name, field in self.fields.items(): + if field_name in self._errors: + continue + try: + validate_method = getattr(self, 'validate_%s' % field_name, None) + if validate_method: + source = field.source or field_name + attrs = validate_method(attrs, source) + except ValidationError as err: + self._errors[field_name] = self._errors.get(field_name, []) + list(err.messages) + + # If there are already errors, we don't run .validate() because + # field-validation failed and thus `attrs` may not be complete. + # which in turn can cause inconsistent validation errors. + if not self._errors: + try: + attrs = self.validate(attrs) + except ValidationError as err: + if hasattr(err, 'message_dict'): + for field_name, error_messages in err.message_dict.items(): + self._errors[field_name] = self._errors.get(field_name, []) + list(error_messages) + elif hasattr(err, 'messages'): + self._errors['non_field_errors'] = err.messages + + return attrs + + def validate(self, attrs): + """ + Stub method, to be overridden in Serializer subclasses + """ + return attrs + + def restore_object(self, attrs, instance=None): + """ + Deserialize a dictionary of attributes into an object instance. + You should override this method to control how deserialized objects + are instantiated. + """ + if instance is not None: + instance.update(attrs) + return instance + return attrs + + def to_native(self, obj): + """ + Serialize objects -> primitives. + """ + ret = self._dict_class() + ret.fields = {} + + for field_name, field in self.fields.items(): + field.initialize(parent=self, field_name=field_name) + key = self.get_field_key(field_name) + value = field.field_to_native(obj, field_name) + ret[key] = value + ret.fields[key] = field + return ret + + def from_native(self, data, files): + """ + Deserialize primitives -> objects. + """ + self._errors = {} + if data is not None or files is not None: + attrs = self.restore_fields(data, files) + if attrs is not None: + attrs = self.perform_validation(attrs) + else: + self._errors['non_field_errors'] = ['No input provided'] + + if not self._errors: + return self.restore_object(attrs, instance=getattr(self, 'object', None)) + + def field_to_native(self, obj, field_name): + """ + Override default so that the serializer can be used as a nested field + across relationships. 
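+
+        For example (hypothetical declaration), a parent serializer that
+        declares `owner = UserSerializer()` has this method called to
+        serialize the related `owner` object inline.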
+ """ + if self.source == '*': + return self.to_native(obj) + + try: + source = self.source or field_name + value = obj + + for component in source.split('.'): + value = get_component(value, component) + if value is None: + break + except ObjectDoesNotExist: + return None + + if is_simple_callable(getattr(value, 'all', None)): + return [self.to_native(item) for item in value.all()] + + if value is None: + return None + + if self.many is not None: + many = self.many + else: + many = hasattr(value, '__iter__') and not isinstance(value, (Page, dict, six.text_type)) + + if many: + return [self.to_native(item) for item in value] + return self.to_native(value) + + def field_from_native(self, data, files, field_name, into): + """ + Override default so that the serializer can be used as a writable + nested field across relationships. + """ + if self.read_only: + return + + try: + value = data[field_name] + except KeyError: + if self.default is not None and not self.partial: + # Note: partial updates shouldn't set defaults + value = copy.deepcopy(self.default) + else: + if self.required: + raise ValidationError(self.error_messages['required']) + return + + # Set the serializer object if it exists + obj = getattr(self.parent.object, field_name) if self.parent.object else None + + if self.source == '*': + if value: + into.update(value) + else: + if value in (None, ''): + into[(self.source or field_name)] = None + else: + kwargs = { + 'instance': obj, + 'data': value, + 'context': self.context, + 'partial': self.partial, + 'many': self.many + } + serializer = self.__class__(**kwargs) + + if serializer.is_valid(): + into[self.source or field_name] = serializer.object + else: + # Propagate errors up to our parent + raise NestedValidationError(serializer.errors) + + def get_identity(self, data): + """ + This hook is required for bulk update. + It is used to determine the canonical identity of a given object. + + Note that the data has not been validated at this point, so we need + to make sure that we catch any cases of incorrect datatypes being + passed to this method. + """ + try: + return data.get('id', None) + except AttributeError: + return None + + @property + def errors(self): + """ + Run deserialization and return error data, + setting self.object if no errors occurred. + """ + if self._errors is None: + data, files = self.init_data, self.init_files + + if self.many is not None: + many = self.many + else: + many = hasattr(data, '__iter__') and not isinstance(data, (Page, dict, six.text_type)) + if many: + warnings.warn('Implict list/queryset serialization is deprecated. 
+ @property
+ def errors(self):
+ """
+ Run deserialization and return error data,
+ setting self.object if no errors occurred.
+ """
+ if self._errors is None:
+ data, files = self.init_data, self.init_files
+
+ if self.many is not None:
+ many = self.many
+ else:
+ many = hasattr(data, '__iter__') and not isinstance(data, (Page, dict, six.text_type))
+ if many:
+ warnings.warn('Implicit list/queryset serialization is deprecated. '
+ 'Use the `many=True` flag when instantiating the serializer.',
+ DeprecationWarning, stacklevel=3)
+
+ if many:
+ ret = []
+ errors = []
+ update = self.object is not None
+
+ if update:
+ # If this is a bulk update we need to map all the objects
+ # to a canonical identity so we can determine which
+ # individual object is being updated for each item in the
+ # incoming data.
+ objects = self.object
+ identities = [self.get_identity(self.to_native(obj)) for obj in objects]
+ identity_to_objects = dict(zip(identities, objects))
+
+ if hasattr(data, '__iter__') and not isinstance(data, (dict, six.text_type)):
+ for item in data:
+ if update:
+ # Determine which object we're updating
+ identity = self.get_identity(item)
+ self.object = identity_to_objects.pop(identity, None)
+ if self.object is None and not self.allow_add_remove:
+ ret.append(None)
+ errors.append({'non_field_errors': ['Cannot create a new item, only existing items may be updated.']})
+ continue
+
+ ret.append(self.from_native(item, None))
+ errors.append(self._errors)
+
+ if update:
+ self._deleted = identity_to_objects.values()
+
+ self._errors = any(errors) and errors or []
+ else:
+ self._errors = {'non_field_errors': ['Expected a list of items.']}
+ else:
+ ret = self.from_native(data, files)
+
+ if not self._errors:
+ self.object = ret
+
+ return self._errors
+
+ def is_valid(self):
+ return not self.errors
+
+ @property
+ def data(self):
+ """
+ Returns the serialized data on the serializer.
+ """
+ if self._data is None:
+ obj = self.object
+
+ if self.many is not None:
+ many = self.many
+ else:
+ many = hasattr(obj, '__iter__') and not isinstance(obj, (Page, dict))
+ if many:
+ warnings.warn('Implicit list/queryset serialization is deprecated. '
+ 'Use the `many=True` flag when instantiating the serializer.',
+ DeprecationWarning, stacklevel=2)
+
+ if many:
+ self._data = [self.to_native(item) for item in obj]
+ else:
+ self._data = self.to_native(obj)
+
+ return self._data
+
+ def save_object(self, obj, **kwargs):
+ obj.save(**kwargs)
+
+ def delete_object(self, obj):
+ obj.delete()
+
+ def save(self, **kwargs):
+ """
+ Save the deserialized object and return it.
+ """
+ if isinstance(self.object, list):
+ [self.save_object(item, **kwargs) for item in self.object]
+ else:
+ self.save_object(self.object, **kwargs)
+
+ if self.allow_add_remove and self._deleted:
+ [self.delete_object(item) for item in self._deleted]
+
+ return self.object
+
+ def metadata(self):
+ """
+ Return a dictionary of metadata about the fields on the serializer.
+ Useful for things like responding to OPTIONS requests, or generating
+ API schemas for auto-documentation.
+ """
+ return SortedDict(
+ [(field_name, field.metadata())
+ for field_name, field in six.iteritems(self.fields)]
+ )
+
+
+class Serializer(six.with_metaclass(SerializerMetaclass, BaseSerializer)):
+ pass
+
+
+class ModelSerializerOptions(SerializerOptions):
+ """
+ Meta class options for ModelSerializer
+ """
+ def __init__(self, meta):
+ super(ModelSerializerOptions, self).__init__(meta)
+ self.model = getattr(meta, 'model', None)
+ self.read_only_fields = getattr(meta, 'read_only_fields', ())
+
+
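`ModelSerializerOptions` adds `model` and `read_only_fields` to the options inherited from `SerializerOptions` (such as `fields` and `depth`). A short illustration of the `Meta` declarations the `ModelSerializer` below consumes; the `Account` model is hypothetical:

    class AccountSerializer(ModelSerializer):
        class Meta:
            model = Account                  # hypothetical model class
            fields = ('id', 'owner', 'created')
            read_only_fields = ('created',)  # flagged read-only by get_default_fields()
            depth = 1                        # expand relations as nested serializers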
+ """ + _options_class = ModelSerializerOptions + + field_mapping = { + models.AutoField: IntegerField, + models.FloatField: FloatField, + models.IntegerField: IntegerField, + models.PositiveIntegerField: IntegerField, + models.SmallIntegerField: IntegerField, + models.PositiveSmallIntegerField: IntegerField, + models.DateTimeField: DateTimeField, + models.DateField: DateField, + models.TimeField: TimeField, + models.DecimalField: DecimalField, + models.EmailField: EmailField, + models.CharField: CharField, + models.URLField: URLField, + models.SlugField: SlugField, + models.TextField: CharField, + models.CommaSeparatedIntegerField: CharField, + models.BooleanField: BooleanField, + models.FileField: FileField, + models.ImageField: ImageField, + } + + def get_default_fields(self): + """ + Return all the fields that should be serialized for the model. + """ + + cls = self.opts.model + assert cls is not None, \ + "Serializer class '%s' is missing 'model' Meta option" % self.__class__.__name__ + opts = get_concrete_model(cls)._meta + ret = SortedDict() + nested = bool(self.opts.depth) + + # Deal with adding the primary key field + pk_field = opts.pk + while pk_field.rel and pk_field.rel.parent_link: + # If model is a child via multitable inheritance, use parent's pk + pk_field = pk_field.rel.to._meta.pk + + field = self.get_pk_field(pk_field) + if field: + ret[pk_field.name] = field + + # Deal with forward relationships + forward_rels = [field for field in opts.fields if field.serialize] + forward_rels += [field for field in opts.many_to_many if field.serialize] + + for model_field in forward_rels: + has_through_model = False + + if model_field.rel: + to_many = isinstance(model_field, + models.fields.related.ManyToManyField) + related_model = model_field.rel.to + + if to_many and not model_field.rel.through._meta.auto_created: + has_through_model = True + + if model_field.rel and nested: + if len(inspect.getargspec(self.get_nested_field).args) == 2: + warnings.warn( + 'The `get_nested_field(model_field)` call signature ' + 'is due to be deprecated. ' + 'Use `get_nested_field(model_field, related_model, ' + 'to_many) instead', + PendingDeprecationWarning + ) + field = self.get_nested_field(model_field) + else: + field = self.get_nested_field(model_field, related_model, to_many) + elif model_field.rel: + if len(inspect.getargspec(self.get_nested_field).args) == 3: + warnings.warn( + 'The `get_related_field(model_field, to_many)` call ' + 'signature is due to be deprecated. 
+ def get_default_fields(self):
+ """
+ Return all the fields that should be serialized for the model.
+ """
+
+ cls = self.opts.model
+ assert cls is not None, \
+ "Serializer class '%s' is missing 'model' Meta option" % self.__class__.__name__
+ opts = get_concrete_model(cls)._meta
+ ret = SortedDict()
+ nested = bool(self.opts.depth)
+
+ # Deal with adding the primary key field
+ pk_field = opts.pk
+ while pk_field.rel and pk_field.rel.parent_link:
+ # If model is a child via multitable inheritance, use parent's pk
+ pk_field = pk_field.rel.to._meta.pk
+
+ field = self.get_pk_field(pk_field)
+ if field:
+ ret[pk_field.name] = field
+
+ # Deal with forward relationships
+ forward_rels = [field for field in opts.fields if field.serialize]
+ forward_rels += [field for field in opts.many_to_many if field.serialize]
+
+ for model_field in forward_rels:
+ has_through_model = False
+
+ if model_field.rel:
+ to_many = isinstance(model_field,
+ models.fields.related.ManyToManyField)
+ related_model = model_field.rel.to
+
+ if to_many and not model_field.rel.through._meta.auto_created:
+ has_through_model = True
+
+ if model_field.rel and nested:
+ if len(inspect.getargspec(self.get_nested_field).args) == 2:
+ warnings.warn(
+ 'The `get_nested_field(model_field)` call signature '
+ 'is due to be deprecated. '
+ 'Use `get_nested_field(model_field, related_model, '
+ 'to_many)` instead',
+ PendingDeprecationWarning
+ )
+ field = self.get_nested_field(model_field)
+ else:
+ field = self.get_nested_field(model_field, related_model, to_many)
+ elif model_field.rel:
+ if len(inspect.getargspec(self.get_related_field).args) == 3:
+ warnings.warn(
+ 'The `get_related_field(model_field, to_many)` call '
+ 'signature is due to be deprecated. '
+ 'Use `get_related_field(model_field, related_model, '
+ 'to_many)` instead',
+ PendingDeprecationWarning
+ )
+ field = self.get_related_field(model_field, to_many=to_many)
+ else:
+ field = self.get_related_field(model_field, related_model, to_many)
+ else:
+ field = self.get_field(model_field)
+
+ if field:
+ if has_through_model:
+ field.read_only = True
+
+ ret[model_field.name] = field
+
+ # Deal with reverse relationships
+ if not self.opts.fields:
+ reverse_rels = []
+ else:
+ # Reverse relationships are only included if they are explicitly
+ # present in the `fields` option on the serializer
+ reverse_rels = opts.get_all_related_objects()
+ reverse_rels += opts.get_all_related_many_to_many_objects()
+
+ for relation in reverse_rels:
+ accessor_name = relation.get_accessor_name()
+ if not self.opts.fields or accessor_name not in self.opts.fields:
+ continue
+ related_model = relation.model
+ to_many = relation.field.rel.multiple
+ has_through_model = False
+ is_m2m = isinstance(relation.field,
+ models.fields.related.ManyToManyField)
+
+ if is_m2m and not relation.field.rel.through._meta.auto_created:
+ has_through_model = True
+
+ if nested:
+ field = self.get_nested_field(None, related_model, to_many)
+ else:
+ field = self.get_related_field(None, related_model, to_many)
+
+ if field:
+ if has_through_model:
+ field.read_only = True
+
+ ret[accessor_name] = field
+
+ # Add the `read_only` flag to any fields that have been specified
+ # in the `read_only_fields` option
+ for field_name in self.opts.read_only_fields:
+ assert field_name not in self.base_fields.keys(), \
+ "field '%s' on serializer '%s' specified in " \
+ "`read_only_fields`, but also added " \
+ "as an explicit field. Remove it from `read_only_fields`." % \
+ (field_name, self.__class__.__name__)
+ assert field_name in ret, \
+ "Nonexistent field '%s' specified in `read_only_fields` " \
+ "on serializer '%s'." % \
+ (field_name, self.__class__.__name__)
+ ret[field_name].read_only = True
+
+ return ret
+
+ def get_pk_field(self, model_field):
+ """
+ Returns a default instance of the pk field.
+ """
+ return self.get_field(model_field)
+
+ def get_nested_field(self, model_field, related_model, to_many):
+ """
+ Creates a default instance of a nested relational field.
+
+ Note that model_field will be `None` for reverse relationships.
+ """
+ class NestedModelSerializer(ModelSerializer):
+ class Meta:
+ model = related_model
+ depth = self.opts.depth - 1
+
+ return NestedModelSerializer(many=to_many)
+
+ def get_related_field(self, model_field, related_model, to_many):
+ """
+ Creates a default instance of a flat relational field.
+
+ Note that model_field will be `None` for reverse relationships.
+ """
+ # TODO: filter queryset using:
+ # .using(db).complex_filter(self.rel.limit_choices_to)
+
+ kwargs = {
+ 'queryset': related_model._default_manager,
+ 'many': to_many
+ }
+
+ if model_field:
+ kwargs['required'] = not(model_field.null or model_field.blank)
+
+ return PrimaryKeyRelatedField(**kwargs)
+
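Which of the two factories above `get_default_fields()` uses for a relation is controlled by `Meta.depth`: zero (the default) yields flat `PrimaryKeyRelatedField`s via `get_related_field()`, while a positive depth builds inline `NestedModelSerializer`s via `get_nested_field()`, decrementing the depth at each level. For illustration, assuming a hypothetical `Comment` model with a ForeignKey to `Post`:

    class FlatCommentSerializer(ModelSerializer):
        class Meta:
            model = Comment  # hypothetical model
            depth = 0        # `post` becomes a PrimaryKeyRelatedField

    class NestedCommentSerializer(ModelSerializer):
        class Meta:
            model = Comment
            depth = 1        # `post` becomes an inline nested serializer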
+ """ + kwargs = {} + + if model_field.null or model_field.blank: + kwargs['required'] = False + + if isinstance(model_field, models.AutoField) or not model_field.editable: + kwargs['read_only'] = True + + if model_field.has_default(): + kwargs['default'] = model_field.get_default() + + if issubclass(model_field.__class__, models.TextField): + kwargs['widget'] = widgets.Textarea + + if model_field.verbose_name is not None: + kwargs['label'] = model_field.verbose_name + + if model_field.help_text is not None: + kwargs['help_text'] = model_field.help_text + + # TODO: TypedChoiceField? + if model_field.flatchoices: # This ModelField contains choices + kwargs['choices'] = model_field.flatchoices + return ChoiceField(**kwargs) + + # put this below the ChoiceField because min_value isn't a valid initializer + if issubclass(model_field.__class__, models.PositiveIntegerField) or\ + issubclass(model_field.__class__, models.PositiveSmallIntegerField): + kwargs['min_value'] = 0 + + attribute_dict = { + models.CharField: ['max_length'], + models.CommaSeparatedIntegerField: ['max_length'], + models.DecimalField: ['max_digits', 'decimal_places'], + models.EmailField: ['max_length'], + models.FileField: ['max_length'], + models.ImageField: ['max_length'], + models.SlugField: ['max_length'], + models.URLField: ['max_length'], + } + + if model_field.__class__ in attribute_dict: + attributes = attribute_dict[model_field.__class__] + for attribute in attributes: + kwargs.update({attribute: getattr(model_field, attribute)}) + + try: + return self.field_mapping[model_field.__class__](**kwargs) + except KeyError: + return ModelField(model_field=model_field, **kwargs) + + def get_validation_exclusions(self): + """ + Return a list of field names to exclude from model validation. + """ + cls = self.opts.model + opts = get_concrete_model(cls)._meta + exclusions = [field.name for field in opts.fields + opts.many_to_many] + for field_name, field in self.fields.items(): + field_name = field.source or field_name + if field_name in exclusions and not field.read_only: + exclusions.remove(field_name) + return exclusions + + def full_clean(self, instance): + """ + Perform Django's full_clean, and populate the `errors` dictionary + if any validation errors occur. + + Note that we don't perform this inside the `.restore_object()` method, + so that subclasses can override `.restore_object()`, and still get + the full_clean validation checking. + """ + try: + instance.full_clean(exclude=self.get_validation_exclusions()) + except ValidationError as err: + self._errors = err.message_dict + return None + return instance + + def restore_object(self, attrs, instance=None): + """ + Restore the model instance. + """ + m2m_data = {} + related_data = {} + meta = self.opts.model._meta + + # Reverse fk or one-to-one relations + for (obj, model) in meta.get_all_related_objects_with_model(): + field_name = obj.field.related_query_name() + if field_name in attrs: + related_data[field_name] = attrs.pop(field_name) + + # Reverse m2m relations + for (obj, model) in meta.get_all_related_m2m_objects_with_model(): + field_name = obj.field.related_query_name() + if field_name in attrs: + m2m_data[field_name] = attrs.pop(field_name) + + # Forward m2m relations + for field in meta.many_to_many: + if field.name in attrs: + m2m_data[field.name] = attrs.pop(field.name) + + # Update an existing instance... 
+ if instance is not None: + for key, val in attrs.items(): + setattr(instance, key, val) + + # ...or create a new instance + else: + instance = self.opts.model(**attrs) + + # Any relations that cannot be set until we've + # saved the model get hidden away on these + # private attributes, so we can deal with them + # at the point of save. + instance._related_data = related_data + instance._m2m_data = m2m_data + + return instance + + def from_native(self, data, files): + """ + Override the default method to also include model field validation. + """ + instance = super(ModelSerializer, self).from_native(data, files) + if not self._errors: + return self.full_clean(instance) + + def save_object(self, obj, **kwargs): + """ + Save the deserialized object and return it. + """ + obj.save(**kwargs) + + if getattr(obj, '_m2m_data', None): + for accessor_name, object_list in obj._m2m_data.items(): + setattr(obj, accessor_name, object_list) + del(obj._m2m_data) + + if getattr(obj, '_related_data', None): + for accessor_name, related in obj._related_data.items(): + setattr(obj, accessor_name, related) + del(obj._related_data) + + +class HyperlinkedModelSerializerOptions(ModelSerializerOptions): + """ + Options for HyperlinkedModelSerializer + """ + def __init__(self, meta): + super(HyperlinkedModelSerializerOptions, self).__init__(meta) + self.view_name = getattr(meta, 'view_name', None) + self.lookup_field = getattr(meta, 'lookup_field', None) + + +class HyperlinkedModelSerializer(ModelSerializer): + """ + A subclass of ModelSerializer that uses hyperlinked relationships, + instead of primary key relationships. + """ + _options_class = HyperlinkedModelSerializerOptions + _default_view_name = '%(model_name)s-detail' + _hyperlink_field_class = HyperlinkedRelatedField + + # Just a placeholder to ensure 'url' is the first field + # The field itself is actually created on initialization, + # when the view_name and lookup_field arguments are available. + url = Field() + + def __init__(self, *args, **kwargs): + super(HyperlinkedModelSerializer, self).__init__(*args, **kwargs) + + if self.opts.view_name is None: + self.opts.view_name = self._get_default_view_name(self.opts.model) + + url_field = HyperlinkedIdentityField( + view_name=self.opts.view_name, + lookup_field=self.opts.lookup_field + ) + url_field.initialize(self, 'url') + self.fields['url'] = url_field + + def _get_default_view_name(self, model): + """ + Return the view name to use if 'view_name' is not specified in 'Meta' + """ + model_meta = model._meta + format_kwargs = { + 'app_label': model_meta.app_label, + 'model_name': model_meta.object_name.lower() + } + return self._default_view_name % format_kwargs + + def get_pk_field(self, model_field): + if self.opts.fields and model_field.name in self.opts.fields: + return self.get_field(model_field) + + def get_related_field(self, model_field, related_model, to_many): + """ + Creates a default instance of a flat relational field. + """ + # TODO: filter queryset using: + # .using(db).complex_filter(self.rel.limit_choices_to) + kwargs = { + 'queryset': related_model._default_manager, + 'view_name': self._get_default_view_name(related_model), + 'many': to_many + } + + if model_field: + kwargs['required'] = not(model_field.null or model_field.blank) + + if self.opts.lookup_field: + kwargs['lookup_field'] = self.opts.lookup_field + + return self._hyperlink_field_class(**kwargs) + + def get_identity(self, data): + """ + This hook is required for bulk update. 
+ We need to override the default, to use the url as the identity.
+ """
+ try:
+ return data.get('url', None)
+ except AttributeError:
+ return None
diff --git a/awx/lib/site-packages/rest_framework/settings.py b/awx/lib/site-packages/rest_framework/settings.py
new file mode 100644
index 0000000000..beb511aca2
--- /dev/null
+++ b/awx/lib/site-packages/rest_framework/settings.py
@@ -0,0 +1,192 @@
+"""
+Settings for REST framework are all namespaced in the REST_FRAMEWORK setting.
+For example, your project's `settings.py` file might look like this:
+
+REST_FRAMEWORK = {
+ 'DEFAULT_RENDERER_CLASSES': (
+ 'rest_framework.renderers.JSONRenderer',
+ 'rest_framework.renderers.YAMLRenderer',
+ ),
+ 'DEFAULT_PARSER_CLASSES': (
+ 'rest_framework.parsers.JSONParser',
+ 'rest_framework.parsers.YAMLParser',
+ )
+}
+
+This module provides the `api_settings` object, which is used to access
+REST framework settings, checking for user settings first, then falling
+back to the defaults.
+"""
+from __future__ import unicode_literals
+
+from django.conf import settings
+from django.utils import importlib
+
+from rest_framework import ISO_8601
+from rest_framework.compat import six
+
+
+USER_SETTINGS = getattr(settings, 'REST_FRAMEWORK', None)
+
+DEFAULTS = {
+ # Base API policies
+ 'DEFAULT_RENDERER_CLASSES': (
+ 'rest_framework.renderers.JSONRenderer',
+ 'rest_framework.renderers.BrowsableAPIRenderer',
+ ),
+ 'DEFAULT_PARSER_CLASSES': (
+ 'rest_framework.parsers.JSONParser',
+ 'rest_framework.parsers.FormParser',
+ 'rest_framework.parsers.MultiPartParser'
+ ),
+ 'DEFAULT_AUTHENTICATION_CLASSES': (
+ 'rest_framework.authentication.SessionAuthentication',
+ 'rest_framework.authentication.BasicAuthentication'
+ ),
+ 'DEFAULT_PERMISSION_CLASSES': (
+ 'rest_framework.permissions.AllowAny',
+ ),
+ 'DEFAULT_THROTTLE_CLASSES': (
+ ),
+
+ 'DEFAULT_CONTENT_NEGOTIATION_CLASS':
+ 'rest_framework.negotiation.DefaultContentNegotiation',
+
+ # Generic view behavior
+ 'DEFAULT_MODEL_SERIALIZER_CLASS':
+ 'rest_framework.serializers.ModelSerializer',
+ 'DEFAULT_PAGINATION_SERIALIZER_CLASS':
+ 'rest_framework.pagination.PaginationSerializer',
+ 'DEFAULT_FILTER_BACKENDS': (),
+
+ # Throttling
+ 'DEFAULT_THROTTLE_RATES': {
+ 'user': None,
+ 'anon': None,
+ },
+
+ # Pagination
+ 'PAGINATE_BY': None,
+ 'PAGINATE_BY_PARAM': None,
+
+ # Authentication
+ 'UNAUTHENTICATED_USER': 'django.contrib.auth.models.AnonymousUser',
+ 'UNAUTHENTICATED_TOKEN': None,
+
+ # Browser enhancements
+ 'FORM_METHOD_OVERRIDE': '_method',
+ 'FORM_CONTENT_OVERRIDE': '_content',
+ 'FORM_CONTENTTYPE_OVERRIDE': '_content_type',
+ 'URL_ACCEPT_OVERRIDE': 'accept',
+ 'URL_FORMAT_OVERRIDE': 'format',
+
+ 'FORMAT_SUFFIX_KWARG': 'format',
+
+ # Input and output formats
+ 'DATE_INPUT_FORMATS': (
+ ISO_8601,
+ ),
+ 'DATE_FORMAT': None,
+
+ 'DATETIME_INPUT_FORMATS': (
+ ISO_8601,
+ ),
+ 'DATETIME_FORMAT': None,
+
+ 'TIME_INPUT_FORMATS': (
+ ISO_8601,
+ ),
+ 'TIME_FORMAT': None,
+
+ # Pending deprecation
+ 'FILTER_BACKEND': None,
+}
+
+
+# List of settings that may be in string import notation.
+IMPORT_STRINGS = ( + 'DEFAULT_RENDERER_CLASSES', + 'DEFAULT_PARSER_CLASSES', + 'DEFAULT_AUTHENTICATION_CLASSES', + 'DEFAULT_PERMISSION_CLASSES', + 'DEFAULT_THROTTLE_CLASSES', + 'DEFAULT_CONTENT_NEGOTIATION_CLASS', + 'DEFAULT_MODEL_SERIALIZER_CLASS', + 'DEFAULT_PAGINATION_SERIALIZER_CLASS', + 'DEFAULT_FILTER_BACKENDS', + 'FILTER_BACKEND', + 'UNAUTHENTICATED_USER', + 'UNAUTHENTICATED_TOKEN', +) + + +def perform_import(val, setting_name): + """ + If the given setting is a string import notation, + then perform the necessary import or imports. + """ + if isinstance(val, six.string_types): + return import_from_string(val, setting_name) + elif isinstance(val, (list, tuple)): + return [import_from_string(item, setting_name) for item in val] + return val + + +def import_from_string(val, setting_name): + """ + Attempt to import a class from a string representation. + """ + try: + # Nod to tastypie's use of importlib. + parts = val.split('.') + module_path, class_name = '.'.join(parts[:-1]), parts[-1] + module = importlib.import_module(module_path) + return getattr(module, class_name) + except ImportError as e: + msg = "Could not import '%s' for API setting '%s'. %s: %s." % (val, setting_name, e.__class__.__name__, e) + raise ImportError(msg) + + +class APISettings(object): + """ + A settings object, that allows API settings to be accessed as properties. + For example: + + from rest_framework.settings import api_settings + print api_settings.DEFAULT_RENDERER_CLASSES + + Any setting with string import paths will be automatically resolved + and return the class, rather than the string literal. + """ + def __init__(self, user_settings=None, defaults=None, import_strings=None): + self.user_settings = user_settings or {} + self.defaults = defaults or {} + self.import_strings = import_strings or () + + def __getattr__(self, attr): + if attr not in self.defaults.keys(): + raise AttributeError("Invalid API setting: '%s'" % attr) + + try: + # Check if present in user settings + val = self.user_settings[attr] + except KeyError: + # Fall back to defaults + val = self.defaults[attr] + + # Coerce import strings into classes + if val and attr in self.import_strings: + val = perform_import(val, attr) + + self.validate_setting(attr, val) + + # Cache the result + setattr(self, attr, val) + return val + + def validate_setting(self, attr, val): + if attr == 'FILTER_BACKEND' and val is not None: + # Make sure we can initialize the class + val() + +api_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRINGS) diff --git a/awx/lib/site-packages/rest_framework/six.py b/awx/lib/site-packages/rest_framework/six.py new file mode 100644 index 0000000000..9e3823128f --- /dev/null +++ b/awx/lib/site-packages/rest_framework/six.py @@ -0,0 +1,389 @@ +"""Utilities for writing code that runs on Python 2 and 3""" + +import operator +import sys +import types + +__author__ = "Benjamin Peterson <benjamin@python.org>" +__version__ = "1.2.0" + + +# True if we are running on Python 3. +PY3 = sys.version_info[0] == 3 + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform == "java": + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). 
+ class X(object): + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) + # This is a bit ugly, but it avoids running this again. + delattr(tp, self.name) + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + + +class _MovedItems(types.ModuleType): + """Lazy loading of moved objects""" + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("reload_module", "__builtin__", "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("copyreg", "copy_reg"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", 
"tkColorChooser", + "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", + "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", + "tkinter.simpledialog"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("winreg", "_winreg"), +] +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) +del attr + +moves = sys.modules["django.utils.six.moves"] = _MovedItems("moves") + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_code = "__code__" + _func_defaults = "__defaults__" + + _iterkeys = "keys" + _itervalues = "values" + _iteritems = "items" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_code = "func_code" + _func_defaults = "func_defaults" + + _iterkeys = "iterkeys" + _itervalues = "itervalues" + _iteritems = "iteritems" + + +try: + advance_iterator = next +except NameError: + def advance_iterator(it): + return it.next() +next = advance_iterator + + +if PY3: + def get_unbound_function(unbound): + return unbound + + Iterator = object + + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) +else: + def get_unbound_function(unbound): + return unbound.im_func + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc(get_unbound_function, + """Get the function out of a possibly unbound function""") + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = operator.attrgetter(_func_defaults) + + +def iterkeys(d): + """Return an iterator over the keys of a dictionary.""" + return iter(getattr(d, _iterkeys)()) + +def itervalues(d): + """Return an iterator over the values of a dictionary.""" + return iter(getattr(d, _itervalues)()) + +def iteritems(d): + """Return an iterator over the (key, value) pairs of a dictionary.""" + return iter(getattr(d, _iteritems)()) + + +if PY3: + def b(s): + return s.encode("latin-1") + def u(s): + return s + if sys.version_info[1] <= 1: + def int2byte(i): + return bytes((i,)) + else: + # This is about 2x faster than the implementation above on 3.2+ + int2byte = operator.methodcaller("to_bytes", 1, "big") + import io + StringIO = io.StringIO + BytesIO = io.BytesIO +else: + def b(s): + return s + def u(s): + return unicode(s, "unicode_escape") + int2byte = chr + import StringIO + StringIO = BytesIO = StringIO.StringIO +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +if PY3: + import builtins + exec_ = getattr(builtins, "exec") + + + def reraise(tp, value, tb=None): + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + + + print_ = getattr(builtins, "print") + del builtins + +else: + def exec_(code, globs=None, locs=None): + """Execute code in a namespace.""" + if globs is None: + frame = sys._getframe(1) + globs = 
frame.f_globals + if locs is None: + locs = frame.f_locals + del frame + elif locs is None: + locs = globs + exec("""exec code in globs, locs""") + + + exec_("""def reraise(tp, value, tb=None): + raise tp, value, tb +""") + + + def print_(*args, **kwargs): + """The new-style print function.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + def write(data): + if not isinstance(data, basestring): + data = str(data) + fp.write(data) + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) + +_add_doc(reraise, """Reraise an exception.""") + + +def with_metaclass(meta, base=object): + """Create a base class with a metaclass.""" + return meta("NewBase", (base,), {}) + + +### Additional customizations for Django ### + +if PY3: + _iterlists = "lists" + _assertRaisesRegex = "assertRaisesRegex" +else: + _iterlists = "iterlists" + _assertRaisesRegex = "assertRaisesRegexp" + + +def iterlists(d): + """Return an iterator over the values of a MultiValueDict.""" + return getattr(d, _iterlists)() + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +add_move(MovedModule("_dummy_thread", "dummy_thread")) +add_move(MovedModule("_thread", "thread")) diff --git a/awx/lib/site-packages/rest_framework/static/rest_framework/css/bootstrap-tweaks.css b/awx/lib/site-packages/rest_framework/static/rest_framework/css/bootstrap-tweaks.css new file mode 100644 index 0000000000..6bfb778ccf --- /dev/null +++ b/awx/lib/site-packages/rest_framework/static/rest_framework/css/bootstrap-tweaks.css @@ -0,0 +1,185 @@ +/* + +This CSS file contains some tweaks specific to the included Bootstrap theme. +It's separate from `style.css` so that it can be easily overridden by replacing +a single block in the template. 
+ +*/ + + +.form-actions { + background: transparent; + border-top-color: transparent; + padding-top: 0; +} + +.navbar-inverse .brand a { + color: #999; +} +.navbar-inverse .brand:hover a { + color: white; + text-decoration: none; +} + +/* custom navigation styles */ +.wrapper .navbar{ + width: 100%; + position: absolute; + left: 0; + top: 0; +} + +.navbar .navbar-inner{ + background: #2C2C2C; + color: white; + border: none; + border-top: 5px solid #A30000; + border-radius: 0px; +} + +.navbar .navbar-inner .nav li, .navbar .navbar-inner .nav li a, .navbar .navbar-inner .brand:hover{ + color: white; +} + +.nav-list > .active > a, .nav-list > .active > a:hover { + background: #2c2c2c; +} + +.navbar .navbar-inner .dropdown-menu li a, .navbar .navbar-inner .dropdown-menu li{ + color: #A30000; +} +.navbar .navbar-inner .dropdown-menu li a:hover{ + background: #eeeeee; + color: #c20000; +} + +/*=== dabapps bootstrap styles ====*/ + +html{ + width:100%; + background: none; +} + +body, .navbar .navbar-inner .container-fluid { + max-width: 1150px; + margin: 0 auto; +} + +body{ + background: url("../img/grid.png") repeat-x; + background-attachment: fixed; +} + +#content{ + margin: 0; +} + +/* sticky footer and footer */ +html, body { + height: 100%; +} +.wrapper { + min-height: 100%; + height: auto !important; + height: 100%; + margin: 0 auto -60px; +} + +.form-switcher { + margin-bottom: 0; +} + +.well { + -webkit-box-shadow: none; + -moz-box-shadow: none; + box-shadow: none; +} + +.well .form-actions { + padding-bottom: 0; + margin-bottom: 0; +} + +.well form { + margin-bottom: 0; +} + +.well form .help-block { + color: #999; +} + +.nav-tabs { + border: 0; +} + +.nav-tabs > li { + float: right; +} + +.nav-tabs li a { + margin-right: 0; +} + +.nav-tabs > .active > a { + background: #f5f5f5; +} + +.nav-tabs > .active > a:hover { + background: #f5f5f5; +} + +.tabbable.first-tab-active .tab-content +{ + border-top-right-radius: 0; +} + +#footer, #push { + height: 60px; /* .push must be the same height as .footer */ +} + +#footer{ + text-align: right; +} + +#footer p { + text-align: center; + color: gray; + border-top: 1px solid #DDD; + padding-top: 10px; +} + +#footer a { + color: gray; + font-weight: bold; +} + +#footer a:hover { + color: gray; +} + +.page-header { + border-bottom: none; + padding-bottom: 0px; + margin-bottom: 20px; +} + +/* custom general page styles */ +.hero-unit h2, .hero-unit h1{ + color: #A30000; +} + +body a, body a{ + color: #A30000; +} + +body a:hover{ + color: #c20000; +} + +#content a span{ + text-decoration: underline; + } + +.request-info { + clear:both; +} diff --git a/awx/lib/site-packages/rest_framework/static/rest_framework/css/bootstrap.min.css b/awx/lib/site-packages/rest_framework/static/rest_framework/css/bootstrap.min.css new file mode 100644 index 0000000000..373f4b430b --- /dev/null +++ b/awx/lib/site-packages/rest_framework/static/rest_framework/css/bootstrap.min.css @@ -0,0 +1,841 @@ +/*! + * Bootstrap v2.1.1 + * + * Copyright 2012 Twitter, Inc + * Licensed under the Apache License v2.0 + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Designed and built with all the love in the world @twitter by @mdo and @fat. 
+ */ +.clearfix{*zoom:1;}.clearfix:before,.clearfix:after{display:table;content:"";line-height:0;} +.clearfix:after{clear:both;} +.hide-text{font:0/0 a;color:transparent;text-shadow:none;background-color:transparent;border:0;} +.input-block-level{display:block;width:100%;min-height:30px;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;} +article,aside,details,figcaption,figure,footer,header,hgroup,nav,section{display:block;} +audio,canvas,video{display:inline-block;*display:inline;*zoom:1;} +audio:not([controls]){display:none;} +html{font-size:100%;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%;} +a:focus{outline:thin dotted #333;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px;} +a:hover,a:active{outline:0;} +sub,sup{position:relative;font-size:75%;line-height:0;vertical-align:baseline;} +sup{top:-0.5em;} +sub{bottom:-0.25em;} +img{max-width:100%;width:auto\9;height:auto;vertical-align:middle;border:0;-ms-interpolation-mode:bicubic;} +#map_canvas img{max-width:none;} +button,input,select,textarea{margin:0;font-size:100%;vertical-align:middle;} +button,input{*overflow:visible;line-height:normal;} +button::-moz-focus-inner,input::-moz-focus-inner{padding:0;border:0;} +button,input[type="button"],input[type="reset"],input[type="submit"]{cursor:pointer;-webkit-appearance:button;} +input[type="search"]{-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;-webkit-appearance:textfield;} +input[type="search"]::-webkit-search-decoration,input[type="search"]::-webkit-search-cancel-button{-webkit-appearance:none;} +textarea{overflow:auto;vertical-align:top;} +body{margin:0;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:14px;line-height:20px;color:#333333;background-color:#ffffff;} +a{color:#0088cc;text-decoration:none;} +a:hover{color:#005580;text-decoration:underline;} +.img-rounded{-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px;} +.img-polaroid{padding:4px;background-color:#fff;border:1px solid #ccc;border:1px solid rgba(0, 0, 0, 0.2);-webkit-box-shadow:0 1px 3px rgba(0, 0, 0, 0.1);-moz-box-shadow:0 1px 3px rgba(0, 0, 0, 0.1);box-shadow:0 1px 3px rgba(0, 0, 0, 0.1);} +.img-circle{-webkit-border-radius:500px;-moz-border-radius:500px;border-radius:500px;} +.row{margin-left:-20px;*zoom:1;}.row:before,.row:after{display:table;content:"";line-height:0;} +.row:after{clear:both;} +[class*="span"]{float:left;min-height:1px;margin-left:20px;} +.container,.navbar-static-top .container,.navbar-fixed-top .container,.navbar-fixed-bottom .container{width:940px;} +.span12{width:940px;} +.span11{width:860px;} +.span10{width:780px;} +.span9{width:700px;} +.span8{width:620px;} +.span7{width:540px;} +.span6{width:460px;} +.span5{width:380px;} +.span4{width:300px;} +.span3{width:220px;} +.span2{width:140px;} +.span1{width:60px;} +.offset12{margin-left:980px;} +.offset11{margin-left:900px;} +.offset10{margin-left:820px;} +.offset9{margin-left:740px;} +.offset8{margin-left:660px;} +.offset7{margin-left:580px;} +.offset6{margin-left:500px;} +.offset5{margin-left:420px;} +.offset4{margin-left:340px;} +.offset3{margin-left:260px;} +.offset2{margin-left:180px;} +.offset1{margin-left:100px;} +.row-fluid{width:100%;*zoom:1;}.row-fluid:before,.row-fluid:after{display:table;content:"";line-height:0;} +.row-fluid:after{clear:both;} +.row-fluid 
[class*="span"]{display:block;width:100%;min-height:30px;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;float:left;margin-left:2.127659574468085%;*margin-left:2.074468085106383%;} +.row-fluid [class*="span"]:first-child{margin-left:0;} +.row-fluid .span12{width:100%;*width:99.94680851063829%;} +.row-fluid .span11{width:91.48936170212765%;*width:91.43617021276594%;} +.row-fluid .span10{width:82.97872340425532%;*width:82.92553191489361%;} +.row-fluid .span9{width:74.46808510638297%;*width:74.41489361702126%;} +.row-fluid .span8{width:65.95744680851064%;*width:65.90425531914893%;} +.row-fluid .span7{width:57.44680851063829%;*width:57.39361702127659%;} +.row-fluid .span6{width:48.93617021276595%;*width:48.88297872340425%;} +.row-fluid .span5{width:40.42553191489362%;*width:40.37234042553192%;} +.row-fluid .span4{width:31.914893617021278%;*width:31.861702127659576%;} +.row-fluid .span3{width:23.404255319148934%;*width:23.351063829787233%;} +.row-fluid .span2{width:14.893617021276595%;*width:14.840425531914894%;} +.row-fluid .span1{width:6.382978723404255%;*width:6.329787234042553%;} +.row-fluid .offset12{margin-left:104.25531914893617%;*margin-left:104.14893617021275%;} +.row-fluid .offset12:first-child{margin-left:102.12765957446808%;*margin-left:102.02127659574467%;} +.row-fluid .offset11{margin-left:95.74468085106382%;*margin-left:95.6382978723404%;} +.row-fluid .offset11:first-child{margin-left:93.61702127659574%;*margin-left:93.51063829787232%;} +.row-fluid .offset10{margin-left:87.23404255319149%;*margin-left:87.12765957446807%;} +.row-fluid .offset10:first-child{margin-left:85.1063829787234%;*margin-left:84.99999999999999%;} +.row-fluid .offset9{margin-left:78.72340425531914%;*margin-left:78.61702127659572%;} +.row-fluid .offset9:first-child{margin-left:76.59574468085106%;*margin-left:76.48936170212764%;} +.row-fluid .offset8{margin-left:70.2127659574468%;*margin-left:70.10638297872339%;} +.row-fluid .offset8:first-child{margin-left:68.08510638297872%;*margin-left:67.9787234042553%;} +.row-fluid .offset7{margin-left:61.70212765957446%;*margin-left:61.59574468085106%;} +.row-fluid .offset7:first-child{margin-left:59.574468085106375%;*margin-left:59.46808510638297%;} +.row-fluid .offset6{margin-left:53.191489361702125%;*margin-left:53.085106382978715%;} +.row-fluid .offset6:first-child{margin-left:51.063829787234035%;*margin-left:50.95744680851063%;} +.row-fluid .offset5{margin-left:44.68085106382979%;*margin-left:44.57446808510638%;} +.row-fluid .offset5:first-child{margin-left:42.5531914893617%;*margin-left:42.4468085106383%;} +.row-fluid .offset4{margin-left:36.170212765957444%;*margin-left:36.06382978723405%;} +.row-fluid .offset4:first-child{margin-left:34.04255319148936%;*margin-left:33.93617021276596%;} +.row-fluid .offset3{margin-left:27.659574468085104%;*margin-left:27.5531914893617%;} +.row-fluid .offset3:first-child{margin-left:25.53191489361702%;*margin-left:25.425531914893618%;} +.row-fluid .offset2{margin-left:19.148936170212764%;*margin-left:19.04255319148936%;} +.row-fluid .offset2:first-child{margin-left:17.02127659574468%;*margin-left:16.914893617021278%;} +.row-fluid .offset1{margin-left:10.638297872340425%;*margin-left:10.53191489361702%;} +.row-fluid .offset1:first-child{margin-left:8.51063829787234%;*margin-left:8.404255319148938%;} +[class*="span"].hide,.row-fluid [class*="span"].hide{display:none;} +[class*="span"].pull-right,.row-fluid [class*="span"].pull-right{float:right;} 
+.container{margin-right:auto;margin-left:auto;*zoom:1;}.container:before,.container:after{display:table;content:"";line-height:0;} +.container:after{clear:both;} +.container-fluid{padding-right:20px;padding-left:20px;*zoom:1;}.container-fluid:before,.container-fluid:after{display:table;content:"";line-height:0;} +.container-fluid:after{clear:both;} +p{margin:0 0 10px;} +.lead{margin-bottom:20px;font-size:21px;font-weight:200;line-height:30px;} +small{font-size:85%;} +strong{font-weight:bold;} +em{font-style:italic;} +cite{font-style:normal;} +.muted{color:#999999;} +.text-warning{color:#c09853;} +.text-error{color:#b94a48;} +.text-info{color:#3a87ad;} +.text-success{color:#468847;} +h1,h2,h3,h4,h5,h6{margin:10px 0;font-family:inherit;font-weight:bold;line-height:1;color:inherit;text-rendering:optimizelegibility;}h1 small,h2 small,h3 small,h4 small,h5 small,h6 small{font-weight:normal;line-height:1;color:#999999;} +h1{font-size:36px;line-height:40px;} +h2{font-size:30px;line-height:40px;} +h3{font-size:24px;line-height:40px;} +h4{font-size:18px;line-height:20px;} +h5{font-size:14px;line-height:20px;} +h6{font-size:12px;line-height:20px;} +h1 small{font-size:24px;} +h2 small{font-size:18px;} +h3 small{font-size:14px;} +h4 small{font-size:14px;} +.page-header{padding-bottom:9px;margin:20px 0 30px;border-bottom:1px solid #eeeeee;} +ul,ol{padding:0;margin:0 0 10px 25px;} +ul ul,ul ol,ol ol,ol ul{margin-bottom:0;} +li{line-height:20px;} +ul.unstyled,ol.unstyled{margin-left:0;list-style:none;} +dl{margin-bottom:20px;} +dt,dd{line-height:20px;} +dt{font-weight:bold;} +dd{margin-left:10px;} +.dl-horizontal{*zoom:1;}.dl-horizontal:before,.dl-horizontal:after{display:table;content:"";line-height:0;} +.dl-horizontal:after{clear:both;} +.dl-horizontal dt{float:left;width:160px;clear:left;text-align:right;overflow:hidden;text-overflow:ellipsis;white-space:nowrap;} +.dl-horizontal dd{margin-left:180px;} +hr{margin:20px 0;border:0;border-top:1px solid #eeeeee;border-bottom:1px solid #ffffff;} +abbr[title]{cursor:help;border-bottom:1px dotted #999999;} +abbr.initialism{font-size:90%;text-transform:uppercase;} +blockquote{padding:0 0 0 15px;margin:0 0 20px;border-left:5px solid #eeeeee;}blockquote p{margin-bottom:0;font-size:16px;font-weight:300;line-height:25px;} +blockquote small{display:block;line-height:20px;color:#999999;}blockquote small:before{content:'\2014 \00A0';} +blockquote.pull-right{float:right;padding-right:15px;padding-left:0;border-right:5px solid #eeeeee;border-left:0;}blockquote.pull-right p,blockquote.pull-right small{text-align:right;} +blockquote.pull-right small:before{content:'';} +blockquote.pull-right small:after{content:'\00A0 \2014';} +q:before,q:after,blockquote:before,blockquote:after{content:"";} +address{display:block;margin-bottom:20px;font-style:normal;line-height:20px;} +code,pre{padding:0 3px 2px;font-family:Monaco,Menlo,Consolas,"Courier New",monospace;font-size:12px;color:#333333;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px;} +code{padding:2px 4px;color:#d14;background-color:#f7f7f9;border:1px solid #e1e1e8;} +pre{display:block;padding:9.5px;margin:0 0 10px;font-size:13px;line-height:20px;word-break:break-all;word-wrap:break-word;white-space:pre;white-space:pre-wrap;background-color:#f5f5f5;border:1px solid #ccc;border:1px solid rgba(0, 0, 0, 0.15);-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;}pre.prettyprint{margin-bottom:20px;} +pre code{padding:0;color:inherit;background-color:transparent;border:0;} 
+.pre-scrollable{max-height:340px;overflow-y:scroll;} +.label,.badge{font-size:11.844px;font-weight:bold;line-height:14px;color:#ffffff;vertical-align:baseline;white-space:nowrap;text-shadow:0 -1px 0 rgba(0, 0, 0, 0.25);background-color:#999999;} +.label{padding:1px 4px 2px;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px;} +.badge{padding:1px 9px 2px;-webkit-border-radius:9px;-moz-border-radius:9px;border-radius:9px;} +a.label:hover,a.badge:hover{color:#ffffff;text-decoration:none;cursor:pointer;} +.label-important,.badge-important{background-color:#b94a48;} +.label-important[href],.badge-important[href]{background-color:#953b39;} +.label-warning,.badge-warning{background-color:#f89406;} +.label-warning[href],.badge-warning[href]{background-color:#c67605;} +.label-success,.badge-success{background-color:#468847;} +.label-success[href],.badge-success[href]{background-color:#356635;} +.label-info,.badge-info{background-color:#3a87ad;} +.label-info[href],.badge-info[href]{background-color:#2d6987;} +.label-inverse,.badge-inverse{background-color:#333333;} +.label-inverse[href],.badge-inverse[href]{background-color:#1a1a1a;} +.btn .label,.btn .badge{position:relative;top:-1px;} +.btn-mini .label,.btn-mini .badge{top:0;} +table{max-width:100%;background-color:transparent;border-collapse:collapse;border-spacing:0;} +.table{width:100%;margin-bottom:20px;}.table th,.table td{padding:8px;line-height:20px;text-align:left;vertical-align:top;border-top:1px solid #dddddd;} +.table th{font-weight:bold;} +.table thead th{vertical-align:bottom;} +.table caption+thead tr:first-child th,.table caption+thead tr:first-child td,.table colgroup+thead tr:first-child th,.table colgroup+thead tr:first-child td,.table thead:first-child tr:first-child th,.table thead:first-child tr:first-child td{border-top:0;} +.table tbody+tbody{border-top:2px solid #dddddd;} +.table-condensed th,.table-condensed td{padding:4px 5px;} +.table-bordered{border:1px solid #dddddd;border-collapse:separate;*border-collapse:collapse;border-left:0;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;}.table-bordered th,.table-bordered td{border-left:1px solid #dddddd;} +.table-bordered caption+thead tr:first-child th,.table-bordered caption+tbody tr:first-child th,.table-bordered caption+tbody tr:first-child td,.table-bordered colgroup+thead tr:first-child th,.table-bordered colgroup+tbody tr:first-child th,.table-bordered colgroup+tbody tr:first-child td,.table-bordered thead:first-child tr:first-child th,.table-bordered tbody:first-child tr:first-child th,.table-bordered tbody:first-child tr:first-child td{border-top:0;} +.table-bordered thead:first-child tr:first-child th:first-child,.table-bordered tbody:first-child tr:first-child td:first-child{-webkit-border-top-left-radius:4px;border-top-left-radius:4px;-moz-border-radius-topleft:4px;} +.table-bordered thead:first-child tr:first-child th:last-child,.table-bordered tbody:first-child tr:first-child td:last-child{-webkit-border-top-right-radius:4px;border-top-right-radius:4px;-moz-border-radius-topright:4px;} +.table-bordered thead:last-child tr:last-child th:first-child,.table-bordered tbody:last-child tr:last-child td:first-child,.table-bordered tfoot:last-child tr:last-child td:first-child{-webkit-border-radius:0 0 0 4px;-moz-border-radius:0 0 0 4px;border-radius:0 0 0 4px;-webkit-border-bottom-left-radius:4px;border-bottom-left-radius:4px;-moz-border-radius-bottomleft:4px;} +.table-bordered thead:last-child tr:last-child 
th:last-child,.table-bordered tbody:last-child tr:last-child td:last-child,.table-bordered tfoot:last-child tr:last-child td:last-child{-webkit-border-bottom-right-radius:4px;border-bottom-right-radius:4px;-moz-border-radius-bottomright:4px;} +.table-bordered caption+thead tr:first-child th:first-child,.table-bordered caption+tbody tr:first-child td:first-child,.table-bordered colgroup+thead tr:first-child th:first-child,.table-bordered colgroup+tbody tr:first-child td:first-child{-webkit-border-top-left-radius:4px;border-top-left-radius:4px;-moz-border-radius-topleft:4px;} +.table-bordered caption+thead tr:first-child th:last-child,.table-bordered caption+tbody tr:first-child td:last-child,.table-bordered colgroup+thead tr:first-child th:last-child,.table-bordered colgroup+tbody tr:first-child td:last-child{-webkit-border-top-right-radius:4px;border-top-right-radius:4px;-moz-border-radius-topleft:4px;} +.table-striped tbody tr:nth-child(odd) td,.table-striped tbody tr:nth-child(odd) th{background-color:#f9f9f9;} +.table-hover tbody tr:hover td,.table-hover tbody tr:hover th{background-color:#f5f5f5;} +table [class*=span],.row-fluid table [class*=span]{display:table-cell;float:none;margin-left:0;} +.table .span1{float:none;width:44px;margin-left:0;} +.table .span2{float:none;width:124px;margin-left:0;} +.table .span3{float:none;width:204px;margin-left:0;} +.table .span4{float:none;width:284px;margin-left:0;} +.table .span5{float:none;width:364px;margin-left:0;} +.table .span6{float:none;width:444px;margin-left:0;} +.table .span7{float:none;width:524px;margin-left:0;} +.table .span8{float:none;width:604px;margin-left:0;} +.table .span9{float:none;width:684px;margin-left:0;} +.table .span10{float:none;width:764px;margin-left:0;} +.table .span11{float:none;width:844px;margin-left:0;} +.table .span12{float:none;width:924px;margin-left:0;} +.table .span13{float:none;width:1004px;margin-left:0;} +.table .span14{float:none;width:1084px;margin-left:0;} +.table .span15{float:none;width:1164px;margin-left:0;} +.table .span16{float:none;width:1244px;margin-left:0;} +.table .span17{float:none;width:1324px;margin-left:0;} +.table .span18{float:none;width:1404px;margin-left:0;} +.table .span19{float:none;width:1484px;margin-left:0;} +.table .span20{float:none;width:1564px;margin-left:0;} +.table .span21{float:none;width:1644px;margin-left:0;} +.table .span22{float:none;width:1724px;margin-left:0;} +.table .span23{float:none;width:1804px;margin-left:0;} +.table .span24{float:none;width:1884px;margin-left:0;} +.table tbody tr.success td{background-color:#dff0d8;} +.table tbody tr.error td{background-color:#f2dede;} +.table tbody tr.warning td{background-color:#fcf8e3;} +.table tbody tr.info td{background-color:#d9edf7;} +.table-hover tbody tr.success:hover td{background-color:#d0e9c6;} +.table-hover tbody tr.error:hover td{background-color:#ebcccc;} +.table-hover tbody tr.warning:hover td{background-color:#faf2cc;} +.table-hover tbody tr.info:hover td{background-color:#c4e3f3;} +form{margin:0 0 20px;} +fieldset{padding:0;margin:0;border:0;} +legend{display:block;width:100%;padding:0;margin-bottom:20px;font-size:21px;line-height:40px;color:#333333;border:0;border-bottom:1px solid #e5e5e5;}legend small{font-size:15px;color:#999999;} +label,input,button,select,textarea{font-size:14px;font-weight:normal;line-height:20px;} +input,button,select,textarea{font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;} +label{display:block;margin-bottom:5px;} 
+select,textarea,input[type="text"],input[type="password"],input[type="datetime"],input[type="datetime-local"],input[type="date"],input[type="month"],input[type="time"],input[type="week"],input[type="number"],input[type="email"],input[type="url"],input[type="search"],input[type="tel"],input[type="color"],.uneditable-input{display:inline-block;height:20px;padding:4px 6px;margin-bottom:9px;font-size:14px;line-height:20px;color:#555555;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px;} +input,textarea,.uneditable-input{width:206px;} +textarea{height:auto;} +textarea,input[type="text"],input[type="password"],input[type="datetime"],input[type="datetime-local"],input[type="date"],input[type="month"],input[type="time"],input[type="week"],input[type="number"],input[type="email"],input[type="url"],input[type="search"],input[type="tel"],input[type="color"],.uneditable-input{background-color:#ffffff;border:1px solid #cccccc;-webkit-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075);-moz-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075);box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075);-webkit-transition:border linear .2s, box-shadow linear .2s;-moz-transition:border linear .2s, box-shadow linear .2s;-o-transition:border linear .2s, box-shadow linear .2s;transition:border linear .2s, box-shadow linear .2s;}textarea:focus,input[type="text"]:focus,input[type="password"]:focus,input[type="datetime"]:focus,input[type="datetime-local"]:focus,input[type="date"]:focus,input[type="month"]:focus,input[type="time"]:focus,input[type="week"]:focus,input[type="number"]:focus,input[type="email"]:focus,input[type="url"]:focus,input[type="search"]:focus,input[type="tel"]:focus,input[type="color"]:focus,.uneditable-input:focus{border-color:rgba(82, 168, 236, 0.8);outline:0;outline:thin dotted \9;-webkit-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 8px rgba(82, 168, 236, 0.6);-moz-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 8px rgba(82, 168, 236, 0.6);box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 8px rgba(82, 168, 236, 0.6);} +input[type="radio"],input[type="checkbox"]{margin:4px 0 0;*margin-top:0;margin-top:1px \9;line-height:normal;cursor:pointer;} +input[type="file"],input[type="image"],input[type="submit"],input[type="reset"],input[type="button"],input[type="radio"],input[type="checkbox"]{width:auto;} +select,input[type="file"]{height:30px;*margin-top:4px;line-height:30px;} +select{width:220px;border:1px solid #cccccc;background-color:#ffffff;} +select[multiple],select[size]{height:auto;} +select:focus,input[type="file"]:focus,input[type="radio"]:focus,input[type="checkbox"]:focus{outline:thin dotted #333;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px;} +.uneditable-input,.uneditable-textarea{color:#999999;background-color:#fcfcfc;border-color:#cccccc;-webkit-box-shadow:inset 0 1px 2px rgba(0, 0, 0, 0.025);-moz-box-shadow:inset 0 1px 2px rgba(0, 0, 0, 0.025);box-shadow:inset 0 1px 2px rgba(0, 0, 0, 0.025);cursor:not-allowed;} +.uneditable-input{overflow:hidden;white-space:nowrap;} +.uneditable-textarea{width:auto;height:auto;} +input:-moz-placeholder,textarea:-moz-placeholder{color:#999999;} +input:-ms-input-placeholder,textarea:-ms-input-placeholder{color:#999999;} +input::-webkit-input-placeholder,textarea::-webkit-input-placeholder{color:#999999;} +.radio,.checkbox{min-height:18px;padding-left:18px;} +.radio input[type="radio"],.checkbox input[type="checkbox"]{float:left;margin-left:-18px;} 
+.controls>.radio:first-child,.controls>.checkbox:first-child{padding-top:5px;} +.radio.inline,.checkbox.inline{display:inline-block;padding-top:5px;margin-bottom:0;vertical-align:middle;} +.radio.inline+.radio.inline,.checkbox.inline+.checkbox.inline{margin-left:10px;} +.input-mini{width:60px;} +.input-small{width:90px;} +.input-medium{width:150px;} +.input-large{width:210px;} +.input-xlarge{width:270px;} +.input-xxlarge{width:530px;} +input[class*="span"],select[class*="span"],textarea[class*="span"],.uneditable-input[class*="span"],.row-fluid input[class*="span"],.row-fluid select[class*="span"],.row-fluid textarea[class*="span"],.row-fluid .uneditable-input[class*="span"]{float:none;margin-left:0;} +.input-append input[class*="span"],.input-append .uneditable-input[class*="span"],.input-prepend input[class*="span"],.input-prepend .uneditable-input[class*="span"],.row-fluid input[class*="span"],.row-fluid select[class*="span"],.row-fluid textarea[class*="span"],.row-fluid .uneditable-input[class*="span"],.row-fluid .input-prepend [class*="span"],.row-fluid .input-append [class*="span"]{display:inline-block;} +input,textarea,.uneditable-input{margin-left:0;} +.controls-row [class*="span"]+[class*="span"]{margin-left:20px;} +input.span12, textarea.span12, .uneditable-input.span12{width:926px;} +input.span11, textarea.span11, .uneditable-input.span11{width:846px;} +input.span10, textarea.span10, .uneditable-input.span10{width:766px;} +input.span9, textarea.span9, .uneditable-input.span9{width:686px;} +input.span8, textarea.span8, .uneditable-input.span8{width:606px;} +input.span7, textarea.span7, .uneditable-input.span7{width:526px;} +input.span6, textarea.span6, .uneditable-input.span6{width:446px;} +input.span5, textarea.span5, .uneditable-input.span5{width:366px;} +input.span4, textarea.span4, .uneditable-input.span4{width:286px;} +input.span3, textarea.span3, .uneditable-input.span3{width:206px;} +input.span2, textarea.span2, .uneditable-input.span2{width:126px;} +input.span1, textarea.span1, .uneditable-input.span1{width:46px;} +.controls-row{*zoom:1;}.controls-row:before,.controls-row:after{display:table;content:"";line-height:0;} +.controls-row:after{clear:both;} +.controls-row [class*="span"]{float:left;} +input[disabled],select[disabled],textarea[disabled],input[readonly],select[readonly],textarea[readonly]{cursor:not-allowed;background-color:#eeeeee;} +input[type="radio"][disabled],input[type="checkbox"][disabled],input[type="radio"][readonly],input[type="checkbox"][readonly]{background-color:transparent;} +.control-group.warning>label,.control-group.warning .help-block,.control-group.warning .help-inline{color:#c09853;} +.control-group.warning .checkbox,.control-group.warning .radio,.control-group.warning input,.control-group.warning select,.control-group.warning textarea{color:#c09853;} +.control-group.warning input,.control-group.warning select,.control-group.warning textarea{border-color:#c09853;-webkit-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075);-moz-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075);box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075);}.control-group.warning input:focus,.control-group.warning select:focus,.control-group.warning textarea:focus{border-color:#a47e3c;-webkit-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #dbc59e;-moz-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #dbc59e;box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #dbc59e;} +.control-group.warning .input-prepend .add-on,.control-group.warning .input-append 
.add-on{color:#c09853;background-color:#fcf8e3;border-color:#c09853;} +.control-group.error>label,.control-group.error .help-block,.control-group.error .help-inline{color:#b94a48;} +.control-group.error .checkbox,.control-group.error .radio,.control-group.error input,.control-group.error select,.control-group.error textarea{color:#b94a48;} +.control-group.error input,.control-group.error select,.control-group.error textarea{border-color:#b94a48;-webkit-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075);-moz-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075);box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075);}.control-group.error input:focus,.control-group.error select:focus,.control-group.error textarea:focus{border-color:#953b39;-webkit-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #d59392;-moz-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #d59392;box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #d59392;} +.control-group.error .input-prepend .add-on,.control-group.error .input-append .add-on{color:#b94a48;background-color:#f2dede;border-color:#b94a48;} +.control-group.success>label,.control-group.success .help-block,.control-group.success .help-inline{color:#468847;} +.control-group.success .checkbox,.control-group.success .radio,.control-group.success input,.control-group.success select,.control-group.success textarea{color:#468847;} +.control-group.success input,.control-group.success select,.control-group.success textarea{border-color:#468847;-webkit-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075);-moz-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075);box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075);}.control-group.success input:focus,.control-group.success select:focus,.control-group.success textarea:focus{border-color:#356635;-webkit-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #7aba7b;-moz-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #7aba7b;box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #7aba7b;} +.control-group.success .input-prepend .add-on,.control-group.success .input-append .add-on{color:#468847;background-color:#dff0d8;border-color:#468847;} +.control-group.info>label,.control-group.info .help-block,.control-group.info .help-inline{color:#3a87ad;} +.control-group.info .checkbox,.control-group.info .radio,.control-group.info input,.control-group.info select,.control-group.info textarea{color:#3a87ad;} +.control-group.info input,.control-group.info select,.control-group.info textarea{border-color:#3a87ad;-webkit-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075);-moz-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075);box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075);}.control-group.info input:focus,.control-group.info select:focus,.control-group.info textarea:focus{border-color:#2d6987;-webkit-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #7ab5d3;-moz-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #7ab5d3;box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #7ab5d3;} +.control-group.info .input-prepend .add-on,.control-group.info .input-append .add-on{color:#3a87ad;background-color:#d9edf7;border-color:#3a87ad;} +input:focus:required:invalid,textarea:focus:required:invalid,select:focus:required:invalid{color:#b94a48;border-color:#ee5f5b;}input:focus:required:invalid:focus,textarea:focus:required:invalid:focus,select:focus:required:invalid:focus{border-color:#e9322d;-webkit-box-shadow:0 0 6px #f8b9b7;-moz-box-shadow:0 0 6px #f8b9b7;box-shadow:0 0 6px #f8b9b7;} +.form-actions{padding:19px 20px 
20px;margin-top:20px;margin-bottom:20px;background-color:#f5f5f5;border-top:1px solid #e5e5e5;*zoom:1;}.form-actions:before,.form-actions:after{display:table;content:"";line-height:0;} +.form-actions:after{clear:both;} +.help-block,.help-inline{color:#595959;} +.help-block{display:block;margin-bottom:10px;} +.help-inline{display:inline-block;*display:inline;*zoom:1;vertical-align:middle;padding-left:5px;} +.input-append,.input-prepend{margin-bottom:5px;font-size:0;white-space:nowrap;}.input-append input,.input-prepend input,.input-append select,.input-prepend select,.input-append .uneditable-input,.input-prepend .uneditable-input{position:relative;margin-bottom:0;*margin-left:0;font-size:14px;vertical-align:top;-webkit-border-radius:0 3px 3px 0;-moz-border-radius:0 3px 3px 0;border-radius:0 3px 3px 0;}.input-append input:focus,.input-prepend input:focus,.input-append select:focus,.input-prepend select:focus,.input-append .uneditable-input:focus,.input-prepend .uneditable-input:focus{z-index:2;} +.input-append .add-on,.input-prepend .add-on{display:inline-block;width:auto;height:20px;min-width:16px;padding:4px 5px;font-size:14px;font-weight:normal;line-height:20px;text-align:center;text-shadow:0 1px 0 #ffffff;background-color:#eeeeee;border:1px solid #ccc;} +.input-append .add-on,.input-prepend .add-on,.input-append .btn,.input-prepend .btn{vertical-align:top;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0;} +.input-append .active,.input-prepend .active{background-color:#a9dba9;border-color:#46a546;} +.input-prepend .add-on,.input-prepend .btn{margin-right:-1px;} +.input-prepend .add-on:first-child,.input-prepend .btn:first-child{-webkit-border-radius:3px 0 0 3px;-moz-border-radius:3px 0 0 3px;border-radius:3px 0 0 3px;} +.input-append input,.input-append select,.input-append .uneditable-input{-webkit-border-radius:3px 0 0 3px;-moz-border-radius:3px 0 0 3px;border-radius:3px 0 0 3px;} +.input-append .add-on,.input-append .btn{margin-left:-1px;} +.input-append .add-on:last-child,.input-append .btn:last-child{-webkit-border-radius:0 3px 3px 0;-moz-border-radius:0 3px 3px 0;border-radius:0 3px 3px 0;} +.input-prepend.input-append input,.input-prepend.input-append select,.input-prepend.input-append .uneditable-input{-webkit-border-radius:0;-moz-border-radius:0;border-radius:0;} +.input-prepend.input-append .add-on:first-child,.input-prepend.input-append .btn:first-child{margin-right:-1px;-webkit-border-radius:3px 0 0 3px;-moz-border-radius:3px 0 0 3px;border-radius:3px 0 0 3px;} +.input-prepend.input-append .add-on:last-child,.input-prepend.input-append .btn:last-child{margin-left:-1px;-webkit-border-radius:0 3px 3px 0;-moz-border-radius:0 3px 3px 0;border-radius:0 3px 3px 0;} +input.search-query{padding-right:14px;padding-right:4px \9;padding-left:14px;padding-left:4px \9;margin-bottom:0;-webkit-border-radius:15px;-moz-border-radius:15px;border-radius:15px;} +.form-search .input-append .search-query,.form-search .input-prepend .search-query{-webkit-border-radius:0;-moz-border-radius:0;border-radius:0;} +.form-search .input-append .search-query{-webkit-border-radius:14px 0 0 14px;-moz-border-radius:14px 0 0 14px;border-radius:14px 0 0 14px;} +.form-search .input-append .btn{-webkit-border-radius:0 14px 14px 0;-moz-border-radius:0 14px 14px 0;border-radius:0 14px 14px 0;} +.form-search .input-prepend .search-query{-webkit-border-radius:0 14px 14px 0;-moz-border-radius:0 14px 14px 0;border-radius:0 14px 14px 0;} +.form-search .input-prepend .btn{-webkit-border-radius:14px 0 0 
14px;-moz-border-radius:14px 0 0 14px;border-radius:14px 0 0 14px;} +.form-search input,.form-inline input,.form-horizontal input,.form-search textarea,.form-inline textarea,.form-horizontal textarea,.form-search select,.form-inline select,.form-horizontal select,.form-search .help-inline,.form-inline .help-inline,.form-horizontal .help-inline,.form-search .uneditable-input,.form-inline .uneditable-input,.form-horizontal .uneditable-input,.form-search .input-prepend,.form-inline .input-prepend,.form-horizontal .input-prepend,.form-search .input-append,.form-inline .input-append,.form-horizontal .input-append{display:inline-block;*display:inline;*zoom:1;margin-bottom:0;vertical-align:middle;} +.form-search .hide,.form-inline .hide,.form-horizontal .hide{display:none;} +.form-search label,.form-inline label,.form-search .btn-group,.form-inline .btn-group{display:inline-block;} +.form-search .input-append,.form-inline .input-append,.form-search .input-prepend,.form-inline .input-prepend{margin-bottom:0;} +.form-search .radio,.form-search .checkbox,.form-inline .radio,.form-inline .checkbox{padding-left:0;margin-bottom:0;vertical-align:middle;} +.form-search .radio input[type="radio"],.form-search .checkbox input[type="checkbox"],.form-inline .radio input[type="radio"],.form-inline .checkbox input[type="checkbox"]{float:left;margin-right:3px;margin-left:0;} +.control-group{margin-bottom:10px;} +legend+.control-group{margin-top:20px;-webkit-margin-top-collapse:separate;} +.form-horizontal .control-group{margin-bottom:20px;*zoom:1;}.form-horizontal .control-group:before,.form-horizontal .control-group:after{display:table;content:"";line-height:0;} +.form-horizontal .control-group:after{clear:both;} +.form-horizontal .control-label{float:left;width:160px;padding-top:5px;text-align:right;} +.form-horizontal .controls{*display:inline-block;*padding-left:20px;margin-left:180px;*margin-left:0;}.form-horizontal .controls:first-child{*padding-left:180px;} +.form-horizontal .help-block{margin-bottom:0;} +.form-horizontal input+.help-block,.form-horizontal select+.help-block,.form-horizontal textarea+.help-block{margin-top:10px;} +.form-horizontal .form-actions{padding-left:180px;} +.btn{display:inline-block;*display:inline;*zoom:1;padding:4px 14px;margin-bottom:0;font-size:14px;line-height:20px;*line-height:20px;text-align:center;vertical-align:middle;cursor:pointer;color:#333333;text-shadow:0 1px 1px rgba(255, 255, 255, 0.75);background-color:#f5f5f5;background-image:-moz-linear-gradient(top, #ffffff, #e6e6e6);background-image:-webkit-gradient(linear, 0 0, 0 100%, from(#ffffff), to(#e6e6e6));background-image:-webkit-linear-gradient(top, #ffffff, #e6e6e6);background-image:-o-linear-gradient(top, #ffffff, #e6e6e6);background-image:linear-gradient(to bottom, #ffffff, #e6e6e6);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffffff', endColorstr='#ffe6e6e6', GradientType=0);border-color:#e6e6e6 #e6e6e6 #bfbfbf;border-color:rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25);*background-color:#e6e6e6;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);border:1px solid #bbbbbb;*border:0;border-bottom-color:#a2a2a2;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;*margin-left:.3em;-webkit-box-shadow:inset 0 1px 0 rgba(255, 255, 255, 0.2), 0 1px 2px rgba(0, 0, 0, 0.05);-moz-box-shadow:inset 0 1px 0 rgba(255, 255, 255, 0.2), 0 1px 2px rgba(0, 0, 0, 0.05);box-shadow:inset 0 1px 0 rgba(255, 255, 255, 0.2), 0 1px 2px rgba(0, 0, 0, 
0.05);}.btn:hover,.btn:active,.btn.active,.btn.disabled,.btn[disabled]{color:#333333;background-color:#e6e6e6;*background-color:#d9d9d9;} +.btn:active,.btn.active{background-color:#cccccc \9;} +.btn:first-child{*margin-left:0;} +.btn:hover{color:#333333;text-decoration:none;background-color:#e6e6e6;*background-color:#d9d9d9;background-position:0 -15px;-webkit-transition:background-position 0.1s linear;-moz-transition:background-position 0.1s linear;-o-transition:background-position 0.1s linear;transition:background-position 0.1s linear;} +.btn:focus{outline:thin dotted #333;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px;} +.btn.active,.btn:active{background-color:#e6e6e6;background-color:#d9d9d9 \9;background-image:none;outline:0;-webkit-box-shadow:inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05);-moz-box-shadow:inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05);box-shadow:inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05);} +.btn.disabled,.btn[disabled]{cursor:default;background-color:#e6e6e6;background-image:none;opacity:0.65;filter:alpha(opacity=65);-webkit-box-shadow:none;-moz-box-shadow:none;box-shadow:none;} +.btn-large{padding:9px 14px;font-size:16px;line-height:normal;-webkit-border-radius:5px;-moz-border-radius:5px;border-radius:5px;} +.btn-large [class^="icon-"]{margin-top:2px;} +.btn-small{padding:3px 9px;font-size:12px;line-height:18px;} +.btn-small [class^="icon-"]{margin-top:0;} +.btn-mini{padding:2px 6px;font-size:11px;line-height:17px;} +.btn-block{display:block;width:100%;padding-left:0;padding-right:0;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;} +.btn-block+.btn-block{margin-top:5px;} +input[type="submit"].btn-block,input[type="reset"].btn-block,input[type="button"].btn-block{width:100%;} +.btn-primary.active,.btn-warning.active,.btn-danger.active,.btn-success.active,.btn-info.active,.btn-inverse.active{color:rgba(255, 255, 255, 0.75);} +.btn{border-color:#c5c5c5;border-color:rgba(0, 0, 0, 0.15) rgba(0, 0, 0, 0.15) rgba(0, 0, 0, 0.25);} +.btn-primary{color:#ffffff;text-shadow:0 -1px 0 rgba(0, 0, 0, 0.25);background-color:#006dcc;background-image:-moz-linear-gradient(top, #0088cc, #0044cc);background-image:-webkit-gradient(linear, 0 0, 0 100%, from(#0088cc), to(#0044cc));background-image:-webkit-linear-gradient(top, #0088cc, #0044cc);background-image:-o-linear-gradient(top, #0088cc, #0044cc);background-image:linear-gradient(to bottom, #0088cc, #0044cc);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff0088cc', endColorstr='#ff0044cc', GradientType=0);border-color:#0044cc #0044cc #002a80;border-color:rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25);*background-color:#0044cc;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);}.btn-primary:hover,.btn-primary:active,.btn-primary.active,.btn-primary.disabled,.btn-primary[disabled]{color:#ffffff;background-color:#0044cc;*background-color:#003bb3;} +.btn-primary:active,.btn-primary.active{background-color:#003399 \9;} +.btn-warning{color:#ffffff;text-shadow:0 -1px 0 rgba(0, 0, 0, 0.25);background-color:#faa732;background-image:-moz-linear-gradient(top, #fbb450, #f89406);background-image:-webkit-gradient(linear, 0 0, 0 100%, from(#fbb450), to(#f89406));background-image:-webkit-linear-gradient(top, #fbb450, #f89406);background-image:-o-linear-gradient(top, #fbb450, #f89406);background-image:linear-gradient(to bottom, #fbb450, 
#f89406);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fffbb450', endColorstr='#fff89406', GradientType=0);border-color:#f89406 #f89406 #ad6704;border-color:rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25);*background-color:#f89406;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);}.btn-warning:hover,.btn-warning:active,.btn-warning.active,.btn-warning.disabled,.btn-warning[disabled]{color:#ffffff;background-color:#f89406;*background-color:#df8505;} +.btn-warning:active,.btn-warning.active{background-color:#c67605 \9;} +.btn-danger{color:#ffffff;text-shadow:0 -1px 0 rgba(0, 0, 0, 0.25);background-color:#da4f49;background-image:-moz-linear-gradient(top, #ee5f5b, #bd362f);background-image:-webkit-gradient(linear, 0 0, 0 100%, from(#ee5f5b), to(#bd362f));background-image:-webkit-linear-gradient(top, #ee5f5b, #bd362f);background-image:-o-linear-gradient(top, #ee5f5b, #bd362f);background-image:linear-gradient(to bottom, #ee5f5b, #bd362f);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffee5f5b', endColorstr='#ffbd362f', GradientType=0);border-color:#bd362f #bd362f #802420;border-color:rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25);*background-color:#bd362f;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);}.btn-danger:hover,.btn-danger:active,.btn-danger.active,.btn-danger.disabled,.btn-danger[disabled]{color:#ffffff;background-color:#bd362f;*background-color:#a9302a;} +.btn-danger:active,.btn-danger.active{background-color:#942a25 \9;} +.btn-success{color:#ffffff;text-shadow:0 -1px 0 rgba(0, 0, 0, 0.25);background-color:#5bb75b;background-image:-moz-linear-gradient(top, #62c462, #51a351);background-image:-webkit-gradient(linear, 0 0, 0 100%, from(#62c462), to(#51a351));background-image:-webkit-linear-gradient(top, #62c462, #51a351);background-image:-o-linear-gradient(top, #62c462, #51a351);background-image:linear-gradient(to bottom, #62c462, #51a351);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff62c462', endColorstr='#ff51a351', GradientType=0);border-color:#51a351 #51a351 #387038;border-color:rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25);*background-color:#51a351;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);}.btn-success:hover,.btn-success:active,.btn-success.active,.btn-success.disabled,.btn-success[disabled]{color:#ffffff;background-color:#51a351;*background-color:#499249;} +.btn-success:active,.btn-success.active{background-color:#408140 \9;} +.btn-info{color:#ffffff;text-shadow:0 -1px 0 rgba(0, 0, 0, 0.25);background-color:#49afcd;background-image:-moz-linear-gradient(top, #5bc0de, #2f96b4);background-image:-webkit-gradient(linear, 0 0, 0 100%, from(#5bc0de), to(#2f96b4));background-image:-webkit-linear-gradient(top, #5bc0de, #2f96b4);background-image:-o-linear-gradient(top, #5bc0de, #2f96b4);background-image:linear-gradient(to bottom, #5bc0de, #2f96b4);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5bc0de', endColorstr='#ff2f96b4', GradientType=0);border-color:#2f96b4 #2f96b4 #1f6377;border-color:rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25);*background-color:#2f96b4;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);}.btn-info:hover,.btn-info:active,.btn-info.active,.btn-info.disabled,.btn-info[disabled]{color:#ffffff;background-color:#2f96b4;*background-color:#2a85a0;} 
+.btn-info:active,.btn-info.active{background-color:#24748c \9;} +.btn-inverse{color:#ffffff;text-shadow:0 -1px 0 rgba(0, 0, 0, 0.25);background-color:#363636;background-image:-moz-linear-gradient(top, #444444, #222222);background-image:-webkit-gradient(linear, 0 0, 0 100%, from(#444444), to(#222222));background-image:-webkit-linear-gradient(top, #444444, #222222);background-image:-o-linear-gradient(top, #444444, #222222);background-image:linear-gradient(to bottom, #444444, #222222);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff444444', endColorstr='#ff222222', GradientType=0);border-color:#222222 #222222 #000000;border-color:rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25);*background-color:#222222;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);}.btn-inverse:hover,.btn-inverse:active,.btn-inverse.active,.btn-inverse.disabled,.btn-inverse[disabled]{color:#ffffff;background-color:#222222;*background-color:#151515;} +.btn-inverse:active,.btn-inverse.active{background-color:#080808 \9;} +button.btn,input[type="submit"].btn{*padding-top:3px;*padding-bottom:3px;}button.btn::-moz-focus-inner,input[type="submit"].btn::-moz-focus-inner{padding:0;border:0;} +button.btn.btn-large,input[type="submit"].btn.btn-large{*padding-top:7px;*padding-bottom:7px;} +button.btn.btn-small,input[type="submit"].btn.btn-small{*padding-top:3px;*padding-bottom:3px;} +button.btn.btn-mini,input[type="submit"].btn.btn-mini{*padding-top:1px;*padding-bottom:1px;} +.btn-link,.btn-link:active,.btn-link[disabled]{background-color:transparent;background-image:none;-webkit-box-shadow:none;-moz-box-shadow:none;box-shadow:none;} +.btn-link{border-color:transparent;cursor:pointer;color:#0088cc;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0;} +.btn-link:hover{color:#005580;text-decoration:underline;background-color:transparent;} +.btn-link[disabled]:hover{color:#333333;text-decoration:none;} +[class^="icon-"],[class*=" icon-"]{display:inline-block;width:14px;height:14px;*margin-right:.3em;line-height:14px;vertical-align:text-top;background-image:url("../img/glyphicons-halflings.png");background-position:14px 14px;background-repeat:no-repeat;margin-top:1px;} +.icon-white,.nav-tabs>.active>a>[class^="icon-"],.nav-tabs>.active>a>[class*=" icon-"],.nav-pills>.active>a>[class^="icon-"],.nav-pills>.active>a>[class*=" icon-"],.nav-list>.active>a>[class^="icon-"],.nav-list>.active>a>[class*=" icon-"],.navbar-inverse .nav>.active>a>[class^="icon-"],.navbar-inverse .nav>.active>a>[class*=" icon-"],.dropdown-menu>li>a:hover>[class^="icon-"],.dropdown-menu>li>a:hover>[class*=" icon-"],.dropdown-menu>.active>a>[class^="icon-"],.dropdown-menu>.active>a>[class*=" icon-"]{background-image:url("../img/glyphicons-halflings-white.png");} +.icon-glass{background-position:0 0;} +.icon-music{background-position:-24px 0;} +.icon-search{background-position:-48px 0;} +.icon-envelope{background-position:-72px 0;} +.icon-heart{background-position:-96px 0;} +.icon-star{background-position:-120px 0;} +.icon-star-empty{background-position:-144px 0;} +.icon-user{background-position:-168px 0;} +.icon-film{background-position:-192px 0;} +.icon-th-large{background-position:-216px 0;} +.icon-th{background-position:-240px 0;} +.icon-th-list{background-position:-264px 0;} +.icon-ok{background-position:-288px 0;} +.icon-remove{background-position:-312px 0;} +.icon-zoom-in{background-position:-336px 0;} +.icon-zoom-out{background-position:-360px 0;} 
+.icon-off{background-position:-384px 0;} +.icon-signal{background-position:-408px 0;} +.icon-cog{background-position:-432px 0;} +.icon-trash{background-position:-456px 0;} +.icon-home{background-position:0 -24px;} +.icon-file{background-position:-24px -24px;} +.icon-time{background-position:-48px -24px;} +.icon-road{background-position:-72px -24px;} +.icon-download-alt{background-position:-96px -24px;} +.icon-download{background-position:-120px -24px;} +.icon-upload{background-position:-144px -24px;} +.icon-inbox{background-position:-168px -24px;} +.icon-play-circle{background-position:-192px -24px;} +.icon-repeat{background-position:-216px -24px;} +.icon-refresh{background-position:-240px -24px;} +.icon-list-alt{background-position:-264px -24px;} +.icon-lock{background-position:-287px -24px;} +.icon-flag{background-position:-312px -24px;} +.icon-headphones{background-position:-336px -24px;} +.icon-volume-off{background-position:-360px -24px;} +.icon-volume-down{background-position:-384px -24px;} +.icon-volume-up{background-position:-408px -24px;} +.icon-qrcode{background-position:-432px -24px;} +.icon-barcode{background-position:-456px -24px;} +.icon-tag{background-position:0 -48px;} +.icon-tags{background-position:-25px -48px;} +.icon-book{background-position:-48px -48px;} +.icon-bookmark{background-position:-72px -48px;} +.icon-print{background-position:-96px -48px;} +.icon-camera{background-position:-120px -48px;} +.icon-font{background-position:-144px -48px;} +.icon-bold{background-position:-167px -48px;} +.icon-italic{background-position:-192px -48px;} +.icon-text-height{background-position:-216px -48px;} +.icon-text-width{background-position:-240px -48px;} +.icon-align-left{background-position:-264px -48px;} +.icon-align-center{background-position:-288px -48px;} +.icon-align-right{background-position:-312px -48px;} +.icon-align-justify{background-position:-336px -48px;} +.icon-list{background-position:-360px -48px;} +.icon-indent-left{background-position:-384px -48px;} +.icon-indent-right{background-position:-408px -48px;} +.icon-facetime-video{background-position:-432px -48px;} +.icon-picture{background-position:-456px -48px;} +.icon-pencil{background-position:0 -72px;} +.icon-map-marker{background-position:-24px -72px;} +.icon-adjust{background-position:-48px -72px;} +.icon-tint{background-position:-72px -72px;} +.icon-edit{background-position:-96px -72px;} +.icon-share{background-position:-120px -72px;} +.icon-check{background-position:-144px -72px;} +.icon-move{background-position:-168px -72px;} +.icon-step-backward{background-position:-192px -72px;} +.icon-fast-backward{background-position:-216px -72px;} +.icon-backward{background-position:-240px -72px;} +.icon-play{background-position:-264px -72px;} +.icon-pause{background-position:-288px -72px;} +.icon-stop{background-position:-312px -72px;} +.icon-forward{background-position:-336px -72px;} +.icon-fast-forward{background-position:-360px -72px;} +.icon-step-forward{background-position:-384px -72px;} +.icon-eject{background-position:-408px -72px;} +.icon-chevron-left{background-position:-432px -72px;} +.icon-chevron-right{background-position:-456px -72px;} +.icon-plus-sign{background-position:0 -96px;} +.icon-minus-sign{background-position:-24px -96px;} +.icon-remove-sign{background-position:-48px -96px;} +.icon-ok-sign{background-position:-72px -96px;} +.icon-question-sign{background-position:-96px -96px;} +.icon-info-sign{background-position:-120px -96px;} +.icon-screenshot{background-position:-144px -96px;} 
+.icon-remove-circle{background-position:-168px -96px;} +.icon-ok-circle{background-position:-192px -96px;} +.icon-ban-circle{background-position:-216px -96px;} +.icon-arrow-left{background-position:-240px -96px;} +.icon-arrow-right{background-position:-264px -96px;} +.icon-arrow-up{background-position:-289px -96px;} +.icon-arrow-down{background-position:-312px -96px;} +.icon-share-alt{background-position:-336px -96px;} +.icon-resize-full{background-position:-360px -96px;} +.icon-resize-small{background-position:-384px -96px;} +.icon-plus{background-position:-408px -96px;} +.icon-minus{background-position:-433px -96px;} +.icon-asterisk{background-position:-456px -96px;} +.icon-exclamation-sign{background-position:0 -120px;} +.icon-gift{background-position:-24px -120px;} +.icon-leaf{background-position:-48px -120px;} +.icon-fire{background-position:-72px -120px;} +.icon-eye-open{background-position:-96px -120px;} +.icon-eye-close{background-position:-120px -120px;} +.icon-warning-sign{background-position:-144px -120px;} +.icon-plane{background-position:-168px -120px;} +.icon-calendar{background-position:-192px -120px;} +.icon-random{background-position:-216px -120px;width:16px;} +.icon-comment{background-position:-240px -120px;} +.icon-magnet{background-position:-264px -120px;} +.icon-chevron-up{background-position:-288px -120px;} +.icon-chevron-down{background-position:-313px -119px;} +.icon-retweet{background-position:-336px -120px;} +.icon-shopping-cart{background-position:-360px -120px;} +.icon-folder-close{background-position:-384px -120px;} +.icon-folder-open{background-position:-408px -120px;width:16px;} +.icon-resize-vertical{background-position:-432px -119px;} +.icon-resize-horizontal{background-position:-456px -118px;} +.icon-hdd{background-position:0 -144px;} +.icon-bullhorn{background-position:-24px -144px;} +.icon-bell{background-position:-48px -144px;} +.icon-certificate{background-position:-72px -144px;} +.icon-thumbs-up{background-position:-96px -144px;} +.icon-thumbs-down{background-position:-120px -144px;} +.icon-hand-right{background-position:-144px -144px;} +.icon-hand-left{background-position:-168px -144px;} +.icon-hand-up{background-position:-192px -144px;} +.icon-hand-down{background-position:-216px -144px;} +.icon-circle-arrow-right{background-position:-240px -144px;} +.icon-circle-arrow-left{background-position:-264px -144px;} +.icon-circle-arrow-up{background-position:-288px -144px;} +.icon-circle-arrow-down{background-position:-312px -144px;} +.icon-globe{background-position:-336px -144px;} +.icon-wrench{background-position:-360px -144px;} +.icon-tasks{background-position:-384px -144px;} +.icon-filter{background-position:-408px -144px;} +.icon-briefcase{background-position:-432px -144px;} +.icon-fullscreen{background-position:-456px -144px;} +.btn-group{position:relative;font-size:0;vertical-align:middle;white-space:nowrap;*margin-left:.3em;}.btn-group:first-child{*margin-left:0;} +.btn-group+.btn-group{margin-left:5px;} +.btn-toolbar{font-size:0;margin-top:10px;margin-bottom:10px;}.btn-toolbar .btn-group{display:inline-block;*display:inline;*zoom:1;} +.btn-toolbar .btn+.btn,.btn-toolbar .btn-group+.btn,.btn-toolbar .btn+.btn-group{margin-left:5px;} +.btn-group>.btn{position:relative;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0;} +.btn-group>.btn+.btn{margin-left:-1px;} +.btn-group>.btn,.btn-group>.dropdown-menu{font-size:14px;} +.btn-group>.btn-mini{font-size:11px;} +.btn-group>.btn-small{font-size:12px;} +.btn-group>.btn-large{font-size:16px;} 
+.btn-group>.btn:first-child{margin-left:0;-webkit-border-top-left-radius:4px;-moz-border-radius-topleft:4px;border-top-left-radius:4px;-webkit-border-bottom-left-radius:4px;-moz-border-radius-bottomleft:4px;border-bottom-left-radius:4px;} +.btn-group>.btn:last-child,.btn-group>.dropdown-toggle{-webkit-border-top-right-radius:4px;-moz-border-radius-topright:4px;border-top-right-radius:4px;-webkit-border-bottom-right-radius:4px;-moz-border-radius-bottomright:4px;border-bottom-right-radius:4px;} +.btn-group>.btn.large:first-child{margin-left:0;-webkit-border-top-left-radius:6px;-moz-border-radius-topleft:6px;border-top-left-radius:6px;-webkit-border-bottom-left-radius:6px;-moz-border-radius-bottomleft:6px;border-bottom-left-radius:6px;} +.btn-group>.btn.large:last-child,.btn-group>.large.dropdown-toggle{-webkit-border-top-right-radius:6px;-moz-border-radius-topright:6px;border-top-right-radius:6px;-webkit-border-bottom-right-radius:6px;-moz-border-radius-bottomright:6px;border-bottom-right-radius:6px;} +.btn-group>.btn:hover,.btn-group>.btn:focus,.btn-group>.btn:active,.btn-group>.btn.active{z-index:2;} +.btn-group .dropdown-toggle:active,.btn-group.open .dropdown-toggle{outline:0;} +.btn-group>.btn+.dropdown-toggle{padding-left:8px;padding-right:8px;-webkit-box-shadow:inset 1px 0 0 rgba(255, 255, 255, 0.125), inset 0 1px 0 rgba(255, 255, 255, 0.2), 0 1px 2px rgba(0, 0, 0, 0.05);-moz-box-shadow:inset 1px 0 0 rgba(255, 255, 255, 0.125), inset 0 1px 0 rgba(255, 255, 255, 0.2), 0 1px 2px rgba(0, 0, 0, 0.05);box-shadow:inset 1px 0 0 rgba(255, 255, 255, 0.125), inset 0 1px 0 rgba(255, 255, 255, 0.2), 0 1px 2px rgba(0, 0, 0, 0.05);*padding-top:5px;*padding-bottom:5px;} +.btn-group>.btn-mini+.dropdown-toggle{padding-left:5px;padding-right:5px;*padding-top:2px;*padding-bottom:2px;} +.btn-group>.btn-small+.dropdown-toggle{*padding-top:5px;*padding-bottom:4px;} +.btn-group>.btn-large+.dropdown-toggle{padding-left:12px;padding-right:12px;*padding-top:7px;*padding-bottom:7px;} +.btn-group.open .dropdown-toggle{background-image:none;-webkit-box-shadow:inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05);-moz-box-shadow:inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05);box-shadow:inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05);} +.btn-group.open .btn.dropdown-toggle{background-color:#e6e6e6;} +.btn-group.open .btn-primary.dropdown-toggle{background-color:#0044cc;} +.btn-group.open .btn-warning.dropdown-toggle{background-color:#f89406;} +.btn-group.open .btn-danger.dropdown-toggle{background-color:#bd362f;} +.btn-group.open .btn-success.dropdown-toggle{background-color:#51a351;} +.btn-group.open .btn-info.dropdown-toggle{background-color:#2f96b4;} +.btn-group.open .btn-inverse.dropdown-toggle{background-color:#222222;} +.btn .caret{margin-top:8px;margin-left:0;} +.btn-mini .caret,.btn-small .caret,.btn-large .caret{margin-top:6px;} +.btn-large .caret{border-left-width:5px;border-right-width:5px;border-top-width:5px;} +.dropup .btn-large .caret{border-bottom:5px solid #000000;border-top:0;} +.btn-primary .caret,.btn-warning .caret,.btn-danger .caret,.btn-info .caret,.btn-success .caret,.btn-inverse .caret{border-top-color:#ffffff;border-bottom-color:#ffffff;} +.btn-group-vertical{display:inline-block;*display:inline;*zoom:1;} +.btn-group-vertical .btn{display:block;float:none;width:100%;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0;} +.btn-group-vertical .btn+.btn{margin-left:0;margin-top:-1px;} +.btn-group-vertical 
.btn:first-child{-webkit-border-radius:4px 4px 0 0;-moz-border-radius:4px 4px 0 0;border-radius:4px 4px 0 0;} +.btn-group-vertical .btn:last-child{-webkit-border-radius:0 0 4px 4px;-moz-border-radius:0 0 4px 4px;border-radius:0 0 4px 4px;} +.btn-group-vertical .btn-large:first-child{-webkit-border-radius:6px 6px 0 0;-moz-border-radius:6px 6px 0 0;border-radius:6px 6px 0 0;} +.btn-group-vertical .btn-large:last-child{-webkit-border-radius:0 0 6px 6px;-moz-border-radius:0 0 6px 6px;border-radius:0 0 6px 6px;} +.nav{margin-left:0;margin-bottom:20px;list-style:none;} +.nav>li>a{display:block;} +.nav>li>a:hover{text-decoration:none;background-color:#eeeeee;} +.nav>.pull-right{float:right;} +.nav-header{display:block;padding:3px 15px;font-size:11px;font-weight:bold;line-height:20px;color:#999999;text-shadow:0 1px 0 rgba(255, 255, 255, 0.5);text-transform:uppercase;} +.nav li+.nav-header{margin-top:9px;} +.nav-list{padding-left:15px;padding-right:15px;margin-bottom:0;} +.nav-list>li>a,.nav-list .nav-header{margin-left:-15px;margin-right:-15px;text-shadow:0 1px 0 rgba(255, 255, 255, 0.5);} +.nav-list>li>a{padding:3px 15px;} +.nav-list>.active>a,.nav-list>.active>a:hover{color:#ffffff;text-shadow:0 -1px 0 rgba(0, 0, 0, 0.2);background-color:#0088cc;} +.nav-list [class^="icon-"]{margin-right:2px;} +.nav-list .divider{*width:100%;height:1px;margin:9px 1px;*margin:-5px 0 5px;overflow:hidden;background-color:#e5e5e5;border-bottom:1px solid #ffffff;} +.nav-tabs,.nav-pills{*zoom:1;}.nav-tabs:before,.nav-pills:before,.nav-tabs:after,.nav-pills:after{display:table;content:"";line-height:0;} +.nav-tabs:after,.nav-pills:after{clear:both;} +.nav-tabs>li,.nav-pills>li{float:left;} +.nav-tabs>li>a,.nav-pills>li>a{padding-right:12px;padding-left:12px;margin-right:2px;line-height:14px;} +.nav-tabs{border-bottom:1px solid #ddd;} +.nav-tabs>li{margin-bottom:-1px;} +.nav-tabs>li>a{padding-top:8px;padding-bottom:8px;line-height:20px;border:1px solid transparent;-webkit-border-radius:4px 4px 0 0;-moz-border-radius:4px 4px 0 0;border-radius:4px 4px 0 0;}.nav-tabs>li>a:hover{border-color:#eeeeee #eeeeee #dddddd;} +.nav-tabs>.active>a,.nav-tabs>.active>a:hover{color:#555555;background-color:#ffffff;border:1px solid #ddd;border-bottom-color:transparent;cursor:default;} +.nav-pills>li>a{padding-top:8px;padding-bottom:8px;margin-top:2px;margin-bottom:2px;-webkit-border-radius:5px;-moz-border-radius:5px;border-radius:5px;} +.nav-pills>.active>a,.nav-pills>.active>a:hover{color:#ffffff;background-color:#0088cc;} +.nav-stacked>li{float:none;} +.nav-stacked>li>a{margin-right:0;} +.nav-tabs.nav-stacked{border-bottom:0;} +.nav-tabs.nav-stacked>li>a{border:1px solid #ddd;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0;} +.nav-tabs.nav-stacked>li:first-child>a{-webkit-border-top-right-radius:4px;-moz-border-radius-topright:4px;border-top-right-radius:4px;-webkit-border-top-left-radius:4px;-moz-border-radius-topleft:4px;border-top-left-radius:4px;} +.nav-tabs.nav-stacked>li:last-child>a{-webkit-border-bottom-right-radius:4px;-moz-border-radius-bottomright:4px;border-bottom-right-radius:4px;-webkit-border-bottom-left-radius:4px;-moz-border-radius-bottomleft:4px;border-bottom-left-radius:4px;} +.nav-tabs.nav-stacked>li>a:hover{border-color:#ddd;z-index:2;} +.nav-pills.nav-stacked>li>a{margin-bottom:3px;} +.nav-pills.nav-stacked>li:last-child>a{margin-bottom:1px;} +.nav-tabs .dropdown-menu{-webkit-border-radius:0 0 6px 6px;-moz-border-radius:0 0 6px 6px;border-radius:0 0 6px 6px;} +.nav-pills 
.dropdown-menu{-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px;} +.nav .dropdown-toggle .caret{border-top-color:#0088cc;border-bottom-color:#0088cc;margin-top:6px;} +.nav .dropdown-toggle:hover .caret{border-top-color:#005580;border-bottom-color:#005580;} +.nav-tabs .dropdown-toggle .caret{margin-top:8px;} +.nav .active .dropdown-toggle .caret{border-top-color:#fff;border-bottom-color:#fff;} +.nav-tabs .active .dropdown-toggle .caret{border-top-color:#555555;border-bottom-color:#555555;} +.nav>.dropdown.active>a:hover{cursor:pointer;} +.nav-tabs .open .dropdown-toggle,.nav-pills .open .dropdown-toggle,.nav>li.dropdown.open.active>a:hover{color:#ffffff;background-color:#999999;border-color:#999999;} +.nav li.dropdown.open .caret,.nav li.dropdown.open.active .caret,.nav li.dropdown.open a:hover .caret{border-top-color:#ffffff;border-bottom-color:#ffffff;opacity:1;filter:alpha(opacity=100);} +.tabs-stacked .open>a:hover{border-color:#999999;} +.tabbable{*zoom:1;}.tabbable:before,.tabbable:after{display:table;content:"";line-height:0;} +.tabbable:after{clear:both;} +.tab-content{overflow:auto;} +.tabs-below>.nav-tabs,.tabs-right>.nav-tabs,.tabs-left>.nav-tabs{border-bottom:0;} +.tab-content>.tab-pane,.pill-content>.pill-pane{display:none;} +.tab-content>.active,.pill-content>.active{display:block;} +.tabs-below>.nav-tabs{border-top:1px solid #ddd;} +.tabs-below>.nav-tabs>li{margin-top:-1px;margin-bottom:0;} +.tabs-below>.nav-tabs>li>a{-webkit-border-radius:0 0 4px 4px;-moz-border-radius:0 0 4px 4px;border-radius:0 0 4px 4px;}.tabs-below>.nav-tabs>li>a:hover{border-bottom-color:transparent;border-top-color:#ddd;} +.tabs-below>.nav-tabs>.active>a,.tabs-below>.nav-tabs>.active>a:hover{border-color:transparent #ddd #ddd #ddd;} +.tabs-left>.nav-tabs>li,.tabs-right>.nav-tabs>li{float:none;} +.tabs-left>.nav-tabs>li>a,.tabs-right>.nav-tabs>li>a{min-width:74px;margin-right:0;margin-bottom:3px;} +.tabs-left>.nav-tabs{float:left;margin-right:19px;border-right:1px solid #ddd;} +.tabs-left>.nav-tabs>li>a{margin-right:-1px;-webkit-border-radius:4px 0 0 4px;-moz-border-radius:4px 0 0 4px;border-radius:4px 0 0 4px;} +.tabs-left>.nav-tabs>li>a:hover{border-color:#eeeeee #dddddd #eeeeee #eeeeee;} +.tabs-left>.nav-tabs .active>a,.tabs-left>.nav-tabs .active>a:hover{border-color:#ddd transparent #ddd #ddd;*border-right-color:#ffffff;} +.tabs-right>.nav-tabs{float:right;margin-left:19px;border-left:1px solid #ddd;} +.tabs-right>.nav-tabs>li>a{margin-left:-1px;-webkit-border-radius:0 4px 4px 0;-moz-border-radius:0 4px 4px 0;border-radius:0 4px 4px 0;} +.tabs-right>.nav-tabs>li>a:hover{border-color:#eeeeee #eeeeee #eeeeee #dddddd;} +.tabs-right>.nav-tabs .active>a,.tabs-right>.nav-tabs .active>a:hover{border-color:#ddd #ddd #ddd transparent;*border-left-color:#ffffff;} +.nav>.disabled>a{color:#999999;} +.nav>.disabled>a:hover{text-decoration:none;background-color:transparent;cursor:default;} +.navbar{overflow:visible;margin-bottom:20px;color:#777777;*position:relative;*z-index:2;} +.navbar-inner{min-height:40px;padding-left:20px;padding-right:20px;background-color:#fafafa;background-image:-moz-linear-gradient(top, #ffffff, #f2f2f2);background-image:-webkit-gradient(linear, 0 0, 0 100%, from(#ffffff), to(#f2f2f2));background-image:-webkit-linear-gradient(top, #ffffff, #f2f2f2);background-image:-o-linear-gradient(top, #ffffff, #f2f2f2);background-image:linear-gradient(to bottom, #ffffff, #f2f2f2);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffffff', 
endColorstr='#fff2f2f2', GradientType=0);border:1px solid #d4d4d4;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;-webkit-box-shadow:0 1px 4px rgba(0, 0, 0, 0.065);-moz-box-shadow:0 1px 4px rgba(0, 0, 0, 0.065);box-shadow:0 1px 4px rgba(0, 0, 0, 0.065);*zoom:1;}.navbar-inner:before,.navbar-inner:after{display:table;content:"";line-height:0;} +.navbar-inner:after{clear:both;} +.navbar .container{width:auto;} +.nav-collapse.collapse{height:auto;} +.navbar .brand{float:left;display:block;padding:10px 20px 10px;margin-left:-20px;font-size:20px;font-weight:200;color:#777777;text-shadow:0 1px 0 #ffffff;}.navbar .brand:hover{text-decoration:none;} +.navbar-text{margin-bottom:0;line-height:40px;} +.navbar-link{color:#777777;}.navbar-link:hover{color:#333333;} +.navbar .divider-vertical{height:40px;margin:0 9px;border-left:1px solid #f2f2f2;border-right:1px solid #ffffff;} +.navbar .btn,.navbar .btn-group{margin-top:5px;} +.navbar .btn-group .btn,.navbar .input-prepend .btn,.navbar .input-append .btn{margin-top:0;} +.navbar-form{margin-bottom:0;*zoom:1;}.navbar-form:before,.navbar-form:after{display:table;content:"";line-height:0;} +.navbar-form:after{clear:both;} +.navbar-form input,.navbar-form select,.navbar-form .radio,.navbar-form .checkbox{margin-top:5px;} +.navbar-form input,.navbar-form select,.navbar-form .btn{display:inline-block;margin-bottom:0;} +.navbar-form input[type="image"],.navbar-form input[type="checkbox"],.navbar-form input[type="radio"]{margin-top:3px;} +.navbar-form .input-append,.navbar-form .input-prepend{margin-top:6px;white-space:nowrap;}.navbar-form .input-append input,.navbar-form .input-prepend input{margin-top:0;} +.navbar-search{position:relative;float:left;margin-top:5px;margin-bottom:0;}.navbar-search .search-query{margin-bottom:0;padding:4px 14px;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:13px;font-weight:normal;line-height:1;-webkit-border-radius:15px;-moz-border-radius:15px;border-radius:15px;} +.navbar-static-top{position:static;width:100%;margin-bottom:0;}.navbar-static-top .navbar-inner{-webkit-border-radius:0;-moz-border-radius:0;border-radius:0;} +.navbar-fixed-top,.navbar-fixed-bottom{position:fixed;right:0;left:0;z-index:1030;margin-bottom:0;} +.navbar-fixed-top .navbar-inner,.navbar-static-top .navbar-inner{border-width:0 0 1px;} +.navbar-fixed-bottom .navbar-inner{border-width:1px 0 0;} +.navbar-fixed-top .navbar-inner,.navbar-fixed-bottom .navbar-inner{padding-left:0;padding-right:0;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0;} +.navbar-static-top .container,.navbar-fixed-top .container,.navbar-fixed-bottom .container{width:940px;} +.navbar-fixed-top{top:0;} +.navbar-fixed-top .navbar-inner,.navbar-static-top .navbar-inner{-webkit-box-shadow:inset 0 -1px 0 rgba(0, 0, 0, 0.1), 0 1px 10px rgba(0, 0, 0, 0.1);-moz-box-shadow:inset 0 -1px 0 rgba(0, 0, 0, 0.1), 0 1px 10px rgba(0, 0, 0, 0.1);box-shadow:inset 0 -1px 0 rgba(0, 0, 0, 0.1), 0 1px 10px rgba(0, 0, 0, 0.1);} +.navbar-fixed-bottom{bottom:0;}.navbar-fixed-bottom .navbar-inner{-webkit-box-shadow:inset 0 1px 0 rgba(0, 0, 0, 0.1), 0 -1px 10px rgba(0, 0, 0, 0.1);-moz-box-shadow:inset 0 1px 0 rgba(0, 0, 0, 0.1), 0 -1px 10px rgba(0, 0, 0, 0.1);box-shadow:inset 0 1px 0 rgba(0, 0, 0, 0.1), 0 -1px 10px rgba(0, 0, 0, 0.1);} +.navbar .nav{position:relative;left:0;display:block;float:left;margin:0 10px 0 0;} +.navbar .nav.pull-right{float:right;margin-right:0;} +.navbar .nav>li{float:left;} +.navbar .nav>li>a{float:none;padding:10px 15px 
10px;color:#777777;text-decoration:none;text-shadow:0 1px 0 #ffffff;} +.navbar .nav .dropdown-toggle .caret{margin-top:8px;} +.navbar .nav>li>a:focus,.navbar .nav>li>a:hover{background-color:transparent;color:#333333;text-decoration:none;} +.navbar .nav>.active>a,.navbar .nav>.active>a:hover,.navbar .nav>.active>a:focus{color:#555555;text-decoration:none;background-color:#e5e5e5;-webkit-box-shadow:inset 0 3px 8px rgba(0, 0, 0, 0.125);-moz-box-shadow:inset 0 3px 8px rgba(0, 0, 0, 0.125);box-shadow:inset 0 3px 8px rgba(0, 0, 0, 0.125);} +.navbar .btn-navbar{display:none;float:right;padding:7px 10px;margin-left:5px;margin-right:5px;color:#ffffff;text-shadow:0 -1px 0 rgba(0, 0, 0, 0.25);background-color:#ededed;background-image:-moz-linear-gradient(top, #f2f2f2, #e5e5e5);background-image:-webkit-gradient(linear, 0 0, 0 100%, from(#f2f2f2), to(#e5e5e5));background-image:-webkit-linear-gradient(top, #f2f2f2, #e5e5e5);background-image:-o-linear-gradient(top, #f2f2f2, #e5e5e5);background-image:linear-gradient(to bottom, #f2f2f2, #e5e5e5);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff2f2f2', endColorstr='#ffe5e5e5', GradientType=0);border-color:#e5e5e5 #e5e5e5 #bfbfbf;border-color:rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25);*background-color:#e5e5e5;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);-webkit-box-shadow:inset 0 1px 0 rgba(255, 255, 255, 0.1), 0 1px 0 rgba(255, 255, 255, 0.075);-moz-box-shadow:inset 0 1px 0 rgba(255, 255, 255, 0.1), 0 1px 0 rgba(255, 255, 255, 0.075);box-shadow:inset 0 1px 0 rgba(255, 255, 255, 0.1), 0 1px 0 rgba(255, 255, 255, 0.075);}.navbar .btn-navbar:hover,.navbar .btn-navbar:active,.navbar .btn-navbar.active,.navbar .btn-navbar.disabled,.navbar .btn-navbar[disabled]{color:#ffffff;background-color:#e5e5e5;*background-color:#d9d9d9;} +.navbar .btn-navbar:active,.navbar .btn-navbar.active{background-color:#cccccc \9;} +.navbar .btn-navbar .icon-bar{display:block;width:18px;height:2px;background-color:#f5f5f5;-webkit-border-radius:1px;-moz-border-radius:1px;border-radius:1px;-webkit-box-shadow:0 1px 0 rgba(0, 0, 0, 0.25);-moz-box-shadow:0 1px 0 rgba(0, 0, 0, 0.25);box-shadow:0 1px 0 rgba(0, 0, 0, 0.25);} +.btn-navbar .icon-bar+.icon-bar{margin-top:3px;} +.navbar .nav>li>.dropdown-menu:before{content:'';display:inline-block;border-left:7px solid transparent;border-right:7px solid transparent;border-bottom:7px solid #ccc;border-bottom-color:rgba(0, 0, 0, 0.2);position:absolute;top:-7px;left:9px;} +.navbar .nav>li>.dropdown-menu:after{content:'';display:inline-block;border-left:6px solid transparent;border-right:6px solid transparent;border-bottom:6px solid #ffffff;position:absolute;top:-6px;left:10px;} +.navbar-fixed-bottom .nav>li>.dropdown-menu:before{border-top:7px solid #ccc;border-top-color:rgba(0, 0, 0, 0.2);border-bottom:0;bottom:-7px;top:auto;} +.navbar-fixed-bottom .nav>li>.dropdown-menu:after{border-top:6px solid #ffffff;border-bottom:0;bottom:-6px;top:auto;} +.navbar .nav li.dropdown.open>.dropdown-toggle,.navbar .nav li.dropdown.active>.dropdown-toggle,.navbar .nav li.dropdown.open.active>.dropdown-toggle{background-color:#e5e5e5;color:#555555;} +.navbar .nav li.dropdown>.dropdown-toggle .caret{border-top-color:#777777;border-bottom-color:#777777;} +.navbar .nav li.dropdown.open>.dropdown-toggle .caret,.navbar .nav li.dropdown.active>.dropdown-toggle .caret,.navbar .nav li.dropdown.open.active>.dropdown-toggle .caret{border-top-color:#555555;border-bottom-color:#555555;} 
+.navbar .pull-right>li>.dropdown-menu,.navbar .nav>li>.dropdown-menu.pull-right{left:auto;right:0;}.navbar .pull-right>li>.dropdown-menu:before,.navbar .nav>li>.dropdown-menu.pull-right:before{left:auto;right:12px;} +.navbar .pull-right>li>.dropdown-menu:after,.navbar .nav>li>.dropdown-menu.pull-right:after{left:auto;right:13px;} +.navbar .pull-right>li>.dropdown-menu .dropdown-menu,.navbar .nav>li>.dropdown-menu.pull-right .dropdown-menu{left:auto;right:100%;margin-left:0;margin-right:-1px;-webkit-border-radius:6px 0 6px 6px;-moz-border-radius:6px 0 6px 6px;border-radius:6px 0 6px 6px;} +.navbar-inverse{color:#999999;}.navbar-inverse .navbar-inner{background-color:#1b1b1b;background-image:-moz-linear-gradient(top, #222222, #111111);background-image:-webkit-gradient(linear, 0 0, 0 100%, from(#222222), to(#111111));background-image:-webkit-linear-gradient(top, #222222, #111111);background-image:-o-linear-gradient(top, #222222, #111111);background-image:linear-gradient(to bottom, #222222, #111111);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff222222', endColorstr='#ff111111', GradientType=0);border-color:#252525;} +.navbar-inverse .brand,.navbar-inverse .nav>li>a{color:#999999;text-shadow:0 -1px 0 rgba(0, 0, 0, 0.25);}.navbar-inverse .brand:hover,.navbar-inverse .nav>li>a:hover{color:#ffffff;} +.navbar-inverse .nav>li>a:focus,.navbar-inverse .nav>li>a:hover{background-color:transparent;color:#ffffff;} +.navbar-inverse .nav .active>a,.navbar-inverse .nav .active>a:hover,.navbar-inverse .nav .active>a:focus{color:#ffffff;background-color:#111111;} +.navbar-inverse .navbar-link{color:#999999;}.navbar-inverse .navbar-link:hover{color:#ffffff;} +.navbar-inverse .divider-vertical{border-left-color:#111111;border-right-color:#222222;} +.navbar-inverse .nav li.dropdown.open>.dropdown-toggle,.navbar-inverse .nav li.dropdown.active>.dropdown-toggle,.navbar-inverse .nav li.dropdown.open.active>.dropdown-toggle{background-color:#111111;color:#ffffff;} +.navbar-inverse .nav li.dropdown>.dropdown-toggle .caret{border-top-color:#999999;border-bottom-color:#999999;} +.navbar-inverse .nav li.dropdown.open>.dropdown-toggle .caret,.navbar-inverse .nav li.dropdown.active>.dropdown-toggle .caret,.navbar-inverse .nav li.dropdown.open.active>.dropdown-toggle .caret{border-top-color:#ffffff;border-bottom-color:#ffffff;} +.navbar-inverse .navbar-search .search-query{color:#ffffff;background-color:#515151;border-color:#111111;-webkit-box-shadow:inset 0 1px 2px rgba(0, 0, 0, 0.1), 0 1px 0 rgba(255, 255, 255, 0.15);-moz-box-shadow:inset 0 1px 2px rgba(0, 0, 0, 0.1), 0 1px 0 rgba(255, 255, 255, 0.15);box-shadow:inset 0 1px 2px rgba(0, 0, 0, 0.1), 0 1px 0 rgba(255, 255, 255, 0.15);-webkit-transition:none;-moz-transition:none;-o-transition:none;transition:none;}.navbar-inverse .navbar-search .search-query:-moz-placeholder{color:#cccccc;} +.navbar-inverse .navbar-search .search-query:-ms-input-placeholder{color:#cccccc;} +.navbar-inverse .navbar-search .search-query::-webkit-input-placeholder{color:#cccccc;} +.navbar-inverse .navbar-search .search-query:focus,.navbar-inverse .navbar-search .search-query.focused{padding:5px 15px;color:#333333;text-shadow:0 1px 0 #ffffff;background-color:#ffffff;border:0;-webkit-box-shadow:0 0 3px rgba(0, 0, 0, 0.15);-moz-box-shadow:0 0 3px rgba(0, 0, 0, 0.15);box-shadow:0 0 3px rgba(0, 0, 0, 0.15);outline:0;} +.navbar-inverse .btn-navbar{color:#ffffff;text-shadow:0 -1px 0 rgba(0, 0, 0, 
0.25);background-color:#0e0e0e;background-image:-moz-linear-gradient(top, #151515, #040404);background-image:-webkit-gradient(linear, 0 0, 0 100%, from(#151515), to(#040404));background-image:-webkit-linear-gradient(top, #151515, #040404);background-image:-o-linear-gradient(top, #151515, #040404);background-image:linear-gradient(to bottom, #151515, #040404);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff151515', endColorstr='#ff040404', GradientType=0);border-color:#040404 #040404 #000000;border-color:rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25);*background-color:#040404;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);}.navbar-inverse .btn-navbar:hover,.navbar-inverse .btn-navbar:active,.navbar-inverse .btn-navbar.active,.navbar-inverse .btn-navbar.disabled,.navbar-inverse .btn-navbar[disabled]{color:#ffffff;background-color:#040404;*background-color:#000000;} +.navbar-inverse .btn-navbar:active,.navbar-inverse .btn-navbar.active{background-color:#000000 \9;} +.breadcrumb{padding:8px 15px;margin:0 0 20px;list-style:none;background-color:#f5f5f5;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;}.breadcrumb li{display:inline-block;*display:inline;*zoom:1;text-shadow:0 1px 0 #ffffff;} +.breadcrumb .divider{padding:0 5px;color:#ccc;} +.breadcrumb .active{color:#999999;} +.pagination{height:40px;margin:20px 0;} +.pagination ul{display:inline-block;*display:inline;*zoom:1;margin-left:0;margin-bottom:0;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px;-webkit-box-shadow:0 1px 2px rgba(0, 0, 0, 0.05);-moz-box-shadow:0 1px 2px rgba(0, 0, 0, 0.05);box-shadow:0 1px 2px rgba(0, 0, 0, 0.05);} +.pagination ul>li{display:inline;} +.pagination ul>li>a,.pagination ul>li>span{float:left;padding:0 14px;line-height:38px;text-decoration:none;background-color:#ffffff;border:1px solid #dddddd;border-left-width:0;} +.pagination ul>li>a:hover,.pagination ul>.active>a,.pagination ul>.active>span{background-color:#f5f5f5;} +.pagination ul>.active>a,.pagination ul>.active>span{color:#999999;cursor:default;} +.pagination ul>.disabled>span,.pagination ul>.disabled>a,.pagination ul>.disabled>a:hover{color:#999999;background-color:transparent;cursor:default;} +.pagination ul>li:first-child>a,.pagination ul>li:first-child>span{border-left-width:1px;-webkit-border-radius:3px 0 0 3px;-moz-border-radius:3px 0 0 3px;border-radius:3px 0 0 3px;} +.pagination ul>li:last-child>a,.pagination ul>li:last-child>span{-webkit-border-radius:0 3px 3px 0;-moz-border-radius:0 3px 3px 0;border-radius:0 3px 3px 0;} +.pagination-centered{text-align:center;} +.pagination-right{text-align:right;} +.pager{margin:20px 0;list-style:none;text-align:center;*zoom:1;}.pager:before,.pager:after{display:table;content:"";line-height:0;} +.pager:after{clear:both;} +.pager li{display:inline;} +.pager a,.pager span{display:inline-block;padding:5px 14px;background-color:#fff;border:1px solid #ddd;-webkit-border-radius:15px;-moz-border-radius:15px;border-radius:15px;} +.pager a:hover{text-decoration:none;background-color:#f5f5f5;} +.pager .next a,.pager .next span{float:right;} +.pager .previous a{float:left;} +.pager .disabled a,.pager .disabled a:hover,.pager .disabled span{color:#999999;background-color:#fff;cursor:default;} +.thumbnails{margin-left:-20px;list-style:none;*zoom:1;}.thumbnails:before,.thumbnails:after{display:table;content:"";line-height:0;} +.thumbnails:after{clear:both;} +.row-fluid .thumbnails{margin-left:0;} 
+.thumbnails>li{float:left;margin-bottom:20px;margin-left:20px;} +.thumbnail{display:block;padding:4px;line-height:20px;border:1px solid #ddd;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;-webkit-box-shadow:0 1px 3px rgba(0, 0, 0, 0.055);-moz-box-shadow:0 1px 3px rgba(0, 0, 0, 0.055);box-shadow:0 1px 3px rgba(0, 0, 0, 0.055);-webkit-transition:all 0.2s ease-in-out;-moz-transition:all 0.2s ease-in-out;-o-transition:all 0.2s ease-in-out;transition:all 0.2s ease-in-out;} +a.thumbnail:hover{border-color:#0088cc;-webkit-box-shadow:0 1px 4px rgba(0, 105, 214, 0.25);-moz-box-shadow:0 1px 4px rgba(0, 105, 214, 0.25);box-shadow:0 1px 4px rgba(0, 105, 214, 0.25);} +.thumbnail>img{display:block;max-width:100%;margin-left:auto;margin-right:auto;} +.thumbnail .caption{padding:9px;color:#555555;} +.alert{padding:8px 35px 8px 14px;margin-bottom:20px;text-shadow:0 1px 0 rgba(255, 255, 255, 0.5);background-color:#fcf8e3;border:1px solid #fbeed5;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;color:#c09853;} +.alert h4{margin:0;} +.alert .close{position:relative;top:-2px;right:-21px;line-height:20px;} +.alert-success{background-color:#dff0d8;border-color:#d6e9c6;color:#468847;} +.alert-danger,.alert-error{background-color:#f2dede;border-color:#eed3d7;color:#b94a48;} +.alert-info{background-color:#d9edf7;border-color:#bce8f1;color:#3a87ad;} +.alert-block{padding-top:14px;padding-bottom:14px;} +.alert-block>p,.alert-block>ul{margin-bottom:0;} +.alert-block p+p{margin-top:5px;} +@-webkit-keyframes progress-bar-stripes{from{background-position:40px 0;} to{background-position:0 0;}}@-moz-keyframes progress-bar-stripes{from{background-position:40px 0;} to{background-position:0 0;}}@-ms-keyframes progress-bar-stripes{from{background-position:40px 0;} to{background-position:0 0;}}@-o-keyframes progress-bar-stripes{from{background-position:0 0;} to{background-position:40px 0;}}@keyframes progress-bar-stripes{from{background-position:40px 0;} to{background-position:0 0;}}.progress{overflow:hidden;height:20px;margin-bottom:20px;background-color:#f7f7f7;background-image:-moz-linear-gradient(top, #f5f5f5, #f9f9f9);background-image:-webkit-gradient(linear, 0 0, 0 100%, from(#f5f5f5), to(#f9f9f9));background-image:-webkit-linear-gradient(top, #f5f5f5, #f9f9f9);background-image:-o-linear-gradient(top, #f5f5f5, #f9f9f9);background-image:linear-gradient(to bottom, #f5f5f5, #f9f9f9);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff5f5f5', endColorstr='#fff9f9f9', GradientType=0);-webkit-box-shadow:inset 0 1px 2px rgba(0, 0, 0, 0.1);-moz-box-shadow:inset 0 1px 2px rgba(0, 0, 0, 0.1);box-shadow:inset 0 1px 2px rgba(0, 0, 0, 0.1);-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;} +.progress .bar{width:0%;height:100%;color:#ffffff;float:left;font-size:12px;text-align:center;text-shadow:0 -1px 0 rgba(0, 0, 0, 0.25);background-color:#0e90d2;background-image:-moz-linear-gradient(top, #149bdf, #0480be);background-image:-webkit-gradient(linear, 0 0, 0 100%, from(#149bdf), to(#0480be));background-image:-webkit-linear-gradient(top, #149bdf, #0480be);background-image:-o-linear-gradient(top, #149bdf, #0480be);background-image:linear-gradient(to bottom, #149bdf, #0480be);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff149bdf', endColorstr='#ff0480be', GradientType=0);-webkit-box-shadow:inset 0 -1px 0 rgba(0, 0, 0, 0.15);-moz-box-shadow:inset 0 -1px 0 rgba(0, 0, 0, 0.15);box-shadow:inset 0 
-1px 0 rgba(0, 0, 0, 0.15);-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;-webkit-transition:width 0.6s ease;-moz-transition:width 0.6s ease;-o-transition:width 0.6s ease;transition:width 0.6s ease;} +.progress .bar+.bar{-webkit-box-shadow:inset 1px 0 0 rgba(0, 0, 0, 0.15), inset 0 -1px 0 rgba(0, 0, 0, 0.15);-moz-box-shadow:inset 1px 0 0 rgba(0, 0, 0, 0.15), inset 0 -1px 0 rgba(0, 0, 0, 0.15);box-shadow:inset 1px 0 0 rgba(0, 0, 0, 0.15), inset 0 -1px 0 rgba(0, 0, 0, 0.15);} +.progress-striped .bar{background-color:#149bdf;background-image:-webkit-gradient(linear, 0 100%, 100% 0, color-stop(0.25, rgba(255, 255, 255, 0.15)), color-stop(0.25, transparent), color-stop(0.5, transparent), color-stop(0.5, rgba(255, 255, 255, 0.15)), color-stop(0.75, rgba(255, 255, 255, 0.15)), color-stop(0.75, transparent), to(transparent));background-image:-webkit-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);background-image:-moz-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);background-image:-o-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);background-image:linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);-webkit-background-size:40px 40px;-moz-background-size:40px 40px;-o-background-size:40px 40px;background-size:40px 40px;} +.progress.active .bar{-webkit-animation:progress-bar-stripes 2s linear infinite;-moz-animation:progress-bar-stripes 2s linear infinite;-ms-animation:progress-bar-stripes 2s linear infinite;-o-animation:progress-bar-stripes 2s linear infinite;animation:progress-bar-stripes 2s linear infinite;} +.progress-danger .bar,.progress .bar-danger{background-color:#dd514c;background-image:-moz-linear-gradient(top, #ee5f5b, #c43c35);background-image:-webkit-gradient(linear, 0 0, 0 100%, from(#ee5f5b), to(#c43c35));background-image:-webkit-linear-gradient(top, #ee5f5b, #c43c35);background-image:-o-linear-gradient(top, #ee5f5b, #c43c35);background-image:linear-gradient(to bottom, #ee5f5b, #c43c35);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffee5f5b', endColorstr='#ffc43c35', GradientType=0);} +.progress-danger.progress-striped .bar,.progress-striped .bar-danger{background-color:#ee5f5b;background-image:-webkit-gradient(linear, 0 100%, 100% 0, color-stop(0.25, rgba(255, 255, 255, 0.15)), color-stop(0.25, transparent), color-stop(0.5, transparent), color-stop(0.5, rgba(255, 255, 255, 0.15)), color-stop(0.75, rgba(255, 255, 255, 0.15)), color-stop(0.75, transparent), to(transparent));background-image:-webkit-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);background-image:-moz-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);background-image:-o-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 
255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);background-image:linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);} +.progress-success .bar,.progress .bar-success{background-color:#5eb95e;background-image:-moz-linear-gradient(top, #62c462, #57a957);background-image:-webkit-gradient(linear, 0 0, 0 100%, from(#62c462), to(#57a957));background-image:-webkit-linear-gradient(top, #62c462, #57a957);background-image:-o-linear-gradient(top, #62c462, #57a957);background-image:linear-gradient(to bottom, #62c462, #57a957);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff62c462', endColorstr='#ff57a957', GradientType=0);} +.progress-success.progress-striped .bar,.progress-striped .bar-success{background-color:#62c462;background-image:-webkit-gradient(linear, 0 100%, 100% 0, color-stop(0.25, rgba(255, 255, 255, 0.15)), color-stop(0.25, transparent), color-stop(0.5, transparent), color-stop(0.5, rgba(255, 255, 255, 0.15)), color-stop(0.75, rgba(255, 255, 255, 0.15)), color-stop(0.75, transparent), to(transparent));background-image:-webkit-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);background-image:-moz-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);background-image:-o-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);background-image:linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);} +.progress-info .bar,.progress .bar-info{background-color:#4bb1cf;background-image:-moz-linear-gradient(top, #5bc0de, #339bb9);background-image:-webkit-gradient(linear, 0 0, 0 100%, from(#5bc0de), to(#339bb9));background-image:-webkit-linear-gradient(top, #5bc0de, #339bb9);background-image:-o-linear-gradient(top, #5bc0de, #339bb9);background-image:linear-gradient(to bottom, #5bc0de, #339bb9);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5bc0de', endColorstr='#ff339bb9', GradientType=0);} +.progress-info.progress-striped .bar,.progress-striped .bar-info{background-color:#5bc0de;background-image:-webkit-gradient(linear, 0 100%, 100% 0, color-stop(0.25, rgba(255, 255, 255, 0.15)), color-stop(0.25, transparent), color-stop(0.5, transparent), color-stop(0.5, rgba(255, 255, 255, 0.15)), color-stop(0.75, rgba(255, 255, 255, 0.15)), color-stop(0.75, transparent), to(transparent));background-image:-webkit-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);background-image:-moz-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);background-image:-o-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 
75%, transparent);background-image:linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);} +.progress-warning .bar,.progress .bar-warning{background-color:#faa732;background-image:-moz-linear-gradient(top, #fbb450, #f89406);background-image:-webkit-gradient(linear, 0 0, 0 100%, from(#fbb450), to(#f89406));background-image:-webkit-linear-gradient(top, #fbb450, #f89406);background-image:-o-linear-gradient(top, #fbb450, #f89406);background-image:linear-gradient(to bottom, #fbb450, #f89406);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fffbb450', endColorstr='#fff89406', GradientType=0);} +.progress-warning.progress-striped .bar,.progress-striped .bar-warning{background-color:#fbb450;background-image:-webkit-gradient(linear, 0 100%, 100% 0, color-stop(0.25, rgba(255, 255, 255, 0.15)), color-stop(0.25, transparent), color-stop(0.5, transparent), color-stop(0.5, rgba(255, 255, 255, 0.15)), color-stop(0.75, rgba(255, 255, 255, 0.15)), color-stop(0.75, transparent), to(transparent));background-image:-webkit-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);background-image:-moz-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);background-image:-o-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);background-image:linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);} +.hero-unit{padding:60px;margin-bottom:30px;background-color:#eeeeee;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px;}.hero-unit h1{margin-bottom:0;font-size:60px;line-height:1;color:inherit;letter-spacing:-1px;} +.hero-unit p{font-size:18px;font-weight:200;line-height:30px;color:inherit;} +.tooltip{position:absolute;z-index:1030;display:block;visibility:visible;padding:5px;font-size:11px;opacity:0;filter:alpha(opacity=0);}.tooltip.in{opacity:0.8;filter:alpha(opacity=80);} +.tooltip.top{margin-top:-3px;} +.tooltip.right{margin-left:3px;} +.tooltip.bottom{margin-top:3px;} +.tooltip.left{margin-left:-3px;} +.tooltip-inner{max-width:200px;padding:3px 8px;color:#ffffff;text-align:center;text-decoration:none;background-color:#000000;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;} +.tooltip-arrow{position:absolute;width:0;height:0;border-color:transparent;border-style:solid;} +.tooltip.top .tooltip-arrow{bottom:0;left:50%;margin-left:-5px;border-width:5px 5px 0;border-top-color:#000000;} +.tooltip.right .tooltip-arrow{top:50%;left:0;margin-top:-5px;border-width:5px 5px 5px 0;border-right-color:#000000;} +.tooltip.left .tooltip-arrow{top:50%;right:0;margin-top:-5px;border-width:5px 0 5px 5px;border-left-color:#000000;} +.tooltip.bottom .tooltip-arrow{top:0;left:50%;margin-left:-5px;border-width:0 5px 5px;border-bottom-color:#000000;} 
+.popover{position:absolute;top:0;left:0;z-index:1010;display:none;width:236px;padding:1px;background-color:#ffffff;-webkit-background-clip:padding-box;-moz-background-clip:padding;background-clip:padding-box;border:1px solid #ccc;border:1px solid rgba(0, 0, 0, 0.2);-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px;-webkit-box-shadow:0 5px 10px rgba(0, 0, 0, 0.2);-moz-box-shadow:0 5px 10px rgba(0, 0, 0, 0.2);box-shadow:0 5px 10px rgba(0, 0, 0, 0.2);}.popover.top{margin-bottom:10px;} +.popover.right{margin-left:10px;} +.popover.bottom{margin-top:10px;} +.popover.left{margin-right:10px;} +.popover-title{margin:0;padding:8px 14px;font-size:14px;font-weight:normal;line-height:18px;background-color:#f7f7f7;border-bottom:1px solid #ebebeb;-webkit-border-radius:5px 5px 0 0;-moz-border-radius:5px 5px 0 0;border-radius:5px 5px 0 0;} +.popover-content{padding:9px 14px;}.popover-content p,.popover-content ul,.popover-content ol{margin-bottom:0;} +.popover .arrow,.popover .arrow:after{position:absolute;display:inline-block;width:0;height:0;border-color:transparent;border-style:solid;} +.popover .arrow:after{content:"";z-index:-1;} +.popover.top .arrow{bottom:-10px;left:50%;margin-left:-10px;border-width:10px 10px 0;border-top-color:#ffffff;}.popover.top .arrow:after{border-width:11px 11px 0;border-top-color:rgba(0, 0, 0, 0.25);bottom:-1px;left:-11px;} +.popover.right .arrow{top:50%;left:-10px;margin-top:-10px;border-width:10px 10px 10px 0;border-right-color:#ffffff;}.popover.right .arrow:after{border-width:11px 11px 11px 0;border-right-color:rgba(0, 0, 0, 0.25);bottom:-11px;left:-1px;} +.popover.bottom .arrow{top:-10px;left:50%;margin-left:-10px;border-width:0 10px 10px;border-bottom-color:#ffffff;}.popover.bottom .arrow:after{border-width:0 11px 11px;border-bottom-color:rgba(0, 0, 0, 0.25);top:-1px;left:-11px;} +.popover.left .arrow{top:50%;right:-10px;margin-top:-10px;border-width:10px 0 10px 10px;border-left-color:#ffffff;}.popover.left .arrow:after{border-width:11px 0 11px 11px;border-left-color:rgba(0, 0, 0, 0.25);bottom:-11px;right:-1px;} +.modal-open .modal .dropdown-menu{z-index:2050;} +.modal-open .modal .dropdown.open{*z-index:2050;} +.modal-open .modal .popover{z-index:2060;} +.modal-open .modal .tooltip{z-index:2080;} +.modal-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040;background-color:#000000;}.modal-backdrop.fade{opacity:0;} +.modal-backdrop,.modal-backdrop.fade.in{opacity:0.8;filter:alpha(opacity=80);} +.modal{position:fixed;top:50%;left:50%;z-index:1050;overflow:auto;width:560px;margin:-250px 0 0 -280px;background-color:#ffffff;border:1px solid #999;border:1px solid rgba(0, 0, 0, 0.3);*border:1px solid #999;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px;-webkit-box-shadow:0 3px 7px rgba(0, 0, 0, 0.3);-moz-box-shadow:0 3px 7px rgba(0, 0, 0, 0.3);box-shadow:0 3px 7px rgba(0, 0, 0, 0.3);-webkit-background-clip:padding-box;-moz-background-clip:padding-box;background-clip:padding-box;}.modal.fade{-webkit-transition:opacity .3s linear, top .3s ease-out;-moz-transition:opacity .3s linear, top .3s ease-out;-o-transition:opacity .3s linear, top .3s ease-out;transition:opacity .3s linear, top .3s ease-out;top:-25%;} +.modal.fade.in{top:50%;} +.modal-header{padding:9px 15px;border-bottom:1px solid #eee;}.modal-header .close{margin-top:2px;} +.modal-header h3{margin:0;line-height:30px;} +.modal-body{overflow-y:auto;max-height:400px;padding:15px;} +.modal-form{margin-bottom:0;} +.modal-footer{padding:14px 15px 
15px;margin-bottom:0;text-align:right;background-color:#f5f5f5;border-top:1px solid #ddd;-webkit-border-radius:0 0 6px 6px;-moz-border-radius:0 0 6px 6px;border-radius:0 0 6px 6px;-webkit-box-shadow:inset 0 1px 0 #ffffff;-moz-box-shadow:inset 0 1px 0 #ffffff;box-shadow:inset 0 1px 0 #ffffff;*zoom:1;}.modal-footer:before,.modal-footer:after{display:table;content:"";line-height:0;} +.modal-footer:after{clear:both;} +.modal-footer .btn+.btn{margin-left:5px;margin-bottom:0;} +.modal-footer .btn-group .btn+.btn{margin-left:-1px;} +.dropup,.dropdown{position:relative;} +.dropdown-toggle{*margin-bottom:-3px;} +.dropdown-toggle:active,.open .dropdown-toggle{outline:0;} +.caret{display:inline-block;width:0;height:0;vertical-align:top;border-top:4px solid #000000;border-right:4px solid transparent;border-left:4px solid transparent;content:"";} +.dropdown .caret{margin-top:8px;margin-left:2px;} +.dropdown-menu{position:absolute;top:100%;left:0;z-index:1000;display:none;float:left;min-width:160px;padding:5px 0;margin:2px 0 0;list-style:none;background-color:#ffffff;border:1px solid #ccc;border:1px solid rgba(0, 0, 0, 0.2);*border-right-width:2px;*border-bottom-width:2px;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px;-webkit-box-shadow:0 5px 10px rgba(0, 0, 0, 0.2);-moz-box-shadow:0 5px 10px rgba(0, 0, 0, 0.2);box-shadow:0 5px 10px rgba(0, 0, 0, 0.2);-webkit-background-clip:padding-box;-moz-background-clip:padding;background-clip:padding-box;}.dropdown-menu.pull-right{right:0;left:auto;} +.dropdown-menu .divider{*width:100%;height:1px;margin:9px 1px;*margin:-5px 0 5px;overflow:hidden;background-color:#e5e5e5;border-bottom:1px solid #ffffff;} +.dropdown-menu a{display:block;padding:3px 20px;clear:both;font-weight:normal;line-height:20px;color:#333333;white-space:nowrap;} +.dropdown-menu li>a:hover,.dropdown-menu li>a:focus,.dropdown-submenu:hover>a{text-decoration:none;color:#ffffff;background-color:#0088cc;background-color:#0081c2;background-image:-moz-linear-gradient(top, #0088cc, #0077b3);background-image:-webkit-gradient(linear, 0 0, 0 100%, from(#0088cc), to(#0077b3));background-image:-webkit-linear-gradient(top, #0088cc, #0077b3);background-image:-o-linear-gradient(top, #0088cc, #0077b3);background-image:linear-gradient(to bottom, #0088cc, #0077b3);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff0088cc', endColorstr='#ff0077b3', GradientType=0);} +.dropdown-menu .active>a,.dropdown-menu .active>a:hover{color:#ffffff;text-decoration:none;outline:0;background-color:#0088cc;background-color:#0081c2;background-image:-moz-linear-gradient(top, #0088cc, #0077b3);background-image:-webkit-gradient(linear, 0 0, 0 100%, from(#0088cc), to(#0077b3));background-image:-webkit-linear-gradient(top, #0088cc, #0077b3);background-image:-o-linear-gradient(top, #0088cc, #0077b3);background-image:linear-gradient(to bottom, #0088cc, #0077b3);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff0088cc', endColorstr='#ff0077b3', GradientType=0);} +.dropdown-menu .disabled>a,.dropdown-menu .disabled>a:hover{color:#999999;} +.dropdown-menu .disabled>a:hover{text-decoration:none;background-color:transparent;cursor:default;} +.open{*z-index:1000;}.open >.dropdown-menu{display:block;} +.pull-right>.dropdown-menu{right:0;left:auto;} +.dropup .caret,.navbar-fixed-bottom .dropdown .caret{border-top:0;border-bottom:4px solid #000000;content:"";} +.dropup .dropdown-menu,.navbar-fixed-bottom .dropdown 
.dropdown-menu{top:auto;bottom:100%;margin-bottom:1px;} +.dropdown-submenu{position:relative;} +.dropdown-submenu>.dropdown-menu{top:0;left:100%;margin-top:-6px;margin-left:-1px;-webkit-border-radius:0 6px 6px 6px;-moz-border-radius:0 6px 6px 6px;border-radius:0 6px 6px 6px;} +.dropdown-submenu:hover>.dropdown-menu{display:block;} +.dropdown-submenu>a:after{display:block;content:" ";float:right;width:0;height:0;border-color:transparent;border-style:solid;border-width:5px 0 5px 5px;border-left-color:#cccccc;margin-top:5px;margin-right:-10px;} +.dropdown-submenu:hover>a:after{border-left-color:#ffffff;} +.dropdown .dropdown-menu .nav-header{padding-left:20px;padding-right:20px;} +.typeahead{margin-top:2px;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;} +.accordion{margin-bottom:20px;} +.accordion-group{margin-bottom:2px;border:1px solid #e5e5e5;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;} +.accordion-heading{border-bottom:0;} +.accordion-heading .accordion-toggle{display:block;padding:8px 15px;} +.accordion-toggle{cursor:pointer;} +.accordion-inner{padding:9px 15px;border-top:1px solid #e5e5e5;} +.carousel{position:relative;margin-bottom:20px;line-height:1;} +.carousel-inner{overflow:hidden;width:100%;position:relative;} +.carousel .item{display:none;position:relative;-webkit-transition:0.6s ease-in-out left;-moz-transition:0.6s ease-in-out left;-o-transition:0.6s ease-in-out left;transition:0.6s ease-in-out left;} +.carousel .item>img{display:block;line-height:1;} +.carousel .active,.carousel .next,.carousel .prev{display:block;} +.carousel .active{left:0;} +.carousel .next,.carousel .prev{position:absolute;top:0;width:100%;} +.carousel .next{left:100%;} +.carousel .prev{left:-100%;} +.carousel .next.left,.carousel .prev.right{left:0;} +.carousel .active.left{left:-100%;} +.carousel .active.right{left:100%;} +.carousel-control{position:absolute;top:40%;left:15px;width:40px;height:40px;margin-top:-20px;font-size:60px;font-weight:100;line-height:30px;color:#ffffff;text-align:center;background:#222222;border:3px solid #ffffff;-webkit-border-radius:23px;-moz-border-radius:23px;border-radius:23px;opacity:0.5;filter:alpha(opacity=50);}.carousel-control.right{left:auto;right:15px;} +.carousel-control:hover{color:#ffffff;text-decoration:none;opacity:0.9;filter:alpha(opacity=90);} +.carousel-caption{position:absolute;left:0;right:0;bottom:0;padding:15px;background:#333333;background:rgba(0, 0, 0, 0.75);} +.carousel-caption h4,.carousel-caption p{color:#ffffff;line-height:20px;} +.carousel-caption h4{margin:0 0 5px;} +.carousel-caption p{margin-bottom:0;} +.well{min-height:20px;padding:19px;margin-bottom:20px;background-color:#f5f5f5;border:1px solid #e3e3e3;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.05);-moz-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.05);box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.05);}.well blockquote{border-color:#ddd;border-color:rgba(0, 0, 0, 0.15);} +.well-large{padding:24px;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px;} +.well-small{padding:9px;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px;} +.close{float:right;font-size:20px;font-weight:bold;line-height:20px;color:#000000;text-shadow:0 1px 0 #ffffff;opacity:0.2;filter:alpha(opacity=20);}.close:hover{color:#000000;text-decoration:none;cursor:pointer;opacity:0.4;filter:alpha(opacity=40);} 
+button.close{padding:0;cursor:pointer;background:transparent;border:0;-webkit-appearance:none;} +.pull-right{float:right;} +.pull-left{float:left;} +.hide{display:none;} +.show{display:block;} +.invisible{visibility:hidden;} +.affix{position:fixed;} +.fade{opacity:0;-webkit-transition:opacity 0.15s linear;-moz-transition:opacity 0.15s linear;-o-transition:opacity 0.15s linear;transition:opacity 0.15s linear;}.fade.in{opacity:1;} +.collapse{position:relative;height:0;overflow:hidden;-webkit-transition:height 0.35s ease;-moz-transition:height 0.35s ease;-o-transition:height 0.35s ease;transition:height 0.35s ease;}.collapse.in{height:auto;} +.hidden{display:none;visibility:hidden;} +.visible-phone{display:none !important;} +.visible-tablet{display:none !important;} +.hidden-desktop{display:none !important;} +.visible-desktop{display:inherit !important;} +@media (min-width:768px) and (max-width:979px){.hidden-desktop{display:inherit !important;} .visible-desktop{display:none !important ;} .visible-tablet{display:inherit !important;} .hidden-tablet{display:none !important;}}@media (max-width:767px){.hidden-desktop{display:inherit !important;} .visible-desktop{display:none !important;} .visible-phone{display:inherit !important;} .hidden-phone{display:none !important;}}@media (max-width:767px){body{padding-left:20px;padding-right:20px;} .navbar-fixed-top,.navbar-fixed-bottom,.navbar-static-top{margin-left:-20px;margin-right:-20px;} .container-fluid{padding:0;} .dl-horizontal dt{float:none;clear:none;width:auto;text-align:left;} .dl-horizontal dd{margin-left:0;} .container{width:auto;} .row-fluid{width:100%;} .row,.thumbnails{margin-left:0;} .thumbnails>li{float:none;margin-left:0;} [class*="span"],.row-fluid [class*="span"]{float:none;display:block;width:100%;margin-left:0;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;} .span12,.row-fluid .span12{width:100%;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;} .input-large,.input-xlarge,.input-xxlarge,input[class*="span"],select[class*="span"],textarea[class*="span"],.uneditable-input{display:block;width:100%;min-height:30px;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;} .input-prepend input,.input-append input,.input-prepend input[class*="span"],.input-append input[class*="span"]{display:inline-block;width:auto;} .controls-row [class*="span"]+[class*="span"]{margin-left:0;} .modal{position:fixed;top:20px;left:20px;right:20px;width:auto;margin:0;}.modal.fade.in{top:auto;}}@media (max-width:480px){.nav-collapse{-webkit-transform:translate3d(0, 0, 0);} .page-header h1 small{display:block;line-height:20px;} input[type="checkbox"],input[type="radio"]{border:1px solid #ccc;} .form-horizontal .control-label{float:none;width:auto;padding-top:0;text-align:left;} .form-horizontal .controls{margin-left:0;} .form-horizontal .control-list{padding-top:0;} .form-horizontal .form-actions{padding-left:10px;padding-right:10px;} .modal{top:10px;left:10px;right:10px;} .modal-header .close{padding:10px;margin:-10px;} .carousel-caption{position:static;}}@media (min-width:768px) and (max-width:979px){.row{margin-left:-20px;*zoom:1;}.row:before,.row:after{display:table;content:"";line-height:0;} .row:after{clear:both;} [class*="span"]{float:left;min-height:1px;margin-left:20px;} .container,.navbar-static-top .container,.navbar-fixed-top .container,.navbar-fixed-bottom .container{width:724px;} .span12{width:724px;} .span11{width:662px;} .span10{width:600px;} 
.span9{width:538px;} .span8{width:476px;} .span7{width:414px;} .span6{width:352px;} .span5{width:290px;} .span4{width:228px;} .span3{width:166px;} .span2{width:104px;} .span1{width:42px;} .offset12{margin-left:764px;} .offset11{margin-left:702px;} .offset10{margin-left:640px;} .offset9{margin-left:578px;} .offset8{margin-left:516px;} .offset7{margin-left:454px;} .offset6{margin-left:392px;} .offset5{margin-left:330px;} .offset4{margin-left:268px;} .offset3{margin-left:206px;} .offset2{margin-left:144px;} .offset1{margin-left:82px;} .row-fluid{width:100%;*zoom:1;}.row-fluid:before,.row-fluid:after{display:table;content:"";line-height:0;} .row-fluid:after{clear:both;} .row-fluid [class*="span"]{display:block;width:100%;min-height:30px;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;float:left;margin-left:2.7624309392265194%;*margin-left:2.709239449864817%;} .row-fluid [class*="span"]:first-child{margin-left:0;} .row-fluid .span12{width:100%;*width:99.94680851063829%;} .row-fluid .span11{width:91.43646408839778%;*width:91.38327259903608%;} .row-fluid .span10{width:82.87292817679558%;*width:82.81973668743387%;} .row-fluid .span9{width:74.30939226519337%;*width:74.25620077583166%;} .row-fluid .span8{width:65.74585635359117%;*width:65.69266486422946%;} .row-fluid .span7{width:57.18232044198895%;*width:57.12912895262725%;} .row-fluid .span6{width:48.61878453038674%;*width:48.56559304102504%;} .row-fluid .span5{width:40.05524861878453%;*width:40.00205712942283%;} .row-fluid .span4{width:31.491712707182323%;*width:31.43852121782062%;} .row-fluid .span3{width:22.92817679558011%;*width:22.87498530621841%;} .row-fluid .span2{width:14.3646408839779%;*width:14.311449394616199%;} .row-fluid .span1{width:5.801104972375691%;*width:5.747913483013988%;} .row-fluid .offset12{margin-left:105.52486187845304%;*margin-left:105.41847889972962%;} .row-fluid .offset12:first-child{margin-left:102.76243093922652%;*margin-left:102.6560479605031%;} .row-fluid .offset11{margin-left:96.96132596685082%;*margin-left:96.8549429881274%;} .row-fluid .offset11:first-child{margin-left:94.1988950276243%;*margin-left:94.09251204890089%;} .row-fluid .offset10{margin-left:88.39779005524862%;*margin-left:88.2914070765252%;} .row-fluid .offset10:first-child{margin-left:85.6353591160221%;*margin-left:85.52897613729868%;} .row-fluid .offset9{margin-left:79.8342541436464%;*margin-left:79.72787116492299%;} .row-fluid .offset9:first-child{margin-left:77.07182320441989%;*margin-left:76.96544022569647%;} .row-fluid .offset8{margin-left:71.2707182320442%;*margin-left:71.16433525332079%;} .row-fluid .offset8:first-child{margin-left:68.50828729281768%;*margin-left:68.40190431409427%;} .row-fluid .offset7{margin-left:62.70718232044199%;*margin-left:62.600799341718584%;} .row-fluid .offset7:first-child{margin-left:59.94475138121547%;*margin-left:59.838368402492065%;} .row-fluid .offset6{margin-left:54.14364640883978%;*margin-left:54.037263430116376%;} .row-fluid .offset6:first-child{margin-left:51.38121546961326%;*margin-left:51.27483249088986%;} .row-fluid .offset5{margin-left:45.58011049723757%;*margin-left:45.47372751851417%;} .row-fluid .offset5:first-child{margin-left:42.81767955801105%;*margin-left:42.71129657928765%;} .row-fluid .offset4{margin-left:37.01657458563536%;*margin-left:36.91019160691196%;} .row-fluid .offset4:first-child{margin-left:34.25414364640884%;*margin-left:34.14776066768544%;} .row-fluid .offset3{margin-left:28.45303867403315%;*margin-left:28.346655695309746%;} .row-fluid 
.offset3:first-child{margin-left:25.69060773480663%;*margin-left:25.584224756083227%;} .row-fluid .offset2{margin-left:19.88950276243094%;*margin-left:19.783119783707537%;} .row-fluid .offset2:first-child{margin-left:17.12707182320442%;*margin-left:17.02068884448102%;} .row-fluid .offset1{margin-left:11.32596685082873%;*margin-left:11.219583872105325%;} .row-fluid .offset1:first-child{margin-left:8.56353591160221%;*margin-left:8.457152932878806%;} input,textarea,.uneditable-input{margin-left:0;} .controls-row [class*="span"]+[class*="span"]{margin-left:20px;} input.span12, textarea.span12, .uneditable-input.span12{width:710px;} input.span11, textarea.span11, .uneditable-input.span11{width:648px;} input.span10, textarea.span10, .uneditable-input.span10{width:586px;} input.span9, textarea.span9, .uneditable-input.span9{width:524px;} input.span8, textarea.span8, .uneditable-input.span8{width:462px;} input.span7, textarea.span7, .uneditable-input.span7{width:400px;} input.span6, textarea.span6, .uneditable-input.span6{width:338px;} input.span5, textarea.span5, .uneditable-input.span5{width:276px;} input.span4, textarea.span4, .uneditable-input.span4{width:214px;} input.span3, textarea.span3, .uneditable-input.span3{width:152px;} input.span2, textarea.span2, .uneditable-input.span2{width:90px;} input.span1, textarea.span1, .uneditable-input.span1{width:28px;}}@media (min-width:1200px){.row{margin-left:-30px;*zoom:1;}.row:before,.row:after{display:table;content:"";line-height:0;} .row:after{clear:both;} [class*="span"]{float:left;min-height:1px;margin-left:30px;} .container,.navbar-static-top .container,.navbar-fixed-top .container,.navbar-fixed-bottom .container{width:1170px;} .span12{width:1170px;} .span11{width:1070px;} .span10{width:970px;} .span9{width:870px;} .span8{width:770px;} .span7{width:670px;} .span6{width:570px;} .span5{width:470px;} .span4{width:370px;} .span3{width:270px;} .span2{width:170px;} .span1{width:70px;} .offset12{margin-left:1230px;} .offset11{margin-left:1130px;} .offset10{margin-left:1030px;} .offset9{margin-left:930px;} .offset8{margin-left:830px;} .offset7{margin-left:730px;} .offset6{margin-left:630px;} .offset5{margin-left:530px;} .offset4{margin-left:430px;} .offset3{margin-left:330px;} .offset2{margin-left:230px;} .offset1{margin-left:130px;} .row-fluid{width:100%;*zoom:1;}.row-fluid:before,.row-fluid:after{display:table;content:"";line-height:0;} .row-fluid:after{clear:both;} .row-fluid [class*="span"]{display:block;width:100%;min-height:30px;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;float:left;margin-left:2.564102564102564%;*margin-left:2.5109110747408616%;} .row-fluid [class*="span"]:first-child{margin-left:0;} .row-fluid .span12{width:100%;*width:99.94680851063829%;} .row-fluid .span11{width:91.45299145299145%;*width:91.39979996362975%;} .row-fluid .span10{width:82.90598290598291%;*width:82.8527914166212%;} .row-fluid .span9{width:74.35897435897436%;*width:74.30578286961266%;} .row-fluid .span8{width:65.81196581196582%;*width:65.75877432260411%;} .row-fluid .span7{width:57.26495726495726%;*width:57.21176577559556%;} .row-fluid .span6{width:48.717948717948715%;*width:48.664757228587014%;} .row-fluid .span5{width:40.17094017094017%;*width:40.11774868157847%;} .row-fluid .span4{width:31.623931623931625%;*width:31.570740134569924%;} .row-fluid .span3{width:23.076923076923077%;*width:23.023731587561375%;} .row-fluid .span2{width:14.52991452991453%;*width:14.476723040552828%;} .row-fluid 
.span1{width:5.982905982905983%;*width:5.929714493544281%;} .row-fluid .offset12{margin-left:105.12820512820512%;*margin-left:105.02182214948171%;} .row-fluid .offset12:first-child{margin-left:102.56410256410257%;*margin-left:102.45771958537915%;} .row-fluid .offset11{margin-left:96.58119658119658%;*margin-left:96.47481360247316%;} .row-fluid .offset11:first-child{margin-left:94.01709401709402%;*margin-left:93.91071103837061%;} .row-fluid .offset10{margin-left:88.03418803418803%;*margin-left:87.92780505546462%;} .row-fluid .offset10:first-child{margin-left:85.47008547008548%;*margin-left:85.36370249136206%;} .row-fluid .offset9{margin-left:79.48717948717949%;*margin-left:79.38079650845607%;} .row-fluid .offset9:first-child{margin-left:76.92307692307693%;*margin-left:76.81669394435352%;} .row-fluid .offset8{margin-left:70.94017094017094%;*margin-left:70.83378796144753%;} .row-fluid .offset8:first-child{margin-left:68.37606837606839%;*margin-left:68.26968539734497%;} .row-fluid .offset7{margin-left:62.393162393162385%;*margin-left:62.28677941443899%;} .row-fluid .offset7:first-child{margin-left:59.82905982905982%;*margin-left:59.72267685033642%;} .row-fluid .offset6{margin-left:53.84615384615384%;*margin-left:53.739770867430444%;} .row-fluid .offset6:first-child{margin-left:51.28205128205128%;*margin-left:51.175668303327875%;} .row-fluid .offset5{margin-left:45.299145299145295%;*margin-left:45.1927623204219%;} .row-fluid .offset5:first-child{margin-left:42.73504273504273%;*margin-left:42.62865975631933%;} .row-fluid .offset4{margin-left:36.75213675213675%;*margin-left:36.645753773413354%;} .row-fluid .offset4:first-child{margin-left:34.18803418803419%;*margin-left:34.081651209310785%;} .row-fluid .offset3{margin-left:28.205128205128204%;*margin-left:28.0987452264048%;} .row-fluid .offset3:first-child{margin-left:25.641025641025642%;*margin-left:25.53464266230224%;} .row-fluid .offset2{margin-left:19.65811965811966%;*margin-left:19.551736679396257%;} .row-fluid .offset2:first-child{margin-left:17.094017094017094%;*margin-left:16.98763411529369%;} .row-fluid .offset1{margin-left:11.11111111111111%;*margin-left:11.004728132387708%;} .row-fluid .offset1:first-child{margin-left:8.547008547008547%;*margin-left:8.440625568285142%;} input,textarea,.uneditable-input{margin-left:0;} .controls-row [class*="span"]+[class*="span"]{margin-left:30px;} input.span12, textarea.span12, .uneditable-input.span12{width:1156px;} input.span11, textarea.span11, .uneditable-input.span11{width:1056px;} input.span10, textarea.span10, .uneditable-input.span10{width:956px;} input.span9, textarea.span9, .uneditable-input.span9{width:856px;} input.span8, textarea.span8, .uneditable-input.span8{width:756px;} input.span7, textarea.span7, .uneditable-input.span7{width:656px;} input.span6, textarea.span6, .uneditable-input.span6{width:556px;} input.span5, textarea.span5, .uneditable-input.span5{width:456px;} input.span4, textarea.span4, .uneditable-input.span4{width:356px;} input.span3, textarea.span3, .uneditable-input.span3{width:256px;} input.span2, textarea.span2, .uneditable-input.span2{width:156px;} input.span1, textarea.span1, .uneditable-input.span1{width:56px;} .thumbnails{margin-left:-30px;} .thumbnails>li{margin-left:30px;} .row-fluid .thumbnails{margin-left:0;}}@media (max-width:979px){body{padding-top:0;} .navbar-fixed-top,.navbar-fixed-bottom{position:static;} .navbar-fixed-top{margin-bottom:20px;} .navbar-fixed-bottom{margin-top:20px;} .navbar-fixed-top .navbar-inner,.navbar-fixed-bottom 
.navbar-inner{padding:5px;} .navbar .container{width:auto;padding:0;} .navbar .brand{padding-left:10px;padding-right:10px;margin:0 0 0 -5px;} .nav-collapse{clear:both;} .nav-collapse .nav{float:none;margin:0 0 10px;} .nav-collapse .nav>li{float:none;} .nav-collapse .nav>li>a{margin-bottom:2px;} .nav-collapse .nav>.divider-vertical{display:none;} .nav-collapse .nav .nav-header{color:#777777;text-shadow:none;} .nav-collapse .nav>li>a,.nav-collapse .dropdown-menu a{padding:9px 15px;font-weight:bold;color:#777777;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px;} .nav-collapse .btn{padding:4px 10px 4px;font-weight:normal;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;} .nav-collapse .dropdown-menu li+li a{margin-bottom:2px;} .nav-collapse .nav>li>a:hover,.nav-collapse .dropdown-menu a:hover{background-color:#f2f2f2;} .navbar-inverse .nav-collapse .nav>li>a:hover,.navbar-inverse .nav-collapse .dropdown-menu a:hover{background-color:#111111;} .nav-collapse.in .btn-group{margin-top:5px;padding:0;} .nav-collapse .dropdown-menu{position:static;top:auto;left:auto;float:none;display:block;max-width:none;margin:0 15px;padding:0;background-color:transparent;border:none;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0;-webkit-box-shadow:none;-moz-box-shadow:none;box-shadow:none;} .nav-collapse .dropdown-menu:before,.nav-collapse .dropdown-menu:after{display:none;} .nav-collapse .dropdown-menu .divider{display:none;} .nav-collapse .nav>li>.dropdown-menu:before,.nav-collapse .nav>li>.dropdown-menu:after{display:none;} .nav-collapse .navbar-form,.nav-collapse .navbar-search{float:none;padding:10px 15px;margin:10px 0;border-top:1px solid #f2f2f2;border-bottom:1px solid #f2f2f2;-webkit-box-shadow:inset 0 1px 0 rgba(255, 255, 255, 0.1), 0 1px 0 rgba(255, 255, 255, 0.1);-moz-box-shadow:inset 0 1px 0 rgba(255, 255, 255, 0.1), 0 1px 0 rgba(255, 255, 255, 0.1);box-shadow:inset 0 1px 0 rgba(255, 255, 255, 0.1), 0 1px 0 rgba(255, 255, 255, 0.1);} .navbar-inverse .nav-collapse .navbar-form,.navbar-inverse .nav-collapse .navbar-search{border-top-color:#111111;border-bottom-color:#111111;} .navbar .nav-collapse .nav.pull-right{float:none;margin-left:0;} .nav-collapse,.nav-collapse.collapse{overflow:hidden;height:0;} .navbar .btn-navbar{display:block;} .navbar-static .navbar-inner{padding-left:10px;padding-right:10px;}}@media (min-width:980px){.nav-collapse.collapse{height:auto !important;overflow:visible !important;}} diff --git a/awx/lib/site-packages/rest_framework/static/rest_framework/css/default.css b/awx/lib/site-packages/rest_framework/static/rest_framework/css/default.css new file mode 100644 index 0000000000..0261a3038b --- /dev/null +++ b/awx/lib/site-packages/rest_framework/static/rest_framework/css/default.css @@ -0,0 +1,71 @@ + +/* The navbar is fixed at >= 980px wide, so add padding to the body to prevent +content running up underneath it. 
*/ + +h1 { + font-weight: 500; +} + +h2, h3 { + font-weight: 300; +} + +.resource-description, .response-info { + margin-bottom: 2em; +} +.version:before { + content: "v"; + opacity: 0.6; + padding-right: 0.25em; +} + +.version { + font-size: 70%; +} + +.format-option { + font-family: Menlo, Consolas, "Andale Mono", "Lucida Console", monospace; +} + +.button-form { + float: right; + margin-right: 1em; +} + +ul.breadcrumb { + margin: 58px 0 0 0; +} + +form select, form input, form textarea { + width: 90%; +} + +form select[multiple] { + height: 150px; +} +/* To allow tooltips to work on disabled elements */ +.disabled-tooltip-shield { + position: absolute; + top: 0; + right: 0; + bottom: 0; + left: 0; +} + +.errorlist { + margin-top: 0.5em; +} + +pre { + overflow: auto; + word-wrap: normal; + white-space: pre; + font-size: 12px; +} + +.page-header { + border-bottom: none; + padding-bottom: 0px; + margin-bottom: 20px; +} + diff --git a/awx/lib/site-packages/rest_framework/static/rest_framework/css/prettify.css b/awx/lib/site-packages/rest_framework/static/rest_framework/css/prettify.css new file mode 100644 index 0000000000..d437aff62b --- /dev/null +++ b/awx/lib/site-packages/rest_framework/static/rest_framework/css/prettify.css @@ -0,0 +1,30 @@ +.com { color: #93a1a1; } +.lit { color: #195f91; } +.pun, .opn, .clo { color: #93a1a1; } +.fun { color: #dc322f; } +.str, .atv { color: #D14; } +.kwd, .prettyprint .tag { color: #1e347b; } +.typ, .atn, .dec, .var { color: teal; } +.pln { color: #48484c; } + +.prettyprint { + padding: 8px; + background-color: #f7f7f9; + border: 1px solid #e1e1e8; +} +.prettyprint.linenums { + -webkit-box-shadow: inset 40px 0 0 #fbfbfc, inset 41px 0 0 #ececf0; + -moz-box-shadow: inset 40px 0 0 #fbfbfc, inset 41px 0 0 #ececf0; + box-shadow: inset 40px 0 0 #fbfbfc, inset 41px 0 0 #ececf0; +} + +/* Specify class=linenums on a pre to get line numbering */ +ol.linenums { + margin: 0 0 0 33px; /* IE indents via margin-left */ +} +ol.linenums li { + padding-left: 12px; + color: #bebec5; + line-height: 20px; + text-shadow: 0 1px 0 #fff; +} \ No newline at end of file diff --git a/awx/lib/site-packages/rest_framework/static/rest_framework/img/glyphicons-halflings-white.png b/awx/lib/site-packages/rest_framework/static/rest_framework/img/glyphicons-halflings-white.png new file mode 100644 index 0000000000..3bf6484a29 Binary files /dev/null and b/awx/lib/site-packages/rest_framework/static/rest_framework/img/glyphicons-halflings-white.png differ diff --git a/awx/lib/site-packages/rest_framework/static/rest_framework/img/glyphicons-halflings.png b/awx/lib/site-packages/rest_framework/static/rest_framework/img/glyphicons-halflings.png new file mode 100644 index 0000000000..36c3b1ed9e Binary files /dev/null and b/awx/lib/site-packages/rest_framework/static/rest_framework/img/glyphicons-halflings.png differ diff --git a/awx/lib/site-packages/rest_framework/static/rest_framework/img/grid.png b/awx/lib/site-packages/rest_framework/static/rest_framework/img/grid.png new file mode 100644 index 0000000000..878c3ed5c1 Binary files /dev/null and b/awx/lib/site-packages/rest_framework/static/rest_framework/img/grid.png differ diff --git a/awx/lib/site-packages/rest_framework/static/rest_framework/js/bootstrap.min.js b/awx/lib/site-packages/rest_framework/static/rest_framework/js/bootstrap.min.js new file mode 100644 index 0000000000..e0b220f40c --- /dev/null +++ b/awx/lib/site-packages/rest_framework/static/rest_framework/js/bootstrap.min.js @@ -0,0 +1,7 @@ +/** +* Bootstrap.js by @fat & 
@mdo +* plugins: bootstrap-transition.js, bootstrap-modal.js, bootstrap-dropdown.js, bootstrap-scrollspy.js, bootstrap-tab.js, bootstrap-tooltip.js, bootstrap-popover.js, bootstrap-affix.js, bootstrap-alert.js, bootstrap-button.js, bootstrap-collapse.js, bootstrap-carousel.js, bootstrap-typeahead.js +* Copyright 2012 Twitter, Inc. +* http://www.apache.org/licenses/LICENSE-2.0.txt +*/ +!function(a){a(function(){a.support.transition=function(){var a=function(){var a=document.createElement("bootstrap"),b={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"},c;for(c in b)if(a.style[c]!==undefined)return b[c]}();return a&&{end:a}}()})}(window.jQuery),!function(a){var b=function(b,c){this.options=c,this.$element=a(b).delegate('[data-dismiss="modal"]',"click.dismiss.modal",a.proxy(this.hide,this)),this.options.remote&&this.$element.find(".modal-body").load(this.options.remote)};b.prototype={constructor:b,toggle:function(){return this[this.isShown?"hide":"show"]()},show:function(){var b=this,c=a.Event("show");this.$element.trigger(c);if(this.isShown||c.isDefaultPrevented())return;a("body").addClass("modal-open"),this.isShown=!0,this.escape(),this.backdrop(function(){var c=a.support.transition&&b.$element.hasClass("fade");b.$element.parent().length||b.$element.appendTo(document.body),b.$element.show(),c&&b.$element[0].offsetWidth,b.$element.addClass("in").attr("aria-hidden",!1).focus(),b.enforceFocus(),c?b.$element.one(a.support.transition.end,function(){b.$element.trigger("shown")}):b.$element.trigger("shown")})},hide:function(b){b&&b.preventDefault();var c=this;b=a.Event("hide"),this.$element.trigger(b);if(!this.isShown||b.isDefaultPrevented())return;this.isShown=!1,a("body").removeClass("modal-open"),this.escape(),a(document).off("focusin.modal"),this.$element.removeClass("in").attr("aria-hidden",!0),a.support.transition&&this.$element.hasClass("fade")?this.hideWithTransition():this.hideModal()},enforceFocus:function(){var b=this;a(document).on("focusin.modal",function(a){b.$element[0]!==a.target&&!b.$element.has(a.target).length&&b.$element.focus()})},escape:function(){var a=this;this.isShown&&this.options.keyboard?this.$element.on("keyup.dismiss.modal",function(b){b.which==27&&a.hide()}):this.isShown||this.$element.off("keyup.dismiss.modal")},hideWithTransition:function(){var b=this,c=setTimeout(function(){b.$element.off(a.support.transition.end),b.hideModal()},500);this.$element.one(a.support.transition.end,function(){clearTimeout(c),b.hideModal()})},hideModal:function(a){this.$element.hide().trigger("hidden"),this.backdrop()},removeBackdrop:function(){this.$backdrop.remove(),this.$backdrop=null},backdrop:function(b){var c=this,d=this.$element.hasClass("fade")?"fade":"";if(this.isShown&&this.options.backdrop){var e=a.support.transition&&d;this.$backdrop=a('<div class="modal-backdrop '+d+'" />').appendTo(document.body),this.options.backdrop!="static"&&this.$backdrop.click(a.proxy(this.hide,this)),e&&this.$backdrop[0].offsetWidth,this.$backdrop.addClass("in"),e?this.$backdrop.one(a.support.transition.end,b):b()}else!this.isShown&&this.$backdrop?(this.$backdrop.removeClass("in"),a.support.transition&&this.$element.hasClass("fade")?this.$backdrop.one(a.support.transition.end,a.proxy(this.removeBackdrop,this)):this.removeBackdrop()):b&&b()}},a.fn.modal=function(c){return this.each(function(){var d=a(this),e=d.data("modal"),f=a.extend({},a.fn.modal.defaults,d.data(),typeof c=="object"&&c);e||d.data("modal",e=new 
b(this,f)),typeof c=="string"?e[c]():f.show&&e.show()})},a.fn.modal.defaults={backdrop:!0,keyboard:!0,show:!0},a.fn.modal.Constructor=b,a(function(){a("body").on("click.modal.data-api",'[data-toggle="modal"]',function(b){var c=a(this),d=c.attr("href"),e=a(c.attr("data-target")||d&&d.replace(/.*(?=#[^\s]+$)/,"")),f=e.data("modal")?"toggle":a.extend({remote:!/#/.test(d)&&d},e.data(),c.data());b.preventDefault(),e.modal(f).one("hide",function(){c.focus()})})})}(window.jQuery),!function(a){function d(){e(a(b)).removeClass("open")}function e(b){var c=b.attr("data-target"),d;return c||(c=b.attr("href"),c=c&&/#/.test(c)&&c.replace(/.*(?=#[^\s]*$)/,"")),d=a(c),d.length||(d=b.parent()),d}var b="[data-toggle=dropdown]",c=function(b){var c=a(b).on("click.dropdown.data-api",this.toggle);a("html").on("click.dropdown.data-api",function(){c.parent().removeClass("open")})};c.prototype={constructor:c,toggle:function(b){var c=a(this),f,g;if(c.is(".disabled, :disabled"))return;return f=e(c),g=f.hasClass("open"),d(),g||(f.toggleClass("open"),c.focus()),!1},keydown:function(b){var c,d,f,g,h,i;if(!/(38|40|27)/.test(b.keyCode))return;c=a(this),b.preventDefault(),b.stopPropagation();if(c.is(".disabled, :disabled"))return;g=e(c),h=g.hasClass("open");if(!h||h&&b.keyCode==27)return c.click();d=a("[role=menu] li:not(.divider) a",g);if(!d.length)return;i=d.index(d.filter(":focus")),b.keyCode==38&&i>0&&i--,b.keyCode==40&&i<d.length-1&&i++,~i||(i=0),d.eq(i).focus()}},a.fn.dropdown=function(b){return this.each(function(){var d=a(this),e=d.data("dropdown");e||d.data("dropdown",e=new c(this)),typeof b=="string"&&e[b].call(d)})},a.fn.dropdown.Constructor=c,a(function(){a("html").on("click.dropdown.data-api touchstart.dropdown.data-api",d),a("body").on("click.dropdown touchstart.dropdown.data-api",".dropdown form",function(a){a.stopPropagation()}).on("click.dropdown.data-api touchstart.dropdown.data-api",b,c.prototype.toggle).on("keydown.dropdown.data-api touchstart.dropdown.data-api",b+", [role=menu]",c.prototype.keydown)})}(window.jQuery),!function(a){function b(b,c){var d=a.proxy(this.process,this),e=a(b).is("body")?a(window):a(b),f;this.options=a.extend({},a.fn.scrollspy.defaults,c),this.$scrollElement=e.on("scroll.scroll-spy.data-api",d),this.selector=(this.options.target||(f=a(b).attr("href"))&&f.replace(/.*(?=#[^\s]+$)/,"")||"")+" .nav li > a",this.$body=a("body"),this.refresh(),this.process()}b.prototype={constructor:b,refresh:function(){var b=this,c;this.offsets=a([]),this.targets=a([]),c=this.$body.find(this.selector).map(function(){var b=a(this),c=b.data("target")||b.attr("href"),d=/^#\w/.test(c)&&a(c);return d&&d.length&&[[d.position().top,c]]||null}).sort(function(a,b){return a[0]-b[0]}).each(function(){b.offsets.push(this[0]),b.targets.push(this[1])})},process:function(){var a=this.$scrollElement.scrollTop()+this.options.offset,b=this.$scrollElement[0].scrollHeight||this.$body[0].scrollHeight,c=b-this.$scrollElement.height(),d=this.offsets,e=this.targets,f=this.activeTarget,g;if(a>=c)return f!=(g=e.last()[0])&&this.activate(g);for(g=d.length;g--;)f!=e[g]&&a>=d[g]&&(!d[g+1]||a<=d[g+1])&&this.activate(e[g])},activate:function(b){var c,d;this.activeTarget=b,a(this.selector).parent(".active").removeClass("active"),d=this.selector+'[data-target="'+b+'"],'+this.selector+'[href="'+b+'"]',c=a(d).parent("li").addClass("active"),c.parent(".dropdown-menu").length&&(c=c.closest("li.dropdown").addClass("active")),c.trigger("activate")}},a.fn.scrollspy=function(c){return this.each(function(){var 
d=a(this),e=d.data("scrollspy"),f=typeof c=="object"&&c;e||d.data("scrollspy",e=new b(this,f)),typeof c=="string"&&e[c]()})},a.fn.scrollspy.Constructor=b,a.fn.scrollspy.defaults={offset:10},a(window).on("load",function(){a('[data-spy="scroll"]').each(function(){var b=a(this);b.scrollspy(b.data())})})}(window.jQuery),!function(a){var b=function(b){this.element=a(b)};b.prototype={constructor:b,show:function(){var b=this.element,c=b.closest("ul:not(.dropdown-menu)"),d=b.attr("data-target"),e,f,g;d||(d=b.attr("href"),d=d&&d.replace(/.*(?=#[^\s]*$)/,""));if(b.parent("li").hasClass("active"))return;e=c.find(".active a").last()[0],g=a.Event("show",{relatedTarget:e}),b.trigger(g);if(g.isDefaultPrevented())return;f=a(d),this.activate(b.parent("li"),c),this.activate(f,f.parent(),function(){b.trigger({type:"shown",relatedTarget:e})})},activate:function(b,c,d){function g(){e.removeClass("active").find("> .dropdown-menu > .active").removeClass("active"),b.addClass("active"),f?(b[0].offsetWidth,b.addClass("in")):b.removeClass("fade"),b.parent(".dropdown-menu")&&b.closest("li.dropdown").addClass("active"),d&&d()}var e=c.find("> .active"),f=d&&a.support.transition&&e.hasClass("fade");f?e.one(a.support.transition.end,g):g(),e.removeClass("in")}},a.fn.tab=function(c){return this.each(function(){var d=a(this),e=d.data("tab");e||d.data("tab",e=new b(this)),typeof c=="string"&&e[c]()})},a.fn.tab.Constructor=b,a(function(){a("body").on("click.tab.data-api",'[data-toggle="tab"], [data-toggle="pill"]',function(b){b.preventDefault(),a(this).tab("show")})})}(window.jQuery),!function(a){var b=function(a,b){this.init("tooltip",a,b)};b.prototype={constructor:b,init:function(b,c,d){var e,f;this.type=b,this.$element=a(c),this.options=this.getOptions(d),this.enabled=!0,this.options.trigger=="click"?this.$element.on("click."+this.type,this.options.selector,a.proxy(this.toggle,this)):this.options.trigger!="manual"&&(e=this.options.trigger=="hover"?"mouseenter":"focus",f=this.options.trigger=="hover"?"mouseleave":"blur",this.$element.on(e+"."+this.type,this.options.selector,a.proxy(this.enter,this)),this.$element.on(f+"."+this.type,this.options.selector,a.proxy(this.leave,this))),this.options.selector?this._options=a.extend({},this.options,{trigger:"manual",selector:""}):this.fixTitle()},getOptions:function(b){return b=a.extend({},a.fn[this.type].defaults,b,this.$element.data()),b.delay&&typeof b.delay=="number"&&(b.delay={show:b.delay,hide:b.delay}),b},enter:function(b){var c=a(b.currentTarget)[this.type](this._options).data(this.type);if(!c.options.delay||!c.options.delay.show)return c.show();clearTimeout(this.timeout),c.hoverState="in",this.timeout=setTimeout(function(){c.hoverState=="in"&&c.show()},c.options.delay.show)},leave:function(b){var c=a(b.currentTarget)[this.type](this._options).data(this.type);this.timeout&&clearTimeout(this.timeout);if(!c.options.delay||!c.options.delay.hide)return c.hide();c.hoverState="out",this.timeout=setTimeout(function(){c.hoverState=="out"&&c.hide()},c.options.delay.hide)},show:function(){var a,b,c,d,e,f,g;if(this.hasContent()&&this.enabled){a=this.tip(),this.setContent(),this.options.animation&&a.addClass("fade"),f=typeof this.options.placement=="function"?this.options.placement.call(this,a[0],this.$element[0]):this.options.placement,b=/in/.test(f),a.remove().css({top:0,left:0,display:"block"}).appendTo(b?this.$element:document.body),c=this.getPosition(b),d=a[0].offsetWidth,e=a[0].offsetHeight;switch(b?f.split(" 
")[1]:f){case"bottom":g={top:c.top+c.height,left:c.left+c.width/2-d/2};break;case"top":g={top:c.top-e,left:c.left+c.width/2-d/2};break;case"left":g={top:c.top+c.height/2-e/2,left:c.left-d};break;case"right":g={top:c.top+c.height/2-e/2,left:c.left+c.width}}a.css(g).addClass(f).addClass("in")}},setContent:function(){var a=this.tip(),b=this.getTitle();a.find(".tooltip-inner")[this.options.html?"html":"text"](b),a.removeClass("fade in top bottom left right")},hide:function(){function d(){var b=setTimeout(function(){c.off(a.support.transition.end).remove()},500);c.one(a.support.transition.end,function(){clearTimeout(b),c.remove()})}var b=this,c=this.tip();return c.removeClass("in"),a.support.transition&&this.$tip.hasClass("fade")?d():c.remove(),this},fixTitle:function(){var a=this.$element;(a.attr("title")||typeof a.attr("data-original-title")!="string")&&a.attr("data-original-title",a.attr("title")||"").removeAttr("title")},hasContent:function(){return this.getTitle()},getPosition:function(b){return a.extend({},b?{top:0,left:0}:this.$element.offset(),{width:this.$element[0].offsetWidth,height:this.$element[0].offsetHeight})},getTitle:function(){var a,b=this.$element,c=this.options;return a=b.attr("data-original-title")||(typeof c.title=="function"?c.title.call(b[0]):c.title),a},tip:function(){return this.$tip=this.$tip||a(this.options.template)},validate:function(){this.$element[0].parentNode||(this.hide(),this.$element=null,this.options=null)},enable:function(){this.enabled=!0},disable:function(){this.enabled=!1},toggleEnabled:function(){this.enabled=!this.enabled},toggle:function(){this[this.tip().hasClass("in")?"hide":"show"]()},destroy:function(){this.hide().$element.off("."+this.type).removeData(this.type)}},a.fn.tooltip=function(c){return this.each(function(){var d=a(this),e=d.data("tooltip"),f=typeof c=="object"&&c;e||d.data("tooltip",e=new b(this,f)),typeof c=="string"&&e[c]()})},a.fn.tooltip.Constructor=b,a.fn.tooltip.defaults={animation:!0,placement:"top",selector:!1,template:'<div class="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>',trigger:"hover",title:"",delay:0,html:!0}}(window.jQuery),!function(a){var b=function(a,b){this.init("popover",a,b)};b.prototype=a.extend({},a.fn.tooltip.Constructor.prototype,{constructor:b,setContent:function(){var a=this.tip(),b=this.getTitle(),c=this.getContent();a.find(".popover-title")[this.options.html?"html":"text"](b),a.find(".popover-content > *")[this.options.html?"html":"text"](c),a.removeClass("fade top bottom left right in")},hasContent:function(){return this.getTitle()||this.getContent()},getContent:function(){var a,b=this.$element,c=this.options;return a=b.attr("data-content")||(typeof c.content=="function"?c.content.call(b[0]):c.content),a},tip:function(){return this.$tip||(this.$tip=a(this.options.template)),this.$tip},destroy:function(){this.hide().$element.off("."+this.type).removeData(this.type)}}),a.fn.popover=function(c){return this.each(function(){var d=a(this),e=d.data("popover"),f=typeof c=="object"&&c;e||d.data("popover",e=new b(this,f)),typeof c=="string"&&e[c]()})},a.fn.popover.Constructor=b,a.fn.popover.defaults=a.extend({},a.fn.tooltip.defaults,{placement:"right",trigger:"click",content:"",template:'<div class="popover"><div class="arrow"></div><div class="popover-inner"><h3 class="popover-title"></h3><div class="popover-content"><p></p></div></div></div>'})}(window.jQuery),!function(a){var 
b=function(b,c){this.options=a.extend({},a.fn.affix.defaults,c),this.$window=a(window).on("scroll.affix.data-api",a.proxy(this.checkPosition,this)),this.$element=a(b),this.checkPosition()};b.prototype.checkPosition=function(){if(!this.$element.is(":visible"))return;var b=a(document).height(),c=this.$window.scrollTop(),d=this.$element.offset(),e=this.options.offset,f=e.bottom,g=e.top,h="affix affix-top affix-bottom",i;typeof e!="object"&&(f=g=e),typeof g=="function"&&(g=e.top()),typeof f=="function"&&(f=e.bottom()),i=this.unpin!=null&&c+this.unpin<=d.top?!1:f!=null&&d.top+this.$element.height()>=b-f?"bottom":g!=null&&c<=g?"top":!1;if(this.affixed===i)return;this.affixed=i,this.unpin=i=="bottom"?d.top-c:null,this.$element.removeClass(h).addClass("affix"+(i?"-"+i:""))},a.fn.affix=function(c){return this.each(function(){var d=a(this),e=d.data("affix"),f=typeof c=="object"&&c;e||d.data("affix",e=new b(this,f)),typeof c=="string"&&e[c]()})},a.fn.affix.Constructor=b,a.fn.affix.defaults={offset:0},a(window).on("load",function(){a('[data-spy="affix"]').each(function(){var b=a(this),c=b.data();c.offset=c.offset||{},c.offsetBottom&&(c.offset.bottom=c.offsetBottom),c.offsetTop&&(c.offset.top=c.offsetTop),b.affix(c)})})}(window.jQuery),!function(a){var b='[data-dismiss="alert"]',c=function(c){a(c).on("click",b,this.close)};c.prototype.close=function(b){function f(){e.trigger("closed").remove()}var c=a(this),d=c.attr("data-target"),e;d||(d=c.attr("href"),d=d&&d.replace(/.*(?=#[^\s]*$)/,"")),e=a(d),b&&b.preventDefault(),e.length||(e=c.hasClass("alert")?c:c.parent()),e.trigger(b=a.Event("close"));if(b.isDefaultPrevented())return;e.removeClass("in"),a.support.transition&&e.hasClass("fade")?e.on(a.support.transition.end,f):f()},a.fn.alert=function(b){return this.each(function(){var d=a(this),e=d.data("alert");e||d.data("alert",e=new c(this)),typeof b=="string"&&e[b].call(d)})},a.fn.alert.Constructor=c,a(function(){a("body").on("click.alert.data-api",b,c.prototype.close)})}(window.jQuery),!function(a){var b=function(b,c){this.$element=a(b),this.options=a.extend({},a.fn.button.defaults,c)};b.prototype.setState=function(a){var b="disabled",c=this.$element,d=c.data(),e=c.is("input")?"val":"html";a+="Text",d.resetText||c.data("resetText",c[e]()),c[e](d[a]||this.options[a]),setTimeout(function(){a=="loadingText"?c.addClass(b).attr(b,b):c.removeClass(b).removeAttr(b)},0)},b.prototype.toggle=function(){var a=this.$element.closest('[data-toggle="buttons-radio"]');a&&a.find(".active").removeClass("active"),this.$element.toggleClass("active")},a.fn.button=function(c){return this.each(function(){var d=a(this),e=d.data("button"),f=typeof c=="object"&&c;e||d.data("button",e=new b(this,f)),c=="toggle"?e.toggle():c&&e.setState(c)})},a.fn.button.defaults={loadingText:"loading..."},a.fn.button.Constructor=b,a(function(){a("body").on("click.button.data-api","[data-toggle^=button]",function(b){var c=a(b.target);c.hasClass("btn")||(c=c.closest(".btn")),c.button("toggle")})})}(window.jQuery),!function(a){var b=function(b,c){this.$element=a(b),this.options=a.extend({},a.fn.collapse.defaults,c),this.options.parent&&(this.$parent=a(this.options.parent)),this.options.toggle&&this.toggle()};b.prototype={constructor:b,dimension:function(){var a=this.$element.hasClass("width");return a?"width":"height"},show:function(){var b,c,d,e;if(this.transitioning)return;b=this.dimension(),c=a.camelCase(["scroll",b].join("-")),d=this.$parent&&this.$parent.find("> .accordion-group > 
.in");if(d&&d.length){e=d.data("collapse");if(e&&e.transitioning)return;d.collapse("hide"),e||d.data("collapse",null)}this.$element[b](0),this.transition("addClass",a.Event("show"),"shown"),a.support.transition&&this.$element[b](this.$element[0][c])},hide:function(){var b;if(this.transitioning)return;b=this.dimension(),this.reset(this.$element[b]()),this.transition("removeClass",a.Event("hide"),"hidden"),this.$element[b](0)},reset:function(a){var b=this.dimension();return this.$element.removeClass("collapse")[b](a||"auto")[0].offsetWidth,this.$element[a!==null?"addClass":"removeClass"]("collapse"),this},transition:function(b,c,d){var e=this,f=function(){c.type=="show"&&e.reset(),e.transitioning=0,e.$element.trigger(d)};this.$element.trigger(c);if(c.isDefaultPrevented())return;this.transitioning=1,this.$element[b]("in"),a.support.transition&&this.$element.hasClass("collapse")?this.$element.one(a.support.transition.end,f):f()},toggle:function(){this[this.$element.hasClass("in")?"hide":"show"]()}},a.fn.collapse=function(c){return this.each(function(){var d=a(this),e=d.data("collapse"),f=typeof c=="object"&&c;e||d.data("collapse",e=new b(this,f)),typeof c=="string"&&e[c]()})},a.fn.collapse.defaults={toggle:!0},a.fn.collapse.Constructor=b,a(function(){a("body").on("click.collapse.data-api","[data-toggle=collapse]",function(b){var c=a(this),d,e=c.attr("data-target")||b.preventDefault()||(d=c.attr("href"))&&d.replace(/.*(?=#[^\s]+$)/,""),f=a(e).data("collapse")?"toggle":c.data();c[a(e).hasClass("in")?"addClass":"removeClass"]("collapsed"),a(e).collapse(f)})})}(window.jQuery),!function(a){var b=function(b,c){this.$element=a(b),this.options=c,this.options.slide&&this.slide(this.options.slide),this.options.pause=="hover"&&this.$element.on("mouseenter",a.proxy(this.pause,this)).on("mouseleave",a.proxy(this.cycle,this))};b.prototype={cycle:function(b){return b||(this.paused=!1),this.options.interval&&!this.paused&&(this.interval=setInterval(a.proxy(this.next,this),this.options.interval)),this},to:function(b){var c=this.$element.find(".item.active"),d=c.parent().children(),e=d.index(c),f=this;if(b>d.length-1||b<0)return;return this.sliding?this.$element.one("slid",function(){f.to(b)}):e==b?this.pause().cycle():this.slide(b>e?"next":"prev",a(d[b]))},pause:function(b){return b||(this.paused=!0),this.$element.find(".next, .prev").length&&a.support.transition.end&&(this.$element.trigger(a.support.transition.end),this.cycle()),clearInterval(this.interval),this.interval=null,this},next:function(){if(this.sliding)return;return this.slide("next")},prev:function(){if(this.sliding)return;return this.slide("prev")},slide:function(b,c){var d=this.$element.find(".item.active"),e=c||d[b](),f=this.interval,g=b=="next"?"left":"right",h=b=="next"?"first":"last",i=this,j=a.Event("slide",{relatedTarget:e[0]});this.sliding=!0,f&&this.pause(),e=e.length?e:this.$element.find(".item")[h]();if(e.hasClass("active"))return;if(a.support.transition&&this.$element.hasClass("slide")){this.$element.trigger(j);if(j.isDefaultPrevented())return;e.addClass(b),e[0].offsetWidth,d.addClass(g),e.addClass(g),this.$element.one(a.support.transition.end,function(){e.removeClass([b,g].join(" ")).addClass("active"),d.removeClass(["active",g].join(" ")),i.sliding=!1,setTimeout(function(){i.$element.trigger("slid")},0)})}else{this.$element.trigger(j);if(j.isDefaultPrevented())return;d.removeClass("active"),e.addClass("active"),this.sliding=!1,this.$element.trigger("slid")}return f&&this.cycle(),this}},a.fn.carousel=function(c){return 
this.each(function(){var d=a(this),e=d.data("carousel"),f=a.extend({},a.fn.carousel.defaults,typeof c=="object"&&c),g=typeof c=="string"?c:f.slide;e||d.data("carousel",e=new b(this,f)),typeof c=="number"?e.to(c):g?e[g]():f.interval&&e.cycle()})},a.fn.carousel.defaults={interval:5e3,pause:"hover"},a.fn.carousel.Constructor=b,a(function(){a("body").on("click.carousel.data-api","[data-slide]",function(b){var c=a(this),d,e=a(c.attr("data-target")||(d=c.attr("href"))&&d.replace(/.*(?=#[^\s]+$)/,"")),f=!e.data("modal")&&a.extend({},e.data(),c.data());e.carousel(f),b.preventDefault()})})}(window.jQuery),!function(a){var b=function(b,c){this.$element=a(b),this.options=a.extend({},a.fn.typeahead.defaults,c),this.matcher=this.options.matcher||this.matcher,this.sorter=this.options.sorter||this.sorter,this.highlighter=this.options.highlighter||this.highlighter,this.updater=this.options.updater||this.updater,this.$menu=a(this.options.menu).appendTo("body"),this.source=this.options.source,this.shown=!1,this.listen()};b.prototype={constructor:b,select:function(){var a=this.$menu.find(".active").attr("data-value");return this.$element.val(this.updater(a)).change(),this.hide()},updater:function(a){return a},show:function(){var b=a.extend({},this.$element.offset(),{height:this.$element[0].offsetHeight});return this.$menu.css({top:b.top+b.height,left:b.left}),this.$menu.show(),this.shown=!0,this},hide:function(){return this.$menu.hide(),this.shown=!1,this},lookup:function(b){var c;return this.query=this.$element.val(),!this.query||this.query.length<this.options.minLength?this.shown?this.hide():this:(c=a.isFunction(this.source)?this.source(this.query,a.proxy(this.process,this)):this.source,c?this.process(c):this)},process:function(b){var c=this;return b=a.grep(b,function(a){return c.matcher(a)}),b=this.sorter(b),b.length?this.render(b.slice(0,this.options.items)).show():this.shown?this.hide():this},matcher:function(a){return~a.toLowerCase().indexOf(this.query.toLowerCase())},sorter:function(a){var b=[],c=[],d=[],e;while(e=a.shift())e.toLowerCase().indexOf(this.query.toLowerCase())?~e.indexOf(this.query)?c.push(e):d.push(e):b.push(e);return b.concat(c,d)},highlighter:function(a){var b=this.query.replace(/[\-\[\]{}()*+?.,\\\^$|#\s]/g,"\\$&");return a.replace(new RegExp("("+b+")","ig"),function(a,b){return"<strong>"+b+"</strong>"})},render:function(b){var c=this;return b=a(b).map(function(b,d){return b=a(c.options.item).attr("data-value",d),b.find("a").html(c.highlighter(d)),b[0]}),b.first().addClass("active"),this.$menu.html(b),this},next:function(b){var c=this.$menu.find(".active").removeClass("active"),d=c.next();d.length||(d=a(this.$menu.find("li")[0])),d.addClass("active")},prev:function(a){var b=this.$menu.find(".active").removeClass("active"),c=b.prev();c.length||(c=this.$menu.find("li").last()),c.addClass("active")},listen:function(){this.$element.on("blur",a.proxy(this.blur,this)).on("keypress",a.proxy(this.keypress,this)).on("keyup",a.proxy(this.keyup,this)),(a.browser.chrome||a.browser.webkit||a.browser.msie)&&this.$element.on("keydown",a.proxy(this.keydown,this)),this.$menu.on("click",a.proxy(this.click,this)).on("mouseenter","li",a.proxy(this.mouseenter,this))},move:function(a){if(!this.shown)return;switch(a.keyCode){case 9:case 13:case 27:a.preventDefault();break;case 38:a.preventDefault(),this.prev();break;case 
40:a.preventDefault(),this.next()}a.stopPropagation()},keydown:function(b){this.suppressKeyPressRepeat=!~a.inArray(b.keyCode,[40,38,9,13,27]),this.move(b)},keypress:function(a){if(this.suppressKeyPressRepeat)return;this.move(a)},keyup:function(a){switch(a.keyCode){case 40:case 38:break;case 9:case 13:if(!this.shown)return;this.select();break;case 27:if(!this.shown)return;this.hide();break;default:this.lookup()}a.stopPropagation(),a.preventDefault()},blur:function(a){var b=this;setTimeout(function(){b.hide()},150)},click:function(a){a.stopPropagation(),a.preventDefault(),this.select()},mouseenter:function(b){this.$menu.find(".active").removeClass("active"),a(b.currentTarget).addClass("active")}},a.fn.typeahead=function(c){return this.each(function(){var d=a(this),e=d.data("typeahead"),f=typeof c=="object"&&c;e||d.data("typeahead",e=new b(this,f)),typeof c=="string"&&e[c]()})},a.fn.typeahead.defaults={source:[],items:8,menu:'<ul class="typeahead dropdown-menu"></ul>',item:'<li><a href="#"></a></li>',minLength:1},a.fn.typeahead.Constructor=b,a(function(){a("body").on("focus.typeahead.data-api",'[data-provide="typeahead"]',function(b){var c=a(this);if(c.data("typeahead"))return;b.preventDefault(),c.typeahead(c.data())})})}(window.jQuery)
\ No newline at end of file
diff --git a/awx/lib/site-packages/rest_framework/static/rest_framework/js/default.js b/awx/lib/site-packages/rest_framework/static/rest_framework/js/default.js
new file mode 100644
index 0000000000..c74829d7d5
--- /dev/null
+++ b/awx/lib/site-packages/rest_framework/static/rest_framework/js/default.js
@@ -0,0 +1,13 @@
+prettyPrint();
+
+$('.js-tooltip').tooltip({
+    delay: 1000
+});
+
+$('a[data-toggle="tab"]:first').on('shown', function (e) {
+    $(e.target).parents('.tabbable').addClass('first-tab-active');
+});
+$('a[data-toggle="tab"]:not(:first)').on('shown', function (e) {
+    $(e.target).parents('.tabbable').removeClass('first-tab-active');
+});
+$('.form-switcher a:first').tab('show');
diff --git a/awx/lib/site-packages/rest_framework/static/rest_framework/js/jquery-1.8.1-min.js b/awx/lib/site-packages/rest_framework/static/rest_framework/js/jquery-1.8.1-min.js
new file mode 100644
index 0000000000..3b8d15d064
--- /dev/null
+++ b/awx/lib/site-packages/rest_framework/static/rest_framework/js/jquery-1.8.1-min.js
@@ -0,0 +1,2 @@
+/*!
jQuery v@1.8.1 jquery.com | jquery.org/license */ +(function(a,b){function G(a){var b=F[a]={};return p.each(a.split(s),function(a,c){b[c]=!0}),b}function J(a,c,d){if(d===b&&a.nodeType===1){var e="data-"+c.replace(I,"-$1").toLowerCase();d=a.getAttribute(e);if(typeof d=="string"){try{d=d==="true"?!0:d==="false"?!1:d==="null"?null:+d+""===d?+d:H.test(d)?p.parseJSON(d):d}catch(f){}p.data(a,c,d)}else d=b}return d}function K(a){var b;for(b in a){if(b==="data"&&p.isEmptyObject(a[b]))continue;if(b!=="toJSON")return!1}return!0}function ba(){return!1}function bb(){return!0}function bh(a){return!a||!a.parentNode||a.parentNode.nodeType===11}function bi(a,b){do a=a[b];while(a&&a.nodeType!==1);return a}function bj(a,b,c){b=b||0;if(p.isFunction(b))return p.grep(a,function(a,d){var e=!!b.call(a,d,a);return e===c});if(b.nodeType)return p.grep(a,function(a,d){return a===b===c});if(typeof b=="string"){var d=p.grep(a,function(a){return a.nodeType===1});if(be.test(b))return p.filter(b,d,!c);b=p.filter(b,d)}return p.grep(a,function(a,d){return p.inArray(a,b)>=0===c})}function bk(a){var b=bl.split("|"),c=a.createDocumentFragment();if(c.createElement)while(b.length)c.createElement(b.pop());return c}function bC(a,b){return a.getElementsByTagName(b)[0]||a.appendChild(a.ownerDocument.createElement(b))}function bD(a,b){if(b.nodeType!==1||!p.hasData(a))return;var c,d,e,f=p._data(a),g=p._data(b,f),h=f.events;if(h){delete g.handle,g.events={};for(c in h)for(d=0,e=h[c].length;d<e;d++)p.event.add(b,c,h[c][d])}g.data&&(g.data=p.extend({},g.data))}function bE(a,b){var c;if(b.nodeType!==1)return;b.clearAttributes&&b.clearAttributes(),b.mergeAttributes&&b.mergeAttributes(a),c=b.nodeName.toLowerCase(),c==="object"?(b.parentNode&&(b.outerHTML=a.outerHTML),p.support.html5Clone&&a.innerHTML&&!p.trim(b.innerHTML)&&(b.innerHTML=a.innerHTML)):c==="input"&&bv.test(a.type)?(b.defaultChecked=b.checked=a.checked,b.value!==a.value&&(b.value=a.value)):c==="option"?b.selected=a.defaultSelected:c==="input"||c==="textarea"?b.defaultValue=a.defaultValue:c==="script"&&b.text!==a.text&&(b.text=a.text),b.removeAttribute(p.expando)}function bF(a){return typeof a.getElementsByTagName!="undefined"?a.getElementsByTagName("*"):typeof a.querySelectorAll!="undefined"?a.querySelectorAll("*"):[]}function bG(a){bv.test(a.type)&&(a.defaultChecked=a.checked)}function bY(a,b){if(b in a)return b;var c=b.charAt(0).toUpperCase()+b.slice(1),d=b,e=bW.length;while(e--){b=bW[e]+c;if(b in a)return b}return d}function bZ(a,b){return a=b||a,p.css(a,"display")==="none"||!p.contains(a.ownerDocument,a)}function b$(a,b){var c,d,e=[],f=0,g=a.length;for(;f<g;f++){c=a[f];if(!c.style)continue;e[f]=p._data(c,"olddisplay"),b?(!e[f]&&c.style.display==="none"&&(c.style.display=""),c.style.display===""&&bZ(c)&&(e[f]=p._data(c,"olddisplay",cc(c.nodeName)))):(d=bH(c,"display"),!e[f]&&d!=="none"&&p._data(c,"olddisplay",d))}for(f=0;f<g;f++){c=a[f];if(!c.style)continue;if(!b||c.style.display==="none"||c.style.display==="")c.style.display=b?e[f]||"":"none"}return a}function b_(a,b,c){var d=bP.exec(b);return d?Math.max(0,d[1]-(c||0))+(d[2]||"px"):b}function ca(a,b,c,d){var e=c===(d?"border":"content")?4:b==="width"?1:0,f=0;for(;e<4;e+=2)c==="margin"&&(f+=p.css(a,c+bV[e],!0)),d?(c==="content"&&(f-=parseFloat(bH(a,"padding"+bV[e]))||0),c!=="margin"&&(f-=parseFloat(bH(a,"border"+bV[e]+"Width"))||0)):(f+=parseFloat(bH(a,"padding"+bV[e]))||0,c!=="padding"&&(f+=parseFloat(bH(a,"border"+bV[e]+"Width"))||0));return f}function cb(a,b,c){var 
d=b==="width"?a.offsetWidth:a.offsetHeight,e=!0,f=p.support.boxSizing&&p.css(a,"boxSizing")==="border-box";if(d<=0||d==null){d=bH(a,b);if(d<0||d==null)d=a.style[b];if(bQ.test(d))return d;e=f&&(p.support.boxSizingReliable||d===a.style[b]),d=parseFloat(d)||0}return d+ca(a,b,c||(f?"border":"content"),e)+"px"}function cc(a){if(bS[a])return bS[a];var b=p("<"+a+">").appendTo(e.body),c=b.css("display");b.remove();if(c==="none"||c===""){bI=e.body.appendChild(bI||p.extend(e.createElement("iframe"),{frameBorder:0,width:0,height:0}));if(!bJ||!bI.createElement)bJ=(bI.contentWindow||bI.contentDocument).document,bJ.write("<!doctype html><html><body>"),bJ.close();b=bJ.body.appendChild(bJ.createElement(a)),c=bH(b,"display"),e.body.removeChild(bI)}return bS[a]=c,c}function ci(a,b,c,d){var e;if(p.isArray(b))p.each(b,function(b,e){c||ce.test(a)?d(a,e):ci(a+"["+(typeof e=="object"?b:"")+"]",e,c,d)});else if(!c&&p.type(b)==="object")for(e in b)ci(a+"["+e+"]",b[e],c,d);else d(a,b)}function cz(a){return function(b,c){typeof b!="string"&&(c=b,b="*");var d,e,f,g=b.toLowerCase().split(s),h=0,i=g.length;if(p.isFunction(c))for(;h<i;h++)d=g[h],f=/^\+/.test(d),f&&(d=d.substr(1)||"*"),e=a[d]=a[d]||[],e[f?"unshift":"push"](c)}}function cA(a,c,d,e,f,g){f=f||c.dataTypes[0],g=g||{},g[f]=!0;var h,i=a[f],j=0,k=i?i.length:0,l=a===cv;for(;j<k&&(l||!h);j++)h=i[j](c,d,e),typeof h=="string"&&(!l||g[h]?h=b:(c.dataTypes.unshift(h),h=cA(a,c,d,e,h,g)));return(l||!h)&&!g["*"]&&(h=cA(a,c,d,e,"*",g)),h}function cB(a,c){var d,e,f=p.ajaxSettings.flatOptions||{};for(d in c)c[d]!==b&&((f[d]?a:e||(e={}))[d]=c[d]);e&&p.extend(!0,a,e)}function cC(a,c,d){var e,f,g,h,i=a.contents,j=a.dataTypes,k=a.responseFields;for(f in k)f in d&&(c[k[f]]=d[f]);while(j[0]==="*")j.shift(),e===b&&(e=a.mimeType||c.getResponseHeader("content-type"));if(e)for(f in i)if(i[f]&&i[f].test(e)){j.unshift(f);break}if(j[0]in d)g=j[0];else{for(f in d){if(!j[0]||a.converters[f+" "+j[0]]){g=f;break}h||(h=f)}g=g||h}if(g)return g!==j[0]&&j.unshift(g),d[g]}function cD(a,b){var c,d,e,f,g=a.dataTypes.slice(),h=g[0],i={},j=0;a.dataFilter&&(b=a.dataFilter(b,a.dataType));if(g[1])for(c in a.converters)i[c.toLowerCase()]=a.converters[c];for(;e=g[++j];)if(e!=="*"){if(h!=="*"&&h!==e){c=i[h+" "+e]||i["* "+e];if(!c)for(d in i){f=d.split(" ");if(f[1]===e){c=i[h+" "+f[0]]||i["* "+f[0]];if(c){c===!0?c=i[d]:i[d]!==!0&&(e=f[0],g.splice(j--,0,e));break}}}if(c!==!0)if(c&&a["throws"])b=c(b);else try{b=c(b)}catch(k){return{state:"parsererror",error:c?k:"No conversion from "+h+" to "+e}}}h=e}return{state:"success",data:b}}function cL(){try{return new a.XMLHttpRequest}catch(b){}}function cM(){try{return new a.ActiveXObject("Microsoft.XMLHTTP")}catch(b){}}function cU(){return setTimeout(function(){cN=b},0),cN=p.now()}function cV(a,b){p.each(b,function(b,c){var d=(cT[b]||[]).concat(cT["*"]),e=0,f=d.length;for(;e<f;e++)if(d[e].call(a,b,c))return})}function cW(a,b,c){var d,e=0,f=0,g=cS.length,h=p.Deferred().always(function(){delete i.elem}),i=function(){var b=cN||cU(),c=Math.max(0,j.startTime+j.duration-b),d=1-(c/j.duration||0),e=0,f=j.tweens.length;for(;e<f;e++)j.tweens[e].run(d);return h.notifyWith(a,[j,d,c]),d<1&&f?c:(h.resolveWith(a,[j]),!1)},j=h.promise({elem:a,props:p.extend({},b),opts:p.extend(!0,{specialEasing:{}},c),originalProperties:b,originalOptions:c,startTime:cN||cU(),duration:c.duration,tweens:[],createTween:function(b,c,d){var e=p.Tween(a,j.opts,b,c,j.opts.specialEasing[b]||j.opts.easing);return j.tweens.push(e),e},stop:function(b){var 
c=0,d=b?j.tweens.length:0;for(;c<d;c++)j.tweens[c].run(1);return b?h.resolveWith(a,[j,b]):h.rejectWith(a,[j,b]),this}}),k=j.props;cX(k,j.opts.specialEasing);for(;e<g;e++){d=cS[e].call(j,a,k,j.opts);if(d)return d}return cV(j,k),p.isFunction(j.opts.start)&&j.opts.start.call(a,j),p.fx.timer(p.extend(i,{anim:j,queue:j.opts.queue,elem:a})),j.progress(j.opts.progress).done(j.opts.done,j.opts.complete).fail(j.opts.fail).always(j.opts.always)}function cX(a,b){var c,d,e,f,g;for(c in a){d=p.camelCase(c),e=b[d],f=a[c],p.isArray(f)&&(e=f[1],f=a[c]=f[0]),c!==d&&(a[d]=f,delete a[c]),g=p.cssHooks[d];if(g&&"expand"in g){f=g.expand(f),delete a[d];for(c in f)c in a||(a[c]=f[c],b[c]=e)}else b[d]=e}}function cY(a,b,c){var d,e,f,g,h,i,j,k,l=this,m=a.style,n={},o=[],q=a.nodeType&&bZ(a);c.queue||(j=p._queueHooks(a,"fx"),j.unqueued==null&&(j.unqueued=0,k=j.empty.fire,j.empty.fire=function(){j.unqueued||k()}),j.unqueued++,l.always(function(){l.always(function(){j.unqueued--,p.queue(a,"fx").length||j.empty.fire()})})),a.nodeType===1&&("height"in b||"width"in b)&&(c.overflow=[m.overflow,m.overflowX,m.overflowY],p.css(a,"display")==="inline"&&p.css(a,"float")==="none"&&(!p.support.inlineBlockNeedsLayout||cc(a.nodeName)==="inline"?m.display="inline-block":m.zoom=1)),c.overflow&&(m.overflow="hidden",p.support.shrinkWrapBlocks||l.done(function(){m.overflow=c.overflow[0],m.overflowX=c.overflow[1],m.overflowY=c.overflow[2]}));for(d in b){f=b[d];if(cP.exec(f)){delete b[d];if(f===(q?"hide":"show"))continue;o.push(d)}}g=o.length;if(g){h=p._data(a,"fxshow")||p._data(a,"fxshow",{}),q?p(a).show():l.done(function(){p(a).hide()}),l.done(function(){var b;p.removeData(a,"fxshow",!0);for(b in n)p.style(a,b,n[b])});for(d=0;d<g;d++)e=o[d],i=l.createTween(e,q?h[e]:0),n[e]=h[e]||p.style(a,e),e in h||(h[e]=i.start,q&&(i.end=i.start,i.start=e==="width"||e==="height"?1:0))}}function cZ(a,b,c,d,e){return new cZ.prototype.init(a,b,c,d,e)}function c$(a,b){var c,d={height:a},e=0;b=b?1:0;for(;e<4;e+=2-b)c=bV[e],d["margin"+c]=d["padding"+c]=a;return b&&(d.opacity=d.width=a),d}function da(a){return p.isWindow(a)?a:a.nodeType===9?a.defaultView||a.parentWindow:!1}var c,d,e=a.document,f=a.location,g=a.navigator,h=a.jQuery,i=a.$,j=Array.prototype.push,k=Array.prototype.slice,l=Array.prototype.indexOf,m=Object.prototype.toString,n=Object.prototype.hasOwnProperty,o=String.prototype.trim,p=function(a,b){return new p.fn.init(a,b,c)},q=/[\-+]?(?:\d*\.|)\d+(?:[eE][\-+]?\d+|)/.source,r=/\S/,s=/\s+/,t=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,u=/^(?:[^#<]*(<[\w\W]+>)[^>]*$|#([\w\-]*)$)/,v=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,w=/^[\],:{}\s]*$/,x=/(?:^|:|,)(?:\s*\[)+/g,y=/\\(?:["\\\/bfnrt]|u[\da-fA-F]{4})/g,z=/"[^"\\\r\n]*"|true|false|null|-?(?:\d\d*\.|)\d+(?:[eE][\-+]?\d+|)/g,A=/^-ms-/,B=/-([\da-z])/gi,C=function(a,b){return(b+"").toUpperCase()},D=function(){e.addEventListener?(e.removeEventListener("DOMContentLoaded",D,!1),p.ready()):e.readyState==="complete"&&(e.detachEvent("onreadystatechange",D),p.ready())},E={};p.fn=p.prototype={constructor:p,init:function(a,c,d){var f,g,h,i;if(!a)return this;if(a.nodeType)return this.context=this[0]=a,this.length=1,this;if(typeof a=="string"){a.charAt(0)==="<"&&a.charAt(a.length-1)===">"&&a.length>=3?f=[null,a,null]:f=u.exec(a);if(f&&(f[1]||!c)){if(f[1])return c=c instanceof p?c[0]:c,i=c&&c.nodeType?c.ownerDocument||c:e,a=p.parseHTML(f[1],i,!0),v.test(f[1])&&p.isPlainObject(c)&&this.attr.call(a,c,!0),p.merge(this,a);g=e.getElementById(f[2]);if(g&&g.parentNode){if(g.id!==f[2])return d.find(a);this.length=1,this[0]=g}return 
this.context=e,this.selector=a,this}return!c||c.jquery?(c||d).find(a):this.constructor(c).find(a)}return p.isFunction(a)?d.ready(a):(a.selector!==b&&(this.selector=a.selector,this.context=a.context),p.makeArray(a,this))},selector:"",jquery:"1.8.1",length:0,size:function(){return this.length},toArray:function(){return k.call(this)},get:function(a){return a==null?this.toArray():a<0?this[this.length+a]:this[a]},pushStack:function(a,b,c){var d=p.merge(this.constructor(),a);return d.prevObject=this,d.context=this.context,b==="find"?d.selector=this.selector+(this.selector?" ":"")+c:b&&(d.selector=this.selector+"."+b+"("+c+")"),d},each:function(a,b){return p.each(this,a,b)},ready:function(a){return p.ready.promise().done(a),this},eq:function(a){return a=+a,a===-1?this.slice(a):this.slice(a,a+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(k.apply(this,arguments),"slice",k.call(arguments).join(","))},map:function(a){return this.pushStack(p.map(this,function(b,c){return a.call(b,c,b)}))},end:function(){return this.prevObject||this.constructor(null)},push:j,sort:[].sort,splice:[].splice},p.fn.init.prototype=p.fn,p.extend=p.fn.extend=function(){var a,c,d,e,f,g,h=arguments[0]||{},i=1,j=arguments.length,k=!1;typeof h=="boolean"&&(k=h,h=arguments[1]||{},i=2),typeof h!="object"&&!p.isFunction(h)&&(h={}),j===i&&(h=this,--i);for(;i<j;i++)if((a=arguments[i])!=null)for(c in a){d=h[c],e=a[c];if(h===e)continue;k&&e&&(p.isPlainObject(e)||(f=p.isArray(e)))?(f?(f=!1,g=d&&p.isArray(d)?d:[]):g=d&&p.isPlainObject(d)?d:{},h[c]=p.extend(k,g,e)):e!==b&&(h[c]=e)}return h},p.extend({noConflict:function(b){return a.$===p&&(a.$=i),b&&a.jQuery===p&&(a.jQuery=h),p},isReady:!1,readyWait:1,holdReady:function(a){a?p.readyWait++:p.ready(!0)},ready:function(a){if(a===!0?--p.readyWait:p.isReady)return;if(!e.body)return setTimeout(p.ready,1);p.isReady=!0;if(a!==!0&&--p.readyWait>0)return;d.resolveWith(e,[p]),p.fn.trigger&&p(e).trigger("ready").off("ready")},isFunction:function(a){return p.type(a)==="function"},isArray:Array.isArray||function(a){return p.type(a)==="array"},isWindow:function(a){return a!=null&&a==a.window},isNumeric:function(a){return!isNaN(parseFloat(a))&&isFinite(a)},type:function(a){return a==null?String(a):E[m.call(a)]||"object"},isPlainObject:function(a){if(!a||p.type(a)!=="object"||a.nodeType||p.isWindow(a))return!1;try{if(a.constructor&&!n.call(a,"constructor")&&!n.call(a.constructor.prototype,"isPrototypeOf"))return!1}catch(c){return!1}var d;for(d in a);return d===b||n.call(a,d)},isEmptyObject:function(a){var b;for(b in a)return!1;return!0},error:function(a){throw new Error(a)},parseHTML:function(a,b,c){var d;return!a||typeof a!="string"?null:(typeof b=="boolean"&&(c=b,b=0),b=b||e,(d=v.exec(a))?[b.createElement(d[1])]:(d=p.buildFragment([a],b,c?null:[]),p.merge([],(d.cacheable?p.clone(d.fragment):d.fragment).childNodes)))},parseJSON:function(b){if(!b||typeof b!="string")return null;b=p.trim(b);if(a.JSON&&a.JSON.parse)return a.JSON.parse(b);if(w.test(b.replace(y,"@").replace(z,"]").replace(x,"")))return(new Function("return "+b))();p.error("Invalid JSON: "+b)},parseXML:function(c){var d,e;if(!c||typeof c!="string")return null;try{a.DOMParser?(e=new DOMParser,d=e.parseFromString(c,"text/xml")):(d=new ActiveXObject("Microsoft.XMLDOM"),d.async="false",d.loadXML(c))}catch(f){d=b}return(!d||!d.documentElement||d.getElementsByTagName("parsererror").length)&&p.error("Invalid XML: 
"+c),d},noop:function(){},globalEval:function(b){b&&r.test(b)&&(a.execScript||function(b){a.eval.call(a,b)})(b)},camelCase:function(a){return a.replace(A,"ms-").replace(B,C)},nodeName:function(a,b){return a.nodeName&&a.nodeName.toUpperCase()===b.toUpperCase()},each:function(a,c,d){var e,f=0,g=a.length,h=g===b||p.isFunction(a);if(d){if(h){for(e in a)if(c.apply(a[e],d)===!1)break}else for(;f<g;)if(c.apply(a[f++],d)===!1)break}else if(h){for(e in a)if(c.call(a[e],e,a[e])===!1)break}else for(;f<g;)if(c.call(a[f],f,a[f++])===!1)break;return a},trim:o&&!o.call(" ")?function(a){return a==null?"":o.call(a)}:function(a){return a==null?"":a.toString().replace(t,"")},makeArray:function(a,b){var c,d=b||[];return a!=null&&(c=p.type(a),a.length==null||c==="string"||c==="function"||c==="regexp"||p.isWindow(a)?j.call(d,a):p.merge(d,a)),d},inArray:function(a,b,c){var d;if(b){if(l)return l.call(b,a,c);d=b.length,c=c?c<0?Math.max(0,d+c):c:0;for(;c<d;c++)if(c in b&&b[c]===a)return c}return-1},merge:function(a,c){var d=c.length,e=a.length,f=0;if(typeof d=="number")for(;f<d;f++)a[e++]=c[f];else while(c[f]!==b)a[e++]=c[f++];return a.length=e,a},grep:function(a,b,c){var d,e=[],f=0,g=a.length;c=!!c;for(;f<g;f++)d=!!b(a[f],f),c!==d&&e.push(a[f]);return e},map:function(a,c,d){var e,f,g=[],h=0,i=a.length,j=a instanceof p||i!==b&&typeof i=="number"&&(i>0&&a[0]&&a[i-1]||i===0||p.isArray(a));if(j)for(;h<i;h++)e=c(a[h],h,d),e!=null&&(g[g.length]=e);else for(f in a)e=c(a[f],f,d),e!=null&&(g[g.length]=e);return g.concat.apply([],g)},guid:1,proxy:function(a,c){var d,e,f;return typeof c=="string"&&(d=a[c],c=a,a=d),p.isFunction(a)?(e=k.call(arguments,2),f=function(){return a.apply(c,e.concat(k.call(arguments)))},f.guid=a.guid=a.guid||f.guid||p.guid++,f):b},access:function(a,c,d,e,f,g,h){var i,j=d==null,k=0,l=a.length;if(d&&typeof d=="object"){for(k in d)p.access(a,c,k,d[k],1,g,e);f=1}else if(e!==b){i=h===b&&p.isFunction(e),j&&(i?(i=c,c=function(a,b,c){return i.call(p(a),c)}):(c.call(a,e),c=null));if(c)for(;k<l;k++)c(a[k],d,i?e.call(a[k],k,c(a[k],d)):e,h);f=1}return f?a:j?c.call(a):l?c(a[0],d):g},now:function(){return(new Date).getTime()}}),p.ready.promise=function(b){if(!d){d=p.Deferred();if(e.readyState==="complete")setTimeout(p.ready,1);else if(e.addEventListener)e.addEventListener("DOMContentLoaded",D,!1),a.addEventListener("load",p.ready,!1);else{e.attachEvent("onreadystatechange",D),a.attachEvent("onload",p.ready);var c=!1;try{c=a.frameElement==null&&e.documentElement}catch(f){}c&&c.doScroll&&function g(){if(!p.isReady){try{c.doScroll("left")}catch(a){return setTimeout(g,50)}p.ready()}}()}}return d.promise(b)},p.each("Boolean Number String Function Array Date RegExp Object".split(" "),function(a,b){E["[object "+b+"]"]=b.toLowerCase()}),c=p(e);var F={};p.Callbacks=function(a){a=typeof a=="string"?F[a]||G(a):p.extend({},a);var c,d,e,f,g,h,i=[],j=!a.once&&[],k=function(b){c=a.memory&&b,d=!0,h=f||0,f=0,g=i.length,e=!0;for(;i&&h<g;h++)if(i[h].apply(b[0],b[1])===!1&&a.stopOnFalse){c=!1;break}e=!1,i&&(j?j.length&&k(j.shift()):c?i=[]:l.disable())},l={add:function(){if(i){var b=i.length;(function d(b){p.each(b,function(b,c){var e=p.type(c);e==="function"&&(!a.unique||!l.has(c))?i.push(c):c&&c.length&&e!=="string"&&d(c)})})(arguments),e?g=i.length:c&&(f=b,k(c))}return this},remove:function(){return i&&p.each(arguments,function(a,b){var c;while((c=p.inArray(b,i,c))>-1)i.splice(c,1),e&&(c<=g&&g--,c<=h&&h--)}),this},has:function(a){return p.inArray(a,i)>-1},empty:function(){return i=[],this},disable:function(){return 
i=j=c=b,this},disabled:function(){return!i},lock:function(){return j=b,c||l.disable(),this},locked:function(){return!j},fireWith:function(a,b){return b=b||[],b=[a,b.slice?b.slice():b],i&&(!d||j)&&(e?j.push(b):k(b)),this},fire:function(){return l.fireWith(this,arguments),this},fired:function(){return!!d}};return l},p.extend({Deferred:function(a){var b=[["resolve","done",p.Callbacks("once memory"),"resolved"],["reject","fail",p.Callbacks("once memory"),"rejected"],["notify","progress",p.Callbacks("memory")]],c="pending",d={state:function(){return c},always:function(){return e.done(arguments).fail(arguments),this},then:function(){var a=arguments;return p.Deferred(function(c){p.each(b,function(b,d){var f=d[0],g=a[b];e[d[1]](p.isFunction(g)?function(){var a=g.apply(this,arguments);a&&p.isFunction(a.promise)?a.promise().done(c.resolve).fail(c.reject).progress(c.notify):c[f+"With"](this===e?c:this,[a])}:c[f])}),a=null}).promise()},promise:function(a){return typeof a=="object"?p.extend(a,d):d}},e={};return d.pipe=d.then,p.each(b,function(a,f){var g=f[2],h=f[3];d[f[1]]=g.add,h&&g.add(function(){c=h},b[a^1][2].disable,b[2][2].lock),e[f[0]]=g.fire,e[f[0]+"With"]=g.fireWith}),d.promise(e),a&&a.call(e,e),e},when:function(a){var b=0,c=k.call(arguments),d=c.length,e=d!==1||a&&p.isFunction(a.promise)?d:0,f=e===1?a:p.Deferred(),g=function(a,b,c){return function(d){b[a]=this,c[a]=arguments.length>1?k.call(arguments):d,c===h?f.notifyWith(b,c):--e||f.resolveWith(b,c)}},h,i,j;if(d>1){h=new Array(d),i=new Array(d),j=new Array(d);for(;b<d;b++)c[b]&&p.isFunction(c[b].promise)?c[b].promise().done(g(b,j,c)).fail(f.reject).progress(g(b,i,h)):--e}return e||f.resolveWith(j,c),f.promise()}}),p.support=function(){var b,c,d,f,g,h,i,j,k,l,m,n=e.createElement("div");n.setAttribute("className","t"),n.innerHTML=" <link/><table></table><a href='/a'>a</a><input type='checkbox'/>",c=n.getElementsByTagName("*"),d=n.getElementsByTagName("a")[0],d.style.cssText="top:1px;float:left;opacity:.5";if(!c||!c.length||!d)return{};f=e.createElement("select"),g=f.appendChild(e.createElement("option")),h=n.getElementsByTagName("input")[0],b={leadingWhitespace:n.firstChild.nodeType===3,tbody:!n.getElementsByTagName("tbody").length,htmlSerialize:!!n.getElementsByTagName("link").length,style:/top/.test(d.getAttribute("style")),hrefNormalized:d.getAttribute("href")==="/a",opacity:/^0.5/.test(d.style.opacity),cssFloat:!!d.style.cssFloat,checkOn:h.value==="on",optSelected:g.selected,getSetAttribute:n.className!=="t",enctype:!!e.createElement("form").enctype,html5Clone:e.createElement("nav").cloneNode(!0).outerHTML!=="<:nav></:nav>",boxModel:e.compatMode==="CSS1Compat",submitBubbles:!0,changeBubbles:!0,focusinBubbles:!1,deleteExpando:!0,noCloneEvent:!0,inlineBlockNeedsLayout:!1,shrinkWrapBlocks:!1,reliableMarginRight:!0,boxSizingReliable:!0,pixelPosition:!1},h.checked=!0,b.noCloneChecked=h.cloneNode(!0).checked,f.disabled=!0,b.optDisabled=!g.disabled;try{delete n.test}catch(o){b.deleteExpando=!1}!n.addEventListener&&n.attachEvent&&n.fireEvent&&(n.attachEvent("onclick",m=function(){b.noCloneEvent=!1}),n.cloneNode(!0).fireEvent("onclick"),n.detachEvent("onclick",m)),h=e.createElement("input"),h.value="t",h.setAttribute("type","radio"),b.radioValue=h.value==="t",h.setAttribute("checked","checked"),h.setAttribute("name","t"),n.appendChild(h),i=e.createDocumentFragment(),i.appendChild(n.lastChild),b.checkClone=i.cloneNode(!0).cloneNode(!0).lastChild.checked,b.appendChecked=h.checked,i.removeChild(h),i.appendChild(n);if(n.attachEvent)for(k 
in{submit:!0,change:!0,focusin:!0})j="on"+k,l=j in n,l||(n.setAttribute(j,"return;"),l=typeof n[j]=="function"),b[k+"Bubbles"]=l;return p(function(){var c,d,f,g,h="padding:0;margin:0;border:0;display:block;overflow:hidden;",i=e.getElementsByTagName("body")[0];if(!i)return;c=e.createElement("div"),c.style.cssText="visibility:hidden;border:0;width:0;height:0;position:static;top:0;margin-top:1px",i.insertBefore(c,i.firstChild),d=e.createElement("div"),c.appendChild(d),d.innerHTML="<table><tr><td></td><td>t</td></tr></table>",f=d.getElementsByTagName("td"),f[0].style.cssText="padding:0;margin:0;border:0;display:none",l=f[0].offsetHeight===0,f[0].style.display="",f[1].style.display="none",b.reliableHiddenOffsets=l&&f[0].offsetHeight===0,d.innerHTML="",d.style.cssText="box-sizing:border-box;-moz-box-sizing:border-box;-webkit-box-sizing:border-box;padding:1px;border:1px;display:block;width:4px;margin-top:1%;position:absolute;top:1%;",b.boxSizing=d.offsetWidth===4,b.doesNotIncludeMarginInBodyOffset=i.offsetTop!==1,a.getComputedStyle&&(b.pixelPosition=(a.getComputedStyle(d,null)||{}).top!=="1%",b.boxSizingReliable=(a.getComputedStyle(d,null)||{width:"4px"}).width==="4px",g=e.createElement("div"),g.style.cssText=d.style.cssText=h,g.style.marginRight=g.style.width="0",d.style.width="1px",d.appendChild(g),b.reliableMarginRight=!parseFloat((a.getComputedStyle(g,null)||{}).marginRight)),typeof d.style.zoom!="undefined"&&(d.innerHTML="",d.style.cssText=h+"width:1px;padding:1px;display:inline;zoom:1",b.inlineBlockNeedsLayout=d.offsetWidth===3,d.style.display="block",d.style.overflow="visible",d.innerHTML="<div></div>",d.firstChild.style.width="5px",b.shrinkWrapBlocks=d.offsetWidth!==3,c.style.zoom=1),i.removeChild(c),c=d=f=g=null}),i.removeChild(n),c=d=f=g=h=i=n=null,b}();var H=/(?:\{[\s\S]*\}|\[[\s\S]*\])$/,I=/([A-Z])/g;p.extend({cache:{},deletedIds:[],uuid:0,expando:"jQuery"+(p.fn.jquery+Math.random()).replace(/\D/g,""),noData:{embed:!0,object:"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000",applet:!0},hasData:function(a){return a=a.nodeType?p.cache[a[p.expando]]:a[p.expando],!!a&&!K(a)},data:function(a,c,d,e){if(!p.acceptData(a))return;var f,g,h=p.expando,i=typeof c=="string",j=a.nodeType,k=j?p.cache:a,l=j?a[h]:a[h]&&h;if((!l||!k[l]||!e&&!k[l].data)&&i&&d===b)return;l||(j?a[h]=l=p.deletedIds.pop()||++p.uuid:l=h),k[l]||(k[l]={},j||(k[l].toJSON=p.noop));if(typeof c=="object"||typeof c=="function")e?k[l]=p.extend(k[l],c):k[l].data=p.extend(k[l].data,c);return f=k[l],e||(f.data||(f.data={}),f=f.data),d!==b&&(f[p.camelCase(c)]=d),i?(g=f[c],g==null&&(g=f[p.camelCase(c)])):g=f,g},removeData:function(a,b,c){if(!p.acceptData(a))return;var d,e,f,g=a.nodeType,h=g?p.cache:a,i=g?a[p.expando]:p.expando;if(!h[i])return;if(b){d=c?h[i]:h[i].data;if(d){p.isArray(b)||(b in d?b=[b]:(b=p.camelCase(b),b in d?b=[b]:b=b.split(" ")));for(e=0,f=b.length;e<f;e++)delete d[b[e]];if(!(c?K:p.isEmptyObject)(d))return}}if(!c){delete h[i].data;if(!K(h[i]))return}g?p.cleanData([a],!0):p.support.deleteExpando||h!=h.window?delete h[i]:h[i]=null},_data:function(a,b,c){return p.data(a,b,c,!0)},acceptData:function(a){var b=a.nodeName&&p.noData[a.nodeName.toLowerCase()];return!b||b!==!0&&a.getAttribute("classid")===b}}),p.fn.extend({data:function(a,c){var d,e,f,g,h,i=this[0],j=0,k=null;if(a===b){if(this.length){k=p.data(i);if(i.nodeType===1&&!p._data(i,"parsedAttrs")){f=i.attributes;for(h=f.length;j<h;j++)g=f[j].name,g.indexOf("data-")===0&&(g=p.camelCase(g.substring(5)),J(i,g,k[g]));p._data(i,"parsedAttrs",!0)}}return k}return typeof 
a=="object"?this.each(function(){p.data(this,a)}):(d=a.split(".",2),d[1]=d[1]?"."+d[1]:"",e=d[1]+"!",p.access(this,function(c){if(c===b)return k=this.triggerHandler("getData"+e,[d[0]]),k===b&&i&&(k=p.data(i,a),k=J(i,a,k)),k===b&&d[1]?this.data(d[0]):k;d[1]=c,this.each(function(){var b=p(this);b.triggerHandler("setData"+e,d),p.data(this,a,c),b.triggerHandler("changeData"+e,d)})},null,c,arguments.length>1,null,!1))},removeData:function(a){return this.each(function(){p.removeData(this,a)})}}),p.extend({queue:function(a,b,c){var d;if(a)return b=(b||"fx")+"queue",d=p._data(a,b),c&&(!d||p.isArray(c)?d=p._data(a,b,p.makeArray(c)):d.push(c)),d||[]},dequeue:function(a,b){b=b||"fx";var c=p.queue(a,b),d=c.length,e=c.shift(),f=p._queueHooks(a,b),g=function(){p.dequeue(a,b)};e==="inprogress"&&(e=c.shift(),d--),e&&(b==="fx"&&c.unshift("inprogress"),delete f.stop,e.call(a,g,f)),!d&&f&&f.empty.fire()},_queueHooks:function(a,b){var c=b+"queueHooks";return p._data(a,c)||p._data(a,c,{empty:p.Callbacks("once memory").add(function(){p.removeData(a,b+"queue",!0),p.removeData(a,c,!0)})})}}),p.fn.extend({queue:function(a,c){var d=2;return typeof a!="string"&&(c=a,a="fx",d--),arguments.length<d?p.queue(this[0],a):c===b?this:this.each(function(){var b=p.queue(this,a,c);p._queueHooks(this,a),a==="fx"&&b[0]!=="inprogress"&&p.dequeue(this,a)})},dequeue:function(a){return this.each(function(){p.dequeue(this,a)})},delay:function(a,b){return a=p.fx?p.fx.speeds[a]||a:a,b=b||"fx",this.queue(b,function(b,c){var d=setTimeout(b,a);c.stop=function(){clearTimeout(d)}})},clearQueue:function(a){return this.queue(a||"fx",[])},promise:function(a,c){var d,e=1,f=p.Deferred(),g=this,h=this.length,i=function(){--e||f.resolveWith(g,[g])};typeof a!="string"&&(c=a,a=b),a=a||"fx";while(h--)d=p._data(g[h],a+"queueHooks"),d&&d.empty&&(e++,d.empty.add(i));return i(),f.promise(c)}});var L,M,N,O=/[\t\r\n]/g,P=/\r/g,Q=/^(?:button|input)$/i,R=/^(?:button|input|object|select|textarea)$/i,S=/^a(?:rea|)$/i,T=/^(?:autofocus|autoplay|async|checked|controls|defer|disabled|hidden|loop|multiple|open|readonly|required|scoped|selected)$/i,U=p.support.getSetAttribute;p.fn.extend({attr:function(a,b){return p.access(this,p.attr,a,b,arguments.length>1)},removeAttr:function(a){return this.each(function(){p.removeAttr(this,a)})},prop:function(a,b){return p.access(this,p.prop,a,b,arguments.length>1)},removeProp:function(a){return a=p.propFix[a]||a,this.each(function(){try{this[a]=b,delete this[a]}catch(c){}})},addClass:function(a){var b,c,d,e,f,g,h;if(p.isFunction(a))return this.each(function(b){p(this).addClass(a.call(this,b,this.className))});if(a&&typeof a=="string"){b=a.split(s);for(c=0,d=this.length;c<d;c++){e=this[c];if(e.nodeType===1)if(!e.className&&b.length===1)e.className=a;else{f=" "+e.className+" ";for(g=0,h=b.length;g<h;g++)~f.indexOf(" "+b[g]+" ")||(f+=b[g]+" ");e.className=p.trim(f)}}}return this},removeClass:function(a){var c,d,e,f,g,h,i;if(p.isFunction(a))return this.each(function(b){p(this).removeClass(a.call(this,b,this.className))});if(a&&typeof a=="string"||a===b){c=(a||"").split(s);for(h=0,i=this.length;h<i;h++){e=this[h];if(e.nodeType===1&&e.className){d=(" "+e.className+" ").replace(O," ");for(f=0,g=c.length;f<g;f++)while(d.indexOf(" "+c[f]+" ")>-1)d=d.replace(" "+c[f]+" "," ");e.className=a?p.trim(d):""}}}return this},toggleClass:function(a,b){var c=typeof a,d=typeof b=="boolean";return p.isFunction(a)?this.each(function(c){p(this).toggleClass(a.call(this,c,this.className,b),b)}):this.each(function(){if(c==="string"){var 
e,f=0,g=p(this),h=b,i=a.split(s);while(e=i[f++])h=d?h:!g.hasClass(e),g[h?"addClass":"removeClass"](e)}else if(c==="undefined"||c==="boolean")this.className&&p._data(this,"__className__",this.className),this.className=this.className||a===!1?"":p._data(this,"__className__")||""})},hasClass:function(a){var b=" "+a+" ",c=0,d=this.length;for(;c<d;c++)if(this[c].nodeType===1&&(" "+this[c].className+" ").replace(O," ").indexOf(b)>-1)return!0;return!1},val:function(a){var c,d,e,f=this[0];if(!arguments.length){if(f)return c=p.valHooks[f.type]||p.valHooks[f.nodeName.toLowerCase()],c&&"get"in c&&(d=c.get(f,"value"))!==b?d:(d=f.value,typeof d=="string"?d.replace(P,""):d==null?"":d);return}return e=p.isFunction(a),this.each(function(d){var f,g=p(this);if(this.nodeType!==1)return;e?f=a.call(this,d,g.val()):f=a,f==null?f="":typeof f=="number"?f+="":p.isArray(f)&&(f=p.map(f,function(a){return a==null?"":a+""})),c=p.valHooks[this.type]||p.valHooks[this.nodeName.toLowerCase()];if(!c||!("set"in c)||c.set(this,f,"value")===b)this.value=f})}}),p.extend({valHooks:{option:{get:function(a){var b=a.attributes.value;return!b||b.specified?a.value:a.text}},select:{get:function(a){var b,c,d,e,f=a.selectedIndex,g=[],h=a.options,i=a.type==="select-one";if(f<0)return null;c=i?f:0,d=i?f+1:h.length;for(;c<d;c++){e=h[c];if(e.selected&&(p.support.optDisabled?!e.disabled:e.getAttribute("disabled")===null)&&(!e.parentNode.disabled||!p.nodeName(e.parentNode,"optgroup"))){b=p(e).val();if(i)return b;g.push(b)}}return i&&!g.length&&h.length?p(h[f]).val():g},set:function(a,b){var c=p.makeArray(b);return p(a).find("option").each(function(){this.selected=p.inArray(p(this).val(),c)>=0}),c.length||(a.selectedIndex=-1),c}}},attrFn:{},attr:function(a,c,d,e){var f,g,h,i=a.nodeType;if(!a||i===3||i===8||i===2)return;if(e&&p.isFunction(p.fn[c]))return p(a)[c](d);if(typeof a.getAttribute=="undefined")return p.prop(a,c,d);h=i!==1||!p.isXMLDoc(a),h&&(c=c.toLowerCase(),g=p.attrHooks[c]||(T.test(c)?M:L));if(d!==b){if(d===null){p.removeAttr(a,c);return}return g&&"set"in g&&h&&(f=g.set(a,d,c))!==b?f:(a.setAttribute(c,""+d),d)}return g&&"get"in g&&h&&(f=g.get(a,c))!==null?f:(f=a.getAttribute(c),f===null?b:f)},removeAttr:function(a,b){var c,d,e,f,g=0;if(b&&a.nodeType===1){d=b.split(s);for(;g<d.length;g++)e=d[g],e&&(c=p.propFix[e]||e,f=T.test(e),f||p.attr(a,e,""),a.removeAttribute(U?e:c),f&&c in a&&(a[c]=!1))}},attrHooks:{type:{set:function(a,b){if(Q.test(a.nodeName)&&a.parentNode)p.error("type property can't be changed");else if(!p.support.radioValue&&b==="radio"&&p.nodeName(a,"input")){var c=a.value;return a.setAttribute("type",b),c&&(a.value=c),b}}},value:{get:function(a,b){return L&&p.nodeName(a,"button")?L.get(a,b):b in a?a.value:null},set:function(a,b,c){if(L&&p.nodeName(a,"button"))return L.set(a,b,c);a.value=b}}},propFix:{tabindex:"tabIndex",readonly:"readOnly","for":"htmlFor","class":"className",maxlength:"maxLength",cellspacing:"cellSpacing",cellpadding:"cellPadding",rowspan:"rowSpan",colspan:"colSpan",usemap:"useMap",frameborder:"frameBorder",contenteditable:"contentEditable"},prop:function(a,c,d){var e,f,g,h=a.nodeType;if(!a||h===3||h===8||h===2)return;return g=h!==1||!p.isXMLDoc(a),g&&(c=p.propFix[c]||c,f=p.propHooks[c]),d!==b?f&&"set"in f&&(e=f.set(a,d,c))!==b?e:a[c]=d:f&&"get"in f&&(e=f.get(a,c))!==null?e:a[c]},propHooks:{tabIndex:{get:function(a){var c=a.getAttributeNode("tabindex");return c&&c.specified?parseInt(c.value,10):R.test(a.nodeName)||S.test(a.nodeName)&&a.href?0:b}}}}),M={get:function(a,c){var d,e=p.prop(a,c);return 
e===!0||typeof e!="boolean"&&(d=a.getAttributeNode(c))&&d.nodeValue!==!1?c.toLowerCase():b},set:function(a,b,c){var d;return b===!1?p.removeAttr(a,c):(d=p.propFix[c]||c,d in a&&(a[d]=!0),a.setAttribute(c,c.toLowerCase())),c}},U||(N={name:!0,id:!0,coords:!0},L=p.valHooks.button={get:function(a,c){var d;return d=a.getAttributeNode(c),d&&(N[c]?d.value!=="":d.specified)?d.value:b},set:function(a,b,c){var d=a.getAttributeNode(c);return d||(d=e.createAttribute(c),a.setAttributeNode(d)),d.value=b+""}},p.each(["width","height"],function(a,b){p.attrHooks[b]=p.extend(p.attrHooks[b],{set:function(a,c){if(c==="")return a.setAttribute(b,"auto"),c}})}),p.attrHooks.contenteditable={get:L.get,set:function(a,b,c){b===""&&(b="false"),L.set(a,b,c)}}),p.support.hrefNormalized||p.each(["href","src","width","height"],function(a,c){p.attrHooks[c]=p.extend(p.attrHooks[c],{get:function(a){var d=a.getAttribute(c,2);return d===null?b:d}})}),p.support.style||(p.attrHooks.style={get:function(a){return a.style.cssText.toLowerCase()||b},set:function(a,b){return a.style.cssText=""+b}}),p.support.optSelected||(p.propHooks.selected=p.extend(p.propHooks.selected,{get:function(a){var b=a.parentNode;return b&&(b.selectedIndex,b.parentNode&&b.parentNode.selectedIndex),null}})),p.support.enctype||(p.propFix.enctype="encoding"),p.support.checkOn||p.each(["radio","checkbox"],function(){p.valHooks[this]={get:function(a){return a.getAttribute("value")===null?"on":a.value}}}),p.each(["radio","checkbox"],function(){p.valHooks[this]=p.extend(p.valHooks[this],{set:function(a,b){if(p.isArray(b))return a.checked=p.inArray(p(a).val(),b)>=0}})});var V=/^(?:textarea|input|select)$/i,W=/^([^\.]*|)(?:\.(.+)|)$/,X=/(?:^|\s)hover(\.\S+|)\b/,Y=/^key/,Z=/^(?:mouse|contextmenu)|click/,$=/^(?:focusinfocus|focusoutblur)$/,_=function(a){return p.event.special.hover?a:a.replace(X,"mouseenter$1 mouseleave$1")};p.event={add:function(a,c,d,e,f){var g,h,i,j,k,l,m,n,o,q,r;if(a.nodeType===3||a.nodeType===8||!c||!d||!(g=p._data(a)))return;d.handler&&(o=d,d=o.handler,f=o.selector),d.guid||(d.guid=p.guid++),i=g.events,i||(g.events=i={}),h=g.handle,h||(g.handle=h=function(a){return typeof p!="undefined"&&(!a||p.event.triggered!==a.type)?p.event.dispatch.apply(h.elem,arguments):b},h.elem=a),c=p.trim(_(c)).split(" ");for(j=0;j<c.length;j++){k=W.exec(c[j])||[],l=k[1],m=(k[2]||"").split(".").sort(),r=p.event.special[l]||{},l=(f?r.delegateType:r.bindType)||l,r=p.event.special[l]||{},n=p.extend({type:l,origType:k[1],data:e,handler:d,guid:d.guid,selector:f,namespace:m.join(".")},o),q=i[l];if(!q){q=i[l]=[],q.delegateCount=0;if(!r.setup||r.setup.call(a,e,m,h)===!1)a.addEventListener?a.addEventListener(l,h,!1):a.attachEvent&&a.attachEvent("on"+l,h)}r.add&&(r.add.call(a,n),n.handler.guid||(n.handler.guid=d.guid)),f?q.splice(q.delegateCount++,0,n):q.push(n),p.event.global[l]=!0}a=null},global:{},remove:function(a,b,c,d,e){var f,g,h,i,j,k,l,m,n,o,q,r=p.hasData(a)&&p._data(a);if(!r||!(m=r.events))return;b=p.trim(_(b||"")).split(" ");for(f=0;f<b.length;f++){g=W.exec(b[f])||[],h=i=g[1],j=g[2];if(!h){for(h in m)p.event.remove(a,h+b[f],c,d,!0);continue}n=p.event.special[h]||{},h=(d?n.delegateType:n.bindType)||h,o=m[h]||[],k=o.length,j=j?new 
RegExp("(^|\\.)"+j.split(".").sort().join("\\.(?:.*\\.|)")+"(\\.|$)"):null;for(l=0;l<o.length;l++)q=o[l],(e||i===q.origType)&&(!c||c.guid===q.guid)&&(!j||j.test(q.namespace))&&(!d||d===q.selector||d==="**"&&q.selector)&&(o.splice(l--,1),q.selector&&o.delegateCount--,n.remove&&n.remove.call(a,q));o.length===0&&k!==o.length&&((!n.teardown||n.teardown.call(a,j,r.handle)===!1)&&p.removeEvent(a,h,r.handle),delete m[h])}p.isEmptyObject(m)&&(delete r.handle,p.removeData(a,"events",!0))},customEvent:{getData:!0,setData:!0,changeData:!0},trigger:function(c,d,f,g){if(!f||f.nodeType!==3&&f.nodeType!==8){var h,i,j,k,l,m,n,o,q,r,s=c.type||c,t=[];if($.test(s+p.event.triggered))return;s.indexOf("!")>=0&&(s=s.slice(0,-1),i=!0),s.indexOf(".")>=0&&(t=s.split("."),s=t.shift(),t.sort());if((!f||p.event.customEvent[s])&&!p.event.global[s])return;c=typeof c=="object"?c[p.expando]?c:new p.Event(s,c):new p.Event(s),c.type=s,c.isTrigger=!0,c.exclusive=i,c.namespace=t.join("."),c.namespace_re=c.namespace?new RegExp("(^|\\.)"+t.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,m=s.indexOf(":")<0?"on"+s:"";if(!f){h=p.cache;for(j in h)h[j].events&&h[j].events[s]&&p.event.trigger(c,d,h[j].handle.elem,!0);return}c.result=b,c.target||(c.target=f),d=d!=null?p.makeArray(d):[],d.unshift(c),n=p.event.special[s]||{};if(n.trigger&&n.trigger.apply(f,d)===!1)return;q=[[f,n.bindType||s]];if(!g&&!n.noBubble&&!p.isWindow(f)){r=n.delegateType||s,k=$.test(r+s)?f:f.parentNode;for(l=f;k;k=k.parentNode)q.push([k,r]),l=k;l===(f.ownerDocument||e)&&q.push([l.defaultView||l.parentWindow||a,r])}for(j=0;j<q.length&&!c.isPropagationStopped();j++)k=q[j][0],c.type=q[j][1],o=(p._data(k,"events")||{})[c.type]&&p._data(k,"handle"),o&&o.apply(k,d),o=m&&k[m],o&&p.acceptData(k)&&o.apply(k,d)===!1&&c.preventDefault();return c.type=s,!g&&!c.isDefaultPrevented()&&(!n._default||n._default.apply(f.ownerDocument,d)===!1)&&(s!=="click"||!p.nodeName(f,"a"))&&p.acceptData(f)&&m&&f[s]&&(s!=="focus"&&s!=="blur"||c.target.offsetWidth!==0)&&!p.isWindow(f)&&(l=f[m],l&&(f[m]=null),p.event.triggered=s,f[s](),p.event.triggered=b,l&&(f[m]=l)),c.result}return},dispatch:function(c){c=p.event.fix(c||a.event);var d,e,f,g,h,i,j,k,l,m,n=(p._data(this,"events")||{})[c.type]||[],o=n.delegateCount,q=[].slice.call(arguments),r=!c.exclusive&&!c.namespace,s=p.event.special[c.type]||{},t=[];q[0]=c,c.delegateTarget=this;if(s.preDispatch&&s.preDispatch.call(this,c)===!1)return;if(o&&(!c.button||c.type!=="click"))for(f=c.target;f!=this;f=f.parentNode||this)if(f.disabled!==!0||c.type!=="click"){h={},j=[];for(d=0;d<o;d++)k=n[d],l=k.selector,h[l]===b&&(h[l]=p(l,this).index(f)>=0),h[l]&&j.push(k);j.length&&t.push({elem:f,matches:j})}n.length>o&&t.push({elem:this,matches:n.slice(o)});for(d=0;d<t.length&&!c.isPropagationStopped();d++){i=t[d],c.currentTarget=i.elem;for(e=0;e<i.matches.length&&!c.isImmediatePropagationStopped();e++){k=i.matches[e];if(r||!c.namespace&&!k.namespace||c.namespace_re&&c.namespace_re.test(k.namespace))c.data=k.data,c.handleObj=k,g=((p.event.special[k.origType]||{}).handle||k.handler).apply(i.elem,q),g!==b&&(c.result=g,g===!1&&(c.preventDefault(),c.stopPropagation()))}}return s.postDispatch&&s.postDispatch.call(this,c),c.result},props:"attrChange attrName relatedNode srcElement altKey bubbles cancelable ctrlKey currentTarget eventPhase metaKey relatedTarget shiftKey target timeStamp view which".split(" "),fixHooks:{},keyHooks:{props:"char charCode key keyCode".split(" "),filter:function(a,b){return 
a.which==null&&(a.which=b.charCode!=null?b.charCode:b.keyCode),a}},mouseHooks:{props:"button buttons clientX clientY fromElement offsetX offsetY pageX pageY screenX screenY toElement".split(" "),filter:function(a,c){var d,f,g,h=c.button,i=c.fromElement;return a.pageX==null&&c.clientX!=null&&(d=a.target.ownerDocument||e,f=d.documentElement,g=d.body,a.pageX=c.clientX+(f&&f.scrollLeft||g&&g.scrollLeft||0)-(f&&f.clientLeft||g&&g.clientLeft||0),a.pageY=c.clientY+(f&&f.scrollTop||g&&g.scrollTop||0)-(f&&f.clientTop||g&&g.clientTop||0)),!a.relatedTarget&&i&&(a.relatedTarget=i===a.target?c.toElement:i),!a.which&&h!==b&&(a.which=h&1?1:h&2?3:h&4?2:0),a}},fix:function(a){if(a[p.expando])return a;var b,c,d=a,f=p.event.fixHooks[a.type]||{},g=f.props?this.props.concat(f.props):this.props;a=p.Event(d);for(b=g.length;b;)c=g[--b],a[c]=d[c];return a.target||(a.target=d.srcElement||e),a.target.nodeType===3&&(a.target=a.target.parentNode),a.metaKey=!!a.metaKey,f.filter?f.filter(a,d):a},special:{load:{noBubble:!0},focus:{delegateType:"focusin"},blur:{delegateType:"focusout"},beforeunload:{setup:function(a,b,c){p.isWindow(this)&&(this.onbeforeunload=c)},teardown:function(a,b){this.onbeforeunload===b&&(this.onbeforeunload=null)}}},simulate:function(a,b,c,d){var e=p.extend(new p.Event,c,{type:a,isSimulated:!0,originalEvent:{}});d?p.event.trigger(e,null,b):p.event.dispatch.call(b,e),e.isDefaultPrevented()&&c.preventDefault()}},p.event.handle=p.event.dispatch,p.removeEvent=e.removeEventListener?function(a,b,c){a.removeEventListener&&a.removeEventListener(b,c,!1)}:function(a,b,c){var d="on"+b;a.detachEvent&&(typeof a[d]=="undefined"&&(a[d]=null),a.detachEvent(d,c))},p.Event=function(a,b){if(this instanceof p.Event)a&&a.type?(this.originalEvent=a,this.type=a.type,this.isDefaultPrevented=a.defaultPrevented||a.returnValue===!1||a.getPreventDefault&&a.getPreventDefault()?bb:ba):this.type=a,b&&p.extend(this,b),this.timeStamp=a&&a.timeStamp||p.now(),this[p.expando]=!0;else return new p.Event(a,b)},p.Event.prototype={preventDefault:function(){this.isDefaultPrevented=bb;var a=this.originalEvent;if(!a)return;a.preventDefault?a.preventDefault():a.returnValue=!1},stopPropagation:function(){this.isPropagationStopped=bb;var a=this.originalEvent;if(!a)return;a.stopPropagation&&a.stopPropagation(),a.cancelBubble=!0},stopImmediatePropagation:function(){this.isImmediatePropagationStopped=bb,this.stopPropagation()},isDefaultPrevented:ba,isPropagationStopped:ba,isImmediatePropagationStopped:ba},p.each({mouseenter:"mouseover",mouseleave:"mouseout"},function(a,b){p.event.special[a]={delegateType:b,bindType:b,handle:function(a){var c,d=this,e=a.relatedTarget,f=a.handleObj,g=f.selector;if(!e||e!==d&&!p.contains(d,e))a.type=f.origType,c=f.handler.apply(this,arguments),a.type=b;return c}}}),p.support.submitBubbles||(p.event.special.submit={setup:function(){if(p.nodeName(this,"form"))return!1;p.event.add(this,"click._submit keypress._submit",function(a){var c=a.target,d=p.nodeName(c,"input")||p.nodeName(c,"button")?c.form:b;d&&!p._data(d,"_submit_attached")&&(p.event.add(d,"submit._submit",function(a){a._submit_bubble=!0}),p._data(d,"_submit_attached",!0))})},postDispatch:function(a){a._submit_bubble&&(delete 
a._submit_bubble,this.parentNode&&!a.isTrigger&&p.event.simulate("submit",this.parentNode,a,!0))},teardown:function(){if(p.nodeName(this,"form"))return!1;p.event.remove(this,"._submit")}}),p.support.changeBubbles||(p.event.special.change={setup:function(){if(V.test(this.nodeName)){if(this.type==="checkbox"||this.type==="radio")p.event.add(this,"propertychange._change",function(a){a.originalEvent.propertyName==="checked"&&(this._just_changed=!0)}),p.event.add(this,"click._change",function(a){this._just_changed&&!a.isTrigger&&(this._just_changed=!1),p.event.simulate("change",this,a,!0)});return!1}p.event.add(this,"beforeactivate._change",function(a){var b=a.target;V.test(b.nodeName)&&!p._data(b,"_change_attached")&&(p.event.add(b,"change._change",function(a){this.parentNode&&!a.isSimulated&&!a.isTrigger&&p.event.simulate("change",this.parentNode,a,!0)}),p._data(b,"_change_attached",!0))})},handle:function(a){var b=a.target;if(this!==b||a.isSimulated||a.isTrigger||b.type!=="radio"&&b.type!=="checkbox")return a.handleObj.handler.apply(this,arguments)},teardown:function(){return p.event.remove(this,"._change"),!V.test(this.nodeName)}}),p.support.focusinBubbles||p.each({focus:"focusin",blur:"focusout"},function(a,b){var c=0,d=function(a){p.event.simulate(b,a.target,p.event.fix(a),!0)};p.event.special[b]={setup:function(){c++===0&&e.addEventListener(a,d,!0)},teardown:function(){--c===0&&e.removeEventListener(a,d,!0)}}}),p.fn.extend({on:function(a,c,d,e,f){var g,h;if(typeof a=="object"){typeof c!="string"&&(d=d||c,c=b);for(h in a)this.on(h,c,d,a[h],f);return this}d==null&&e==null?(e=c,d=c=b):e==null&&(typeof c=="string"?(e=d,d=b):(e=d,d=c,c=b));if(e===!1)e=ba;else if(!e)return this;return f===1&&(g=e,e=function(a){return p().off(a),g.apply(this,arguments)},e.guid=g.guid||(g.guid=p.guid++)),this.each(function(){p.event.add(this,a,e,d,c)})},one:function(a,b,c,d){return this.on(a,b,c,d,1)},off:function(a,c,d){var e,f;if(a&&a.preventDefault&&a.handleObj)return e=a.handleObj,p(a.delegateTarget).off(e.namespace?e.origType+"."+e.namespace:e.origType,e.selector,e.handler),this;if(typeof a=="object"){for(f in a)this.off(f,c,a[f]);return this}if(c===!1||typeof c=="function")d=c,c=b;return d===!1&&(d=ba),this.each(function(){p.event.remove(this,a,d,c)})},bind:function(a,b,c){return this.on(a,null,b,c)},unbind:function(a,b){return this.off(a,null,b)},live:function(a,b,c){return p(this.context).on(a,this.selector,b,c),this},die:function(a,b){return p(this.context).off(a,this.selector||"**",b),this},delegate:function(a,b,c,d){return this.on(b,a,c,d)},undelegate:function(a,b,c){return arguments.length==1?this.off(a,"**"):this.off(b,a||"**",c)},trigger:function(a,b){return this.each(function(){p.event.trigger(a,b,this)})},triggerHandler:function(a,b){if(this[0])return p.event.trigger(a,b,this[0],!0)},toggle:function(a){var b=arguments,c=a.guid||p.guid++,d=0,e=function(c){var e=(p._data(this,"lastToggle"+a.guid)||0)%d;return p._data(this,"lastToggle"+a.guid,e+1),c.preventDefault(),b[e].apply(this,arguments)||!1};e.guid=c;while(d<b.length)b[d++].guid=c;return this.click(e)},hover:function(a,b){return this.mouseenter(a).mouseleave(b||a)}}),p.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error contextmenu".split(" "),function(a,b){p.fn[b]=function(a,c){return 
c==null&&(c=a,a=null),arguments.length>0?this.on(b,null,a,c):this.trigger(b)},Y.test(b)&&(p.event.fixHooks[b]=p.event.keyHooks),Z.test(b)&&(p.event.fixHooks[b]=p.event.mouseHooks)}),function(a,b){function $(a,b,c,d){c=c||[],b=b||q;var e,f,g,j,k=b.nodeType;if(k!==1&&k!==9)return[];if(!a||typeof a!="string")return c;g=h(b);if(!g&&!d)if(e=L.exec(a))if(j=e[1]){if(k===9){f=b.getElementById(j);if(!f||!f.parentNode)return c;if(f.id===j)return c.push(f),c}else if(b.ownerDocument&&(f=b.ownerDocument.getElementById(j))&&i(b,f)&&f.id===j)return c.push(f),c}else{if(e[2])return u.apply(c,t.call(b.getElementsByTagName(a),0)),c;if((j=e[3])&&X&&b.getElementsByClassName)return u.apply(c,t.call(b.getElementsByClassName(j),0)),c}return bk(a,b,c,d,g)}function _(a){return function(b){var c=b.nodeName.toLowerCase();return c==="input"&&b.type===a}}function ba(a){return function(b){var c=b.nodeName.toLowerCase();return(c==="input"||c==="button")&&b.type===a}}function bb(a,b,c){if(a===b)return c;var d=a.nextSibling;while(d){if(d===b)return-1;d=d.nextSibling}return 1}function bc(a,b,c,d){var e,g,h,i,j,k,l,m,n,p,r=!c&&b!==q,s=(r?"<s>":"")+a.replace(H,"$1<s>"),u=y[o][s];if(u)return d?0:t.call(u,0);j=a,k=[],m=0,n=f.preFilter,p=f.filter;while(j){if(!e||(g=I.exec(j)))g&&(j=j.slice(g[0].length),h.selector=l),k.push(h=[]),l="",r&&(j=" "+j);e=!1;if(g=J.exec(j))l+=g[0],j=j.slice(g[0].length),e=h.push({part:g.pop().replace(H," "),string:g[0],captures:g});for(i in p)(g=S[i].exec(j))&&(!n[i]||(g=n[i](g,b,c)))&&(l+=g[0],j=j.slice(g[0].length),e=h.push({part:i,string:g.shift(),captures:g}));if(!e)break}return l&&(h.selector=l),d?j.length:j?$.error(a):t.call(y(s,k),0)}function bd(a,b,e,f){var g=b.dir,h=s++;return a||(a=function(a){return a===e}),b.first?function(b){while(b=b[g])if(b.nodeType===1)return a(b)&&b}:f?function(b){while(b=b[g])if(b.nodeType===1&&a(b))return b}:function(b){var e,f=h+"."+c,i=f+"."+d;while(b=b[g])if(b.nodeType===1){if((e=b[o])===i)return b.sizset;if(typeof e=="string"&&e.indexOf(f)===0){if(b.sizset)return b}else{b[o]=i;if(a(b))return b.sizset=!0,b;b.sizset=!1}}}}function be(a,b){return a?function(c){var d=b(c);return d&&a(d===!0?c:d)}:b}function bf(a,b,c){var d,e,g=0;for(;d=a[g];g++)f.relative[d.part]?e=bd(e,f.relative[d.part],b,c):e=be(e,f.filter[d.part].apply(null,d.captures.concat(b,c)));return e}function bg(a){return function(b){var c,d=0;for(;c=a[d];d++)if(c(b))return!0;return!1}}function bh(a,b,c,d){var e=0,f=b.length;for(;e<f;e++)$(a,b[e],c,d)}function bi(a,b,c,d,e,g){var h,i=f.setFilters[b.toLowerCase()];return i||$.error(b),(a||!(h=e))&&bh(a||"*",d,h=[],e),h.length>0?i(h,c,g):[]}function bj(a,c,d,e){var f,g,h,i,j,k,l,m,n,o,p,q,r,s=0,t=a.length,v=S.POS,w=new RegExp("^"+v.source+"(?!"+A+")","i"),x=function(){var a=1,c=arguments.length-2;for(;a<c;a++)arguments[a]===b&&(n[a]=b)};for(;s<t;s++){f=a[s],g="",m=e;for(h=0,i=f.length;h<i;h++){j=f[h],k=j.string;if(j.part==="PSEUDO"){v.exec(""),l=0;while(n=v.exec(k)){o=!0,p=v.lastIndex=n.index+n[0].length;if(p>l){g+=k.slice(l,n.index),l=p,q=[c],J.test(g)&&(m&&(q=m),m=e);if(r=O.test(g))g=g.slice(0,-5).replace(J,"$&*"),l++;n.length>1&&n[0].replace(w,x),m=bi(g,n[1],n[2],q,m,r)}g=""}}o||(g+=k),o=!1}g?J.test(g)?bh(g,m||[c],d,e):$(g,c,d,e?e.concat(m):m):u.apply(d,m)}return t===1?d:$.uniqueSort(d)}function bk(a,b,e,g,h){a=a.replace(H,"$1");var i,k,l,m,n,o,p,q,r,s,v=bc(a,b,h),w=b.nodeType;if(S.POS.test(a))return bj(v,b,e,g);if(g)i=t.call(g,0);else 
if(v.length===1){if((o=t.call(v[0],0)).length>2&&(p=o[0]).part==="ID"&&w===9&&!h&&f.relative[o[1].part]){b=f.find.ID(p.captures[0].replace(R,""),b,h)[0];if(!b)return e;a=a.slice(o.shift().string.length)}r=(v=N.exec(o[0].string))&&!v.index&&b.parentNode||b,q="";for(n=o.length-1;n>=0;n--){p=o[n],s=p.part,q=p.string+q;if(f.relative[s])break;if(f.order.test(s)){i=f.find[s](p.captures[0].replace(R,""),r,h);if(i==null)continue;a=a.slice(0,a.length-q.length)+q.replace(S[s],""),a||u.apply(e,t.call(i,0));break}}}if(a){k=j(a,b,h),c=k.dirruns++,i==null&&(i=f.find.TAG("*",N.test(a)&&b.parentNode||b));for(n=0;m=i[n];n++)d=k.runs++,k(m)&&e.push(m)}return e}var c,d,e,f,g,h,i,j,k,l,m=!0,n="undefined",o=("sizcache"+Math.random()).replace(".",""),q=a.document,r=q.documentElement,s=0,t=[].slice,u=[].push,v=function(a,b){return a[o]=b||!0,a},w=function(){var a={},b=[];return v(function(c,d){return b.push(c)>f.cacheLength&&delete a[b.shift()],a[c]=d},a)},x=w(),y=w(),z=w(),A="[\\x20\\t\\r\\n\\f]",B="(?:\\\\.|[-\\w]|[^\\x00-\\xa0])+",C=B.replace("w","w#"),D="([*^$|!~]?=)",E="\\["+A+"*("+B+")"+A+"*(?:"+D+A+"*(?:(['\"])((?:\\\\.|[^\\\\])*?)\\3|("+C+")|)|)"+A+"*\\]",F=":("+B+")(?:\\((?:(['\"])((?:\\\\.|[^\\\\])*?)\\2|([^()[\\]]*|(?:(?:"+E+")|[^:]|\\\\.)*|.*))\\)|)",G=":(nth|eq|gt|lt|first|last|even|odd)(?:\\(((?:-\\d)?\\d*)\\)|)(?=[^-]|$)",H=new RegExp("^"+A+"+|((?:^|[^\\\\])(?:\\\\.)*)"+A+"+$","g"),I=new RegExp("^"+A+"*,"+A+"*"),J=new RegExp("^"+A+"*([\\x20\\t\\r\\n\\f>+~])"+A+"*"),K=new RegExp(F),L=/^(?:#([\w\-]+)|(\w+)|\.([\w\-]+))$/,M=/^:not/,N=/[\x20\t\r\n\f]*[+~]/,O=/:not\($/,P=/h\d/i,Q=/input|select|textarea|button/i,R=/\\(?!\\)/g,S={ID:new RegExp("^#("+B+")"),CLASS:new RegExp("^\\.("+B+")"),NAME:new RegExp("^\\[name=['\"]?("+B+")['\"]?\\]"),TAG:new RegExp("^("+B.replace("w","w*")+")"),ATTR:new RegExp("^"+E),PSEUDO:new RegExp("^"+F),CHILD:new RegExp("^:(only|nth|last|first)-child(?:\\("+A+"*(even|odd|(([+-]|)(\\d*)n|)"+A+"*(?:([+-]|)"+A+"*(\\d+)|))"+A+"*\\)|)","i"),POS:new RegExp(G,"ig"),needsContext:new RegExp("^"+A+"*[>+~]|"+G,"i")},T=function(a){var b=q.createElement("div");try{return a(b)}catch(c){return!1}finally{b=null}},U=T(function(a){return a.appendChild(q.createComment("")),!a.getElementsByTagName("*").length}),V=T(function(a){return a.innerHTML="<a href='#'></a>",a.firstChild&&typeof a.firstChild.getAttribute!==n&&a.firstChild.getAttribute("href")==="#"}),W=T(function(a){a.innerHTML="<select></select>";var b=typeof a.lastChild.getAttribute("multiple");return b!=="boolean"&&b!=="string"}),X=T(function(a){return a.innerHTML="<div class='hidden e'></div><div class='hidden'></div>",!a.getElementsByClassName||!a.getElementsByClassName("e").length?!1:(a.lastChild.className="e",a.getElementsByClassName("e").length===2)}),Y=T(function(a){a.id=o+0,a.innerHTML="<a name='"+o+"'></a><div name='"+o+"'></div>",r.insertBefore(a,r.firstChild);var b=q.getElementsByName&&q.getElementsByName(o).length===2+q.getElementsByName(o+0).length;return e=!q.getElementById(o),r.removeChild(a),b});try{t.call(r.childNodes,0)[0].nodeType}catch(Z){t=function(a){var b,c=[];for(;b=this[a];a++)c.push(b);return c}}$.matches=function(a,b){return $(a,null,null,b)},$.matchesSelector=function(a,b){return $(b,null,null,[a]).length>0},g=$.getText=function(a){var b,c="",d=0,e=a.nodeType;if(e){if(e===1||e===9||e===11){if(typeof a.textContent=="string")return a.textContent;for(a=a.firstChild;a;a=a.nextSibling)c+=g(a)}else if(e===3||e===4)return a.nodeValue}else for(;b=a[d];d++)c+=g(b);return c},h=$.isXML=function(a){var 
b=a&&(a.ownerDocument||a).documentElement;return b?b.nodeName!=="HTML":!1},i=$.contains=r.contains?function(a,b){var c=a.nodeType===9?a.documentElement:a,d=b&&b.parentNode;return a===d||!!(d&&d.nodeType===1&&c.contains&&c.contains(d))}:r.compareDocumentPosition?function(a,b){return b&&!!(a.compareDocumentPosition(b)&16)}:function(a,b){while(b=b.parentNode)if(b===a)return!0;return!1},$.attr=function(a,b){var c,d=h(a);return d||(b=b.toLowerCase()),f.attrHandle[b]?f.attrHandle[b](a):W||d?a.getAttribute(b):(c=a.getAttributeNode(b),c?typeof a[b]=="boolean"?a[b]?b:null:c.specified?c.value:null:null)},f=$.selectors={cacheLength:50,createPseudo:v,match:S,order:new RegExp("ID|TAG"+(Y?"|NAME":"")+(X?"|CLASS":"")),attrHandle:V?{}:{href:function(a){return a.getAttribute("href",2)},type:function(a){return a.getAttribute("type")}},find:{ID:e?function(a,b,c){if(typeof b.getElementById!==n&&!c){var d=b.getElementById(a);return d&&d.parentNode?[d]:[]}}:function(a,c,d){if(typeof c.getElementById!==n&&!d){var e=c.getElementById(a);return e?e.id===a||typeof e.getAttributeNode!==n&&e.getAttributeNode("id").value===a?[e]:b:[]}},TAG:U?function(a,b){if(typeof b.getElementsByTagName!==n)return b.getElementsByTagName(a)}:function(a,b){var c=b.getElementsByTagName(a);if(a==="*"){var d,e=[],f=0;for(;d=c[f];f++)d.nodeType===1&&e.push(d);return e}return c},NAME:function(a,b){if(typeof b.getElementsByName!==n)return b.getElementsByName(name)},CLASS:function(a,b,c){if(typeof b.getElementsByClassName!==n&&!c)return b.getElementsByClassName(a)}},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(a){return a[1]=a[1].replace(R,""),a[3]=(a[4]||a[5]||"").replace(R,""),a[2]==="~="&&(a[3]=" "+a[3]+" "),a.slice(0,4)},CHILD:function(a){return a[1]=a[1].toLowerCase(),a[1]==="nth"?(a[2]||$.error(a[0]),a[3]=+(a[3]?a[4]+(a[5]||1):2*(a[2]==="even"||a[2]==="odd")),a[4]=+(a[6]+a[7]||a[2]==="odd")):a[2]&&$.error(a[0]),a},PSEUDO:function(a,b,c){var d,e;if(S.CHILD.test(a[0]))return null;if(a[3])a[2]=a[3];else if(d=a[4])K.test(d)&&(e=bc(d,b,c,!0))&&(e=d.indexOf(")",d.length-e)-d.length)&&(d=d.slice(0,e),a[0]=a[0].slice(0,e)),a[2]=d;return a.slice(0,3)}},filter:{ID:e?function(a){return a=a.replace(R,""),function(b){return b.getAttribute("id")===a}}:function(a){return a=a.replace(R,""),function(b){var c=typeof b.getAttributeNode!==n&&b.getAttributeNode("id");return c&&c.value===a}},TAG:function(a){return a==="*"?function(){return!0}:(a=a.replace(R,"").toLowerCase(),function(b){return b.nodeName&&b.nodeName.toLowerCase()===a})},CLASS:function(a){var b=x[o][a];return b||(b=x(a,new RegExp("(^|"+A+")"+a+"("+A+"|$)"))),function(a){return b.test(a.className||typeof a.getAttribute!==n&&a.getAttribute("class")||"")}},ATTR:function(a,b,c){return b?function(d){var e=$.attr(d,a),f=e+"";if(e==null)return b==="!=";switch(b){case"=":return f===c;case"!=":return f!==c;case"^=":return c&&f.indexOf(c)===0;case"*=":return c&&f.indexOf(c)>-1;case"$=":return c&&f.substr(f.length-c.length)===c;case"~=":return(" "+f+" ").indexOf(c)>-1;case"|=":return f===c||f.substr(0,c.length+1)===c+"-"}}:function(b){return $.attr(b,a)!=null}},CHILD:function(a,b,c,d){if(a==="nth"){var e=s++;return function(a){var b,f,g=0,h=a;if(c===1&&d===0)return!0;b=a.parentNode;if(b&&(b[o]!==e||!a.sizset)){for(h=b.firstChild;h;h=h.nextSibling)if(h.nodeType===1){h.sizset=++g;if(h===a)break}b[o]=e}return f=a.sizset-d,c===0?f===0:f%c===0&&f/c>=0}}return function(b){var 
c=b;switch(a){case"only":case"first":while(c=c.previousSibling)if(c.nodeType===1)return!1;if(a==="first")return!0;c=b;case"last":while(c=c.nextSibling)if(c.nodeType===1)return!1;return!0}}},PSEUDO:function(a,b,c,d){var e,g=f.pseudos[a]||f.pseudos[a.toLowerCase()];return g||$.error("unsupported pseudo: "+a),g[o]?g(b,c,d):g.length>1?(e=[a,a,"",b],function(a){return g(a,0,e)}):g}},pseudos:{not:v(function(a,b,c){var d=j(a.replace(H,"$1"),b,c);return function(a){return!d(a)}}),enabled:function(a){return a.disabled===!1},disabled:function(a){return a.disabled===!0},checked:function(a){var b=a.nodeName.toLowerCase();return b==="input"&&!!a.checked||b==="option"&&!!a.selected},selected:function(a){return a.parentNode&&a.parentNode.selectedIndex,a.selected===!0},parent:function(a){return!f.pseudos.empty(a)},empty:function(a){var b;a=a.firstChild;while(a){if(a.nodeName>"@"||(b=a.nodeType)===3||b===4)return!1;a=a.nextSibling}return!0},contains:v(function(a){return function(b){return(b.textContent||b.innerText||g(b)).indexOf(a)>-1}}),has:v(function(a){return function(b){return $(a,b).length>0}}),header:function(a){return P.test(a.nodeName)},text:function(a){var b,c;return a.nodeName.toLowerCase()==="input"&&(b=a.type)==="text"&&((c=a.getAttribute("type"))==null||c.toLowerCase()===b)},radio:_("radio"),checkbox:_("checkbox"),file:_("file"),password:_("password"),image:_("image"),submit:ba("submit"),reset:ba("reset"),button:function(a){var b=a.nodeName.toLowerCase();return b==="input"&&a.type==="button"||b==="button"},input:function(a){return Q.test(a.nodeName)},focus:function(a){var b=a.ownerDocument;return a===b.activeElement&&(!b.hasFocus||b.hasFocus())&&(!!a.type||!!a.href)},active:function(a){return a===a.ownerDocument.activeElement}},setFilters:{first:function(a,b,c){return c?a.slice(1):[a[0]]},last:function(a,b,c){var d=a.pop();return c?a:[d]},even:function(a,b,c){var d=[],e=c?1:0,f=a.length;for(;e<f;e=e+2)d.push(a[e]);return d},odd:function(a,b,c){var d=[],e=c?0:1,f=a.length;for(;e<f;e=e+2)d.push(a[e]);return d},lt:function(a,b,c){return c?a.slice(+b):a.slice(0,+b)},gt:function(a,b,c){return c?a.slice(0,+b+1):a.slice(+b+1)},eq:function(a,b,c){var d=a.splice(+b,1);return c?a:d}}},k=r.compareDocumentPosition?function(a,b){return a===b?(l=!0,0):(!a.compareDocumentPosition||!b.compareDocumentPosition?a.compareDocumentPosition:a.compareDocumentPosition(b)&4)?-1:1}:function(a,b){if(a===b)return l=!0,0;if(a.sourceIndex&&b.sourceIndex)return a.sourceIndex-b.sourceIndex;var c,d,e=[],f=[],g=a.parentNode,h=b.parentNode,i=g;if(g===h)return bb(a,b);if(!g)return-1;if(!h)return 1;while(i)e.unshift(i),i=i.parentNode;i=h;while(i)f.unshift(i),i=i.parentNode;c=e.length,d=f.length;for(var j=0;j<c&&j<d;j++)if(e[j]!==f[j])return bb(e[j],f[j]);return j===c?bb(a,f[j],-1):bb(e[j],b,1)},[0,0].sort(k),m=!l,$.uniqueSort=function(a){var b,c=1;l=m,a.sort(k);if(l)for(;b=a[c];c++)b===a[c-1]&&a.splice(c--,1);return a},$.error=function(a){throw new Error("Syntax error, unrecognized expression: "+a)},j=$.compile=function(a,b,c){var d,e,f,g=z[o][a];if(g&&g.context===b)return g;d=bc(a,b,c);for(e=0,f=d.length;e<f;e++)d[e]=bf(d[e],b,c);return g=z(a,bg(d)),g.context=b,g.runs=g.dirruns=0,g},q.querySelectorAll&&function(){var a,b=bk,c=/'|\\/g,d=/\=[\x20\t\r\n\f]*([^'"\]]*)[\x20\t\r\n\f]*\]/g,e=[],f=[":active"],g=r.matchesSelector||r.mozMatchesSelector||r.webkitMatchesSelector||r.oMatchesSelector||r.msMatchesSelector;T(function(a){a.innerHTML="<select><option 
selected=''></option></select>",a.querySelectorAll("[selected]").length||e.push("\\["+A+"*(?:checked|disabled|ismap|multiple|readonly|selected|value)"),a.querySelectorAll(":checked").length||e.push(":checked")}),T(function(a){a.innerHTML="<p test=''></p>",a.querySelectorAll("[test^='']").length&&e.push("[*^$]="+A+"*(?:\"\"|'')"),a.innerHTML="<input type='hidden'/>",a.querySelectorAll(":enabled").length||e.push(":enabled",":disabled")}),e=e.length&&new RegExp(e.join("|")),bk=function(a,d,f,g,h){if(!g&&!h&&(!e||!e.test(a)))if(d.nodeType===9)try{return u.apply(f,t.call(d.querySelectorAll(a),0)),f}catch(i){}else if(d.nodeType===1&&d.nodeName.toLowerCase()!=="object"){var j,k,l,m=d.getAttribute("id"),n=m||o,p=N.test(a)&&d.parentNode||d;m?n=n.replace(c,"\\$&"):d.setAttribute("id",n),j=bc(a,d,h),n="[id='"+n+"']";for(k=0,l=j.length;k<l;k++)j[k]=n+j[k].selector;try{return u.apply(f,t.call(p.querySelectorAll(j.join(",")),0)),f}catch(i){}finally{m||d.removeAttribute("id")}}return b(a,d,f,g,h)},g&&(T(function(b){a=g.call(b,"div");try{g.call(b,"[test!='']:sizzle"),f.push(S.PSEUDO.source,S.POS.source,"!=")}catch(c){}}),f=new RegExp(f.join("|")),$.matchesSelector=function(b,c){c=c.replace(d,"='$1']");if(!h(b)&&!f.test(c)&&(!e||!e.test(c)))try{var i=g.call(b,c);if(i||a||b.document&&b.document.nodeType!==11)return i}catch(j){}return $(c,null,null,[b]).length>0})}(),f.setFilters.nth=f.setFilters.eq,f.filters=f.pseudos,$.attr=p.attr,p.find=$,p.expr=$.selectors,p.expr[":"]=p.expr.pseudos,p.unique=$.uniqueSort,p.text=$.getText,p.isXMLDoc=$.isXML,p.contains=$.contains}(a);var bc=/Until$/,bd=/^(?:parents|prev(?:Until|All))/,be=/^.[^:#\[\.,]*$/,bf=p.expr.match.needsContext,bg={children:!0,contents:!0,next:!0,prev:!0};p.fn.extend({find:function(a){var b,c,d,e,f,g,h=this;if(typeof a!="string")return p(a).filter(function(){for(b=0,c=h.length;b<c;b++)if(p.contains(h[b],this))return!0});g=this.pushStack("","find",a);for(b=0,c=this.length;b<c;b++){d=g.length,p.find(a,this[b],g);if(b>0)for(e=d;e<g.length;e++)for(f=0;f<d;f++)if(g[f]===g[e]){g.splice(e--,1);break}}return g},has:function(a){var b,c=p(a,this),d=c.length;return this.filter(function(){for(b=0;b<d;b++)if(p.contains(this,c[b]))return!0})},not:function(a){return this.pushStack(bj(this,a,!1),"not",a)},filter:function(a){return this.pushStack(bj(this,a,!0),"filter",a)},is:function(a){return!!a&&(typeof a=="string"?bf.test(a)?p(a,this.context).index(this[0])>=0:p.filter(a,this).length>0:this.filter(a).length>0)},closest:function(a,b){var c,d=0,e=this.length,f=[],g=bf.test(a)||typeof a!="string"?p(a,b||this.context):0;for(;d<e;d++){c=this[d];while(c&&c.ownerDocument&&c!==b&&c.nodeType!==11){if(g?g.index(c)>-1:p.find.matchesSelector(c,a)){f.push(c);break}c=c.parentNode}}return f=f.length>1?p.unique(f):f,this.pushStack(f,"closest",a)},index:function(a){return a?typeof a=="string"?p.inArray(this[0],p(a)):p.inArray(a.jquery?a[0]:a,this):this[0]&&this[0].parentNode?this.prevAll().length:-1},add:function(a,b){var c=typeof a=="string"?p(a,b):p.makeArray(a&&a.nodeType?[a]:a),d=p.merge(this.get(),c);return this.pushStack(bh(c[0])||bh(d[0])?d:p.unique(d))},addBack:function(a){return this.add(a==null?this.prevObject:this.prevObject.filter(a))}}),p.fn.andSelf=p.fn.addBack,p.each({parent:function(a){var b=a.parentNode;return b&&b.nodeType!==11?b:null},parents:function(a){return p.dir(a,"parentNode")},parentsUntil:function(a,b,c){return p.dir(a,"parentNode",c)},next:function(a){return bi(a,"nextSibling")},prev:function(a){return bi(a,"previousSibling")},nextAll:function(a){return 
p.dir(a,"nextSibling")},prevAll:function(a){return p.dir(a,"previousSibling")},nextUntil:function(a,b,c){return p.dir(a,"nextSibling",c)},prevUntil:function(a,b,c){return p.dir(a,"previousSibling",c)},siblings:function(a){return p.sibling((a.parentNode||{}).firstChild,a)},children:function(a){return p.sibling(a.firstChild)},contents:function(a){return p.nodeName(a,"iframe")?a.contentDocument||a.contentWindow.document:p.merge([],a.childNodes)}},function(a,b){p.fn[a]=function(c,d){var e=p.map(this,b,c);return bc.test(a)||(d=c),d&&typeof d=="string"&&(e=p.filter(d,e)),e=this.length>1&&!bg[a]?p.unique(e):e,this.length>1&&bd.test(a)&&(e=e.reverse()),this.pushStack(e,a,k.call(arguments).join(","))}}),p.extend({filter:function(a,b,c){return c&&(a=":not("+a+")"),b.length===1?p.find.matchesSelector(b[0],a)?[b[0]]:[]:p.find.matches(a,b)},dir:function(a,c,d){var e=[],f=a[c];while(f&&f.nodeType!==9&&(d===b||f.nodeType!==1||!p(f).is(d)))f.nodeType===1&&e.push(f),f=f[c];return e},sibling:function(a,b){var c=[];for(;a;a=a.nextSibling)a.nodeType===1&&a!==b&&c.push(a);return c}});var bl="abbr|article|aside|audio|bdi|canvas|data|datalist|details|figcaption|figure|footer|header|hgroup|mark|meter|nav|output|progress|section|summary|time|video",bm=/ jQuery\d+="(?:null|\d+)"/g,bn=/^\s+/,bo=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi,bp=/<([\w:]+)/,bq=/<tbody/i,br=/<|&#?\w+;/,bs=/<(?:script|style|link)/i,bt=/<(?:script|object|embed|option|style)/i,bu=new RegExp("<(?:"+bl+")[\\s/>]","i"),bv=/^(?:checkbox|radio)$/,bw=/checked\s*(?:[^=]|=\s*.checked.)/i,bx=/\/(java|ecma)script/i,by=/^\s*<!(?:\[CDATA\[|\-\-)|[\]\-]{2}>\s*$/g,bz={option:[1,"<select multiple='multiple'>","</select>"],legend:[1,"<fieldset>","</fieldset>"],thead:[1,"<table>","</table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],col:[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"],area:[1,"<map>","</map>"],_default:[0,"",""]},bA=bk(e),bB=bA.appendChild(e.createElement("div"));bz.optgroup=bz.option,bz.tbody=bz.tfoot=bz.colgroup=bz.caption=bz.thead,bz.th=bz.td,p.support.htmlSerialize||(bz._default=[1,"X<div>","</div>"]),p.fn.extend({text:function(a){return p.access(this,function(a){return a===b?p.text(this):this.empty().append((this[0]&&this[0].ownerDocument||e).createTextNode(a))},null,a,arguments.length)},wrapAll:function(a){if(p.isFunction(a))return this.each(function(b){p(this).wrapAll(a.call(this,b))});if(this[0]){var b=p(a,this[0].ownerDocument).eq(0).clone(!0);this[0].parentNode&&b.insertBefore(this[0]),b.map(function(){var a=this;while(a.firstChild&&a.firstChild.nodeType===1)a=a.firstChild;return a}).append(this)}return this},wrapInner:function(a){return p.isFunction(a)?this.each(function(b){p(this).wrapInner(a.call(this,b))}):this.each(function(){var b=p(this),c=b.contents();c.length?c.wrapAll(a):b.append(a)})},wrap:function(a){var b=p.isFunction(a);return this.each(function(c){p(this).wrapAll(b?a.call(this,c):a)})},unwrap:function(){return this.parent().each(function(){p.nodeName(this,"body")||p(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,!0,function(a){(this.nodeType===1||this.nodeType===11)&&this.appendChild(a)})},prepend:function(){return this.domManip(arguments,!0,function(a){(this.nodeType===1||this.nodeType===11)&&this.insertBefore(a,this.firstChild)})},before:function(){if(!bh(this[0]))return this.domManip(arguments,!1,function(a){this.parentNode.insertBefore(a,this)});if(arguments.length){var 
a=p.clean(arguments);return this.pushStack(p.merge(a,this),"before",this.selector)}},after:function(){if(!bh(this[0]))return this.domManip(arguments,!1,function(a){this.parentNode.insertBefore(a,this.nextSibling)});if(arguments.length){var a=p.clean(arguments);return this.pushStack(p.merge(this,a),"after",this.selector)}},remove:function(a,b){var c,d=0;for(;(c=this[d])!=null;d++)if(!a||p.filter(a,[c]).length)!b&&c.nodeType===1&&(p.cleanData(c.getElementsByTagName("*")),p.cleanData([c])),c.parentNode&&c.parentNode.removeChild(c);return this},empty:function(){var a,b=0;for(;(a=this[b])!=null;b++){a.nodeType===1&&p.cleanData(a.getElementsByTagName("*"));while(a.firstChild)a.removeChild(a.firstChild)}return this},clone:function(a,b){return a=a==null?!1:a,b=b==null?a:b,this.map(function(){return p.clone(this,a,b)})},html:function(a){return p.access(this,function(a){var c=this[0]||{},d=0,e=this.length;if(a===b)return c.nodeType===1?c.innerHTML.replace(bm,""):b;if(typeof a=="string"&&!bs.test(a)&&(p.support.htmlSerialize||!bu.test(a))&&(p.support.leadingWhitespace||!bn.test(a))&&!bz[(bp.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(bo,"<$1></$2>");try{for(;d<e;d++)c=this[d]||{},c.nodeType===1&&(p.cleanData(c.getElementsByTagName("*")),c.innerHTML=a);c=0}catch(f){}}c&&this.empty().append(a)},null,a,arguments.length)},replaceWith:function(a){return bh(this[0])?this.length?this.pushStack(p(p.isFunction(a)?a():a),"replaceWith",a):this:p.isFunction(a)?this.each(function(b){var c=p(this),d=c.html();c.replaceWith(a.call(this,b,d))}):(typeof a!="string"&&(a=p(a).detach()),this.each(function(){var b=this.nextSibling,c=this.parentNode;p(this).remove(),b?p(b).before(a):p(c).append(a)}))},detach:function(a){return this.remove(a,!0)},domManip:function(a,c,d){a=[].concat.apply([],a);var e,f,g,h,i=0,j=a[0],k=[],l=this.length;if(!p.support.checkClone&&l>1&&typeof j=="string"&&bw.test(j))return this.each(function(){p(this).domManip(a,c,d)});if(p.isFunction(j))return this.each(function(e){var f=p(this);a[0]=j.call(this,e,c?f.html():b),f.domManip(a,c,d)});if(this[0]){e=p.buildFragment(a,this,k),g=e.fragment,f=g.firstChild,g.childNodes.length===1&&(g=f);if(f){c=c&&p.nodeName(f,"tr");for(h=e.cacheable||l-1;i<l;i++)d.call(c&&p.nodeName(this[i],"table")?bC(this[i],"tbody"):this[i],i===h?g:p.clone(g,!0,!0))}g=f=null,k.length&&p.each(k,function(a,b){b.src?p.ajax?p.ajax({url:b.src,type:"GET",dataType:"script",async:!1,global:!1,"throws":!0}):p.error("no ajax"):p.globalEval((b.text||b.textContent||b.innerHTML||"").replace(by,"")),b.parentNode&&b.parentNode.removeChild(b)})}return this}}),p.buildFragment=function(a,c,d){var f,g,h,i=a[0];return c=c||e,c=!c.nodeType&&c[0]||c,c=c.ownerDocument||c,a.length===1&&typeof i=="string"&&i.length<512&&c===e&&i.charAt(0)==="<"&&!bt.test(i)&&(p.support.checkClone||!bw.test(i))&&(p.support.html5Clone||!bu.test(i))&&(g=!0,f=p.fragments[i],h=f!==b),f||(f=c.createDocumentFragment(),p.clean(a,c,f,d),g&&(p.fragments[i]=h&&f)),{fragment:f,cacheable:g}},p.fragments={},p.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){p.fn[a]=function(c){var d,e=0,f=[],g=p(c),h=g.length,i=this.length===1&&this[0].parentNode;if((i==null||i&&i.nodeType===11&&i.childNodes.length===1)&&h===1)return g[b](this[0]),this;for(;e<h;e++)d=(e>0?this.clone(!0):this).get(),p(g[e])[b](d),f=f.concat(d);return this.pushStack(f,a,g.selector)}}),p.extend({clone:function(a,b,c){var 
d,e,f,g;p.support.html5Clone||p.isXMLDoc(a)||!bu.test("<"+a.nodeName+">")?g=a.cloneNode(!0):(bB.innerHTML=a.outerHTML,bB.removeChild(g=bB.firstChild));if((!p.support.noCloneEvent||!p.support.noCloneChecked)&&(a.nodeType===1||a.nodeType===11)&&!p.isXMLDoc(a)){bE(a,g),d=bF(a),e=bF(g);for(f=0;d[f];++f)e[f]&&bE(d[f],e[f])}if(b){bD(a,g);if(c){d=bF(a),e=bF(g);for(f=0;d[f];++f)bD(d[f],e[f])}}return d=e=null,g},clean:function(a,b,c,d){var f,g,h,i,j,k,l,m,n,o,q,r,s=b===e&&bA,t=[];if(!b||typeof b.createDocumentFragment=="undefined")b=e;for(f=0;(h=a[f])!=null;f++){typeof h=="number"&&(h+="");if(!h)continue;if(typeof h=="string")if(!br.test(h))h=b.createTextNode(h);else{s=s||bk(b),l=b.createElement("div"),s.appendChild(l),h=h.replace(bo,"<$1></$2>"),i=(bp.exec(h)||["",""])[1].toLowerCase(),j=bz[i]||bz._default,k=j[0],l.innerHTML=j[1]+h+j[2];while(k--)l=l.lastChild;if(!p.support.tbody){m=bq.test(h),n=i==="table"&&!m?l.firstChild&&l.firstChild.childNodes:j[1]==="<table>"&&!m?l.childNodes:[];for(g=n.length-1;g>=0;--g)p.nodeName(n[g],"tbody")&&!n[g].childNodes.length&&n[g].parentNode.removeChild(n[g])}!p.support.leadingWhitespace&&bn.test(h)&&l.insertBefore(b.createTextNode(bn.exec(h)[0]),l.firstChild),h=l.childNodes,l.parentNode.removeChild(l)}h.nodeType?t.push(h):p.merge(t,h)}l&&(h=l=s=null);if(!p.support.appendChecked)for(f=0;(h=t[f])!=null;f++)p.nodeName(h,"input")?bG(h):typeof h.getElementsByTagName!="undefined"&&p.grep(h.getElementsByTagName("input"),bG);if(c){q=function(a){if(!a.type||bx.test(a.type))return d?d.push(a.parentNode?a.parentNode.removeChild(a):a):c.appendChild(a)};for(f=0;(h=t[f])!=null;f++)if(!p.nodeName(h,"script")||!q(h))c.appendChild(h),typeof h.getElementsByTagName!="undefined"&&(r=p.grep(p.merge([],h.getElementsByTagName("script")),q),t.splice.apply(t,[f+1,0].concat(r)),f+=r.length)}return t},cleanData:function(a,b){var c,d,e,f,g=0,h=p.expando,i=p.cache,j=p.support.deleteExpando,k=p.event.special;for(;(e=a[g])!=null;g++)if(b||p.acceptData(e)){d=e[h],c=d&&i[d];if(c){if(c.events)for(f in c.events)k[f]?p.event.remove(e,f):p.removeEvent(e,f,c.handle);i[d]&&(delete i[d],j?delete e[h]:e.removeAttribute?e.removeAttribute(h):e[h]=null,p.deletedIds.push(d))}}}}),function(){var a,b;p.uaMatch=function(a){a=a.toLowerCase();var b=/(chrome)[ \/]([\w.]+)/.exec(a)||/(webkit)[ \/]([\w.]+)/.exec(a)||/(opera)(?:.*version|)[ \/]([\w.]+)/.exec(a)||/(msie) ([\w.]+)/.exec(a)||a.indexOf("compatible")<0&&/(mozilla)(?:.*? 
rv:([\w.]+)|)/.exec(a)||[];return{browser:b[1]||"",version:b[2]||"0"}},a=p.uaMatch(g.userAgent),b={},a.browser&&(b[a.browser]=!0,b.version=a.version),b.chrome?b.webkit=!0:b.webkit&&(b.safari=!0),p.browser=b,p.sub=function(){function a(b,c){return new a.fn.init(b,c)}p.extend(!0,a,this),a.superclass=this,a.fn=a.prototype=this(),a.fn.constructor=a,a.sub=this.sub,a.fn.init=function c(c,d){return d&&d instanceof p&&!(d instanceof a)&&(d=a(d)),p.fn.init.call(this,c,d,b)},a.fn.init.prototype=a.fn;var b=a(e);return a}}();var bH,bI,bJ,bK=/alpha\([^)]*\)/i,bL=/opacity=([^)]*)/,bM=/^(top|right|bottom|left)$/,bN=/^(none|table(?!-c[ea]).+)/,bO=/^margin/,bP=new RegExp("^("+q+")(.*)$","i"),bQ=new RegExp("^("+q+")(?!px)[a-z%]+$","i"),bR=new RegExp("^([-+])=("+q+")","i"),bS={},bT={position:"absolute",visibility:"hidden",display:"block"},bU={letterSpacing:0,fontWeight:400},bV=["Top","Right","Bottom","Left"],bW=["Webkit","O","Moz","ms"],bX=p.fn.toggle;p.fn.extend({css:function(a,c){return p.access(this,function(a,c,d){return d!==b?p.style(a,c,d):p.css(a,c)},a,c,arguments.length>1)},show:function(){return b$(this,!0)},hide:function(){return b$(this)},toggle:function(a,b){var c=typeof a=="boolean";return p.isFunction(a)&&p.isFunction(b)?bX.apply(this,arguments):this.each(function(){(c?a:bZ(this))?p(this).show():p(this).hide()})}}),p.extend({cssHooks:{opacity:{get:function(a,b){if(b){var c=bH(a,"opacity");return c===""?"1":c}}}},cssNumber:{fillOpacity:!0,fontWeight:!0,lineHeight:!0,opacity:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{"float":p.support.cssFloat?"cssFloat":"styleFloat"},style:function(a,c,d,e){if(!a||a.nodeType===3||a.nodeType===8||!a.style)return;var f,g,h,i=p.camelCase(c),j=a.style;c=p.cssProps[i]||(p.cssProps[i]=bY(j,i)),h=p.cssHooks[c]||p.cssHooks[i];if(d===b)return h&&"get"in h&&(f=h.get(a,!1,e))!==b?f:j[c];g=typeof d,g==="string"&&(f=bR.exec(d))&&(d=(f[1]+1)*f[2]+parseFloat(p.css(a,c)),g="number");if(d==null||g==="number"&&isNaN(d))return;g==="number"&&!p.cssNumber[i]&&(d+="px");if(!h||!("set"in h)||(d=h.set(a,d,e))!==b)try{j[c]=d}catch(k){}},css:function(a,c,d,e){var f,g,h,i=p.camelCase(c);return c=p.cssProps[i]||(p.cssProps[i]=bY(a.style,i)),h=p.cssHooks[c]||p.cssHooks[i],h&&"get"in h&&(f=h.get(a,!0,e)),f===b&&(f=bH(a,c)),f==="normal"&&c in bU&&(f=bU[c]),d||e!==b?(g=parseFloat(f),d||p.isNumeric(g)?g||0:f):f},swap:function(a,b,c){var d,e,f={};for(e in b)f[e]=a.style[e],a.style[e]=b[e];d=c.call(a);for(e in b)a.style[e]=f[e];return d}}),a.getComputedStyle?bH=function(b,c){var d,e,f,g,h=a.getComputedStyle(b,null),i=b.style;return h&&(d=h[c],d===""&&!p.contains(b.ownerDocument,b)&&(d=p.style(b,c)),bQ.test(d)&&bO.test(c)&&(e=i.width,f=i.minWidth,g=i.maxWidth,i.minWidth=i.maxWidth=i.width=d,d=h.width,i.width=e,i.minWidth=f,i.maxWidth=g)),d}:e.documentElement.currentStyle&&(bH=function(a,b){var c,d,e=a.currentStyle&&a.currentStyle[b],f=a.style;return e==null&&f&&f[b]&&(e=f[b]),bQ.test(e)&&!bM.test(b)&&(c=f.left,d=a.runtimeStyle&&a.runtimeStyle.left,d&&(a.runtimeStyle.left=a.currentStyle.left),f.left=b==="fontSize"?"1em":e,e=f.pixelLeft+"px",f.left=c,d&&(a.runtimeStyle.left=d)),e===""?"auto":e}),p.each(["height","width"],function(a,b){p.cssHooks[b]={get:function(a,c,d){if(c)return a.offsetWidth===0&&bN.test(bH(a,"display"))?p.swap(a,bT,function(){return cb(a,b,d)}):cb(a,b,d)},set:function(a,c,d){return b_(a,c,d?ca(a,b,d,p.support.boxSizing&&p.css(a,"boxSizing")==="border-box"):0)}}}),p.support.opacity||(p.cssHooks.opacity={get:function(a,b){return 
bL.test((b&&a.currentStyle?a.currentStyle.filter:a.style.filter)||"")?.01*parseFloat(RegExp.$1)+"":b?"1":""},set:function(a,b){var c=a.style,d=a.currentStyle,e=p.isNumeric(b)?"alpha(opacity="+b*100+")":"",f=d&&d.filter||c.filter||"";c.zoom=1;if(b>=1&&p.trim(f.replace(bK,""))===""&&c.removeAttribute){c.removeAttribute("filter");if(d&&!d.filter)return}c.filter=bK.test(f)?f.replace(bK,e):f+" "+e}}),p(function(){p.support.reliableMarginRight||(p.cssHooks.marginRight={get:function(a,b){return p.swap(a,{display:"inline-block"},function(){if(b)return bH(a,"marginRight")})}}),!p.support.pixelPosition&&p.fn.position&&p.each(["top","left"],function(a,b){p.cssHooks[b]={get:function(a,c){if(c){var d=bH(a,b);return bQ.test(d)?p(a).position()[b]+"px":d}}}})}),p.expr&&p.expr.filters&&(p.expr.filters.hidden=function(a){return a.offsetWidth===0&&a.offsetHeight===0||!p.support.reliableHiddenOffsets&&(a.style&&a.style.display||bH(a,"display"))==="none"},p.expr.filters.visible=function(a){return!p.expr.filters.hidden(a)}),p.each({margin:"",padding:"",border:"Width"},function(a,b){p.cssHooks[a+b]={expand:function(c){var d,e=typeof c=="string"?c.split(" "):[c],f={};for(d=0;d<4;d++)f[a+bV[d]+b]=e[d]||e[d-2]||e[0];return f}},bO.test(a)||(p.cssHooks[a+b].set=b_)});var cd=/%20/g,ce=/\[\]$/,cf=/\r?\n/g,cg=/^(?:color|date|datetime|datetime-local|email|hidden|month|number|password|range|search|tel|text|time|url|week)$/i,ch=/^(?:select|textarea)/i;p.fn.extend({serialize:function(){return p.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?p.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||ch.test(this.nodeName)||cg.test(this.type))}).map(function(a,b){var c=p(this).val();return c==null?null:p.isArray(c)?p.map(c,function(a,c){return{name:b.name,value:a.replace(cf,"\r\n")}}):{name:b.name,value:c.replace(cf,"\r\n")}}).get()}}),p.param=function(a,c){var d,e=[],f=function(a,b){b=p.isFunction(b)?b():b==null?"":b,e[e.length]=encodeURIComponent(a)+"="+encodeURIComponent(b)};c===b&&(c=p.ajaxSettings&&p.ajaxSettings.traditional);if(p.isArray(a)||a.jquery&&!p.isPlainObject(a))p.each(a,function(){f(this.name,this.value)});else for(d in a)ci(d,a[d],c,f);return e.join("&").replace(cd,"+")};var cj,ck,cl=/#.*$/,cm=/^(.*?):[ \t]*([^\r\n]*)\r?$/mg,cn=/^(?:about|app|app\-storage|.+\-extension|file|res|widget):$/,co=/^(?:GET|HEAD)$/,cp=/^\/\//,cq=/\?/,cr=/<script\b[^<]*(?:(?!<\/script>)<[^<]*)*<\/script>/gi,cs=/([?&])_=[^&]*/,ct=/^([\w\+\.\-]+:)(?:\/\/([^\/?#:]*)(?::(\d+)|)|)/,cu=p.fn.load,cv={},cw={},cx=["*/"]+["*"];try{cj=f.href}catch(cy){cj=e.createElement("a"),cj.href="",cj=cj.href}ck=ct.exec(cj.toLowerCase())||[],p.fn.load=function(a,c,d){if(typeof a!="string"&&cu)return cu.apply(this,arguments);if(!this.length)return this;var e,f,g,h=this,i=a.indexOf(" ");return i>=0&&(e=a.slice(i,a.length),a=a.slice(0,i)),p.isFunction(c)?(d=c,c=b):c&&typeof c=="object"&&(f="POST"),p.ajax({url:a,type:f,dataType:"html",data:c,complete:function(a,b){d&&h.each(d,g||[a.responseText,b,a])}}).done(function(a){g=arguments,h.html(e?p("<div>").append(a.replace(cr,"")).find(e):a)}),this},p.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),function(a,b){p.fn[b]=function(a){return this.on(b,a)}}),p.each(["get","post"],function(a,c){p[c]=function(a,d,e,f){return p.isFunction(d)&&(f=f||e,e=d,d=b),p.ajax({type:c,url:a,data:d,success:e,dataType:f})}}),p.extend({getScript:function(a,c){return 
p.get(a,b,c,"script")},getJSON:function(a,b,c){return p.get(a,b,c,"json")},ajaxSetup:function(a,b){return b?cB(a,p.ajaxSettings):(b=a,a=p.ajaxSettings),cB(a,b),a},ajaxSettings:{url:cj,isLocal:cn.test(ck[1]),global:!0,type:"GET",contentType:"application/x-www-form-urlencoded; charset=UTF-8",processData:!0,async:!0,accepts:{xml:"application/xml, text/xml",html:"text/html",text:"text/plain",json:"application/json, text/javascript","*":cx},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText"},converters:{"* text":a.String,"text html":!0,"text json":p.parseJSON,"text xml":p.parseXML},flatOptions:{context:!0,url:!0}},ajaxPrefilter:cz(cv),ajaxTransport:cz(cw),ajax:function(a,c){function y(a,c,f,i){var k,s,t,u,w,y=c;if(v===2)return;v=2,h&&clearTimeout(h),g=b,e=i||"",x.readyState=a>0?4:0,f&&(u=cC(l,x,f));if(a>=200&&a<300||a===304)l.ifModified&&(w=x.getResponseHeader("Last-Modified"),w&&(p.lastModified[d]=w),w=x.getResponseHeader("Etag"),w&&(p.etag[d]=w)),a===304?(y="notmodified",k=!0):(k=cD(l,u),y=k.state,s=k.data,t=k.error,k=!t);else{t=y;if(!y||a)y="error",a<0&&(a=0)}x.status=a,x.statusText=""+(c||y),k?o.resolveWith(m,[s,y,x]):o.rejectWith(m,[x,y,t]),x.statusCode(r),r=b,j&&n.trigger("ajax"+(k?"Success":"Error"),[x,l,k?s:t]),q.fireWith(m,[x,y]),j&&(n.trigger("ajaxComplete",[x,l]),--p.active||p.event.trigger("ajaxStop"))}typeof a=="object"&&(c=a,a=b),c=c||{};var d,e,f,g,h,i,j,k,l=p.ajaxSetup({},c),m=l.context||l,n=m!==l&&(m.nodeType||m instanceof p)?p(m):p.event,o=p.Deferred(),q=p.Callbacks("once memory"),r=l.statusCode||{},t={},u={},v=0,w="canceled",x={readyState:0,setRequestHeader:function(a,b){if(!v){var c=a.toLowerCase();a=u[c]=u[c]||a,t[a]=b}return this},getAllResponseHeaders:function(){return v===2?e:null},getResponseHeader:function(a){var c;if(v===2){if(!f){f={};while(c=cm.exec(e))f[c[1].toLowerCase()]=c[2]}c=f[a.toLowerCase()]}return c===b?null:c},overrideMimeType:function(a){return v||(l.mimeType=a),this},abort:function(a){return a=a||w,g&&g.abort(a),y(0,a),this}};o.promise(x),x.success=x.done,x.error=x.fail,x.complete=q.add,x.statusCode=function(a){if(a){var b;if(v<2)for(b in a)r[b]=[r[b],a[b]];else b=a[x.status],x.always(b)}return this},l.url=((a||l.url)+"").replace(cl,"").replace(cp,ck[1]+"//"),l.dataTypes=p.trim(l.dataType||"*").toLowerCase().split(s),l.crossDomain==null&&(i=ct.exec(l.url.toLowerCase()),l.crossDomain=!(!i||i[1]==ck[1]&&i[2]==ck[2]&&(i[3]||(i[1]==="http:"?80:443))==(ck[3]||(ck[1]==="http:"?80:443)))),l.data&&l.processData&&typeof l.data!="string"&&(l.data=p.param(l.data,l.traditional)),cA(cv,l,c,x);if(v===2)return x;j=l.global,l.type=l.type.toUpperCase(),l.hasContent=!co.test(l.type),j&&p.active++===0&&p.event.trigger("ajaxStart");if(!l.hasContent){l.data&&(l.url+=(cq.test(l.url)?"&":"?")+l.data,delete l.data),d=l.url;if(l.cache===!1){var z=p.now(),A=l.url.replace(cs,"$1_="+z);l.url=A+(A===l.url?(cq.test(l.url)?"&":"?")+"_="+z:"")}}(l.data&&l.hasContent&&l.contentType!==!1||c.contentType)&&x.setRequestHeader("Content-Type",l.contentType),l.ifModified&&(d=d||l.url,p.lastModified[d]&&x.setRequestHeader("If-Modified-Since",p.lastModified[d]),p.etag[d]&&x.setRequestHeader("If-None-Match",p.etag[d])),x.setRequestHeader("Accept",l.dataTypes[0]&&l.accepts[l.dataTypes[0]]?l.accepts[l.dataTypes[0]]+(l.dataTypes[0]!=="*"?", "+cx+"; q=0.01":""):l.accepts["*"]);for(k in l.headers)x.setRequestHeader(k,l.headers[k]);if(!l.beforeSend||l.beforeSend.call(m,x,l)!==!1&&v!==2){w="abort";for(k 
in{success:1,error:1,complete:1})x[k](l[k]);g=cA(cw,l,c,x);if(!g)y(-1,"No Transport");else{x.readyState=1,j&&n.trigger("ajaxSend",[x,l]),l.async&&l.timeout>0&&(h=setTimeout(function(){x.abort("timeout")},l.timeout));try{v=1,g.send(t,y)}catch(B){if(v<2)y(-1,B);else throw B}}return x}return x.abort()},active:0,lastModified:{},etag:{}});var cE=[],cF=/\?/,cG=/(=)\?(?=&|$)|\?\?/,cH=p.now();p.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var a=cE.pop()||p.expando+"_"+cH++;return this[a]=!0,a}}),p.ajaxPrefilter("json jsonp",function(c,d,e){var f,g,h,i=c.data,j=c.url,k=c.jsonp!==!1,l=k&&cG.test(j),m=k&&!l&&typeof i=="string"&&!(c.contentType||"").indexOf("application/x-www-form-urlencoded")&&cG.test(i);if(c.dataTypes[0]==="jsonp"||l||m)return f=c.jsonpCallback=p.isFunction(c.jsonpCallback)?c.jsonpCallback():c.jsonpCallback,g=a[f],l?c.url=j.replace(cG,"$1"+f):m?c.data=i.replace(cG,"$1"+f):k&&(c.url+=(cF.test(j)?"&":"?")+c.jsonp+"="+f),c.converters["script json"]=function(){return h||p.error(f+" was not called"),h[0]},c.dataTypes[0]="json",a[f]=function(){h=arguments},e.always(function(){a[f]=g,c[f]&&(c.jsonpCallback=d.jsonpCallback,cE.push(f)),h&&p.isFunction(g)&&g(h[0]),h=g=b}),"script"}),p.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/javascript|ecmascript/},converters:{"text script":function(a){return p.globalEval(a),a}}}),p.ajaxPrefilter("script",function(a){a.cache===b&&(a.cache=!1),a.crossDomain&&(a.type="GET",a.global=!1)}),p.ajaxTransport("script",function(a){if(a.crossDomain){var c,d=e.head||e.getElementsByTagName("head")[0]||e.documentElement;return{send:function(f,g){c=e.createElement("script"),c.async="async",a.scriptCharset&&(c.charset=a.scriptCharset),c.src=a.url,c.onload=c.onreadystatechange=function(a,e){if(e||!c.readyState||/loaded|complete/.test(c.readyState))c.onload=c.onreadystatechange=null,d&&c.parentNode&&d.removeChild(c),c=b,e||g(200,"success")},d.insertBefore(c,d.firstChild)},abort:function(){c&&c.onload(0,1)}}}});var cI,cJ=a.ActiveXObject?function(){for(var a in cI)cI[a](0,1)}:!1,cK=0;p.ajaxSettings.xhr=a.ActiveXObject?function(){return!this.isLocal&&cL()||cM()}:cL,function(a){p.extend(p.support,{ajax:!!a,cors:!!a&&"withCredentials"in a})}(p.ajaxSettings.xhr()),p.support.ajax&&p.ajaxTransport(function(c){if(!c.crossDomain||p.support.cors){var d;return{send:function(e,f){var g,h,i=c.xhr();c.username?i.open(c.type,c.url,c.async,c.username,c.password):i.open(c.type,c.url,c.async);if(c.xhrFields)for(h in c.xhrFields)i[h]=c.xhrFields[h];c.mimeType&&i.overrideMimeType&&i.overrideMimeType(c.mimeType),!c.crossDomain&&!e["X-Requested-With"]&&(e["X-Requested-With"]="XMLHttpRequest");try{for(h in e)i.setRequestHeader(h,e[h])}catch(j){}i.send(c.hasContent&&c.data||null),d=function(a,e){var h,j,k,l,m;try{if(d&&(e||i.readyState===4)){d=b,g&&(i.onreadystatechange=p.noop,cJ&&delete cI[g]);if(e)i.readyState!==4&&i.abort();else{h=i.status,k=i.getAllResponseHeaders(),l={},m=i.responseXML,m&&m.documentElement&&(l.xml=m);try{l.text=i.responseText}catch(a){}try{j=i.statusText}catch(n){j=""}!h&&c.isLocal&&!c.crossDomain?h=l.text?200:404:h===1223&&(h=204)}}}catch(o){e||f(-1,o)}l&&f(h,j,l,k)},c.async?i.readyState===4?setTimeout(d,0):(g=++cK,cJ&&(cI||(cI={},p(a).unload(cJ)),cI[g]=d),i.onreadystatechange=d):d()},abort:function(){d&&d(0,1)}}}});var cN,cO,cP=/^(?:toggle|show|hide)$/,cQ=new RegExp("^(?:([-+])=|)("+q+")([a-z%]*)$","i"),cR=/queueHooks$/,cS=[cY],cT={"*":[function(a,b){var 
c,d,e,f=this.createTween(a,b),g=cQ.exec(b),h=f.cur(),i=+h||0,j=1;if(g){c=+g[2],d=g[3]||(p.cssNumber[a]?"":"px");if(d!=="px"&&i){i=p.css(f.elem,a,!0)||c||1;do e=j=j||".5",i=i/j,p.style(f.elem,a,i+d),j=f.cur()/h;while(j!==1&&j!==e)}f.unit=d,f.start=i,f.end=g[1]?i+(g[1]+1)*c:c}return f}]};p.Animation=p.extend(cW,{tweener:function(a,b){p.isFunction(a)?(b=a,a=["*"]):a=a.split(" ");var c,d=0,e=a.length;for(;d<e;d++)c=a[d],cT[c]=cT[c]||[],cT[c].unshift(b)},prefilter:function(a,b){b?cS.unshift(a):cS.push(a)}}),p.Tween=cZ,cZ.prototype={constructor:cZ,init:function(a,b,c,d,e,f){this.elem=a,this.prop=c,this.easing=e||"swing",this.options=b,this.start=this.now=this.cur(),this.end=d,this.unit=f||(p.cssNumber[c]?"":"px")},cur:function(){var a=cZ.propHooks[this.prop];return a&&a.get?a.get(this):cZ.propHooks._default.get(this)},run:function(a){var b,c=cZ.propHooks[this.prop];return this.options.duration?this.pos=b=p.easing[this.easing](a,this.options.duration*a,0,1,this.options.duration):this.pos=b=a,this.now=(this.end-this.start)*b+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),c&&c.set?c.set(this):cZ.propHooks._default.set(this),this}},cZ.prototype.init.prototype=cZ.prototype,cZ.propHooks={_default:{get:function(a){var b;return a.elem[a.prop]==null||!!a.elem.style&&a.elem.style[a.prop]!=null?(b=p.css(a.elem,a.prop,!1,""),!b||b==="auto"?0:b):a.elem[a.prop]},set:function(a){p.fx.step[a.prop]?p.fx.step[a.prop](a):a.elem.style&&(a.elem.style[p.cssProps[a.prop]]!=null||p.cssHooks[a.prop])?p.style(a.elem,a.prop,a.now+a.unit):a.elem[a.prop]=a.now}}},cZ.propHooks.scrollTop=cZ.propHooks.scrollLeft={set:function(a){a.elem.nodeType&&a.elem.parentNode&&(a.elem[a.prop]=a.now)}},p.each(["toggle","show","hide"],function(a,b){var c=p.fn[b];p.fn[b]=function(d,e,f){return d==null||typeof d=="boolean"||!a&&p.isFunction(d)&&p.isFunction(e)?c.apply(this,arguments):this.animate(c$(b,!0),d,e,f)}}),p.fn.extend({fadeTo:function(a,b,c,d){return this.filter(bZ).css("opacity",0).show().end().animate({opacity:b},a,c,d)},animate:function(a,b,c,d){var e=p.isEmptyObject(a),f=p.speed(b,c,d),g=function(){var b=cW(this,p.extend({},a),f);e&&b.stop(!0)};return e||f.queue===!1?this.each(g):this.queue(f.queue,g)},stop:function(a,c,d){var e=function(a){var b=a.stop;delete a.stop,b(d)};return typeof a!="string"&&(d=c,c=a,a=b),c&&a!==!1&&this.queue(a||"fx",[]),this.each(function(){var b=!0,c=a!=null&&a+"queueHooks",f=p.timers,g=p._data(this);if(c)g[c]&&g[c].stop&&e(g[c]);else for(c in g)g[c]&&g[c].stop&&cR.test(c)&&e(g[c]);for(c=f.length;c--;)f[c].elem===this&&(a==null||f[c].queue===a)&&(f[c].anim.stop(d),b=!1,f.splice(c,1));(b||!d)&&p.dequeue(this,a)})}}),p.each({slideDown:c$("show"),slideUp:c$("hide"),slideToggle:c$("toggle"),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(a,b){p.fn[a]=function(a,c,d){return this.animate(b,a,c,d)}}),p.speed=function(a,b,c){var d=a&&typeof a=="object"?p.extend({},a):{complete:c||!c&&b||p.isFunction(a)&&a,duration:a,easing:c&&b||b&&!p.isFunction(b)&&b};d.duration=p.fx.off?0:typeof d.duration=="number"?d.duration:d.duration in p.fx.speeds?p.fx.speeds[d.duration]:p.fx.speeds._default;if(d.queue==null||d.queue===!0)d.queue="fx";return d.old=d.complete,d.complete=function(){p.isFunction(d.old)&&d.old.call(this),d.queue&&p.dequeue(this,d.queue)},d},p.easing={linear:function(a){return a},swing:function(a){return.5-Math.cos(a*Math.PI)/2}},p.timers=[],p.fx=cZ.prototype.init,p.fx.tick=function(){var 
a,b=p.timers,c=0;for(;c<b.length;c++)a=b[c],!a()&&b[c]===a&&b.splice(c--,1);b.length||p.fx.stop()},p.fx.timer=function(a){a()&&p.timers.push(a)&&!cO&&(cO=setInterval(p.fx.tick,p.fx.interval))},p.fx.interval=13,p.fx.stop=function(){clearInterval(cO),cO=null},p.fx.speeds={slow:600,fast:200,_default:400},p.fx.step={},p.expr&&p.expr.filters&&(p.expr.filters.animated=function(a){return p.grep(p.timers,function(b){return a===b.elem}).length});var c_=/^(?:body|html)$/i;p.fn.offset=function(a){if(arguments.length)return a===b?this:this.each(function(b){p.offset.setOffset(this,a,b)});var c,d,e,f,g,h,i,j,k,l,m=this[0],n=m&&m.ownerDocument;if(!n)return;return(e=n.body)===m?p.offset.bodyOffset(m):(d=n.documentElement,p.contains(d,m)?(c=m.getBoundingClientRect(),f=da(n),g=d.clientTop||e.clientTop||0,h=d.clientLeft||e.clientLeft||0,i=f.pageYOffset||d.scrollTop,j=f.pageXOffset||d.scrollLeft,k=c.top+i-g,l=c.left+j-h,{top:k,left:l}):{top:0,left:0})},p.offset={bodyOffset:function(a){var b=a.offsetTop,c=a.offsetLeft;return p.support.doesNotIncludeMarginInBodyOffset&&(b+=parseFloat(p.css(a,"marginTop"))||0,c+=parseFloat(p.css(a,"marginLeft"))||0),{top:b,left:c}},setOffset:function(a,b,c){var d=p.css(a,"position");d==="static"&&(a.style.position="relative");var e=p(a),f=e.offset(),g=p.css(a,"top"),h=p.css(a,"left"),i=(d==="absolute"||d==="fixed")&&p.inArray("auto",[g,h])>-1,j={},k={},l,m;i?(k=e.position(),l=k.top,m=k.left):(l=parseFloat(g)||0,m=parseFloat(h)||0),p.isFunction(b)&&(b=b.call(a,c,f)),b.top!=null&&(j.top=b.top-f.top+l),b.left!=null&&(j.left=b.left-f.left+m),"using"in b?b.using.call(a,j):e.css(j)}},p.fn.extend({position:function(){if(!this[0])return;var a=this[0],b=this.offsetParent(),c=this.offset(),d=c_.test(b[0].nodeName)?{top:0,left:0}:b.offset();return c.top-=parseFloat(p.css(a,"marginTop"))||0,c.left-=parseFloat(p.css(a,"marginLeft"))||0,d.top+=parseFloat(p.css(b[0],"borderTopWidth"))||0,d.left+=parseFloat(p.css(b[0],"borderLeftWidth"))||0,{top:c.top-d.top,left:c.left-d.left}},offsetParent:function(){return this.map(function(){var a=this.offsetParent||e.body;while(a&&!c_.test(a.nodeName)&&p.css(a,"position")==="static")a=a.offsetParent;return a||e.body})}}),p.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(a,c){var d=/Y/.test(c);p.fn[a]=function(e){return p.access(this,function(a,e,f){var g=da(a);if(f===b)return g?c in g?g[c]:g.document.documentElement[e]:a[e];g?g.scrollTo(d?p(g).scrollLeft():f,d?f:p(g).scrollTop()):a[e]=f},a,e,arguments.length,null)}}),p.each({Height:"height",Width:"width"},function(a,c){p.each({padding:"inner"+a,content:c,"":"outer"+a},function(d,e){p.fn[e]=function(e,f){var g=arguments.length&&(d||typeof e!="boolean"),h=d||(e===!0||f===!0?"margin":"border");return p.access(this,function(c,d,e){var f;return p.isWindow(c)?c.document.documentElement["client"+a]:c.nodeType===9?(f=c.documentElement,Math.max(c.body["scroll"+a],f["scroll"+a],c.body["offset"+a],f["offset"+a],f["client"+a])):e===b?p.css(c,d,e,h):p.style(c,d,e,h)},c,g?e:b,g,null)}})}),a.jQuery=a.$=p,typeof define=="function"&&define.amd&&define.amd.jQuery&&define("jquery",[],function(){return p})})(window); \ No newline at end of file diff --git a/awx/lib/site-packages/rest_framework/static/rest_framework/js/prettify-min.js b/awx/lib/site-packages/rest_framework/static/rest_framework/js/prettify-min.js new file mode 100644 index 0000000000..eef5ad7e6a --- /dev/null +++ b/awx/lib/site-packages/rest_framework/static/rest_framework/js/prettify-min.js @@ -0,0 +1,28 @@ +var 
q=null;window.PR_SHOULD_USE_CONTINUATION=!0; +(function(){function L(a){function m(a){var f=a.charCodeAt(0);if(f!==92)return f;var b=a.charAt(1);return(f=r[b])?f:"0"<=b&&b<="7"?parseInt(a.substring(1),8):b==="u"||b==="x"?parseInt(a.substring(2),16):a.charCodeAt(1)}function e(a){if(a<32)return(a<16?"\\x0":"\\x")+a.toString(16);a=String.fromCharCode(a);if(a==="\\"||a==="-"||a==="["||a==="]")a="\\"+a;return a}function h(a){for(var f=a.substring(1,a.length-1).match(/\\u[\dA-Fa-f]{4}|\\x[\dA-Fa-f]{2}|\\[0-3][0-7]{0,2}|\\[0-7]{1,2}|\\[\S\s]|[^\\]/g),a= +[],b=[],o=f[0]==="^",c=o?1:0,i=f.length;c<i;++c){var j=f[c];if(/\\[bdsw]/i.test(j))a.push(j);else{var j=m(j),d;c+2<i&&"-"===f[c+1]?(d=m(f[c+2]),c+=2):d=j;b.push([j,d]);d<65||j>122||(d<65||j>90||b.push([Math.max(65,j)|32,Math.min(d,90)|32]),d<97||j>122||b.push([Math.max(97,j)&-33,Math.min(d,122)&-33]))}}b.sort(function(a,f){return a[0]-f[0]||f[1]-a[1]});f=[];j=[NaN,NaN];for(c=0;c<b.length;++c)i=b[c],i[0]<=j[1]+1?j[1]=Math.max(j[1],i[1]):f.push(j=i);b=["["];o&&b.push("^");b.push.apply(b,a);for(c=0;c< +f.length;++c)i=f[c],b.push(e(i[0])),i[1]>i[0]&&(i[1]+1>i[0]&&b.push("-"),b.push(e(i[1])));b.push("]");return b.join("")}function y(a){for(var f=a.source.match(/\[(?:[^\\\]]|\\[\S\s])*]|\\u[\dA-Fa-f]{4}|\\x[\dA-Fa-f]{2}|\\\d+|\\[^\dux]|\(\?[!:=]|[()^]|[^()[\\^]+/g),b=f.length,d=[],c=0,i=0;c<b;++c){var j=f[c];j==="("?++i:"\\"===j.charAt(0)&&(j=+j.substring(1))&&j<=i&&(d[j]=-1)}for(c=1;c<d.length;++c)-1===d[c]&&(d[c]=++t);for(i=c=0;c<b;++c)j=f[c],j==="("?(++i,d[i]===void 0&&(f[c]="(?:")):"\\"===j.charAt(0)&& +(j=+j.substring(1))&&j<=i&&(f[c]="\\"+d[i]);for(i=c=0;c<b;++c)"^"===f[c]&&"^"!==f[c+1]&&(f[c]="");if(a.ignoreCase&&s)for(c=0;c<b;++c)j=f[c],a=j.charAt(0),j.length>=2&&a==="["?f[c]=h(j):a!=="\\"&&(f[c]=j.replace(/[A-Za-z]/g,function(a){a=a.charCodeAt(0);return"["+String.fromCharCode(a&-33,a|32)+"]"}));return f.join("")}for(var t=0,s=!1,l=!1,p=0,d=a.length;p<d;++p){var g=a[p];if(g.ignoreCase)l=!0;else if(/[a-z]/i.test(g.source.replace(/\\u[\da-f]{4}|\\x[\da-f]{2}|\\[^UXux]/gi,""))){s=!0;l=!1;break}}for(var r= +{b:8,t:9,n:10,v:11,f:12,r:13},n=[],p=0,d=a.length;p<d;++p){g=a[p];if(g.global||g.multiline)throw Error(""+g);n.push("(?:"+y(g)+")")}return RegExp(n.join("|"),l?"gi":"g")}function M(a){function m(a){switch(a.nodeType){case 1:if(e.test(a.className))break;for(var g=a.firstChild;g;g=g.nextSibling)m(g);g=a.nodeName;if("BR"===g||"LI"===g)h[s]="\n",t[s<<1]=y++,t[s++<<1|1]=a;break;case 3:case 4:g=a.nodeValue,g.length&&(g=p?g.replace(/\r\n?/g,"\n"):g.replace(/[\t\n\r ]+/g," "),h[s]=g,t[s<<1]=y,y+=g.length, +t[s++<<1|1]=a)}}var e=/(?:^|\s)nocode(?:\s|$)/,h=[],y=0,t=[],s=0,l;a.currentStyle?l=a.currentStyle.whiteSpace:window.getComputedStyle&&(l=document.defaultView.getComputedStyle(a,q).getPropertyValue("white-space"));var p=l&&"pre"===l.substring(0,3);m(a);return{a:h.join("").replace(/\n$/,""),c:t}}function B(a,m,e,h){m&&(a={a:m,d:a},e(a),h.push.apply(h,a.e))}function x(a,m){function e(a){for(var l=a.d,p=[l,"pln"],d=0,g=a.a.match(y)||[],r={},n=0,z=g.length;n<z;++n){var f=g[n],b=r[f],o=void 0,c;if(typeof b=== +"string")c=!1;else{var i=h[f.charAt(0)];if(i)o=f.match(i[1]),b=i[0];else{for(c=0;c<t;++c)if(i=m[c],o=f.match(i[1])){b=i[0];break}o||(b="pln")}if((c=b.length>=5&&"lang-"===b.substring(0,5))&&!(o&&typeof o[1]==="string"))c=!1,b="src";c||(r[f]=b)}i=d;d+=f.length;if(c){c=o[1];var j=f.indexOf(c),k=j+c.length;o[2]&&(k=f.length-o[2].length,j=k-c.length);b=b.substring(5);B(l+i,f.substring(0,j),e,p);B(l+i+j,c,C(b,c),p);B(l+i+k,f.substring(k),e,p)}else 
p.push(l+i,b)}a.e=p}var h={},y;(function(){for(var e=a.concat(m), +l=[],p={},d=0,g=e.length;d<g;++d){var r=e[d],n=r[3];if(n)for(var k=n.length;--k>=0;)h[n.charAt(k)]=r;r=r[1];n=""+r;p.hasOwnProperty(n)||(l.push(r),p[n]=q)}l.push(/[\S\s]/);y=L(l)})();var t=m.length;return e}function u(a){var m=[],e=[];a.tripleQuotedStrings?m.push(["str",/^(?:'''(?:[^'\\]|\\[\S\s]|''?(?=[^']))*(?:'''|$)|"""(?:[^"\\]|\\[\S\s]|""?(?=[^"]))*(?:"""|$)|'(?:[^'\\]|\\[\S\s])*(?:'|$)|"(?:[^"\\]|\\[\S\s])*(?:"|$))/,q,"'\""]):a.multiLineStrings?m.push(["str",/^(?:'(?:[^'\\]|\\[\S\s])*(?:'|$)|"(?:[^"\\]|\\[\S\s])*(?:"|$)|`(?:[^\\`]|\\[\S\s])*(?:`|$))/, +q,"'\"`"]):m.push(["str",/^(?:'(?:[^\n\r'\\]|\\.)*(?:'|$)|"(?:[^\n\r"\\]|\\.)*(?:"|$))/,q,"\"'"]);a.verbatimStrings&&e.push(["str",/^@"(?:[^"]|"")*(?:"|$)/,q]);var h=a.hashComments;h&&(a.cStyleComments?(h>1?m.push(["com",/^#(?:##(?:[^#]|#(?!##))*(?:###|$)|.*)/,q,"#"]):m.push(["com",/^#(?:(?:define|elif|else|endif|error|ifdef|include|ifndef|line|pragma|undef|warning)\b|[^\n\r]*)/,q,"#"]),e.push(["str",/^<(?:(?:(?:\.\.\/)*|\/?)(?:[\w-]+(?:\/[\w-]+)+)?[\w-]+\.h|[a-z]\w*)>/,q])):m.push(["com",/^#[^\n\r]*/, +q,"#"]));a.cStyleComments&&(e.push(["com",/^\/\/[^\n\r]*/,q]),e.push(["com",/^\/\*[\S\s]*?(?:\*\/|$)/,q]));a.regexLiterals&&e.push(["lang-regex",/^(?:^^\.?|[!+-]|!=|!==|#|%|%=|&|&&|&&=|&=|\(|\*|\*=|\+=|,|-=|->|\/|\/=|:|::|;|<|<<|<<=|<=|=|==|===|>|>=|>>|>>=|>>>|>>>=|[?@[^]|\^=|\^\^|\^\^=|{|\||\|=|\|\||\|\|=|~|break|case|continue|delete|do|else|finally|instanceof|return|throw|try|typeof)\s*(\/(?=[^*/])(?:[^/[\\]|\\[\S\s]|\[(?:[^\\\]]|\\[\S\s])*(?:]|$))+\/)/]);(h=a.types)&&e.push(["typ",h]);a=(""+a.keywords).replace(/^ | $/g, +"");a.length&&e.push(["kwd",RegExp("^(?:"+a.replace(/[\s,]+/g,"|")+")\\b"),q]);m.push(["pln",/^\s+/,q," \r\n\t\xa0"]);e.push(["lit",/^@[$_a-z][\w$@]*/i,q],["typ",/^(?:[@_]?[A-Z]+[a-z][\w$@]*|\w+_t\b)/,q],["pln",/^[$_a-z][\w$@]*/i,q],["lit",/^(?:0x[\da-f]+|(?:\d(?:_\d+)*\d*(?:\.\d*)?|\.\d\+)(?:e[+-]?\d+)?)[a-z]*/i,q,"0123456789"],["pln",/^\\[\S\s]?/,q],["pun",/^.[^\s\w"-$'./@\\`]*/,q]);return x(m,e)}function D(a,m){function e(a){switch(a.nodeType){case 1:if(k.test(a.className))break;if("BR"===a.nodeName)h(a), +a.parentNode&&a.parentNode.removeChild(a);else for(a=a.firstChild;a;a=a.nextSibling)e(a);break;case 3:case 4:if(p){var b=a.nodeValue,d=b.match(t);if(d){var c=b.substring(0,d.index);a.nodeValue=c;(b=b.substring(d.index+d[0].length))&&a.parentNode.insertBefore(s.createTextNode(b),a.nextSibling);h(a);c||a.parentNode.removeChild(a)}}}}function h(a){function b(a,d){var e=d?a.cloneNode(!1):a,f=a.parentNode;if(f){var f=b(f,1),g=a.nextSibling;f.appendChild(e);for(var h=g;h;h=g)g=h.nextSibling,f.appendChild(h)}return e} +for(;!a.nextSibling;)if(a=a.parentNode,!a)return;for(var a=b(a.nextSibling,0),e;(e=a.parentNode)&&e.nodeType===1;)a=e;d.push(a)}var k=/(?:^|\s)nocode(?:\s|$)/,t=/\r\n?|\n/,s=a.ownerDocument,l;a.currentStyle?l=a.currentStyle.whiteSpace:window.getComputedStyle&&(l=s.defaultView.getComputedStyle(a,q).getPropertyValue("white-space"));var p=l&&"pre"===l.substring(0,3);for(l=s.createElement("LI");a.firstChild;)l.appendChild(a.firstChild);for(var d=[l],g=0;g<d.length;++g)e(d[g]);m===(m|0)&&d[0].setAttribute("value", +m);var r=s.createElement("OL");r.className="linenums";for(var n=Math.max(0,m-1|0)||0,g=0,z=d.length;g<z;++g)l=d[g],l.className="L"+(g+n)%10,l.firstChild||l.appendChild(s.createTextNode("\xa0")),r.appendChild(l);a.appendChild(r)}function k(a,m){for(var e=m.length;--e>=0;){var 
h=m[e];A.hasOwnProperty(h)?window.console&&console.warn("cannot override language handler %s",h):A[h]=a}}function C(a,m){if(!a||!A.hasOwnProperty(a))a=/^\s*</.test(m)?"default-markup":"default-code";return A[a]}function E(a){var m= +a.g;try{var e=M(a.h),h=e.a;a.a=h;a.c=e.c;a.d=0;C(m,h)(a);var k=/\bMSIE\b/.test(navigator.userAgent),m=/\n/g,t=a.a,s=t.length,e=0,l=a.c,p=l.length,h=0,d=a.e,g=d.length,a=0;d[g]=s;var r,n;for(n=r=0;n<g;)d[n]!==d[n+2]?(d[r++]=d[n++],d[r++]=d[n++]):n+=2;g=r;for(n=r=0;n<g;){for(var z=d[n],f=d[n+1],b=n+2;b+2<=g&&d[b+1]===f;)b+=2;d[r++]=z;d[r++]=f;n=b}for(d.length=r;h<p;){var o=l[h+2]||s,c=d[a+2]||s,b=Math.min(o,c),i=l[h+1],j;if(i.nodeType!==1&&(j=t.substring(e,b))){k&&(j=j.replace(m,"\r"));i.nodeValue= +j;var u=i.ownerDocument,v=u.createElement("SPAN");v.className=d[a+1];var x=i.parentNode;x.replaceChild(v,i);v.appendChild(i);e<o&&(l[h+1]=i=u.createTextNode(t.substring(b,o)),x.insertBefore(i,v.nextSibling))}e=b;e>=o&&(h+=2);e>=c&&(a+=2)}}catch(w){"console"in window&&console.log(w&&w.stack?w.stack:w)}}var v=["break,continue,do,else,for,if,return,while"],w=[[v,"auto,case,char,const,default,double,enum,extern,float,goto,int,long,register,short,signed,sizeof,static,struct,switch,typedef,union,unsigned,void,volatile"], +"catch,class,delete,false,import,new,operator,private,protected,public,this,throw,true,try,typeof"],F=[w,"alignof,align_union,asm,axiom,bool,concept,concept_map,const_cast,constexpr,decltype,dynamic_cast,explicit,export,friend,inline,late_check,mutable,namespace,nullptr,reinterpret_cast,static_assert,static_cast,template,typeid,typename,using,virtual,where"],G=[w,"abstract,boolean,byte,extends,final,finally,implements,import,instanceof,null,native,package,strictfp,super,synchronized,throws,transient"], +H=[G,"as,base,by,checked,decimal,delegate,descending,dynamic,event,fixed,foreach,from,group,implicit,in,interface,internal,into,is,lock,object,out,override,orderby,params,partial,readonly,ref,sbyte,sealed,stackalloc,string,select,uint,ulong,unchecked,unsafe,ushort,var"],w=[w,"debugger,eval,export,function,get,null,set,undefined,var,with,Infinity,NaN"],I=[v,"and,as,assert,class,def,del,elif,except,exec,finally,from,global,import,in,is,lambda,nonlocal,not,or,pass,print,raise,try,with,yield,False,True,None"], +J=[v,"alias,and,begin,case,class,def,defined,elsif,end,ensure,false,in,module,next,nil,not,or,redo,rescue,retry,self,super,then,true,undef,unless,until,when,yield,BEGIN,END"],v=[v,"case,done,elif,esac,eval,fi,function,in,local,set,then,until"],K=/^(DIR|FILE|vector|(de|priority_)?queue|list|stack|(const_)?iterator|(multi)?(set|map)|bitset|u?(int|float)\d*)/,N=/\S/,O=u({keywords:[F,H,w,"caller,delete,die,do,dump,elsif,eval,exit,foreach,for,goto,if,import,last,local,my,next,no,our,print,package,redo,require,sub,undef,unless,until,use,wantarray,while,BEGIN,END"+ +I,J,v],hashComments:!0,cStyleComments:!0,multiLineStrings:!0,regexLiterals:!0}),A={};k(O,["default-code"]);k(x([],[["pln",/^[^<?]+/],["dec",/^<!\w[^>]*(?:>|$)/],["com",/^<\!--[\S\s]*?(?:--\>|$)/],["lang-",/^<\?([\S\s]+?)(?:\?>|$)/],["lang-",/^<%([\S\s]+?)(?:%>|$)/],["pun",/^(?:<[%?]|[%?]>)/],["lang-",/^<xmp\b[^>]*>([\S\s]+?)<\/xmp\b[^>]*>/i],["lang-js",/^<script\b[^>]*>([\S\s]*?)(<\/script\b[^>]*>)/i],["lang-css",/^<style\b[^>]*>([\S\s]*?)(<\/style\b[^>]*>)/i],["lang-in.tag",/^(<\/?[a-z][^<>]*>)/i]]), +["default-markup","htm","html","mxml","xhtml","xml","xsl"]);k(x([["pln",/^\s+/,q," 
\t\r\n"],["atv",/^(?:"[^"]*"?|'[^']*'?)/,q,"\"'"]],[["tag",/^^<\/?[a-z](?:[\w-.:]*\w)?|\/?>$/i],["atn",/^(?!style[\s=]|on)[a-z](?:[\w:-]*\w)?/i],["lang-uq.val",/^=\s*([^\s"'>]*(?:[^\s"'/>]|\/(?=\s)))/],["pun",/^[/<->]+/],["lang-js",/^on\w+\s*=\s*"([^"]+)"/i],["lang-js",/^on\w+\s*=\s*'([^']+)'/i],["lang-js",/^on\w+\s*=\s*([^\s"'>]+)/i],["lang-css",/^style\s*=\s*"([^"]+)"/i],["lang-css",/^style\s*=\s*'([^']+)'/i],["lang-css", +/^style\s*=\s*([^\s"'>]+)/i]]),["in.tag"]);k(x([],[["atv",/^[\S\s]+/]]),["uq.val"]);k(u({keywords:F,hashComments:!0,cStyleComments:!0,types:K}),["c","cc","cpp","cxx","cyc","m"]);k(u({keywords:"null,true,false"}),["json"]);k(u({keywords:H,hashComments:!0,cStyleComments:!0,verbatimStrings:!0,types:K}),["cs"]);k(u({keywords:G,cStyleComments:!0}),["java"]);k(u({keywords:v,hashComments:!0,multiLineStrings:!0}),["bsh","csh","sh"]);k(u({keywords:I,hashComments:!0,multiLineStrings:!0,tripleQuotedStrings:!0}), +["cv","py"]);k(u({keywords:"caller,delete,die,do,dump,elsif,eval,exit,foreach,for,goto,if,import,last,local,my,next,no,our,print,package,redo,require,sub,undef,unless,until,use,wantarray,while,BEGIN,END",hashComments:!0,multiLineStrings:!0,regexLiterals:!0}),["perl","pl","pm"]);k(u({keywords:J,hashComments:!0,multiLineStrings:!0,regexLiterals:!0}),["rb"]);k(u({keywords:w,cStyleComments:!0,regexLiterals:!0}),["js"]);k(u({keywords:"all,and,by,catch,class,else,extends,false,finally,for,if,in,is,isnt,loop,new,no,not,null,of,off,on,or,return,super,then,true,try,unless,until,when,while,yes", +hashComments:3,cStyleComments:!0,multilineStrings:!0,tripleQuotedStrings:!0,regexLiterals:!0}),["coffee"]);k(x([],[["str",/^[\S\s]+/]]),["regex"]);window.prettyPrintOne=function(a,m,e){var h=document.createElement("PRE");h.innerHTML=a;e&&D(h,e);E({g:m,i:e,h:h});return h.innerHTML};window.prettyPrint=function(a){function m(){for(var e=window.PR_SHOULD_USE_CONTINUATION?l.now()+250:Infinity;p<h.length&&l.now()<e;p++){var n=h[p],k=n.className;if(k.indexOf("prettyprint")>=0){var k=k.match(g),f,b;if(b= +!k){b=n;for(var o=void 0,c=b.firstChild;c;c=c.nextSibling)var i=c.nodeType,o=i===1?o?b:c:i===3?N.test(c.nodeValue)?b:o:o;b=(f=o===b?void 0:o)&&"CODE"===f.tagName}b&&(k=f.className.match(g));k&&(k=k[1]);b=!1;for(o=n.parentNode;o;o=o.parentNode)if((o.tagName==="pre"||o.tagName==="code"||o.tagName==="xmp")&&o.className&&o.className.indexOf("prettyprint")>=0){b=!0;break}b||((b=(b=n.className.match(/\blinenums\b(?::(\d+))?/))?b[1]&&b[1].length?+b[1]:!0:!1)&&D(n,b),d={g:k,h:n,i:b},E(d))}}p<h.length?setTimeout(m, +250):a&&a()}for(var e=[document.getElementsByTagName("pre"),document.getElementsByTagName("code"),document.getElementsByTagName("xmp")],h=[],k=0;k<e.length;++k)for(var t=0,s=e[k].length;t<s;++t)h.push(e[k][t]);var e=q,l=Date;l.now||(l={now:function(){return+new Date}});var p=0,d,g=/\blang(?:uage)?-([\w.]+)(?!\S)/;m()};window.PR={createSimpleLexer:x,registerLangHandler:k,sourceDecorator:u,PR_ATTRIB_NAME:"atn",PR_ATTRIB_VALUE:"atv",PR_COMMENT:"com",PR_DECLARATION:"dec",PR_KEYWORD:"kwd",PR_LITERAL:"lit", +PR_NOCODE:"nocode",PR_PLAIN:"pln",PR_PUNCTUATION:"pun",PR_SOURCE:"src",PR_STRING:"str",PR_TAG:"tag",PR_TYPE:"typ"}})(); diff --git a/awx/lib/site-packages/rest_framework/status.py b/awx/lib/site-packages/rest_framework/status.py new file mode 100644 index 0000000000..b9f249f9fc --- /dev/null +++ b/awx/lib/site-packages/rest_framework/status.py @@ -0,0 +1,53 @@ +""" +Descriptive HTTP status codes, for code readability. 
+ +See RFC 2616 - http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html +And RFC 6585 - http://tools.ietf.org/html/rfc6585 +""" +from __future__ import unicode_literals + +HTTP_100_CONTINUE = 100 +HTTP_101_SWITCHING_PROTOCOLS = 101 +HTTP_200_OK = 200 +HTTP_201_CREATED = 201 +HTTP_202_ACCEPTED = 202 +HTTP_203_NON_AUTHORITATIVE_INFORMATION = 203 +HTTP_204_NO_CONTENT = 204 +HTTP_205_RESET_CONTENT = 205 +HTTP_206_PARTIAL_CONTENT = 206 +HTTP_300_MULTIPLE_CHOICES = 300 +HTTP_301_MOVED_PERMANENTLY = 301 +HTTP_302_FOUND = 302 +HTTP_303_SEE_OTHER = 303 +HTTP_304_NOT_MODIFIED = 304 +HTTP_305_USE_PROXY = 305 +HTTP_306_RESERVED = 306 +HTTP_307_TEMPORARY_REDIRECT = 307 +HTTP_400_BAD_REQUEST = 400 +HTTP_401_UNAUTHORIZED = 401 +HTTP_402_PAYMENT_REQUIRED = 402 +HTTP_403_FORBIDDEN = 403 +HTTP_404_NOT_FOUND = 404 +HTTP_405_METHOD_NOT_ALLOWED = 405 +HTTP_406_NOT_ACCEPTABLE = 406 +HTTP_407_PROXY_AUTHENTICATION_REQUIRED = 407 +HTTP_408_REQUEST_TIMEOUT = 408 +HTTP_409_CONFLICT = 409 +HTTP_410_GONE = 410 +HTTP_411_LENGTH_REQUIRED = 411 +HTTP_412_PRECONDITION_FAILED = 412 +HTTP_413_REQUEST_ENTITY_TOO_LARGE = 413 +HTTP_414_REQUEST_URI_TOO_LONG = 414 +HTTP_415_UNSUPPORTED_MEDIA_TYPE = 415 +HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE = 416 +HTTP_417_EXPECTATION_FAILED = 417 +HTTP_428_PRECONDITION_REQUIRED = 428 +HTTP_429_TOO_MANY_REQUESTS = 429 +HTTP_431_REQUEST_HEADER_FIELDS_TOO_LARGE = 431 +HTTP_500_INTERNAL_SERVER_ERROR = 500 +HTTP_501_NOT_IMPLEMENTED = 501 +HTTP_502_BAD_GATEWAY = 502 +HTTP_503_SERVICE_UNAVAILABLE = 503 +HTTP_504_GATEWAY_TIMEOUT = 504 +HTTP_505_HTTP_VERSION_NOT_SUPPORTED = 505 +HTTP_511_NETWORK_AUTHENTICATION_REQUIRED = 511 diff --git a/awx/lib/site-packages/rest_framework/templates/rest_framework/api.html b/awx/lib/site-packages/rest_framework/templates/rest_framework/api.html new file mode 100644 index 0000000000..81d277e967 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/templates/rest_framework/api.html @@ -0,0 +1,3 @@ +{% extends "rest_framework/base.html" %} + +{# Override this template in your own templates directory to customize #} diff --git a/awx/lib/site-packages/rest_framework/templates/rest_framework/base.html b/awx/lib/site-packages/rest_framework/templates/rest_framework/base.html new file mode 100644 index 0000000000..9d939e738b --- /dev/null +++ b/awx/lib/site-packages/rest_framework/templates/rest_framework/base.html @@ -0,0 +1,236 @@ +{% load url from future %} +{% load rest_framework %} +<!DOCTYPE html> +<html> + <head> + {% block head %} + + {% block meta %} + <meta http-equiv="Content-Type" content="text/html; charset=utf-8"/> + <meta name="robots" content="NONE,NOARCHIVE" /> + {% endblock %} + + <title>{% block title %}Django REST framework{% endblock %} + + {% block style %} + {% block bootstrap_theme %} + + + {% endblock %} + + + {% endblock %} + + {% endblock %} + + + + +
+ + {% block navbar %} + + {% endblock %} + + {% block breadcrumbs %} + + {% endblock %} + + +
+ + {% if 'GET' in allowed_methods %} +
+
+
+ GET + + + +
+ +
+
+ {% endif %} + + {% if options_form %} +
+ {% csrf_token %} + + +
+ {% endif %} + + {% if delete_form %} +
+ {% csrf_token %} + + +
+ {% endif %} + +
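+{# The header buttons above: the GET dropdown re-requests this page in each available renderer format (e.g. appending ?format=json via add_query_param), while OPTIONS and DELETE submit plain POST forms that REST framework's form method override turns into real OPTIONS/DELETE requests server-side. #}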
+ + {{ description }} +
+
{{ request.method }} {{ request.get_full_path }}
+
+
+
HTTP {{ response.status_code }} {{ response.status_text }}{% autoescape off %} +{% for key, val in response.items %}{{ key }}: {{ val|break_long_headers|urlize_quoted_links }} +{% endfor %} +
{{ content|urlize_quoted_links }}
{% endautoescape %} +
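+{# break_long_headers and urlize_quoted_links are filters from the rest_framework template tag library added later in this diff: the first wraps very long comma-separated header values, the second makes URLs and emails in the raw response body clickable. #}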
+
+ + {% if response.status_code != 403 %} + + {% if post_form or raw_data_post_form %} +
+ {% if post_form %} + + {% endif %} +
+ {% if post_form %} +
+ {% with form=post_form %} +
+
+ {% include "rest_framework/form.html" %} +
+ +
+
+
+ {% endwith %} +
+ {% endif %} +
+ {% with form=raw_data_post_form %} +
+
+ {% include "rest_framework/form.html" %} +
+ +
+
+
+ {% endwith %} +
+
+
+ {% endif %} + + {% if put_form or raw_data_put_form or raw_data_patch_form %} +
+ {% if put_form %} + + {% endif %} +
+ {% if put_form %} +
+ {% with form=put_form %} +
+
+ {% include "rest_framework/form.html" %} +
+ +
+
+
+ {% endwith %} +
+ {% endif %} +
+ {% with form=raw_data_put_or_patch_form %} +
+
+ {% include "rest_framework/form.html" %} +
+ {% if raw_data_put_form %} + + {% endif %} + {% if raw_data_patch_form %} + + {% endif %} +
+
+
+ {% endwith %} +
+
+
+ {% endif %} + {% endif %} + +
+ + +
+ + +
+ + + + + + {% block footer %} + + {% endblock %} + + {% block script %} + + + + + {% endblock %} + + diff --git a/awx/lib/site-packages/rest_framework/templates/rest_framework/form.html b/awx/lib/site-packages/rest_framework/templates/rest_framework/form.html new file mode 100644 index 0000000000..b27f652e98 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/templates/rest_framework/form.html @@ -0,0 +1,13 @@ +{% load rest_framework %} +{% csrf_token %} +{{ form.non_field_errors }} +{% for field in form %} +
+<div class="control-group"> <!--- {% if field.errors %}error{% endif %} -->
+  {{ field.label_tag|add_class:"control-label" }}
+  <div class="controls">
+    {{ field }}
+    <span class="help-inline">{{ field.help_text }}</span>
+    <!-- {{ field.errors|add_class:"help-block" }} -->
+  </div>
+</div>
+{% endfor %} diff --git a/awx/lib/site-packages/rest_framework/templates/rest_framework/login.html b/awx/lib/site-packages/rest_framework/templates/rest_framework/login.html new file mode 100644 index 0000000000..b76293279a --- /dev/null +++ b/awx/lib/site-packages/rest_framework/templates/rest_framework/login.html @@ -0,0 +1,3 @@ +{% extends "rest_framework/login_base.html" %} + +{# Override this template in your own templates directory to customize #} diff --git a/awx/lib/site-packages/rest_framework/templates/rest_framework/login_base.html b/awx/lib/site-packages/rest_framework/templates/rest_framework/login_base.html new file mode 100644 index 0000000000..be9a0072a8 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/templates/rest_framework/login_base.html @@ -0,0 +1,53 @@ +{% load url from future %} +{% load rest_framework %} + + + + {% block style %} + {% block bootstrap_theme %} + + + {% endblock %} + + {% endblock %} + + + + +
+
+
+
+
+ {% block branding %}

Django REST framework

{% endblock %} +
+
+ +
+
+
+ {% csrf_token %} +
+
+ + +
+
+
+
+ + +
+
+ +
+ +
+
+
+
+
+
+
+ + diff --git a/awx/lib/site-packages/rest_framework/templatetags/__init__.py b/awx/lib/site-packages/rest_framework/templatetags/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/rest_framework/templatetags/rest_framework.py b/awx/lib/site-packages/rest_framework/templatetags/rest_framework.py new file mode 100644 index 0000000000..e9c1cdd545 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/templatetags/rest_framework.py @@ -0,0 +1,273 @@ +from __future__ import unicode_literals, absolute_import +from django import template +from django.core.urlresolvers import reverse, NoReverseMatch +from django.http import QueryDict +from django.utils.html import escape +from django.utils.safestring import SafeData, mark_safe +from rest_framework.compat import urlparse, force_text, six, smart_urlquote +import re, string + +register = template.Library() + + +# Note we don't use 'load staticfiles', because we need a 1.3 compatible +# version, so instead we include the `static` template tag ourselves. + +# When 1.3 becomes unsupported by REST framework, we can instead start to +# use the {% load staticfiles %} tag, remove the following code, +# and add a dependency that `django.contrib.staticfiles` must be installed. + +# Note: We can't put this into the `compat` module because the compat import +# from rest_framework.compat import ... +# conflicts with this rest_framework template tag module. + +try: # Django 1.5+ + from django.contrib.staticfiles.templatetags.staticfiles import StaticFilesNode + + @register.tag('static') + def do_static(parser, token): + return StaticFilesNode.handle_token(parser, token) + +except ImportError: + try: # Django 1.4 + from django.contrib.staticfiles.storage import staticfiles_storage + + @register.simple_tag + def static(path): + """ + A template tag that returns the URL to a file + using staticfiles' storage backend + """ + return staticfiles_storage.url(path) + + except ImportError: # Django 1.3 + from urlparse import urljoin + from django import template + from django.templatetags.static import PrefixNode + + class StaticNode(template.Node): + def __init__(self, varname=None, path=None): + if path is None: + raise template.TemplateSyntaxError( + "Static template nodes must be given a path to return.") + self.path = path + self.varname = varname + + def url(self, context): + path = self.path.resolve(context) + return self.handle_simple(path) + + def render(self, context): + url = self.url(context) + if self.varname is None: + return url + context[self.varname] = url + return '' + + @classmethod + def handle_simple(cls, path): + return urljoin(PrefixNode.handle_simple("STATIC_URL"), path) + + @classmethod + def handle_token(cls, parser, token): + """ + Class method to parse prefix node and return a Node. + """ + bits = token.split_contents() + + if len(bits) < 2: + raise template.TemplateSyntaxError( + "'%s' takes at least one argument (path to file)" % bits[0]) + + path = parser.compile_filter(bits[1]) + + if len(bits) >= 2 and bits[-2] == 'as': + varname = bits[3] + else: + varname = None + + return cls(varname, path) + + @register.tag('static') + def do_static_13(parser, token): + return StaticNode.handle_token(parser, token) + + +def replace_query_param(url, key, val): + """ + Given a URL and a key/val pair, set or replace an item in the query + parameters of the URL, and return the new URL. 
+ """ + (scheme, netloc, path, query, fragment) = urlparse.urlsplit(url) + query_dict = QueryDict(query).copy() + query_dict[key] = val + query = query_dict.urlencode() + return urlparse.urlunsplit((scheme, netloc, path, query, fragment)) + + +# Regex for adding classes to html snippets +class_re = re.compile(r'(?<=class=["\'])(.*)(?=["\'])') + + +# And the template tags themselves... + +@register.simple_tag +def optional_login(request): + """ + Include a login snippet if REST framework's login view is in the URLconf. + """ + try: + login_url = reverse('rest_framework:login') + except NoReverseMatch: + return '' + + snippet = "Log in" % (login_url, request.path) + return snippet + + +@register.simple_tag +def optional_logout(request): + """ + Include a logout snippet if REST framework's logout view is in the URLconf. + """ + try: + logout_url = reverse('rest_framework:logout') + except NoReverseMatch: + return '' + + snippet = "Log out" % (logout_url, request.path) + return snippet + + +@register.simple_tag +def add_query_param(request, key, val): + """ + Add a query parameter to the current request url, and return the new url. + """ + return replace_query_param(request.get_full_path(), key, val) + + +@register.filter +def add_class(value, css_class): + """ + http://stackoverflow.com/questions/4124220/django-adding-css-classes-when-rendering-form-fields-in-a-template + + Inserts classes into template variables that contain HTML tags, + useful for modifying forms without needing to change the Form objects. + + Usage: + + {{ field.label_tag|add_class:"control-label" }} + + In the case of REST Framework, the filter is used to add Bootstrap-specific + classes to the forms. + """ + html = six.text_type(value) + match = class_re.search(html) + if match: + m = re.search(r'^%s$|^%s\s|\s%s\s|\s%s$' % (css_class, css_class, + css_class, css_class), + match.group(1)) + if not m: + return mark_safe(class_re.sub(match.group(1) + " " + css_class, + html)) + else: + return mark_safe(html.replace('>', ' class="%s">' % css_class, 1)) + return value + + +# Bunch of stuff cloned from urlize +TRAILING_PUNCTUATION = ['.', ',', ':', ';', '.)', '"', "'"] +WRAPPING_PUNCTUATION = [('(', ')'), ('<', '>'), ('[', ']'), ('<', '>'), + ('"', '"'), ("'", "'")] +word_split_re = re.compile(r'(\s+)') +simple_url_re = re.compile(r'^https?://\[?\w', re.IGNORECASE) +simple_url_2_re = re.compile(r'^www\.|^(?!http)\w[^@]+\.(com|edu|gov|int|mil|net|org)$', re.IGNORECASE) +simple_email_re = re.compile(r'^\S+@\S+\.\S+$') + + +@register.filter +def urlize_quoted_links(text, trim_url_limit=None, nofollow=True, autoescape=True): + """ + Converts any URLs in text into clickable links. + + Works on http://, https://, www. links, and also on links ending in one of + the original seven gTLDs (.com, .edu, .gov, .int, .mil, .net, and .org). + Links can have trailing punctuation (periods, commas, close-parens) and + leading punctuation (opening parens) and it'll still do the right thing. + + If trim_url_limit is not None, the URLs in link text longer than this limit + will truncated to trim_url_limit-3 characters and appended with an elipsis. + + If nofollow is True, the URLs in link text will get a rel="nofollow" + attribute. + + If autoescape is True, the link text and URLs will get autoescaped. + """ + trim_url = lambda x, limit=trim_url_limit: limit is not None and (len(x) > limit and ('%s...' 
% x[:max(0, limit - 3)])) or x + safe_input = isinstance(text, SafeData) + words = word_split_re.split(force_text(text)) + for i, word in enumerate(words): + match = None + if '.' in word or '@' in word or ':' in word: + # Deal with punctuation. + lead, middle, trail = '', word, '' + for punctuation in TRAILING_PUNCTUATION: + if middle.endswith(punctuation): + middle = middle[:-len(punctuation)] + trail = punctuation + trail + for opening, closing in WRAPPING_PUNCTUATION: + if middle.startswith(opening): + middle = middle[len(opening):] + lead = lead + opening + # Keep parentheses at the end only if they're balanced. + if (middle.endswith(closing) + and middle.count(closing) == middle.count(opening) + 1): + middle = middle[:-len(closing)] + trail = closing + trail + + # Make URL we want to point to. + url = None + nofollow_attr = ' rel="nofollow"' if nofollow else '' + if simple_url_re.match(middle): + url = smart_urlquote(middle) + elif simple_url_2_re.match(middle): + url = smart_urlquote('http://%s' % middle) + elif not ':' in middle and simple_email_re.match(middle): + local, domain = middle.rsplit('@', 1) + try: + domain = domain.encode('idna').decode('ascii') + except UnicodeError: + continue + url = 'mailto:%s@%s' % (local, domain) + nofollow_attr = '' + + # Make link. + if url: + trimmed = trim_url(middle) + if autoescape and not safe_input: + lead, trail = escape(lead), escape(trail) + url, trimmed = escape(url), escape(trimmed) + middle = '%s' % (url, nofollow_attr, trimmed) + words[i] = mark_safe('%s%s%s' % (lead, middle, trail)) + else: + if safe_input: + words[i] = mark_safe(word) + elif autoescape: + words[i] = escape(word) + elif safe_input: + words[i] = mark_safe(word) + elif autoescape: + words[i] = escape(word) + return ''.join(words) + + +@register.filter +def break_long_headers(header): + """ + Breaks headers longer than 160 characters (~page length) + when possible (are comma separated) + """ + if len(header) > 160 and ',' in header: + header = mark_safe('
' + ',
'.join(header.split(','))) + return header diff --git a/awx/lib/site-packages/rest_framework/tests/__init__.py b/awx/lib/site-packages/rest_framework/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/rest_framework/tests/extras/__init__.py b/awx/lib/site-packages/rest_framework/tests/extras/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/rest_framework/tests/extras/bad_import.py b/awx/lib/site-packages/rest_framework/tests/extras/bad_import.py new file mode 100644 index 0000000000..68263d9474 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/extras/bad_import.py @@ -0,0 +1 @@ +raise ValueError diff --git a/awx/lib/site-packages/rest_framework/tests/models.py b/awx/lib/site-packages/rest_framework/tests/models.py new file mode 100644 index 0000000000..e2d4eacdc9 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/models.py @@ -0,0 +1,169 @@ +from __future__ import unicode_literals +from django.db import models +from django.utils.translation import ugettext_lazy as _ +from rest_framework import serializers + + +def foobar(): + return 'foobar' + + +class CustomField(models.CharField): + + def __init__(self, *args, **kwargs): + kwargs['max_length'] = 12 + super(CustomField, self).__init__(*args, **kwargs) + + +class RESTFrameworkModel(models.Model): + """ + Base for test models that sets app_label, so they play nicely. + """ + class Meta: + app_label = 'tests' + abstract = True + + +class HasPositiveIntegerAsChoice(RESTFrameworkModel): + some_choices = ((1, 'A'), (2, 'B'), (3, 'C')) + some_integer = models.PositiveIntegerField(choices=some_choices) + + +class Anchor(RESTFrameworkModel): + text = models.CharField(max_length=100, default='anchor') + + +class BasicModel(RESTFrameworkModel): + text = models.CharField(max_length=100, verbose_name=_("Text comes here"), help_text=_("Text description.")) + + +class SlugBasedModel(RESTFrameworkModel): + text = models.CharField(max_length=100) + slug = models.SlugField(max_length=32) + + +class DefaultValueModel(RESTFrameworkModel): + text = models.CharField(default='foobar', max_length=100) + extra = models.CharField(blank=True, null=True, max_length=100) + + +class CallableDefaultValueModel(RESTFrameworkModel): + text = models.CharField(default=foobar, max_length=100) + + +class ManyToManyModel(RESTFrameworkModel): + rel = models.ManyToManyField(Anchor) + + +class ReadOnlyManyToManyModel(RESTFrameworkModel): + text = models.CharField(max_length=100, default='anchor') + rel = models.ManyToManyField(Anchor) + + +# Model for regression test for #285 + +class Comment(RESTFrameworkModel): + email = models.EmailField() + content = models.CharField(max_length=200) + created = models.DateTimeField(auto_now_add=True) + + +class ActionItem(RESTFrameworkModel): + title = models.CharField(max_length=200) + done = models.BooleanField(default=False) + info = CustomField(default='---', max_length=12) + + +# Models for reverse relations +class Person(RESTFrameworkModel): + name = models.CharField(max_length=10) + age = models.IntegerField(null=True, blank=True) + + @property + def info(self): + return { + 'name': self.name, + 'age': self.age, + } + + +class BlogPost(RESTFrameworkModel): + title = models.CharField(max_length=100) + writer = models.ForeignKey(Person, null=True, blank=True) + + def get_first_comment(self): + return self.blogpostcomment_set.all()[0] + + +class BlogPostComment(RESTFrameworkModel): + text = models.TextField() + 
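+    # The ForeignKey below keeps Django's default related_name, so BlogPost
+    # gets the 'blogpostcomment_set' reverse accessor that get_first_comment()
+    # above relies on.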
blog_post = models.ForeignKey(BlogPost) + + +class Album(RESTFrameworkModel): + title = models.CharField(max_length=100, unique=True) + + +class Photo(RESTFrameworkModel): + description = models.TextField() + album = models.ForeignKey(Album) + + +# Model for issue #324 +class BlankFieldModel(RESTFrameworkModel): + title = models.CharField(max_length=100, blank=True, null=False) + + +# Model for issue #380 +class OptionalRelationModel(RESTFrameworkModel): + other = models.ForeignKey('OptionalRelationModel', blank=True, null=True) + + +# Model for RegexField +class Book(RESTFrameworkModel): + isbn = models.CharField(max_length=13) + + +# Models for relations tests +# ManyToMany +class ManyToManyTarget(RESTFrameworkModel): + name = models.CharField(max_length=100) + + +class ManyToManySource(RESTFrameworkModel): + name = models.CharField(max_length=100) + targets = models.ManyToManyField(ManyToManyTarget, related_name='sources') + + +# ForeignKey +class ForeignKeyTarget(RESTFrameworkModel): + name = models.CharField(max_length=100) + + +class ForeignKeySource(RESTFrameworkModel): + name = models.CharField(max_length=100) + target = models.ForeignKey(ForeignKeyTarget, related_name='sources') + + +# Nullable ForeignKey +class NullableForeignKeySource(RESTFrameworkModel): + name = models.CharField(max_length=100) + target = models.ForeignKey(ForeignKeyTarget, null=True, blank=True, + related_name='nullable_sources') + + +# OneToOne +class OneToOneTarget(RESTFrameworkModel): + name = models.CharField(max_length=100) + + +class NullableOneToOneSource(RESTFrameworkModel): + name = models.CharField(max_length=100) + target = models.OneToOneField(OneToOneTarget, null=True, blank=True, + related_name='nullable_source') + + +# Serializer used to test BasicModel +class BasicModelSerializer(serializers.ModelSerializer): + class Meta: + model = BasicModel diff --git a/awx/lib/site-packages/rest_framework/tests/test_authentication.py b/awx/lib/site-packages/rest_framework/tests/test_authentication.py new file mode 100644 index 0000000000..d46ac07985 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_authentication.py @@ -0,0 +1,594 @@ +from __future__ import unicode_literals +from django.contrib.auth.models import User +from django.http import HttpResponse +from django.test import Client, TestCase +from django.utils import unittest +from rest_framework import HTTP_HEADER_ENCODING +from rest_framework import exceptions +from rest_framework import permissions +from rest_framework import renderers +from rest_framework.response import Response +from rest_framework import status +from rest_framework.authentication import ( + BaseAuthentication, + TokenAuthentication, + BasicAuthentication, + SessionAuthentication, + OAuthAuthentication, + OAuth2Authentication +) +from rest_framework.authtoken.models import Token +from rest_framework.compat import patterns, url, include +from rest_framework.compat import oauth2_provider, oauth2_provider_models, oauth2_provider_scope +from rest_framework.compat import oauth, oauth_provider +from rest_framework.tests.utils import RequestFactory +from rest_framework.views import APIView +import json +import base64 +import time +import datetime + +factory = RequestFactory() + + +class MockView(APIView): + permission_classes = (permissions.IsAuthenticated,) + + def get(self, request): + return HttpResponse({'a': 1, 'b': 2, 'c': 3}) + + def post(self, request): + return HttpResponse({'a': 1, 'b': 2, 'c': 3}) + + def put(self, request): + return HttpResponse({'a': 1, 
'b': 2, 'c': 3}) + + +urlpatterns = patterns('', + (r'^session/$', MockView.as_view(authentication_classes=[SessionAuthentication])), + (r'^basic/$', MockView.as_view(authentication_classes=[BasicAuthentication])), + (r'^token/$', MockView.as_view(authentication_classes=[TokenAuthentication])), + (r'^auth-token/$', 'rest_framework.authtoken.views.obtain_auth_token'), + (r'^oauth/$', MockView.as_view(authentication_classes=[OAuthAuthentication])), + (r'^oauth-with-scope/$', MockView.as_view(authentication_classes=[OAuthAuthentication], + permission_classes=[permissions.TokenHasReadWriteScope])) +) + +if oauth2_provider is not None: + urlpatterns += patterns('', + url(r'^oauth2/', include('provider.oauth2.urls', namespace='oauth2')), + url(r'^oauth2-test/$', MockView.as_view(authentication_classes=[OAuth2Authentication])), + url(r'^oauth2-with-scope-test/$', MockView.as_view(authentication_classes=[OAuth2Authentication], + permission_classes=[permissions.TokenHasReadWriteScope])), + ) + + +class BasicAuthTests(TestCase): + """Basic authentication""" + urls = 'rest_framework.tests.test_authentication' + + def setUp(self): + self.csrf_client = Client(enforce_csrf_checks=True) + self.username = 'john' + self.email = 'lennon@thebeatles.com' + self.password = 'password' + self.user = User.objects.create_user(self.username, self.email, self.password) + + def test_post_form_passing_basic_auth(self): + """Ensure POSTing json over basic auth with correct credentials passes and does not require CSRF""" + credentials = ('%s:%s' % (self.username, self.password)) + base64_credentials = base64.b64encode(credentials.encode(HTTP_HEADER_ENCODING)).decode(HTTP_HEADER_ENCODING) + auth = 'Basic %s' % base64_credentials + response = self.csrf_client.post('/basic/', {'example': 'example'}, HTTP_AUTHORIZATION=auth) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_post_json_passing_basic_auth(self): + """Ensure POSTing form over basic auth with correct credentials passes and does not require CSRF""" + credentials = ('%s:%s' % (self.username, self.password)) + base64_credentials = base64.b64encode(credentials.encode(HTTP_HEADER_ENCODING)).decode(HTTP_HEADER_ENCODING) + auth = 'Basic %s' % base64_credentials + response = self.csrf_client.post('/basic/', json.dumps({'example': 'example'}), 'application/json', HTTP_AUTHORIZATION=auth) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_post_form_failing_basic_auth(self): + """Ensure POSTing form over basic auth without correct credentials fails""" + response = self.csrf_client.post('/basic/', {'example': 'example'}) + self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) + + def test_post_json_failing_basic_auth(self): + """Ensure POSTing json over basic auth without correct credentials fails""" + response = self.csrf_client.post('/basic/', json.dumps({'example': 'example'}), 'application/json') + self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) + self.assertEqual(response['WWW-Authenticate'], 'Basic realm="api"') + + +class SessionAuthTests(TestCase): + """User session authentication""" + urls = 'rest_framework.tests.test_authentication' + + def setUp(self): + self.csrf_client = Client(enforce_csrf_checks=True) + self.non_csrf_client = Client(enforce_csrf_checks=False) + self.username = 'john' + self.email = 'lennon@thebeatles.com' + self.password = 'password' + self.user = User.objects.create_user(self.username, self.email, self.password) + + def tearDown(self): + 
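+        # Explicitly log the CSRF-enforcing client out so session state from
+        # one test can never leak into the next.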
self.csrf_client.logout() + + def test_post_form_session_auth_failing_csrf(self): + """ + Ensure POSTing form over session authentication without CSRF token fails. + """ + self.csrf_client.login(username=self.username, password=self.password) + response = self.csrf_client.post('/session/', {'example': 'example'}) + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + + def test_post_form_session_auth_passing(self): + """ + Ensure POSTing form over session authentication with logged in user and CSRF token passes. + """ + self.non_csrf_client.login(username=self.username, password=self.password) + response = self.non_csrf_client.post('/session/', {'example': 'example'}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_put_form_session_auth_passing(self): + """ + Ensure PUTting form over session authentication with logged in user and CSRF token passes. + """ + self.non_csrf_client.login(username=self.username, password=self.password) + response = self.non_csrf_client.put('/session/', {'example': 'example'}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_post_form_session_auth_failing(self): + """ + Ensure POSTing form over session authentication without logged in user fails. + """ + response = self.csrf_client.post('/session/', {'example': 'example'}) + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + + +class TokenAuthTests(TestCase): + """Token authentication""" + urls = 'rest_framework.tests.test_authentication' + + def setUp(self): + self.csrf_client = Client(enforce_csrf_checks=True) + self.username = 'john' + self.email = 'lennon@thebeatles.com' + self.password = 'password' + self.user = User.objects.create_user(self.username, self.email, self.password) + + self.key = 'abcd1234' + self.token = Token.objects.create(key=self.key, user=self.user) + + def test_post_form_passing_token_auth(self): + """Ensure POSTing json over token auth with correct credentials passes and does not require CSRF""" + auth = 'Token ' + self.key + response = self.csrf_client.post('/token/', {'example': 'example'}, HTTP_AUTHORIZATION=auth) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_post_json_passing_token_auth(self): + """Ensure POSTing form over token auth with correct credentials passes and does not require CSRF""" + auth = "Token " + self.key + response = self.csrf_client.post('/token/', json.dumps({'example': 'example'}), 'application/json', HTTP_AUTHORIZATION=auth) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_post_form_failing_token_auth(self): + """Ensure POSTing form over token auth without correct credentials fails""" + response = self.csrf_client.post('/token/', {'example': 'example'}) + self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) + + def test_post_json_failing_token_auth(self): + """Ensure POSTing json over token auth without correct credentials fails""" + response = self.csrf_client.post('/token/', json.dumps({'example': 'example'}), 'application/json') + self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) + + def test_token_has_auto_assigned_key_if_none_provided(self): + """Ensure creating a token with no key will auto-assign a key""" + self.token.delete() + token = Token.objects.create(user=self.user) + self.assertTrue(bool(token.key)) + + def test_token_login_json(self): + """Ensure token login view using JSON POST works.""" + client = Client(enforce_csrf_checks=True) + response = client.post('/auth-token/', + 
json.dumps({'username': self.username, 'password': self.password}), 'application/json') + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(json.loads(response.content.decode('ascii'))['token'], self.key) + + def test_token_login_json_bad_creds(self): + """Ensure token login view using JSON POST fails if bad credentials are used.""" + client = Client(enforce_csrf_checks=True) + response = client.post('/auth-token/', + json.dumps({'username': self.username, 'password': "badpass"}), 'application/json') + self.assertEqual(response.status_code, 400) + + def test_token_login_json_missing_fields(self): + """Ensure token login view using JSON POST fails if missing fields.""" + client = Client(enforce_csrf_checks=True) + response = client.post('/auth-token/', + json.dumps({'username': self.username}), 'application/json') + self.assertEqual(response.status_code, 400) + + def test_token_login_form(self): + """Ensure token login view using form POST works.""" + client = Client(enforce_csrf_checks=True) + response = client.post('/auth-token/', + {'username': self.username, 'password': self.password}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(json.loads(response.content.decode('ascii'))['token'], self.key) + + +class IncorrectCredentialsTests(TestCase): + def test_incorrect_credentials(self): + """ + If a request contains bad authentication credentials, then + authentication should run and error, even if no permissions + are set on the view. + """ + class IncorrectCredentialsAuth(BaseAuthentication): + def authenticate(self, request): + raise exceptions.AuthenticationFailed('Bad credentials') + + request = factory.get('/') + view = MockView.as_view( + authentication_classes=(IncorrectCredentialsAuth,), + permission_classes=() + ) + response = view(request) + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + self.assertEqual(response.data, {'detail': 'Bad credentials'}) + + +class OAuthTests(TestCase): + """OAuth 1.0a authentication""" + urls = 'rest_framework.tests.test_authentication' + + def setUp(self): + # these imports are here because oauth is optional and hiding them in try..except block or compat + # could obscure problems if something breaks + from oauth_provider.models import Consumer, Resource + from oauth_provider.models import Token as OAuthToken + from oauth_provider import consts + + self.consts = consts + + self.csrf_client = Client(enforce_csrf_checks=True) + self.username = 'john' + self.email = 'lennon@thebeatles.com' + self.password = 'password' + self.user = User.objects.create_user(self.username, self.email, self.password) + + self.CONSUMER_KEY = 'consumer_key' + self.CONSUMER_SECRET = 'consumer_secret' + self.TOKEN_KEY = "token_key" + self.TOKEN_SECRET = "token_secret" + + self.consumer = Consumer.objects.create(key=self.CONSUMER_KEY, secret=self.CONSUMER_SECRET, + name='example', user=self.user, status=self.consts.ACCEPTED) + + self.resource = Resource.objects.create(name="resource name", url="api/") + self.token = OAuthToken.objects.create(user=self.user, consumer=self.consumer, resource=self.resource, + token_type=OAuthToken.ACCESS, key=self.TOKEN_KEY, secret=self.TOKEN_SECRET, is_approved=True + ) + + def _create_authorization_header(self): + params = { + 'oauth_version': "1.0", + 'oauth_nonce': oauth.generate_nonce(), + 'oauth_timestamp': int(time.time()), + 'oauth_token': self.token.key, + 'oauth_consumer_key': self.consumer.key + } + + req = oauth.Request(method="GET", 
url="http://example.com", parameters=params) + + signature_method = oauth.SignatureMethod_PLAINTEXT() + req.sign_request(signature_method, self.consumer, self.token) + + return req.to_header()["Authorization"] + + def _create_authorization_url_parameters(self): + params = { + 'oauth_version': "1.0", + 'oauth_nonce': oauth.generate_nonce(), + 'oauth_timestamp': int(time.time()), + 'oauth_token': self.token.key, + 'oauth_consumer_key': self.consumer.key + } + + req = oauth.Request(method="GET", url="http://example.com", parameters=params) + + signature_method = oauth.SignatureMethod_PLAINTEXT() + req.sign_request(signature_method, self.consumer, self.token) + return dict(req) + + @unittest.skipUnless(oauth_provider, 'django-oauth-plus not installed') + @unittest.skipUnless(oauth, 'oauth2 not installed') + def test_post_form_passing_oauth(self): + """Ensure POSTing form over OAuth with correct credentials passes and does not require CSRF""" + auth = self._create_authorization_header() + response = self.csrf_client.post('/oauth/', {'example': 'example'}, HTTP_AUTHORIZATION=auth) + self.assertEqual(response.status_code, 200) + + @unittest.skipUnless(oauth_provider, 'django-oauth-plus not installed') + @unittest.skipUnless(oauth, 'oauth2 not installed') + def test_post_form_repeated_nonce_failing_oauth(self): + """Ensure POSTing form over OAuth with repeated auth (same nonces and timestamp) credentials fails""" + auth = self._create_authorization_header() + response = self.csrf_client.post('/oauth/', {'example': 'example'}, HTTP_AUTHORIZATION=auth) + self.assertEqual(response.status_code, 200) + + # simulate reply attack auth header containes already used (nonce, timestamp) pair + response = self.csrf_client.post('/oauth/', {'example': 'example'}, HTTP_AUTHORIZATION=auth) + self.assertIn(response.status_code, (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)) + + @unittest.skipUnless(oauth_provider, 'django-oauth-plus not installed') + @unittest.skipUnless(oauth, 'oauth2 not installed') + def test_post_form_token_removed_failing_oauth(self): + """Ensure POSTing when there is no OAuth access token in db fails""" + self.token.delete() + auth = self._create_authorization_header() + response = self.csrf_client.post('/oauth/', {'example': 'example'}, HTTP_AUTHORIZATION=auth) + self.assertIn(response.status_code, (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)) + + @unittest.skipUnless(oauth_provider, 'django-oauth-plus not installed') + @unittest.skipUnless(oauth, 'oauth2 not installed') + def test_post_form_consumer_status_not_accepted_failing_oauth(self): + """Ensure POSTing when consumer status is anything other than ACCEPTED fails""" + for consumer_status in (self.consts.CANCELED, self.consts.PENDING, self.consts.REJECTED): + self.consumer.status = consumer_status + self.consumer.save() + + auth = self._create_authorization_header() + response = self.csrf_client.post('/oauth/', {'example': 'example'}, HTTP_AUTHORIZATION=auth) + self.assertIn(response.status_code, (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)) + + @unittest.skipUnless(oauth_provider, 'django-oauth-plus not installed') + @unittest.skipUnless(oauth, 'oauth2 not installed') + def test_post_form_with_request_token_failing_oauth(self): + """Ensure POSTing with unauthorized request token instead of access token fails""" + self.token.token_type = self.token.REQUEST + self.token.save() + + auth = self._create_authorization_header() + response = self.csrf_client.post('/oauth/', {'example': 'example'}, 
HTTP_AUTHORIZATION=auth) + self.assertIn(response.status_code, (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)) + + @unittest.skipUnless(oauth_provider, 'django-oauth-plus not installed') + @unittest.skipUnless(oauth, 'oauth2 not installed') + def test_post_form_with_urlencoded_parameters(self): + """Ensure POSTing with x-www-form-urlencoded auth parameters passes""" + params = self._create_authorization_url_parameters() + response = self.csrf_client.post('/oauth/', params) + self.assertEqual(response.status_code, 200) + + @unittest.skipUnless(oauth_provider, 'django-oauth-plus not installed') + @unittest.skipUnless(oauth, 'oauth2 not installed') + def test_get_form_with_url_parameters(self): + """Ensure GETing with auth in url parameters passes""" + params = self._create_authorization_url_parameters() + response = self.csrf_client.get('/oauth/', params) + self.assertEqual(response.status_code, 200) + + @unittest.skipUnless(oauth_provider, 'django-oauth-plus not installed') + @unittest.skipUnless(oauth, 'oauth2 not installed') + def test_post_hmac_sha1_signature_passes(self): + """Ensure POSTing using HMAC_SHA1 signature method passes""" + params = { + 'oauth_version': "1.0", + 'oauth_nonce': oauth.generate_nonce(), + 'oauth_timestamp': int(time.time()), + 'oauth_token': self.token.key, + 'oauth_consumer_key': self.consumer.key + } + + req = oauth.Request(method="POST", url="http://testserver/oauth/", parameters=params) + + signature_method = oauth.SignatureMethod_HMAC_SHA1() + req.sign_request(signature_method, self.consumer, self.token) + auth = req.to_header()["Authorization"] + + response = self.csrf_client.post('/oauth/', HTTP_AUTHORIZATION=auth) + self.assertEqual(response.status_code, 200) + + @unittest.skipUnless(oauth_provider, 'django-oauth-plus not installed') + @unittest.skipUnless(oauth, 'oauth2 not installed') + def test_get_form_with_readonly_resource_passing_auth(self): + """Ensure POSTing with a readonly resource instead of a write scope fails""" + read_only_access_token = self.token + read_only_access_token.resource.is_readonly = True + read_only_access_token.resource.save() + params = self._create_authorization_url_parameters() + response = self.csrf_client.get('/oauth-with-scope/', params) + self.assertEqual(response.status_code, 200) + + @unittest.skipUnless(oauth_provider, 'django-oauth-plus not installed') + @unittest.skipUnless(oauth, 'oauth2 not installed') + def test_post_form_with_readonly_resource_failing_auth(self): + """Ensure POSTing with a readonly resource instead of a write scope fails""" + read_only_access_token = self.token + read_only_access_token.resource.is_readonly = True + read_only_access_token.resource.save() + params = self._create_authorization_url_parameters() + response = self.csrf_client.post('/oauth-with-scope/', params) + self.assertIn(response.status_code, (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)) + + @unittest.skipUnless(oauth_provider, 'django-oauth-plus not installed') + @unittest.skipUnless(oauth, 'oauth2 not installed') + def test_post_form_with_write_resource_passing_auth(self): + """Ensure POSTing with a write resource succeed""" + read_write_access_token = self.token + read_write_access_token.resource.is_readonly = False + read_write_access_token.resource.save() + params = self._create_authorization_url_parameters() + response = self.csrf_client.post('/oauth-with-scope/', params) + self.assertEqual(response.status_code, 200) + + +class OAuth2Tests(TestCase): + """OAuth 2.0 authentication""" + urls = 
'rest_framework.tests.test_authentication' + + def setUp(self): + self.csrf_client = Client(enforce_csrf_checks=True) + self.username = 'john' + self.email = 'lennon@thebeatles.com' + self.password = 'password' + self.user = User.objects.create_user(self.username, self.email, self.password) + + self.CLIENT_ID = 'client_key' + self.CLIENT_SECRET = 'client_secret' + self.ACCESS_TOKEN = "access_token" + self.REFRESH_TOKEN = "refresh_token" + + self.oauth2_client = oauth2_provider_models.Client.objects.create( + client_id=self.CLIENT_ID, + client_secret=self.CLIENT_SECRET, + redirect_uri='', + client_type=0, + name='example', + user=None, + ) + + self.access_token = oauth2_provider_models.AccessToken.objects.create( + token=self.ACCESS_TOKEN, + client=self.oauth2_client, + user=self.user, + ) + self.refresh_token = oauth2_provider_models.RefreshToken.objects.create( + user=self.user, + access_token=self.access_token, + client=self.oauth2_client + ) + + def _create_authorization_header(self, token=None): + return "Bearer {0}".format(token or self.access_token.token) + + @unittest.skipUnless(oauth2_provider, 'django-oauth2-provider not installed') + def test_get_form_with_wrong_authorization_header_token_type_failing(self): + """Ensure that a wrong token type lead to the correct HTTP error status code""" + auth = "Wrong token-type-obsviously" + response = self.csrf_client.get('/oauth2-test/', {}, HTTP_AUTHORIZATION=auth) + self.assertEqual(response.status_code, 401) + response = self.csrf_client.get('/oauth2-test/', HTTP_AUTHORIZATION=auth) + self.assertEqual(response.status_code, 401) + + @unittest.skipUnless(oauth2_provider, 'django-oauth2-provider not installed') + def test_get_form_with_wrong_authorization_header_token_format_failing(self): + """Ensure that a wrong token format lead to the correct HTTP error status code""" + auth = "Bearer wrong token format" + response = self.csrf_client.get('/oauth2-test/', {}, HTTP_AUTHORIZATION=auth) + self.assertEqual(response.status_code, 401) + response = self.csrf_client.get('/oauth2-test/', HTTP_AUTHORIZATION=auth) + self.assertEqual(response.status_code, 401) + + @unittest.skipUnless(oauth2_provider, 'django-oauth2-provider not installed') + def test_get_form_with_wrong_authorization_header_token_failing(self): + """Ensure that a wrong token lead to the correct HTTP error status code""" + auth = "Bearer wrong-token" + response = self.csrf_client.get('/oauth2-test/', {}, HTTP_AUTHORIZATION=auth) + self.assertEqual(response.status_code, 401) + response = self.csrf_client.get('/oauth2-test/', HTTP_AUTHORIZATION=auth) + self.assertEqual(response.status_code, 401) + + @unittest.skipUnless(oauth2_provider, 'django-oauth2-provider not installed') + def test_get_form_passing_auth(self): + """Ensure GETing form over OAuth with correct client credentials succeed""" + auth = self._create_authorization_header() + response = self.csrf_client.get('/oauth2-test/', HTTP_AUTHORIZATION=auth) + self.assertEqual(response.status_code, 200) + + @unittest.skipUnless(oauth2_provider, 'django-oauth2-provider not installed') + def test_post_form_passing_auth(self): + """Ensure POSTing form over OAuth with correct credentials passes and does not require CSRF""" + auth = self._create_authorization_header() + response = self.csrf_client.post('/oauth2-test/', HTTP_AUTHORIZATION=auth) + self.assertEqual(response.status_code, 200) + + @unittest.skipUnless(oauth2_provider, 'django-oauth2-provider not installed') + def test_post_form_token_removed_failing_auth(self): + """Ensure 
POSTing when there is no OAuth access token in db fails""" + self.access_token.delete() + auth = self._create_authorization_header() + response = self.csrf_client.post('/oauth2-test/', HTTP_AUTHORIZATION=auth) + self.assertIn(response.status_code, (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)) + + @unittest.skipUnless(oauth2_provider, 'django-oauth2-provider not installed') + def test_post_form_with_refresh_token_failing_auth(self): + """Ensure POSTing with refresh token instead of access token fails""" + auth = self._create_authorization_header(token=self.refresh_token.token) + response = self.csrf_client.post('/oauth2-test/', HTTP_AUTHORIZATION=auth) + self.assertIn(response.status_code, (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)) + + @unittest.skipUnless(oauth2_provider, 'django-oauth2-provider not installed') + def test_post_form_with_expired_access_token_failing_auth(self): + """Ensure POSTing with expired access token fails with an 'Invalid token' error""" + self.access_token.expires = datetime.datetime.now() - datetime.timedelta(seconds=10) # 10 seconds late + self.access_token.save() + auth = self._create_authorization_header() + response = self.csrf_client.post('/oauth2-test/', HTTP_AUTHORIZATION=auth) + self.assertIn(response.status_code, (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)) + self.assertIn('Invalid token', response.content) + + @unittest.skipUnless(oauth2_provider, 'django-oauth2-provider not installed') + def test_post_form_with_invalid_scope_failing_auth(self): + """Ensure POSTing with a readonly scope instead of a write scope fails""" + read_only_access_token = self.access_token + read_only_access_token.scope = oauth2_provider_scope.SCOPE_NAME_DICT['read'] + read_only_access_token.save() + auth = self._create_authorization_header(token=read_only_access_token.token) + response = self.csrf_client.get('/oauth2-with-scope-test/', HTTP_AUTHORIZATION=auth) + self.assertEqual(response.status_code, 200) + response = self.csrf_client.post('/oauth2-with-scope-test/', HTTP_AUTHORIZATION=auth) + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + + @unittest.skipUnless(oauth2_provider, 'django-oauth2-provider not installed') + def test_post_form_with_valid_scope_passing_auth(self): + """Ensure POSTing with a write scope succeed""" + read_write_access_token = self.access_token + read_write_access_token.scope = oauth2_provider_scope.SCOPE_NAME_DICT['write'] + read_write_access_token.save() + auth = self._create_authorization_header(token=read_write_access_token.token) + response = self.csrf_client.post('/oauth2-with-scope-test/', HTTP_AUTHORIZATION=auth) + self.assertEqual(response.status_code, 200) + + +class FailingAuthAccessedInRenderer(TestCase): + def setUp(self): + class AuthAccessingRenderer(renderers.BaseRenderer): + media_type = 'text/plain' + format = 'txt' + + def render(self, data, media_type=None, renderer_context=None): + request = renderer_context['request'] + if request.user.is_authenticated(): + return b'authenticated' + return b'not authenticated' + + class FailingAuth(BaseAuthentication): + def authenticate(self, request): + raise exceptions.AuthenticationFailed('authentication failed') + + class ExampleView(APIView): + authentication_classes = (FailingAuth,) + renderer_classes = (AuthAccessingRenderer,) + + def get(self, request): + return Response({'foo': 'bar'}) + + self.view = ExampleView.as_view() + + def test_failing_auth_accessed_in_renderer(self): + """ + When authentication fails the renderer 
should still be able to access + `request.user` without raising an exception. Particularly relevant + to HTML responses that might reasonably access `request.user`. + """ + request = factory.get('/') + response = self.view(request) + content = response.render().content + self.assertEqual(content, b'not authenticated') diff --git a/awx/lib/site-packages/rest_framework/tests/test_breadcrumbs.py b/awx/lib/site-packages/rest_framework/tests/test_breadcrumbs.py new file mode 100644 index 0000000000..41ddf2ceac --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_breadcrumbs.py @@ -0,0 +1,73 @@ +from __future__ import unicode_literals +from django.test import TestCase +from rest_framework.compat import patterns, url +from rest_framework.utils.breadcrumbs import get_breadcrumbs +from rest_framework.views import APIView + + +class Root(APIView): + pass + + +class ResourceRoot(APIView): + pass + + +class ResourceInstance(APIView): + pass + + +class NestedResourceRoot(APIView): + pass + + +class NestedResourceInstance(APIView): + pass + +urlpatterns = patterns('', + url(r'^$', Root.as_view()), + url(r'^resource/$', ResourceRoot.as_view()), + url(r'^resource/(?P[0-9]+)$', ResourceInstance.as_view()), + url(r'^resource/(?P[0-9]+)/$', NestedResourceRoot.as_view()), + url(r'^resource/(?P[0-9]+)/(?P[A-Za-z]+)$', NestedResourceInstance.as_view()), +) + + +class BreadcrumbTests(TestCase): + """Tests the breadcrumb functionality used by the HTML renderer.""" + + urls = 'rest_framework.tests.test_breadcrumbs' + + def test_root_breadcrumbs(self): + url = '/' + self.assertEqual(get_breadcrumbs(url), [('Root', '/')]) + + def test_resource_root_breadcrumbs(self): + url = '/resource/' + self.assertEqual(get_breadcrumbs(url), [('Root', '/'), + ('Resource Root', '/resource/')]) + + def test_resource_instance_breadcrumbs(self): + url = '/resource/123' + self.assertEqual(get_breadcrumbs(url), [('Root', '/'), + ('Resource Root', '/resource/'), + ('Resource Instance', '/resource/123')]) + + def test_nested_resource_breadcrumbs(self): + url = '/resource/123/' + self.assertEqual(get_breadcrumbs(url), [('Root', '/'), + ('Resource Root', '/resource/'), + ('Resource Instance', '/resource/123'), + ('Nested Resource Root', '/resource/123/')]) + + def test_nested_resource_instance_breadcrumbs(self): + url = '/resource/123/abc' + self.assertEqual(get_breadcrumbs(url), [('Root', '/'), + ('Resource Root', '/resource/'), + ('Resource Instance', '/resource/123'), + ('Nested Resource Root', '/resource/123/'), + ('Nested Resource Instance', '/resource/123/abc')]) + + def test_broken_url_breadcrumbs_handled_gracefully(self): + url = '/foobar' + self.assertEqual(get_breadcrumbs(url), [('Root', '/')]) diff --git a/awx/lib/site-packages/rest_framework/tests/test_decorators.py b/awx/lib/site-packages/rest_framework/tests/test_decorators.py new file mode 100644 index 0000000000..1016fed3ff --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_decorators.py @@ -0,0 +1,158 @@ +from __future__ import unicode_literals +from django.test import TestCase +from rest_framework import status +from rest_framework.response import Response +from rest_framework.renderers import JSONRenderer +from rest_framework.parsers import JSONParser +from rest_framework.authentication import BasicAuthentication +from rest_framework.throttling import UserRateThrottle +from rest_framework.permissions import IsAuthenticated +from rest_framework.views import APIView +from rest_framework.decorators import ( + api_view, + renderer_classes, + 
parser_classes, + authentication_classes, + throttle_classes, + permission_classes, +) + +from rest_framework.tests.utils import RequestFactory + + +class DecoratorTestCase(TestCase): + + def setUp(self): + self.factory = RequestFactory() + + def _finalize_response(self, request, response, *args, **kwargs): + response.request = request + return APIView.finalize_response(self, request, response, *args, **kwargs) + + def test_api_view_incorrect(self): + """ + If @api_view is not applied correct, we should raise an assertion. + """ + + @api_view + def view(request): + return Response() + + request = self.factory.get('/') + self.assertRaises(AssertionError, view, request) + + def test_api_view_incorrect_arguments(self): + """ + If @api_view is missing arguments, we should raise an assertion. + """ + + with self.assertRaises(AssertionError): + @api_view('GET') + def view(request): + return Response() + + def test_calling_method(self): + + @api_view(['GET']) + def view(request): + return Response({}) + + request = self.factory.get('/') + response = view(request) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + request = self.factory.post('/') + response = view(request) + self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED) + + def test_calling_put_method(self): + + @api_view(['GET', 'PUT']) + def view(request): + return Response({}) + + request = self.factory.put('/') + response = view(request) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + request = self.factory.post('/') + response = view(request) + self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED) + + def test_calling_patch_method(self): + + @api_view(['GET', 'PATCH']) + def view(request): + return Response({}) + + request = self.factory.patch('/') + response = view(request) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + request = self.factory.post('/') + response = view(request) + self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED) + + def test_renderer_classes(self): + + @api_view(['GET']) + @renderer_classes([JSONRenderer]) + def view(request): + return Response({}) + + request = self.factory.get('/') + response = view(request) + self.assertTrue(isinstance(response.accepted_renderer, JSONRenderer)) + + def test_parser_classes(self): + + @api_view(['GET']) + @parser_classes([JSONParser]) + def view(request): + self.assertEqual(len(request.parsers), 1) + self.assertTrue(isinstance(request.parsers[0], + JSONParser)) + return Response({}) + + request = self.factory.get('/') + view(request) + + def test_authentication_classes(self): + + @api_view(['GET']) + @authentication_classes([BasicAuthentication]) + def view(request): + self.assertEqual(len(request.authenticators), 1) + self.assertTrue(isinstance(request.authenticators[0], + BasicAuthentication)) + return Response({}) + + request = self.factory.get('/') + view(request) + + def test_permission_classes(self): + + @api_view(['GET']) + @permission_classes([IsAuthenticated]) + def view(request): + return Response({}) + + request = self.factory.get('/') + response = view(request) + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + + def test_throttle_classes(self): + class OncePerDayUserThrottle(UserRateThrottle): + rate = '1/day' + + @api_view(['GET']) + @throttle_classes([OncePerDayUserThrottle]) + def view(request): + return Response({}) + + request = self.factory.get('/') + response = view(request) + self.assertEqual(response.status_code, 
status.HTTP_200_OK) + + response = view(request) + self.assertEqual(response.status_code, status.HTTP_429_TOO_MANY_REQUESTS) diff --git a/awx/lib/site-packages/rest_framework/tests/test_description.py b/awx/lib/site-packages/rest_framework/tests/test_description.py new file mode 100644 index 0000000000..52c1a34c10 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_description.py @@ -0,0 +1,108 @@ +# -- coding: utf-8 -- + +from __future__ import unicode_literals +from django.test import TestCase +from rest_framework.views import APIView +from rest_framework.compat import apply_markdown +from rest_framework.utils.formatting import get_view_name, get_view_description + +# We check that docstrings get nicely un-indented. +DESCRIPTION = """an example docstring +==================== + +* list +* list + +another header +-------------- + + code block + +indented + +# hash style header #""" + +# If markdown is installed we also test it's working +# (and that our wrapped forces '=' to h2 and '-' to h3) + +# We support markdown < 2.1 and markdown >= 2.1 +MARKED_DOWN_lt_21 = """
<h2>an example docstring</h2>
+<ul>
+<li>list</li>
+<li>list</li>
+</ul>
+<h3>another header</h3>
+<pre><code>code block
+</code></pre>
+<p>indented</p>
+<h2 id="hash_style_header">hash style header</h2>"""
+
+MARKED_DOWN_gte_21 = """<h2 id="an-example-docstring">an example docstring</h2>
+<ul>
+<li>list</li>
+<li>list</li>
+</ul>
+<h3 id="another-header">another header</h3>
+<pre><code>code block
+</code></pre>
+<p>indented</p>
+<h2 id="hash-style-header">hash style header</h2>
""" + + +class TestViewNamesAndDescriptions(TestCase): + def test_view_name_uses_class_name(self): + """ + Ensure view names are based on the class name. + """ + class MockView(APIView): + pass + self.assertEqual(get_view_name(MockView), 'Mock') + + def test_view_description_uses_docstring(self): + """Ensure view descriptions are based on the docstring.""" + class MockView(APIView): + """an example docstring + ==================== + + * list + * list + + another header + -------------- + + code block + + indented + + # hash style header #""" + + self.assertEqual(get_view_description(MockView), DESCRIPTION) + + def test_view_description_supports_unicode(self): + """ + Unicode in docstrings should be respected. + """ + + class MockView(APIView): + """Проверка""" + pass + + self.assertEqual(get_view_description(MockView), "Проверка") + + def test_view_description_can_be_empty(self): + """ + Ensure that if a view has no docstring, + then it's description is the empty string. + """ + class MockView(APIView): + pass + self.assertEqual(get_view_description(MockView), '') + + def test_markdown(self): + """ + Ensure markdown to HTML works as expected. + """ + if apply_markdown: + gte_21_match = apply_markdown(DESCRIPTION) == MARKED_DOWN_gte_21 + lt_21_match = apply_markdown(DESCRIPTION) == MARKED_DOWN_lt_21 + self.assertTrue(gte_21_match or lt_21_match) diff --git a/awx/lib/site-packages/rest_framework/tests/test_fields.py b/awx/lib/site-packages/rest_framework/tests/test_fields.py new file mode 100644 index 0000000000..de3710011c --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_fields.py @@ -0,0 +1,868 @@ +""" +General serializer field tests. +""" +from __future__ import unicode_literals + +import datetime +from decimal import Decimal +from uuid import uuid4 +from django.core import validators +from django.db import models +from django.test import TestCase +from django.utils.datastructures import SortedDict +from rest_framework import serializers +from rest_framework.tests.models import RESTFrameworkModel + + +class TimestampedModel(models.Model): + added = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + + +class CharPrimaryKeyModel(models.Model): + id = models.CharField(max_length=20, primary_key=True) + + +class TimestampedModelSerializer(serializers.ModelSerializer): + class Meta: + model = TimestampedModel + + +class CharPrimaryKeyModelSerializer(serializers.ModelSerializer): + class Meta: + model = CharPrimaryKeyModel + + +class TimeFieldModel(models.Model): + clock = models.TimeField() + + +class TimeFieldModelSerializer(serializers.ModelSerializer): + class Meta: + model = TimeFieldModel + + +class BasicFieldTests(TestCase): + def test_auto_now_fields_read_only(self): + """ + auto_now and auto_now_add fields should be read_only by default. + """ + serializer = TimestampedModelSerializer() + self.assertEqual(serializer.fields['added'].read_only, True) + + def test_auto_pk_fields_read_only(self): + """ + AutoField fields should be read_only by default. + """ + serializer = TimestampedModelSerializer() + self.assertEqual(serializer.fields['id'].read_only, True) + + def test_non_auto_pk_fields_not_read_only(self): + """ + PK fields other than AutoField fields should not be read_only by default. + """ + serializer = CharPrimaryKeyModelSerializer() + self.assertEqual(serializer.fields['id'].read_only, False) + + def test_dict_field_ordering(self): + """ + Field should preserve dictionary ordering, if it exists. 
+ See: https://github.com/tomchristie/django-rest-framework/issues/832 + """ + ret = SortedDict() + ret['c'] = 1 + ret['b'] = 1 + ret['a'] = 1 + ret['z'] = 1 + field = serializers.Field() + keys = list(field.to_native(ret).keys()) + self.assertEqual(keys, ['c', 'b', 'a', 'z']) + + +class DateFieldTest(TestCase): + """ + Tests for the DateFieldTest from_native() and to_native() behavior + """ + + def test_from_native_string(self): + """ + Make sure from_native() accepts default iso input formats. + """ + f = serializers.DateField() + result_1 = f.from_native('1984-07-31') + + self.assertEqual(datetime.date(1984, 7, 31), result_1) + + def test_from_native_datetime_date(self): + """ + Make sure from_native() accepts a datetime.date instance. + """ + f = serializers.DateField() + result_1 = f.from_native(datetime.date(1984, 7, 31)) + + self.assertEqual(result_1, datetime.date(1984, 7, 31)) + + def test_from_native_custom_format(self): + """ + Make sure from_native() accepts custom input formats. + """ + f = serializers.DateField(input_formats=['%Y -- %d']) + result = f.from_native('1984 -- 31') + + self.assertEqual(datetime.date(1984, 1, 31), result) + + def test_from_native_invalid_default_on_custom_format(self): + """ + Make sure from_native() don't accept default formats if custom format is preset + """ + f = serializers.DateField(input_formats=['%Y -- %d']) + + try: + f.from_native('1984-07-31') + except validators.ValidationError as e: + self.assertEqual(e.messages, ["Date has wrong format. Use one of these formats instead: YYYY -- DD"]) + else: + self.fail("ValidationError was not properly raised") + + def test_from_native_empty(self): + """ + Make sure from_native() returns None on empty param. + """ + f = serializers.DateField() + result = f.from_native('') + + self.assertEqual(result, None) + + def test_from_native_none(self): + """ + Make sure from_native() returns None on None param. + """ + f = serializers.DateField() + result = f.from_native(None) + + self.assertEqual(result, None) + + def test_from_native_invalid_date(self): + """ + Make sure from_native() raises a ValidationError on passing an invalid date. + """ + f = serializers.DateField() + + try: + f.from_native('1984-13-31') + except validators.ValidationError as e: + self.assertEqual(e.messages, ["Date has wrong format. Use one of these formats instead: YYYY[-MM[-DD]]"]) + else: + self.fail("ValidationError was not properly raised") + + def test_from_native_invalid_format(self): + """ + Make sure from_native() raises a ValidationError on passing an invalid format. + """ + f = serializers.DateField() + + try: + f.from_native('1984 -- 31') + except validators.ValidationError as e: + self.assertEqual(e.messages, ["Date has wrong format. Use one of these formats instead: YYYY[-MM[-DD]]"]) + else: + self.fail("ValidationError was not properly raised") + + def test_to_native(self): + """ + Make sure to_native() returns datetime as default. + """ + f = serializers.DateField() + + result_1 = f.to_native(datetime.date(1984, 7, 31)) + + self.assertEqual(datetime.date(1984, 7, 31), result_1) + + def test_to_native_iso(self): + """ + Make sure to_native() with 'iso-8601' returns iso formated date. + """ + f = serializers.DateField(format='iso-8601') + + result_1 = f.to_native(datetime.date(1984, 7, 31)) + + self.assertEqual('1984-07-31', result_1) + + def test_to_native_custom_format(self): + """ + Make sure to_native() returns correct custom format. 
+ """ + f = serializers.DateField(format="%Y - %m.%d") + + result_1 = f.to_native(datetime.date(1984, 7, 31)) + + self.assertEqual('1984 - 07.31', result_1) + + def test_to_native_none(self): + """ + Make sure from_native() returns None on None param. + """ + f = serializers.DateField(required=False) + self.assertEqual(None, f.to_native(None)) + + +class DateTimeFieldTest(TestCase): + """ + Tests for the DateTimeField from_native() and to_native() behavior + """ + + def test_from_native_string(self): + """ + Make sure from_native() accepts default iso input formats. + """ + f = serializers.DateTimeField() + result_1 = f.from_native('1984-07-31 04:31') + result_2 = f.from_native('1984-07-31 04:31:59') + result_3 = f.from_native('1984-07-31 04:31:59.000200') + + self.assertEqual(datetime.datetime(1984, 7, 31, 4, 31), result_1) + self.assertEqual(datetime.datetime(1984, 7, 31, 4, 31, 59), result_2) + self.assertEqual(datetime.datetime(1984, 7, 31, 4, 31, 59, 200), result_3) + + def test_from_native_datetime_datetime(self): + """ + Make sure from_native() accepts a datetime.datetime instance. + """ + f = serializers.DateTimeField() + result_1 = f.from_native(datetime.datetime(1984, 7, 31, 4, 31)) + result_2 = f.from_native(datetime.datetime(1984, 7, 31, 4, 31, 59)) + result_3 = f.from_native(datetime.datetime(1984, 7, 31, 4, 31, 59, 200)) + + self.assertEqual(result_1, datetime.datetime(1984, 7, 31, 4, 31)) + self.assertEqual(result_2, datetime.datetime(1984, 7, 31, 4, 31, 59)) + self.assertEqual(result_3, datetime.datetime(1984, 7, 31, 4, 31, 59, 200)) + + def test_from_native_custom_format(self): + """ + Make sure from_native() accepts custom input formats. + """ + f = serializers.DateTimeField(input_formats=['%Y -- %H:%M']) + result = f.from_native('1984 -- 04:59') + + self.assertEqual(datetime.datetime(1984, 1, 1, 4, 59), result) + + def test_from_native_invalid_default_on_custom_format(self): + """ + Make sure from_native() don't accept default formats if custom format is preset + """ + f = serializers.DateTimeField(input_formats=['%Y -- %H:%M']) + + try: + f.from_native('1984-07-31 04:31:59') + except validators.ValidationError as e: + self.assertEqual(e.messages, ["Datetime has wrong format. Use one of these formats instead: YYYY -- hh:mm"]) + else: + self.fail("ValidationError was not properly raised") + + def test_from_native_empty(self): + """ + Make sure from_native() returns None on empty param. + """ + f = serializers.DateTimeField() + result = f.from_native('') + + self.assertEqual(result, None) + + def test_from_native_none(self): + """ + Make sure from_native() returns None on None param. + """ + f = serializers.DateTimeField() + result = f.from_native(None) + + self.assertEqual(result, None) + + def test_from_native_invalid_datetime(self): + """ + Make sure from_native() raises a ValidationError on passing an invalid datetime. + """ + f = serializers.DateTimeField() + + try: + f.from_native('04:61:59') + except validators.ValidationError as e: + self.assertEqual(e.messages, ["Datetime has wrong format. Use one of these formats instead: " + "YYYY-MM-DDThh:mm[:ss[.uuuuuu]][+HHMM|-HHMM|Z]"]) + else: + self.fail("ValidationError was not properly raised") + + def test_from_native_invalid_format(self): + """ + Make sure from_native() raises a ValidationError on passing an invalid format. + """ + f = serializers.DateTimeField() + + try: + f.from_native('04 -- 31') + except validators.ValidationError as e: + self.assertEqual(e.messages, ["Datetime has wrong format. 
Use one of these formats instead: " + "YYYY-MM-DDThh:mm[:ss[.uuuuuu]][+HHMM|-HHMM|Z]"]) + else: + self.fail("ValidationError was not properly raised") + + def test_to_native(self): + """ + Make sure to_native() returns datetime objects by default. + """ + f = serializers.DateTimeField() + + result_1 = f.to_native(datetime.datetime(1984, 7, 31)) + result_2 = f.to_native(datetime.datetime(1984, 7, 31, 4, 31)) + result_3 = f.to_native(datetime.datetime(1984, 7, 31, 4, 31, 59)) + result_4 = f.to_native(datetime.datetime(1984, 7, 31, 4, 31, 59, 200)) + + self.assertEqual(datetime.datetime(1984, 7, 31), result_1) + self.assertEqual(datetime.datetime(1984, 7, 31, 4, 31), result_2) + self.assertEqual(datetime.datetime(1984, 7, 31, 4, 31, 59), result_3) + self.assertEqual(datetime.datetime(1984, 7, 31, 4, 31, 59, 200), result_4) + + def test_to_native_iso(self): + """ + Make sure to_native() with format=iso-8601 returns an iso formatted datetime. + """ + f = serializers.DateTimeField(format='iso-8601') + + result_1 = f.to_native(datetime.datetime(1984, 7, 31)) + result_2 = f.to_native(datetime.datetime(1984, 7, 31, 4, 31)) + result_3 = f.to_native(datetime.datetime(1984, 7, 31, 4, 31, 59)) + result_4 = f.to_native(datetime.datetime(1984, 7, 31, 4, 31, 59, 200)) + + self.assertEqual('1984-07-31T00:00:00', result_1) + self.assertEqual('1984-07-31T04:31:00', result_2) + self.assertEqual('1984-07-31T04:31:59', result_3) + self.assertEqual('1984-07-31T04:31:59.000200', result_4) + + def test_to_native_custom_format(self): + """ + Make sure to_native() returns the correct custom format. + """ + f = serializers.DateTimeField(format="%Y - %H:%M") + + result_1 = f.to_native(datetime.datetime(1984, 7, 31)) + result_2 = f.to_native(datetime.datetime(1984, 7, 31, 4, 31)) + result_3 = f.to_native(datetime.datetime(1984, 7, 31, 4, 31, 59)) + result_4 = f.to_native(datetime.datetime(1984, 7, 31, 4, 31, 59, 200)) + + self.assertEqual('1984 - 00:00', result_1) + self.assertEqual('1984 - 04:31', result_2) + self.assertEqual('1984 - 04:31', result_3) + self.assertEqual('1984 - 04:31', result_4) + + def test_to_native_none(self): + """ + Make sure to_native() returns None on None param. + """ + f = serializers.DateTimeField(required=False) + self.assertEqual(None, f.to_native(None)) + + +class TimeFieldTest(TestCase): + """ + Tests for the TimeField from_native() and to_native() behavior + """ + + def test_from_native_string(self): + """ + Make sure from_native() accepts default iso input formats. + """ + f = serializers.TimeField() + result_1 = f.from_native('04:31') + result_2 = f.from_native('04:31:59') + result_3 = f.from_native('04:31:59.000200') + + self.assertEqual(datetime.time(4, 31), result_1) + self.assertEqual(datetime.time(4, 31, 59), result_2) + self.assertEqual(datetime.time(4, 31, 59, 200), result_3) + + def test_from_native_datetime_time(self): + """ + Make sure from_native() accepts a datetime.time instance. + """ + f = serializers.TimeField() + result_1 = f.from_native(datetime.time(4, 31)) + result_2 = f.from_native(datetime.time(4, 31, 59)) + result_3 = f.from_native(datetime.time(4, 31, 59, 200)) + + self.assertEqual(result_1, datetime.time(4, 31)) + self.assertEqual(result_2, datetime.time(4, 31, 59)) + self.assertEqual(result_3, datetime.time(4, 31, 59, 200)) + + def test_from_native_custom_format(self): + """ + Make sure from_native() accepts custom input formats.
+ """ + f = serializers.TimeField(input_formats=['%H -- %M']) + result = f.from_native('04 -- 31') + + self.assertEqual(datetime.time(4, 31), result) + + def test_from_native_invalid_default_on_custom_format(self): + """ + Make sure from_native() don't accept default formats if custom format is preset + """ + f = serializers.TimeField(input_formats=['%H -- %M']) + + try: + f.from_native('04:31:59') + except validators.ValidationError as e: + self.assertEqual(e.messages, ["Time has wrong format. Use one of these formats instead: hh -- mm"]) + else: + self.fail("ValidationError was not properly raised") + + def test_from_native_empty(self): + """ + Make sure from_native() returns None on empty param. + """ + f = serializers.TimeField() + result = f.from_native('') + + self.assertEqual(result, None) + + def test_from_native_none(self): + """ + Make sure from_native() returns None on None param. + """ + f = serializers.TimeField() + result = f.from_native(None) + + self.assertEqual(result, None) + + def test_from_native_invalid_time(self): + """ + Make sure from_native() raises a ValidationError on passing an invalid time. + """ + f = serializers.TimeField() + + try: + f.from_native('04:61:59') + except validators.ValidationError as e: + self.assertEqual(e.messages, ["Time has wrong format. Use one of these formats instead: " + "hh:mm[:ss[.uuuuuu]]"]) + else: + self.fail("ValidationError was not properly raised") + + def test_from_native_invalid_format(self): + """ + Make sure from_native() raises a ValidationError on passing an invalid format. + """ + f = serializers.TimeField() + + try: + f.from_native('04 -- 31') + except validators.ValidationError as e: + self.assertEqual(e.messages, ["Time has wrong format. Use one of these formats instead: " + "hh:mm[:ss[.uuuuuu]]"]) + else: + self.fail("ValidationError was not properly raised") + + def test_to_native(self): + """ + Make sure to_native() returns time object as default. + """ + f = serializers.TimeField() + result_1 = f.to_native(datetime.time(4, 31)) + result_2 = f.to_native(datetime.time(4, 31, 59)) + result_3 = f.to_native(datetime.time(4, 31, 59, 200)) + + self.assertEqual(datetime.time(4, 31), result_1) + self.assertEqual(datetime.time(4, 31, 59), result_2) + self.assertEqual(datetime.time(4, 31, 59, 200), result_3) + + def test_to_native_iso(self): + """ + Make sure to_native() with format='iso-8601' returns iso formatted time. + """ + f = serializers.TimeField(format='iso-8601') + result_1 = f.to_native(datetime.time(4, 31)) + result_2 = f.to_native(datetime.time(4, 31, 59)) + result_3 = f.to_native(datetime.time(4, 31, 59, 200)) + + self.assertEqual('04:31:00', result_1) + self.assertEqual('04:31:59', result_2) + self.assertEqual('04:31:59.000200', result_3) + + def test_to_native_custom_format(self): + """ + Make sure to_native() returns correct custom format. 
+ """ + f = serializers.TimeField(format="%H - %S [%f]") + result_1 = f.to_native(datetime.time(4, 31)) + result_2 = f.to_native(datetime.time(4, 31, 59)) + result_3 = f.to_native(datetime.time(4, 31, 59, 200)) + + self.assertEqual('04 - 00 [000000]', result_1) + self.assertEqual('04 - 59 [000000]', result_2) + self.assertEqual('04 - 59 [000200]', result_3) + + +class DecimalFieldTest(TestCase): + """ + Tests for the DecimalField from_native() and to_native() behavior + """ + + def test_from_native_string(self): + """ + Make sure from_native() accepts string values + """ + f = serializers.DecimalField() + result_1 = f.from_native('9000') + result_2 = f.from_native('1.00000001') + + self.assertEqual(Decimal('9000'), result_1) + self.assertEqual(Decimal('1.00000001'), result_2) + + def test_from_native_invalid_string(self): + """ + Make sure from_native() raises ValidationError on passing invalid string + """ + f = serializers.DecimalField() + + try: + f.from_native('123.45.6') + except validators.ValidationError as e: + self.assertEqual(e.messages, ["Enter a number."]) + else: + self.fail("ValidationError was not properly raised") + + def test_from_native_integer(self): + """ + Make sure from_native() accepts integer values + """ + f = serializers.DecimalField() + result = f.from_native(9000) + + self.assertEqual(Decimal('9000'), result) + + def test_from_native_float(self): + """ + Make sure from_native() accepts float values + """ + f = serializers.DecimalField() + result = f.from_native(1.00000001) + + self.assertEqual(Decimal('1.00000001'), result) + + def test_from_native_empty(self): + """ + Make sure from_native() returns None on empty param. + """ + f = serializers.DecimalField() + result = f.from_native('') + + self.assertEqual(result, None) + + def test_from_native_none(self): + """ + Make sure from_native() returns None on None param. + """ + f = serializers.DecimalField() + result = f.from_native(None) + + self.assertEqual(result, None) + + def test_to_native(self): + """ + Make sure to_native() returns Decimal as string. + """ + f = serializers.DecimalField() + + result_1 = f.to_native(Decimal('9000')) + result_2 = f.to_native(Decimal('1.00000001')) + + self.assertEqual(Decimal('9000'), result_1) + self.assertEqual(Decimal('1.00000001'), result_2) + + def test_to_native_none(self): + """ + Make sure from_native() returns None on None param. 
+ """ + f = serializers.DecimalField(required=False) + self.assertEqual(None, f.to_native(None)) + + def test_valid_serialization(self): + """ + Make sure the serializer works correctly + """ + class DecimalSerializer(serializers.Serializer): + decimal_field = serializers.DecimalField(max_value=9010, + min_value=9000, + max_digits=6, + decimal_places=2) + + self.assertTrue(DecimalSerializer(data={'decimal_field': '9001'}).is_valid()) + self.assertTrue(DecimalSerializer(data={'decimal_field': '9001.2'}).is_valid()) + self.assertTrue(DecimalSerializer(data={'decimal_field': '9001.23'}).is_valid()) + + self.assertFalse(DecimalSerializer(data={'decimal_field': '8000'}).is_valid()) + self.assertFalse(DecimalSerializer(data={'decimal_field': '9900'}).is_valid()) + self.assertFalse(DecimalSerializer(data={'decimal_field': '9001.234'}).is_valid()) + + def test_raise_max_value(self): + """ + Make sure max_value violations raises ValidationError + """ + class DecimalSerializer(serializers.Serializer): + decimal_field = serializers.DecimalField(max_value=100) + + s = DecimalSerializer(data={'decimal_field': '123'}) + + self.assertFalse(s.is_valid()) + self.assertEqual(s.errors, {'decimal_field': ['Ensure this value is less than or equal to 100.']}) + + def test_raise_min_value(self): + """ + Make sure min_value violations raises ValidationError + """ + class DecimalSerializer(serializers.Serializer): + decimal_field = serializers.DecimalField(min_value=100) + + s = DecimalSerializer(data={'decimal_field': '99'}) + + self.assertFalse(s.is_valid()) + self.assertEqual(s.errors, {'decimal_field': ['Ensure this value is greater than or equal to 100.']}) + + def test_raise_max_digits(self): + """ + Make sure max_digits violations raises ValidationError + """ + class DecimalSerializer(serializers.Serializer): + decimal_field = serializers.DecimalField(max_digits=5) + + s = DecimalSerializer(data={'decimal_field': '123.456'}) + + self.assertFalse(s.is_valid()) + self.assertEqual(s.errors, {'decimal_field': ['Ensure that there are no more than 5 digits in total.']}) + + def test_raise_max_decimal_places(self): + """ + Make sure max_decimal_places violations raises ValidationError + """ + class DecimalSerializer(serializers.Serializer): + decimal_field = serializers.DecimalField(decimal_places=3) + + s = DecimalSerializer(data={'decimal_field': '123.4567'}) + + self.assertFalse(s.is_valid()) + self.assertEqual(s.errors, {'decimal_field': ['Ensure that there are no more than 3 decimal places.']}) + + def test_raise_max_whole_digits(self): + """ + Make sure max_whole_digits violations raises ValidationError + """ + class DecimalSerializer(serializers.Serializer): + decimal_field = serializers.DecimalField(max_digits=4, decimal_places=3) + + s = DecimalSerializer(data={'decimal_field': '12345.6'}) + + self.assertFalse(s.is_valid()) + self.assertEqual(s.errors, {'decimal_field': ['Ensure that there are no more than 4 digits in total.']}) + + +class ChoiceFieldTests(TestCase): + """ + Tests for the ChoiceField options generator + """ + + SAMPLE_CHOICES = [ + ('red', 'Red'), + ('green', 'Green'), + ('blue', 'Blue'), + ] + + def test_choices_required(self): + """ + Make sure proper choices are rendered if field is required + """ + f = serializers.ChoiceField(required=True, choices=self.SAMPLE_CHOICES) + self.assertEqual(f.choices, self.SAMPLE_CHOICES) + + def test_choices_not_required(self): + """ + Make sure proper choices (plus blank) are rendered if the field isn't required + """ + f = 
serializers.ChoiceField(required=False, choices=self.SAMPLE_CHOICES) + self.assertEqual(f.choices, models.fields.BLANK_CHOICE_DASH + self.SAMPLE_CHOICES) + + +class EmailFieldTests(TestCase): + """ + Tests for EmailField attribute values + """ + + class EmailFieldModel(RESTFrameworkModel): + email_field = models.EmailField(blank=True) + + class EmailFieldWithGivenMaxLengthModel(RESTFrameworkModel): + email_field = models.EmailField(max_length=150, blank=True) + + def test_default_model_value(self): + class EmailFieldSerializer(serializers.ModelSerializer): + class Meta: + model = self.EmailFieldModel + + serializer = EmailFieldSerializer(data={}) + self.assertEqual(serializer.is_valid(), True) + self.assertEqual(getattr(serializer.fields['email_field'], 'max_length'), 75) + + def test_given_model_value(self): + class EmailFieldSerializer(serializers.ModelSerializer): + class Meta: + model = self.EmailFieldWithGivenMaxLengthModel + + serializer = EmailFieldSerializer(data={}) + self.assertEqual(serializer.is_valid(), True) + self.assertEqual(getattr(serializer.fields['email_field'], 'max_length'), 150) + + def test_given_serializer_value(self): + class EmailFieldSerializer(serializers.ModelSerializer): + email_field = serializers.EmailField(source='email_field', max_length=20, required=False) + + class Meta: + model = self.EmailFieldModel + + serializer = EmailFieldSerializer(data={}) + self.assertEqual(serializer.is_valid(), True) + self.assertEqual(getattr(serializer.fields['email_field'], 'max_length'), 20) + + +class SlugFieldTests(TestCase): + """ + Tests for SlugField attribute values + """ + + class SlugFieldModel(RESTFrameworkModel): + slug_field = models.SlugField(blank=True) + + class SlugFieldWithGivenMaxLengthModel(RESTFrameworkModel): + slug_field = models.SlugField(max_length=84, blank=True) + + def test_default_model_value(self): + class SlugFieldSerializer(serializers.ModelSerializer): + class Meta: + model = self.SlugFieldModel + + serializer = SlugFieldSerializer(data={}) + self.assertEqual(serializer.is_valid(), True) + self.assertEqual(getattr(serializer.fields['slug_field'], 'max_length'), 50) + + def test_given_model_value(self): + class SlugFieldSerializer(serializers.ModelSerializer): + class Meta: + model = self.SlugFieldWithGivenMaxLengthModel + + serializer = SlugFieldSerializer(data={}) + self.assertEqual(serializer.is_valid(), True) + self.assertEqual(getattr(serializer.fields['slug_field'], 'max_length'), 84) + + def test_given_serializer_value(self): + class SlugFieldSerializer(serializers.ModelSerializer): + slug_field = serializers.SlugField(source='slug_field', + max_length=20, required=False) + + class Meta: + model = self.SlugFieldModel + + serializer = SlugFieldSerializer(data={}) + self.assertEqual(serializer.is_valid(), True) + self.assertEqual(getattr(serializer.fields['slug_field'], + 'max_length'), 20) + + def test_invalid_slug(self): + """ + Make sure an invalid slug raises a ValidationError + """ + class SlugFieldSerializer(serializers.ModelSerializer): + slug_field = serializers.SlugField(source='slug_field', max_length=20, required=True) + + class Meta: + model = self.SlugFieldModel + + s = SlugFieldSerializer(data={'slug_field': 'a b'}) + + self.assertEqual(s.is_valid(), False) + self.assertEqual(s.errors, {'slug_field': ["Enter a valid 'slug' consisting of letters, numbers, underscores or hyphens."]}) + + +class URLFieldTests(TestCase): + """ + Tests for URLField attribute values + """ + + class URLFieldModel(RESTFrameworkModel): + url_field = 
models.URLField(blank=True) + + class URLFieldWithGivenMaxLengthModel(RESTFrameworkModel): + url_field = models.URLField(max_length=128, blank=True) + + def test_default_model_value(self): + class URLFieldSerializer(serializers.ModelSerializer): + class Meta: + model = self.URLFieldModel + + serializer = URLFieldSerializer(data={}) + self.assertEqual(serializer.is_valid(), True) + self.assertEqual(getattr(serializer.fields['url_field'], + 'max_length'), 200) + + def test_given_model_value(self): + class URLFieldSerializer(serializers.ModelSerializer): + class Meta: + model = self.URLFieldWithGivenMaxLengthModel + + serializer = URLFieldSerializer(data={}) + self.assertEqual(serializer.is_valid(), True) + self.assertEqual(getattr(serializer.fields['url_field'], + 'max_length'), 128) + + def test_given_serializer_value(self): + class URLFieldSerializer(serializers.ModelSerializer): + url_field = serializers.URLField(source='url_field', + max_length=20, required=False) + + class Meta: + model = self.URLFieldWithGivenMaxLengthModel + + serializer = URLFieldSerializer(data={}) + self.assertEqual(serializer.is_valid(), True) + self.assertEqual(getattr(serializer.fields['url_field'], + 'max_length'), 20) + + +class FieldMetadata(TestCase): + def setUp(self): + self.required_field = serializers.Field() + self.required_field.label = uuid4().hex + self.required_field.required = True + + self.optional_field = serializers.Field() + self.optional_field.label = uuid4().hex + self.optional_field.required = False + + def test_required(self): + self.assertEqual(self.required_field.metadata()['required'], True) + + def test_optional(self): + self.assertEqual(self.optional_field.metadata()['required'], False) + + def test_label(self): + for field in (self.required_field, self.optional_field): + self.assertEqual(field.metadata()['label'], field.label) + + +class FieldCallableDefault(TestCase): + def setUp(self): + self.simple_callable = lambda: 'foo bar' + + def test_default_can_be_simple_callable(self): + """ + Ensure that the 'default' argument can also be a simple callable. 
+ """ + field = serializers.WritableField(default=self.simple_callable) + into = {} + field.field_from_native({}, {}, 'field', into) + self.assertEquals(into, {'field': 'foo bar'}) diff --git a/awx/lib/site-packages/rest_framework/tests/test_files.py b/awx/lib/site-packages/rest_framework/tests/test_files.py new file mode 100644 index 0000000000..487046aca4 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_files.py @@ -0,0 +1,51 @@ +from __future__ import unicode_literals +from django.test import TestCase +from rest_framework import serializers +from rest_framework.compat import BytesIO +from rest_framework.compat import six +import datetime + + +class UploadedFile(object): + def __init__(self, file, created=None): + self.file = file + self.created = created or datetime.datetime.now() + + +class UploadedFileSerializer(serializers.Serializer): + file = serializers.FileField() + created = serializers.DateTimeField() + + def restore_object(self, attrs, instance=None): + if instance: + instance.file = attrs['file'] + instance.created = attrs['created'] + return instance + return UploadedFile(**attrs) + + +class FileSerializerTests(TestCase): + def test_create(self): + now = datetime.datetime.now() + file = BytesIO(six.b('stuff')) + file.name = 'stuff.txt' + file.size = len(file.getvalue()) + serializer = UploadedFileSerializer(data={'created': now}, files={'file': file}) + uploaded_file = UploadedFile(file=file, created=now) + self.assertTrue(serializer.is_valid()) + self.assertEqual(serializer.object.created, uploaded_file.created) + self.assertEqual(serializer.object.file, uploaded_file.file) + self.assertFalse(serializer.object is uploaded_file) + + def test_creation_failure(self): + """ + Passing files=None should result in an ValidationError + + Regression test for: + https://github.com/tomchristie/django-rest-framework/issues/542 + """ + now = datetime.datetime.now() + + serializer = UploadedFileSerializer(data={'created': now}) + self.assertFalse(serializer.is_valid()) + self.assertIn('file', serializer.errors) diff --git a/awx/lib/site-packages/rest_framework/tests/test_filters.py b/awx/lib/site-packages/rest_framework/tests/test_filters.py new file mode 100644 index 0000000000..aaed624782 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_filters.py @@ -0,0 +1,474 @@ +from __future__ import unicode_literals +import datetime +from decimal import Decimal +from django.db import models +from django.core.urlresolvers import reverse +from django.test import TestCase +from django.test.client import RequestFactory +from django.utils import unittest +from rest_framework import generics, serializers, status, filters +from rest_framework.compat import django_filters, patterns, url +from rest_framework.tests.models import BasicModel + +factory = RequestFactory() + + +class FilterableItem(models.Model): + text = models.CharField(max_length=100) + decimal = models.DecimalField(max_digits=4, decimal_places=2) + date = models.DateField() + + +if django_filters: + # Basic filter on a list view. + class FilterFieldsRootView(generics.ListCreateAPIView): + model = FilterableItem + filter_fields = ['decimal', 'date'] + filter_backends = (filters.DjangoFilterBackend,) + + # These class are used to test a filter class. 
+ class SeveralFieldsFilter(django_filters.FilterSet): + text = django_filters.CharFilter(lookup_type='icontains') + decimal = django_filters.NumberFilter(lookup_type='lt') + date = django_filters.DateFilter(lookup_type='gt') + + class Meta: + model = FilterableItem + fields = ['text', 'decimal', 'date'] + + class FilterClassRootView(generics.ListCreateAPIView): + model = FilterableItem + filter_class = SeveralFieldsFilter + filter_backends = (filters.DjangoFilterBackend,) + + # These classes are used to test a misconfigured filter class. + class MisconfiguredFilter(django_filters.FilterSet): + text = django_filters.CharFilter(lookup_type='icontains') + + class Meta: + model = BasicModel + fields = ['text'] + + class IncorrectlyConfiguredRootView(generics.ListCreateAPIView): + model = FilterableItem + filter_class = MisconfiguredFilter + filter_backends = (filters.DjangoFilterBackend,) + + class FilterClassDetailView(generics.RetrieveAPIView): + model = FilterableItem + filter_class = SeveralFieldsFilter + filter_backends = (filters.DjangoFilterBackend,) + + # Regression test for #814 + class FilterableItemSerializer(serializers.ModelSerializer): + class Meta: + model = FilterableItem + + class FilterFieldsQuerysetView(generics.ListCreateAPIView): + queryset = FilterableItem.objects.all() + serializer_class = FilterableItemSerializer + filter_fields = ['decimal', 'date'] + filter_backends = (filters.DjangoFilterBackend,) + + class GetQuerysetView(generics.ListCreateAPIView): + serializer_class = FilterableItemSerializer + filter_class = SeveralFieldsFilter + filter_backends = (filters.DjangoFilterBackend,) + + def get_queryset(self): + return FilterableItem.objects.all() + + urlpatterns = patterns('', + url(r'^(?P<pk>\d+)/$', FilterClassDetailView.as_view(), name='detail-view'), + url(r'^$', FilterClassRootView.as_view(), name='root-view'), + url(r'^get-queryset/$', GetQuerysetView.as_view(), + name='get-queryset-view'), + ) + + +class CommonFilteringTestCase(TestCase): + def _serialize_object(self, obj): + return {'id': obj.id, 'text': obj.text, 'decimal': obj.decimal, 'date': obj.date} + + def setUp(self): + """ + Create 10 FilterableItem instances. + """ + base_data = ('a', Decimal('0.25'), datetime.date(2012, 10, 8)) + for i in range(10): + text = chr(i + ord(base_data[0])) * 3 # Produces string 'aaa', 'bbb', etc. + decimal = base_data[1] + i + date = base_data[2] - datetime.timedelta(days=i * 2) + FilterableItem(text=text, decimal=decimal, date=date).save() + + self.objects = FilterableItem.objects + self.data = [ + self._serialize_object(obj) + for obj in self.objects.all() + ] + + +class IntegrationTestFiltering(CommonFilteringTestCase): + """ + Integration tests for filtered list views. + """ + + @unittest.skipUnless(django_filters, 'django-filters not installed') + def test_get_filtered_fields_root_view(self): + """ + GET requests to a ListCreateAPIView with filter_fields set should return filtered results. + """ + view = FilterFieldsRootView.as_view() + + # Basic test with no filter. + request = factory.get('/') + response = view(request).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data, self.data) + + # Tests that the decimal filter works.
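+        # filter_fields generates exact-match filters, so only the single item with decimal == 2.25 should be returned.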
+ search_decimal = Decimal('2.25') + request = factory.get('/?decimal=%s' % search_decimal) + response = view(request).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + expected_data = [f for f in self.data if f['decimal'] == search_decimal] + self.assertEqual(response.data, expected_data) + + # Tests that the date filter works. + search_date = datetime.date(2012, 9, 22) + request = factory.get('/?date=%s' % search_date) # search_date str: '2012-09-22' + response = view(request).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + expected_data = [f for f in self.data if f['date'] == search_date] + self.assertEqual(response.data, expected_data) + + @unittest.skipUnless(django_filters, 'django-filters not installed') + def test_filter_with_queryset(self): + """ + Regression test for #814. + """ + view = FilterFieldsQuerysetView.as_view() + + # Tests that the decimal filter works. + search_decimal = Decimal('2.25') + request = factory.get('/?decimal=%s' % search_decimal) + response = view(request).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + expected_data = [f for f in self.data if f['decimal'] == search_decimal] + self.assertEqual(response.data, expected_data) + + @unittest.skipUnless(django_filters, 'django-filters not installed') + def test_filter_with_get_queryset_only(self): + """ + Regression test for #834. + """ + view = GetQuerysetView.as_view() + request = factory.get('/get-queryset/') + view(request).render() + # Used to raise "issubclass() arg 2 must be a class or tuple of classes" + # here when neither `model` nor `queryset` was specified. + + @unittest.skipUnless(django_filters, 'django-filters not installed') + def test_get_filtered_class_root_view(self): + """ + GET requests to filtered ListCreateAPIView that have a filter_class set + should return filtered results. + """ + view = FilterClassRootView.as_view() + + # Basic test with no filter. + request = factory.get('/') + response = view(request).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data, self.data) + + # Tests that the decimal filter set with 'lt' in the filter class works. + search_decimal = Decimal('4.25') + request = factory.get('/?decimal=%s' % search_decimal) + response = view(request).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + expected_data = [f for f in self.data if f['decimal'] < search_decimal] + self.assertEqual(response.data, expected_data) + + # Tests that the date filter set with 'gt' in the filter class works. + search_date = datetime.date(2012, 10, 2) + request = factory.get('/?date=%s' % search_date) # search_date str: '2012-10-02' + response = view(request).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + expected_data = [f for f in self.data if f['date'] > search_date] + self.assertEqual(response.data, expected_data) + + # Tests that the text filter set with 'icontains' in the filter class works. + search_text = 'ff' + request = factory.get('/?text=%s' % search_text) + response = view(request).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + expected_data = [f for f in self.data if search_text in f['text'].lower()] + self.assertEqual(response.data, expected_data) + + # Tests that multiple filters work.
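+        # Both querystring parameters are applied to the same queryset, i.e. the filters combine with logical AND.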
+ search_decimal = Decimal('5.25') + search_date = datetime.date(2012, 10, 2) + request = factory.get('/?decimal=%s&date=%s' % (search_decimal, search_date)) + response = view(request).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + expected_data = [f for f in self.data if f['date'] > search_date and + f['decimal'] < search_decimal] + self.assertEqual(response.data, expected_data) + + @unittest.skipUnless(django_filters, 'django-filters not installed') + def test_incorrectly_configured_filter(self): + """ + An error should be displayed when the filter class is misconfigured. + """ + view = IncorrectlyConfiguredRootView.as_view() + + request = factory.get('/') + self.assertRaises(AssertionError, view, request) + + @unittest.skipUnless(django_filters, 'django-filters not installed') + def test_unknown_filter(self): + """ + GET requests with filters that aren't configured should return 200. + """ + view = FilterFieldsRootView.as_view() + + search_integer = 10 + request = factory.get('/?integer=%s' % search_integer) + response = view(request).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + + +class IntegrationTestDetailFiltering(CommonFilteringTestCase): + """ + Integration tests for filtered detail views. + """ + urls = 'rest_framework.tests.test_filters' + + def _get_url(self, item): + return reverse('detail-view', kwargs=dict(pk=item.pk)) + + @unittest.skipUnless(django_filters, 'django-filters not installed') + def test_get_filtered_detail_view(self): + """ + GET requests to filtered RetrieveAPIView that have a filter_class set + should return filtered results. + """ + item = self.objects.all()[0] + data = self._serialize_object(item) + + # Basic test with no filter. + response = self.client.get(self._get_url(item)) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data, data) + + # Tests a decimal filter that should exclude this object. + search_decimal = Decimal('4.25') + high_item = self.objects.filter(decimal__gt=search_decimal)[0] + response = self.client.get('{url}?decimal={param}'.format(url=self._get_url(high_item), param=search_decimal)) + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + + # Tests a decimal filter that should match this object. + search_decimal = Decimal('4.25') + low_item = self.objects.filter(decimal__lt=search_decimal)[0] + low_item_data = self._serialize_object(low_item) + response = self.client.get('{url}?decimal={param}'.format(url=self._get_url(low_item), param=search_decimal)) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data, low_item_data) + + # Tests that multiple filters work. + search_decimal = Decimal('5.25') + search_date = datetime.date(2012, 10, 2) + valid_item = self.objects.filter(decimal__lt=search_decimal, date__gt=search_date)[0] + valid_item_data = self._serialize_object(valid_item) + response = self.client.get('{url}?decimal={decimal}&date={date}'.format(url=self._get_url(valid_item), decimal=search_decimal, date=search_date)) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data, valid_item_data) + + +class SearchFilterModel(models.Model): + title = models.CharField(max_length=20) + text = models.CharField(max_length=100) + + +class SearchFilterTests(TestCase): + def setUp(self): + # Sequence of title/text is: + # + # z abc + # zz bcd + # zzz cde + # ...
+ for idx in range(10): + title = 'z' * (idx + 1) + text = ( + chr(idx + ord('a')) + + chr(idx + ord('b')) + + chr(idx + ord('c')) + ) + SearchFilterModel(title=title, text=text).save() + + def test_search(self): + class SearchListView(generics.ListAPIView): + model = SearchFilterModel + filter_backends = (filters.SearchFilter,) + search_fields = ('title', 'text') + + view = SearchListView.as_view() + request = factory.get('?search=b') + response = view(request) + self.assertEqual( + response.data, + [ + {'id': 1, 'title': 'z', 'text': 'abc'}, + {'id': 2, 'title': 'zz', 'text': 'bcd'} + ] + ) + + def test_exact_search(self): + class SearchListView(generics.ListAPIView): + model = SearchFilterModel + filter_backends = (filters.SearchFilter,) + search_fields = ('=title', 'text') + + view = SearchListView.as_view() + request = factory.get('?search=zzz') + response = view(request) + self.assertEqual( + response.data, + [ + {'id': 3, 'title': 'zzz', 'text': 'cde'} + ] + ) + + def test_startswith_search(self): + class SearchListView(generics.ListAPIView): + model = SearchFilterModel + filter_backends = (filters.SearchFilter,) + search_fields = ('title', '^text') + + view = SearchListView.as_view() + request = factory.get('?search=b') + response = view(request) + self.assertEqual( + response.data, + [ + {'id': 2, 'title': 'zz', 'text': 'bcd'} + ] + ) + + +class OrdringFilterModel(models.Model): + title = models.CharField(max_length=20) + text = models.CharField(max_length=100) + + +class OrderingFilterTests(TestCase): + def setUp(self): + # Sequence of title/text is: + # + # zyx abc + # yxw bcd + # xwv cde + for idx in range(3): + title = ( + chr(ord('z') - idx) + + chr(ord('y') - idx) + + chr(ord('x') - idx) + ) + text = ( + chr(idx + ord('a')) + + chr(idx + ord('b')) + + chr(idx + ord('c')) + ) + OrdringFilterModel(title=title, text=text).save() + + def test_ordering(self): + class OrderingListView(generics.ListAPIView): + model = OrdringFilterModel + filter_backends = (filters.OrderingFilter,) + ordering = ('title',) + + view = OrderingListView.as_view() + request = factory.get('?ordering=text') + response = view(request) + self.assertEqual( + response.data, + [ + {'id': 1, 'title': 'zyx', 'text': 'abc'}, + {'id': 2, 'title': 'yxw', 'text': 'bcd'}, + {'id': 3, 'title': 'xwv', 'text': 'cde'}, + ] + ) + + def test_reverse_ordering(self): + class OrderingListView(generics.ListAPIView): + model = OrdringFilterModel + filter_backends = (filters.OrderingFilter,) + ordering = ('title',) + + view = OrderingListView.as_view() + request = factory.get('?ordering=-text') + response = view(request) + self.assertEqual( + response.data, + [ + {'id': 3, 'title': 'xwv', 'text': 'cde'}, + {'id': 2, 'title': 'yxw', 'text': 'bcd'}, + {'id': 1, 'title': 'zyx', 'text': 'abc'}, + ] + ) + + def test_incorrectfield_ordering(self): + class OrderingListView(generics.ListAPIView): + model = OrdringFilterModel + filter_backends = (filters.OrderingFilter,) + ordering = ('title',) + + view = OrderingListView.as_view() + request = factory.get('?ordering=foobar') + response = view(request) + self.assertEqual( + response.data, + [ + {'id': 3, 'title': 'xwv', 'text': 'cde'}, + {'id': 2, 'title': 'yxw', 'text': 'bcd'}, + {'id': 1, 'title': 'zyx', 'text': 'abc'}, + ] + ) + + def test_default_ordering(self): + class OrderingListView(generics.ListAPIView): + model = OrdringFilterModel + filter_backends = (filters.OrderingFilter,) + ordering = ('title',) + + view = OrderingListView.as_view() + request = factory.get('') + response = 
view(request) + self.assertEqual( + response.data, + [ + {'id': 3, 'title': 'xwv', 'text': 'cde'}, + {'id': 2, 'title': 'yxw', 'text': 'bcd'}, + {'id': 1, 'title': 'zyx', 'text': 'abc'}, + ] + ) + + def test_default_ordering_using_string(self): + class OrderingListView(generics.ListAPIView): + model = OrdringFilterModel + filter_backends = (filters.OrderingFilter,) + ordering = 'title' + + view = OrderingListView.as_view() + request = factory.get('') + response = view(request) + self.assertEqual( + response.data, + [ + {'id': 3, 'title': 'xwv', 'text': 'cde'}, + {'id': 2, 'title': 'yxw', 'text': 'bcd'}, + {'id': 1, 'title': 'zyx', 'text': 'abc'}, + ] + ) diff --git a/awx/lib/site-packages/rest_framework/tests/test_genericrelations.py b/awx/lib/site-packages/rest_framework/tests/test_genericrelations.py new file mode 100644 index 0000000000..c38bfb9f36 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_genericrelations.py @@ -0,0 +1,100 @@ +from __future__ import unicode_literals +from django.contrib.contenttypes.models import ContentType +from django.contrib.contenttypes.generic import GenericRelation, GenericForeignKey +from django.db import models +from django.test import TestCase +from rest_framework import serializers + + +class Tag(models.Model): + """ + Tags have a descriptive slug, and are attached to an arbitrary object. + """ + tag = models.SlugField() + content_type = models.ForeignKey(ContentType) + object_id = models.PositiveIntegerField() + tagged_item = GenericForeignKey('content_type', 'object_id') + + def __unicode__(self): + return self.tag + + +class Bookmark(models.Model): + """ + A URL bookmark that may have multiple tags attached. + """ + url = models.URLField() + tags = GenericRelation(Tag) + + def __unicode__(self): + return 'Bookmark: %s' % self.url + + +class Note(models.Model): + """ + A textual note that may have multiple tags attached. + """ + text = models.TextField() + tags = GenericRelation(Tag) + + def __unicode__(self): + return 'Note: %s' % self.text + + +class TestGenericRelations(TestCase): + def setUp(self): + self.bookmark = Bookmark.objects.create(url='https://www.djangoproject.com/') + Tag.objects.create(tagged_item=self.bookmark, tag='django') + Tag.objects.create(tagged_item=self.bookmark, tag='python') + self.note = Note.objects.create(text='Remember the milk') + Tag.objects.create(tagged_item=self.note, tag='reminder') + + def test_generic_relation(self): + """ + Test a relationship that spans a GenericRelation field. + IE. A reverse generic relationship. + """ + + class BookmarkSerializer(serializers.ModelSerializer): + tags = serializers.RelatedField(many=True) + + class Meta: + model = Bookmark + exclude = ('id',) + + serializer = BookmarkSerializer(self.bookmark) + expected = { + 'tags': ['django', 'python'], + 'url': 'https://www.djangoproject.com/' + } + self.assertEqual(serializer.data, expected) + + def test_generic_fk(self): + """ + Test a relationship that spans a GenericForeignKey field. + IE. A forward generic relationship. 
+ """ + + class TagSerializer(serializers.ModelSerializer): + tagged_item = serializers.RelatedField() + + class Meta: + model = Tag + exclude = ('id', 'content_type', 'object_id') + + serializer = TagSerializer(Tag.objects.all(), many=True) + expected = [ + { + 'tag': 'django', + 'tagged_item': 'Bookmark: https://www.djangoproject.com/' + }, + { + 'tag': 'python', + 'tagged_item': 'Bookmark: https://www.djangoproject.com/' + }, + { + 'tag': 'reminder', + 'tagged_item': 'Note: Remember the milk' + } + ] + self.assertEqual(serializer.data, expected) diff --git a/awx/lib/site-packages/rest_framework/tests/test_generics.py b/awx/lib/site-packages/rest_framework/tests/test_generics.py new file mode 100644 index 0000000000..37734195aa --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_generics.py @@ -0,0 +1,556 @@ +from __future__ import unicode_literals +from django.db import models +from django.shortcuts import get_object_or_404 +from django.test import TestCase +from rest_framework import generics, renderers, serializers, status +from rest_framework.tests.utils import RequestFactory +from rest_framework.tests.models import BasicModel, Comment, SlugBasedModel +from rest_framework.compat import six +import json + +factory = RequestFactory() + + +class RootView(generics.ListCreateAPIView): + """ + Example description for OPTIONS. + """ + model = BasicModel + + +class InstanceView(generics.RetrieveUpdateDestroyAPIView): + """ + Example description for OPTIONS. + """ + model = BasicModel + + +class SlugSerializer(serializers.ModelSerializer): + slug = serializers.Field() # read only + + class Meta: + model = SlugBasedModel + exclude = ('id',) + + +class SlugBasedInstanceView(InstanceView): + """ + A model with a slug-field. + """ + model = SlugBasedModel + serializer_class = SlugSerializer + lookup_field = 'slug' + + +class TestRootView(TestCase): + def setUp(self): + """ + Create 3 BasicModel instances. + """ + items = ['foo', 'bar', 'baz'] + for item in items: + BasicModel(text=item).save() + self.objects = BasicModel.objects + self.data = [ + {'id': obj.id, 'text': obj.text} + for obj in self.objects.all() + ] + self.view = RootView.as_view() + + def test_get_root_view(self): + """ + GET requests to ListCreateAPIView should return list of objects. + """ + request = factory.get('/') + with self.assertNumQueries(1): + response = self.view(request).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data, self.data) + + def test_post_root_view(self): + """ + POST requests to ListCreateAPIView should create a new object. 
+ """ + content = {'text': 'foobar'} + request = factory.post('/', json.dumps(content), + content_type='application/json') + with self.assertNumQueries(1): + response = self.view(request).render() + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + self.assertEqual(response.data, {'id': 4, 'text': 'foobar'}) + created = self.objects.get(id=4) + self.assertEqual(created.text, 'foobar') + + def test_put_root_view(self): + """ + PUT requests to ListCreateAPIView should not be allowed + """ + content = {'text': 'foobar'} + request = factory.put('/', json.dumps(content), + content_type='application/json') + with self.assertNumQueries(0): + response = self.view(request).render() + self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED) + self.assertEqual(response.data, {"detail": "Method 'PUT' not allowed."}) + + def test_delete_root_view(self): + """ + DELETE requests to ListCreateAPIView should not be allowed + """ + request = factory.delete('/') + with self.assertNumQueries(0): + response = self.view(request).render() + self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED) + self.assertEqual(response.data, {"detail": "Method 'DELETE' not allowed."}) + + def test_options_root_view(self): + """ + OPTIONS requests to ListCreateAPIView should return metadata + """ + request = factory.options('/') + with self.assertNumQueries(0): + response = self.view(request).render() + expected = { + 'parses': [ + 'application/json', + 'application/x-www-form-urlencoded', + 'multipart/form-data' + ], + 'renders': [ + 'application/json', + 'text/html' + ], + 'name': 'Root', + 'description': 'Example description for OPTIONS.', + 'actions': { + 'POST': { + 'text': { + 'max_length': 100, + 'read_only': False, + 'required': True, + 'type': 'string', + "label": "Text comes here", + "help_text": "Text description." + }, + 'id': { + 'read_only': True, + 'required': False, + 'type': 'integer', + 'label': 'ID', + }, + } + } + } + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data, expected) + + def test_post_cannot_set_id(self): + """ + POST requests to create a new object should not be able to set the id. + """ + content = {'id': 999, 'text': 'foobar'} + request = factory.post('/', json.dumps(content), + content_type='application/json') + with self.assertNumQueries(1): + response = self.view(request).render() + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + self.assertEqual(response.data, {'id': 4, 'text': 'foobar'}) + created = self.objects.get(id=4) + self.assertEqual(created.text, 'foobar') + + +class TestInstanceView(TestCase): + def setUp(self): + """ + Create 3 BasicModel intances. + """ + items = ['foo', 'bar', 'baz'] + for item in items: + BasicModel(text=item).save() + self.objects = BasicModel.objects + self.data = [ + {'id': obj.id, 'text': obj.text} + for obj in self.objects.all() + ] + self.view = InstanceView.as_view() + self.slug_based_view = SlugBasedInstanceView.as_view() + + def test_get_instance_view(self): + """ + GET requests to RetrieveUpdateDestroyAPIView should return a single object. 
+ """ + request = factory.get('/1') + with self.assertNumQueries(1): + response = self.view(request, pk=1).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data, self.data[0]) + + def test_post_instance_view(self): + """ + POST requests to RetrieveUpdateDestroyAPIView should not be allowed + """ + content = {'text': 'foobar'} + request = factory.post('/', json.dumps(content), + content_type='application/json') + with self.assertNumQueries(0): + response = self.view(request).render() + self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED) + self.assertEqual(response.data, {"detail": "Method 'POST' not allowed."}) + + def test_put_instance_view(self): + """ + PUT requests to RetrieveUpdateDestroyAPIView should update an object. + """ + content = {'text': 'foobar'} + request = factory.put('/1', json.dumps(content), + content_type='application/json') + with self.assertNumQueries(2): + response = self.view(request, pk='1').render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data, {'id': 1, 'text': 'foobar'}) + updated = self.objects.get(id=1) + self.assertEqual(updated.text, 'foobar') + + def test_patch_instance_view(self): + """ + PATCH requests to RetrieveUpdateDestroyAPIView should update an object. + """ + content = {'text': 'foobar'} + request = factory.patch('/1', json.dumps(content), + content_type='application/json') + + with self.assertNumQueries(2): + response = self.view(request, pk=1).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data, {'id': 1, 'text': 'foobar'}) + updated = self.objects.get(id=1) + self.assertEqual(updated.text, 'foobar') + + def test_delete_instance_view(self): + """ + DELETE requests to RetrieveUpdateDestroyAPIView should delete an object. + """ + request = factory.delete('/1') + with self.assertNumQueries(2): + response = self.view(request, pk=1).render() + self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) + self.assertEqual(response.content, six.b('')) + ids = [obj.id for obj in self.objects.all()] + self.assertEqual(ids, [2, 3]) + + def test_options_instance_view(self): + """ + OPTIONS requests to RetrieveUpdateDestroyAPIView should return metadata + """ + request = factory.options('/1') + with self.assertNumQueries(1): + response = self.view(request, pk=1).render() + expected = { + 'parses': [ + 'application/json', + 'application/x-www-form-urlencoded', + 'multipart/form-data' + ], + 'renders': [ + 'application/json', + 'text/html' + ], + 'name': 'Instance', + 'description': 'Example description for OPTIONS.', + 'actions': { + 'PUT': { + 'text': { + 'max_length': 100, + 'read_only': False, + 'required': True, + 'type': 'string', + 'label': 'Text comes here', + 'help_text': 'Text description.' + }, + 'id': { + 'read_only': True, + 'required': False, + 'type': 'integer', + 'label': 'ID', + }, + } + } + } + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data, expected) + + def test_get_instance_view_incorrect_arg(self): + """ + GET requests with an incorrect pk type, should raise 404, not 500. + Regression test for #890. + """ + request = factory.get('/a') + with self.assertNumQueries(0): + response = self.view(request, pk='a').render() + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + + def test_put_cannot_set_id(self): + """ + PUT requests to create a new object should not be able to set the id. 
+ """ + content = {'id': 999, 'text': 'foobar'} + request = factory.put('/1', json.dumps(content), + content_type='application/json') + with self.assertNumQueries(2): + response = self.view(request, pk=1).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data, {'id': 1, 'text': 'foobar'}) + updated = self.objects.get(id=1) + self.assertEqual(updated.text, 'foobar') + + def test_put_to_deleted_instance(self): + """ + PUT requests to RetrieveUpdateDestroyAPIView should create an object + if it does not currently exist. + """ + self.objects.get(id=1).delete() + content = {'text': 'foobar'} + request = factory.put('/1', json.dumps(content), + content_type='application/json') + with self.assertNumQueries(3): + response = self.view(request, pk=1).render() + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + self.assertEqual(response.data, {'id': 1, 'text': 'foobar'}) + updated = self.objects.get(id=1) + self.assertEqual(updated.text, 'foobar') + + def test_put_as_create_on_id_based_url(self): + """ + PUT requests to RetrieveUpdateDestroyAPIView should create an object + at the requested url if it doesn't exist. + """ + content = {'text': 'foobar'} + # pk fields can not be created on demand, only the database can set the pk for a new object + request = factory.put('/5', json.dumps(content), + content_type='application/json') + with self.assertNumQueries(3): + response = self.view(request, pk=5).render() + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + new_obj = self.objects.get(pk=5) + self.assertEqual(new_obj.text, 'foobar') + + def test_put_as_create_on_slug_based_url(self): + """ + PUT requests to RetrieveUpdateDestroyAPIView should create an object + at the requested url if possible, else return HTTP_403_FORBIDDEN error-response. + """ + content = {'text': 'foobar'} + request = factory.put('/test_slug', json.dumps(content), + content_type='application/json') + with self.assertNumQueries(2): + response = self.slug_based_view(request, slug='test_slug').render() + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + self.assertEqual(response.data, {'slug': 'test_slug', 'text': 'foobar'}) + new_obj = SlugBasedModel.objects.get(slug='test_slug') + self.assertEqual(new_obj.text, 'foobar') + + +class TestOverriddenGetObject(TestCase): + """ + Test cases for a RetrieveUpdateDestroyAPIView that does NOT use the + queryset/model mechanism but instead overrides get_object() + """ + def setUp(self): + """ + Create 3 BasicModel intances. + """ + items = ['foo', 'bar', 'baz'] + for item in items: + BasicModel(text=item).save() + self.objects = BasicModel.objects + self.data = [ + {'id': obj.id, 'text': obj.text} + for obj in self.objects.all() + ] + + class OverriddenGetObjectView(generics.RetrieveUpdateDestroyAPIView): + """ + Example detail view for override of get_object(). + """ + model = BasicModel + + def get_object(self): + pk = int(self.kwargs['pk']) + return get_object_or_404(BasicModel.objects.all(), id=pk) + + self.view = OverriddenGetObjectView.as_view() + + def test_overridden_get_object_view(self): + """ + GET requests to RetrieveUpdateDestroyAPIView should return a single object. 
+ """ + request = factory.get('/1') + with self.assertNumQueries(1): + response = self.view(request, pk=1).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data, self.data[0]) + + +# Regression test for #285 + +class CommentSerializer(serializers.ModelSerializer): + class Meta: + model = Comment + exclude = ('created',) + + +class CommentView(generics.ListCreateAPIView): + serializer_class = CommentSerializer + model = Comment + + +class TestCreateModelWithAutoNowAddField(TestCase): + def setUp(self): + self.objects = Comment.objects + self.view = CommentView.as_view() + + def test_create_model_with_auto_now_add_field(self): + """ + Regression test for #285 + + https://github.com/tomchristie/django-rest-framework/issues/285 + """ + content = {'email': 'foobar@example.com', 'content': 'foobar'} + request = factory.post('/', json.dumps(content), + content_type='application/json') + response = self.view(request).render() + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + created = self.objects.get(id=1) + self.assertEqual(created.content, 'foobar') + + +# Test for particularly ugly regression with m2m in browsable API +class ClassB(models.Model): + name = models.CharField(max_length=255) + + +class ClassA(models.Model): + name = models.CharField(max_length=255) + childs = models.ManyToManyField(ClassB, blank=True, null=True) + + +class ClassASerializer(serializers.ModelSerializer): + childs = serializers.PrimaryKeyRelatedField(many=True, source='childs') + + class Meta: + model = ClassA + + +class ExampleView(generics.ListCreateAPIView): + serializer_class = ClassASerializer + model = ClassA + + +class TestM2MBrowseableAPI(TestCase): + def test_m2m_in_browseable_api(self): + """ + Test for particularly ugly regression with m2m in browsable API + """ + request = factory.get('/', HTTP_ACCEPT='text/html') + view = ExampleView().as_view() + response = view(request).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + + +class InclusiveFilterBackend(object): + def filter_queryset(self, request, queryset, view): + return queryset.filter(text='foo') + + +class ExclusiveFilterBackend(object): + def filter_queryset(self, request, queryset, view): + return queryset.filter(text='other') + + +class TestFilterBackendAppliedToViews(TestCase): + + def setUp(self): + """ + Create 3 BasicModel instances to filter on. + """ + items = ['foo', 'bar', 'baz'] + for item in items: + BasicModel(text=item).save() + self.objects = BasicModel.objects + self.data = [ + {'id': obj.id, 'text': obj.text} + for obj in self.objects.all() + ] + + def test_get_root_view_filters_by_name_with_filter_backend(self): + """ + GET requests to ListCreateAPIView should return filtered list. + """ + root_view = RootView.as_view(filter_backends=(InclusiveFilterBackend,)) + request = factory.get('/') + response = root_view(request).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(len(response.data), 1) + self.assertEqual(response.data, [{'id': 1, 'text': 'foo'}]) + + def test_get_root_view_filters_out_all_models_with_exclusive_filter_backend(self): + """ + GET requests to ListCreateAPIView should return empty list when all models are filtered out. 
+ """ + root_view = RootView.as_view(filter_backends=(ExclusiveFilterBackend,)) + request = factory.get('/') + response = root_view(request).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data, []) + + def test_get_instance_view_filters_out_name_with_filter_backend(self): + """ + GET requests to RetrieveUpdateDestroyAPIView should raise 404 when model filtered out. + """ + instance_view = InstanceView.as_view(filter_backends=(ExclusiveFilterBackend,)) + request = factory.get('/1') + response = instance_view(request, pk=1).render() + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + self.assertEqual(response.data, {'detail': 'Not found'}) + + def test_get_instance_view_will_return_single_object_when_filter_does_not_exclude_it(self): + """ + GET requests to RetrieveUpdateDestroyAPIView should return a single object when not excluded + """ + instance_view = InstanceView.as_view(filter_backends=(InclusiveFilterBackend,)) + request = factory.get('/1') + response = instance_view(request, pk=1).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data, {'id': 1, 'text': 'foo'}) + + +class TwoFieldModel(models.Model): + field_a = models.CharField(max_length=100) + field_b = models.CharField(max_length=100) + + +class DynamicSerializerView(generics.ListCreateAPIView): + model = TwoFieldModel + renderer_classes = (renderers.BrowsableAPIRenderer, renderers.JSONRenderer) + + def get_serializer_class(self): + if self.request.method == 'POST': + class DynamicSerializer(serializers.ModelSerializer): + class Meta: + model = TwoFieldModel + fields = ('field_b',) + return DynamicSerializer + return super(DynamicSerializerView, self).get_serializer_class() + + +class TestFilterBackendAppliedToViews(TestCase): + + def test_dynamic_serializer_form_in_browsable_api(self): + """ + GET requests to ListCreateAPIView should return filtered list. + """ + view = DynamicSerializerView.as_view() + request = factory.get('/') + response = view(request).render() + self.assertContains(response, 'field_b') + self.assertNotContains(response, 'field_a') diff --git a/awx/lib/site-packages/rest_framework/tests/test_htmlrenderer.py b/awx/lib/site-packages/rest_framework/tests/test_htmlrenderer.py new file mode 100644 index 0000000000..8957a43c72 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_htmlrenderer.py @@ -0,0 +1,118 @@ +from __future__ import unicode_literals +from django.core.exceptions import PermissionDenied +from django.http import Http404 +from django.test import TestCase +from django.template import TemplateDoesNotExist, Template +import django.template.loader +from rest_framework import status +from rest_framework.compat import patterns, url +from rest_framework.decorators import api_view, renderer_classes +from rest_framework.renderers import TemplateHTMLRenderer +from rest_framework.response import Response +from rest_framework.compat import six + + +@api_view(('GET',)) +@renderer_classes((TemplateHTMLRenderer,)) +def example(request): + """ + A view that can returns an HTML representation. 
+ """ + data = {'object': 'foobar'} + return Response(data, template_name='example.html') + + +@api_view(('GET',)) +@renderer_classes((TemplateHTMLRenderer,)) +def permission_denied(request): + raise PermissionDenied() + + +@api_view(('GET',)) +@renderer_classes((TemplateHTMLRenderer,)) +def not_found(request): + raise Http404() + + +urlpatterns = patterns('', + url(r'^$', example), + url(r'^permission_denied$', permission_denied), + url(r'^not_found$', not_found), +) + + +class TemplateHTMLRendererTests(TestCase): + urls = 'rest_framework.tests.test_htmlrenderer' + + def setUp(self): + """ + Monkeypatch get_template + """ + self.get_template = django.template.loader.get_template + + def get_template(template_name): + if template_name == 'example.html': + return Template("example: {{ object }}") + raise TemplateDoesNotExist(template_name) + + django.template.loader.get_template = get_template + + def tearDown(self): + """ + Revert monkeypatching + """ + django.template.loader.get_template = self.get_template + + def test_simple_html_view(self): + response = self.client.get('/') + self.assertContains(response, "example: foobar") + self.assertEqual(response['Content-Type'], 'text/html; charset=utf-8') + + def test_not_found_html_view(self): + response = self.client.get('/not_found') + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + self.assertEqual(response.content, six.b("404 Not Found")) + self.assertEqual(response['Content-Type'], 'text/html; charset=utf-8') + + def test_permission_denied_html_view(self): + response = self.client.get('/permission_denied') + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + self.assertEqual(response.content, six.b("403 Forbidden")) + self.assertEqual(response['Content-Type'], 'text/html; charset=utf-8') + + +class TemplateHTMLRendererExceptionTests(TestCase): + urls = 'rest_framework.tests.test_htmlrenderer' + + def setUp(self): + """ + Monkeypatch get_template + """ + self.get_template = django.template.loader.get_template + + def get_template(template_name): + if template_name == '404.html': + return Template("404: {{ detail }}") + if template_name == '403.html': + return Template("403: {{ detail }}") + raise TemplateDoesNotExist(template_name) + + django.template.loader.get_template = get_template + + def tearDown(self): + """ + Revert monkeypatching + """ + django.template.loader.get_template = self.get_template + + def test_not_found_html_view_with_template(self): + response = self.client.get('/not_found') + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + self.assertEqual(response.content, six.b("404: Not found")) + self.assertEqual(response['Content-Type'], 'text/html; charset=utf-8') + + def test_permission_denied_html_view_with_template(self): + response = self.client.get('/permission_denied') + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + self.assertEqual(response.content, six.b("403: Permission denied")) + self.assertEqual(response['Content-Type'], 'text/html; charset=utf-8') diff --git a/awx/lib/site-packages/rest_framework/tests/test_hyperlinkedserializers.py b/awx/lib/site-packages/rest_framework/tests/test_hyperlinkedserializers.py new file mode 100644 index 0000000000..1894ddb27e --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_hyperlinkedserializers.py @@ -0,0 +1,303 @@ +from __future__ import unicode_literals +import json +from django.test import TestCase +from django.test.client import RequestFactory +from rest_framework import generics, 
status, serializers +from rest_framework.compat import patterns, url +from rest_framework.tests.models import Anchor, BasicModel, ManyToManyModel, BlogPost, BlogPostComment, Album, Photo, OptionalRelationModel + +factory = RequestFactory() + + +class BlogPostCommentSerializer(serializers.ModelSerializer): + url = serializers.HyperlinkedIdentityField(view_name='blogpostcomment-detail') + text = serializers.CharField() + blog_post_url = serializers.HyperlinkedRelatedField(source='blog_post', view_name='blogpost-detail') + + class Meta: + model = BlogPostComment + fields = ('text', 'blog_post_url', 'url') + + +class PhotoSerializer(serializers.Serializer): + description = serializers.CharField() + album_url = serializers.HyperlinkedRelatedField(source='album', view_name='album-detail', queryset=Album.objects.all(), slug_field='title', slug_url_kwarg='title') + + def restore_object(self, attrs, instance=None): + return Photo(**attrs) + + +class AlbumSerializer(serializers.ModelSerializer): + url = serializers.HyperlinkedIdentityField(view_name='album-detail', lookup_field='title') + + class Meta: + model = Album + fields = ('title', 'url') + + +class BasicList(generics.ListCreateAPIView): + model = BasicModel + model_serializer_class = serializers.HyperlinkedModelSerializer + + +class BasicDetail(generics.RetrieveUpdateDestroyAPIView): + model = BasicModel + model_serializer_class = serializers.HyperlinkedModelSerializer + + +class AnchorDetail(generics.RetrieveAPIView): + model = Anchor + model_serializer_class = serializers.HyperlinkedModelSerializer + + +class ManyToManyList(generics.ListAPIView): + model = ManyToManyModel + model_serializer_class = serializers.HyperlinkedModelSerializer + + +class ManyToManyDetail(generics.RetrieveAPIView): + model = ManyToManyModel + model_serializer_class = serializers.HyperlinkedModelSerializer + + +class BlogPostCommentListCreate(generics.ListCreateAPIView): + model = BlogPostComment + serializer_class = BlogPostCommentSerializer + + +class BlogPostCommentDetail(generics.RetrieveAPIView): + model = BlogPostComment + serializer_class = BlogPostCommentSerializer + + +class BlogPostDetail(generics.RetrieveAPIView): + model = BlogPost + + +class PhotoListCreate(generics.ListCreateAPIView): + model = Photo + model_serializer_class = PhotoSerializer + + +class AlbumDetail(generics.RetrieveAPIView): + model = Album + serializer_class = AlbumSerializer + lookup_field = 'title' + + +class OptionalRelationDetail(generics.RetrieveUpdateDestroyAPIView): + model = OptionalRelationModel + model_serializer_class = serializers.HyperlinkedModelSerializer + + +urlpatterns = patterns('', + url(r'^basic/$', BasicList.as_view(), name='basicmodel-list'), + url(r'^basic/(?P<pk>\d+)/$', BasicDetail.as_view(), name='basicmodel-detail'), + url(r'^anchor/(?P<pk>\d+)/$', AnchorDetail.as_view(), name='anchor-detail'), + url(r'^manytomany/$', ManyToManyList.as_view(), name='manytomanymodel-list'), + url(r'^manytomany/(?P<pk>\d+)/$', ManyToManyDetail.as_view(), name='manytomanymodel-detail'), + url(r'^posts/(?P<pk>\d+)/$', BlogPostDetail.as_view(), name='blogpost-detail'), + url(r'^comments/$', BlogPostCommentListCreate.as_view(), name='blogpostcomment-list'), + url(r'^comments/(?P<pk>\d+)/$', BlogPostCommentDetail.as_view(), name='blogpostcomment-detail'), + url(r'^albums/(?P<title>\w[\w-]*)/$', AlbumDetail.as_view(), name='album-detail'), + url(r'^photos/$', PhotoListCreate.as_view(), name='photo-list'), + url(r'^optionalrelation/(?P<pk>\d+)/$', OptionalRelationDetail.as_view(), 
name='optionalrelationmodel-detail'), +) + + +class TestBasicHyperlinkedView(TestCase): + urls = 'rest_framework.tests.test_hyperlinkedserializers' + + def setUp(self): + """ + Create 3 BasicModel instances. + """ + items = ['foo', 'bar', 'baz'] + for item in items: + BasicModel(text=item).save() + self.objects = BasicModel.objects + self.data = [ + {'url': 'http://testserver/basic/%d/' % obj.id, 'text': obj.text} + for obj in self.objects.all() + ] + self.list_view = BasicList.as_view() + self.detail_view = BasicDetail.as_view() + + def test_get_list_view(self): + """ + GET requests to ListCreateAPIView should return a list of objects. + """ + request = factory.get('/basic/') + response = self.list_view(request).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data, self.data) + + def test_get_detail_view(self): + """ + GET requests to RetrieveUpdateDestroyAPIView should return a single object. + """ + request = factory.get('/basic/1') + response = self.detail_view(request, pk=1).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data, self.data[0]) + + +class TestManyToManyHyperlinkedView(TestCase): + urls = 'rest_framework.tests.test_hyperlinkedserializers' + + def setUp(self): + """ + Create a ManyToManyModel instance linked to 3 Anchor instances. + """ + items = ['foo', 'bar', 'baz'] + anchors = [] + for item in items: + anchor = Anchor(text=item) + anchor.save() + anchors.append(anchor) + + manytomany = ManyToManyModel() + manytomany.save() + manytomany.rel.add(*anchors) + + self.data = [{ + 'url': 'http://testserver/manytomany/1/', + 'rel': [ + 'http://testserver/anchor/1/', + 'http://testserver/anchor/2/', + 'http://testserver/anchor/3/', + ] + }] + self.list_view = ManyToManyList.as_view() + self.detail_view = ManyToManyDetail.as_view() + + def test_get_list_view(self): + """ + GET requests to ListAPIView should return a list of objects. + """ + request = factory.get('/manytomany/') + response = self.list_view(request) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data, self.data) + + def test_get_detail_view(self): + """ + GET requests to RetrieveAPIView should return a single object. + """ + request = factory.get('/manytomany/1/') + response = self.detail_view(request, pk=1) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data, self.data[0]) + + +class TestHyperlinkedIdentityFieldLookup(TestCase): + urls = 'rest_framework.tests.test_hyperlinkedserializers' + + def setUp(self): + """ + Create 3 Album instances. + """ + titles = ['foo', 'bar', 'baz'] + for title in titles: + album = Album(title=title) + album.save() + self.detail_view = AlbumDetail.as_view() + self.data = { + 'foo': {'title': 'foo', 'url': 'http://testserver/albums/foo/'}, + 'bar': {'title': 'bar', 'url': 'http://testserver/albums/bar/'}, + 'baz': {'title': 'baz', 'url': 'http://testserver/albums/baz/'} + } + + def test_lookup_field(self): + """ + GET requests to AlbumDetail view should return serialized Albums + with a url field keyed by `title`.
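+ lookup_field = 'title' on AlbumDetail, matching the HyperlinkedIdentityField above, makes the title rather than the pk the URL kwarg.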
+ """ + for album in Album.objects.all(): + request = factory.get('/albums/{0}/'.format(album.title)) + response = self.detail_view(request, title=album.title) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data, self.data[album.title]) + + +class TestCreateWithForeignKeys(TestCase): + urls = 'rest_framework.tests.test_hyperlinkedserializers' + + def setUp(self): + """ + Create a blog post + """ + self.post = BlogPost.objects.create(title="Test post") + self.create_view = BlogPostCommentListCreate.as_view() + + def test_create_comment(self): + + data = { + 'text': 'A test comment', + 'blog_post_url': 'http://testserver/posts/1/' + } + + request = factory.post('/comments/', data=data) + response = self.create_view(request) + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + self.assertEqual(response['Location'], 'http://testserver/comments/1/') + self.assertEqual(self.post.blogpostcomment_set.count(), 1) + self.assertEqual(self.post.blogpostcomment_set.all()[0].text, 'A test comment') + + +class TestCreateWithForeignKeysAndCustomSlug(TestCase): + urls = 'rest_framework.tests.test_hyperlinkedserializers' + + def setUp(self): + """ + Create an Album + """ + self.post = Album.objects.create(title='test-album') + self.list_create_view = PhotoListCreate.as_view() + + def test_create_photo(self): + + data = { + 'description': 'A test photo', + 'album_url': 'http://testserver/albums/test-album/' + } + + request = factory.post('/photos/', data=data) + response = self.list_create_view(request) + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + self.assertNotIn('Location', response, msg='Location should only be included if there is a "url" field on the serializer') + self.assertEqual(self.post.photo_set.count(), 1) + self.assertEqual(self.post.photo_set.all()[0].description, 'A test photo') + + +class TestOptionalRelationHyperlinkedView(TestCase): + urls = 'rest_framework.tests.test_hyperlinkedserializers' + + def setUp(self): + """ + Create 1 OptionalRelationModel instances. + """ + OptionalRelationModel().save() + self.objects = OptionalRelationModel.objects + self.detail_view = OptionalRelationDetail.as_view() + self.data = {"url": "http://testserver/optionalrelation/1/", "other": None} + + def test_get_detail_view(self): + """ + GET requests to RetrieveAPIView with optional relations should return None + for non existing relations. + """ + request = factory.get('/optionalrelationmodel-detail/1') + response = self.detail_view(request, pk=1) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data, self.data) + + def test_put_detail_view(self): + """ + PUT requests to RetrieveUpdateDestroyAPIView with optional relations + should accept None for non existing relations. 
+ """ + response = self.client.put('/optionalrelation/1/', + data=json.dumps(self.data), + content_type='application/json') + self.assertEqual(response.status_code, status.HTTP_200_OK) diff --git a/awx/lib/site-packages/rest_framework/tests/test_multitable_inheritance.py b/awx/lib/site-packages/rest_framework/tests/test_multitable_inheritance.py new file mode 100644 index 0000000000..00c1532760 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_multitable_inheritance.py @@ -0,0 +1,67 @@ +from __future__ import unicode_literals +from django.db import models +from django.test import TestCase +from rest_framework import serializers +from rest_framework.tests.models import RESTFrameworkModel + + +# Models +class ParentModel(RESTFrameworkModel): + name1 = models.CharField(max_length=100) + + +class ChildModel(ParentModel): + name2 = models.CharField(max_length=100) + + +class AssociatedModel(RESTFrameworkModel): + ref = models.OneToOneField(ParentModel, primary_key=True) + name = models.CharField(max_length=100) + + +# Serializers +class DerivedModelSerializer(serializers.ModelSerializer): + class Meta: + model = ChildModel + + +class AssociatedModelSerializer(serializers.ModelSerializer): + class Meta: + model = AssociatedModel + + +# Tests +class IneritedModelSerializationTests(TestCase): + + def test_multitable_inherited_model_fields_as_expected(self): + """ + Assert that the parent pointer field is not included in the fields + serialized fields + """ + child = ChildModel(name1='parent name', name2='child name') + serializer = DerivedModelSerializer(child) + self.assertEqual(set(serializer.data.keys()), + set(['name1', 'name2', 'id'])) + + def test_onetoone_primary_key_model_fields_as_expected(self): + """ + Assert that a model with a onetoone field that is the primary key is + not treated like a derived model + """ + parent = ParentModel(name1='parent name') + associate = AssociatedModel(name='hello', ref=parent) + serializer = AssociatedModelSerializer(associate) + self.assertEqual(set(serializer.data.keys()), + set(['name', 'ref'])) + + def test_data_is_valid_without_parent_ptr(self): + """ + Assert that the pointer to the parent table is not a required field + for input data + """ + data = { + 'name1': 'parent name', + 'name2': 'child name', + } + serializer = DerivedModelSerializer(data=data) + self.assertEqual(serializer.is_valid(), True) diff --git a/awx/lib/site-packages/rest_framework/tests/test_negotiation.py b/awx/lib/site-packages/rest_framework/tests/test_negotiation.py new file mode 100644 index 0000000000..7f84827f0e --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_negotiation.py @@ -0,0 +1,45 @@ +from __future__ import unicode_literals +from django.test import TestCase +from django.test.client import RequestFactory +from rest_framework.negotiation import DefaultContentNegotiation +from rest_framework.request import Request +from rest_framework.renderers import BaseRenderer + + +factory = RequestFactory() + + +class MockJSONRenderer(BaseRenderer): + media_type = 'application/json' + + +class MockHTMLRenderer(BaseRenderer): + media_type = 'text/html' + + +class NoCharsetSpecifiedRenderer(BaseRenderer): + media_type = 'my/media' + + +class TestAcceptedMediaType(TestCase): + def setUp(self): + self.renderers = [MockJSONRenderer(), MockHTMLRenderer()] + self.negotiator = DefaultContentNegotiation() + + def select_renderer(self, request): + return self.negotiator.select_renderer(request, self.renderers) + + def 
test_client_without_accept_use_renderer(self): + request = Request(factory.get('/')) + accepted_renderer, accepted_media_type = self.select_renderer(request) + self.assertEqual(accepted_media_type, 'application/json') + + def test_client_underspecifies_accept_use_renderer(self): + request = Request(factory.get('/', HTTP_ACCEPT='*/*')) + accepted_renderer, accepted_media_type = self.select_renderer(request) + self.assertEqual(accepted_media_type, 'application/json') + + def test_client_overspecifies_accept_use_client(self): + request = Request(factory.get('/', HTTP_ACCEPT='application/json; indent=8')) + accepted_renderer, accepted_media_type = self.select_renderer(request) + self.assertEqual(accepted_media_type, 'application/json; indent=8') diff --git a/awx/lib/site-packages/rest_framework/tests/test_pagination.py b/awx/lib/site-packages/rest_framework/tests/test_pagination.py new file mode 100644 index 0000000000..e538a78e5b --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_pagination.py @@ -0,0 +1,385 @@ +from __future__ import unicode_literals +import datetime +from decimal import Decimal +from django.db import models +from django.core.paginator import Paginator +from django.test import TestCase +from django.test.client import RequestFactory +from django.utils import unittest +from rest_framework import generics, status, pagination, filters, serializers +from rest_framework.compat import django_filters +from rest_framework.tests.models import BasicModel + +factory = RequestFactory() + + +class FilterableItem(models.Model): + text = models.CharField(max_length=100) + decimal = models.DecimalField(max_digits=4, decimal_places=2) + date = models.DateField() + + +class RootView(generics.ListCreateAPIView): + """ + Example description for OPTIONS. + """ + model = BasicModel + paginate_by = 10 + + +class DefaultPageSizeKwargView(generics.ListAPIView): + """ + View for testing default paginate_by_param usage + """ + model = BasicModel + + +class PaginateByParamView(generics.ListAPIView): + """ + View for testing custom paginate_by_param usage + """ + model = BasicModel + paginate_by_param = 'page_size' + + +class IntegrationTestPagination(TestCase): + """ + Integration tests for paginated list views. + """ + + def setUp(self): + """ + Create 26 BasicModel instances. + """ + for char in 'abcdefghijklmnopqrstuvwxyz': + BasicModel(text=char * 3).save() + self.objects = BasicModel.objects + self.data = [ + {'id': obj.id, 'text': obj.text} + for obj in self.objects.all() + ] + self.view = RootView.as_view() + + def test_get_paginated_root_view(self): + """ + GET requests to paginated ListCreateAPIView should return paginated results. 
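+ With paginate_by = 10 and 26 fixtures, the payload should carry count/next/previous/results keys across three pages of 10, 10 and 6 items.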
+ """ + request = factory.get('/') + # Note: Database queries are a `SELECT COUNT`, and `SELECT <fields>` + with self.assertNumQueries(2): + response = self.view(request).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data['count'], 26) + self.assertEqual(response.data['results'], self.data[:10]) + self.assertNotEqual(response.data['next'], None) + self.assertEqual(response.data['previous'], None) + + request = factory.get(response.data['next']) + with self.assertNumQueries(2): + response = self.view(request).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data['count'], 26) + self.assertEqual(response.data['results'], self.data[10:20]) + self.assertNotEqual(response.data['next'], None) + self.assertNotEqual(response.data['previous'], None) + + request = factory.get(response.data['next']) + with self.assertNumQueries(2): + response = self.view(request).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data['count'], 26) + self.assertEqual(response.data['results'], self.data[20:]) + self.assertEqual(response.data['next'], None) + self.assertNotEqual(response.data['previous'], None) + + +class IntegrationTestPaginationAndFiltering(TestCase): + + def setUp(self): + """ + Create 50 FilterableItem instances. + """ + base_data = ('a', Decimal('0.25'), datetime.date(2012, 10, 8)) + for i in range(26): + text = chr(i + ord(base_data[0])) * 3 # Produces string 'aaa', 'bbb', etc. + decimal = base_data[1] + i + date = base_data[2] - datetime.timedelta(days=i * 2) + FilterableItem(text=text, decimal=decimal, date=date).save() + + self.objects = FilterableItem.objects + self.data = [ + {'id': obj.id, 'text': obj.text, 'decimal': obj.decimal, 'date': obj.date} + for obj in self.objects.all() + ] + + @unittest.skipUnless(django_filters, 'django-filters not installed') + def test_get_django_filter_paginated_filtered_root_view(self): + """ + GET requests to paginated filtered ListCreateAPIView should return + paginated results. The next and previous links should preserve the + filtered parameters. 
+ """ + class DecimalFilter(django_filters.FilterSet): + decimal = django_filters.NumberFilter(lookup_type='lt') + + class Meta: + model = FilterableItem + fields = ['text', 'decimal', 'date'] + + class FilterFieldsRootView(generics.ListCreateAPIView): + model = FilterableItem + paginate_by = 10 + filter_class = DecimalFilter + filter_backends = (filters.DjangoFilterBackend,) + + view = FilterFieldsRootView.as_view() + + EXPECTED_NUM_QUERIES = 2 + + request = factory.get('/?decimal=15.20') + with self.assertNumQueries(EXPECTED_NUM_QUERIES): + response = view(request).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data['count'], 15) + self.assertEqual(response.data['results'], self.data[:10]) + self.assertNotEqual(response.data['next'], None) + self.assertEqual(response.data['previous'], None) + + request = factory.get(response.data['next']) + with self.assertNumQueries(EXPECTED_NUM_QUERIES): + response = view(request).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data['count'], 15) + self.assertEqual(response.data['results'], self.data[10:15]) + self.assertEqual(response.data['next'], None) + self.assertNotEqual(response.data['previous'], None) + + request = factory.get(response.data['previous']) + with self.assertNumQueries(EXPECTED_NUM_QUERIES): + response = view(request).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data['count'], 15) + self.assertEqual(response.data['results'], self.data[:10]) + self.assertNotEqual(response.data['next'], None) + self.assertEqual(response.data['previous'], None) + + def test_get_basic_paginated_filtered_root_view(self): + """ + Same as `test_get_django_filter_paginated_filtered_root_view`, + except using a custom filter backend instead of the django-filter + backend, + """ + + class DecimalFilterBackend(filters.BaseFilterBackend): + def filter_queryset(self, request, queryset, view): + return queryset.filter(decimal__lt=Decimal(request.GET['decimal'])) + + class BasicFilterFieldsRootView(generics.ListCreateAPIView): + model = FilterableItem + paginate_by = 10 + filter_backends = (DecimalFilterBackend,) + + view = BasicFilterFieldsRootView.as_view() + + request = factory.get('/?decimal=15.20') + with self.assertNumQueries(2): + response = view(request).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data['count'], 15) + self.assertEqual(response.data['results'], self.data[:10]) + self.assertNotEqual(response.data['next'], None) + self.assertEqual(response.data['previous'], None) + + request = factory.get(response.data['next']) + with self.assertNumQueries(2): + response = view(request).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data['count'], 15) + self.assertEqual(response.data['results'], self.data[10:15]) + self.assertEqual(response.data['next'], None) + self.assertNotEqual(response.data['previous'], None) + + request = factory.get(response.data['previous']) + with self.assertNumQueries(2): + response = view(request).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data['count'], 15) + self.assertEqual(response.data['results'], self.data[:10]) + self.assertNotEqual(response.data['next'], None) + self.assertEqual(response.data['previous'], None) + + +class PassOnContextPaginationSerializer(pagination.PaginationSerializer): + class Meta: + 
object_serializer_class = serializers.Serializer + + +class UnitTestPagination(TestCase): + """ + Unit tests for pagination of primitive objects. + """ + + def setUp(self): + self.objects = [char * 3 for char in 'abcdefghijklmnopqrstuvwxyz'] + paginator = Paginator(self.objects, 10) + self.first_page = paginator.page(1) + self.last_page = paginator.page(3) + + def test_native_pagination(self): + serializer = pagination.PaginationSerializer(self.first_page) + self.assertEqual(serializer.data['count'], 26) + self.assertEqual(serializer.data['next'], '?page=2') + self.assertEqual(serializer.data['previous'], None) + self.assertEqual(serializer.data['results'], self.objects[:10]) + + serializer = pagination.PaginationSerializer(self.last_page) + self.assertEqual(serializer.data['count'], 26) + self.assertEqual(serializer.data['next'], None) + self.assertEqual(serializer.data['previous'], '?page=2') + self.assertEqual(serializer.data['results'], self.objects[20:]) + + def test_context_available_in_result(self): + """ + Ensure context gets passed through to the object serializer. + """ + serializer = PassOnContextPaginationSerializer(self.first_page, context={'foo': 'bar'}) + serializer.data # accessing .data forces evaluation, so the context is passed down + results = serializer.fields[serializer.results_field] + self.assertEqual(serializer.context, results.context) + + +class TestUnpaginated(TestCase): + """ + Tests for list views without pagination. + """ + + def setUp(self): + """ + Create 13 BasicModel instances. + """ + for i in range(13): + BasicModel(text=i).save() + self.objects = BasicModel.objects + self.data = [ + {'id': obj.id, 'text': obj.text} + for obj in self.objects.all() + ] + self.view = DefaultPageSizeKwargView.as_view() + + def test_unpaginated(self): + """ + Tests the default page size for this view: + no page size --> no limit --> no pagination metadata + """ + request = factory.get('/') + response = self.view(request) + self.assertEqual(response.data, self.data) + + +class TestCustomPaginateByParam(TestCase): + """ + Tests for list views with a custom paginate_by_param kwarg. + """ + + def setUp(self): + """ + Create 13 BasicModel instances. + """ + for i in range(13): + BasicModel(text=i).save() + self.objects = BasicModel.objects + self.data = [ + {'id': obj.id, 'text': obj.text} + for obj in self.objects.all() + ] + self.view = PaginateByParamView.as_view() + + def test_default_page_size(self): + """ + Tests the default page size for this view: + no page size --> no limit --> no pagination metadata + """ + request = factory.get('/') + response = self.view(request).render() + self.assertEqual(response.data, self.data) + + def test_paginate_by_param(self): + """ + If paginate_by_param is set, the client can use that query parameter + to set the page size per request.
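+ e.g. GET /?page_size=5 against the 13 fixtures should report count == 13 but return only the first five results.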
+ """ + request = factory.get('/?page_size=5') + response = self.view(request).render() + self.assertEqual(response.data['count'], 13) + self.assertEqual(response.data['results'], self.data[:5]) + + +### Tests for context in pagination serializers + +class CustomField(serializers.Field): + def to_native(self, value): + if not 'view' in self.context: + raise RuntimeError("context isn't getting passed into custom field") + return "value" + + +class BasicModelSerializer(serializers.Serializer): + text = CustomField() + + def __init__(self, *args, **kwargs): + super(BasicModelSerializer, self).__init__(*args, **kwargs) + if not 'view' in self.context: + raise RuntimeError("context isn't getting passed into serializer init") + + +class TestContextPassedToCustomField(TestCase): + def setUp(self): + BasicModel.objects.create(text='ala ma kota') + + def test_with_pagination(self): + class ListView(generics.ListCreateAPIView): + model = BasicModel + serializer_class = BasicModelSerializer + paginate_by = 1 + + self.view = ListView.as_view() + request = factory.get('/') + response = self.view(request).render() + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + +### Tests for custom pagination serializers + +class LinksSerializer(serializers.Serializer): + next = pagination.NextPageField(source='*') + prev = pagination.PreviousPageField(source='*') + + +class CustomPaginationSerializer(pagination.BasePaginationSerializer): + links = LinksSerializer(source='*') # Takes the page object as the source + total_results = serializers.Field(source='paginator.count') + + results_field = 'objects' + + +class TestCustomPaginationSerializer(TestCase): + def setUp(self): + objects = ['john', 'paul', 'george', 'ringo'] + paginator = Paginator(objects, 2) + self.page = paginator.page(1) + + def test_custom_pagination_serializer(self): + request = RequestFactory().get('/foobar') + serializer = CustomPaginationSerializer( + instance=self.page, + context={'request': request} + ) + expected = { + 'links': { + 'next': 'http://testserver/foobar?page=2', + 'prev': None + }, + 'total_results': 4, + 'objects': ['john', 'paul'] + } + self.assertEqual(serializer.data, expected) diff --git a/awx/lib/site-packages/rest_framework/tests/test_parsers.py b/awx/lib/site-packages/rest_framework/tests/test_parsers.py new file mode 100644 index 0000000000..7699e10c91 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_parsers.py @@ -0,0 +1,115 @@ +from __future__ import unicode_literals +from rest_framework.compat import StringIO +from django import forms +from django.core.files.uploadhandler import MemoryFileUploadHandler +from django.test import TestCase +from django.utils import unittest +from rest_framework.compat import etree +from rest_framework.parsers import FormParser, FileUploadParser +from rest_framework.parsers import XMLParser +import datetime + + +class Form(forms.Form): + field1 = forms.CharField(max_length=3) + field2 = forms.CharField() + + +class TestFormParser(TestCase): + def setUp(self): + self.string = "field1=abc&field2=defghijk" + + def test_parse(self): + """ Make sure the `QueryDict` works OK """ + parser = FormParser() + + stream = StringIO(self.string) + data = parser.parse(stream) + + self.assertEqual(Form(data).is_valid(), True) + + +class TestXMLParser(TestCase): + def setUp(self): + self._input = StringIO( + '<?xml version="1.0" encoding="utf-8"?>' + '<root>' + '<field_a>121.0</field_a>' + '<field_b>dasd</field_b>' + '<field_c></field_c>' + '<field_d>2011-12-25 
12:45:00</field_d>' + '</root>' + ) + self._data = { + 'field_a': 121, + 'field_b': 'dasd', + 'field_c': None, + 'field_d': datetime.datetime(2011, 12, 25, 12, 45, 00) + } + self._complex_data_input = StringIO( + '<?xml version="1.0" encoding="utf-8"?>' + '<root>' + '<creation_date>2011-12-25 12:45:00</creation_date>' + '<sub_data_list>' + '<list-item><sub_id>1</sub_id><sub_name>first</sub_name></list-item>' + '<list-item><sub_id>2</sub_id><sub_name>second</sub_name></list-item>' + '</sub_data_list>' + '<name>name</name>' + '</root>' + ) + self._complex_data = { + "creation_date": datetime.datetime(2011, 12, 25, 12, 45, 00), + "name": "name", + "sub_data_list": [ + { + "sub_id": 1, + "sub_name": "first" + }, + { + "sub_id": 2, + "sub_name": "second" + } + ] + } + + @unittest.skipUnless(etree, 'defusedxml not installed') + def test_parse(self): + parser = XMLParser() + data = parser.parse(self._input) + self.assertEqual(data, self._data) + + @unittest.skipUnless(etree, 'defusedxml not installed') + def test_complex_data_parse(self): + parser = XMLParser() + data = parser.parse(self._complex_data_input) + self.assertEqual(data, self._complex_data) + + +class TestFileUploadParser(TestCase): + def setUp(self): + class MockRequest(object): + pass + from io import BytesIO + self.stream = BytesIO( + "Test text file".encode('utf-8') + ) + request = MockRequest() + request.upload_handlers = (MemoryFileUploadHandler(),) + request.META = { + 'HTTP_CONTENT_DISPOSITION': 'Content-Disposition: inline; filename=file.txt'.encode('utf-8'), + 'HTTP_CONTENT_LENGTH': 14, + } + self.parser_context = {'request': request, 'kwargs': {}} + + def test_parse(self): + """ Make sure the file contents are parsed into an uploaded file object """ + parser = FileUploadParser() + self.stream.seek(0) + data_and_files = parser.parse(self.stream, None, self.parser_context) + file_obj = data_and_files.files['file'] + self.assertEqual(file_obj._size, 14) + + def test_get_filename(self): + parser = FileUploadParser() + filename = parser.get_filename(self.stream, None, self.parser_context) + self.assertEqual(filename, 'file.txt'.encode('utf-8')) diff --git a/awx/lib/site-packages/rest_framework/tests/test_permissions.py b/awx/lib/site-packages/rest_framework/tests/test_permissions.py new file mode 100644 index 0000000000..6caaf65b02 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_permissions.py @@ -0,0 +1,195 @@ +from __future__ import unicode_literals +from django.contrib.auth.models import User, Permission +from django.db import models +from django.test import TestCase +from rest_framework import generics, status, permissions, authentication, HTTP_HEADER_ENCODING +from rest_framework.tests.utils import RequestFactory +import base64 +import json + +factory = RequestFactory() + + +class BasicModel(models.Model): + text = models.CharField(max_length=100) + + +class RootView(generics.ListCreateAPIView): + model = BasicModel + authentication_classes = [authentication.BasicAuthentication] + permission_classes = [permissions.DjangoModelPermissions] + + +class InstanceView(generics.RetrieveUpdateDestroyAPIView): + model = BasicModel + authentication_classes = [authentication.BasicAuthentication] + permission_classes = [permissions.DjangoModelPermissions] + +root_view = RootView.as_view() +instance_view = InstanceView.as_view() + + +def basic_auth_header(username, password): + credentials = ('%s:%s' % (username, password)) + base64_credentials = base64.b64encode(credentials.encode(HTTP_HEADER_ENCODING)).decode(HTTP_HEADER_ENCODING) + return 'Basic 
%s' % base64_credentials + + +class ModelPermissionsIntegrationTests(TestCase): + def setUp(self): + User.objects.create_user('disallowed', 'disallowed@example.com', 'password') + user = User.objects.create_user('permitted', 'permitted@example.com', 'password') + user.user_permissions = [ + Permission.objects.get(codename='add_basicmodel'), + Permission.objects.get(codename='change_basicmodel'), + Permission.objects.get(codename='delete_basicmodel') + ] + user = User.objects.create_user('updateonly', 'updateonly@example.com', 'password') + user.user_permissions = [ + Permission.objects.get(codename='change_basicmodel'), + ] + + self.permitted_credentials = basic_auth_header('permitted', 'password') + self.disallowed_credentials = basic_auth_header('disallowed', 'password') + self.updateonly_credentials = basic_auth_header('updateonly', 'password') + + BasicModel(text='foo').save() + + def test_has_create_permissions(self): + request = factory.post('/', json.dumps({'text': 'foobar'}), + content_type='application/json', + HTTP_AUTHORIZATION=self.permitted_credentials) + response = root_view(request, pk=1) + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + + def test_has_put_permissions(self): + request = factory.put('/1', json.dumps({'text': 'foobar'}), + content_type='application/json', + HTTP_AUTHORIZATION=self.permitted_credentials) + response = instance_view(request, pk='1') + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_has_delete_permissions(self): + request = factory.delete('/1', HTTP_AUTHORIZATION=self.permitted_credentials) + response = instance_view(request, pk=1) + self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) + + def test_does_not_have_create_permissions(self): + request = factory.post('/', json.dumps({'text': 'foobar'}), + content_type='application/json', + HTTP_AUTHORIZATION=self.disallowed_credentials) + response = root_view(request, pk=1) + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + + def test_does_not_have_put_permissions(self): + request = factory.put('/1', json.dumps({'text': 'foobar'}), + content_type='application/json', + HTTP_AUTHORIZATION=self.disallowed_credentials) + response = instance_view(request, pk='1') + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + + def test_does_not_have_delete_permissions(self): + request = factory.delete('/1', HTTP_AUTHORIZATION=self.disallowed_credentials) + response = instance_view(request, pk=1) + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + + def test_has_put_as_create_permissions(self): + # User only has update permissions - should be able to update an entity. + request = factory.put('/1', json.dumps({'text': 'foobar'}), + content_type='application/json', + HTTP_AUTHORIZATION=self.updateonly_credentials) + response = instance_view(request, pk='1') + self.assertEqual(response.status_code, status.HTTP_200_OK) + + # But if PUTting to a new entity, permission should be denied. 
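+ # A PUT that would create a new object is effectively an "add"; the + # updateonly user holds only change_basicmodel, so expect 403 here.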
+ request = factory.put('/2', json.dumps({'text': 'foobar'}), + content_type='application/json', + HTTP_AUTHORIZATION=self.updateonly_credentials) + response = instance_view(request, pk='2') + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + + def test_options_permitted(self): + request = factory.options('/', content_type='application/json', + HTTP_AUTHORIZATION=self.permitted_credentials) + response = root_view(request, pk='1') + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn('actions', response.data) + self.assertEqual(list(response.data['actions'].keys()), ['POST']) + + request = factory.options('/1', content_type='application/json', + HTTP_AUTHORIZATION=self.permitted_credentials) + response = instance_view(request, pk='1') + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn('actions', response.data) + self.assertEqual(list(response.data['actions'].keys()), ['PUT']) + + def test_options_disallowed(self): + request = factory.options('/', content_type='application/json', + HTTP_AUTHORIZATION=self.disallowed_credentials) + response = root_view(request, pk='1') + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertNotIn('actions', response.data) + + request = factory.options('/1', content_type='application/json', + HTTP_AUTHORIZATION=self.disallowed_credentials) + response = instance_view(request, pk='1') + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertNotIn('actions', response.data) + + def test_options_updateonly(self): + request = factory.options('/', content_type='application/json', + HTTP_AUTHORIZATION=self.updateonly_credentials) + response = root_view(request, pk='1') + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertNotIn('actions', response.data) + + request = factory.options('/1', content_type='application/json', + HTTP_AUTHORIZATION=self.updateonly_credentials) + response = instance_view(request, pk='1') + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn('actions', response.data) + self.assertEqual(list(response.data['actions'].keys()), ['PUT']) + + +class OwnerModel(models.Model): + text = models.CharField(max_length=100) + owner = models.ForeignKey(User) + + +class IsOwnerPermission(permissions.BasePermission): + def has_object_permission(self, request, view, obj): + return request.user == obj.owner + + +class OwnerInstanceView(generics.RetrieveUpdateDestroyAPIView): + model = OwnerModel + authentication_classes = [authentication.BasicAuthentication] + permission_classes = [IsOwnerPermission] + + +owner_instance_view = OwnerInstanceView.as_view() + + +class ObjectPermissionsIntegrationTests(TestCase): + """ + Integration tests for the object level permissions API. 
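+ IsOwnerPermission.has_object_permission() runs against the fetched instance, so only the owner may act on it.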
+ """ + + def setUp(self): + User.objects.create_user('not_owner', 'not_owner@example.com', 'password') + user = User.objects.create_user('owner', 'owner@example.com', 'password') + + self.not_owner_credentials = basic_auth_header('not_owner', 'password') + self.owner_credentials = basic_auth_header('owner', 'password') + + OwnerModel(text='foo', owner=user).save() + + def test_owner_has_delete_permissions(self): + request = factory.delete('/1', HTTP_AUTHORIZATION=self.owner_credentials) + response = owner_instance_view(request, pk='1') + self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) + + def test_non_owner_does_not_have_delete_permissions(self): + request = factory.delete('/1', HTTP_AUTHORIZATION=self.not_owner_credentials) + response = owner_instance_view(request, pk='1') + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) diff --git a/awx/lib/site-packages/rest_framework/tests/test_relations.py b/awx/lib/site-packages/rest_framework/tests/test_relations.py new file mode 100644 index 0000000000..d19219c908 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_relations.py @@ -0,0 +1,100 @@ +""" +General tests for relational fields. +""" +from __future__ import unicode_literals +from django.db import models +from django.test import TestCase +from rest_framework import serializers +from rest_framework.tests.models import BlogPost + + +class NullModel(models.Model): + pass + + +class FieldTests(TestCase): + def test_pk_related_field_with_empty_string(self): + """ + Regression test for #446 + + https://github.com/tomchristie/django-rest-framework/issues/446 + """ + field = serializers.PrimaryKeyRelatedField(queryset=NullModel.objects.all()) + self.assertRaises(serializers.ValidationError, field.from_native, '') + self.assertRaises(serializers.ValidationError, field.from_native, []) + + def test_hyperlinked_related_field_with_empty_string(self): + field = serializers.HyperlinkedRelatedField(queryset=NullModel.objects.all(), view_name='') + self.assertRaises(serializers.ValidationError, field.from_native, '') + self.assertRaises(serializers.ValidationError, field.from_native, []) + + def test_slug_related_field_with_empty_string(self): + field = serializers.SlugRelatedField(queryset=NullModel.objects.all(), slug_field='pk') + self.assertRaises(serializers.ValidationError, field.from_native, '') + self.assertRaises(serializers.ValidationError, field.from_native, []) + + +class TestManyRelatedMixin(TestCase): + def test_missing_many_to_many_related_field(self): + ''' + Regression test for #632 + + https://github.com/tomchristie/django-rest-framework/pull/632 + ''' + field = serializers.RelatedField(many=True, read_only=False) + + into = {} + field.field_from_native({}, None, 'field_name', into) + self.assertEqual(into['field_name'], []) + + +# Regression tests for #694 (`source` attribute on related fields) + +class RelatedFieldSourceTests(TestCase): + def test_related_manager_source(self): + """ + Relational fields should be able to use manager-returning methods as their source. 
+ """ + BlogPost.objects.create(title='blah') + field = serializers.RelatedField(many=True, source='get_blogposts_manager') + + class ClassWithManagerMethod(object): + def get_blogposts_manager(self): + return BlogPost.objects + + obj = ClassWithManagerMethod() + value = field.field_to_native(obj, 'field_name') + self.assertEqual(value, ['BlogPost object']) + + def test_related_queryset_source(self): + """ + Relational fields should be able to use queryset-returning methods as their source. + """ + BlogPost.objects.create(title='blah') + field = serializers.RelatedField(many=True, source='get_blogposts_queryset') + + class ClassWithQuerysetMethod(object): + def get_blogposts_queryset(self): + return BlogPost.objects.all() + + obj = ClassWithQuerysetMethod() + value = field.field_to_native(obj, 'field_name') + self.assertEqual(value, ['BlogPost object']) + + def test_dotted_source(self): + """ + Source argument should support dotted.source notation. + """ + BlogPost.objects.create(title='blah') + field = serializers.RelatedField(many=True, source='a.b.c') + + class ClassWithQuerysetMethod(object): + a = { + 'b': { + 'c': BlogPost.objects.all() + } + } + + obj = ClassWithQuerysetMethod() + value = field.field_to_native(obj, 'field_name') + self.assertEqual(value, ['BlogPost object']) diff --git a/awx/lib/site-packages/rest_framework/tests/test_relations_hyperlink.py b/awx/lib/site-packages/rest_framework/tests/test_relations_hyperlink.py new file mode 100644 index 0000000000..2ca7f4f2b3 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_relations_hyperlink.py @@ -0,0 +1,524 @@ +from __future__ import unicode_literals +from django.test import TestCase +from django.test.client import RequestFactory +from rest_framework import serializers +from rest_framework.compat import patterns, url +from rest_framework.tests.models import ( + BlogPost, + ManyToManyTarget, ManyToManySource, ForeignKeyTarget, ForeignKeySource, + NullableForeignKeySource, OneToOneTarget, NullableOneToOneSource +) + +factory = RequestFactory() +request = factory.get('/') # Just to ensure we have a request in the serializer context + + +def dummy_view(request, pk): + pass + +urlpatterns = patterns('', + url(r'^dummyurl/(?P<pk>[0-9]+)/$', dummy_view, name='dummy-url'), + url(r'^manytomanysource/(?P<pk>[0-9]+)/$', dummy_view, name='manytomanysource-detail'), + url(r'^manytomanytarget/(?P<pk>[0-9]+)/$', dummy_view, name='manytomanytarget-detail'), + url(r'^foreignkeysource/(?P<pk>[0-9]+)/$', dummy_view, name='foreignkeysource-detail'), + url(r'^foreignkeytarget/(?P<pk>[0-9]+)/$', dummy_view, name='foreignkeytarget-detail'), + url(r'^nullableforeignkeysource/(?P<pk>[0-9]+)/$', dummy_view, name='nullableforeignkeysource-detail'), + url(r'^onetoonetarget/(?P<pk>[0-9]+)/$', dummy_view, name='onetoonetarget-detail'), + url(r'^nullableonetoonesource/(?P<pk>[0-9]+)/$', dummy_view, name='nullableonetoonesource-detail'), +) + + +# ManyToMany +class ManyToManyTargetSerializer(serializers.HyperlinkedModelSerializer): + class Meta: + model = ManyToManyTarget + fields = ('url', 'name', 'sources') + + +class ManyToManySourceSerializer(serializers.HyperlinkedModelSerializer): + class Meta: + model = ManyToManySource + fields = ('url', 'name', 'targets') + + +# ForeignKey +class ForeignKeyTargetSerializer(serializers.HyperlinkedModelSerializer): + class Meta: + model = ForeignKeyTarget + fields = ('url', 'name', 'sources') + + +class ForeignKeySourceSerializer(serializers.HyperlinkedModelSerializer): + class Meta: + model = 
ForeignKeySource + fields = ('url', 'name', 'target') + + +# Nullable ForeignKey +class NullableForeignKeySourceSerializer(serializers.HyperlinkedModelSerializer): + class Meta: + model = NullableForeignKeySource + fields = ('url', 'name', 'target') + + +# Nullable OneToOne +class NullableOneToOneTargetSerializer(serializers.HyperlinkedModelSerializer): + class Meta: + model = OneToOneTarget + fields = ('url', 'name', 'nullable_source') + + +# TODO: Add test that .data cannot be accessed prior to .is_valid + +class HyperlinkedManyToManyTests(TestCase): + urls = 'rest_framework.tests.test_relations_hyperlink' + + def setUp(self): + for idx in range(1, 4): + target = ManyToManyTarget(name='target-%d' % idx) + target.save() + source = ManyToManySource(name='source-%d' % idx) + source.save() + for target in ManyToManyTarget.objects.all(): + source.targets.add(target) + + def test_many_to_many_retrieve(self): + queryset = ManyToManySource.objects.all() + serializer = ManyToManySourceSerializer(queryset, many=True, context={'request': request}) + expected = [ + {'url': 'http://testserver/manytomanysource/1/', 'name': 'source-1', 'targets': ['http://testserver/manytomanytarget/1/']}, + {'url': 'http://testserver/manytomanysource/2/', 'name': 'source-2', 'targets': ['http://testserver/manytomanytarget/1/', 'http://testserver/manytomanytarget/2/']}, + {'url': 'http://testserver/manytomanysource/3/', 'name': 'source-3', 'targets': ['http://testserver/manytomanytarget/1/', 'http://testserver/manytomanytarget/2/', 'http://testserver/manytomanytarget/3/']} + ] + self.assertEqual(serializer.data, expected) + + def test_reverse_many_to_many_retrieve(self): + queryset = ManyToManyTarget.objects.all() + serializer = ManyToManyTargetSerializer(queryset, many=True, context={'request': request}) + expected = [ + {'url': 'http://testserver/manytomanytarget/1/', 'name': 'target-1', 'sources': ['http://testserver/manytomanysource/1/', 'http://testserver/manytomanysource/2/', 'http://testserver/manytomanysource/3/']}, + {'url': 'http://testserver/manytomanytarget/2/', 'name': 'target-2', 'sources': ['http://testserver/manytomanysource/2/', 'http://testserver/manytomanysource/3/']}, + {'url': 'http://testserver/manytomanytarget/3/', 'name': 'target-3', 'sources': ['http://testserver/manytomanysource/3/']} + ] + self.assertEqual(serializer.data, expected) + + def test_many_to_many_update(self): + data = {'url': 'http://testserver/manytomanysource/1/', 'name': 'source-1', 'targets': ['http://testserver/manytomanytarget/1/', 'http://testserver/manytomanytarget/2/', 'http://testserver/manytomanytarget/3/']} + instance = ManyToManySource.objects.get(pk=1) + serializer = ManyToManySourceSerializer(instance, data=data, context={'request': request}) + self.assertTrue(serializer.is_valid()) + serializer.save() + self.assertEqual(serializer.data, data) + + # Ensure source 1 is updated, and everything else is as expected + queryset = ManyToManySource.objects.all() + serializer = ManyToManySourceSerializer(queryset, many=True, context={'request': request}) + expected = [ + {'url': 'http://testserver/manytomanysource/1/', 'name': 'source-1', 'targets': ['http://testserver/manytomanytarget/1/', 'http://testserver/manytomanytarget/2/', 'http://testserver/manytomanytarget/3/']}, + {'url': 'http://testserver/manytomanysource/2/', 'name': 'source-2', 'targets': ['http://testserver/manytomanytarget/1/', 'http://testserver/manytomanytarget/2/']}, + {'url': 'http://testserver/manytomanysource/3/', 'name': 'source-3', 'targets': 
['http://testserver/manytomanytarget/1/', 'http://testserver/manytomanytarget/2/', 'http://testserver/manytomanytarget/3/']} + ] + self.assertEqual(serializer.data, expected) + + def test_reverse_many_to_many_update(self): + data = {'url': 'http://testserver/manytomanytarget/1/', 'name': 'target-1', 'sources': ['http://testserver/manytomanysource/1/']} + instance = ManyToManyTarget.objects.get(pk=1) + serializer = ManyToManyTargetSerializer(instance, data=data, context={'request': request}) + self.assertTrue(serializer.is_valid()) + serializer.save() + self.assertEqual(serializer.data, data) + + # Ensure target 1 is updated, and everything else is as expected + queryset = ManyToManyTarget.objects.all() + serializer = ManyToManyTargetSerializer(queryset, many=True, context={'request': request}) + expected = [ + {'url': 'http://testserver/manytomanytarget/1/', 'name': 'target-1', 'sources': ['http://testserver/manytomanysource/1/']}, + {'url': 'http://testserver/manytomanytarget/2/', 'name': 'target-2', 'sources': ['http://testserver/manytomanysource/2/', 'http://testserver/manytomanysource/3/']}, + {'url': 'http://testserver/manytomanytarget/3/', 'name': 'target-3', 'sources': ['http://testserver/manytomanysource/3/']} + + ] + self.assertEqual(serializer.data, expected) + + def test_many_to_many_create(self): + data = {'url': 'http://testserver/manytomanysource/4/', 'name': 'source-4', 'targets': ['http://testserver/manytomanytarget/1/', 'http://testserver/manytomanytarget/3/']} + serializer = ManyToManySourceSerializer(data=data, context={'request': request}) + self.assertTrue(serializer.is_valid()) + obj = serializer.save() + self.assertEqual(serializer.data, data) + self.assertEqual(obj.name, 'source-4') + + # Ensure source 4 is added, and everything else is as expected + queryset = ManyToManySource.objects.all() + serializer = ManyToManySourceSerializer(queryset, many=True, context={'request': request}) + expected = [ + {'url': 'http://testserver/manytomanysource/1/', 'name': 'source-1', 'targets': ['http://testserver/manytomanytarget/1/']}, + {'url': 'http://testserver/manytomanysource/2/', 'name': 'source-2', 'targets': ['http://testserver/manytomanytarget/1/', 'http://testserver/manytomanytarget/2/']}, + {'url': 'http://testserver/manytomanysource/3/', 'name': 'source-3', 'targets': ['http://testserver/manytomanytarget/1/', 'http://testserver/manytomanytarget/2/', 'http://testserver/manytomanytarget/3/']}, + {'url': 'http://testserver/manytomanysource/4/', 'name': 'source-4', 'targets': ['http://testserver/manytomanytarget/1/', 'http://testserver/manytomanytarget/3/']} + ] + self.assertEqual(serializer.data, expected) + + def test_reverse_many_to_many_create(self): + data = {'url': 'http://testserver/manytomanytarget/4/', 'name': 'target-4', 'sources': ['http://testserver/manytomanysource/1/', 'http://testserver/manytomanysource/3/']} + serializer = ManyToManyTargetSerializer(data=data, context={'request': request}) + self.assertTrue(serializer.is_valid()) + obj = serializer.save() + self.assertEqual(serializer.data, data) + self.assertEqual(obj.name, 'target-4') + + # Ensure target 4 is added, and everything else is as expected + queryset = ManyToManyTarget.objects.all() + serializer = ManyToManyTargetSerializer(queryset, many=True, context={'request': request}) + expected = [ + {'url': 'http://testserver/manytomanytarget/1/', 'name': 'target-1', 'sources': ['http://testserver/manytomanysource/1/', 'http://testserver/manytomanysource/2/', 'http://testserver/manytomanysource/3/']}, + 
{'url': 'http://testserver/manytomanytarget/2/', 'name': 'target-2', 'sources': ['http://testserver/manytomanysource/2/', 'http://testserver/manytomanysource/3/']}, + {'url': 'http://testserver/manytomanytarget/3/', 'name': 'target-3', 'sources': ['http://testserver/manytomanysource/3/']}, + {'url': 'http://testserver/manytomanytarget/4/', 'name': 'target-4', 'sources': ['http://testserver/manytomanysource/1/', 'http://testserver/manytomanysource/3/']} + ] + self.assertEqual(serializer.data, expected) + + +class HyperlinkedForeignKeyTests(TestCase): + urls = 'rest_framework.tests.test_relations_hyperlink' + + def setUp(self): + target = ForeignKeyTarget(name='target-1') + target.save() + new_target = ForeignKeyTarget(name='target-2') + new_target.save() + for idx in range(1, 4): + source = ForeignKeySource(name='source-%d' % idx, target=target) + source.save() + + def test_foreign_key_retrieve(self): + queryset = ForeignKeySource.objects.all() + serializer = ForeignKeySourceSerializer(queryset, many=True, context={'request': request}) + expected = [ + {'url': 'http://testserver/foreignkeysource/1/', 'name': 'source-1', 'target': 'http://testserver/foreignkeytarget/1/'}, + {'url': 'http://testserver/foreignkeysource/2/', 'name': 'source-2', 'target': 'http://testserver/foreignkeytarget/1/'}, + {'url': 'http://testserver/foreignkeysource/3/', 'name': 'source-3', 'target': 'http://testserver/foreignkeytarget/1/'} + ] + self.assertEqual(serializer.data, expected) + + def test_reverse_foreign_key_retrieve(self): + queryset = ForeignKeyTarget.objects.all() + serializer = ForeignKeyTargetSerializer(queryset, many=True, context={'request': request}) + expected = [ + {'url': 'http://testserver/foreignkeytarget/1/', 'name': 'target-1', 'sources': ['http://testserver/foreignkeysource/1/', 'http://testserver/foreignkeysource/2/', 'http://testserver/foreignkeysource/3/']}, + {'url': 'http://testserver/foreignkeytarget/2/', 'name': 'target-2', 'sources': []}, + ] + self.assertEqual(serializer.data, expected) + + def test_foreign_key_update(self): + data = {'url': 'http://testserver/foreignkeysource/1/', 'name': 'source-1', 'target': 'http://testserver/foreignkeytarget/2/'} + instance = ForeignKeySource.objects.get(pk=1) + serializer = ForeignKeySourceSerializer(instance, data=data, context={'request': request}) + self.assertTrue(serializer.is_valid()) + self.assertEqual(serializer.data, data) + serializer.save() + + # Ensure source 1 is updated, and everything else is as expected + queryset = ForeignKeySource.objects.all() + serializer = ForeignKeySourceSerializer(queryset, many=True, context={'request': request}) + expected = [ + {'url': 'http://testserver/foreignkeysource/1/', 'name': 'source-1', 'target': 'http://testserver/foreignkeytarget/2/'}, + {'url': 'http://testserver/foreignkeysource/2/', 'name': 'source-2', 'target': 'http://testserver/foreignkeytarget/1/'}, + {'url': 'http://testserver/foreignkeysource/3/', 'name': 'source-3', 'target': 'http://testserver/foreignkeytarget/1/'} + ] + self.assertEqual(serializer.data, expected) + + def test_foreign_key_update_incorrect_type(self): + data = {'url': 'http://testserver/foreignkeysource/1/', 'name': 'source-1', 'target': 2} + instance = ForeignKeySource.objects.get(pk=1) + serializer = ForeignKeySourceSerializer(instance, data=data, context={'request': request}) + self.assertFalse(serializer.is_valid()) + self.assertEqual(serializer.errors, {'target': ['Incorrect type. 
Expected url string, received int.']})
+
+    def test_reverse_foreign_key_update(self):
+        data = {'url': 'http://testserver/foreignkeytarget/2/', 'name': 'target-2', 'sources': ['http://testserver/foreignkeysource/1/', 'http://testserver/foreignkeysource/3/']}
+        instance = ForeignKeyTarget.objects.get(pk=2)
+        serializer = ForeignKeyTargetSerializer(instance, data=data, context={'request': request})
+        self.assertTrue(serializer.is_valid())
+        # We shouldn't have saved anything to the db yet since save
+        # hasn't been called.
+        queryset = ForeignKeyTarget.objects.all()
+        new_serializer = ForeignKeyTargetSerializer(queryset, many=True, context={'request': request})
+        expected = [
+            {'url': 'http://testserver/foreignkeytarget/1/', 'name': 'target-1', 'sources': ['http://testserver/foreignkeysource/1/', 'http://testserver/foreignkeysource/2/', 'http://testserver/foreignkeysource/3/']},
+            {'url': 'http://testserver/foreignkeytarget/2/', 'name': 'target-2', 'sources': []},
+        ]
+        self.assertEqual(new_serializer.data, expected)
+
+        serializer.save()
+        self.assertEqual(serializer.data, data)
+
+        # Ensure target 2 is updated, and everything else is as expected
+        queryset = ForeignKeyTarget.objects.all()
+        serializer = ForeignKeyTargetSerializer(queryset, many=True, context={'request': request})
+        expected = [
+            {'url': 'http://testserver/foreignkeytarget/1/', 'name': 'target-1', 'sources': ['http://testserver/foreignkeysource/2/']},
+            {'url': 'http://testserver/foreignkeytarget/2/', 'name': 'target-2', 'sources': ['http://testserver/foreignkeysource/1/', 'http://testserver/foreignkeysource/3/']},
+        ]
+        self.assertEqual(serializer.data, expected)
+
+    def test_foreign_key_create(self):
+        data = {'url': 'http://testserver/foreignkeysource/4/', 'name': 'source-4', 'target': 'http://testserver/foreignkeytarget/2/'}
+        serializer = ForeignKeySourceSerializer(data=data, context={'request': request})
+        self.assertTrue(serializer.is_valid())
+        obj = serializer.save()
+        self.assertEqual(serializer.data, data)
+        self.assertEqual(obj.name, 'source-4')
+
+        # Ensure source 4 is added, and everything else is as expected
+        queryset = ForeignKeySource.objects.all()
+        serializer = ForeignKeySourceSerializer(queryset, many=True, context={'request': request})
+        expected = [
+            {'url': 'http://testserver/foreignkeysource/1/', 'name': 'source-1', 'target': 'http://testserver/foreignkeytarget/1/'},
+            {'url': 'http://testserver/foreignkeysource/2/', 'name': 'source-2', 'target': 'http://testserver/foreignkeytarget/1/'},
+            {'url': 'http://testserver/foreignkeysource/3/', 'name': 'source-3', 'target': 'http://testserver/foreignkeytarget/1/'},
+            {'url': 'http://testserver/foreignkeysource/4/', 'name': 'source-4', 'target': 'http://testserver/foreignkeytarget/2/'},
+        ]
+        self.assertEqual(serializer.data, expected)
+
+    def test_reverse_foreign_key_create(self):
+        data = {'url': 'http://testserver/foreignkeytarget/3/', 'name': 'target-3', 'sources': ['http://testserver/foreignkeysource/1/', 'http://testserver/foreignkeysource/3/']}
+        serializer = ForeignKeyTargetSerializer(data=data, context={'request': request})
+        self.assertTrue(serializer.is_valid())
+        obj = serializer.save()
+        self.assertEqual(serializer.data, data)
+        self.assertEqual(obj.name, 'target-3')
+
+        # Ensure target 3 is added, and everything else is as expected
+        queryset = ForeignKeyTarget.objects.all()
+        serializer = ForeignKeyTargetSerializer(queryset, many=True, context={'request': request})
+        expected = [
+            {'url': 'http://testserver/foreignkeytarget/1/', 'name':
'target-1', 'sources': ['http://testserver/foreignkeysource/2/']}, + {'url': 'http://testserver/foreignkeytarget/2/', 'name': 'target-2', 'sources': []}, + {'url': 'http://testserver/foreignkeytarget/3/', 'name': 'target-3', 'sources': ['http://testserver/foreignkeysource/1/', 'http://testserver/foreignkeysource/3/']}, + ] + self.assertEqual(serializer.data, expected) + + def test_foreign_key_update_with_invalid_null(self): + data = {'url': 'http://testserver/foreignkeysource/1/', 'name': 'source-1', 'target': None} + instance = ForeignKeySource.objects.get(pk=1) + serializer = ForeignKeySourceSerializer(instance, data=data, context={'request': request}) + self.assertFalse(serializer.is_valid()) + self.assertEqual(serializer.errors, {'target': ['This field is required.']}) + + +class HyperlinkedNullableForeignKeyTests(TestCase): + urls = 'rest_framework.tests.test_relations_hyperlink' + + def setUp(self): + target = ForeignKeyTarget(name='target-1') + target.save() + for idx in range(1, 4): + if idx == 3: + target = None + source = NullableForeignKeySource(name='source-%d' % idx, target=target) + source.save() + + def test_foreign_key_retrieve_with_null(self): + queryset = NullableForeignKeySource.objects.all() + serializer = NullableForeignKeySourceSerializer(queryset, many=True, context={'request': request}) + expected = [ + {'url': 'http://testserver/nullableforeignkeysource/1/', 'name': 'source-1', 'target': 'http://testserver/foreignkeytarget/1/'}, + {'url': 'http://testserver/nullableforeignkeysource/2/', 'name': 'source-2', 'target': 'http://testserver/foreignkeytarget/1/'}, + {'url': 'http://testserver/nullableforeignkeysource/3/', 'name': 'source-3', 'target': None}, + ] + self.assertEqual(serializer.data, expected) + + def test_foreign_key_create_with_valid_null(self): + data = {'url': 'http://testserver/nullableforeignkeysource/4/', 'name': 'source-4', 'target': None} + serializer = NullableForeignKeySourceSerializer(data=data, context={'request': request}) + self.assertTrue(serializer.is_valid()) + obj = serializer.save() + self.assertEqual(serializer.data, data) + self.assertEqual(obj.name, 'source-4') + + # Ensure source 4 is created, and everything else is as expected + queryset = NullableForeignKeySource.objects.all() + serializer = NullableForeignKeySourceSerializer(queryset, many=True, context={'request': request}) + expected = [ + {'url': 'http://testserver/nullableforeignkeysource/1/', 'name': 'source-1', 'target': 'http://testserver/foreignkeytarget/1/'}, + {'url': 'http://testserver/nullableforeignkeysource/2/', 'name': 'source-2', 'target': 'http://testserver/foreignkeytarget/1/'}, + {'url': 'http://testserver/nullableforeignkeysource/3/', 'name': 'source-3', 'target': None}, + {'url': 'http://testserver/nullableforeignkeysource/4/', 'name': 'source-4', 'target': None} + ] + self.assertEqual(serializer.data, expected) + + def test_foreign_key_create_with_valid_emptystring(self): + """ + The emptystring should be interpreted as null in the context + of relationships. 
+ """ + data = {'url': 'http://testserver/nullableforeignkeysource/4/', 'name': 'source-4', 'target': ''} + expected_data = {'url': 'http://testserver/nullableforeignkeysource/4/', 'name': 'source-4', 'target': None} + serializer = NullableForeignKeySourceSerializer(data=data, context={'request': request}) + self.assertTrue(serializer.is_valid()) + obj = serializer.save() + self.assertEqual(serializer.data, expected_data) + self.assertEqual(obj.name, 'source-4') + + # Ensure source 4 is created, and everything else is as expected + queryset = NullableForeignKeySource.objects.all() + serializer = NullableForeignKeySourceSerializer(queryset, many=True, context={'request': request}) + expected = [ + {'url': 'http://testserver/nullableforeignkeysource/1/', 'name': 'source-1', 'target': 'http://testserver/foreignkeytarget/1/'}, + {'url': 'http://testserver/nullableforeignkeysource/2/', 'name': 'source-2', 'target': 'http://testserver/foreignkeytarget/1/'}, + {'url': 'http://testserver/nullableforeignkeysource/3/', 'name': 'source-3', 'target': None}, + {'url': 'http://testserver/nullableforeignkeysource/4/', 'name': 'source-4', 'target': None} + ] + self.assertEqual(serializer.data, expected) + + def test_foreign_key_update_with_valid_null(self): + data = {'url': 'http://testserver/nullableforeignkeysource/1/', 'name': 'source-1', 'target': None} + instance = NullableForeignKeySource.objects.get(pk=1) + serializer = NullableForeignKeySourceSerializer(instance, data=data, context={'request': request}) + self.assertTrue(serializer.is_valid()) + self.assertEqual(serializer.data, data) + serializer.save() + + # Ensure source 1 is updated, and everything else is as expected + queryset = NullableForeignKeySource.objects.all() + serializer = NullableForeignKeySourceSerializer(queryset, many=True, context={'request': request}) + expected = [ + {'url': 'http://testserver/nullableforeignkeysource/1/', 'name': 'source-1', 'target': None}, + {'url': 'http://testserver/nullableforeignkeysource/2/', 'name': 'source-2', 'target': 'http://testserver/foreignkeytarget/1/'}, + {'url': 'http://testserver/nullableforeignkeysource/3/', 'name': 'source-3', 'target': None}, + ] + self.assertEqual(serializer.data, expected) + + def test_foreign_key_update_with_valid_emptystring(self): + """ + The emptystring should be interpreted as null in the context + of relationships. 
+ """ + data = {'url': 'http://testserver/nullableforeignkeysource/1/', 'name': 'source-1', 'target': ''} + expected_data = {'url': 'http://testserver/nullableforeignkeysource/1/', 'name': 'source-1', 'target': None} + instance = NullableForeignKeySource.objects.get(pk=1) + serializer = NullableForeignKeySourceSerializer(instance, data=data, context={'request': request}) + self.assertTrue(serializer.is_valid()) + self.assertEqual(serializer.data, expected_data) + serializer.save() + + # Ensure source 1 is updated, and everything else is as expected + queryset = NullableForeignKeySource.objects.all() + serializer = NullableForeignKeySourceSerializer(queryset, many=True, context={'request': request}) + expected = [ + {'url': 'http://testserver/nullableforeignkeysource/1/', 'name': 'source-1', 'target': None}, + {'url': 'http://testserver/nullableforeignkeysource/2/', 'name': 'source-2', 'target': 'http://testserver/foreignkeytarget/1/'}, + {'url': 'http://testserver/nullableforeignkeysource/3/', 'name': 'source-3', 'target': None}, + ] + self.assertEqual(serializer.data, expected) + + # reverse foreign keys MUST be read_only + # In the general case they do not provide .remove() or .clear() + # and cannot be arbitrarily set. + + # def test_reverse_foreign_key_update(self): + # data = {'id': 1, 'name': 'target-1', 'sources': [1]} + # instance = ForeignKeyTarget.objects.get(pk=1) + # serializer = ForeignKeyTargetSerializer(instance, data=data) + # self.assertTrue(serializer.is_valid()) + # self.assertEqual(serializer.data, data) + # serializer.save() + + # # Ensure target 1 is updated, and everything else is as expected + # queryset = ForeignKeyTarget.objects.all() + # serializer = ForeignKeyTargetSerializer(queryset, many=True) + # expected = [ + # {'id': 1, 'name': 'target-1', 'sources': [1]}, + # {'id': 2, 'name': 'target-2', 'sources': []}, + # ] + # self.assertEqual(serializer.data, expected) + + +class HyperlinkedNullableOneToOneTests(TestCase): + urls = 'rest_framework.tests.test_relations_hyperlink' + + def setUp(self): + target = OneToOneTarget(name='target-1') + target.save() + new_target = OneToOneTarget(name='target-2') + new_target.save() + source = NullableOneToOneSource(name='source-1', target=target) + source.save() + + def test_reverse_foreign_key_retrieve_with_null(self): + queryset = OneToOneTarget.objects.all() + serializer = NullableOneToOneTargetSerializer(queryset, many=True, context={'request': request}) + expected = [ + {'url': 'http://testserver/onetoonetarget/1/', 'name': 'target-1', 'nullable_source': 'http://testserver/nullableonetoonesource/1/'}, + {'url': 'http://testserver/onetoonetarget/2/', 'name': 'target-2', 'nullable_source': None}, + ] + self.assertEqual(serializer.data, expected) + + +# Regression tests for #694 (`source` attribute on related fields) + +class HyperlinkedRelatedFieldSourceTests(TestCase): + urls = 'rest_framework.tests.test_relations_hyperlink' + + def test_related_manager_source(self): + """ + Relational fields should be able to use manager-returning methods as their source. 
+ """ + BlogPost.objects.create(title='blah') + field = serializers.HyperlinkedRelatedField( + many=True, + source='get_blogposts_manager', + view_name='dummy-url', + ) + field.context = {'request': request} + + class ClassWithManagerMethod(object): + def get_blogposts_manager(self): + return BlogPost.objects + + obj = ClassWithManagerMethod() + value = field.field_to_native(obj, 'field_name') + self.assertEqual(value, ['http://testserver/dummyurl/1/']) + + def test_related_queryset_source(self): + """ + Relational fields should be able to use queryset-returning methods as their source. + """ + BlogPost.objects.create(title='blah') + field = serializers.HyperlinkedRelatedField( + many=True, + source='get_blogposts_queryset', + view_name='dummy-url', + ) + field.context = {'request': request} + + class ClassWithQuerysetMethod(object): + def get_blogposts_queryset(self): + return BlogPost.objects.all() + + obj = ClassWithQuerysetMethod() + value = field.field_to_native(obj, 'field_name') + self.assertEqual(value, ['http://testserver/dummyurl/1/']) + + def test_dotted_source(self): + """ + Source argument should support dotted.source notation. + """ + BlogPost.objects.create(title='blah') + field = serializers.HyperlinkedRelatedField( + many=True, + source='a.b.c', + view_name='dummy-url', + ) + field.context = {'request': request} + + class ClassWithQuerysetMethod(object): + a = { + 'b': { + 'c': BlogPost.objects.all() + } + } + + obj = ClassWithQuerysetMethod() + value = field.field_to_native(obj, 'field_name') + self.assertEqual(value, ['http://testserver/dummyurl/1/']) diff --git a/awx/lib/site-packages/rest_framework/tests/test_relations_nested.py b/awx/lib/site-packages/rest_framework/tests/test_relations_nested.py new file mode 100644 index 0000000000..f6d006b39b --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_relations_nested.py @@ -0,0 +1,107 @@ +from __future__ import unicode_literals +from django.test import TestCase +from rest_framework import serializers +from rest_framework.tests.models import ForeignKeyTarget, ForeignKeySource, NullableForeignKeySource, OneToOneTarget, NullableOneToOneSource + + +class ForeignKeySourceSerializer(serializers.ModelSerializer): + class Meta: + model = ForeignKeySource + fields = ('id', 'name', 'target') + depth = 1 + + +class ForeignKeyTargetSerializer(serializers.ModelSerializer): + class Meta: + model = ForeignKeyTarget + fields = ('id', 'name', 'sources') + depth = 1 + + +class NullableForeignKeySourceSerializer(serializers.ModelSerializer): + class Meta: + model = NullableForeignKeySource + fields = ('id', 'name', 'target') + depth = 1 + + +class NullableOneToOneTargetSerializer(serializers.ModelSerializer): + class Meta: + model = OneToOneTarget + fields = ('id', 'name', 'nullable_source') + depth = 1 + + +class ReverseForeignKeyTests(TestCase): + def setUp(self): + target = ForeignKeyTarget(name='target-1') + target.save() + new_target = ForeignKeyTarget(name='target-2') + new_target.save() + for idx in range(1, 4): + source = ForeignKeySource(name='source-%d' % idx, target=target) + source.save() + + def test_foreign_key_retrieve(self): + queryset = ForeignKeySource.objects.all() + serializer = ForeignKeySourceSerializer(queryset, many=True) + expected = [ + {'id': 1, 'name': 'source-1', 'target': {'id': 1, 'name': 'target-1'}}, + {'id': 2, 'name': 'source-2', 'target': {'id': 1, 'name': 'target-1'}}, + {'id': 3, 'name': 'source-3', 'target': {'id': 1, 'name': 'target-1'}}, + ] + self.assertEqual(serializer.data, 
expected) + + def test_reverse_foreign_key_retrieve(self): + queryset = ForeignKeyTarget.objects.all() + serializer = ForeignKeyTargetSerializer(queryset, many=True) + expected = [ + {'id': 1, 'name': 'target-1', 'sources': [ + {'id': 1, 'name': 'source-1', 'target': 1}, + {'id': 2, 'name': 'source-2', 'target': 1}, + {'id': 3, 'name': 'source-3', 'target': 1}, + ]}, + {'id': 2, 'name': 'target-2', 'sources': [ + ]} + ] + self.assertEqual(serializer.data, expected) + + +class NestedNullableForeignKeyTests(TestCase): + def setUp(self): + target = ForeignKeyTarget(name='target-1') + target.save() + for idx in range(1, 4): + if idx == 3: + target = None + source = NullableForeignKeySource(name='source-%d' % idx, target=target) + source.save() + + def test_foreign_key_retrieve_with_null(self): + queryset = NullableForeignKeySource.objects.all() + serializer = NullableForeignKeySourceSerializer(queryset, many=True) + expected = [ + {'id': 1, 'name': 'source-1', 'target': {'id': 1, 'name': 'target-1'}}, + {'id': 2, 'name': 'source-2', 'target': {'id': 1, 'name': 'target-1'}}, + {'id': 3, 'name': 'source-3', 'target': None}, + ] + self.assertEqual(serializer.data, expected) + + +class NestedNullableOneToOneTests(TestCase): + def setUp(self): + target = OneToOneTarget(name='target-1') + target.save() + new_target = OneToOneTarget(name='target-2') + new_target.save() + source = NullableOneToOneSource(name='source-1', target=target) + source.save() + + def test_reverse_foreign_key_retrieve_with_null(self): + queryset = OneToOneTarget.objects.all() + serializer = NullableOneToOneTargetSerializer(queryset, many=True) + expected = [ + {'id': 1, 'name': 'target-1', 'nullable_source': {'id': 1, 'name': 'source-1', 'target': 1}}, + {'id': 2, 'name': 'target-2', 'nullable_source': None}, + ] + self.assertEqual(serializer.data, expected) diff --git a/awx/lib/site-packages/rest_framework/tests/test_relations_pk.py b/awx/lib/site-packages/rest_framework/tests/test_relations_pk.py new file mode 100644 index 0000000000..e2a1b81520 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_relations_pk.py @@ -0,0 +1,542 @@ +from __future__ import unicode_literals +from django.db import models +from django.test import TestCase +from rest_framework import serializers +from rest_framework.tests.models import ( + BlogPost, ManyToManyTarget, ManyToManySource, ForeignKeyTarget, ForeignKeySource, + NullableForeignKeySource, OneToOneTarget, NullableOneToOneSource, +) +from rest_framework.compat import six + + +# ManyToMany +class ManyToManyTargetSerializer(serializers.ModelSerializer): + class Meta: + model = ManyToManyTarget + fields = ('id', 'name', 'sources') + + +class ManyToManySourceSerializer(serializers.ModelSerializer): + class Meta: + model = ManyToManySource + fields = ('id', 'name', 'targets') + + +# ForeignKey +class ForeignKeyTargetSerializer(serializers.ModelSerializer): + class Meta: + model = ForeignKeyTarget + fields = ('id', 'name', 'sources') + + +class ForeignKeySourceSerializer(serializers.ModelSerializer): + class Meta: + model = ForeignKeySource + fields = ('id', 'name', 'target') + + +# Nullable ForeignKey +class NullableForeignKeySourceSerializer(serializers.ModelSerializer): + class Meta: + model = NullableForeignKeySource + fields = ('id', 'name', 'target') + + +# Nullable OneToOne +class NullableOneToOneTargetSerializer(serializers.ModelSerializer): + class Meta: + model = OneToOneTarget + fields = ('id', 'name', 'nullable_source') + + +# TODO: Add test that .data cannot be accessed 
prior to .is_valid + +class PKManyToManyTests(TestCase): + def setUp(self): + for idx in range(1, 4): + target = ManyToManyTarget(name='target-%d' % idx) + target.save() + source = ManyToManySource(name='source-%d' % idx) + source.save() + for target in ManyToManyTarget.objects.all(): + source.targets.add(target) + + def test_many_to_many_retrieve(self): + queryset = ManyToManySource.objects.all() + serializer = ManyToManySourceSerializer(queryset, many=True) + expected = [ + {'id': 1, 'name': 'source-1', 'targets': [1]}, + {'id': 2, 'name': 'source-2', 'targets': [1, 2]}, + {'id': 3, 'name': 'source-3', 'targets': [1, 2, 3]} + ] + self.assertEqual(serializer.data, expected) + + def test_reverse_many_to_many_retrieve(self): + queryset = ManyToManyTarget.objects.all() + serializer = ManyToManyTargetSerializer(queryset, many=True) + expected = [ + {'id': 1, 'name': 'target-1', 'sources': [1, 2, 3]}, + {'id': 2, 'name': 'target-2', 'sources': [2, 3]}, + {'id': 3, 'name': 'target-3', 'sources': [3]} + ] + self.assertEqual(serializer.data, expected) + + def test_many_to_many_update(self): + data = {'id': 1, 'name': 'source-1', 'targets': [1, 2, 3]} + instance = ManyToManySource.objects.get(pk=1) + serializer = ManyToManySourceSerializer(instance, data=data) + self.assertTrue(serializer.is_valid()) + serializer.save() + self.assertEqual(serializer.data, data) + + # Ensure source 1 is updated, and everything else is as expected + queryset = ManyToManySource.objects.all() + serializer = ManyToManySourceSerializer(queryset, many=True) + expected = [ + {'id': 1, 'name': 'source-1', 'targets': [1, 2, 3]}, + {'id': 2, 'name': 'source-2', 'targets': [1, 2]}, + {'id': 3, 'name': 'source-3', 'targets': [1, 2, 3]} + ] + self.assertEqual(serializer.data, expected) + + def test_reverse_many_to_many_update(self): + data = {'id': 1, 'name': 'target-1', 'sources': [1]} + instance = ManyToManyTarget.objects.get(pk=1) + serializer = ManyToManyTargetSerializer(instance, data=data) + self.assertTrue(serializer.is_valid()) + serializer.save() + self.assertEqual(serializer.data, data) + + # Ensure target 1 is updated, and everything else is as expected + queryset = ManyToManyTarget.objects.all() + serializer = ManyToManyTargetSerializer(queryset, many=True) + expected = [ + {'id': 1, 'name': 'target-1', 'sources': [1]}, + {'id': 2, 'name': 'target-2', 'sources': [2, 3]}, + {'id': 3, 'name': 'target-3', 'sources': [3]} + ] + self.assertEqual(serializer.data, expected) + + def test_many_to_many_create(self): + data = {'id': 4, 'name': 'source-4', 'targets': [1, 3]} + serializer = ManyToManySourceSerializer(data=data) + self.assertTrue(serializer.is_valid()) + obj = serializer.save() + self.assertEqual(serializer.data, data) + self.assertEqual(obj.name, 'source-4') + + # Ensure source 4 is added, and everything else is as expected + queryset = ManyToManySource.objects.all() + serializer = ManyToManySourceSerializer(queryset, many=True) + self.assertFalse(serializer.fields['targets'].read_only) + expected = [ + {'id': 1, 'name': 'source-1', 'targets': [1]}, + {'id': 2, 'name': 'source-2', 'targets': [1, 2]}, + {'id': 3, 'name': 'source-3', 'targets': [1, 2, 3]}, + {'id': 4, 'name': 'source-4', 'targets': [1, 3]}, + ] + self.assertEqual(serializer.data, expected) + + def test_reverse_many_to_many_create(self): + data = {'id': 4, 'name': 'target-4', 'sources': [1, 3]} + serializer = ManyToManyTargetSerializer(data=data) + self.assertFalse(serializer.fields['sources'].read_only) + self.assertTrue(serializer.is_valid()) + 
obj = serializer.save() + self.assertEqual(serializer.data, data) + self.assertEqual(obj.name, 'target-4') + + # Ensure target 4 is added, and everything else is as expected + queryset = ManyToManyTarget.objects.all() + serializer = ManyToManyTargetSerializer(queryset, many=True) + expected = [ + {'id': 1, 'name': 'target-1', 'sources': [1, 2, 3]}, + {'id': 2, 'name': 'target-2', 'sources': [2, 3]}, + {'id': 3, 'name': 'target-3', 'sources': [3]}, + {'id': 4, 'name': 'target-4', 'sources': [1, 3]} + ] + self.assertEqual(serializer.data, expected) + + +class PKForeignKeyTests(TestCase): + def setUp(self): + target = ForeignKeyTarget(name='target-1') + target.save() + new_target = ForeignKeyTarget(name='target-2') + new_target.save() + for idx in range(1, 4): + source = ForeignKeySource(name='source-%d' % idx, target=target) + source.save() + + def test_foreign_key_retrieve(self): + queryset = ForeignKeySource.objects.all() + serializer = ForeignKeySourceSerializer(queryset, many=True) + expected = [ + {'id': 1, 'name': 'source-1', 'target': 1}, + {'id': 2, 'name': 'source-2', 'target': 1}, + {'id': 3, 'name': 'source-3', 'target': 1} + ] + self.assertEqual(serializer.data, expected) + + def test_reverse_foreign_key_retrieve(self): + queryset = ForeignKeyTarget.objects.all() + serializer = ForeignKeyTargetSerializer(queryset, many=True) + expected = [ + {'id': 1, 'name': 'target-1', 'sources': [1, 2, 3]}, + {'id': 2, 'name': 'target-2', 'sources': []}, + ] + self.assertEqual(serializer.data, expected) + + def test_foreign_key_update(self): + data = {'id': 1, 'name': 'source-1', 'target': 2} + instance = ForeignKeySource.objects.get(pk=1) + serializer = ForeignKeySourceSerializer(instance, data=data) + self.assertTrue(serializer.is_valid()) + self.assertEqual(serializer.data, data) + serializer.save() + + # Ensure source 1 is updated, and everything else is as expected + queryset = ForeignKeySource.objects.all() + serializer = ForeignKeySourceSerializer(queryset, many=True) + expected = [ + {'id': 1, 'name': 'source-1', 'target': 2}, + {'id': 2, 'name': 'source-2', 'target': 1}, + {'id': 3, 'name': 'source-3', 'target': 1} + ] + self.assertEqual(serializer.data, expected) + + def test_foreign_key_update_incorrect_type(self): + data = {'id': 1, 'name': 'source-1', 'target': 'foo'} + instance = ForeignKeySource.objects.get(pk=1) + serializer = ForeignKeySourceSerializer(instance, data=data) + self.assertFalse(serializer.is_valid()) + self.assertEqual(serializer.errors, {'target': ['Incorrect type. Expected pk value, received %s.' % six.text_type.__name__]}) + + def test_reverse_foreign_key_update(self): + data = {'id': 2, 'name': 'target-2', 'sources': [1, 3]} + instance = ForeignKeyTarget.objects.get(pk=2) + serializer = ForeignKeyTargetSerializer(instance, data=data) + self.assertTrue(serializer.is_valid()) + # We shouldn't have saved anything to the db yet since save + # hasn't been called. 
+        queryset = ForeignKeyTarget.objects.all()
+        new_serializer = ForeignKeyTargetSerializer(queryset, many=True)
+        expected = [
+            {'id': 1, 'name': 'target-1', 'sources': [1, 2, 3]},
+            {'id': 2, 'name': 'target-2', 'sources': []},
+        ]
+        self.assertEqual(new_serializer.data, expected)
+
+        serializer.save()
+        self.assertEqual(serializer.data, data)
+
+        # Ensure target 2 is updated, and everything else is as expected
+        queryset = ForeignKeyTarget.objects.all()
+        serializer = ForeignKeyTargetSerializer(queryset, many=True)
+        expected = [
+            {'id': 1, 'name': 'target-1', 'sources': [2]},
+            {'id': 2, 'name': 'target-2', 'sources': [1, 3]},
+        ]
+        self.assertEqual(serializer.data, expected)
+
+    def test_foreign_key_create(self):
+        data = {'id': 4, 'name': 'source-4', 'target': 2}
+        serializer = ForeignKeySourceSerializer(data=data)
+        self.assertTrue(serializer.is_valid())
+        obj = serializer.save()
+        self.assertEqual(serializer.data, data)
+        self.assertEqual(obj.name, 'source-4')
+
+        # Ensure source 4 is added, and everything else is as expected
+        queryset = ForeignKeySource.objects.all()
+        serializer = ForeignKeySourceSerializer(queryset, many=True)
+        expected = [
+            {'id': 1, 'name': 'source-1', 'target': 1},
+            {'id': 2, 'name': 'source-2', 'target': 1},
+            {'id': 3, 'name': 'source-3', 'target': 1},
+            {'id': 4, 'name': 'source-4', 'target': 2},
+        ]
+        self.assertEqual(serializer.data, expected)
+
+    def test_reverse_foreign_key_create(self):
+        data = {'id': 3, 'name': 'target-3', 'sources': [1, 3]}
+        serializer = ForeignKeyTargetSerializer(data=data)
+        self.assertTrue(serializer.is_valid())
+        obj = serializer.save()
+        self.assertEqual(serializer.data, data)
+        self.assertEqual(obj.name, 'target-3')
+
+        # Ensure target 3 is added, and everything else is as expected
+        queryset = ForeignKeyTarget.objects.all()
+        serializer = ForeignKeyTargetSerializer(queryset, many=True)
+        expected = [
+            {'id': 1, 'name': 'target-1', 'sources': [2]},
+            {'id': 2, 'name': 'target-2', 'sources': []},
+            {'id': 3, 'name': 'target-3', 'sources': [1, 3]},
+        ]
+        self.assertEqual(serializer.data, expected)
+
+    def test_foreign_key_update_with_invalid_null(self):
+        data = {'id': 1, 'name': 'source-1', 'target': None}
+        instance = ForeignKeySource.objects.get(pk=1)
+        serializer = ForeignKeySourceSerializer(instance, data=data)
+        self.assertFalse(serializer.is_valid())
+        self.assertEqual(serializer.errors, {'target': ['This field is required.']})
+
+
+class PKNullableForeignKeyTests(TestCase):
+    def setUp(self):
+        target = ForeignKeyTarget(name='target-1')
+        target.save()
+        for idx in range(1, 4):
+            if idx == 3:
+                target = None
+            source = NullableForeignKeySource(name='source-%d' % idx, target=target)
+            source.save()
+
+    def test_foreign_key_retrieve_with_null(self):
+        queryset = NullableForeignKeySource.objects.all()
+        serializer = NullableForeignKeySourceSerializer(queryset, many=True)
+        expected = [
+            {'id': 1, 'name': 'source-1', 'target': 1},
+            {'id': 2, 'name': 'source-2', 'target': 1},
+            {'id': 3, 'name': 'source-3', 'target': None},
+        ]
+        self.assertEqual(serializer.data, expected)
+
+    def test_foreign_key_create_with_valid_null(self):
+        data = {'id': 4, 'name': 'source-4', 'target': None}
+        serializer = NullableForeignKeySourceSerializer(data=data)
+        self.assertTrue(serializer.is_valid())
+        obj = serializer.save()
+        self.assertEqual(serializer.data, data)
+        self.assertEqual(obj.name, 'source-4')
+
+        # Ensure source 4 is created, and everything else is as expected
+        queryset = NullableForeignKeySource.objects.all()
+
serializer = NullableForeignKeySourceSerializer(queryset, many=True) + expected = [ + {'id': 1, 'name': 'source-1', 'target': 1}, + {'id': 2, 'name': 'source-2', 'target': 1}, + {'id': 3, 'name': 'source-3', 'target': None}, + {'id': 4, 'name': 'source-4', 'target': None} + ] + self.assertEqual(serializer.data, expected) + + def test_foreign_key_create_with_valid_emptystring(self): + """ + The emptystring should be interpreted as null in the context + of relationships. + """ + data = {'id': 4, 'name': 'source-4', 'target': ''} + expected_data = {'id': 4, 'name': 'source-4', 'target': None} + serializer = NullableForeignKeySourceSerializer(data=data) + self.assertTrue(serializer.is_valid()) + obj = serializer.save() + self.assertEqual(serializer.data, expected_data) + self.assertEqual(obj.name, 'source-4') + + # Ensure source 4 is created, and everything else is as expected + queryset = NullableForeignKeySource.objects.all() + serializer = NullableForeignKeySourceSerializer(queryset, many=True) + expected = [ + {'id': 1, 'name': 'source-1', 'target': 1}, + {'id': 2, 'name': 'source-2', 'target': 1}, + {'id': 3, 'name': 'source-3', 'target': None}, + {'id': 4, 'name': 'source-4', 'target': None} + ] + self.assertEqual(serializer.data, expected) + + def test_foreign_key_update_with_valid_null(self): + data = {'id': 1, 'name': 'source-1', 'target': None} + instance = NullableForeignKeySource.objects.get(pk=1) + serializer = NullableForeignKeySourceSerializer(instance, data=data) + self.assertTrue(serializer.is_valid()) + self.assertEqual(serializer.data, data) + serializer.save() + + # Ensure source 1 is updated, and everything else is as expected + queryset = NullableForeignKeySource.objects.all() + serializer = NullableForeignKeySourceSerializer(queryset, many=True) + expected = [ + {'id': 1, 'name': 'source-1', 'target': None}, + {'id': 2, 'name': 'source-2', 'target': 1}, + {'id': 3, 'name': 'source-3', 'target': None} + ] + self.assertEqual(serializer.data, expected) + + def test_foreign_key_update_with_valid_emptystring(self): + """ + The emptystring should be interpreted as null in the context + of relationships. + """ + data = {'id': 1, 'name': 'source-1', 'target': ''} + expected_data = {'id': 1, 'name': 'source-1', 'target': None} + instance = NullableForeignKeySource.objects.get(pk=1) + serializer = NullableForeignKeySourceSerializer(instance, data=data) + self.assertTrue(serializer.is_valid()) + self.assertEqual(serializer.data, expected_data) + serializer.save() + + # Ensure source 1 is updated, and everything else is as expected + queryset = NullableForeignKeySource.objects.all() + serializer = NullableForeignKeySourceSerializer(queryset, many=True) + expected = [ + {'id': 1, 'name': 'source-1', 'target': None}, + {'id': 2, 'name': 'source-2', 'target': 1}, + {'id': 3, 'name': 'source-3', 'target': None} + ] + self.assertEqual(serializer.data, expected) + + # reverse foreign keys MUST be read_only + # In the general case they do not provide .remove() or .clear() + # and cannot be arbitrarily set. 
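[Editorial note] The comment above is the rationale for the disabled test that follows: in the general case a reverse relation's manager provides no .remove() or .clear(), so the relation cannot be arbitrarily reassigned and is safer declared read-only. A minimal sketch of making that intent explicit, assuming the DRF 2.x API these vendored tests target; `Post` and its reverse `comments` relation are hypothetical names, not part of this patch:

    from rest_framework import serializers

    class PostSerializer(serializers.ModelSerializer):
        # Hypothetical: expose the reverse relation on output only,
        # rather than relying on implicit writable behaviour.
        comments = serializers.PrimaryKeyRelatedField(many=True, read_only=True)

        class Meta:
            model = Post  # hypothetical model with a reverse 'comments' FK
            fields = ('id', 'title', 'comments')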
+
+    # def test_reverse_foreign_key_update(self):
+    #     data = {'id': 1, 'name': 'target-1', 'sources': [1]}
+    #     instance = ForeignKeyTarget.objects.get(pk=1)
+    #     serializer = ForeignKeyTargetSerializer(instance, data=data)
+    #     self.assertTrue(serializer.is_valid())
+    #     self.assertEqual(serializer.data, data)
+    #     serializer.save()
+
+    #     # Ensure target 1 is updated, and everything else is as expected
+    #     queryset = ForeignKeyTarget.objects.all()
+    #     serializer = ForeignKeyTargetSerializer(queryset, many=True)
+    #     expected = [
+    #         {'id': 1, 'name': 'target-1', 'sources': [1]},
+    #         {'id': 2, 'name': 'target-2', 'sources': []},
+    #     ]
+    #     self.assertEqual(serializer.data, expected)
+
+
+class PKNullableOneToOneTests(TestCase):
+    def setUp(self):
+        target = OneToOneTarget(name='target-1')
+        target.save()
+        new_target = OneToOneTarget(name='target-2')
+        new_target.save()
+        source = NullableOneToOneSource(name='source-1', target=new_target)
+        source.save()
+
+    def test_reverse_foreign_key_retrieve_with_null(self):
+        queryset = OneToOneTarget.objects.all()
+        serializer = NullableOneToOneTargetSerializer(queryset, many=True)
+        expected = [
+            {'id': 1, 'name': 'target-1', 'nullable_source': None},
+            {'id': 2, 'name': 'target-2', 'nullable_source': 1},
+        ]
+        self.assertEqual(serializer.data, expected)
+
+
+# The below models and tests ensure that serializer fields corresponding
+# to a ManyToManyField field with a user-specified ``through`` model are
+# set to read only
+
+
+class ManyToManyThroughTarget(models.Model):
+    name = models.CharField(max_length=100)
+
+
+class ManyToManyThrough(models.Model):
+    source = models.ForeignKey('ManyToManyThroughSource')
+    target = models.ForeignKey(ManyToManyThroughTarget)
+
+
+class ManyToManyThroughSource(models.Model):
+    name = models.CharField(max_length=100)
+    targets = models.ManyToManyField(ManyToManyThroughTarget,
+                                     related_name='sources',
+                                     through='ManyToManyThrough')
+
+
+class ManyToManyThroughTargetSerializer(serializers.ModelSerializer):
+    class Meta:
+        model = ManyToManyThroughTarget
+        fields = ('id', 'name', 'sources')
+
+
+class ManyToManyThroughSourceSerializer(serializers.ModelSerializer):
+    class Meta:
+        model = ManyToManyThroughSource
+        fields = ('id', 'name', 'targets')
+
+
+class PKManyToManyThroughTests(TestCase):
+    def setUp(self):
+        self.source = ManyToManyThroughSource.objects.create(
+            name='through-source-1')
+        self.target = ManyToManyThroughTarget.objects.create(
+            name='through-target-1')
+
+    def test_many_to_many_create(self):
+        data = {'id': 2, 'name': 'source-2', 'targets': [self.target.pk]}
+        serializer = ManyToManyThroughSourceSerializer(data=data)
+        self.assertTrue(serializer.fields['targets'].read_only)
+        self.assertTrue(serializer.is_valid())
+        obj = serializer.save()
+        self.assertEqual(obj.name, 'source-2')
+        self.assertEqual(obj.targets.count(), 0)
+
+    def test_many_to_many_reverse_create(self):
+        data = {'id': 2, 'name': 'target-2', 'sources': [self.source.pk]}
+        serializer = ManyToManyThroughTargetSerializer(data=data)
+        self.assertTrue(serializer.fields['sources'].read_only)
+        self.assertTrue(serializer.is_valid())
+        obj = serializer.save()
+        self.assertEqual(obj.name, 'target-2')
+        self.assertEqual(obj.sources.count(), 0)
+
+
+# Regression tests for #694 (`source` attribute on related fields)
+
+
+class PrimaryKeyRelatedFieldSourceTests(TestCase):
+    def test_related_manager_source(self):
+        """
+        Relational fields should be able to use manager-returning methods as their source.
+ """ + BlogPost.objects.create(title='blah') + field = serializers.PrimaryKeyRelatedField(many=True, source='get_blogposts_manager') + + class ClassWithManagerMethod(object): + def get_blogposts_manager(self): + return BlogPost.objects + + obj = ClassWithManagerMethod() + value = field.field_to_native(obj, 'field_name') + self.assertEqual(value, [1]) + + def test_related_queryset_source(self): + """ + Relational fields should be able to use queryset-returning methods as their source. + """ + BlogPost.objects.create(title='blah') + field = serializers.PrimaryKeyRelatedField(many=True, source='get_blogposts_queryset') + + class ClassWithQuerysetMethod(object): + def get_blogposts_queryset(self): + return BlogPost.objects.all() + + obj = ClassWithQuerysetMethod() + value = field.field_to_native(obj, 'field_name') + self.assertEqual(value, [1]) + + def test_dotted_source(self): + """ + Source argument should support dotted.source notation. + """ + BlogPost.objects.create(title='blah') + field = serializers.PrimaryKeyRelatedField(many=True, source='a.b.c') + + class ClassWithQuerysetMethod(object): + a = { + 'b': { + 'c': BlogPost.objects.all() + } + } + + obj = ClassWithQuerysetMethod() + value = field.field_to_native(obj, 'field_name') + self.assertEqual(value, [1]) diff --git a/awx/lib/site-packages/rest_framework/tests/test_relations_slug.py b/awx/lib/site-packages/rest_framework/tests/test_relations_slug.py new file mode 100644 index 0000000000..435c821cfa --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_relations_slug.py @@ -0,0 +1,257 @@ +from django.test import TestCase +from rest_framework import serializers +from rest_framework.tests.models import NullableForeignKeySource, ForeignKeySource, ForeignKeyTarget + + +class ForeignKeyTargetSerializer(serializers.ModelSerializer): + sources = serializers.SlugRelatedField(many=True, slug_field='name') + + class Meta: + model = ForeignKeyTarget + + +class ForeignKeySourceSerializer(serializers.ModelSerializer): + target = serializers.SlugRelatedField(slug_field='name') + + class Meta: + model = ForeignKeySource + + +class NullableForeignKeySourceSerializer(serializers.ModelSerializer): + target = serializers.SlugRelatedField(slug_field='name', required=False) + + class Meta: + model = NullableForeignKeySource + + +# TODO: M2M Tests, FKTests (Non-nullable), One2One +class SlugForeignKeyTests(TestCase): + def setUp(self): + target = ForeignKeyTarget(name='target-1') + target.save() + new_target = ForeignKeyTarget(name='target-2') + new_target.save() + for idx in range(1, 4): + source = ForeignKeySource(name='source-%d' % idx, target=target) + source.save() + + def test_foreign_key_retrieve(self): + queryset = ForeignKeySource.objects.all() + serializer = ForeignKeySourceSerializer(queryset, many=True) + expected = [ + {'id': 1, 'name': 'source-1', 'target': 'target-1'}, + {'id': 2, 'name': 'source-2', 'target': 'target-1'}, + {'id': 3, 'name': 'source-3', 'target': 'target-1'} + ] + self.assertEqual(serializer.data, expected) + + def test_reverse_foreign_key_retrieve(self): + queryset = ForeignKeyTarget.objects.all() + serializer = ForeignKeyTargetSerializer(queryset, many=True) + expected = [ + {'id': 1, 'name': 'target-1', 'sources': ['source-1', 'source-2', 'source-3']}, + {'id': 2, 'name': 'target-2', 'sources': []}, + ] + self.assertEqual(serializer.data, expected) + + def test_foreign_key_update(self): + data = {'id': 1, 'name': 'source-1', 'target': 'target-2'} + instance = ForeignKeySource.objects.get(pk=1) + 
serializer = ForeignKeySourceSerializer(instance, data=data)
+        self.assertTrue(serializer.is_valid())
+        self.assertEqual(serializer.data, data)
+        serializer.save()
+
+        # Ensure source 1 is updated, and everything else is as expected
+        queryset = ForeignKeySource.objects.all()
+        serializer = ForeignKeySourceSerializer(queryset, many=True)
+        expected = [
+            {'id': 1, 'name': 'source-1', 'target': 'target-2'},
+            {'id': 2, 'name': 'source-2', 'target': 'target-1'},
+            {'id': 3, 'name': 'source-3', 'target': 'target-1'}
+        ]
+        self.assertEqual(serializer.data, expected)
+
+    def test_foreign_key_update_incorrect_type(self):
+        data = {'id': 1, 'name': 'source-1', 'target': 123}
+        instance = ForeignKeySource.objects.get(pk=1)
+        serializer = ForeignKeySourceSerializer(instance, data=data)
+        self.assertFalse(serializer.is_valid())
+        self.assertEqual(serializer.errors, {'target': ['Object with name=123 does not exist.']})
+
+    def test_reverse_foreign_key_update(self):
+        data = {'id': 2, 'name': 'target-2', 'sources': ['source-1', 'source-3']}
+        instance = ForeignKeyTarget.objects.get(pk=2)
+        serializer = ForeignKeyTargetSerializer(instance, data=data)
+        self.assertTrue(serializer.is_valid())
+        # We shouldn't have saved anything to the db yet since save
+        # hasn't been called.
+        queryset = ForeignKeyTarget.objects.all()
+        new_serializer = ForeignKeyTargetSerializer(queryset, many=True)
+        expected = [
+            {'id': 1, 'name': 'target-1', 'sources': ['source-1', 'source-2', 'source-3']},
+            {'id': 2, 'name': 'target-2', 'sources': []},
+        ]
+        self.assertEqual(new_serializer.data, expected)
+
+        serializer.save()
+        self.assertEqual(serializer.data, data)
+
+        # Ensure target 2 is updated, and everything else is as expected
+        queryset = ForeignKeyTarget.objects.all()
+        serializer = ForeignKeyTargetSerializer(queryset, many=True)
+        expected = [
+            {'id': 1, 'name': 'target-1', 'sources': ['source-2']},
+            {'id': 2, 'name': 'target-2', 'sources': ['source-1', 'source-3']},
+        ]
+        self.assertEqual(serializer.data, expected)
+
+    def test_foreign_key_create(self):
+        data = {'id': 4, 'name': 'source-4', 'target': 'target-2'}
+        serializer = ForeignKeySourceSerializer(data=data)
+        self.assertTrue(serializer.is_valid())
+        obj = serializer.save()
+        self.assertEqual(serializer.data, data)
+        self.assertEqual(obj.name, 'source-4')
+
+        # Ensure source 4 is added, and everything else is as expected
+        queryset = ForeignKeySource.objects.all()
+        serializer = ForeignKeySourceSerializer(queryset, many=True)
+        expected = [
+            {'id': 1, 'name': 'source-1', 'target': 'target-1'},
+            {'id': 2, 'name': 'source-2', 'target': 'target-1'},
+            {'id': 3, 'name': 'source-3', 'target': 'target-1'},
+            {'id': 4, 'name': 'source-4', 'target': 'target-2'},
+        ]
+        self.assertEqual(serializer.data, expected)
+
+    def test_reverse_foreign_key_create(self):
+        data = {'id': 3, 'name': 'target-3', 'sources': ['source-1', 'source-3']}
+        serializer = ForeignKeyTargetSerializer(data=data)
+        self.assertTrue(serializer.is_valid())
+        obj = serializer.save()
+        self.assertEqual(serializer.data, data)
+        self.assertEqual(obj.name, 'target-3')
+
+        # Ensure target 3 is added, and everything else is as expected
+        queryset = ForeignKeyTarget.objects.all()
+        serializer = ForeignKeyTargetSerializer(queryset, many=True)
+        expected = [
+            {'id': 1, 'name': 'target-1', 'sources': ['source-2']},
+            {'id': 2, 'name': 'target-2', 'sources': []},
+            {'id': 3, 'name': 'target-3', 'sources': ['source-1', 'source-3']},
+        ]
+        self.assertEqual(serializer.data, expected)
+
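[Editorial note] The slug tests above resolve each incoming value by looking it up on the related queryset via the configured slug_field, which is where the 'Object with name=123 does not exist.' error in test_foreign_key_update_incorrect_type originates. A minimal sketch of the declaration pattern, assuming the same DRF 2.x API; `Track` and `Album` are hypothetical models, not part of this patch, and for writable use the slug column should normally be unique so the lookup is unambiguous:

    from rest_framework import serializers

    class TrackSerializer(serializers.ModelSerializer):
        # Hypothetical: incoming 'album' values are matched against
        # Album rows by their 'title' field; a miss is reported as a
        # field validation error rather than an exception.
        album = serializers.SlugRelatedField(slug_field='title')

        class Meta:
            model = Track  # hypothetical model with FK 'album' -> Album
            fields = ('id', 'name', 'album')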
+ def test_foreign_key_update_with_invalid_null(self): + data = {'id': 1, 'name': 'source-1', 'target': None} + instance = ForeignKeySource.objects.get(pk=1) + serializer = ForeignKeySourceSerializer(instance, data=data) + self.assertFalse(serializer.is_valid()) + self.assertEqual(serializer.errors, {'target': ['This field is required.']}) + + +class SlugNullableForeignKeyTests(TestCase): + def setUp(self): + target = ForeignKeyTarget(name='target-1') + target.save() + for idx in range(1, 4): + if idx == 3: + target = None + source = NullableForeignKeySource(name='source-%d' % idx, target=target) + source.save() + + def test_foreign_key_retrieve_with_null(self): + queryset = NullableForeignKeySource.objects.all() + serializer = NullableForeignKeySourceSerializer(queryset, many=True) + expected = [ + {'id': 1, 'name': 'source-1', 'target': 'target-1'}, + {'id': 2, 'name': 'source-2', 'target': 'target-1'}, + {'id': 3, 'name': 'source-3', 'target': None}, + ] + self.assertEqual(serializer.data, expected) + + def test_foreign_key_create_with_valid_null(self): + data = {'id': 4, 'name': 'source-4', 'target': None} + serializer = NullableForeignKeySourceSerializer(data=data) + self.assertTrue(serializer.is_valid()) + obj = serializer.save() + self.assertEqual(serializer.data, data) + self.assertEqual(obj.name, 'source-4') + + # Ensure source 4 is created, and everything else is as expected + queryset = NullableForeignKeySource.objects.all() + serializer = NullableForeignKeySourceSerializer(queryset, many=True) + expected = [ + {'id': 1, 'name': 'source-1', 'target': 'target-1'}, + {'id': 2, 'name': 'source-2', 'target': 'target-1'}, + {'id': 3, 'name': 'source-3', 'target': None}, + {'id': 4, 'name': 'source-4', 'target': None} + ] + self.assertEqual(serializer.data, expected) + + def test_foreign_key_create_with_valid_emptystring(self): + """ + The emptystring should be interpreted as null in the context + of relationships. 
+ """ + data = {'id': 4, 'name': 'source-4', 'target': ''} + expected_data = {'id': 4, 'name': 'source-4', 'target': None} + serializer = NullableForeignKeySourceSerializer(data=data) + self.assertTrue(serializer.is_valid()) + obj = serializer.save() + self.assertEqual(serializer.data, expected_data) + self.assertEqual(obj.name, 'source-4') + + # Ensure source 4 is created, and everything else is as expected + queryset = NullableForeignKeySource.objects.all() + serializer = NullableForeignKeySourceSerializer(queryset, many=True) + expected = [ + {'id': 1, 'name': 'source-1', 'target': 'target-1'}, + {'id': 2, 'name': 'source-2', 'target': 'target-1'}, + {'id': 3, 'name': 'source-3', 'target': None}, + {'id': 4, 'name': 'source-4', 'target': None} + ] + self.assertEqual(serializer.data, expected) + + def test_foreign_key_update_with_valid_null(self): + data = {'id': 1, 'name': 'source-1', 'target': None} + instance = NullableForeignKeySource.objects.get(pk=1) + serializer = NullableForeignKeySourceSerializer(instance, data=data) + self.assertTrue(serializer.is_valid()) + self.assertEqual(serializer.data, data) + serializer.save() + + # Ensure source 1 is updated, and everything else is as expected + queryset = NullableForeignKeySource.objects.all() + serializer = NullableForeignKeySourceSerializer(queryset, many=True) + expected = [ + {'id': 1, 'name': 'source-1', 'target': None}, + {'id': 2, 'name': 'source-2', 'target': 'target-1'}, + {'id': 3, 'name': 'source-3', 'target': None} + ] + self.assertEqual(serializer.data, expected) + + def test_foreign_key_update_with_valid_emptystring(self): + """ + The emptystring should be interpreted as null in the context + of relationships. + """ + data = {'id': 1, 'name': 'source-1', 'target': ''} + expected_data = {'id': 1, 'name': 'source-1', 'target': None} + instance = NullableForeignKeySource.objects.get(pk=1) + serializer = NullableForeignKeySourceSerializer(instance, data=data) + self.assertTrue(serializer.is_valid()) + self.assertEqual(serializer.data, expected_data) + serializer.save() + + # Ensure source 1 is updated, and everything else is as expected + queryset = NullableForeignKeySource.objects.all() + serializer = NullableForeignKeySourceSerializer(queryset, many=True) + expected = [ + {'id': 1, 'name': 'source-1', 'target': None}, + {'id': 2, 'name': 'source-2', 'target': 'target-1'}, + {'id': 3, 'name': 'source-3', 'target': None} + ] + self.assertEqual(serializer.data, expected) diff --git a/awx/lib/site-packages/rest_framework/tests/test_renderers.py b/awx/lib/site-packages/rest_framework/tests/test_renderers.py new file mode 100644 index 0000000000..95b597411f --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_renderers.py @@ -0,0 +1,541 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +from decimal import Decimal +from django.core.cache import cache +from django.test import TestCase +from django.test.client import RequestFactory +from django.utils import unittest +from django.utils.translation import ugettext_lazy as _ +from rest_framework import status, permissions +from rest_framework.compat import yaml, etree, patterns, url, include +from rest_framework.response import Response +from rest_framework.views import APIView +from rest_framework.renderers import BaseRenderer, JSONRenderer, YAMLRenderer, \ + XMLRenderer, JSONPRenderer, BrowsableAPIRenderer, UnicodeJSONRenderer +from rest_framework.parsers import YAMLParser, XMLParser +from rest_framework.settings import api_settings +from 
rest_framework.compat import StringIO +from rest_framework.compat import six +import datetime +import pickle +import re + + +DUMMYSTATUS = status.HTTP_200_OK +DUMMYCONTENT = 'dummycontent' + +RENDERER_A_SERIALIZER = lambda x: ('Renderer A: %s' % x).encode('ascii') +RENDERER_B_SERIALIZER = lambda x: ('Renderer B: %s' % x).encode('ascii') + + +expected_results = [ + ((elem for elem in [1, 2, 3]), JSONRenderer, b'[1, 2, 3]') # Generator +] + + +class BasicRendererTests(TestCase): + def test_expected_results(self): + for value, renderer_cls, expected in expected_results: + output = renderer_cls().render(value) + self.assertEqual(output, expected) + + +class RendererA(BaseRenderer): + media_type = 'mock/renderera' + format = "formata" + + def render(self, data, media_type=None, renderer_context=None): + return RENDERER_A_SERIALIZER(data) + + +class RendererB(BaseRenderer): + media_type = 'mock/rendererb' + format = "formatb" + + def render(self, data, media_type=None, renderer_context=None): + return RENDERER_B_SERIALIZER(data) + + +class MockView(APIView): + renderer_classes = (RendererA, RendererB) + + def get(self, request, **kwargs): + response = Response(DUMMYCONTENT, status=DUMMYSTATUS) + return response + + +class MockGETView(APIView): + + def get(self, request, **kwargs): + return Response({'foo': ['bar', 'baz']}) + + +class HTMLView(APIView): + renderer_classes = (BrowsableAPIRenderer, ) + + def get(self, request, **kwargs): + return Response('text') + + +class HTMLView1(APIView): + renderer_classes = (BrowsableAPIRenderer, JSONRenderer) + + def get(self, request, **kwargs): + return Response('text') + +urlpatterns = patterns('', + url(r'^.*\.(?P<format>.+)$', MockView.as_view(renderer_classes=[RendererA, RendererB])), + url(r'^$', MockView.as_view(renderer_classes=[RendererA, RendererB])), + url(r'^cache$', MockGETView.as_view()), + url(r'^jsonp/jsonrenderer$', MockGETView.as_view(renderer_classes=[JSONRenderer, JSONPRenderer])), + url(r'^jsonp/nojsonrenderer$', MockGETView.as_view(renderer_classes=[JSONPRenderer])), + url(r'^html$', HTMLView.as_view()), + url(r'^html1$', HTMLView1.as_view()), + url(r'^api', include('rest_framework.urls', namespace='rest_framework')) +) + + +class POSTDeniedPermission(permissions.BasePermission): + def has_permission(self, request, view): + return request.method != 'POST' + + +class POSTDeniedView(APIView): + renderer_classes = (BrowsableAPIRenderer,) + permission_classes = (POSTDeniedPermission,) + + def get(self, request): + return Response() + + def post(self, request): + return Response() + + def put(self, request): + return Response() + + def patch(self, request): + return Response() + + +class DocumentingRendererTests(TestCase): + def test_only_permitted_forms_are_displayed(self): + view = POSTDeniedView.as_view() + request = RequestFactory().get('/') + response = view(request).render() + self.assertNotContains(response, '>POST<') + self.assertContains(response, '>PUT<') + self.assertContains(response, '>PATCH<') + + +class RendererEndToEndTests(TestCase): + """ + End-to-end testing of renderers using an RendererMixin on a generic view. 
+ """ + + urls = 'rest_framework.tests.test_renderers' + + def test_default_renderer_serializes_content(self): + """If the Accept header is not set the default renderer should serialize the response.""" + resp = self.client.get('/') + self.assertEqual(resp['Content-Type'], RendererA.media_type + '; charset=utf-8') + self.assertEqual(resp.content, RENDERER_A_SERIALIZER(DUMMYCONTENT)) + self.assertEqual(resp.status_code, DUMMYSTATUS) + + def test_head_method_serializes_no_content(self): + """No response must be included in HEAD requests.""" + resp = self.client.head('/') + self.assertEqual(resp.status_code, DUMMYSTATUS) + self.assertEqual(resp['Content-Type'], RendererA.media_type + '; charset=utf-8') + self.assertEqual(resp.content, six.b('')) + + def test_default_renderer_serializes_content_on_accept_any(self): + """If the Accept header is set to */* the default renderer should serialize the response.""" + resp = self.client.get('/', HTTP_ACCEPT='*/*') + self.assertEqual(resp['Content-Type'], RendererA.media_type + '; charset=utf-8') + self.assertEqual(resp.content, RENDERER_A_SERIALIZER(DUMMYCONTENT)) + self.assertEqual(resp.status_code, DUMMYSTATUS) + + def test_specified_renderer_serializes_content_default_case(self): + """If the Accept header is set the specified renderer should serialize the response. + (In this case we check that works for the default renderer)""" + resp = self.client.get('/', HTTP_ACCEPT=RendererA.media_type) + self.assertEqual(resp['Content-Type'], RendererA.media_type + '; charset=utf-8') + self.assertEqual(resp.content, RENDERER_A_SERIALIZER(DUMMYCONTENT)) + self.assertEqual(resp.status_code, DUMMYSTATUS) + + def test_specified_renderer_serializes_content_non_default_case(self): + """If the Accept header is set the specified renderer should serialize the response. 
+ (In this case we check that works for a non-default renderer)""" + resp = self.client.get('/', HTTP_ACCEPT=RendererB.media_type) + self.assertEqual(resp['Content-Type'], RendererB.media_type + '; charset=utf-8') + self.assertEqual(resp.content, RENDERER_B_SERIALIZER(DUMMYCONTENT)) + self.assertEqual(resp.status_code, DUMMYSTATUS) + + def test_specified_renderer_serializes_content_on_accept_query(self): + """The '_accept' query string should behave in the same way as the Accept header.""" + param = '?%s=%s' % ( + api_settings.URL_ACCEPT_OVERRIDE, + RendererB.media_type + ) + resp = self.client.get('/' + param) + self.assertEqual(resp['Content-Type'], RendererB.media_type + '; charset=utf-8') + self.assertEqual(resp.content, RENDERER_B_SERIALIZER(DUMMYCONTENT)) + self.assertEqual(resp.status_code, DUMMYSTATUS) + + def test_unsatisfiable_accept_header_on_request_returns_406_status(self): + """If the Accept header is unsatisfiable we should return a 406 Not Acceptable response.""" + resp = self.client.get('/', HTTP_ACCEPT='foo/bar') + self.assertEqual(resp.status_code, status.HTTP_406_NOT_ACCEPTABLE) + + def test_specified_renderer_serializes_content_on_format_query(self): + """If a 'format' query is specified, the renderer with the matching + format attribute should serialize the response.""" + param = '?%s=%s' % ( + api_settings.URL_FORMAT_OVERRIDE, + RendererB.format + ) + resp = self.client.get('/' + param) + self.assertEqual(resp['Content-Type'], RendererB.media_type + '; charset=utf-8') + self.assertEqual(resp.content, RENDERER_B_SERIALIZER(DUMMYCONTENT)) + self.assertEqual(resp.status_code, DUMMYSTATUS) + + def test_specified_renderer_serializes_content_on_format_kwargs(self): + """If a 'format' keyword arg is specified, the renderer with the matching + format attribute should serialize the response.""" + resp = self.client.get('/something.formatb') + self.assertEqual(resp['Content-Type'], RendererB.media_type + '; charset=utf-8') + self.assertEqual(resp.content, RENDERER_B_SERIALIZER(DUMMYCONTENT)) + self.assertEqual(resp.status_code, DUMMYSTATUS) + + def test_specified_renderer_is_used_on_format_query_with_matching_accept(self): + """If both a 'format' query and a matching Accept header specified, + the renderer with the matching format attribute should serialize the response.""" + param = '?%s=%s' % ( + api_settings.URL_FORMAT_OVERRIDE, + RendererB.format + ) + resp = self.client.get('/' + param, + HTTP_ACCEPT=RendererB.media_type) + self.assertEqual(resp['Content-Type'], RendererB.media_type + '; charset=utf-8') + self.assertEqual(resp.content, RENDERER_B_SERIALIZER(DUMMYCONTENT)) + self.assertEqual(resp.status_code, DUMMYSTATUS) + + +_flat_repr = '{"foo": ["bar", "baz"]}' +_indented_repr = '{\n "foo": [\n "bar",\n "baz"\n ]\n}' + + +def strip_trailing_whitespace(content): + """ + Seems to be some inconsistencies re. trailing whitespace with + different versions of the json lib. + """ + return re.sub(' +\n', '\n', content) + + +class JSONRendererTests(TestCase): + """ + Tests specific to the JSON Renderer + """ + + def test_render_lazy_strings(self): + """ + JSONRenderer should deal with lazy translated strings. + """ + ret = JSONRenderer().render(_('test')) + self.assertEqual(ret, b'"test"') + + def test_without_content_type_args(self): + """ + Test basic JSON rendering. + """ + obj = {'foo': ['bar', 'baz']} + renderer = JSONRenderer() + content = renderer.render(obj, 'application/json') + # Fix failing test case which depends on version of JSON library. 
+        self.assertEqual(content.decode('utf-8'), _flat_repr)
+
+    def test_with_content_type_args(self):
+        """
+        Test JSON rendering with additional content type arguments supplied.
+        """
+        obj = {'foo': ['bar', 'baz']}
+        renderer = JSONRenderer()
+        content = renderer.render(obj, 'application/json; indent=2')
+        self.assertEqual(strip_trailing_whitespace(content.decode('utf-8')), _indented_repr)
+
+    def test_check_ascii(self):
+        obj = {'countries': ['United Kingdom', 'France', 'España']}
+        renderer = JSONRenderer()
+        content = renderer.render(obj, 'application/json')
+        self.assertEqual(content, '{"countries": ["United Kingdom", "France", "Espa\\u00f1a"]}'.encode('utf-8'))
+
+
+class UnicodeJSONRendererTests(TestCase):
+    """
+    Tests specific to the Unicode JSON Renderer
+    """
+    def test_proper_encoding(self):
+        obj = {'countries': ['United Kingdom', 'France', 'España']}
+        renderer = UnicodeJSONRenderer()
+        content = renderer.render(obj, 'application/json')
+        self.assertEqual(content, '{"countries": ["United Kingdom", "France", "España"]}'.encode('utf-8'))
+
+
+class JSONPRendererTests(TestCase):
+    """
+    Tests specific to the JSONP Renderer
+    """
+
+    urls = 'rest_framework.tests.test_renderers'
+
+    def test_without_callback_with_json_renderer(self):
+        """
+        Test JSONP rendering with View JSON Renderer.
+        """
+        resp = self.client.get('/jsonp/jsonrenderer',
+                               HTTP_ACCEPT='application/javascript')
+        self.assertEqual(resp.status_code, status.HTTP_200_OK)
+        self.assertEqual(resp['Content-Type'], 'application/javascript; charset=utf-8')
+        self.assertEqual(resp.content,
+                         ('callback(%s);' % _flat_repr).encode('ascii'))
+
+    def test_without_callback_without_json_renderer(self):
+        """
+        Test JSONP rendering without View JSON Renderer.
+        """
+        resp = self.client.get('/jsonp/nojsonrenderer',
+                               HTTP_ACCEPT='application/javascript')
+        self.assertEqual(resp.status_code, status.HTTP_200_OK)
+        self.assertEqual(resp['Content-Type'], 'application/javascript; charset=utf-8')
+        self.assertEqual(resp.content,
+                         ('callback(%s);' % _flat_repr).encode('ascii'))
+
+    def test_with_callback(self):
+        """
+        Test JSONP rendering with callback function name.
+        """
+        callback_func = 'myjsonpcallback'
+        resp = self.client.get('/jsonp/nojsonrenderer?callback=' + callback_func,
+                               HTTP_ACCEPT='application/javascript')
+        self.assertEqual(resp.status_code, status.HTTP_200_OK)
+        self.assertEqual(resp['Content-Type'], 'application/javascript; charset=utf-8')
+        self.assertEqual(resp.content,
+                         ('%s(%s);' % (callback_func, _flat_repr)).encode('ascii'))
+
+
+if yaml:
+    _yaml_repr = 'foo: [bar, baz]\n'

+    class YAMLRendererTests(TestCase):
+        """
+        Tests specific to the YAML Renderer
+        """
+
+        def test_render(self):
+            """
+            Test basic YAML rendering.
+            """
+            obj = {'foo': ['bar', 'baz']}
+            renderer = YAMLRenderer()
+            content = renderer.render(obj, 'application/yaml')
+            self.assertEqual(content, _yaml_repr)
+
+        def test_render_and_parse(self):
+            """
+            Test that rendering and then parsing returns the original object,
+            i.e. obj -> render -> parse -> obj.
+ """ + obj = {'foo': ['bar', 'baz']} + + renderer = YAMLRenderer() + parser = YAMLParser() + + content = renderer.render(obj, 'application/yaml') + data = parser.parse(StringIO(content)) + self.assertEqual(obj, data) + + +class XMLRendererTestCase(TestCase): + """ + Tests specific to the XML Renderer + """ + + _complex_data = { + "creation_date": datetime.datetime(2011, 12, 25, 12, 45, 00), + "name": "name", + "sub_data_list": [ + { + "sub_id": 1, + "sub_name": "first" + }, + { + "sub_id": 2, + "sub_name": "second" + } + ] + } + + def test_render_string(self): + """ + Test XML rendering. + """ + renderer = XMLRenderer() + content = renderer.render({'field': 'astring'}, 'application/xml') + self.assertXMLContains(content, '<field>astring</field>') + + def test_render_integer(self): + """ + Test XML rendering. + """ + renderer = XMLRenderer() + content = renderer.render({'field': 111}, 'application/xml') + self.assertXMLContains(content, '<field>111</field>') + + def test_render_datetime(self): + """ + Test XML rendering. + """ + renderer = XMLRenderer() + content = renderer.render({ + 'field': datetime.datetime(2011, 12, 25, 12, 45, 00) + }, 'application/xml') + self.assertXMLContains(content, '<field>2011-12-25 12:45:00</field>') + + def test_render_float(self): + """ + Test XML rendering. + """ + renderer = XMLRenderer() + content = renderer.render({'field': 123.4}, 'application/xml') + self.assertXMLContains(content, '<field>123.4</field>') + + def test_render_decimal(self): + """ + Test XML rendering. + """ + renderer = XMLRenderer() + content = renderer.render({'field': Decimal('111.2')}, 'application/xml') + self.assertXMLContains(content, '<field>111.2</field>') + + def test_render_none(self): + """ + Test XML rendering. + """ + renderer = XMLRenderer() + content = renderer.render({'field': None}, 'application/xml') + self.assertXMLContains(content, '<field></field>') + + def test_render_complex_data(self): + """ + Test XML rendering. + """ + renderer = XMLRenderer() + content = renderer.render(self._complex_data, 'application/xml') + self.assertXMLContains(content, '<sub_name>first</sub_name>') + self.assertXMLContains(content, '<sub_name>second</sub_name>') + + @unittest.skipUnless(etree, 'defusedxml not installed') + def test_render_and_parse_complex_data(self): + """ + Test XML rendering. 
+ """ + renderer = XMLRenderer() + content = StringIO(renderer.render(self._complex_data, 'application/xml')) + + parser = XMLParser() + complex_data_out = parser.parse(content) + error_msg = "complex data differs!IN:\n %s \n\n OUT:\n %s" % (repr(self._complex_data), repr(complex_data_out)) + self.assertEqual(self._complex_data, complex_data_out, error_msg) + + def assertXMLContains(self, xml, string): + self.assertTrue(xml.startswith('<?xml version="1.0" encoding="utf-8"?>\n<root>')) + self.assertTrue(xml.endswith('</root>')) + self.assertTrue(string in xml, '%r not in %r' % (string, xml)) + + +# Tests for caching issue, #346 +class CacheRenderTest(TestCase): + """ + Tests specific to caching responses + """ + + urls = 'rest_framework.tests.test_renderers' + + cache_key = 'just_a_cache_key' + + @classmethod + def _get_pickling_errors(cls, obj, seen=None): + """ Return any errors that would be raised if `obj' is pickled + Courtesy of koffie @ http://stackoverflow.com/a/7218986/109897 + """ + if seen == None: + seen = [] + try: + state = obj.__getstate__() + except AttributeError: + return + if state == None: + return + if isinstance(state, tuple): + if not isinstance(state[0], dict): + state = state[1] + else: + state = state[0].update(state[1]) + result = {} + for i in state: + try: + pickle.dumps(state[i], protocol=2) + except pickle.PicklingError: + if not state[i] in seen: + seen.append(state[i]) + result[i] = cls._get_pickling_errors(state[i], seen) + return result + + def http_resp(self, http_method, url): + """ + Simple wrapper for Client http requests + Removes the `client' and `request' attributes from as they are + added by django.test.client.Client and not part of caching + responses outside of tests. + """ + method = getattr(self.client, http_method) + resp = method(url) + del resp.client, resp.request + return resp + + def test_obj_pickling(self): + """ + Test that responses are properly pickled + """ + resp = self.http_resp('get', '/cache') + + # Make sure that no pickling errors occurred + self.assertEqual(self._get_pickling_errors(resp), {}) + + # Unfortunately LocMem backend doesn't raise PickleErrors but returns + # None instead. + cache.set(self.cache_key, resp) + self.assertTrue(cache.get(self.cache_key) is not None) + + def test_head_caching(self): + """ + Test caching of HEAD requests + """ + resp = self.http_resp('head', '/cache') + cache.set(self.cache_key, resp) + + cached_resp = cache.get(self.cache_key) + self.assertIsInstance(cached_resp, Response) + + def test_get_caching(self): + """ + Test caching of GET requests + """ + resp = self.http_resp('get', '/cache') + cache.set(self.cache_key, resp) + + cached_resp = cache.get(self.cache_key) + self.assertIsInstance(cached_resp, Response) + self.assertEqual(cached_resp.content, resp.content) diff --git a/awx/lib/site-packages/rest_framework/tests/test_request.py b/awx/lib/site-packages/rest_framework/tests/test_request.py new file mode 100644 index 0000000000..a5c5e84ce7 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_request.py @@ -0,0 +1,323 @@ +""" +Tests for content parsing, and form-overloaded content parsing. 
+""" +from __future__ import unicode_literals +from django.contrib.auth.models import User +from django.contrib.auth import authenticate, login, logout +from django.contrib.sessions.middleware import SessionMiddleware +from django.test import TestCase, Client +from django.test.client import RequestFactory +from rest_framework import status +from rest_framework.authentication import SessionAuthentication +from rest_framework.compat import patterns +from rest_framework.parsers import ( + BaseParser, + FormParser, + MultiPartParser, + JSONParser +) +from rest_framework.request import Request +from rest_framework.response import Response +from rest_framework.settings import api_settings +from rest_framework.views import APIView +from rest_framework.compat import six +import json + + +factory = RequestFactory() + + +class PlainTextParser(BaseParser): + media_type = 'text/plain' + + def parse(self, stream, media_type=None, parser_context=None): + """ + Returns a 2-tuple of `(data, files)`. + + `data` will simply be a string representing the body of the request. + `files` will always be `None`. + """ + return stream.read() + + +class TestMethodOverloading(TestCase): + def test_method(self): + """ + Request methods should be same as underlying request. + """ + request = Request(factory.get('/')) + self.assertEqual(request.method, 'GET') + request = Request(factory.post('/')) + self.assertEqual(request.method, 'POST') + + def test_overloaded_method(self): + """ + POST requests can be overloaded to another method by setting a + reserved form field + """ + request = Request(factory.post('/', {api_settings.FORM_METHOD_OVERRIDE: 'DELETE'})) + self.assertEqual(request.method, 'DELETE') + + def test_x_http_method_override_header(self): + """ + POST requests can also be overloaded to another method by setting + the X-HTTP-Method-Override header. + """ + request = Request(factory.post('/', {'foo': 'bar'}, HTTP_X_HTTP_METHOD_OVERRIDE='DELETE')) + self.assertEqual(request.method, 'DELETE') + + +class TestContentParsing(TestCase): + def test_standard_behaviour_determines_no_content_GET(self): + """ + Ensure request.DATA returns empty QueryDict for GET request. + """ + request = Request(factory.get('/')) + self.assertEqual(request.DATA, {}) + + def test_standard_behaviour_determines_no_content_HEAD(self): + """ + Ensure request.DATA returns empty QueryDict for HEAD request. + """ + request = Request(factory.head('/')) + self.assertEqual(request.DATA, {}) + + def test_request_DATA_with_form_content(self): + """ + Ensure request.DATA returns content for POST request with form content. + """ + data = {'qwerty': 'uiop'} + request = Request(factory.post('/', data)) + request.parsers = (FormParser(), MultiPartParser()) + self.assertEqual(list(request.DATA.items()), list(data.items())) + + def test_request_DATA_with_text_content(self): + """ + Ensure request.DATA returns content for POST request with + non-form content. + """ + content = six.b('qwerty') + content_type = 'text/plain' + request = Request(factory.post('/', content, content_type=content_type)) + request.parsers = (PlainTextParser(),) + self.assertEqual(request.DATA, content) + + def test_request_POST_with_form_content(self): + """ + Ensure request.POST returns content for POST request with form content. 
+ """ + data = {'qwerty': 'uiop'} + request = Request(factory.post('/', data)) + request.parsers = (FormParser(), MultiPartParser()) + self.assertEqual(list(request.POST.items()), list(data.items())) + + def test_standard_behaviour_determines_form_content_PUT(self): + """ + Ensure request.DATA returns content for PUT request with form content. + """ + data = {'qwerty': 'uiop'} + + from django import VERSION + + if VERSION >= (1, 5): + from django.test.client import MULTIPART_CONTENT, BOUNDARY, encode_multipart + request = Request(factory.put('/', encode_multipart(BOUNDARY, data), + content_type=MULTIPART_CONTENT)) + else: + request = Request(factory.put('/', data)) + + request.parsers = (FormParser(), MultiPartParser()) + self.assertEqual(list(request.DATA.items()), list(data.items())) + + def test_standard_behaviour_determines_non_form_content_PUT(self): + """ + Ensure request.DATA returns content for PUT request with + non-form content. + """ + content = six.b('qwerty') + content_type = 'text/plain' + request = Request(factory.put('/', content, content_type=content_type)) + request.parsers = (PlainTextParser(), ) + self.assertEqual(request.DATA, content) + + def test_overloaded_behaviour_allows_content_tunnelling(self): + """ + Ensure request.DATA returns content for overloaded POST request. + """ + json_data = {'foobar': 'qwerty'} + content = json.dumps(json_data) + content_type = 'application/json' + form_data = { + api_settings.FORM_CONTENT_OVERRIDE: content, + api_settings.FORM_CONTENTTYPE_OVERRIDE: content_type + } + request = Request(factory.post('/', form_data)) + request.parsers = (JSONParser(), ) + self.assertEqual(request.DATA, json_data) + + # def test_accessing_post_after_data_form(self): + # """ + # Ensures request.POST can be accessed after request.DATA in + # form request. + # """ + # data = {'qwerty': 'uiop'} + # request = factory.post('/', data=data) + # self.assertEqual(request.DATA.items(), data.items()) + # self.assertEqual(request.POST.items(), data.items()) + + # def test_accessing_post_after_data_for_json(self): + # """ + # Ensures request.POST can be accessed after request.DATA in + # json request. + # """ + # data = {'qwerty': 'uiop'} + # content = json.dumps(data) + # content_type = 'application/json' + # parsers = (JSONParser, ) + + # request = factory.post('/', content, content_type=content_type, + # parsers=parsers) + # self.assertEqual(request.DATA.items(), data.items()) + # self.assertEqual(request.POST.items(), []) + + # def test_accessing_post_after_data_for_overloaded_json(self): + # """ + # Ensures request.POST can be accessed after request.DATA in overloaded + # json request. + # """ + # data = {'qwerty': 'uiop'} + # content = json.dumps(data) + # content_type = 'application/json' + # parsers = (JSONParser, ) + # form_data = {Request._CONTENT_PARAM: content, + # Request._CONTENTTYPE_PARAM: content_type} + + # request = factory.post('/', form_data, parsers=parsers) + # self.assertEqual(request.DATA.items(), data.items()) + # self.assertEqual(request.POST.items(), form_data.items()) + + # def test_accessing_data_after_post_form(self): + # """ + # Ensures request.DATA can be accessed after request.POST in + # form request. 
+ # """ + # data = {'qwerty': 'uiop'} + # parsers = (FormParser, MultiPartParser) + # request = factory.post('/', data, parsers=parsers) + + # self.assertEqual(request.POST.items(), data.items()) + # self.assertEqual(request.DATA.items(), data.items()) + + # def test_accessing_data_after_post_for_json(self): + # """ + # Ensures request.DATA can be accessed after request.POST in + # json request. + # """ + # data = {'qwerty': 'uiop'} + # content = json.dumps(data) + # content_type = 'application/json' + # parsers = (JSONParser, ) + # request = factory.post('/', content, content_type=content_type, + # parsers=parsers) + # self.assertEqual(request.POST.items(), []) + # self.assertEqual(request.DATA.items(), data.items()) + + # def test_accessing_data_after_post_for_overloaded_json(self): + # """ + # Ensures request.DATA can be accessed after request.POST in overloaded + # json request + # """ + # data = {'qwerty': 'uiop'} + # content = json.dumps(data) + # content_type = 'application/json' + # parsers = (JSONParser, ) + # form_data = {Request._CONTENT_PARAM: content, + # Request._CONTENTTYPE_PARAM: content_type} + + # request = factory.post('/', form_data, parsers=parsers) + # self.assertEqual(request.POST.items(), form_data.items()) + # self.assertEqual(request.DATA.items(), data.items()) + + +class MockView(APIView): + authentication_classes = (SessionAuthentication,) + + def post(self, request): + if request.POST.get('example') is not None: + return Response(status=status.HTTP_200_OK) + + return Response(status=status.INTERNAL_SERVER_ERROR) + +urlpatterns = patterns('', + (r'^$', MockView.as_view()), +) + + +class TestContentParsingWithAuthentication(TestCase): + urls = 'rest_framework.tests.test_request' + + def setUp(self): + self.csrf_client = Client(enforce_csrf_checks=True) + self.username = 'john' + self.email = 'lennon@thebeatles.com' + self.password = 'password' + self.user = User.objects.create_user(self.username, self.email, self.password) + + def test_user_logged_in_authentication_has_POST_when_not_logged_in(self): + """ + Ensures request.POST exists after SessionAuthentication when user + doesn't log in. 
+ """ + content = {'example': 'example'} + + response = self.client.post('/', content) + self.assertEqual(status.HTTP_200_OK, response.status_code) + + response = self.csrf_client.post('/', content) + self.assertEqual(status.HTTP_200_OK, response.status_code) + + # def test_user_logged_in_authentication_has_post_when_logged_in(self): + # """Ensures request.POST exists after UserLoggedInAuthentication when user does log in""" + # self.client.login(username='john', password='password') + # self.csrf_client.login(username='john', password='password') + # content = {'example': 'example'} + + # response = self.client.post('/', content) + # self.assertEqual(status.OK, response.status_code, "POST data is malformed") + + # response = self.csrf_client.post('/', content) + # self.assertEqual(status.OK, response.status_code, "POST data is malformed") + + +class TestUserSetter(TestCase): + + def setUp(self): + # Pass request object through session middleware so session is + # available to login and logout functions + self.request = Request(factory.get('/')) + SessionMiddleware().process_request(self.request) + + User.objects.create_user('ringo', 'starr@thebeatles.com', 'yellow') + self.user = authenticate(username='ringo', password='yellow') + + def test_user_can_be_set(self): + self.request.user = self.user + self.assertEqual(self.request.user, self.user) + + def test_user_can_login(self): + login(self.request, self.user) + self.assertEqual(self.request.user, self.user) + + def test_user_can_logout(self): + self.request.user = self.user + self.assertFalse(self.request.user.is_anonymous()) + logout(self.request) + self.assertTrue(self.request.user.is_anonymous()) + + +class TestAuthSetter(TestCase): + + def test_auth_can_be_set(self): + request = Request(factory.get('/')) + request.auth = 'DUMMY' + self.assertEqual(request.auth, 'DUMMY') diff --git a/awx/lib/site-packages/rest_framework/tests/test_response.py b/awx/lib/site-packages/rest_framework/tests/test_response.py new file mode 100644 index 0000000000..eea3c6418a --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_response.py @@ -0,0 +1,278 @@ +from __future__ import unicode_literals +from django.test import TestCase +from rest_framework.tests.models import BasicModel, BasicModelSerializer +from rest_framework.compat import patterns, url, include +from rest_framework.response import Response +from rest_framework.views import APIView +from rest_framework import generics +from rest_framework import routers +from rest_framework import status +from rest_framework.renderers import ( + BaseRenderer, + JSONRenderer, + BrowsableAPIRenderer +) +from rest_framework import viewsets +from rest_framework.settings import api_settings +from rest_framework.compat import six + + +class MockPickleRenderer(BaseRenderer): + media_type = 'application/pickle' + + +class MockJsonRenderer(BaseRenderer): + media_type = 'application/json' + + +class MockTextMediaRenderer(BaseRenderer): + media_type = 'text/html' + +DUMMYSTATUS = status.HTTP_200_OK +DUMMYCONTENT = 'dummycontent' + +RENDERER_A_SERIALIZER = lambda x: ('Renderer A: %s' % x).encode('ascii') +RENDERER_B_SERIALIZER = lambda x: ('Renderer B: %s' % x).encode('ascii') + + +class RendererA(BaseRenderer): + media_type = 'mock/renderera' + format = "formata" + + def render(self, data, media_type=None, renderer_context=None): + return RENDERER_A_SERIALIZER(data) + + +class RendererB(BaseRenderer): + media_type = 'mock/rendererb' + format = "formatb" + + def render(self, data, media_type=None, 
renderer_context=None):
+        return RENDERER_B_SERIALIZER(data)
+
+
+class RendererC(RendererB):
+    media_type = 'mock/rendererc'
+    format = 'formatc'
+    charset = "rendererc"
+
+
+class MockView(APIView):
+    renderer_classes = (RendererA, RendererB, RendererC)
+
+    def get(self, request, **kwargs):
+        return Response(DUMMYCONTENT, status=DUMMYSTATUS)
+
+
+class MockViewSettingContentType(APIView):
+    renderer_classes = (RendererA, RendererB, RendererC)
+
+    def get(self, request, **kwargs):
+        return Response(DUMMYCONTENT, status=DUMMYSTATUS, content_type='setbyview')
+
+
+class HTMLView(APIView):
+    renderer_classes = (BrowsableAPIRenderer, )
+
+    def get(self, request, **kwargs):
+        return Response('text')
+
+
+class HTMLView1(APIView):
+    renderer_classes = (BrowsableAPIRenderer, JSONRenderer)
+
+    def get(self, request, **kwargs):
+        return Response('text')
+
+
+class HTMLNewModelViewSet(viewsets.ModelViewSet):
+    model = BasicModel
+
+
+class HTMLNewModelView(generics.ListCreateAPIView):
+    renderer_classes = (BrowsableAPIRenderer,)
+    permission_classes = []
+    serializer_class = BasicModelSerializer
+    model = BasicModel
+
+
+new_model_viewset_router = routers.DefaultRouter()
+new_model_viewset_router.register(r'', HTMLNewModelViewSet)
+
+
+urlpatterns = patterns('',
+    url(r'^setbyview$', MockViewSettingContentType.as_view(renderer_classes=[RendererA, RendererB, RendererC])),
+    url(r'^.*\.(?P<format>.+)$', MockView.as_view(renderer_classes=[RendererA, RendererB, RendererC])),
+    url(r'^$', MockView.as_view(renderer_classes=[RendererA, RendererB, RendererC])),
+    url(r'^html$', HTMLView.as_view()),
+    url(r'^html1$', HTMLView1.as_view()),
+    url(r'^html_new_model$', HTMLNewModelView.as_view()),
+    url(r'^html_new_model_viewset', include(new_model_viewset_router.urls)),
+    url(r'^restframework', include('rest_framework.urls', namespace='rest_framework'))
+)
+
+
+# TODO: Clean tests below - remove duplicates with the above, better unit testing, ...
+class RendererIntegrationTests(TestCase):
+    """
+    End-to-end testing of renderers using a ResponseMixin on a generic view.
+    """
+
+    urls = 'rest_framework.tests.test_response'
+
+    def test_default_renderer_serializes_content(self):
+        """If the Accept header is not set the default renderer should serialize the response."""
+        resp = self.client.get('/')
+        self.assertEqual(resp['Content-Type'], RendererA.media_type + '; charset=utf-8')
+        self.assertEqual(resp.content, RENDERER_A_SERIALIZER(DUMMYCONTENT))
+        self.assertEqual(resp.status_code, DUMMYSTATUS)
+
+    def test_head_method_serializes_no_content(self):
+        """No response body must be included for HEAD requests."""
+        resp = self.client.head('/')
+        self.assertEqual(resp.status_code, DUMMYSTATUS)
+        self.assertEqual(resp['Content-Type'], RendererA.media_type + '; charset=utf-8')
+        self.assertEqual(resp.content, six.b(''))
+
+    def test_default_renderer_serializes_content_on_accept_any(self):
+        """If the Accept header is set to */* the default renderer should serialize the response."""
+        resp = self.client.get('/', HTTP_ACCEPT='*/*')
+        self.assertEqual(resp['Content-Type'], RendererA.media_type + '; charset=utf-8')
+        self.assertEqual(resp.content, RENDERER_A_SERIALIZER(DUMMYCONTENT))
+        self.assertEqual(resp.status_code, DUMMYSTATUS)
+
+    def test_specified_renderer_serializes_content_default_case(self):
+        """If the Accept header is set the specified renderer should serialize the response.
+ (In this case we check that works for the default renderer)""" + resp = self.client.get('/', HTTP_ACCEPT=RendererA.media_type) + self.assertEqual(resp['Content-Type'], RendererA.media_type + '; charset=utf-8') + self.assertEqual(resp.content, RENDERER_A_SERIALIZER(DUMMYCONTENT)) + self.assertEqual(resp.status_code, DUMMYSTATUS) + + def test_specified_renderer_serializes_content_non_default_case(self): + """If the Accept header is set the specified renderer should serialize the response. + (In this case we check that works for a non-default renderer)""" + resp = self.client.get('/', HTTP_ACCEPT=RendererB.media_type) + self.assertEqual(resp['Content-Type'], RendererB.media_type + '; charset=utf-8') + self.assertEqual(resp.content, RENDERER_B_SERIALIZER(DUMMYCONTENT)) + self.assertEqual(resp.status_code, DUMMYSTATUS) + + def test_specified_renderer_serializes_content_on_accept_query(self): + """The '_accept' query string should behave in the same way as the Accept header.""" + param = '?%s=%s' % ( + api_settings.URL_ACCEPT_OVERRIDE, + RendererB.media_type + ) + resp = self.client.get('/' + param) + self.assertEqual(resp['Content-Type'], RendererB.media_type + '; charset=utf-8') + self.assertEqual(resp.content, RENDERER_B_SERIALIZER(DUMMYCONTENT)) + self.assertEqual(resp.status_code, DUMMYSTATUS) + + def test_specified_renderer_serializes_content_on_format_query(self): + """If a 'format' query is specified, the renderer with the matching + format attribute should serialize the response.""" + resp = self.client.get('/?format=%s' % RendererB.format) + self.assertEqual(resp['Content-Type'], RendererB.media_type + '; charset=utf-8') + self.assertEqual(resp.content, RENDERER_B_SERIALIZER(DUMMYCONTENT)) + self.assertEqual(resp.status_code, DUMMYSTATUS) + + def test_specified_renderer_serializes_content_on_format_kwargs(self): + """If a 'format' keyword arg is specified, the renderer with the matching + format attribute should serialize the response.""" + resp = self.client.get('/something.formatb') + self.assertEqual(resp['Content-Type'], RendererB.media_type + '; charset=utf-8') + self.assertEqual(resp.content, RENDERER_B_SERIALIZER(DUMMYCONTENT)) + self.assertEqual(resp.status_code, DUMMYSTATUS) + + def test_specified_renderer_is_used_on_format_query_with_matching_accept(self): + """If both a 'format' query and a matching Accept header specified, + the renderer with the matching format attribute should serialize the response.""" + resp = self.client.get('/?format=%s' % RendererB.format, + HTTP_ACCEPT=RendererB.media_type) + self.assertEqual(resp['Content-Type'], RendererB.media_type + '; charset=utf-8') + self.assertEqual(resp.content, RENDERER_B_SERIALIZER(DUMMYCONTENT)) + self.assertEqual(resp.status_code, DUMMYSTATUS) + + +class Issue122Tests(TestCase): + """ + Tests that covers #122. + """ + urls = 'rest_framework.tests.test_response' + + def test_only_html_renderer(self): + """ + Test if no infinite recursion occurs. + """ + self.client.get('/html') + + def test_html_renderer_is_first(self): + """ + Test if no infinite recursion occurs. 
+ """ + self.client.get('/html1') + + +class Issue467Tests(TestCase): + """ + Tests for #467 + """ + + urls = 'rest_framework.tests.test_response' + + def test_form_has_label_and_help_text(self): + resp = self.client.get('/html_new_model') + self.assertEqual(resp['Content-Type'], 'text/html; charset=utf-8') + self.assertContains(resp, 'Text comes here') + self.assertContains(resp, 'Text description.') + + +class Issue807Tests(TestCase): + """ + Covers #807 + """ + + urls = 'rest_framework.tests.test_response' + + def test_does_not_append_charset_by_default(self): + """ + Renderers don't include a charset unless set explicitly. + """ + headers = {"HTTP_ACCEPT": RendererA.media_type} + resp = self.client.get('/', **headers) + expected = "{0}; charset={1}".format(RendererA.media_type, 'utf-8') + self.assertEqual(expected, resp['Content-Type']) + + def test_if_there_is_charset_specified_on_renderer_it_gets_appended(self): + """ + If renderer class has charset attribute declared, it gets appended + to Response's Content-Type + """ + headers = {"HTTP_ACCEPT": RendererC.media_type} + resp = self.client.get('/', **headers) + expected = "{0}; charset={1}".format(RendererC.media_type, RendererC.charset) + self.assertEqual(expected, resp['Content-Type']) + + def test_content_type_set_explictly_on_response(self): + """ + The content type may be set explictly on the response. + """ + headers = {"HTTP_ACCEPT": RendererC.media_type} + resp = self.client.get('/setbyview', **headers) + self.assertEqual('setbyview', resp['Content-Type']) + + def test_viewset_label_help_text(self): + param = '?%s=%s' % ( + api_settings.URL_ACCEPT_OVERRIDE, + 'text/html' + ) + resp = self.client.get('/html_new_model_viewset/' + param) + self.assertEqual(resp['Content-Type'], 'text/html; charset=utf-8') + self.assertContains(resp, 'Text comes here') + self.assertContains(resp, 'Text description.') + + def test_form_has_label_and_help_text(self): + resp = self.client.get('/html_new_model') + self.assertEqual(resp['Content-Type'], 'text/html; charset=utf-8') + self.assertContains(resp, 'Text comes here') + self.assertContains(resp, 'Text description.') diff --git a/awx/lib/site-packages/rest_framework/tests/test_reverse.py b/awx/lib/site-packages/rest_framework/tests/test_reverse.py new file mode 100644 index 0000000000..93ef563776 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_reverse.py @@ -0,0 +1,27 @@ +from __future__ import unicode_literals +from django.test import TestCase +from django.test.client import RequestFactory +from rest_framework.compat import patterns, url +from rest_framework.reverse import reverse + +factory = RequestFactory() + + +def null_view(request): + pass + +urlpatterns = patterns('', + url(r'^view$', null_view, name='view'), +) + + +class ReverseTests(TestCase): + """ + Tests for fully qualified URLs when using `reverse`. 
+ """ + urls = 'rest_framework.tests.test_reverse' + + def test_reversed_urls_are_fully_qualified(self): + request = factory.get('/view') + url = reverse('view', request=request) + self.assertEqual(url, 'http://testserver/view') diff --git a/awx/lib/site-packages/rest_framework/tests/test_routers.py b/awx/lib/site-packages/rest_framework/tests/test_routers.py new file mode 100644 index 0000000000..10d3cc25a0 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_routers.py @@ -0,0 +1,121 @@ +from __future__ import unicode_literals +from django.db import models +from django.test import TestCase +from django.test.client import RequestFactory +from rest_framework import serializers, viewsets +from rest_framework.compat import include, patterns, url +from rest_framework.decorators import link, action +from rest_framework.response import Response +from rest_framework.routers import SimpleRouter + +factory = RequestFactory() + +urlpatterns = patterns('',) + + +class BasicViewSet(viewsets.ViewSet): + def list(self, request, *args, **kwargs): + return Response({'method': 'list'}) + + @action() + def action1(self, request, *args, **kwargs): + return Response({'method': 'action1'}) + + @action() + def action2(self, request, *args, **kwargs): + return Response({'method': 'action2'}) + + @action(methods=['post', 'delete']) + def action3(self, request, *args, **kwargs): + return Response({'method': 'action2'}) + + @link() + def link1(self, request, *args, **kwargs): + return Response({'method': 'link1'}) + + @link() + def link2(self, request, *args, **kwargs): + return Response({'method': 'link2'}) + + +class TestSimpleRouter(TestCase): + def setUp(self): + self.router = SimpleRouter() + + def test_link_and_action_decorator(self): + routes = self.router.get_routes(BasicViewSet) + decorator_routes = routes[2:] + # Make sure all these endpoints exist and none have been clobbered + for i, endpoint in enumerate(['action1', 'action2', 'action3', 'link1', 'link2']): + route = decorator_routes[i] + # check url listing + self.assertEqual(route.url, + '^{{prefix}}/{{lookup}}/{0}/$'.format(endpoint)) + # check method to function mapping + if endpoint == 'action3': + methods_map = ['post', 'delete'] + elif endpoint.startswith('action'): + methods_map = ['post'] + else: + methods_map = ['get'] + for method in methods_map: + self.assertEqual(route.mapping[method], endpoint) + + +class RouterTestModel(models.Model): + uuid = models.CharField(max_length=20) + text = models.CharField(max_length=200) + + +class TestCustomLookupFields(TestCase): + """ + Ensure that custom lookup fields are correctly routed. 
+ """ + urls = 'rest_framework.tests.test_routers' + + def setUp(self): + class NoteSerializer(serializers.HyperlinkedModelSerializer): + class Meta: + model = RouterTestModel + lookup_field = 'uuid' + fields = ('url', 'uuid', 'text') + + class NoteViewSet(viewsets.ModelViewSet): + queryset = RouterTestModel.objects.all() + serializer_class = NoteSerializer + lookup_field = 'uuid' + + RouterTestModel.objects.create(uuid='123', text='foo bar') + + self.router = SimpleRouter() + self.router.register(r'notes', NoteViewSet) + + from rest_framework.tests import test_routers + urls = getattr(test_routers, 'urlpatterns') + urls += patterns('', + url(r'^', include(self.router.urls)), + ) + + def test_custom_lookup_field_route(self): + detail_route = self.router.urls[-1] + detail_url_pattern = detail_route.regex.pattern + self.assertIn('<uuid>', detail_url_pattern) + + def test_retrieve_lookup_field_list_view(self): + response = self.client.get('/notes/') + self.assertEquals(response.data, + [{ + "url": "http://testserver/notes/123/", + "uuid": "123", "text": "foo bar" + }] + ) + + def test_retrieve_lookup_field_detail_view(self): + response = self.client.get('/notes/123/') + self.assertEquals(response.data, + { + "url": "http://testserver/notes/123/", + "uuid": "123", "text": "foo bar" + } + ) + diff --git a/awx/lib/site-packages/rest_framework/tests/test_serializer.py b/awx/lib/site-packages/rest_framework/tests/test_serializer.py new file mode 100644 index 0000000000..6cc913c5cd --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_serializer.py @@ -0,0 +1,1558 @@ +from __future__ import unicode_literals +from django.db import models +from django.db.models.fields import BLANK_CHOICE_DASH +from django.test import TestCase +from django.utils.datastructures import MultiValueDict +from django.utils.translation import ugettext_lazy as _ +from rest_framework import serializers, fields, relations +from rest_framework.tests.models import (HasPositiveIntegerAsChoice, Album, ActionItem, Anchor, BasicModel, + BlankFieldModel, BlogPost, BlogPostComment, Book, CallableDefaultValueModel, DefaultValueModel, + ManyToManyModel, Person, ReadOnlyManyToManyModel, Photo, RESTFrameworkModel) +from rest_framework.tests.models import BasicModelSerializer +import datetime +import pickle + + +class SubComment(object): + def __init__(self, sub_comment): + self.sub_comment = sub_comment + + +class Comment(object): + def __init__(self, email, content, created): + self.email = email + self.content = content + self.created = created or datetime.datetime.now() + + def __eq__(self, other): + return all([getattr(self, attr) == getattr(other, attr) + for attr in ('email', 'content', 'created')]) + + def get_sub_comment(self): + sub_comment = SubComment('And Merry Christmas!') + return sub_comment + + +class CommentSerializer(serializers.Serializer): + email = serializers.EmailField() + content = serializers.CharField(max_length=1000) + created = serializers.DateTimeField() + sub_comment = serializers.Field(source='get_sub_comment.sub_comment') + + def restore_object(self, data, instance=None): + if instance is None: + return Comment(**data) + for key, val in data.items(): + setattr(instance, key, val) + return instance + + +class NamesSerializer(serializers.Serializer): + first = serializers.CharField() + last = serializers.CharField(required=False, default='') + initials = serializers.CharField(required=False, default='') + + +class PersonIdentifierSerializer(serializers.Serializer): + ssn = serializers.CharField() 
+ names = NamesSerializer(source='names', required=False) + + +class BookSerializer(serializers.ModelSerializer): + isbn = serializers.RegexField(regex=r'^[0-9]{13}$', error_messages={'invalid': 'isbn has to be exact 13 numbers'}) + + class Meta: + model = Book + + +class ActionItemSerializer(serializers.ModelSerializer): + + class Meta: + model = ActionItem + + +class ActionItemSerializerCustomRestore(serializers.ModelSerializer): + + class Meta: + model = ActionItem + + def restore_object(self, data, instance=None): + if instance is None: + return ActionItem(**data) + for key, val in data.items(): + setattr(instance, key, val) + return instance + + +class PersonSerializer(serializers.ModelSerializer): + info = serializers.Field(source='info') + + class Meta: + model = Person + fields = ('name', 'age', 'info') + read_only_fields = ('age',) + + +class NestedSerializer(serializers.Serializer): + info = serializers.Field() + + +class ModelSerializerWithNestedSerializer(serializers.ModelSerializer): + nested = NestedSerializer(source='*') + + class Meta: + model = Person + + +class PersonSerializerInvalidReadOnly(serializers.ModelSerializer): + """ + Testing for #652. + """ + info = serializers.Field(source='info') + + class Meta: + model = Person + fields = ('name', 'age', 'info') + read_only_fields = ('age', 'info') + + +class AlbumsSerializer(serializers.ModelSerializer): + + class Meta: + model = Album + fields = ['title'] # lists are also valid options + + +class PositiveIntegerAsChoiceSerializer(serializers.ModelSerializer): + class Meta: + model = HasPositiveIntegerAsChoice + fields = ['some_integer'] + + +class BasicTests(TestCase): + def setUp(self): + self.comment = Comment( + 'tom@example.com', + 'Happy new year!', + datetime.datetime(2012, 1, 1) + ) + self.data = { + 'email': 'tom@example.com', + 'content': 'Happy new year!', + 'created': datetime.datetime(2012, 1, 1), + 'sub_comment': 'This wont change' + } + self.expected = { + 'email': 'tom@example.com', + 'content': 'Happy new year!', + 'created': datetime.datetime(2012, 1, 1), + 'sub_comment': 'And Merry Christmas!' 
+ } + self.person_data = {'name': 'dwight', 'age': 35} + self.person = Person(**self.person_data) + self.person.save() + + def test_empty(self): + serializer = CommentSerializer() + expected = { + 'email': '', + 'content': '', + 'created': None, + 'sub_comment': '' + } + self.assertEqual(serializer.data, expected) + + def test_retrieve(self): + serializer = CommentSerializer(self.comment) + self.assertEqual(serializer.data, self.expected) + + def test_create(self): + serializer = CommentSerializer(data=self.data) + expected = self.comment + self.assertEqual(serializer.is_valid(), True) + self.assertEqual(serializer.object, expected) + self.assertFalse(serializer.object is expected) + self.assertEqual(serializer.data['sub_comment'], 'And Merry Christmas!') + + def test_create_nested(self): + """Test a serializer with nested data.""" + names = {'first': 'John', 'last': 'Doe', 'initials': 'jd'} + data = {'ssn': '1234567890', 'names': names} + serializer = PersonIdentifierSerializer(data=data) + + self.assertEqual(serializer.is_valid(), True) + self.assertEqual(serializer.object, data) + self.assertFalse(serializer.object is data) + self.assertEqual(serializer.data['names'], names) + + def test_create_partial_nested(self): + """Test a serializer with nested data which has missing fields.""" + names = {'first': 'John'} + data = {'ssn': '1234567890', 'names': names} + serializer = PersonIdentifierSerializer(data=data) + + expected_names = {'first': 'John', 'last': '', 'initials': ''} + data['names'] = expected_names + + self.assertEqual(serializer.is_valid(), True) + self.assertEqual(serializer.object, data) + self.assertFalse(serializer.object is expected_names) + self.assertEqual(serializer.data['names'], expected_names) + + def test_null_nested(self): + """Test a serializer with a nonexistent nested field""" + data = {'ssn': '1234567890'} + serializer = PersonIdentifierSerializer(data=data) + + self.assertEqual(serializer.is_valid(), True) + self.assertEqual(serializer.object, data) + self.assertFalse(serializer.object is data) + expected = {'ssn': '1234567890', 'names': None} + self.assertEqual(serializer.data, expected) + + def test_update(self): + serializer = CommentSerializer(self.comment, data=self.data) + expected = self.comment + self.assertEqual(serializer.is_valid(), True) + self.assertEqual(serializer.object, expected) + self.assertTrue(serializer.object is expected) + self.assertEqual(serializer.data['sub_comment'], 'And Merry Christmas!') + + def test_partial_update(self): + msg = 'Merry New Year!' 
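+        # A non-partial serializer treats every writable field as required,
+        # so data carrying only 'content' must fail validation; with
+        # partial=True the omitted fields keep their current values on the
+        # instance, as the assertions below verify.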
+ partial_data = {'content': msg} + serializer = CommentSerializer(self.comment, data=partial_data) + self.assertEqual(serializer.is_valid(), False) + serializer = CommentSerializer(self.comment, data=partial_data, partial=True) + expected = self.comment + self.assertEqual(serializer.is_valid(), True) + self.assertEqual(serializer.object, expected) + self.assertTrue(serializer.object is expected) + self.assertEqual(serializer.data['content'], msg) + + def test_model_fields_as_expected(self): + """ + Make sure that the fields returned are the same as defined + in the Meta data + """ + serializer = PersonSerializer(self.person) + self.assertEqual(set(serializer.data.keys()), + set(['name', 'age', 'info'])) + + def test_field_with_dictionary(self): + """ + Make sure that dictionaries from fields are left intact + """ + serializer = PersonSerializer(self.person) + expected = self.person_data + self.assertEqual(serializer.data['info'], expected) + + def test_read_only_fields(self): + """ + Attempting to update fields set as read_only should have no effect. + """ + serializer = PersonSerializer(self.person, data={'name': 'dwight', 'age': 99}) + self.assertEqual(serializer.is_valid(), True) + instance = serializer.save() + self.assertEqual(serializer.errors, {}) + # Assert age is unchanged (35) + self.assertEqual(instance.age, self.person_data['age']) + + def test_invalid_read_only_fields(self): + """ + Regression test for #652. + """ + self.assertRaises(AssertionError, PersonSerializerInvalidReadOnly, []) + + +class DictStyleSerializer(serializers.Serializer): + """ + Note that we don't have any `restore_object` method, so the default + case of simply returning a dict will apply. + """ + email = serializers.EmailField() + + +class DictStyleSerializerTests(TestCase): + def test_dict_style_deserialize(self): + """ + Ensure serializers can deserialize into a dict. + """ + data = {'email': 'foo@example.com'} + serializer = DictStyleSerializer(data=data) + self.assertTrue(serializer.is_valid()) + self.assertEqual(serializer.data, data) + + def test_dict_style_serialize(self): + """ + Ensure serializers can serialize dict objects. 
+ """ + data = {'email': 'foo@example.com'} + serializer = DictStyleSerializer(data) + self.assertEqual(serializer.data, data) + + +class ValidationTests(TestCase): + def setUp(self): + self.comment = Comment( + 'tom@example.com', + 'Happy new year!', + datetime.datetime(2012, 1, 1) + ) + self.data = { + 'email': 'tom@example.com', + 'content': 'x' * 1001, + 'created': datetime.datetime(2012, 1, 1) + } + self.actionitem = ActionItem(title='Some to do item',) + + def test_create(self): + serializer = CommentSerializer(data=self.data) + self.assertEqual(serializer.is_valid(), False) + self.assertEqual(serializer.errors, {'content': ['Ensure this value has at most 1000 characters (it has 1001).']}) + + def test_update(self): + serializer = CommentSerializer(self.comment, data=self.data) + self.assertEqual(serializer.is_valid(), False) + self.assertEqual(serializer.errors, {'content': ['Ensure this value has at most 1000 characters (it has 1001).']}) + + def test_update_missing_field(self): + data = { + 'content': 'xxx', + 'created': datetime.datetime(2012, 1, 1) + } + serializer = CommentSerializer(self.comment, data=data) + self.assertEqual(serializer.is_valid(), False) + self.assertEqual(serializer.errors, {'email': ['This field is required.']}) + + def test_missing_bool_with_default(self): + """Make sure that a boolean value with a 'False' value is not + mistaken for not having a default.""" + data = { + 'title': 'Some action item', + #No 'done' value. + } + serializer = ActionItemSerializer(self.actionitem, data=data) + self.assertEqual(serializer.is_valid(), True) + self.assertEqual(serializer.errors, {}) + + def test_cross_field_validation(self): + + class CommentSerializerWithCrossFieldValidator(CommentSerializer): + + def validate(self, attrs): + if attrs["email"] not in attrs["content"]: + raise serializers.ValidationError("Email address not in content") + return attrs + + data = { + 'email': 'tom@example.com', + 'content': 'A comment from tom@example.com', + 'created': datetime.datetime(2012, 1, 1) + } + + serializer = CommentSerializerWithCrossFieldValidator(data=data) + self.assertTrue(serializer.is_valid()) + + data['content'] = 'A comment from foo@bar.com' + + serializer = CommentSerializerWithCrossFieldValidator(data=data) + self.assertFalse(serializer.is_valid()) + self.assertEqual(serializer.errors, {'non_field_errors': ['Email address not in content']}) + + def test_null_is_true_fields(self): + """ + Omitting a value for null-field should validate. + """ + serializer = PersonSerializer(data={'name': 'marko'}) + self.assertEqual(serializer.is_valid(), True) + self.assertEqual(serializer.errors, {}) + + def test_modelserializer_max_length_exceeded(self): + data = { + 'title': 'x' * 201, + } + serializer = ActionItemSerializer(data=data) + self.assertEqual(serializer.is_valid(), False) + self.assertEqual(serializer.errors, {'title': ['Ensure this value has at most 200 characters (it has 201).']}) + + def test_modelserializer_max_length_exceeded_with_custom_restore(self): + """ + When overriding ModelSerializer.restore_object, validation tests should still apply. + Regression test for #623. 
+ + https://github.com/tomchristie/django-rest-framework/pull/623 + """ + data = { + 'title': 'x' * 201, + } + serializer = ActionItemSerializerCustomRestore(data=data) + self.assertEqual(serializer.is_valid(), False) + self.assertEqual(serializer.errors, {'title': ['Ensure this value has at most 200 characters (it has 201).']}) + + def test_default_modelfield_max_length_exceeded(self): + data = { + 'title': 'Testing "info" field...', + 'info': 'x' * 13, + } + serializer = ActionItemSerializer(data=data) + self.assertEqual(serializer.is_valid(), False) + self.assertEqual(serializer.errors, {'info': ['Ensure this value has at most 12 characters (it has 13).']}) + + def test_datetime_validation_failure(self): + """ + Test DateTimeField validation errors on non-str values. + Regression test for #669. + + https://github.com/tomchristie/django-rest-framework/issues/669 + """ + data = self.data + data['created'] = 0 + + serializer = CommentSerializer(data=data) + self.assertEqual(serializer.is_valid(), False) + + self.assertIn('created', serializer.errors) + + def test_missing_model_field_exception_msg(self): + """ + Assert that a meaningful exception message is outputted when the model + field is missing (e.g. when mistyping ``model``). + """ + class BrokenModelSerializer(serializers.ModelSerializer): + class Meta: + fields = ['some_field'] + + try: + BrokenModelSerializer() + except AssertionError as e: + self.assertEqual(e.args[0], "Serializer class 'BrokenModelSerializer' is missing 'model' Meta option") + except: + self.fail('Wrong exception type thrown.') + + def test_writable_star_source_on_nested_serializer(self): + """ + Assert that a nested serializer instantiated with source='*' correctly + expands the data into the outer serializer. + """ + serializer = ModelSerializerWithNestedSerializer(data={ + 'name': 'marko', + 'nested': {'info': 'hi'}}, + ) + self.assertEqual(serializer.is_valid(), True) + + +class CustomValidationTests(TestCase): + class CommentSerializerWithFieldValidator(CommentSerializer): + + def validate_email(self, attrs, source): + attrs[source] + return attrs + + def validate_content(self, attrs, source): + value = attrs[source] + if "test" not in value: + raise serializers.ValidationError("Test not in value") + return attrs + + def test_field_validation(self): + data = { + 'email': 'tom@example.com', + 'content': 'A test comment', + 'created': datetime.datetime(2012, 1, 1) + } + + serializer = self.CommentSerializerWithFieldValidator(data=data) + self.assertTrue(serializer.is_valid()) + + data['content'] = 'This should not validate' + + serializer = self.CommentSerializerWithFieldValidator(data=data) + self.assertFalse(serializer.is_valid()) + self.assertEqual(serializer.errors, {'content': ['Test not in value']}) + + def test_missing_data(self): + """ + Make sure that validate_content isn't called if the field is missing + """ + incomplete_data = { + 'email': 'tom@example.com', + 'created': datetime.datetime(2012, 1, 1) + } + serializer = self.CommentSerializerWithFieldValidator(data=incomplete_data) + self.assertFalse(serializer.is_valid()) + self.assertEqual(serializer.errors, {'content': ['This field is required.']}) + + def test_wrong_data(self): + """ + Make sure that validate_content isn't called if the field input is wrong + """ + wrong_data = { + 'email': 'not an email', + 'content': 'A test comment', + 'created': datetime.datetime(2012, 1, 1) + } + serializer = self.CommentSerializerWithFieldValidator(data=wrong_data) + 
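+        # The malformed address is rejected by EmailField's built-in
+        # validation, so the expected error below is the stock Django
+        # message rather than one raised by a custom validate_<field> hook.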
self.assertFalse(serializer.is_valid()) + self.assertEqual(serializer.errors, {'email': ['Enter a valid e-mail address.']}) + + +class PositiveIntegerAsChoiceTests(TestCase): + def test_positive_integer_in_json_is_correctly_parsed(self): + data = {'some_integer': 1} + serializer = PositiveIntegerAsChoiceSerializer(data=data) + self.assertEqual(serializer.is_valid(), True) + + +class ModelValidationTests(TestCase): + def test_validate_unique(self): + """ + Just check if serializers.ModelSerializer handles unique checks via .full_clean() + """ + serializer = AlbumsSerializer(data={'title': 'a'}) + serializer.is_valid() + serializer.save() + second_serializer = AlbumsSerializer(data={'title': 'a'}) + self.assertFalse(second_serializer.is_valid()) + self.assertEqual(second_serializer.errors, {'title': ['Album with this Title already exists.']}) + + def test_foreign_key_with_partial(self): + """ + Test ModelSerializer validation with partial=True + + Specifically test foreign key validation. + """ + + album = Album(title='test') + album.save() + + class PhotoSerializer(serializers.ModelSerializer): + class Meta: + model = Photo + + photo_serializer = PhotoSerializer(data={'description': 'test', 'album': album.pk}) + self.assertTrue(photo_serializer.is_valid()) + photo = photo_serializer.save() + + # Updating only the album (foreign key) + photo_serializer = PhotoSerializer(instance=photo, data={'album': album.pk}, partial=True) + self.assertTrue(photo_serializer.is_valid()) + self.assertTrue(photo_serializer.save()) + + # Updating only the description + photo_serializer = PhotoSerializer(instance=photo, + data={'description': 'new'}, + partial=True) + + self.assertTrue(photo_serializer.is_valid()) + self.assertTrue(photo_serializer.save()) + + +class RegexValidationTest(TestCase): + def test_create_failed(self): + serializer = BookSerializer(data={'isbn': '1234567890'}) + self.assertFalse(serializer.is_valid()) + self.assertEqual(serializer.errors, {'isbn': ['isbn has to be exact 13 numbers']}) + + serializer = BookSerializer(data={'isbn': '12345678901234'}) + self.assertFalse(serializer.is_valid()) + self.assertEqual(serializer.errors, {'isbn': ['isbn has to be exact 13 numbers']}) + + serializer = BookSerializer(data={'isbn': 'abcdefghijklm'}) + self.assertFalse(serializer.is_valid()) + self.assertEqual(serializer.errors, {'isbn': ['isbn has to be exact 13 numbers']}) + + def test_create_success(self): + serializer = BookSerializer(data={'isbn': '1234567890123'}) + self.assertTrue(serializer.is_valid()) + + +class MetadataTests(TestCase): + def test_empty(self): + serializer = CommentSerializer() + expected = { + 'email': serializers.CharField, + 'content': serializers.CharField, + 'created': serializers.DateTimeField + } + for field_name, field in expected.items(): + self.assertTrue(isinstance(serializer.data.fields[field_name], field)) + + +class ManyToManyTests(TestCase): + def setUp(self): + class ManyToManySerializer(serializers.ModelSerializer): + class Meta: + model = ManyToManyModel + + self.serializer_class = ManyToManySerializer + + # An anchor instance to use for the relationship + self.anchor = Anchor() + self.anchor.save() + + # A model instance with a many to many relationship to the anchor + self.instance = ManyToManyModel() + self.instance.save() + self.instance.rel.add(self.anchor) + + # A serialized representation of the model instance + self.data = {'id': 1, 'rel': [self.anchor.id]} + + def test_retrieve(self): + """ + Serialize an instance of a model with a ManyToMany 
relationship. + """ + serializer = self.serializer_class(instance=self.instance) + expected = self.data + self.assertEqual(serializer.data, expected) + + def test_create(self): + """ + Create an instance of a model with a ManyToMany relationship. + """ + data = {'rel': [self.anchor.id]} + serializer = self.serializer_class(data=data) + self.assertEqual(serializer.is_valid(), True) + instance = serializer.save() + self.assertEqual(len(ManyToManyModel.objects.all()), 2) + self.assertEqual(instance.pk, 2) + self.assertEqual(list(instance.rel.all()), [self.anchor]) + + def test_update(self): + """ + Update an instance of a model with a ManyToMany relationship. + """ + new_anchor = Anchor() + new_anchor.save() + data = {'rel': [self.anchor.id, new_anchor.id]} + serializer = self.serializer_class(self.instance, data=data) + self.assertEqual(serializer.is_valid(), True) + instance = serializer.save() + self.assertEqual(len(ManyToManyModel.objects.all()), 1) + self.assertEqual(instance.pk, 1) + self.assertEqual(list(instance.rel.all()), [self.anchor, new_anchor]) + + def test_create_empty_relationship(self): + """ + Create an instance of a model with a ManyToMany relationship, + containing no items. + """ + data = {'rel': []} + serializer = self.serializer_class(data=data) + self.assertEqual(serializer.is_valid(), True) + instance = serializer.save() + self.assertEqual(len(ManyToManyModel.objects.all()), 2) + self.assertEqual(instance.pk, 2) + self.assertEqual(list(instance.rel.all()), []) + + def test_update_empty_relationship(self): + """ + Update an instance of a model with a ManyToMany relationship, + containing no items. + """ + new_anchor = Anchor() + new_anchor.save() + data = {'rel': []} + serializer = self.serializer_class(self.instance, data=data) + self.assertEqual(serializer.is_valid(), True) + instance = serializer.save() + self.assertEqual(len(ManyToManyModel.objects.all()), 1) + self.assertEqual(instance.pk, 1) + self.assertEqual(list(instance.rel.all()), []) + + def test_create_empty_relationship_flat_data(self): + """ + Create an instance of a model with a ManyToMany relationship, + containing no items, using a representation that does not support + lists (eg form data). + """ + data = MultiValueDict() + data.setlist('rel', ['']) + serializer = self.serializer_class(data=data) + self.assertEqual(serializer.is_valid(), True) + instance = serializer.save() + self.assertEqual(len(ManyToManyModel.objects.all()), 2) + self.assertEqual(instance.pk, 2) + self.assertEqual(list(instance.rel.all()), []) + + +class ReadOnlyManyToManyTests(TestCase): + def setUp(self): + class ReadOnlyManyToManySerializer(serializers.ModelSerializer): + rel = serializers.RelatedField(many=True, read_only=True) + + class Meta: + model = ReadOnlyManyToManyModel + + self.serializer_class = ReadOnlyManyToManySerializer + + # An anchor instance to use for the relationship + self.anchor = Anchor() + self.anchor.save() + + # A model instance with a many to many relationship to the anchor + self.instance = ReadOnlyManyToManyModel() + self.instance.save() + self.instance.rel.add(self.anchor) + + # A serialized representation of the model instance + self.data = {'rel': [self.anchor.id], 'id': 1, 'text': 'anchor'} + + def test_update(self): + """ + Attempt to update an instance of a model with a ManyToMany + relationship. 
Not updated due to read_only=True + """ + new_anchor = Anchor() + new_anchor.save() + data = {'rel': [self.anchor.id, new_anchor.id]} + serializer = self.serializer_class(self.instance, data=data) + self.assertEqual(serializer.is_valid(), True) + instance = serializer.save() + self.assertEqual(len(ReadOnlyManyToManyModel.objects.all()), 1) + self.assertEqual(instance.pk, 1) + # rel is still as original (1 entry) + self.assertEqual(list(instance.rel.all()), [self.anchor]) + + def test_update_without_relationship(self): + """ + Attempt to update an instance of a model where many to ManyToMany + relationship is not supplied. Not updated due to read_only=True + """ + new_anchor = Anchor() + new_anchor.save() + data = {} + serializer = self.serializer_class(self.instance, data=data) + self.assertEqual(serializer.is_valid(), True) + instance = serializer.save() + self.assertEqual(len(ReadOnlyManyToManyModel.objects.all()), 1) + self.assertEqual(instance.pk, 1) + # rel is still as original (1 entry) + self.assertEqual(list(instance.rel.all()), [self.anchor]) + + +class DefaultValueTests(TestCase): + def setUp(self): + class DefaultValueSerializer(serializers.ModelSerializer): + class Meta: + model = DefaultValueModel + + self.serializer_class = DefaultValueSerializer + self.objects = DefaultValueModel.objects + + def test_create_using_default(self): + data = {} + serializer = self.serializer_class(data=data) + self.assertEqual(serializer.is_valid(), True) + instance = serializer.save() + self.assertEqual(len(self.objects.all()), 1) + self.assertEqual(instance.pk, 1) + self.assertEqual(instance.text, 'foobar') + + def test_create_overriding_default(self): + data = {'text': 'overridden'} + serializer = self.serializer_class(data=data) + self.assertEqual(serializer.is_valid(), True) + instance = serializer.save() + self.assertEqual(len(self.objects.all()), 1) + self.assertEqual(instance.pk, 1) + self.assertEqual(instance.text, 'overridden') + + def test_partial_update_default(self): + """ Regression test for issue #532 """ + data = {'text': 'overridden'} + serializer = self.serializer_class(data=data, partial=True) + self.assertEqual(serializer.is_valid(), True) + instance = serializer.save() + + data = {'extra': 'extra_value'} + serializer = self.serializer_class(instance=instance, data=data, partial=True) + self.assertEqual(serializer.is_valid(), True) + instance = serializer.save() + + self.assertEqual(instance.extra, 'extra_value') + self.assertEqual(instance.text, 'overridden') + + +class CallableDefaultValueTests(TestCase): + def setUp(self): + class CallableDefaultValueSerializer(serializers.ModelSerializer): + class Meta: + model = CallableDefaultValueModel + + self.serializer_class = CallableDefaultValueSerializer + self.objects = CallableDefaultValueModel.objects + + def test_create_using_default(self): + data = {} + serializer = self.serializer_class(data=data) + self.assertEqual(serializer.is_valid(), True) + instance = serializer.save() + self.assertEqual(len(self.objects.all()), 1) + self.assertEqual(instance.pk, 1) + self.assertEqual(instance.text, 'foobar') + + def test_create_overriding_default(self): + data = {'text': 'overridden'} + serializer = self.serializer_class(data=data) + self.assertEqual(serializer.is_valid(), True) + instance = serializer.save() + self.assertEqual(len(self.objects.all()), 1) + self.assertEqual(instance.pk, 1) + self.assertEqual(instance.text, 'overridden') + + +class ManyRelatedTests(TestCase): + def test_reverse_relations(self): + post = 
BlogPost.objects.create(title="Test blog post") + post.blogpostcomment_set.create(text="I hate this blog post") + post.blogpostcomment_set.create(text="I love this blog post") + + class BlogPostCommentSerializer(serializers.Serializer): + text = serializers.CharField() + + class BlogPostSerializer(serializers.Serializer): + title = serializers.CharField() + comments = BlogPostCommentSerializer(source='blogpostcomment_set') + + serializer = BlogPostSerializer(instance=post) + expected = { + 'title': 'Test blog post', + 'comments': [ + {'text': 'I hate this blog post'}, + {'text': 'I love this blog post'} + ] + } + + self.assertEqual(serializer.data, expected) + + def test_include_reverse_relations(self): + post = BlogPost.objects.create(title="Test blog post") + post.blogpostcomment_set.create(text="I hate this blog post") + post.blogpostcomment_set.create(text="I love this blog post") + + class BlogPostSerializer(serializers.ModelSerializer): + class Meta: + model = BlogPost + fields = ('id', 'title', 'blogpostcomment_set') + + serializer = BlogPostSerializer(instance=post) + expected = { + 'id': 1, 'title': 'Test blog post', 'blogpostcomment_set': [1, 2] + } + self.assertEqual(serializer.data, expected) + + def test_depth_include_reverse_relations(self): + post = BlogPost.objects.create(title="Test blog post") + post.blogpostcomment_set.create(text="I hate this blog post") + post.blogpostcomment_set.create(text="I love this blog post") + + class BlogPostSerializer(serializers.ModelSerializer): + class Meta: + model = BlogPost + fields = ('id', 'title', 'blogpostcomment_set') + depth = 1 + + serializer = BlogPostSerializer(instance=post) + expected = { + 'id': 1, 'title': 'Test blog post', + 'blogpostcomment_set': [ + {'id': 1, 'text': 'I hate this blog post', 'blog_post': 1}, + {'id': 2, 'text': 'I love this blog post', 'blog_post': 1} + ] + } + self.assertEqual(serializer.data, expected) + + def test_callable_source(self): + post = BlogPost.objects.create(title="Test blog post") + post.blogpostcomment_set.create(text="I love this blog post") + + class BlogPostCommentSerializer(serializers.Serializer): + text = serializers.CharField() + + class BlogPostSerializer(serializers.Serializer): + title = serializers.CharField() + first_comment = BlogPostCommentSerializer(source='get_first_comment') + + serializer = BlogPostSerializer(post) + + expected = { + 'title': 'Test blog post', + 'first_comment': {'text': 'I love this blog post'} + } + self.assertEqual(serializer.data, expected) + + +class RelatedTraversalTest(TestCase): + def test_nested_traversal(self): + """ + Source argument should support dotted.source notation. 
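+
+        For illustration (this mirrors the field declared below): writing
+
+            post_owner = PersonSerializer(source='blog_post.writer')
+
+        makes the serializer follow comment.blog_post.writer when building
+        the 'post_owner' output.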
+ """ + user = Person.objects.create(name="django") + post = BlogPost.objects.create(title="Test blog post", writer=user) + post.blogpostcomment_set.create(text="I love this blog post") + + class PersonSerializer(serializers.ModelSerializer): + class Meta: + model = Person + fields = ("name", "age") + + class BlogPostCommentSerializer(serializers.ModelSerializer): + class Meta: + model = BlogPostComment + fields = ("text", "post_owner") + + text = serializers.CharField() + post_owner = PersonSerializer(source='blog_post.writer') + + class BlogPostSerializer(serializers.Serializer): + title = serializers.CharField() + comments = BlogPostCommentSerializer(source='blogpostcomment_set') + + serializer = BlogPostSerializer(instance=post) + + expected = { + 'title': 'Test blog post', + 'comments': [{ + 'text': 'I love this blog post', + 'post_owner': { + "name": "django", + "age": None + } + }] + } + + self.assertEqual(serializer.data, expected) + + def test_nested_traversal_with_none(self): + """ + If a component of the dotted.source is None, return None for the field. + """ + from rest_framework.tests.models import NullableForeignKeySource + instance = NullableForeignKeySource.objects.create(name='Source with null FK') + + class NullableSourceSerializer(serializers.Serializer): + target_name = serializers.Field(source='target.name') + + serializer = NullableSourceSerializer(instance=instance) + + expected = { + 'target_name': None, + } + + self.assertEqual(serializer.data, expected) + + +class SerializerMethodFieldTests(TestCase): + def setUp(self): + + class BoopSerializer(serializers.Serializer): + beep = serializers.SerializerMethodField('get_beep') + boop = serializers.Field() + boop_count = serializers.SerializerMethodField('get_boop_count') + + def get_beep(self, obj): + return 'hello!' 
+ + def get_boop_count(self, obj): + return len(obj.boop) + + self.serializer_class = BoopSerializer + + def test_serializer_method_field(self): + + class MyModel(object): + boop = ['a', 'b', 'c'] + + source_data = MyModel() + + serializer = self.serializer_class(source_data) + + expected = { + 'beep': 'hello!', + 'boop': ['a', 'b', 'c'], + 'boop_count': 3, + } + + self.assertEqual(serializer.data, expected) + + +# Test for issue #324 +class BlankFieldTests(TestCase): + def setUp(self): + + class BlankFieldModelSerializer(serializers.ModelSerializer): + class Meta: + model = BlankFieldModel + + class BlankFieldSerializer(serializers.Serializer): + title = serializers.CharField(required=False) + + class NotBlankFieldModelSerializer(serializers.ModelSerializer): + class Meta: + model = BasicModel + + class NotBlankFieldSerializer(serializers.Serializer): + title = serializers.CharField() + + self.model_serializer_class = BlankFieldModelSerializer + self.serializer_class = BlankFieldSerializer + self.not_blank_model_serializer_class = NotBlankFieldModelSerializer + self.not_blank_serializer_class = NotBlankFieldSerializer + self.data = {'title': ''} + + def test_create_blank_field(self): + serializer = self.serializer_class(data=self.data) + self.assertEqual(serializer.is_valid(), True) + + def test_create_model_blank_field(self): + serializer = self.model_serializer_class(data=self.data) + self.assertEqual(serializer.is_valid(), True) + + def test_create_model_null_field(self): + serializer = self.model_serializer_class(data={'title': None}) + self.assertEqual(serializer.is_valid(), True) + + def test_create_not_blank_field(self): + """ + Test to ensure blank data in a field not marked as blank=True + is considered invalid in a non-model serializer + """ + serializer = self.not_blank_serializer_class(data=self.data) + self.assertEqual(serializer.is_valid(), False) + + def test_create_model_not_blank_field(self): + """ + Test to ensure blank data in a field not marked as blank=True + is considered invalid in a model serializer + """ + serializer = self.not_blank_model_serializer_class(data=self.data) + self.assertEqual(serializer.is_valid(), False) + + def test_create_model_empty_field(self): + serializer = self.model_serializer_class(data={}) + self.assertEqual(serializer.is_valid(), True) + + +#test for issue #460 +class SerializerPickleTests(TestCase): + """ + Test pickleability of the output of Serializers + """ + def test_pickle_simple_model_serializer_data(self): + """ + Test simple serializer + """ + pickle.dumps(PersonSerializer(Person(name="Methusela", age=969)).data) + + def test_pickle_inner_serializer(self): + """ + Test pickling a serializer whose resulting .data (a SortedDictWithMetadata) will + have unpickleable meta data--in order to make sure metadata doesn't get pulled into the pickle. + See DictWithMetadata.__getstate__ + """ + class InnerPersonSerializer(serializers.ModelSerializer): + class Meta: + model = Person + fields = ('name', 'age') + pickle.dumps(InnerPersonSerializer(Person(name="Noah", age=950)).data, 0) + + def test_getstate_method_should_not_return_none(self): + """ + Regression test for #645. + """ + data = serializers.DictWithMetadata({1: 1}) + self.assertEqual(data.__getstate__(), serializers.SortedDict({1: 1})) + + def test_serializer_data_is_pickleable(self): + """ + Another regression test for #645. 
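+        The SortedDictWithMetadata output should survive a pickle
+        round-trip without dragging unpickleable field metadata along.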
+ """ + data = serializers.SortedDictWithMetadata({1: 1}) + repr(pickle.loads(pickle.dumps(data, 0))) + + +# test for issue #725 +class SeveralChoicesModel(models.Model): + color = models.CharField( + max_length=10, + choices=[('red', 'Red'), ('green', 'Green'), ('blue', 'Blue')], + blank=False + ) + drink = models.CharField( + max_length=10, + choices=[('beer', 'Beer'), ('wine', 'Wine'), ('cider', 'Cider')], + blank=False, + default='beer' + ) + os = models.CharField( + max_length=10, + choices=[('linux', 'Linux'), ('osx', 'OSX'), ('windows', 'Windows')], + blank=True + ) + music_genre = models.CharField( + max_length=10, + choices=[('rock', 'Rock'), ('metal', 'Metal'), ('grunge', 'Grunge')], + blank=True, + default='metal' + ) + + +class SerializerChoiceFields(TestCase): + + def setUp(self): + super(SerializerChoiceFields, self).setUp() + + class SeveralChoicesSerializer(serializers.ModelSerializer): + class Meta: + model = SeveralChoicesModel + fields = ('color', 'drink', 'os', 'music_genre') + + self.several_choices_serializer = SeveralChoicesSerializer + + def test_choices_blank_false_not_default(self): + serializer = self.several_choices_serializer() + self.assertEqual( + serializer.fields['color'].choices, + [('red', 'Red'), ('green', 'Green'), ('blue', 'Blue')] + ) + + def test_choices_blank_false_with_default(self): + serializer = self.several_choices_serializer() + self.assertEqual( + serializer.fields['drink'].choices, + [('beer', 'Beer'), ('wine', 'Wine'), ('cider', 'Cider')] + ) + + def test_choices_blank_true_not_default(self): + serializer = self.several_choices_serializer() + self.assertEqual( + serializer.fields['os'].choices, + BLANK_CHOICE_DASH + [('linux', 'Linux'), ('osx', 'OSX'), ('windows', 'Windows')] + ) + + def test_choices_blank_true_with_default(self): + serializer = self.several_choices_serializer() + self.assertEqual( + serializer.fields['music_genre'].choices, + BLANK_CHOICE_DASH + [('rock', 'Rock'), ('metal', 'Metal'), ('grunge', 'Grunge')] + ) + + +# Regression tests for #675 +class Ticket(models.Model): + assigned = models.ForeignKey( + Person, related_name='assigned_tickets') + reviewer = models.ForeignKey( + Person, blank=True, null=True, related_name='reviewed_tickets') + + +class SerializerRelatedChoicesTest(TestCase): + + def setUp(self): + super(SerializerRelatedChoicesTest, self).setUp() + + class RelatedChoicesSerializer(serializers.ModelSerializer): + class Meta: + model = Ticket + fields = ('assigned', 'reviewer') + + self.related_fields_serializer = RelatedChoicesSerializer + + def test_empty_queryset_required(self): + serializer = self.related_fields_serializer() + self.assertEqual(serializer.fields['assigned'].queryset.count(), 0) + self.assertEqual( + [x for x in serializer.fields['assigned'].widget.choices], + [] + ) + + def test_empty_queryset_not_required(self): + serializer = self.related_fields_serializer() + self.assertEqual(serializer.fields['reviewer'].queryset.count(), 0) + self.assertEqual( + [x for x in serializer.fields['reviewer'].widget.choices], + [('', '---------')] + ) + + def test_with_some_persons_required(self): + Person.objects.create(name="Lionel Messi") + Person.objects.create(name="Xavi Hernandez") + serializer = self.related_fields_serializer() + self.assertEqual(serializer.fields['assigned'].queryset.count(), 2) + self.assertEqual( + [x for x in serializer.fields['assigned'].widget.choices], + [(1, 'Person object - 1'), (2, 'Person object - 2')] + ) + + def test_with_some_persons_not_required(self): + 
Person.objects.create(name="Lionel Messi") + Person.objects.create(name="Xavi Hernandez") + serializer = self.related_fields_serializer() + self.assertEqual(serializer.fields['reviewer'].queryset.count(), 2) + self.assertEqual( + [x for x in serializer.fields['reviewer'].widget.choices], + [('', '---------'), (1, 'Person object - 1'), (2, 'Person object - 2')] + ) + + +class DepthTest(TestCase): + def test_implicit_nesting(self): + + writer = Person.objects.create(name="django", age=1) + post = BlogPost.objects.create(title="Test blog post", writer=writer) + comment = BlogPostComment.objects.create(text="Test blog post comment", blog_post=post) + + class BlogPostCommentSerializer(serializers.ModelSerializer): + class Meta: + model = BlogPostComment + depth = 2 + + serializer = BlogPostCommentSerializer(instance=comment) + expected = {'id': 1, 'text': 'Test blog post comment', 'blog_post': {'id': 1, 'title': 'Test blog post', + 'writer': {'id': 1, 'name': 'django', 'age': 1}}} + + self.assertEqual(serializer.data, expected) + + def test_explicit_nesting(self): + writer = Person.objects.create(name="django", age=1) + post = BlogPost.objects.create(title="Test blog post", writer=writer) + comment = BlogPostComment.objects.create(text="Test blog post comment", blog_post=post) + + class PersonSerializer(serializers.ModelSerializer): + class Meta: + model = Person + + class BlogPostSerializer(serializers.ModelSerializer): + writer = PersonSerializer() + + class Meta: + model = BlogPost + + class BlogPostCommentSerializer(serializers.ModelSerializer): + blog_post = BlogPostSerializer() + + class Meta: + model = BlogPostComment + + serializer = BlogPostCommentSerializer(instance=comment) + expected = {'id': 1, 'text': 'Test blog post comment', 'blog_post': {'id': 1, 'title': 'Test blog post', + 'writer': {'id': 1, 'name': 'django', 'age': 1}}} + + self.assertEqual(serializer.data, expected) + + +class NestedSerializerContextTests(TestCase): + + def test_nested_serializer_context(self): + """ + Regression for #497 + + https://github.com/tomchristie/django-rest-framework/issues/497 + """ + class PhotoSerializer(serializers.ModelSerializer): + class Meta: + model = Photo + fields = ("description", "callable") + + callable = serializers.SerializerMethodField('_callable') + + def _callable(self, instance): + if not 'context_item' in self.context: + raise RuntimeError("context isn't getting passed into 2nd level nested serializer") + return "success" + + class AlbumSerializer(serializers.ModelSerializer): + class Meta: + model = Album + fields = ("photo_set", "callable") + + photo_set = PhotoSerializer(source="photo_set") + callable = serializers.SerializerMethodField("_callable") + + def _callable(self, instance): + if not 'context_item' in self.context: + raise RuntimeError("context isn't getting passed into 1st level nested serializer") + return "success" + + class AlbumCollection(object): + albums = None + + class AlbumCollectionSerializer(serializers.Serializer): + albums = AlbumSerializer(source="albums") + + album1 = Album.objects.create(title="album 1") + album2 = Album.objects.create(title="album 2") + Photo.objects.create(description="Bigfoot", album=album1) + Photo.objects.create(description="Unicorn", album=album1) + Photo.objects.create(description="Yeti", album=album2) + Photo.objects.create(description="Sasquatch", album=album2) + album_collection = AlbumCollection() + album_collection.albums = [album1, album2] + + # This will raise RuntimeError if context doesn't get passed correctly to 
the nested Serializers + AlbumCollectionSerializer(album_collection, context={'context_item': 'album context'}).data + + +class DeserializeListTestCase(TestCase): + + def setUp(self): + self.data = { + 'email': 'nobody@nowhere.com', + 'content': 'This is some test content', + 'created': datetime.datetime(2013, 3, 7), + } + + def test_no_errors(self): + data = [self.data.copy() for x in range(0, 3)] + serializer = CommentSerializer(data=data, many=True) + self.assertTrue(serializer.is_valid()) + self.assertTrue(isinstance(serializer.object, list)) + self.assertTrue( + all((isinstance(item, Comment) for item in serializer.object)) + ) + + def test_errors_return_as_list(self): + invalid_item = self.data.copy() + invalid_item['email'] = '' + data = [self.data.copy(), invalid_item, self.data.copy()] + + serializer = CommentSerializer(data=data, many=True) + self.assertFalse(serializer.is_valid()) + expected = [{}, {'email': ['This field is required.']}, {}] + self.assertEqual(serializer.errors, expected) + + +# Test for issue 747 + +class LazyStringModel(object): + def __init__(self, lazystring): + self.lazystring = lazystring + + +class LazyStringSerializer(serializers.Serializer): + lazystring = serializers.Field() + + def restore_object(self, attrs, instance=None): + if instance is not None: + instance.lazystring = attrs.get('lazystring', instance.lazystring) + return instance + return LazyStringModel(**attrs) + + +class LazyStringsTestCase(TestCase): + def setUp(self): + self.model = LazyStringModel(lazystring=_('lazystring')) + + def test_lazy_strings_are_translated(self): + serializer = LazyStringSerializer(self.model) + self.assertEqual(type(serializer.data['lazystring']), + type('lazystring')) + + +# Test for issue #467 + +class FieldLabelTest(TestCase): + def setUp(self): + self.serializer_class = BasicModelSerializer + + def test_label_from_model(self): + """ + Validates that label and help_text are correctly copied from the model class. + """ + serializer = self.serializer_class() + text_field = serializer.fields['text'] + + self.assertEqual('Text comes here', text_field.label) + self.assertEqual('Text description.', text_field.help_text) + + def test_field_ctor(self): + """ + Check that the constructor supports both label and help_text.
+ """ + self.assertEqual('Label', fields.Field(label='Label', help_text='Help').label) + self.assertEqual('Help', fields.CharField(label='Label', help_text='Help').help_text) + self.assertEqual('Label', relations.HyperlinkedRelatedField(view_name='fake', label='Label', help_text='Help', many=True).label) + + +class AttributeMappingOnAutogeneratedFieldsTests(TestCase): + + def setUp(self): + class AMOAFModel(RESTFrameworkModel): + char_field = models.CharField(max_length=1024, blank=True) + comma_separated_integer_field = models.CommaSeparatedIntegerField(max_length=1024, blank=True) + decimal_field = models.DecimalField(max_digits=64, decimal_places=32, blank=True) + email_field = models.EmailField(max_length=1024, blank=True) + file_field = models.FileField(max_length=1024, blank=True) + image_field = models.ImageField(max_length=1024, blank=True) + slug_field = models.SlugField(max_length=1024, blank=True) + url_field = models.URLField(max_length=1024, blank=True) + + class AMOAFSerializer(serializers.ModelSerializer): + class Meta: + model = AMOAFModel + + self.serializer_class = AMOAFSerializer + self.fields_attributes = { + 'char_field': [ + ('max_length', 1024), + ], + 'comma_separated_integer_field': [ + ('max_length', 1024), + ], + 'decimal_field': [ + ('max_digits', 64), + ('decimal_places', 32), + ], + 'email_field': [ + ('max_length', 1024), + ], + 'file_field': [ + ('max_length', 1024), + ], + 'image_field': [ + ('max_length', 1024), + ], + 'slug_field': [ + ('max_length', 1024), + ], + 'url_field': [ + ('max_length', 1024), + ], + } + + def field_test(self, field): + serializer = self.serializer_class(data={}) + self.assertEqual(serializer.is_valid(), True) + + for attribute in self.fields_attributes[field]: + self.assertEqual( + getattr(serializer.fields[field], attribute[0]), + attribute[1] + ) + + def test_char_field(self): + self.field_test('char_field') + + def test_comma_separated_integer_field(self): + self.field_test('comma_separated_integer_field') + + def test_decimal_field(self): + self.field_test('decimal_field') + + def test_email_field(self): + self.field_test('email_field') + + def test_file_field(self): + self.field_test('file_field') + + def test_image_field(self): + self.field_test('image_field') + + def test_slug_field(self): + self.field_test('slug_field') + + def test_url_field(self): + self.field_test('url_field') + + +class DefaultValuesOnAutogeneratedFieldsTests(TestCase): + + def setUp(self): + class DVOAFModel(RESTFrameworkModel): + positive_integer_field = models.PositiveIntegerField(blank=True) + positive_small_integer_field = models.PositiveSmallIntegerField(blank=True) + email_field = models.EmailField(blank=True) + file_field = models.FileField(blank=True) + image_field = models.ImageField(blank=True) + slug_field = models.SlugField(blank=True) + url_field = models.URLField(blank=True) + + class DVOAFSerializer(serializers.ModelSerializer): + class Meta: + model = DVOAFModel + + self.serializer_class = DVOAFSerializer + self.fields_attributes = { + 'positive_integer_field': [ + ('min_value', 0), + ], + 'positive_small_integer_field': [ + ('min_value', 0), + ], + 'email_field': [ + ('max_length', 75), + ], + 'file_field': [ + ('max_length', 100), + ], + 'image_field': [ + ('max_length', 100), + ], + 'slug_field': [ + ('max_length', 50), + ], + 'url_field': [ + ('max_length', 200), + ], + } + + def field_test(self, field): + serializer = self.serializer_class(data={}) + self.assertEqual(serializer.is_valid(), True) + + for attribute in 
self.fields_attributes[field]: + self.assertEqual( + getattr(serializer.fields[field], attribute[0]), + attribute[1] + ) + + def test_positive_integer_field(self): + self.field_test('positive_integer_field') + + def test_positive_small_integer_field(self): + self.field_test('positive_small_integer_field') + + def test_email_field(self): + self.field_test('email_field') + + def test_file_field(self): + self.field_test('file_field') + + def test_image_field(self): + self.field_test('image_field') + + def test_slug_field(self): + self.field_test('slug_field') + + def test_url_field(self): + self.field_test('url_field') + + +class MetadataSerializer(serializers.Serializer): + field1 = serializers.CharField(3, required=True) + field2 = serializers.CharField(10, required=False) + + +class MetadataSerializerTestCase(TestCase): + def setUp(self): + self.serializer = MetadataSerializer() + + def test_serializer_metadata(self): + metadata = self.serializer.metadata() + expected = { + 'field1': { + 'required': True, + 'max_length': 3, + 'type': 'string', + 'read_only': False + }, + 'field2': { + 'required': False, + 'max_length': 10, + 'type': 'string', + 'read_only': False + } + } + self.assertEqual(expected, metadata) diff --git a/awx/lib/site-packages/rest_framework/tests/test_serializer_bulk_update.py b/awx/lib/site-packages/rest_framework/tests/test_serializer_bulk_update.py new file mode 100644 index 0000000000..8b0ded1a84 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_serializer_bulk_update.py @@ -0,0 +1,278 @@ +""" +Tests to cover bulk create and update using serializers. +""" +from __future__ import unicode_literals +from django.test import TestCase +from rest_framework import serializers + + +class BulkCreateSerializerTests(TestCase): + """ + Creating multiple instances using serializers. + """ + + def setUp(self): + class BookSerializer(serializers.Serializer): + id = serializers.IntegerField() + title = serializers.CharField(max_length=100) + author = serializers.CharField(max_length=100) + + self.BookSerializer = BookSerializer + + def test_bulk_create_success(self): + """ + Correct bulk create serialization should return the input data. + """ + + data = [ + { + 'id': 0, + 'title': 'The electric kool-aid acid test', + 'author': 'Tom Wolfe' + }, { + 'id': 1, + 'title': 'If this is a man', + 'author': 'Primo Levi' + }, { + 'id': 2, + 'title': 'The wind-up bird chronicle', + 'author': 'Haruki Murakami' + } + ] + + serializer = self.BookSerializer(data=data, many=True) + self.assertEqual(serializer.is_valid(), True) + self.assertEqual(serializer.object, data) + + def test_bulk_create_errors(self): + """ + Incorrect bulk create serialization should return errors. + """ + + data = [ + { + 'id': 0, + 'title': 'The electric kool-aid acid test', + 'author': 'Tom Wolfe' + }, { + 'id': 1, + 'title': 'If this is a man', + 'author': 'Primo Levi' + }, { + 'id': 'foo', + 'title': 'The wind-up bird chronicle', + 'author': 'Haruki Murakami' + } + ] + expected_errors = [ + {}, + {}, + {'id': ['Enter a whole number.']} + ] + + serializer = self.BookSerializer(data=data, many=True) + self.assertEqual(serializer.is_valid(), False) + self.assertEqual(serializer.errors, expected_errors) + + def test_invalid_list_datatype(self): + """ + Data containing a list of incorrect data types should return errors.
+ """ + data = ['foo', 'bar', 'baz'] + serializer = self.BookSerializer(data=data, many=True) + self.assertEqual(serializer.is_valid(), False) + + expected_errors = [ + {'non_field_errors': ['Invalid data']}, + {'non_field_errors': ['Invalid data']}, + {'non_field_errors': ['Invalid data']} + ] + + self.assertEqual(serializer.errors, expected_errors) + + def test_invalid_single_datatype(self): + """ + Data containing a single incorrect data type should return errors. + """ + data = 123 + serializer = self.BookSerializer(data=data, many=True) + self.assertEqual(serializer.is_valid(), False) + + expected_errors = {'non_field_errors': ['Expected a list of items.']} + + self.assertEqual(serializer.errors, expected_errors) + + def test_invalid_single_object(self): + """ + Data containing only a single object, instead of a list of objects + should return errors. + """ + data = { + 'id': 0, + 'title': 'The electric kool-aid acid test', + 'author': 'Tom Wolfe' + } + serializer = self.BookSerializer(data=data, many=True) + self.assertEqual(serializer.is_valid(), False) + + expected_errors = {'non_field_errors': ['Expected a list of items.']} + + self.assertEqual(serializer.errors, expected_errors) + + +class BulkUpdateSerializerTests(TestCase): + """ + Updating multiple instances using serializers. + """ + + def setUp(self): + class Book(object): + """ + A data type that can be persisted to a mock storage backend + with `.save()` and `.delete()`. + """ + object_map = {} + + def __init__(self, id, title, author): + self.id = id + self.title = title + self.author = author + + def save(self): + Book.object_map[self.id] = self + + def delete(self): + del Book.object_map[self.id] + + class BookSerializer(serializers.Serializer): + id = serializers.IntegerField() + title = serializers.CharField(max_length=100) + author = serializers.CharField(max_length=100) + + def restore_object(self, attrs, instance=None): + if instance: + instance.id = attrs['id'] + instance.title = attrs['title'] + instance.author = attrs['author'] + return instance + return Book(**attrs) + + self.Book = Book + self.BookSerializer = BookSerializer + + data = [ + { + 'id': 0, + 'title': 'The electric kool-aid acid test', + 'author': 'Tom Wolfe' + }, { + 'id': 1, + 'title': 'If this is a man', + 'author': 'Primo Levi' + }, { + 'id': 2, + 'title': 'The wind-up bird chronicle', + 'author': 'Haruki Murakami' + } + ] + + for item in data: + book = Book(item['id'], item['title'], item['author']) + book.save() + + def books(self): + """ + Return all the objects in the mock storage backend. + """ + return self.Book.object_map.values() + + def test_bulk_update_success(self): + """ + Correct bulk update serialization should return the input data. + """ + data = [ + { + 'id': 0, + 'title': 'The electric kool-aid acid test', + 'author': 'Tom Wolfe' + }, { + 'id': 2, + 'title': 'Kafka on the shore', + 'author': 'Haruki Murakami' + } + ] + serializer = self.BookSerializer(self.books(), data=data, many=True, allow_add_remove=True) + self.assertEqual(serializer.is_valid(), True) + self.assertEqual(serializer.data, data) + serializer.save() + new_data = self.BookSerializer(self.books(), many=True).data + + self.assertEqual(data, new_data) + + def test_bulk_update_and_create(self): + """ + Bulk update serialization may also include created items. 
+ """ + data = [ + { + 'id': 0, + 'title': 'The electric kool-aid acid test', + 'author': 'Tom Wolfe' + }, { + 'id': 3, + 'title': 'Kafka on the shore', + 'author': 'Haruki Murakami' + } + ] + serializer = self.BookSerializer(self.books(), data=data, many=True, allow_add_remove=True) + self.assertEqual(serializer.is_valid(), True) + self.assertEqual(serializer.data, data) + serializer.save() + new_data = self.BookSerializer(self.books(), many=True).data + self.assertEqual(data, new_data) + + def test_bulk_update_invalid_create(self): + """ + Bulk update serialization without allow_add_remove may not create items. + """ + data = [ + { + 'id': 0, + 'title': 'The electric kool-aid acid test', + 'author': 'Tom Wolfe' + }, { + 'id': 3, + 'title': 'Kafka on the shore', + 'author': 'Haruki Murakami' + } + ] + expected_errors = [ + {}, + {'non_field_errors': ['Cannot create a new item, only existing items may be updated.']} + ] + serializer = self.BookSerializer(self.books(), data=data, many=True) + self.assertEqual(serializer.is_valid(), False) + self.assertEqual(serializer.errors, expected_errors) + + def test_bulk_update_error(self): + """ + Incorrect bulk update serialization should return error data. + """ + data = [ + { + 'id': 0, + 'title': 'The electric kool-aid acid test', + 'author': 'Tom Wolfe' + }, { + 'id': 'foo', + 'title': 'Kafka on the shore', + 'author': 'Haruki Murakami' + } + ] + expected_errors = [ + {}, + {'id': ['Enter a whole number.']} + ] + serializer = self.BookSerializer(self.books(), data=data, many=True, allow_add_remove=True) + self.assertEqual(serializer.is_valid(), False) + self.assertEqual(serializer.errors, expected_errors) diff --git a/awx/lib/site-packages/rest_framework/tests/test_serializer_nested.py b/awx/lib/site-packages/rest_framework/tests/test_serializer_nested.py new file mode 100644 index 0000000000..71d0e24b51 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_serializer_nested.py @@ -0,0 +1,246 @@ +""" +Tests to cover nested serializers. + +Doesn't cover model serializers. +""" +from __future__ import unicode_literals +from django.test import TestCase +from rest_framework import serializers + + +class WritableNestedSerializerBasicTests(TestCase): + """ + Tests for deserializing nested entities. + Basic tests that use serializers that simply restore to dicts. + """ + + def setUp(self): + class TrackSerializer(serializers.Serializer): + order = serializers.IntegerField() + title = serializers.CharField(max_length=100) + duration = serializers.IntegerField() + + class AlbumSerializer(serializers.Serializer): + album_name = serializers.CharField(max_length=100) + artist = serializers.CharField(max_length=100) + tracks = TrackSerializer(many=True) + + self.AlbumSerializer = AlbumSerializer + + def test_nested_validation_success(self): + """ + Correct nested serialization should return the input data. + """ + + data = { + 'album_name': 'Discovery', + 'artist': 'Daft Punk', + 'tracks': [ + {'order': 1, 'title': 'One More Time', 'duration': 235}, + {'order': 2, 'title': 'Aerodynamic', 'duration': 184}, + {'order': 3, 'title': 'Digital Love', 'duration': 239} + ] + } + + serializer = self.AlbumSerializer(data=data) + self.assertEqual(serializer.is_valid(), True) + self.assertEqual(serializer.object, data) + + def test_nested_validation_error(self): + """ + Incorrect nested serialization should return appropriate error data. 
+ """ + + data = { + 'album_name': 'Discovery', + 'artist': 'Daft Punk', + 'tracks': [ + {'order': 1, 'title': 'One More Time', 'duration': 235}, + {'order': 2, 'title': 'Aerodynamic', 'duration': 184}, + {'order': 3, 'title': 'Digital Love', 'duration': 'foobar'} + ] + } + expected_errors = { + 'tracks': [ + {}, + {}, + {'duration': ['Enter a whole number.']} + ] + } + + serializer = self.AlbumSerializer(data=data) + self.assertEqual(serializer.is_valid(), False) + self.assertEqual(serializer.errors, expected_errors) + + def test_many_nested_validation_error(self): + """ + Incorrect nested serialization should return appropriate error data + when multiple entities are being deserialized. + """ + + data = [ + { + 'album_name': 'Russian Red', + 'artist': 'I Love Your Glasses', + 'tracks': [ + {'order': 1, 'title': 'Cigarettes', 'duration': 121}, + {'order': 2, 'title': 'No Past Land', 'duration': 198}, + {'order': 3, 'title': 'They Don\'t Believe', 'duration': 191} + ] + }, + { + 'album_name': 'Discovery', + 'artist': 'Daft Punk', + 'tracks': [ + {'order': 1, 'title': 'One More Time', 'duration': 235}, + {'order': 2, 'title': 'Aerodynamic', 'duration': 184}, + {'order': 3, 'title': 'Digital Love', 'duration': 'foobar'} + ] + } + ] + expected_errors = [ + {}, + { + 'tracks': [ + {}, + {}, + {'duration': ['Enter a whole number.']} + ] + } + ] + + serializer = self.AlbumSerializer(data=data, many=True) + self.assertEqual(serializer.is_valid(), False) + self.assertEqual(serializer.errors, expected_errors) + + +class WritableNestedSerializerObjectTests(TestCase): + """ + Tests for deserializing nested entities. + These tests use serializers that restore to concrete objects. + """ + + def setUp(self): + # Couple of concrete objects that we're going to deserialize into + class Track(object): + def __init__(self, order, title, duration): + self.order, self.title, self.duration = order, title, duration + + def __eq__(self, other): + return ( + self.order == other.order and + self.title == other.title and + self.duration == other.duration + ) + + class Album(object): + def __init__(self, album_name, artist, tracks): + self.album_name, self.artist, self.tracks = album_name, artist, tracks + + def __eq__(self, other): + return ( + self.album_name == other.album_name and + self.artist == other.artist and + self.tracks == other.tracks + ) + + # And their corresponding serializers + class TrackSerializer(serializers.Serializer): + order = serializers.IntegerField() + title = serializers.CharField(max_length=100) + duration = serializers.IntegerField() + + def restore_object(self, attrs, instance=None): + return Track(attrs['order'], attrs['title'], attrs['duration']) + + class AlbumSerializer(serializers.Serializer): + album_name = serializers.CharField(max_length=100) + artist = serializers.CharField(max_length=100) + tracks = TrackSerializer(many=True) + + def restore_object(self, attrs, instance=None): + return Album(attrs['album_name'], attrs['artist'], attrs['tracks']) + + self.Album, self.Track = Album, Track + self.AlbumSerializer = AlbumSerializer + + def test_nested_validation_success(self): + """ + Correct nested serialization should return a restored object + that corresponds to the input data. 
+ """ + + data = { + 'album_name': 'Discovery', + 'artist': 'Daft Punk', + 'tracks': [ + {'order': 1, 'title': 'One More Time', 'duration': 235}, + {'order': 2, 'title': 'Aerodynamic', 'duration': 184}, + {'order': 3, 'title': 'Digital Love', 'duration': 239} + ] + } + expected_object = self.Album( + album_name='Discovery', + artist='Daft Punk', + tracks=[ + self.Track(order=1, title='One More Time', duration=235), + self.Track(order=2, title='Aerodynamic', duration=184), + self.Track(order=3, title='Digital Love', duration=239), + ] + ) + + serializer = self.AlbumSerializer(data=data) + self.assertEqual(serializer.is_valid(), True) + self.assertEqual(serializer.object, expected_object) + + def test_many_nested_validation_success(self): + """ + Correct nested serialization should return multiple restored objects + that corresponds to the input data when multiple objects are + being deserialized. + """ + + data = [ + { + 'album_name': 'Russian Red', + 'artist': 'I Love Your Glasses', + 'tracks': [ + {'order': 1, 'title': 'Cigarettes', 'duration': 121}, + {'order': 2, 'title': 'No Past Land', 'duration': 198}, + {'order': 3, 'title': 'They Don\'t Believe', 'duration': 191} + ] + }, + { + 'album_name': 'Discovery', + 'artist': 'Daft Punk', + 'tracks': [ + {'order': 1, 'title': 'One More Time', 'duration': 235}, + {'order': 2, 'title': 'Aerodynamic', 'duration': 184}, + {'order': 3, 'title': 'Digital Love', 'duration': 239} + ] + } + ] + expected_object = [ + self.Album( + album_name='Russian Red', + artist='I Love Your Glasses', + tracks=[ + self.Track(order=1, title='Cigarettes', duration=121), + self.Track(order=2, title='No Past Land', duration=198), + self.Track(order=3, title='They Don\'t Believe', duration=191), + ] + ), + self.Album( + album_name='Discovery', + artist='Daft Punk', + tracks=[ + self.Track(order=1, title='One More Time', duration=235), + self.Track(order=2, title='Aerodynamic', duration=184), + self.Track(order=3, title='Digital Love', duration=239), + ] + ) + ] + + serializer = self.AlbumSerializer(data=data, many=True) + self.assertEqual(serializer.is_valid(), True) + self.assertEqual(serializer.object, expected_object) diff --git a/awx/lib/site-packages/rest_framework/tests/test_settings.py b/awx/lib/site-packages/rest_framework/tests/test_settings.py new file mode 100644 index 0000000000..857375c212 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_settings.py @@ -0,0 +1,22 @@ +"""Tests for the settings module""" +from __future__ import unicode_literals +from django.test import TestCase + +from rest_framework.settings import APISettings, DEFAULTS, IMPORT_STRINGS + + +class TestSettings(TestCase): + """Tests relating to the api settings""" + + def test_non_import_errors(self): + """Make sure other errors aren't suppressed.""" + settings = APISettings({'DEFAULT_MODEL_SERIALIZER_CLASS': 'rest_framework.tests.extras.bad_import.ModelSerializer'}, DEFAULTS, IMPORT_STRINGS) + with self.assertRaises(ValueError): + settings.DEFAULT_MODEL_SERIALIZER_CLASS + + def test_import_error_message_maintained(self): + """Make sure real import errors are captured and raised sensibly.""" + settings = APISettings({'DEFAULT_MODEL_SERIALIZER_CLASS': 'rest_framework.tests.extras.not_here.ModelSerializer'}, DEFAULTS, IMPORT_STRINGS) + with self.assertRaises(ImportError) as cm: + settings.DEFAULT_MODEL_SERIALIZER_CLASS + self.assertTrue('ImportError' in str(cm.exception)) diff --git a/awx/lib/site-packages/rest_framework/tests/test_throttling.py 
b/awx/lib/site-packages/rest_framework/tests/test_throttling.py new file mode 100644 index 0000000000..da400b2fcd --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_throttling.py @@ -0,0 +1,143 @@ +""" +Tests for the throttling implementations in the throttling module. +""" +from __future__ import unicode_literals +from django.test import TestCase +from django.contrib.auth.models import User +from django.core.cache import cache +from django.test.client import RequestFactory +from rest_framework.views import APIView +from rest_framework.throttling import UserRateThrottle +from rest_framework.response import Response + + +class User3SecRateThrottle(UserRateThrottle): + rate = '3/sec' + scope = 'seconds' + + +class User3MinRateThrottle(UserRateThrottle): + rate = '3/min' + scope = 'minutes' + + +class MockView(APIView): + throttle_classes = (User3SecRateThrottle,) + + def get(self, request): + return Response('foo') + + +class MockView_MinuteThrottling(APIView): + throttle_classes = (User3MinRateThrottle,) + + def get(self, request): + return Response('foo') + + +class ThrottlingTests(TestCase): + urls = 'rest_framework.tests.test_throttling' + + def setUp(self): + """ + Reset the cache so that no throttles will be active + """ + cache.clear() + self.factory = RequestFactory() + + def test_requests_are_throttled(self): + """ + Ensure request rate is limited + """ + request = self.factory.get('/') + for dummy in range(4): + response = MockView.as_view()(request) + self.assertEqual(429, response.status_code) + + def set_throttle_timer(self, view, value): + """ + Explicitly set the timer, overriding time.time() + """ + view.throttle_classes[0].timer = lambda self: value + + def test_request_throttling_expires(self): + """ + Ensure request rate is limited for a limited duration only + """ + self.set_throttle_timer(MockView, 0) + + request = self.factory.get('/') + for dummy in range(4): + response = MockView.as_view()(request) + self.assertEqual(429, response.status_code) + + # Advance the timer by one second + self.set_throttle_timer(MockView, 1) + + response = MockView.as_view()(request) + self.assertEqual(200, response.status_code) + + def ensure_is_throttled(self, view, expect): + request = self.factory.get('/') + request.user = User.objects.create(username='a') + for dummy in range(3): + view.as_view()(request) + request.user = User.objects.create(username='b') + response = view.as_view()(request) + self.assertEqual(expect, response.status_code) + + def test_request_throttling_is_per_user(self): + """ + Ensure request rate is only limited per user, not globally for + PerUserThrottles + """ + self.ensure_is_throttled(MockView, 200) + + def ensure_response_header_contains_proper_throttle_field(self, view, expected_headers): + """ + Ensure the response returns an X-Throttle field with status and next attributes + set properly. + """ + request = self.factory.get('/') + for timer, expect in expected_headers: + self.set_throttle_timer(view, timer) + response = view.as_view()(request) + if expect is not None: + self.assertEqual(response['X-Throttle-Wait-Seconds'], expect) + else: + self.assertFalse('X-Throttle-Wait-Seconds' in response) + + def test_seconds_fields(self): + """ + Ensure correct throttle headers for second based throttles. + """ + self.ensure_response_header_contains_proper_throttle_field(MockView, + ((0, None), + (0, None), + (0, None), + (0, '1') + )) + + def test_minutes_fields(self): + """ + Ensure correct throttle headers for minute based throttles.
+ """ + self.ensure_response_header_contains_proper_throttle_field(MockView_MinuteThrottling, + ((0, None), + (0, None), + (0, None), + (0, '60') + )) + + def test_next_rate_remains_constant_if_followed(self): + """ + If a client follows the recommended next request rate, + the throttling rate should stay constant. + """ + self.ensure_response_header_contains_proper_throttle_field(MockView_MinuteThrottling, + ((0, None), + (20, None), + (40, None), + (60, None), + (80, None) + )) diff --git a/awx/lib/site-packages/rest_framework/tests/test_urlpatterns.py b/awx/lib/site-packages/rest_framework/tests/test_urlpatterns.py new file mode 100644 index 0000000000..29ed4a961c --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_urlpatterns.py @@ -0,0 +1,76 @@ +from __future__ import unicode_literals +from collections import namedtuple +from django.core import urlresolvers +from django.test import TestCase +from django.test.client import RequestFactory +from rest_framework.compat import patterns, url, include +from rest_framework.urlpatterns import format_suffix_patterns + + +# A container class for test paths for the test case +URLTestPath = namedtuple('URLTestPath', ['path', 'args', 'kwargs']) + + +def dummy_view(request, *args, **kwargs): + pass + + +class FormatSuffixTests(TestCase): + """ + Tests `format_suffix_patterns` against different URLPatterns to ensure the URLs still resolve properly, including any captured parameters. + """ + def _resolve_urlpatterns(self, urlpatterns, test_paths): + factory = RequestFactory() + try: + urlpatterns = format_suffix_patterns(urlpatterns) + except Exception: + self.fail("Failed to apply `format_suffix_patterns` on the supplied urlpatterns") + resolver = urlresolvers.RegexURLResolver(r'^/', urlpatterns) + for test_path in test_paths: + request = factory.get(test_path.path) + try: + callback, callback_args, callback_kwargs = resolver.resolve(request.path_info) + except Exception: + self.fail("Failed to resolve URL: %s" % request.path_info) + self.assertEqual(callback_args, test_path.args) + self.assertEqual(callback_kwargs, test_path.kwargs) + + def test_format_suffix(self): + urlpatterns = patterns( + '', + url(r'^test$', dummy_view), + ) + test_paths = [ + URLTestPath('/test', (), {}), + URLTestPath('/test.api', (), {'format': 'api'}), + URLTestPath('/test.asdf', (), {'format': 'asdf'}), + ] + self._resolve_urlpatterns(urlpatterns, test_paths) + + def test_default_args(self): + urlpatterns = patterns( + '', + url(r'^test$', dummy_view, {'foo': 'bar'}), + ) + test_paths = [ + URLTestPath('/test', (), {'foo': 'bar', }), + URLTestPath('/test.api', (), {'foo': 'bar', 'format': 'api'}), + URLTestPath('/test.asdf', (), {'foo': 'bar', 'format': 'asdf'}), + ] + self._resolve_urlpatterns(urlpatterns, test_paths) + + def test_included_urls(self): + nested_patterns = patterns( + '', + url(r'^path$', dummy_view) + ) + urlpatterns = patterns( + '', + url(r'^test/', include(nested_patterns), {'foo': 'bar'}), + ) + test_paths = [ + URLTestPath('/test/path', (), {'foo': 'bar', }), + URLTestPath('/test/path.api', (), {'foo': 'bar', 'format': 'api'}), + URLTestPath('/test/path.asdf', (), {'foo': 'bar', 'format': 'asdf'}), + ] + self._resolve_urlpatterns(urlpatterns, test_paths) diff --git a/awx/lib/site-packages/rest_framework/tests/test_validation.py b/awx/lib/site-packages/rest_framework/tests/test_validation.py new file mode 100644 index 0000000000..a6ec0e993d --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_validation.py @@ -0,0 +1,87 
@@ +from __future__ import unicode_literals +from django.db import models +from django.test import TestCase +from rest_framework import generics, serializers, status +from rest_framework.tests.utils import RequestFactory +import json + +factory = RequestFactory() + + +# Regression for #666 + +class ValidationModel(models.Model): + blank_validated_field = models.CharField(max_length=255) + + +class ValidationModelSerializer(serializers.ModelSerializer): + class Meta: + model = ValidationModel + fields = ('blank_validated_field',) + read_only_fields = ('blank_validated_field',) + + +class UpdateValidationModel(generics.RetrieveUpdateDestroyAPIView): + model = ValidationModel + serializer_class = ValidationModelSerializer + + +class TestPreSaveValidationExclusions(TestCase): + def test_pre_save_validation_exclusions(self): + """ + Somewhat weird test case to ensure that we don't perform model + validation on read only fields. + """ + obj = ValidationModel.objects.create(blank_validated_field='') + request = factory.put('/', json.dumps({}), + content_type='application/json') + view = UpdateValidationModel().as_view() + response = view(request, pk=obj.pk).render() + self.assertEqual(response.status_code, status.HTTP_200_OK) + + +# Regression for #653 + +class ShouldValidateModel(models.Model): + should_validate_field = models.CharField(max_length=255) + + +class ShouldValidateModelSerializer(serializers.ModelSerializer): + renamed = serializers.CharField(source='should_validate_field', required=False) + + class Meta: + model = ShouldValidateModel + fields = ('renamed',) + + +class TestRenamedFieldsValidation(TestCase): + def test_renamed_fields_are_model_validated(self): + """ + Ensure fields with 'source' applied still get model validation. + """ + # We've set `required=False` on the serializer, but the model + # does not have `blank=True`, so this serializer should not validate.
+ serializer = ShouldValidateModelSerializer(data={'renamed': ''}) + self.assertEqual(serializer.is_valid(), False) + + +class ValidationSerializer(serializers.Serializer): + foo = serializers.CharField() + + def validate_foo(self, attrs, source): + raise serializers.ValidationError("foo invalid") + + def validate(self, attrs): + raise serializers.ValidationError("serializer invalid") + + +class TestAvoidValidation(TestCase): + """ + If serializer was initialized with invalid data (None or non dict-like), it + should avoid validation layer (validate_<field> and validate methods) + """ + def test_serializer_errors_has_only_invalid_data_error(self): + serializer = ValidationSerializer(data='invalid data') + self.assertFalse(serializer.is_valid()) + self.assertDictEqual(serializer.errors, + {'non_field_errors': ['Invalid data']}) diff --git a/awx/lib/site-packages/rest_framework/tests/test_views.py b/awx/lib/site-packages/rest_framework/tests/test_views.py new file mode 100644 index 0000000000..2767d24c80 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/test_views.py @@ -0,0 +1,103 @@ +from __future__ import unicode_literals + +import copy + +from django.test import TestCase +from django.test.client import RequestFactory + +from rest_framework import status +from rest_framework.decorators import api_view +from rest_framework.response import Response +from rest_framework.settings import api_settings +from rest_framework.views import APIView + +factory = RequestFactory() + + +class BasicView(APIView): + def get(self, request, *args, **kwargs): + return Response({'method': 'GET'}) + + def post(self, request, *args, **kwargs): + return Response({'method': 'POST', 'data': request.DATA}) + + +@api_view(['GET', 'POST', 'PUT', 'PATCH']) +def basic_view(request): + if request.method == 'GET': + return {'method': 'GET'} + elif request.method == 'POST': + return {'method': 'POST', 'data': request.DATA} + elif request.method == 'PUT': + return {'method': 'PUT', 'data': request.DATA} + elif request.method == 'PATCH': + return {'method': 'PATCH', 'data': request.DATA} + + +def sanitise_json_error(error_dict): + """ + Exact contents of JSON error messages depend on the installed version + of json. 
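+    Chop the detail message down to a stable prefix so the comparison
+    does not depend on that version.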
+ """ + ret = copy.copy(error_dict) + chop = len('JSON parse error - No JSON object could be decoded') + ret['detail'] = ret['detail'][:chop] + return ret + + +class ClassBasedViewIntegrationTests(TestCase): + def setUp(self): + self.view = BasicView.as_view() + + def test_400_parse_error(self): + request = factory.post('/', 'f00bar', content_type='application/json') + response = self.view(request) + expected = { + 'detail': 'JSON parse error - No JSON object could be decoded' + } + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual(sanitise_json_error(response.data), expected) + + def test_400_parse_error_tunneled_content(self): + content = 'f00bar' + content_type = 'application/json' + form_data = { + api_settings.FORM_CONTENT_OVERRIDE: content, + api_settings.FORM_CONTENTTYPE_OVERRIDE: content_type + } + request = factory.post('/', form_data) + response = self.view(request) + expected = { + 'detail': 'JSON parse error - No JSON object could be decoded' + } + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual(sanitise_json_error(response.data), expected) + + +class FunctionBasedViewIntegrationTests(TestCase): + def setUp(self): + self.view = basic_view + + def test_400_parse_error(self): + request = factory.post('/', 'f00bar', content_type='application/json') + response = self.view(request) + expected = { + 'detail': 'JSON parse error - No JSON object could be decoded' + } + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual(sanitise_json_error(response.data), expected) + + def test_400_parse_error_tunneled_content(self): + content = 'f00bar' + content_type = 'application/json' + form_data = { + api_settings.FORM_CONTENT_OVERRIDE: content, + api_settings.FORM_CONTENTTYPE_OVERRIDE: content_type + } + request = factory.post('/', form_data) + response = self.view(request) + expected = { + 'detail': 'JSON parse error - No JSON object could be decoded' + } + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual(sanitise_json_error(response.data), expected) diff --git a/awx/lib/site-packages/rest_framework/tests/tests.py b/awx/lib/site-packages/rest_framework/tests/tests.py new file mode 100644 index 0000000000..554ebd1ad2 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/tests.py @@ -0,0 +1,16 @@ +""" +Force import of all modules in this package in order to get the standard test +runner to pick up the tests. Yowzers. 
+""" +from __future__ import unicode_literals +import os +import django + +modules = [filename.rsplit('.', 1)[0] + for filename in os.listdir(os.path.dirname(__file__)) + if filename.endswith('.py') and not filename.startswith('_')] +__test__ = dict() + +if django.VERSION < (1, 6): + for module in modules: + exec("from rest_framework.tests.%s import *" % module) diff --git a/awx/lib/site-packages/rest_framework/tests/utils.py b/awx/lib/site-packages/rest_framework/tests/utils.py new file mode 100644 index 0000000000..8c87917d92 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/tests/utils.py @@ -0,0 +1,40 @@ +from __future__ import unicode_literals +from django.test.client import FakePayload, Client as _Client, RequestFactory as _RequestFactory +from django.test.client import MULTIPART_CONTENT +from rest_framework.compat import urlparse + + +class RequestFactory(_RequestFactory): + + def __init__(self, **defaults): + super(RequestFactory, self).__init__(**defaults) + + def patch(self, path, data={}, content_type=MULTIPART_CONTENT, + **extra): + "Construct a PATCH request." + + patch_data = self._encode_data(data, content_type) + + parsed = urlparse.urlparse(path) + r = { + 'CONTENT_LENGTH': len(patch_data), + 'CONTENT_TYPE': content_type, + 'PATH_INFO': self._get_path(parsed), + 'QUERY_STRING': parsed[4], + 'REQUEST_METHOD': 'PATCH', + 'wsgi.input': FakePayload(patch_data), + } + r.update(extra) + return self.request(**r) + + +class Client(_Client, RequestFactory): + def patch(self, path, data={}, content_type=MULTIPART_CONTENT, + follow=False, **extra): + """ + Send a resource to the server using PATCH. + """ + response = super(Client, self).patch(path, data=data, content_type=content_type, **extra) + if follow: + response = self._handle_redirects(response, **extra) + return response diff --git a/awx/lib/site-packages/rest_framework/throttling.py b/awx/lib/site-packages/rest_framework/throttling.py new file mode 100644 index 0000000000..93ea9816cb --- /dev/null +++ b/awx/lib/site-packages/rest_framework/throttling.py @@ -0,0 +1,211 @@ +""" +Provides various throttling policies. +""" +from __future__ import unicode_literals +from django.core.cache import cache +from rest_framework import exceptions +from rest_framework.settings import api_settings +import time + + +class BaseThrottle(object): + """ + Rate throttling of requests. + """ + def allow_request(self, request, view): + """ + Return `True` if the request should be allowed, `False` otherwise. + """ + raise NotImplementedError('.allow_request() must be overridden') + + def wait(self): + """ + Optionally, return a recommended number of seconds to wait before + the next request. + """ + return None + + +class SimpleRateThrottle(BaseThrottle): + """ + A simple cache implementation, that only requires `.get_cache_key()` + to be overridden. + + The rate (requests / seconds) is set by a `throttle` attribute on the View + class. The attribute is a string of the form 'number_of_requests/period'. + + Period should be one of: ('s', 'sec', 'm', 'min', 'h', 'hour', 'd', 'day') + + Previous request information used for throttling is stored in the cache. + """ + + timer = time.time + settings = api_settings + cache_format = 'throtte_%(scope)s_%(ident)s' + scope = None + + def __init__(self): + if not getattr(self, 'rate', None): + self.rate = self.get_rate() + self.num_requests, self.duration = self.parse_rate(self.rate) + + def get_cache_key(self, request, view): + """ + Should return a unique cache-key which can be used for throttling. 
Must be overridden. + + May return `None` if the request should not be throttled. + """ + raise NotImplementedError('.get_cache_key() must be overridden') + + def get_rate(self): + """ + Determine the string representation of the allowed request rate. + """ + if not getattr(self, 'scope', None): + msg = ("You must set either `.scope` or `.rate` for '%s' throttle" % + self.__class__.__name__) + raise exceptions.ConfigurationError(msg) + + try: + return self.settings.DEFAULT_THROTTLE_RATES[self.scope] + except KeyError: + msg = "No default throttle rate set for '%s' scope" % self.scope + raise exceptions.ConfigurationError(msg) + + def parse_rate(self, rate): + """ + Given the request rate string, return a two tuple of: + <allowed number of requests>, <period of time in seconds> + """ + if rate is None: + return (None, None) + num, period = rate.split('/') + num_requests = int(num) + duration = {'s': 1, 'm': 60, 'h': 3600, 'd': 86400}[period[0]] + return (num_requests, duration) + + def allow_request(self, request, view): + """ + Implement the check to see if the request should be throttled. + + On success calls `throttle_success`. + On failure calls `throttle_failure`. + """ + if self.rate is None: + return True + + self.key = self.get_cache_key(request, view) + self.history = cache.get(self.key, []) + self.now = self.timer() + + # Drop any requests from the history which have now passed the + # throttle duration + while self.history and self.history[-1] <= self.now - self.duration: + self.history.pop() + if len(self.history) >= self.num_requests: + return self.throttle_failure() + return self.throttle_success() + + def throttle_success(self): + """ + Inserts the current request's timestamp along with the key + into the cache. + """ + self.history.insert(0, self.now) + cache.set(self.key, self.history, self.duration) + return True + + def throttle_failure(self): + """ + Called when a request to the API has failed due to throttling. + """ + return False + + def wait(self): + """ + Returns the recommended next request time in seconds. + """ + if self.history: + remaining_duration = self.duration - (self.now - self.history[-1]) + else: + remaining_duration = self.duration + + available_requests = self.num_requests - len(self.history) + 1 + + return remaining_duration / float(available_requests) + + +class AnonRateThrottle(SimpleRateThrottle): + """ + Limits the rate of API calls that may be made by anonymous users. + + The IP address of the request will be used as the unique cache key. + """ + scope = 'anon' + + def get_cache_key(self, request, view): + if request.user.is_authenticated(): + return None # Only throttle unauthenticated requests. + + ident = request.META.get('REMOTE_ADDR', None) + + return self.cache_format % { + 'scope': self.scope, + 'ident': ident + } + + +class UserRateThrottle(SimpleRateThrottle): + """ + Limits the rate of API calls that may be made by a given user. + + The user id will be used as a unique cache key if the user is + authenticated. For anonymous requests, the IP address of the request will + be used. + """ + scope = 'user' + + def get_cache_key(self, request, view): + if request.user.is_authenticated(): + ident = request.user.id + else: + ident = request.META.get('REMOTE_ADDR', None) + + return self.cache_format % { + 'scope': self.scope, + 'ident': ident + } + + +class ScopedRateThrottle(SimpleRateThrottle): + """ + Limits the rate of API calls by different amounts for various parts of + the API.
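+
+    For example (an illustrative sketch; the view and scope names here
+    are invented, not part of the vendored file):
+
+        class UploadView(APIView):
+            throttle_scope = 'uploads'
+
+    with a matching 'uploads' rate defined in DEFAULT_THROTTLE_RATES.
+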
Any view that has the `throttle_scope` property set will be + throttled. The unique cache key will be generated by concatenating the + user id of the request, and the scope of the view being accessed. + """ + scope_attr = 'throttle_scope' + + def get_cache_key(self, request, view): + """ + If `view.throttle_scope` is not set, don't apply this throttle. + + Otherwise generate the unique cache key by concatenating the user id + with the '.throttle_scope` property of the view. + """ + scope = getattr(view, self.scope_attr, None) + + if not scope: + # Only throttle views if `.throttle_scope` is set on the view. + return None + + if request.user.is_authenticated(): + ident = request.user.id + else: + ident = request.META.get('REMOTE_ADDR', None) + + return self.cache_format % { + 'scope': scope, + 'ident': ident + } diff --git a/awx/lib/site-packages/rest_framework/urlpatterns.py b/awx/lib/site-packages/rest_framework/urlpatterns.py new file mode 100644 index 0000000000..d9143bb4c9 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/urlpatterns.py @@ -0,0 +1,62 @@ +from __future__ import unicode_literals +from django.core.urlresolvers import RegexURLResolver +from rest_framework.compat import url, include +from rest_framework.settings import api_settings + + +def apply_suffix_patterns(urlpatterns, suffix_pattern, suffix_required): + ret = [] + for urlpattern in urlpatterns: + if isinstance(urlpattern, RegexURLResolver): + # Set of included URL patterns + regex = urlpattern.regex.pattern + namespace = urlpattern.namespace + app_name = urlpattern.app_name + kwargs = urlpattern.default_kwargs + # Add in the included patterns, after applying the suffixes + patterns = apply_suffix_patterns(urlpattern.url_patterns, + suffix_pattern, + suffix_required) + ret.append(url(regex, include(patterns, namespace, app_name), kwargs)) + + else: + # Regular URL pattern + regex = urlpattern.regex.pattern.rstrip('$') + suffix_pattern + view = urlpattern._callback or urlpattern._callback_str + kwargs = urlpattern.default_args + name = urlpattern.name + # Add in both the existing and the new urlpattern + if not suffix_required: + ret.append(urlpattern) + ret.append(url(regex, view, kwargs, name)) + + return ret + + +def format_suffix_patterns(urlpatterns, suffix_required=False, allowed=None): + """ + Supplement existing urlpatterns with corresponding patterns that also + include a '.format' suffix. Retains urlpattern ordering. + + urlpatterns: + A list of URL patterns. + + suffix_required: + If `True`, only suffixed URLs will be generated, and non-suffixed + URLs will not be used. Defaults to `False`. + + allowed: + An optional tuple/list of allowed suffixes. eg ['json', 'api'] + Defaults to `None`, which allows any suffix. + """ + suffix_kwarg = api_settings.FORMAT_SUFFIX_KWARG + if allowed: + if len(allowed) == 1: + allowed_pattern = allowed[0] + else: + allowed_pattern = '(%s)' % '|'.join(allowed) + suffix_pattern = r'\.(?P<%s>%s)$' % (suffix_kwarg, allowed_pattern) + else: + suffix_pattern = r'\.(?P<%s>[a-z]+)$' % suffix_kwarg + + return apply_suffix_patterns(urlpatterns, suffix_pattern, suffix_required) diff --git a/awx/lib/site-packages/rest_framework/urls.py b/awx/lib/site-packages/rest_framework/urls.py new file mode 100644 index 0000000000..9c4719f1d4 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/urls.py @@ -0,0 +1,24 @@ +""" +Login and logout views for the browsable API. + +Add these to your root URLconf if you're using the browsable API and +your API requires authentication. 
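+
+Once the patterns are included under the namespace described below, the
+views can be reversed in the usual way, e.g.:
+
+    from django.core.urlresolvers import reverse
+    login_url = reverse('rest_framework:login')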
+ +The urls must be namespaced as 'rest_framework', and you should make sure +your authentication settings include `SessionAuthentication`. + + urlpatterns = patterns('', + ... + url(r'^auth', include('rest_framework.urls', namespace='rest_framework')) + ) +""" +from __future__ import unicode_literals +from rest_framework.compat import patterns, url + + +template_name = {'template_name': 'rest_framework/login.html'} + +urlpatterns = patterns('django.contrib.auth.views', + url(r'^login/$', 'login', template_name, name='login'), + url(r'^logout/$', 'logout', template_name, name='logout'), +) diff --git a/awx/lib/site-packages/rest_framework/utils/__init__.py b/awx/lib/site-packages/rest_framework/utils/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/rest_framework/utils/breadcrumbs.py b/awx/lib/site-packages/rest_framework/utils/breadcrumbs.py new file mode 100644 index 0000000000..d51374b0a8 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/utils/breadcrumbs.py @@ -0,0 +1,54 @@ +from __future__ import unicode_literals +from django.core.urlresolvers import resolve, get_script_prefix +from rest_framework.utils.formatting import get_view_name + + +def get_breadcrumbs(url): + """ + Given a url returns a list of breadcrumbs, which are each a + tuple of (name, url). + """ + + from rest_framework.views import APIView + + def breadcrumbs_recursive(url, breadcrumbs_list, prefix, seen): + """ + Add tuples of (name, url) to the breadcrumbs list, + progressively chomping off parts of the url. + """ + + try: + (view, unused_args, unused_kwargs) = resolve(url) + except Exception: + pass + else: + # Check if this is a REST framework view, + # and if so add it to the breadcrumbs + cls = getattr(view, 'cls', None) + if cls is not None and issubclass(cls, APIView): + # Don't list the same view twice in a row. + # Probably an optional trailing slash. + if not seen or seen[-1] != view: + suffix = getattr(view, 'suffix', None) + name = get_view_name(view.cls, suffix) + breadcrumbs_list.insert(0, (name, prefix + url)) + seen.append(view) + + if url == '': + # All done + return breadcrumbs_list + + elif url.endswith('/'): + # Drop trailing slash off the end and continue to try to + # resolve more breadcrumbs + url = url.rstrip('/') + return breadcrumbs_recursive(url, breadcrumbs_list, prefix, seen) + + # Drop trailing non-slash off the end and continue to try to + # resolve more breadcrumbs + url = url[:url.rfind('/') + 1] + return breadcrumbs_recursive(url, breadcrumbs_list, prefix, seen) + + prefix = get_script_prefix().rstrip('/') + url = url[len(prefix):] + return breadcrumbs_recursive(url, [], prefix, []) diff --git a/awx/lib/site-packages/rest_framework/utils/encoders.py b/awx/lib/site-packages/rest_framework/utils/encoders.py new file mode 100644 index 0000000000..b26a2085a8 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/utils/encoders.py @@ -0,0 +1,97 @@ +""" +Helper classes for parsers. +""" +from __future__ import unicode_literals +from django.utils.datastructures import SortedDict +from django.utils.functional import Promise +from rest_framework.compat import timezone, force_text +from rest_framework.serializers import DictWithMetadata, SortedDictWithMetadata +import datetime +import decimal +import types +import json + + +class JSONEncoder(json.JSONEncoder): + """ + JSONEncoder subclass that knows how to encode date/time/timedelta, + decimal types, and generators. 
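+
+    A usage sketch (the payload keys are only illustrative):
+
+        import datetime, json
+        json.dumps({'created': datetime.date.today()}, cls=JSONEncoder)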
+ """ + def default(self, o): + # For Date Time string spec, see ECMA 262 + # http://ecma-international.org/ecma-262/5.1/#sec-15.9.1.15 + if isinstance(o, Promise): + return force_text(o) + elif isinstance(o, datetime.datetime): + r = o.isoformat() + if o.microsecond: + r = r[:23] + r[26:] + if r.endswith('+00:00'): + r = r[:-6] + 'Z' + return r + elif isinstance(o, datetime.date): + return o.isoformat() + elif isinstance(o, datetime.time): + if timezone and timezone.is_aware(o): + raise ValueError("JSON can't represent timezone-aware times.") + r = o.isoformat() + if o.microsecond: + r = r[:12] + return r + elif isinstance(o, datetime.timedelta): + return str(o.total_seconds()) + elif isinstance(o, decimal.Decimal): + return str(o) + elif hasattr(o, '__iter__'): + return [i for i in o] + return super(JSONEncoder, self).default(o) + + +try: + import yaml +except ImportError: + SafeDumper = None +else: + # Adapted from http://pyyaml.org/attachment/ticket/161/use_ordered_dict.py + class SafeDumper(yaml.SafeDumper): + """ + Handles decimals as strings. + Handles SortedDicts as usual dicts, but preserves field order, rather + than the usual behaviour of sorting the keys. + """ + def represent_decimal(self, data): + return self.represent_scalar('tag:yaml.org,2002:str', str(data)) + + def represent_mapping(self, tag, mapping, flow_style=None): + value = [] + node = yaml.MappingNode(tag, value, flow_style=flow_style) + if self.alias_key is not None: + self.represented_objects[self.alias_key] = node + best_style = True + if hasattr(mapping, 'items'): + mapping = list(mapping.items()) + if not isinstance(mapping, SortedDict): + mapping.sort() + for item_key, item_value in mapping: + node_key = self.represent_data(item_key) + node_value = self.represent_data(item_value) + if not (isinstance(node_key, yaml.ScalarNode) and not node_key.style): + best_style = False + if not (isinstance(node_value, yaml.ScalarNode) and not node_value.style): + best_style = False + value.append((node_key, node_value)) + if flow_style is None: + if self.default_flow_style is not None: + node.flow_style = self.default_flow_style + else: + node.flow_style = best_style + return node + + SafeDumper.add_representer(SortedDict, + yaml.representer.SafeRepresenter.represent_dict) + SafeDumper.add_representer(DictWithMetadata, + yaml.representer.SafeRepresenter.represent_dict) + SafeDumper.add_representer(SortedDictWithMetadata, + yaml.representer.SafeRepresenter.represent_dict) + SafeDumper.add_representer(types.GeneratorType, + yaml.representer.SafeRepresenter.represent_list) diff --git a/awx/lib/site-packages/rest_framework/utils/formatting.py b/awx/lib/site-packages/rest_framework/utils/formatting.py new file mode 100644 index 0000000000..ebadb3a670 --- /dev/null +++ b/awx/lib/site-packages/rest_framework/utils/formatting.py @@ -0,0 +1,80 @@ +""" +Utility functions to return a formatted name and description for a given view. +""" +from __future__ import unicode_literals + +from django.utils.html import escape +from django.utils.safestring import mark_safe +from rest_framework.compat import apply_markdown +import re + + +def _remove_trailing_string(content, trailing): + """ + Strip trailing component `trailing` from `content` if it exists. + Used when generating names from view classes. + """ + if content.endswith(trailing) and content != trailing: + return content[:-len(trailing)] + return content + + +def _remove_leading_indent(content): + """ + Remove leading indent from a block of text. 
+ Used when generating descriptions from docstrings. + """ + whitespace_counts = [len(line) - len(line.lstrip(' ')) + for line in content.splitlines()[1:] if line.lstrip()] + + # unindent the content if needed + if whitespace_counts: + whitespace_pattern = '^' + (' ' * min(whitespace_counts)) + content = re.sub(re.compile(whitespace_pattern, re.MULTILINE), '', content) + content = content.strip('\n') + return content + + +def _camelcase_to_spaces(content): + """ + Translate 'CamelCaseNames' to 'Camel Case Names'. + Used when generating names from view classes. + """ + camelcase_boundry = '(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))' + content = re.sub(camelcase_boundry, ' \\1', content).strip() + return ' '.join(content.split('_')).title() + + +def get_view_name(cls, suffix=None): + """ + Return a formatted name for an `APIView` class or `@api_view` function. + """ + name = cls.__name__ + name = _remove_trailing_string(name, 'View') + name = _remove_trailing_string(name, 'ViewSet') + name = _camelcase_to_spaces(name) + if suffix: + name += ' ' + suffix + return name + + +def get_view_description(cls, html=False): + """ + Return a description for an `APIView` class or `@api_view` function. + """ + description = cls.__doc__ or '' + description = _remove_leading_indent(description) + if html: + return markup_description(description) + return description + + +def markup_description(description): + """ + Apply HTML markup to the given description. + """ + if apply_markdown: + description = apply_markdown(description) + else: + description = escape(description).replace('\n', '<br />') + return mark_safe(description) diff --git a/awx/lib/site-packages/rest_framework/utils/mediatypes.py b/awx/lib/site-packages/rest_framework/utils/mediatypes.py new file mode 100644 index 0000000000..c09c29338e --- /dev/null +++ b/awx/lib/site-packages/rest_framework/utils/mediatypes.py @@ -0,0 +1,88 @@ +""" +Handling of media types, as found in HTTP Content-Type and Accept headers. + +See http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7 +""" +from __future__ import unicode_literals +from django.http.multipartparser import parse_header +from rest_framework import HTTP_HEADER_ENCODING + + +def media_type_matches(lhs, rhs): + """ + Returns ``True`` if the media type in the first argument <= the + media type in the second argument. The media types are strings + as described by the HTTP spec. + + Valid media type strings include: + + 'application/json; indent=4' + 'application/json' + 'text/*' + '*/*' + """ + lhs = _MediaType(lhs) + rhs = _MediaType(rhs) + return lhs.match(rhs) + + +def order_by_precedence(media_type_lst): + """ + Returns a list of sets of media type strings, ordered by precedence. + Precedence is determined by how specific a media type is: + + 3. 'type/subtype; param=val' + 2. 'type/subtype' + 1. 'type/*' + 0. 
'*/*' + """ + ret = [set(), set(), set(), set()] + for media_type in media_type_lst: + precedence = _MediaType(media_type).precedence + ret[3 - precedence].add(media_type) + return [media_types for media_types in ret if media_types] + + +class _MediaType(object): + def __init__(self, media_type_str): + if media_type_str is None: + media_type_str = '' + self.orig = media_type_str + self.full_type, self.params = parse_header(media_type_str.encode(HTTP_HEADER_ENCODING)) + self.main_type, sep, self.sub_type = self.full_type.partition('/') + + def match(self, other): + """Return true if this MediaType satisfies the given MediaType.""" + for key in self.params.keys(): + if key != 'q' and other.params.get(key, None) != self.params.get(key, None): + return False + + if self.sub_type != '*' and other.sub_type != '*' and other.sub_type != self.sub_type: + return False + + if self.main_type != '*' and other.main_type != '*' and other.main_type != self.main_type: + return False + + return True + + @property + def precedence(self): + """ + Return a precedence level from 0-3 for the media type given how specific it is. + """ + if self.main_type == '*': + return 0 + elif self.sub_type == '*': + return 1 + elif not self.params or self.params.keys() == ['q']: + return 2 + return 3 + + def __str__(self): + return unicode(self).encode('utf-8') + + def __unicode__(self): + ret = "%s/%s" % (self.main_type, self.sub_type) + for key, val in self.params.items(): + ret += "; %s=%s" % (key, val) + return ret diff --git a/awx/lib/site-packages/rest_framework/views.py b/awx/lib/site-packages/rest_framework/views.py new file mode 100644 index 0000000000..e1b6705b6d --- /dev/null +++ b/awx/lib/site-packages/rest_framework/views.py @@ -0,0 +1,349 @@ +""" +Provides an APIView class that is the base of all views in REST framework. +""" +from __future__ import unicode_literals + +from django.core.exceptions import PermissionDenied +from django.http import Http404, HttpResponse +from django.utils.datastructures import SortedDict +from django.views.decorators.csrf import csrf_exempt +from rest_framework import status, exceptions +from rest_framework.compat import View +from rest_framework.request import Request +from rest_framework.response import Response +from rest_framework.settings import api_settings +from rest_framework.utils.formatting import get_view_name, get_view_description + + +class APIView(View): + settings = api_settings + + renderer_classes = api_settings.DEFAULT_RENDERER_CLASSES + parser_classes = api_settings.DEFAULT_PARSER_CLASSES + authentication_classes = api_settings.DEFAULT_AUTHENTICATION_CLASSES + throttle_classes = api_settings.DEFAULT_THROTTLE_CLASSES + permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + content_negotiation_class = api_settings.DEFAULT_CONTENT_NEGOTIATION_CLASS + + @classmethod + def as_view(cls, **initkwargs): + """ + Store the original class on the view function. + + This allows us to discover information about the view when we do URL + reverse lookups. Used for breadcrumb generation. + """ + view = super(APIView, cls).as_view(**initkwargs) + view.cls = cls + return view + + @property + def allowed_methods(self): + """ + Wrap Django's private `_allowed_methods` interface in a public property. + """ + return self._allowed_methods() + + @property + def default_response_headers(self): + # TODO: deprecate? 
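+        # e.g. {'Allow': 'GET, POST, OPTIONS', 'Vary': 'Accept'} for a
+        # view that defines get() and post() handlers.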
+ # TODO: Only vary by accept if multiple renderers + return { + 'Allow': ', '.join(self.allowed_methods), + 'Vary': 'Accept' + } + + def http_method_not_allowed(self, request, *args, **kwargs): + """ + If `request.method` does not correspond to a handler method, + determine what kind of exception to raise. + """ + raise exceptions.MethodNotAllowed(request.method) + + def permission_denied(self, request): + """ + If request is not permitted, determine what kind of exception to raise. + """ + if not self.request.successful_authenticator: + raise exceptions.NotAuthenticated() + raise exceptions.PermissionDenied() + + def throttled(self, request, wait): + """ + If request is throttled, determine what kind of exception to raise. + """ + raise exceptions.Throttled(wait) + + def get_authenticate_header(self, request): + """ + If a request is unauthenticated, determine the WWW-Authenticate + header to use for 401 responses, if any. + """ + authenticators = self.get_authenticators() + if authenticators: + return authenticators[0].authenticate_header(request) + + def get_parser_context(self, http_request): + """ + Returns a dict that is passed through to Parser.parse(), + as the `parser_context` keyword argument. + """ + # Note: Additionally `request` will also be added to the context + # by the Request object. + return { + 'view': self, + 'args': getattr(self, 'args', ()), + 'kwargs': getattr(self, 'kwargs', {}) + } + + def get_renderer_context(self): + """ + Returns a dict that is passed through to Renderer.render(), + as the `renderer_context` keyword argument. + """ + # Note: Additionally 'response' will also be added to the context, + # by the Response object. + return { + 'view': self, + 'args': getattr(self, 'args', ()), + 'kwargs': getattr(self, 'kwargs', {}), + 'request': getattr(self, 'request', None) + } + + # API policy instantiation methods + + def get_format_suffix(self, **kwargs): + """ + Determine if the request includes a '.json' style format suffix + """ + if self.settings.FORMAT_SUFFIX_KWARG: + return kwargs.get(self.settings.FORMAT_SUFFIX_KWARG) + + def get_renderers(self): + """ + Instantiates and returns the list of renderers that this view can use. + """ + return [renderer() for renderer in self.renderer_classes] + + def get_parsers(self): + """ + Instantiates and returns the list of parsers that this view can use. + """ + return [parser() for parser in self.parser_classes] + + def get_authenticators(self): + """ + Instantiates and returns the list of authenticators that this view can use. + """ + return [auth() for auth in self.authentication_classes] + + def get_permissions(self): + """ + Instantiates and returns the list of permissions that this view requires. + """ + return [permission() for permission in self.permission_classes] + + def get_throttles(self): + """ + Instantiates and returns the list of throttles that this view uses. + """ + return [throttle() for throttle in self.throttle_classes] + + def get_content_negotiator(self): + """ + Instantiate and return the content negotiation class to use. + """ + if not getattr(self, '_negotiator', None): + self._negotiator = self.content_negotiation_class() + return self._negotiator + + # API policy implementation methods + + def perform_content_negotiation(self, request, force=False): + """ + Determine which renderer and media type to use render the response. 
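+
+        Returns a two-tuple of (renderer instance, media type string).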
+ """ + renderers = self.get_renderers() + conneg = self.get_content_negotiator() + + try: + return conneg.select_renderer(request, renderers, self.format_kwarg) + except Exception: + if force: + return (renderers[0], renderers[0].media_type) + raise + + def perform_authentication(self, request): + """ + Perform authentication on the incoming request. + + Note that if you override this and simply 'pass', then authentication + will instead be performed lazily, the first time either + `request.user` or `request.auth` is accessed. + """ + request.user + + def check_permissions(self, request): + """ + Check if the request should be permitted. + Raises an appropriate exception if the request is not permitted. + """ + for permission in self.get_permissions(): + if not permission.has_permission(request, self): + self.permission_denied(request) + + def check_object_permissions(self, request, obj): + """ + Check if the request should be permitted for a given object. + Raises an appropriate exception if the request is not permitted. + """ + for permission in self.get_permissions(): + if not permission.has_object_permission(request, self, obj): + self.permission_denied(request) + + def check_throttles(self, request): + """ + Check if request should be throttled. + Raises an appropriate exception if the request is throttled. + """ + for throttle in self.get_throttles(): + if not throttle.allow_request(request, self): + self.throttled(request, throttle.wait()) + + # Dispatch methods + + def initialize_request(self, request, *args, **kargs): + """ + Returns the initial request object. + """ + parser_context = self.get_parser_context(request) + + return Request(request, + parsers=self.get_parsers(), + authenticators=self.get_authenticators(), + negotiator=self.get_content_negotiator(), + parser_context=parser_context) + + def initial(self, request, *args, **kwargs): + """ + Runs anything that needs to occur prior to calling the method handler. + """ + self.format_kwarg = self.get_format_suffix(**kwargs) + + # Ensure that the incoming request is permitted + self.perform_authentication(request) + self.check_permissions(request) + self.check_throttles(request) + + # Perform content negotiation and store the accepted info on the request + neg = self.perform_content_negotiation(request) + request.accepted_renderer, request.accepted_media_type = neg + + def finalize_response(self, request, response, *args, **kwargs): + """ + Returns the final response object. + """ + # Make the error obvious if a proper response is not returned + assert isinstance(response, HttpResponse), ( + 'Expected a `Response` to be returned from the view, ' + 'but received a `%s`' % type(response) + ) + + if isinstance(response, Response): + if not getattr(request, 'accepted_renderer', None): + neg = self.perform_content_negotiation(request, force=True) + request.accepted_renderer, request.accepted_media_type = neg + + response.accepted_renderer = request.accepted_renderer + response.accepted_media_type = request.accepted_media_type + response.renderer_context = self.get_renderer_context() + + for key, value in self.headers.items(): + response[key] = value + + return response + + def handle_exception(self, exc): + """ + Handle any exception that occurs, by returning an appropriate response, + or re-raising the error. 
+ """ + if isinstance(exc, exceptions.Throttled): + # Throttle wait header + self.headers['X-Throttle-Wait-Seconds'] = '%d' % exc.wait + + if isinstance(exc, (exceptions.NotAuthenticated, + exceptions.AuthenticationFailed)): + # WWW-Authenticate header for 401 responses, else coerce to 403 + auth_header = self.get_authenticate_header(self.request) + + if auth_header: + self.headers['WWW-Authenticate'] = auth_header + else: + exc.status_code = status.HTTP_403_FORBIDDEN + + if isinstance(exc, exceptions.APIException): + return Response({'detail': exc.detail}, + status=exc.status_code, + exception=True) + elif isinstance(exc, Http404): + return Response({'detail': 'Not found'}, + status=status.HTTP_404_NOT_FOUND, + exception=True) + elif isinstance(exc, PermissionDenied): + return Response({'detail': 'Permission denied'}, + status=status.HTTP_403_FORBIDDEN, + exception=True) + raise + + # Note: session based authentication is explicitly CSRF validated, + # all other authentication is CSRF exempt. + @csrf_exempt + def dispatch(self, request, *args, **kwargs): + """ + `.dispatch()` is pretty much the same as Django's regular dispatch, + but with extra hooks for startup, finalize, and exception handling. + """ + request = self.initialize_request(request, *args, **kwargs) + self.request = request + self.args = args + self.kwargs = kwargs + self.headers = self.default_response_headers # deprecate? + + try: + self.initial(request, *args, **kwargs) + + # Get the appropriate handler method + if request.method.lower() in self.http_method_names: + handler = getattr(self, request.method.lower(), + self.http_method_not_allowed) + else: + handler = self.http_method_not_allowed + + response = handler(request, *args, **kwargs) + + except Exception as exc: + response = self.handle_exception(exc) + + self.response = self.finalize_response(request, response, *args, **kwargs) + return self.response + + def options(self, request, *args, **kwargs): + """ + Handler method for HTTP 'OPTIONS' request. + We may as well implement this as Django will otherwise provide + a less useful default implementation. + """ + return Response(self.metadata(request), status=status.HTTP_200_OK) + + def metadata(self, request): + """ + Return a dictionary of metadata about the view. + Used to return responses for OPTIONS requests. + """ + ret = SortedDict() + ret['name'] = get_view_name(self.__class__) + ret['description'] = get_view_description(self.__class__) + ret['renders'] = [renderer.media_type for renderer in self.renderer_classes] + ret['parses'] = [parser.media_type for parser in self.parser_classes] + return ret diff --git a/awx/lib/site-packages/rest_framework/viewsets.py b/awx/lib/site-packages/rest_framework/viewsets.py new file mode 100644 index 0000000000..d91323f22d --- /dev/null +++ b/awx/lib/site-packages/rest_framework/viewsets.py @@ -0,0 +1,139 @@ +""" +ViewSets are essentially just a type of class based view, that doesn't provide +any method handlers, such as `get()`, `post()`, etc... but instead has actions, +such as `list()`, `retrieve()`, `create()`, etc... + +Actions are only bound to methods at the point of instantiating the views. + + user_list = UserViewSet.as_view({'get': 'list'}) + user_detail = UserViewSet.as_view({'get': 'retrieve'}) + +Typically, rather than instantiate views from viewsets directly, you'll +regsiter the viewset with a router and let the URL conf be determined +automatically. 
+
+    router = DefaultRouter()
+    router.register(r'users', UserViewSet, 'user')
+    urlpatterns = router.urls
+"""
+from __future__ import unicode_literals
+
+from functools import update_wrapper
+from django.utils.decorators import classonlymethod
+from rest_framework import views, generics, mixins
+
+
+class ViewSetMixin(object):
+    """
+    This is the magic.
+
+    Overrides `.as_view()` so that it takes an `actions` keyword that performs
+    the binding of HTTP methods to actions on the Resource.
+
+    For example, to create a concrete view binding the 'GET' and 'POST' methods
+    to the 'list' and 'create' actions...
+
+        view = MyViewSet.as_view({'get': 'list', 'post': 'create'})
+    """
+
+    @classonlymethod
+    def as_view(cls, actions=None, **initkwargs):
+        """
+        Because of the way class based views create a closure around the
+        instantiated view, we need to totally reimplement `.as_view`,
+        and slightly modify the view function that is created and returned.
+        """
+        # The suffix initkwarg is reserved for identifying the viewset type
+        # eg. 'List' or 'Instance'.
+        cls.suffix = None
+
+        # sanitize keyword arguments
+        for key in initkwargs:
+            if key in cls.http_method_names:
+                raise TypeError("You tried to pass in the %s method name as a "
+                                "keyword argument to %s(). Don't do that."
+                                % (key, cls.__name__))
+            if not hasattr(cls, key):
+                raise TypeError("%s() received an invalid keyword %r" % (
+                    cls.__name__, key))
+
+        def view(request, *args, **kwargs):
+            self = cls(**initkwargs)
+            # We also store the mapping of request methods to actions,
+            # so that we can later set the action attribute.
+            # eg. `self.action = 'list'` on an incoming GET request.
+            self.action_map = actions
+
+            # Bind methods to actions
+            # This is the bit that's different to a standard view
+            for method, action in actions.items():
+                handler = getattr(self, action)
+                setattr(self, method, handler)
+
+            # Patch this in as it's otherwise only present from 1.5 onwards
+            if hasattr(self, 'get') and not hasattr(self, 'head'):
+                self.head = self.get
+
+            # And continue as usual
+            return self.dispatch(request, *args, **kwargs)
+
+        # take name and docstring from class
+        update_wrapper(view, cls, updated=())
+
+        # and possible attributes set by decorators
+        # like csrf_exempt from dispatch
+        update_wrapper(view, cls.dispatch, assigned=())
+
+        # We need to set these on the view function, so that breadcrumb
+        # generation can pick out these bits of information from a
+        # resolved URL.
+        view.cls = cls
+        view.suffix = initkwargs.get('suffix', None)
+        return view
+
+    def initialize_request(self, request, *args, **kargs):
+        """
+        Set the `.action` attribute on the view,
+        depending on the request method.
+        """
+        request = super(ViewSetMixin, self).initialize_request(request, *args, **kargs)
+        self.action = self.action_map.get(request.method.lower())
+        return request
+
+
+class ViewSet(ViewSetMixin, views.APIView):
+    """
+    The base ViewSet class does not provide any actions by default.
+    """
+    pass
+
+
+class GenericViewSet(ViewSetMixin, generics.GenericAPIView):
+    """
+    The GenericViewSet class does not provide any actions by default,
+    but does include the base set of generic view behavior, such as
+    the `get_object` and `get_queryset` methods.
+    """
+    pass
+
+
+class ReadOnlyModelViewSet(mixins.RetrieveModelMixin,
+                           mixins.ListModelMixin,
+                           GenericViewSet):
+    """
+    A viewset that provides default `list()` and `retrieve()` actions.
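+
+    A minimal sketch (the model and serializer names are made up):
+
+        class UserViewSet(ReadOnlyModelViewSet):
+            queryset = User.objects.all()
+            serializer_class = UserSerializer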
+ """ + pass + + +class ModelViewSet(mixins.CreateModelMixin, + mixins.RetrieveModelMixin, + mixins.UpdateModelMixin, + mixins.DestroyModelMixin, + mixins.ListModelMixin, + GenericViewSet): + """ + A viewset that provides default `create()`, `retrieve()`, `update()`, + `partial_update()`, `destroy()` and `list()` actions. + """ + pass diff --git a/awx/lib/site-packages/screen.py b/awx/lib/site-packages/screen.py new file mode 100644 index 0000000000..13699f93b5 --- /dev/null +++ b/awx/lib/site-packages/screen.py @@ -0,0 +1,380 @@ +"""This implements a virtual screen. This is used to support ANSI terminal +emulation. The screen representation and state is implemented in this class. +Most of the methods are inspired by ANSI screen control codes. The ANSI class +extends this class to add parsing of ANSI escape codes. + +$Id: screen.py 486 2007-07-13 01:04:16Z noah $ +""" + +import copy + +NUL = 0 # Fill character; ignored on input. +ENQ = 5 # Transmit answerback message. +BEL = 7 # Ring the bell. +BS = 8 # Move cursor left. +HT = 9 # Move cursor to next tab stop. +LF = 10 # Line feed. +VT = 11 # Same as LF. +FF = 12 # Same as LF. +CR = 13 # Move cursor to left margin or newline. +SO = 14 # Invoke G1 character set. +SI = 15 # Invoke G0 character set. +XON = 17 # Resume transmission. +XOFF = 19 # Halt transmission. +CAN = 24 # Cancel escape sequence. +SUB = 26 # Same as CAN. +ESC = 27 # Introduce a control sequence. +DEL = 127 # Fill character; ignored on input. +SPACE = chr(32) # Space or blank character. + +def constrain (n, min, max): + + """This returns a number, n constrained to the min and max bounds. """ + + if n < min: + return min + if n > max: + return max + return n + +class screen: + + """This object maintains the state of a virtual text screen as a + rectangluar array. This maintains a virtual cursor position and handles + scrolling as characters are added. This supports most of the methods needed + by an ANSI text screen. Row and column indexes are 1-based (not zero-based, + like arrays). """ + + def __init__ (self, r=24,c=80): + + """This initializes a blank scree of the given dimentions.""" + + self.rows = r + self.cols = c + self.cur_r = 1 + self.cur_c = 1 + self.cur_saved_r = 1 + self.cur_saved_c = 1 + self.scroll_row_start = 1 + self.scroll_row_end = self.rows + self.w = [ [SPACE] * self.cols for c in range(self.rows)] + + def __str__ (self): + + """This returns a printable representation of the screen. The end of + each screen line is terminated by a newline. """ + + return '\n'.join ([ ''.join(c) for c in self.w ]) + + def dump (self): + + """This returns a copy of the screen as a string. This is similar to + __str__ except that lines are not terminated with line feeds. """ + + return ''.join ([ ''.join(c) for c in self.w ]) + + def pretty (self): + + """This returns a copy of the screen as a string with an ASCII text box + around the screen border. This is similar to __str__ except that it + adds a box. 
""" + + top_bot = '+' + '-'*self.cols + '+\n' + return top_bot + '\n'.join(['|'+line+'|' for line in str(self).split('\n')]) + '\n' + top_bot + + def fill (self, ch=SPACE): + + self.fill_region (1,1,self.rows,self.cols, ch) + + def fill_region (self, rs,cs, re,ce, ch=SPACE): + + rs = constrain (rs, 1, self.rows) + re = constrain (re, 1, self.rows) + cs = constrain (cs, 1, self.cols) + ce = constrain (ce, 1, self.cols) + if rs > re: + rs, re = re, rs + if cs > ce: + cs, ce = ce, cs + for r in range (rs, re+1): + for c in range (cs, ce + 1): + self.put_abs (r,c,ch) + + def cr (self): + + """This moves the cursor to the beginning (col 1) of the current row. + """ + + self.cursor_home (self.cur_r, 1) + + def lf (self): + + """This moves the cursor down with scrolling. + """ + + old_r = self.cur_r + self.cursor_down() + if old_r == self.cur_r: + self.scroll_up () + self.erase_line() + + def crlf (self): + + """This advances the cursor with CRLF properties. + The cursor will line wrap and the screen may scroll. + """ + + self.cr () + self.lf () + + def newline (self): + + """This is an alias for crlf(). + """ + + self.crlf() + + def put_abs (self, r, c, ch): + + """Screen array starts at 1 index.""" + + r = constrain (r, 1, self.rows) + c = constrain (c, 1, self.cols) + ch = str(ch)[0] + self.w[r-1][c-1] = ch + + def put (self, ch): + + """This puts a characters at the current cursor position. + """ + + self.put_abs (self.cur_r, self.cur_c, ch) + + def insert_abs (self, r, c, ch): + + """This inserts a character at (r,c). Everything under + and to the right is shifted right one character. + The last character of the line is lost. + """ + + r = constrain (r, 1, self.rows) + c = constrain (c, 1, self.cols) + for ci in range (self.cols, c, -1): + self.put_abs (r,ci, self.get_abs(r,ci-1)) + self.put_abs (r,c,ch) + + def insert (self, ch): + + self.insert_abs (self.cur_r, self.cur_c, ch) + + def get_abs (self, r, c): + + r = constrain (r, 1, self.rows) + c = constrain (c, 1, self.cols) + return self.w[r-1][c-1] + + def get (self): + + self.get_abs (self.cur_r, self.cur_c) + + def get_region (self, rs,cs, re,ce): + + """This returns a list of lines representing the region. + """ + + rs = constrain (rs, 1, self.rows) + re = constrain (re, 1, self.rows) + cs = constrain (cs, 1, self.cols) + ce = constrain (ce, 1, self.cols) + if rs > re: + rs, re = re, rs + if cs > ce: + cs, ce = ce, cs + sc = [] + for r in range (rs, re+1): + line = '' + for c in range (cs, ce + 1): + ch = self.get_abs (r,c) + line = line + ch + sc.append (line) + return sc + + def cursor_constrain (self): + + """This keeps the cursor within the screen area. 
+ """ + + self.cur_r = constrain (self.cur_r, 1, self.rows) + self.cur_c = constrain (self.cur_c, 1, self.cols) + + def cursor_home (self, r=1, c=1): # <ESC>[{ROW};{COLUMN}H + + self.cur_r = r + self.cur_c = c + self.cursor_constrain () + + def cursor_back (self,count=1): # <ESC>[{COUNT}D (not confused with down) + + self.cur_c = self.cur_c - count + self.cursor_constrain () + + def cursor_down (self,count=1): # <ESC>[{COUNT}B (not confused with back) + + self.cur_r = self.cur_r + count + self.cursor_constrain () + + def cursor_forward (self,count=1): # <ESC>[{COUNT}C + + self.cur_c = self.cur_c + count + self.cursor_constrain () + + def cursor_up (self,count=1): # <ESC>[{COUNT}A + + self.cur_r = self.cur_r - count + self.cursor_constrain () + + def cursor_up_reverse (self): # <ESC> M (called RI -- Reverse Index) + + old_r = self.cur_r + self.cursor_up() + if old_r == self.cur_r: + self.scroll_up() + + def cursor_force_position (self, r, c): # <ESC>[{ROW};{COLUMN}f + + """Identical to Cursor Home.""" + + self.cursor_home (r, c) + + def cursor_save (self): # <ESC>[s + + """Save current cursor position.""" + + self.cursor_save_attrs() + + def cursor_unsave (self): # <ESC>[u + + """Restores cursor position after a Save Cursor.""" + + self.cursor_restore_attrs() + + def cursor_save_attrs (self): # <ESC>7 + + """Save current cursor position.""" + + self.cur_saved_r = self.cur_r + self.cur_saved_c = self.cur_c + + def cursor_restore_attrs (self): # <ESC>8 + + """Restores cursor position after a Save Cursor.""" + + self.cursor_home (self.cur_saved_r, self.cur_saved_c) + + def scroll_constrain (self): + + """This keeps the scroll region within the screen region.""" + + if self.scroll_row_start <= 0: + self.scroll_row_start = 1 + if self.scroll_row_end > self.rows: + self.scroll_row_end = self.rows + + def scroll_screen (self): # <ESC>[r + + """Enable scrolling for entire display.""" + + self.scroll_row_start = 1 + self.scroll_row_end = self.rows + + def scroll_screen_rows (self, rs, re): # <ESC>[{start};{end}r + + """Enable scrolling from row {start} to row {end}.""" + + self.scroll_row_start = rs + self.scroll_row_end = re + self.scroll_constrain() + + def scroll_down (self): # <ESC>D + + """Scroll display down one line.""" + + # Screen is indexed from 1, but arrays are indexed from 0. + s = self.scroll_row_start - 1 + e = self.scroll_row_end - 1 + self.w[s+1:e+1] = copy.deepcopy(self.w[s:e]) + + def scroll_up (self): # <ESC>M + + """Scroll display up one line.""" + + # Screen is indexed from 1, but arrays are indexed from 0. 
+ s = self.scroll_row_start - 1 + e = self.scroll_row_end - 1 + self.w[s:e] = copy.deepcopy(self.w[s+1:e+1]) + + def erase_end_of_line (self): # <ESC>[0K -or- <ESC>[K + + """Erases from the current cursor position to the end of the current + line.""" + + self.fill_region (self.cur_r, self.cur_c, self.cur_r, self.cols) + + def erase_start_of_line (self): # <ESC>[1K + + """Erases from the current cursor position to the start of the current + line.""" + + self.fill_region (self.cur_r, 1, self.cur_r, self.cur_c) + + def erase_line (self): # <ESC>[2K + + """Erases the entire current line.""" + + self.fill_region (self.cur_r, 1, self.cur_r, self.cols) + + def erase_down (self): # <ESC>[0J -or- <ESC>[J + + """Erases the screen from the current line down to the bottom of the + screen.""" + + self.erase_end_of_line () + self.fill_region (self.cur_r + 1, 1, self.rows, self.cols) + + def erase_up (self): # <ESC>[1J + + """Erases the screen from the current line up to the top of the + screen.""" + + self.erase_start_of_line () + self.fill_region (self.cur_r-1, 1, 1, self.cols) + + def erase_screen (self): # <ESC>[2J + + """Erases the screen with the background color.""" + + self.fill () + + def set_tab (self): # <ESC>H + + """Sets a tab at the current position.""" + + pass + + def clear_tab (self): # <ESC>[g + + """Clears tab at the current position.""" + + pass + + def clear_all_tabs (self): # <ESC>[3g + + """Clears all tabs.""" + + pass + +# Insert line Esc [ Pn L +# Delete line Esc [ Pn M +# Delete character Esc [ Pn P +# Scrolling region Esc [ Pn(top);Pn(bot) r + diff --git a/awx/lib/site-packages/six.py b/awx/lib/site-packages/six.py new file mode 100644 index 0000000000..eae31454ae --- /dev/null +++ b/awx/lib/site-packages/six.py @@ -0,0 +1,404 @@ +"""Utilities for writing code that runs on Python 2 and 3""" + +# Copyright (c) 2010-2013 Benjamin Peterson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy of +# this software and associated documentation files (the "Software"), to deal in +# the Software without restriction, including without limitation the rights to +# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +# the Software, and to permit persons to whom the Software is furnished to do so, +# subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +import operator +import sys +import types + +__author__ = "Benjamin Peterson <benjamin@python.org>" +__version__ = "1.3.0" + + +# True if we are running on Python 3. +PY3 = sys.version_info[0] == 3 + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. 
+ MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). + class X(object): + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) + # This is a bit ugly, but it avoids running this again. + delattr(tp, self.name) + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + + +class _MovedItems(types.ModuleType): + """Lazy loading of moved objects""" + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("reload_module", "__builtin__", "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("copyreg", "copy_reg"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", 
"tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", + "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", + "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", + "tkinter.simpledialog"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("winreg", "_winreg"), +] +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) +del attr + +moves = sys.modules[__name__ + ".moves"] = _MovedItems("moves") + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" + + _iterkeys = "keys" + _itervalues = "values" + _iteritems = "items" + _iterlists = "lists" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + _iterkeys = "iterkeys" + _itervalues = "itervalues" + _iteritems = "iteritems" + _iterlists = "iterlists" + + +try: + advance_iterator = next +except NameError: + def advance_iterator(it): + return it.next() +next = advance_iterator + + +try: + callable = callable +except NameError: + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + def get_unbound_function(unbound): + return unbound + + Iterator = object +else: + def get_unbound_function(unbound): + return unbound.im_func + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc(get_unbound_function, + """Get the function out of a possibly unbound function""") + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +def iterkeys(d, **kw): + """Return an iterator over the keys of a dictionary.""" + return iter(getattr(d, _iterkeys)(**kw)) + +def itervalues(d, **kw): + """Return an iterator over the values of a dictionary.""" + return iter(getattr(d, _itervalues)(**kw)) + +def iteritems(d, **kw): + """Return an iterator over the (key, value) pairs of a dictionary.""" + return iter(getattr(d, _iteritems)(**kw)) + +def iterlists(d, **kw): + """Return an iterator over the (key, [values]) pairs of a dictionary.""" + return iter(getattr(d, _iterlists)(**kw)) + + +if PY3: + def b(s): + return s.encode("latin-1") + def u(s): + return s + if sys.version_info[1] <= 1: + def 
int2byte(i): + return bytes((i,)) + else: + # This is about 2x faster than the implementation above on 3.2+ + int2byte = operator.methodcaller("to_bytes", 1, "big") + import io + StringIO = io.StringIO + BytesIO = io.BytesIO +else: + def b(s): + return s + def u(s): + return unicode(s, "unicode_escape") + int2byte = chr + import StringIO + StringIO = BytesIO = StringIO.StringIO +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +if PY3: + import builtins + exec_ = getattr(builtins, "exec") + + + def reraise(tp, value, tb=None): + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + + + print_ = getattr(builtins, "print") + del builtins + +else: + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + + + exec_("""def reraise(tp, value, tb=None): + raise tp, value, tb +""") + + + def print_(*args, **kwargs): + """The new-style print function.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + def write(data): + if not isinstance(data, basestring): + data = str(data) + fp.write(data) + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) + +_add_doc(reraise, """Reraise an exception.""") + + +def with_metaclass(meta, base=object): + """Create a base class with a metaclass.""" + return meta("NewBase", (base,), {}) diff --git a/awx/lib/site-packages/south/__init__.py b/awx/lib/site-packages/south/__init__.py new file mode 100644 index 0000000000..20c39178e6 --- /dev/null +++ b/awx/lib/site-packages/south/__init__.py @@ -0,0 +1,9 @@ +""" +South - Useable migrations for Django apps +""" + +__version__ = "0.8.1" +__authors__ = [ + "Andrew Godwin <andrew@aeracode.org>", + "Andy McCurdy <andy@andymccurdy.com>" +] diff --git a/awx/lib/site-packages/south/creator/__init__.py b/awx/lib/site-packages/south/creator/__init__.py new file mode 100644 index 0000000000..96a1a8021f --- /dev/null +++ b/awx/lib/site-packages/south/creator/__init__.py @@ -0,0 +1,5 @@ +""" +The creator module is responsible for making new migration files, either +as blank templates or autodetecting changes. It contains code that used to +all be in startmigration.py. +""" diff --git a/awx/lib/site-packages/south/creator/actions.py b/awx/lib/site-packages/south/creator/actions.py new file mode 100644 index 0000000000..37586c23ca --- /dev/null +++ b/awx/lib/site-packages/south/creator/actions.py @@ -0,0 +1,556 @@ +""" +Actions - things like 'a model was removed' or 'a field was changed'. 
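+For example, adding a field is represented by the AddField action below,
+which writes a db.add_column() call into forwards() and the matching
+db.delete_column() into backwards().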
+Each one has a class, which can take the action description and insert code +blocks into the forwards() and backwards() methods, in the right place. +""" + +from __future__ import print_function + +import sys + +from django.db.models.fields.related import RECURSIVE_RELATIONSHIP_CONSTANT +from django.db.models.fields import FieldDoesNotExist, NOT_PROVIDED, CharField, TextField + +from south.modelsinspector import value_clean +from south.creator.freezer import remove_useless_attributes, model_key +from south.utils import datetime_utils +from south.utils.py3 import raw_input + + +class Action(object): + """ + Generic base Action class. Contains utility methods for inserting into + the forwards() and backwards() method lists. + """ + + prepend_forwards = False + prepend_backwards = False + + def forwards_code(self): + raise NotImplementedError + + def backwards_code(self): + raise NotImplementedError + + def add_forwards(self, forwards): + if self.prepend_forwards: + forwards.insert(0, self.forwards_code()) + else: + forwards.append(self.forwards_code()) + + def add_backwards(self, backwards): + if self.prepend_backwards: + backwards.insert(0, self.backwards_code()) + else: + backwards.append(self.backwards_code()) + + def console_line(self): + "Returns the string to print on the console, e.g. ' + Added field foo'" + raise NotImplementedError + + @classmethod + def triples_to_defs(cls, fields): + # Turn the (class, args, kwargs) format into a string + for field, triple in fields.items(): + fields[field] = cls.triple_to_def(triple) + return fields + + @classmethod + def triple_to_def(cls, triple): + "Turns a single triple into a definition." + return "self.gf(%r)(%s)" % ( + triple[0], # Field full path + ", ".join(triple[1] + ["%s=%s" % (kwd, val) for kwd, val in triple[2].items()]), # args and kwds + ) + + +class AddModel(Action): + """ + Addition of a model. Takes the Model subclass that is being created. + """ + + FORWARDS_TEMPLATE = ''' + # Adding model '%(model_name)s' + db.create_table(%(table_name)r, ( + %(field_defs)s + )) + db.send_create_signal(%(app_label)r, [%(model_name)r])'''[1:] + "\n" + + BACKWARDS_TEMPLATE = ''' + # Deleting model '%(model_name)s' + db.delete_table(%(table_name)r)'''[1:] + "\n" + + def __init__(self, model, model_def): + self.model = model + self.model_def = model_def + + def console_line(self): + "Returns the string to print on the console, e.g. ' + Added field foo'" + return " + Added model %s.%s" % ( + self.model._meta.app_label, + self.model._meta.object_name, + ) + + def forwards_code(self): + "Produces the code snippet that gets put into forwards()" + field_defs = ",\n ".join([ + "(%r, %s)" % (name, defn) for name, defn + in self.triples_to_defs(self.model_def).items() + ]) + "," + + return self.FORWARDS_TEMPLATE % { + "model_name": self.model._meta.object_name, + "table_name": self.model._meta.db_table, + "app_label": self.model._meta.app_label, + "field_defs": field_defs, + } + + def backwards_code(self): + "Produces the code snippet that gets put into backwards()" + return self.BACKWARDS_TEMPLATE % { + "model_name": self.model._meta.object_name, + "table_name": self.model._meta.db_table, + } + + +class DeleteModel(AddModel): + """ + Deletion of a model. Takes the Model subclass that is being created. + """ + + def console_line(self): + "Returns the string to print on the console, e.g. 
' + Added field foo'" + return " - Deleted model %s.%s" % ( + self.model._meta.app_label, + self.model._meta.object_name, + ) + + def forwards_code(self): + return AddModel.backwards_code(self) + + def backwards_code(self): + return AddModel.forwards_code(self) + + +class _NullIssuesField(object): + """ + A field that might need to ask a question about rogue NULL values. + """ + + allow_third_null_option = False + irreversible = False + + IRREVERSIBLE_TEMPLATE = ''' + # User chose to not deal with backwards NULL issues for '%(model_name)s.%(field_name)s' + raise RuntimeError("Cannot reverse this migration. '%(model_name)s.%(field_name)s' and its values cannot be restored.")''' + + def deal_with_not_null_no_default(self, field, field_def): + # If it's a CharField or TextField that's blank, skip this step. + if isinstance(field, (CharField, TextField)) and field.blank: + field_def[2]['default'] = repr("") + return + # Oh dear. Ask them what to do. + print(" ? The field '%s.%s' does not have a default specified, yet is NOT NULL." % ( + self.model._meta.object_name, + field.name, + )) + print(" ? Since you are %s, you MUST specify a default" % self.null_reason) + print(" ? value to use for existing rows. Would you like to:") + print(" ? 1. Quit now, and add a default to the field in models.py") + print(" ? 2. Specify a one-off value to use for existing columns now") + if self.allow_third_null_option: + print(" ? 3. Disable the backwards migration by raising an exception.") + while True: + choice = raw_input(" ? Please select a choice: ") + if choice == "1": + sys.exit(1) + elif choice == "2": + break + elif choice == "3" and self.allow_third_null_option: + break + else: + print(" ! Invalid choice.") + if choice == "2": + self.add_one_time_default(field, field_def) + elif choice == "3": + self.irreversible = True + + def add_one_time_default(self, field, field_def): + # OK, they want to pick their own one-time default. Who are we to refuse? + print(" ? Please enter Python code for your one-off default value.") + print(" ? The datetime module is available, so you can do e.g. datetime.date.today()") + while True: + code = raw_input(" >>> ") + if not code: + print(" ! Please enter some code, or 'exit' (with no quotes) to exit.") + elif code == "exit": + sys.exit(1) + else: + try: + result = eval(code, {}, {"datetime": datetime_utils}) + except (SyntaxError, NameError) as e: + print(" ! Invalid input: %s" % e) + else: + break + # Right, add the default in. + field_def[2]['default'] = value_clean(result) + + def irreversable_code(self, field): + return self.IRREVERSIBLE_TEMPLATE % { + "model_name": self.model._meta.object_name, + "table_name": self.model._meta.db_table, + "field_name": field.name, + "field_column": field.column, + } + + +class AddField(Action, _NullIssuesField): + """ + Adds a field to a model. Takes a Model class and the field name. 
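+
+    The generated forwards() snippet looks like (the model and field names
+    here are a made-up example):
+
+        # Adding field 'Book.title'
+        db.add_column('app_book', 'title',
+                      self.gf('django.db.models.fields.CharField')(max_length=100),
+                      keep_default=False)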
+ """ + + null_reason = "adding this field" + + FORWARDS_TEMPLATE = ''' + # Adding field '%(model_name)s.%(field_name)s' + db.add_column(%(table_name)r, %(field_name)r, + %(field_def)s, + keep_default=False)'''[1:] + "\n" + + BACKWARDS_TEMPLATE = ''' + # Deleting field '%(model_name)s.%(field_name)s' + db.delete_column(%(table_name)r, %(field_column)r)'''[1:] + "\n" + + def __init__(self, model, field, field_def): + self.model = model + self.field = field + self.field_def = field_def + + # See if they've made a NOT NULL column but also have no default (far too common) + is_null = self.field.null + default = (self.field.default is not None) and (self.field.default is not NOT_PROVIDED) + + if not is_null and not default: + self.deal_with_not_null_no_default(self.field, self.field_def) + + def console_line(self): + "Returns the string to print on the console, e.g. ' + Added field foo'" + return " + Added field %s on %s.%s" % ( + self.field.name, + self.model._meta.app_label, + self.model._meta.object_name, + ) + + def forwards_code(self): + + return self.FORWARDS_TEMPLATE % { + "model_name": self.model._meta.object_name, + "table_name": self.model._meta.db_table, + "field_name": self.field.name, + "field_column": self.field.column, + "field_def": self.triple_to_def(self.field_def), + } + + def backwards_code(self): + return self.BACKWARDS_TEMPLATE % { + "model_name": self.model._meta.object_name, + "table_name": self.model._meta.db_table, + "field_name": self.field.name, + "field_column": self.field.column, + } + + +class DeleteField(AddField): + """ + Removes a field from a model. Takes a Model class and the field name. + """ + + null_reason = "removing this field" + allow_third_null_option = True + + def console_line(self): + "Returns the string to print on the console, e.g. ' + Added field foo'" + return " - Deleted field %s on %s.%s" % ( + self.field.name, + self.model._meta.app_label, + self.model._meta.object_name, + ) + + def forwards_code(self): + return AddField.backwards_code(self) + + def backwards_code(self): + if not self.irreversible: + return AddField.forwards_code(self) + else: + return self.irreversable_code(self.field) + + +class ChangeField(Action, _NullIssuesField): + """ + Changes a field's type/options on a model. + """ + + null_reason = "making this field non-nullable" + + FORWARDS_TEMPLATE = BACKWARDS_TEMPLATE = ''' + # Changing field '%(model_name)s.%(field_name)s' + db.alter_column(%(table_name)r, %(field_column)r, %(field_def)s)''' + + RENAME_TEMPLATE = ''' + # Renaming column for '%(model_name)s.%(field_name)s' to match new field type. 
+ db.rename_column(%(table_name)r, %(old_column)r, %(new_column)r)''' + + def __init__(self, model, old_field, new_field, old_def, new_def): + self.model = model + self.old_field = old_field + self.new_field = new_field + self.old_def = old_def + self.new_def = new_def + + # See if they've changed a not-null field to be null + new_default = (self.new_field.default is not None) and (self.new_field.default is not NOT_PROVIDED) + old_default = (self.old_field.default is not None) and (self.old_field.default is not NOT_PROVIDED) + if self.old_field.null and not self.new_field.null and not new_default: + self.deal_with_not_null_no_default(self.new_field, self.new_def) + if not self.old_field.null and self.new_field.null and not old_default: + self.null_reason = "making this field nullable" + self.allow_third_null_option = True + self.deal_with_not_null_no_default(self.old_field, self.old_def) + + def console_line(self): + "Returns the string to print on the console, e.g. ' + Added field foo'" + return " ~ Changed field %s on %s.%s" % ( + self.new_field.name, + self.model._meta.app_label, + self.model._meta.object_name, + ) + + def _code(self, old_field, new_field, new_def): + + output = "" + + if self.old_field.column != self.new_field.column: + output += self.RENAME_TEMPLATE % { + "model_name": self.model._meta.object_name, + "table_name": self.model._meta.db_table, + "field_name": new_field.name, + "old_column": old_field.column, + "new_column": new_field.column, + } + + output += self.FORWARDS_TEMPLATE % { + "model_name": self.model._meta.object_name, + "table_name": self.model._meta.db_table, + "field_name": new_field.name, + "field_column": new_field.column, + "field_def": self.triple_to_def(new_def), + } + + return output + + def forwards_code(self): + return self._code(self.old_field, self.new_field, self.new_def) + + def backwards_code(self): + if not self.irreversible: + return self._code(self.new_field, self.old_field, self.old_def) + else: + return self.irreversable_code(self.old_field) + + +class AddUnique(Action): + """ + Adds a unique constraint to a model. Takes a Model class and the field names. + """ + + FORWARDS_TEMPLATE = ''' + # Adding unique constraint on '%(model_name)s', fields %(field_names)s + db.create_unique(%(table_name)r, %(fields)r)'''[1:] + "\n" + + BACKWARDS_TEMPLATE = ''' + # Removing unique constraint on '%(model_name)s', fields %(field_names)s + db.delete_unique(%(table_name)r, %(fields)r)'''[1:] + "\n" + + prepend_backwards = True + + def __init__(self, model, fields): + self.model = model + self.fields = fields + + def console_line(self): + "Returns the string to print on the console, e.g. ' + Added field foo'" + return " + Added unique constraint for %s on %s.%s" % ( + [x.name for x in self.fields], + self.model._meta.app_label, + self.model._meta.object_name, + ) + + def forwards_code(self): + + return self.FORWARDS_TEMPLATE % { + "model_name": self.model._meta.object_name, + "table_name": self.model._meta.db_table, + "fields": [field.column for field in self.fields], + "field_names": [field.name for field in self.fields], + } + + def backwards_code(self): + return self.BACKWARDS_TEMPLATE % { + "model_name": self.model._meta.object_name, + "table_name": self.model._meta.db_table, + "fields": [field.column for field in self.fields], + "field_names": [field.name for field in self.fields], + } + + +class DeleteUnique(AddUnique): + """ + Removes a unique constraint from a model. Takes a Model class and the field names. 
+ """ + + prepend_forwards = True + prepend_backwards = False + + def console_line(self): + "Returns the string to print on the console, e.g. ' + Added field foo'" + return " - Deleted unique constraint for %s on %s.%s" % ( + [x.name for x in self.fields], + self.model._meta.app_label, + self.model._meta.object_name, + ) + + def forwards_code(self): + return AddUnique.backwards_code(self) + + def backwards_code(self): + return AddUnique.forwards_code(self) + + +class AddIndex(AddUnique): + """ + Adds an index to a model field[s]. Takes a Model class and the field names. + """ + + FORWARDS_TEMPLATE = ''' + # Adding index on '%(model_name)s', fields %(field_names)s + db.create_index(%(table_name)r, %(fields)r)'''[1:] + "\n" + + BACKWARDS_TEMPLATE = ''' + # Removing index on '%(model_name)s', fields %(field_names)s + db.delete_index(%(table_name)r, %(fields)r)'''[1:] + "\n" + + def console_line(self): + "Returns the string to print on the console, e.g. ' + Added field foo'" + return " + Added index for %s on %s.%s" % ( + [x.name for x in self.fields], + self.model._meta.app_label, + self.model._meta.object_name, + ) + + +class DeleteIndex(AddIndex): + """ + Deletes an index off a model field[s]. Takes a Model class and the field names. + """ + + def console_line(self): + "Returns the string to print on the console, e.g. ' + Added field foo'" + return " + Deleted index for %s on %s.%s" % ( + [x.name for x in self.fields], + self.model._meta.app_label, + self.model._meta.object_name, + ) + + def forwards_code(self): + return AddIndex.backwards_code(self) + + def backwards_code(self): + return AddIndex.forwards_code(self) + + +class AddM2M(Action): + """ + Adds a unique constraint to a model. Takes a Model class and the field names. + """ + + FORWARDS_TEMPLATE = ''' + # Adding M2M table for field %(field_name)s on '%(model_name)s' + m2m_table_name = %(table_name)s + db.create_table(m2m_table_name, ( + ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)), + (%(left_field)r, models.ForeignKey(orm[%(left_model_key)r], null=False)), + (%(right_field)r, models.ForeignKey(orm[%(right_model_key)r], null=False)) + )) + db.create_unique(m2m_table_name, [%(left_column)r, %(right_column)r])'''[1:] + "\n" + + BACKWARDS_TEMPLATE = ''' + # Removing M2M table for field %(field_name)s on '%(model_name)s' + db.delete_table(%(table_name)s)'''[1:] + "\n" + + def __init__(self, model, field): + self.model = model + self.field = field + + def console_line(self): + "Returns the string to print on the console, e.g. 
' + Added field foo'" + return " + Added M2M table for %s on %s.%s" % ( + self.field.name, + self.model._meta.app_label, + self.model._meta.object_name, + ) + + def table_name(self): + # This is part of a workaround for the fact that Django uses + # different shortening for automatically generated m2m table names + # (as opposed to any explicitly specified table name) + f = self.field + explicit = f.db_table + if explicit: + return "%r" % explicit + else: + auto = "%s_%s" % (self.model._meta.db_table, f.name) + return 'db.shorten_name(%r)' % auto + + def forwards_code(self): + + return self.FORWARDS_TEMPLATE % { + "model_name": self.model._meta.object_name, + "field_name": self.field.name, + "table_name": self.table_name(), + "left_field": self.field.m2m_column_name()[:-3], # Remove the _id part + "left_column": self.field.m2m_column_name(), + "left_model_key": model_key(self.model), + "right_field": self.field.m2m_reverse_name()[:-3], # Remove the _id part + "right_column": self.field.m2m_reverse_name(), + "right_model_key": model_key(self.field.rel.to), + } + + def backwards_code(self): + + return self.BACKWARDS_TEMPLATE % { + "model_name": self.model._meta.object_name, + "field_name": self.field.name, + "table_name": self.table_name(), + } + + +class DeleteM2M(AddM2M): + """ + Adds a unique constraint to a model. Takes a Model class and the field names. + """ + + def console_line(self): + "Returns the string to print on the console, e.g. ' + Added field foo'" + return " - Deleted M2M table for %s on %s.%s" % ( + self.field.name, + self.model._meta.app_label, + self.model._meta.object_name, + ) + + def forwards_code(self): + return AddM2M.backwards_code(self) + + def backwards_code(self): + return AddM2M.forwards_code(self) + diff --git a/awx/lib/site-packages/south/creator/changes.py b/awx/lib/site-packages/south/creator/changes.py new file mode 100644 index 0000000000..9a11d9eb3b --- /dev/null +++ b/awx/lib/site-packages/south/creator/changes.py @@ -0,0 +1,506 @@ +""" +Contains things to detect changes - either using options passed in on the +commandline, or by using autodetection, etc. +""" + +from __future__ import print_function + +from django.db import models +from django.contrib.contenttypes.generic import GenericRelation +from django.utils.datastructures import SortedDict + +from south.creator.freezer import remove_useless_attributes, freeze_apps, model_key +from south.utils import auto_through +from south.utils.py3 import string_types + +class BaseChanges(object): + """ + Base changes class. + """ + def suggest_name(self): + return '' + + def split_model_def(self, model, model_def): + """ + Given a model and its model def (a dict of field: triple), returns three + items: the real fields dict, the Meta dict, and the M2M fields dict. 
+ """ + real_fields = SortedDict() + meta = SortedDict() + m2m_fields = SortedDict() + for name, triple in model_def.items(): + if name == "Meta": + meta = triple + elif isinstance(model._meta.get_field_by_name(name)[0], models.ManyToManyField): + m2m_fields[name] = triple + else: + real_fields[name] = triple + return real_fields, meta, m2m_fields + + def current_model_from_key(self, key): + app_label, model_name = key.split(".") + return models.get_model(app_label, model_name) + + def current_field_from_key(self, key, fieldname): + app_label, model_name = key.split(".") + # Special, for the magical field from order_with_respect_to + if fieldname == "_order": + field = models.IntegerField() + field.name = "_order" + field.attname = "_order" + field.column = "_order" + field.default = 0 + return field + # Otherwise, normal. + return models.get_model(app_label, model_name)._meta.get_field_by_name(fieldname)[0] + + +class AutoChanges(BaseChanges): + """ + Detects changes by 'diffing' two sets of frozen model definitions. + """ + + # Field types we don't generate add/remove field changes for. + IGNORED_FIELD_TYPES = [ + GenericRelation, + ] + + def __init__(self, migrations, old_defs, old_orm, new_defs): + self.migrations = migrations + self.old_defs = old_defs + self.old_orm = old_orm + self.new_defs = new_defs + + def suggest_name(self): + parts = ["auto"] + for change_name, params in self.get_changes(): + if change_name == "AddModel": + parts.append("add_%s" % params['model']._meta.object_name.lower()) + elif change_name == "DeleteModel": + parts.append("del_%s" % params['model']._meta.object_name.lower()) + elif change_name == "AddField": + parts.append("add_field_%s_%s" % ( + params['model']._meta.object_name.lower(), + params['field'].name, + )) + elif change_name == "DeleteField": + parts.append("del_field_%s_%s" % ( + params['model']._meta.object_name.lower(), + params['field'].name, + )) + elif change_name == "ChangeField": + parts.append("chg_field_%s_%s" % ( + params['model']._meta.object_name.lower(), + params['new_field'].name, + )) + elif change_name == "AddUnique": + parts.append("add_unique_%s_%s" % ( + params['model']._meta.object_name.lower(), + "_".join([x.name for x in params['fields']]), + )) + elif change_name == "DeleteUnique": + parts.append("del_unique_%s_%s" % ( + params['model']._meta.object_name.lower(), + "_".join([x.name for x in params['fields']]), + )) + elif change_name == "AddIndex": + parts.append("add_index_%s_%s" % ( + params['model']._meta.object_name.lower(), + "_".join([x.name for x in params['fields']]), + )) + elif change_name == "DeleteIndex": + parts.append("del_index_%s_%s" % ( + params['model']._meta.object_name.lower(), + "_".join([x.name for x in params['fields']]), + )) + return ("__".join(parts))[:70] + + def get_changes(self): + """ + Returns the difference between the old and new sets of models as a 5-tuple: + added_models, deleted_models, added_fields, deleted_fields, changed_fields + """ + + deleted_models = set() + + # See if anything's vanished + for key in self.old_defs: + if key not in self.new_defs: + # We shouldn't delete it if it was managed=False + old_fields, old_meta, old_m2ms = self.split_model_def(self.old_orm[key], self.old_defs[key]) + if old_meta.get("managed", "True") != "False": + # Alright, delete it. + yield ("DeleteModel", { + "model": self.old_orm[key], + "model_def": old_fields, + }) + # Also make sure we delete any M2Ms it had. + for fieldname in old_m2ms: + # Only delete its stuff if it wasn't a through=. 
+ field = self.old_orm[key + ":" + fieldname] + if auto_through(field): + yield ("DeleteM2M", {"model": self.old_orm[key], "field": field}) + # And any index/uniqueness constraints it had + for attr, operation in (("unique_together", "DeleteUnique"), ("index_together", "DeleteIndex")): + together = eval(old_meta.get(attr, "[]")) + if together: + # If it's only a single tuple, make it into the longer one + if isinstance(together[0], string_types): + together = [together] + # For each combination, make an action for it + for fields in together: + yield (operation, { + "model": self.old_orm[key], + "fields": [self.old_orm[key]._meta.get_field_by_name(x)[0] for x in fields], + }) + # We always add it in here so we ignore it later + deleted_models.add(key) + + # Or appeared + for key in self.new_defs: + if key not in self.old_defs: + # We shouldn't add it if it's managed=False + new_fields, new_meta, new_m2ms = self.split_model_def(self.current_model_from_key(key), self.new_defs[key]) + if new_meta.get("managed", "True") != "False": + yield ("AddModel", { + "model": self.current_model_from_key(key), + "model_def": new_fields, + }) + # Also make sure we add any M2Ms it has. + for fieldname in new_m2ms: + # Only create its stuff if it wasn't a through=. + field = self.current_field_from_key(key, fieldname) + if auto_through(field): + yield ("AddM2M", {"model": self.current_model_from_key(key), "field": field}) + # And any index/uniqueness constraints it has + for attr, operation in (("unique_together", "AddUnique"), ("index_together", "AddIndex")): + together = eval(new_meta.get(attr, "[]")) + if together: + # If it's only a single tuple, make it into the longer one + if isinstance(together[0], string_types): + together = [together] + # For each combination, make an action for it + for fields in together: + yield (operation, { + "model": self.current_model_from_key(key), + "fields": [self.current_model_from_key(key)._meta.get_field_by_name(x)[0] for x in fields], + }) + + # Now, for every model that's stayed the same, check its fields. + for key in self.old_defs: + if key not in deleted_models: + + old_fields, old_meta, old_m2ms = self.split_model_def(self.old_orm[key], self.old_defs[key]) + new_fields, new_meta, new_m2ms = self.split_model_def(self.current_model_from_key(key), self.new_defs[key]) + + # Do nothing for models which are now not managed. + if new_meta.get("managed", "True") == "False": + continue + + # Find fields that have vanished. + for fieldname in old_fields: + if fieldname not in new_fields: + # Don't do it for any fields we're ignoring + field = self.old_orm[key + ":" + fieldname] + field_allowed = True + for field_type in self.IGNORED_FIELD_TYPES: + if isinstance(field, field_type): + field_allowed = False + if field_allowed: + # Looks alright. + yield ("DeleteField", { + "model": self.old_orm[key], + "field": field, + "field_def": old_fields[fieldname], + }) + + # And ones that have appeared + for fieldname in new_fields: + if fieldname not in old_fields: + # Don't do it for any fields we're ignoring + field = self.current_field_from_key(key, fieldname) + field_allowed = True + for field_type in self.IGNORED_FIELD_TYPES: + if isinstance(field, field_type): + field_allowed = False + if field_allowed: + # Looks alright. 
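+                            # Note: this ("AddField", {...}) tuple is consumed
+                            # by south.creator.actions.AddField, which renders
+                            # it into a db.add_column(...) call and, for a NOT
+                            # NULL field with no default, first asks the user
+                            # for a one-off value (see _NullIssuesField).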
+ yield ("AddField", { + "model": self.current_model_from_key(key), + "field": field, + "field_def": new_fields[fieldname], + }) + + # Find M2Ms that have vanished + for fieldname in old_m2ms: + if fieldname not in new_m2ms: + # Only delete its stuff if it wasn't a through=. + field = self.old_orm[key + ":" + fieldname] + if auto_through(field): + yield ("DeleteM2M", {"model": self.old_orm[key], "field": field}) + + # Find M2Ms that have appeared + for fieldname in new_m2ms: + if fieldname not in old_m2ms: + # Only create its stuff if it wasn't a through=. + field = self.current_field_from_key(key, fieldname) + if auto_through(field): + yield ("AddM2M", {"model": self.current_model_from_key(key), "field": field}) + + # For the ones that exist in both models, see if they were changed + for fieldname in set(old_fields).intersection(set(new_fields)): + # Non-index changes + if self.different_attributes( + remove_useless_attributes(old_fields[fieldname], True, True), + remove_useless_attributes(new_fields[fieldname], True, True)): + yield ("ChangeField", { + "model": self.current_model_from_key(key), + "old_field": self.old_orm[key + ":" + fieldname], + "new_field": self.current_field_from_key(key, fieldname), + "old_def": old_fields[fieldname], + "new_def": new_fields[fieldname], + }) + # Index changes + old_field = self.old_orm[key + ":" + fieldname] + new_field = self.current_field_from_key(key, fieldname) + if not old_field.db_index and new_field.db_index: + # They've added an index. + yield ("AddIndex", { + "model": self.current_model_from_key(key), + "fields": [new_field], + }) + if old_field.db_index and not new_field.db_index: + # They've removed an index. + yield ("DeleteIndex", { + "model": self.old_orm[key], + "fields": [old_field], + }) + # See if their uniques have changed + if old_field.unique != new_field.unique: + # Make sure we look at the one explicitly given to see what happened + if new_field.unique: + yield ("AddUnique", { + "model": self.current_model_from_key(key), + "fields": [new_field], + }) + else: + yield ("DeleteUnique", { + "model": self.old_orm[key], + "fields": [old_field], + }) + + # See if there's any M2Ms that have changed. + for fieldname in set(old_m2ms).intersection(set(new_m2ms)): + old_field = self.old_orm[key + ":" + fieldname] + new_field = self.current_field_from_key(key, fieldname) + # Have they _added_ a through= ? + if auto_through(old_field) and not auto_through(new_field): + yield ("DeleteM2M", {"model": self.old_orm[key], "field": old_field}) + # Have they _removed_ a through= ? + if not auto_through(old_field) and auto_through(new_field): + yield ("AddM2M", {"model": self.current_model_from_key(key), "field": new_field}) + + ## See if the {index,unique}_togethers have changed + for attr, add_operation, del_operation in (("unique_together", "AddUnique", "DeleteUnique"), ("index_together", "AddIndex", "DeleteIndex")): + # First, normalise them into lists of sets. 
+ old_together = eval(old_meta.get(attr, "[]")) + new_together = eval(new_meta.get(attr, "[]")) + if old_together and isinstance(old_together[0], string_types): + old_together = [old_together] + if new_together and isinstance(new_together[0], string_types): + new_together = [new_together] + old_together = list(map(set, old_together)) + new_together = list(map(set, new_together)) + # See if any appeared or disappeared + for item in old_together: + if item not in new_together: + yield (del_operation, { + "model": self.old_orm[key], + "fields": [self.old_orm[key + ":" + x] for x in item], + }) + for item in new_together: + if item not in old_together: + yield (add_operation, { + "model": self.current_model_from_key(key), + "fields": [self.current_field_from_key(key, x) for x in item], + }) + + @classmethod + def is_triple(cls, triple): + "Returns whether the argument is a triple." + return isinstance(triple, (list, tuple)) and len(triple) == 3 and \ + isinstance(triple[0], string_types) and \ + isinstance(triple[1], (list, tuple)) and \ + isinstance(triple[2], dict) + + @classmethod + def different_attributes(cls, old, new): + """ + Backwards-compat comparison that ignores orm. on the RHS and not the left + and which knows django.db.models.fields.CharField = models.CharField. + Has a whole load of tests in tests/autodetection.py. + """ + + # If they're not triples, just do normal comparison + if not cls.is_triple(old) or not cls.is_triple(new): + return old != new + + # Expand them out into parts + old_field, old_pos, old_kwd = old + new_field, new_pos, new_kwd = new + + # Copy the positional and keyword arguments so we can compare them and pop off things + old_pos, new_pos = old_pos[:], new_pos[:] + old_kwd = dict(old_kwd.items()) + new_kwd = dict(new_kwd.items()) + + # Remove comparison of the existence of 'unique', that's done elsewhere. + # TODO: Make this work for custom fields where unique= means something else? + if "unique" in old_kwd: + del old_kwd['unique'] + if "unique" in new_kwd: + del new_kwd['unique'] + + # If the first bit is different, check it's not by dj.db.models... + if old_field != new_field: + if old_field.startswith("models.") and (new_field.startswith("django.db.models") \ + or new_field.startswith("django.contrib.gis")): + if old_field.split(".")[-1] != new_field.split(".")[-1]: + return True + else: + # Remove those fields from the final comparison + old_field = new_field = "" + + # If there's a positional argument in the first, and a 'to' in the second, + # see if they're actually comparable. + if (old_pos and "to" in new_kwd) and ("orm" in new_kwd['to'] and "orm" not in old_pos[0]): + # Do special comparison to fix #153 + try: + if old_pos[0] != new_kwd['to'].split("'")[1].split(".")[1]: + return True + except IndexError: + pass # Fall back to next comparison + # Remove those attrs from the final comparison + old_pos = old_pos[1:] + del new_kwd['to'] + + return old_field != new_field or old_pos != new_pos or old_kwd != new_kwd + + +class ManualChanges(BaseChanges): + """ + Detects changes by reading the command line. 
+ """ + + def __init__(self, migrations, added_models, added_fields, added_indexes): + self.migrations = migrations + self.added_models = added_models + self.added_fields = added_fields + self.added_indexes = added_indexes + + def suggest_name(self): + bits = [] + for model_name in self.added_models: + bits.append('add_model_%s' % model_name) + for field_name in self.added_fields: + bits.append('add_field_%s' % field_name) + for index_name in self.added_indexes: + bits.append('add_index_%s' % index_name) + return '_'.join(bits).replace('.', '_') + + def get_changes(self): + # Get the model defs so we can use them for the yield later + model_defs = freeze_apps([self.migrations.app_label()]) + # Make the model changes + for model_name in self.added_models: + model = models.get_model(self.migrations.app_label(), model_name) + real_fields, meta, m2m_fields = self.split_model_def(model, model_defs[model_key(model)]) + yield ("AddModel", { + "model": model, + "model_def": real_fields, + }) + # And the field changes + for field_desc in self.added_fields: + try: + model_name, field_name = field_desc.split(".") + except (TypeError, ValueError): + raise ValueError("%r is not a valid field description." % field_desc) + model = models.get_model(self.migrations.app_label(), model_name) + real_fields, meta, m2m_fields = self.split_model_def(model, model_defs[model_key(model)]) + yield ("AddField", { + "model": model, + "field": model._meta.get_field_by_name(field_name)[0], + "field_def": real_fields[field_name], + }) + # And the indexes + for field_desc in self.added_indexes: + try: + model_name, field_name = field_desc.split(".") + except (TypeError, ValueError): + print("%r is not a valid field description." % field_desc) + model = models.get_model(self.migrations.app_label(), model_name) + yield ("AddIndex", { + "model": model, + "fields": [model._meta.get_field_by_name(field_name)[0]], + }) + + +class InitialChanges(BaseChanges): + """ + Creates all models; handles --initial. 
+ """ + def suggest_name(self): + return 'initial' + + def __init__(self, migrations): + self.migrations = migrations + + def get_changes(self): + # Get the frozen models for this app + model_defs = freeze_apps([self.migrations.app_label()]) + + for model in models.get_models(models.get_app(self.migrations.app_label())): + + # Don't do anything for unmanaged, abstract or proxy models + if model._meta.abstract or getattr(model._meta, "proxy", False) or not getattr(model._meta, "managed", True): + continue + + real_fields, meta, m2m_fields = self.split_model_def(model, model_defs[model_key(model)]) + + # Firstly, add the main table and fields + yield ("AddModel", { + "model": model, + "model_def": real_fields, + }) + + # Then, add any indexing/uniqueness that's around + if meta: + for attr, operation in (("unique_together", "AddUnique"), ("index_together", "AddIndex")): + together = eval(meta.get(attr, "[]")) + if together: + # If it's only a single tuple, make it into the longer one + if isinstance(together[0], string_types): + together = [together] + # For each combination, make an action for it + for fields in together: + yield (operation, { + "model": model, + "fields": [model._meta.get_field_by_name(x)[0] for x in fields], + }) + + # Finally, see if there's some M2M action + for name, triple in m2m_fields.items(): + field = model._meta.get_field_by_name(name)[0] + # But only if it's not through=foo (#120) + if field.rel.through: + try: + # Django 1.1 and below + through_model = field.rel.through_model + except AttributeError: + # Django 1.2 + through_model = field.rel.through + if (not field.rel.through) or getattr(through_model._meta, "auto_created", False): + yield ("AddM2M", { + "model": model, + "field": field, + }) diff --git a/awx/lib/site-packages/south/creator/freezer.py b/awx/lib/site-packages/south/creator/freezer.py new file mode 100644 index 0000000000..0f98cea0d7 --- /dev/null +++ b/awx/lib/site-packages/south/creator/freezer.py @@ -0,0 +1,192 @@ +""" +Handles freezing of models into FakeORMs. +""" + +from __future__ import print_function + +import sys + +from django.db import models +from django.db.models.base import ModelBase, Model +from django.contrib.contenttypes.generic import GenericRelation + +from south.utils import get_attribute, auto_through +from south import modelsinspector +from south.utils.py3 import string_types + +def freeze_apps(apps): + """ + Takes a list of app labels, and returns a string of their frozen form. + """ + if isinstance(apps, string_types): + apps = [apps] + frozen_models = set() + # For each app, add in all its models + for app in apps: + for model in models.get_models(models.get_app(app)): + # Only add if it's not abstract or proxy + if not model._meta.abstract and not getattr(model._meta, "proxy", False): + frozen_models.add(model) + # Now, add all the dependencies + for model in list(frozen_models): + frozen_models.update(model_dependencies(model)) + # Serialise! + model_defs = {} + model_classes = {} + for model in frozen_models: + model_defs[model_key(model)] = prep_for_freeze(model) + model_classes[model_key(model)] = model + # Check for any custom fields that failed to freeze. + missing_fields = False + for key, fields in model_defs.items(): + for field_name, value in fields.items(): + if value is None: + missing_fields = True + model_class = model_classes[key] + field_class = model_class._meta.get_field_by_name(field_name)[0] + print(" ! Cannot freeze field '%s.%s'" % (key, field_name)) + print(" ! 
(this field has class %s.%s)" % (field_class.__class__.__module__, field_class.__class__.__name__)) + if missing_fields: + print("") + print(" ! South cannot introspect some fields; this is probably because they are custom") + print(" ! fields. If they worked in 0.6 or below, this is because we have removed the") + print(" ! models parser (it often broke things).") + print(" ! To fix this, read http://south.aeracode.org/wiki/MyFieldsDontWork") + sys.exit(1) + + return model_defs + +def freeze_apps_to_string(apps): + return pprint_frozen_models(freeze_apps(apps)) + +### + +def model_key(model): + "For a given model, return 'appname.modelname'." + return "%s.%s" % (model._meta.app_label, model._meta.object_name.lower()) + +def prep_for_freeze(model): + """ + Takes a model and returns the ready-to-serialise dict (all you need + to do is just pretty-print it). + """ + fields = modelsinspector.get_model_fields(model, m2m=True) + # Remove useless attributes (like 'choices') + for name, field in fields.items(): + fields[name] = remove_useless_attributes(field) + # See if there's a Meta + fields['Meta'] = remove_useless_meta(modelsinspector.get_model_meta(model)) + # Add in our own special items to track the object name and managed + fields['Meta']['object_name'] = model._meta.object_name # Special: not eval'able. + if not getattr(model._meta, "managed", True): + fields['Meta']['managed'] = repr(model._meta.managed) + return fields + +### Dependency resolvers + +def model_dependencies(model, checked_models=None): + """ + Returns a set of models this one depends on to be defined; things like + OneToOneFields as ID, ForeignKeys everywhere, etc. + """ + depends = set() + checked_models = checked_models or set() + # Get deps for each field + for field in model._meta.fields + model._meta.many_to_many: + depends.update(field_dependencies(field, checked_models)) + # Add in any non-abstract bases + for base in model.__bases__: + if issubclass(base, models.Model) and hasattr(base, '_meta') and not base._meta.abstract: + depends.add(base) + # Now recurse + new_to_check = depends - checked_models + while new_to_check: + checked_model = new_to_check.pop() + if checked_model == model or checked_model in checked_models: + continue + checked_models.add(checked_model) + deps = model_dependencies(checked_model, checked_models) + # Loop through dependencies... + for dep in deps: + # If the new dep is not already checked, add to the queue + if (dep not in depends) and (dep not in new_to_check) and (dep not in checked_models): + new_to_check.add(dep) + depends.add(dep) + return depends + +def field_dependencies(field, checked_models=None): + checked_models = checked_models or set() + depends = set() + arg_defs, kwarg_defs = modelsinspector.matching_details(field) + for attrname, options in arg_defs + list(kwarg_defs.values()): + if options.get("ignore_if_auto_through", False) and auto_through(field): + continue + if options.get("is_value", False): + value = attrname + elif attrname == 'rel.through' and hasattr(getattr(field, 'rel', None), 'through_model'): + # Hack for django 1.1 and below, where the through model is stored + # in rel.through_model while rel.through stores only the model name. 
+ value = field.rel.through_model + else: + try: + value = get_attribute(field, attrname) + except AttributeError: + if options.get("ignore_missing", False): + continue + raise + if isinstance(value, Model): + value = value.__class__ + if not isinstance(value, ModelBase): + continue + if getattr(value._meta, "proxy", False): + value = value._meta.proxy_for_model + if value in checked_models: + continue + checked_models.add(value) + depends.add(value) + depends.update(model_dependencies(value, checked_models)) + + return depends + +### Prettyprinters + +def pprint_frozen_models(models): + return "{\n %s\n }" % ",\n ".join([ + "%r: %s" % (name, pprint_fields(fields)) + for name, fields in sorted(models.items()) + ]) + +def pprint_fields(fields): + return "{\n %s\n }" % ",\n ".join([ + "%r: %r" % (name, defn) + for name, defn in sorted(fields.items()) + ]) + +### Output sanitisers + +USELESS_KEYWORDS = ["choices", "help_text", "verbose_name"] +USELESS_DB_KEYWORDS = ["related_name", "default", "blank"] # Important for ORM, not for DB. +INDEX_KEYWORDS = ["db_index"] + +def remove_useless_attributes(field, db=False, indexes=False): + "Removes useless (for database) attributes from the field's defn." + # Work out what to remove, and remove it. + keywords = USELESS_KEYWORDS[:] + if db: + keywords += USELESS_DB_KEYWORDS[:] + if indexes: + keywords += INDEX_KEYWORDS[:] + if field: + for name in keywords: + if name in field[2]: + del field[2][name] + return field + +USELESS_META = ["verbose_name", "verbose_name_plural"] +def remove_useless_meta(meta): + "Removes useless (for database) attributes from the table's meta." + if meta: + for name in USELESS_META: + if name in meta: + del meta[name] + return meta diff --git a/awx/lib/site-packages/south/db/__init__.py b/awx/lib/site-packages/south/db/__init__.py new file mode 100644 index 0000000000..9927c27f0c --- /dev/null +++ b/awx/lib/site-packages/south/db/__init__.py @@ -0,0 +1,82 @@ + +# Establish the common DatabaseOperations instance, which we call 'db'. 
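+# In a migration this module is used as, for example:
+#     from south.db import db
+#     db.create_table('myapp_mymodel', (...))
+# Below, 'dbs' maps each configured database alias to a backend-specific
+# DatabaseOperations instance, and 'db' is the instance for DEFAULT_DB_ALIAS.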
+# Much thanks to cmkmrr for a lot of the code base here + +from django.conf import settings +import sys + +# A few aliases, because there's FQMNs now +engine_modules = { + 'django.db.backends.postgresql_psycopg2': 'postgresql_psycopg2', + 'django.db.backends.sqlite3': 'sqlite3', + 'django.db.backends.mysql': 'mysql', + 'mysql_oursql.standard': 'mysql', + 'django.db.backends.oracle': 'oracle', + 'sql_server.pyodbc': 'sql_server.pyodbc', #django-pyodbc + 'sqlserver_ado': 'sql_server.pyodbc', #django-mssql + 'firebird': 'firebird', #django-firebird + 'django.contrib.gis.db.backends.postgis': 'postgresql_psycopg2', + 'django.contrib.gis.db.backends.spatialite': 'sqlite3', + 'django.contrib.gis.db.backends.mysql': 'mysql', + 'django.contrib.gis.db.backends.oracle': 'oracle', + 'doj.backends.zxjdbc.postgresql': 'postgresql_psycopg2', #django-jython + 'doj.backends.zxjdbc.mysql': 'mysql', #django-jython + 'doj.backends.zxjdbc.oracle': 'oracle', #django-jython +} + +# First, work out if we're multi-db or not, and which databases we have +try: + from django.db import DEFAULT_DB_ALIAS +except ImportError: + #### 1.1 or below #### + # We'll 'fake' multi-db; set the default alias + DEFAULT_DB_ALIAS = 'default' + # SOUTH_DATABASE_ADAPTER is an optional override if you have a different module + engine = getattr(settings, "SOUTH_DATABASE_ADAPTER", "south.db.%s" % settings.DATABASE_ENGINE) + # And then, we have one database with one engine + db_engines = {DEFAULT_DB_ALIAS: engine} +else: + #### 1.2 or above #### + # Loop over the defined databases, gathering up their engines + db_engines = dict([ + # Note we check to see if contrib.gis has overridden us. + (alias, "south.db.%s" % engine_modules[db_settings['ENGINE']]) + for alias, db_settings in settings.DATABASES.items() + if db_settings['ENGINE'] in engine_modules + ]) + # Update with any overrides + db_engines.update(getattr(settings, "SOUTH_DATABASE_ADAPTERS", {})) + # Check there's no None engines, or... + for alias, engine in db_engines.items(): + if engine is None: + # They've used a backend we don't support + sys.stderr.write( + ( + "There is no South database module for your database backend '%s'. " + \ + "Please either choose a supported database, check for " + \ + "SOUTH_DATABASE_ADAPTER[S] settings, " + \ + "or remove South from INSTALLED_APPS.\n" + ) % (settings.DATABASES[alias]['ENGINE'],) + ) + sys.exit(1) + +# Now, turn that into a dict of <alias: south db module> +dbs = {} +try: + for alias, module_name in db_engines.items(): + module = __import__(module_name, {}, {}, ['']) + dbs[alias] = module.DatabaseOperations(alias) +except ImportError: + # This error should only be triggered on 1.1 and below. + sys.stderr.write( + ( + "There is no South database module '%s' for your database. 
" + \ + "Please either choose a supported database, check for " + \ + "SOUTH_DATABASE_ADAPTER[S] settings, " + \ + "or remove South from INSTALLED_APPS.\n" + ) % (module_name,) + ) + sys.exit(1) + +# Finally, to make old migrations work, keep 'db' around as the default database +db = dbs[DEFAULT_DB_ALIAS] diff --git a/awx/lib/site-packages/south/db/firebird.py b/awx/lib/site-packages/south/db/firebird.py new file mode 100644 index 0000000000..c55a82517f --- /dev/null +++ b/awx/lib/site-packages/south/db/firebird.py @@ -0,0 +1,353 @@ +# firebird + +from __future__ import print_function + +import datetime + +from django.db import connection, models +from django.core.management.color import no_style +from django.db.utils import DatabaseError + +from south.db import generic +from south.utils.py3 import string_types + +class DatabaseOperations(generic.DatabaseOperations): + backend_name = 'firebird' + alter_string_set_type = 'ALTER %(column)s TYPE %(type)s' + alter_string_set_default = 'ALTER %(column)s SET DEFAULT %(default)s;' + alter_string_drop_null = '' + add_column_string = 'ALTER TABLE %s ADD %s;' + delete_column_string = 'ALTER TABLE %s DROP %s;' + rename_table_sql = '' + + # Features + allows_combined_alters = False + has_booleans = False + + def _fill_constraint_cache(self, db_name, table_name): + self._constraint_cache.setdefault(db_name, {}) + self._constraint_cache[db_name][table_name] = {} + + rows = self.execute(""" + SELECT + rc.RDB$CONSTRAINT_NAME, + rc.RDB$CONSTRAINT_TYPE, + cc.RDB$TRIGGER_NAME + FROM rdb$relation_constraints rc + JOIN rdb$check_constraints cc + ON rc.rdb$constraint_name = cc.rdb$constraint_name + WHERE rc.rdb$constraint_type = 'NOT NULL' + AND rc.rdb$relation_name = '%s' + """ % table_name) + + for constraint, kind, column in rows: + self._constraint_cache[db_name][table_name].setdefault(column, set()) + self._constraint_cache[db_name][table_name][column].add((kind, constraint)) + return + + def _alter_column_set_null(self, table_name, column_name, is_null): + sql = """ + UPDATE RDB$RELATION_FIELDS SET RDB$NULL_FLAG = %(null_flag)s + WHERE RDB$FIELD_NAME = '%(column)s' + AND RDB$RELATION_NAME = '%(table_name)s' + """ + null_flag = 'NULL' if is_null else '1' + return sql % { + 'null_flag': null_flag, + 'column': column_name.upper(), + 'table_name': table_name.upper() + } + + def _column_has_default(self, params): + sql = """ + SELECT a.RDB$DEFAULT_VALUE + FROM RDB$RELATION_FIELDS a + WHERE a.RDB$FIELD_NAME = '%(column)s' + AND a.RDB$RELATION_NAME = '%(table_name)s' + """ + value = self.execute(sql % params) + return True if value else False + + + def _alter_set_defaults(self, field, name, params, sqls): + "Subcommand of alter_column that sets default values (overrideable)" + # Next, set any default + if not field.null and field.has_default(): + default = field.get_default() + sqls.append(('ALTER COLUMN %s SET DEFAULT %%s ' % (self.quote_name(name),), [default])) + elif self._column_has_default(params): + sqls.append(('ALTER COLUMN %s DROP DEFAULT' % (self.quote_name(name),), [])) + + + @generic.invalidate_table_constraints + def create_table(self, table_name, fields): + qn = self.quote_name(table_name) + columns = [] + autoinc_sql = '' + + for field_name, field in fields: + col = self.column_sql(table_name, field_name, field) + if not col: + continue + + columns.append(col) + if isinstance(field, models.AutoField): + field_name = field.db_column or field.column + autoinc_sql = connection.ops.autoinc_sql(table_name, field_name) + + sql = 'CREATE TABLE %s (%s);' 
% (qn, ', '.join([col for col in columns]))
+        self.execute(sql)
+        if autoinc_sql:
+            self.execute(autoinc_sql[0])
+            self.execute(autoinc_sql[1])
+
+    def rename_table(self, old_table_name, table_name):
+        """
+        Renaming tables is not supported by Firebird.
+        It would involve recreating all related objects (stored procedures,
+        views, triggers, etc.).
+        """
+        pass
+
+    @generic.invalidate_table_constraints
+    def delete_table(self, table_name, cascade=False):
+        """
+        Deletes the table 'table_name'.
+        Firebird will also delete any triggers associated with the table.
+        """
+        super(DatabaseOperations, self).delete_table(table_name, cascade=False)
+
+        # Also, drop sequence if exists
+        sql = connection.ops.drop_sequence_sql(table_name)
+        if sql:
+            try:
+                self.execute(sql)
+            except:
+                pass
+
+    def column_sql(self, table_name, field_name, field, tablespace='', with_name=True, field_prepared=False):
+        """
+        Creates the SQL snippet for a column. Used by add_column and add_table.
+        """
+
+        # If the field hasn't already been told its attribute name, do so.
+        if not field_prepared:
+            field.set_attributes_from_name(field_name)
+
+        # hook for the field to do any resolution prior to its attributes being queried
+        if hasattr(field, 'south_init'):
+            field.south_init()
+
+        # Possible hook to fiddle with the fields (e.g. defaults & TEXT on MySQL)
+        field = self._field_sanity(field)
+
+        try:
+            sql = field.db_type(connection=self._get_connection())
+        except TypeError:
+            sql = field.db_type()
+
+        if sql:
+            # Some callers, like the sqlite stuff, just want the extended type.
+            if with_name:
+                field_output = [self.quote_name(field.column), sql]
+            else:
+                field_output = [sql]
+
+            if field.primary_key:
+                field_output.append('NOT NULL PRIMARY KEY')
+            elif field.unique:
+                # Just use UNIQUE (no indexes any more, we have delete_unique)
+                field_output.append('UNIQUE')
+
+            sql = ' '.join(field_output)
+            sqlparams = ()
+
+            # if the field is "NOT NULL" and a default value is provided, create the column with it
+            # this allows the addition of a NOT NULL field to a table with existing rows
+            if not getattr(field, '_suppress_default', False):
+                if field.has_default():
+                    default = field.get_default()
+                    # If the default is actually None, don't add a default term
+                    if default is not None:
+                        # If the default is a callable, then call it!
+                        if callable(default):
+                            default = default()
+                        # Now do some very cheap quoting. TODO: Redesign return values to avoid this.
+                        if isinstance(default, string_types):
+                            default = "'%s'" % default.replace("'", "''")
+                        elif isinstance(default, (datetime.date, datetime.time, datetime.datetime)):
+                            default = "'%s'" % default
+                        elif isinstance(default, bool):
+                            default = int(default)
+                        # Escape any % signs in the output (bug #317)
+                        if isinstance(default, string_types):
+                            default = default.replace("%", "%%")
+                        # Add it in
+                        sql += " DEFAULT %s"
+                        sqlparams = (default,)
+                elif (not field.null and field.blank) or (field.get_default() == ''):
+                    if field.empty_strings_allowed and self._get_connection().features.interprets_empty_strings_as_nulls:
+                        sql += " DEFAULT ''"
+                    # Error here would be nice, but doesn't seem to play fair. 
+            #else:
+            #    raise ValueError("Attempting to add a non null column that isn't character based without an explicit default value.")
+
+        # Firebird requires the NOT NULL clause to come after the DEFAULT clause
+        if not field.primary_key and not field.null:
+            sql += ' NOT NULL'
+
+        if field.rel and self.supports_foreign_keys:
+            self.add_deferred_sql(
+                self.foreign_key_sql(
+                    table_name,
+                    field.column,
+                    field.rel.to._meta.db_table,
+                    field.rel.to._meta.get_field(field.rel.field_name).column
+                )
+            )
+
+        # Things like the contrib.gis module fields have this in 1.1 and below
+        if hasattr(field, 'post_create_sql'):
+            for stmt in field.post_create_sql(no_style(), table_name):
+                self.add_deferred_sql(stmt)
+
+        # In 1.2 and above, you have to ask the DatabaseCreation stuff for it.
+        # This also creates normal indexes in 1.1.
+        if hasattr(self._get_connection().creation, "sql_indexes_for_field"):
+            # Make a fake model to pass in, with only db_table
+            model = self.mock_model("FakeModelForGISCreation", table_name)
+            for stmt in self._get_connection().creation.sql_indexes_for_field(model, field, no_style()):
+                self.add_deferred_sql(stmt)
+
+        if sql:
+            return sql % sqlparams
+        else:
+            return None
+
+
+    def _drop_constraints(self, table_name, name, field):
+        if self.has_check_constraints:
+            check_constraints = self._constraints_affecting_columns(table_name, [name], "CHECK")
+            for constraint in check_constraints:
+                self.execute(self.delete_check_sql % {
+                    'table': self.quote_name(table_name),
+                    'constraint': self.quote_name(constraint),
+                })
+
+        # Drop or add UNIQUE constraint
+        unique_constraint = list(self._constraints_affecting_columns(table_name, [name], "UNIQUE"))
+        if field.unique and not unique_constraint:
+            self.create_unique(table_name, [name])
+        elif not field.unique and unique_constraint:
+            self.delete_unique(table_name, [name])
+
+        # Drop all foreign key constraints
+        try:
+            self.delete_foreign_key(table_name, name)
+        except ValueError:
+            # There weren't any
+            pass
+
+
+    @generic.invalidate_table_constraints
+    def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False):
+        """
+        Alters the given column name so it will match the given field.
+        Note that conversion between the two by the database must be possible.
+        Will not automatically add _id by default; to have this behaviour, pass
+        explicit_name=False.
+
+        @param table_name: The name of the table to add the column to
+        @param name: The name of the column to alter
+        @param field: The new field definition to use
+        """
+
+        if self.dry_run:
+            if self.debug:
+                print(' - no dry run output for alter_column() due to dynamic DDL, sorry')
+            return
+
+
+        # hook for the field to do any resolution prior to its attributes being queried
+        if hasattr(field, 'south_init'):
+            field.south_init()
+
+        # Add _id or whatever if we need to
+        field.set_attributes_from_name(name)
+        if not explicit_name:
+            name = field.column
+        else:
+            field.column = name
+
+        if not ignore_constraints:
+            # Drop all check constraints. Note that constraints will be added back
+            # with self.alter_string_set_type and self.alter_string_drop_null.
+            self._drop_constraints(table_name, name, field)
+
+        # First, change the type
+        params = {
+            "column": self.quote_name(name),
+            "type": self._db_type_for_alter_column(field),
+            "table_name": table_name
+        }
+
+        # SQLs is a list of (SQL, values) pairs. 
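+        # For example, a typical pair appended below looks something like
+        #     ('ALTER "name" TYPE varchar(255)', [])
+        # built from alter_string_set_type, while sqls_extra collects plain
+        # statements (such as the RDB$RELATION_FIELDS nullability UPDATE)
+        # that must run outside an ALTER TABLE.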
+        sqls = []
+        sqls_extra = []
+
+        # Only alter the column if it has a type (Geometry ones sometimes don't)
+        if params["type"] is not None:
+            sqls.append((self.alter_string_set_type % params, []))
+
+        # Add any field- and backend- specific modifications
+        self._alter_add_column_mods(field, name, params, sqls)
+
+        # Next, nullity: handled separately, since Firebird doesn't support DROP NOT NULL
+        sqls_extra.append(self._alter_column_set_null(table_name, name, field.null))
+
+        # Next, set any default
+        self._alter_set_defaults(field, name, params, sqls)
+
+        # Finally, actually change the column
+        if self.allows_combined_alters:
+            sqls, values = list(zip(*sqls))
+            self.execute(
+                "ALTER TABLE %s %s;" % (self.quote_name(table_name), ", ".join(sqls)),
+                generic.flatten(values),
+            )
+        else:
+            # Databases like e.g. MySQL don't like more than one alter at once.
+            for sql, values in sqls:
+                try:
+                    self.execute("ALTER TABLE %s %s;" % (self.quote_name(table_name), sql), values)
+                except DatabaseError as e:
+                    print(e)
+
+
+        # Execute extra sql, which don't need ALTER TABLE statement
+        for sql in sqls_extra:
+            self.execute(sql)
+
+        if not ignore_constraints:
+            # Add back FK constraints if needed
+            if field.rel and self.supports_foreign_keys:
+                self.execute(
+                    self.foreign_key_sql(
+                        table_name,
+                        field.column,
+                        field.rel.to._meta.db_table,
+                        field.rel.to._meta.get_field(field.rel.field_name).column
+                    )
+                )
+
+    @generic.copy_column_constraints
+    @generic.delete_column_constraints
+    def rename_column(self, table_name, old, new):
+        if old == new:
+            # Short-circuit out
+            return []
+
+        self.execute('ALTER TABLE %s ALTER %s TO %s;' % (
+            self.quote_name(table_name),
+            self.quote_name(old),
+            self.quote_name(new),
+        ))
diff --git a/awx/lib/site-packages/south/db/generic.py b/awx/lib/site-packages/south/db/generic.py
new file mode 100644
index 0000000000..1a26d955b5
--- /dev/null
+++ b/awx/lib/site-packages/south/db/generic.py
@@ -0,0 +1,1160 @@
+from __future__ import print_function
+
+import re
+import sys
+
+from django.core.management.color import no_style
+from django.db import transaction, models
+from django.db.utils import DatabaseError
+from django.db.backends.util import truncate_name
+from django.db.backends.creation import BaseDatabaseCreation
+from django.db.models.fields import NOT_PROVIDED
+from django.dispatch import dispatcher
+from django.conf import settings
+from django.utils.datastructures import SortedDict
+try:
+    from django.utils.functional import cached_property
+except ImportError:
+    class cached_property(object):
+        """
+        Decorator that converts a method with a single
+        self argument into a property cached on the instance.
+        """
+        def __init__(self, func):
+            self.func = func
+
+        def __get__(self, instance, type):
+            res = instance.__dict__[self.func.__name__] = self.func(instance)
+            return res
+
+from south.logger import get_logger
+from south.utils.py3 import string_types, text_type
+
+
+def alias(attrname):
+    """
+    Returns a function which calls 'attrname' - for function aliasing.
+    We can't just use foo = bar, as this breaks subclassing. 
+ """ + def func(self, *args, **kwds): + return getattr(self, attrname)(*args, **kwds) + return func + + +def invalidate_table_constraints(func): + def _cache_clear(self, table, *args, **opts): + self._set_cache(table, value=INVALID) + return func(self, table, *args, **opts) + return _cache_clear + + +def delete_column_constraints(func): + def _column_rm(self, table, column, *args, **opts): + self._set_cache(table, column, value=[]) + return func(self, table, column, *args, **opts) + return _column_rm + + +def copy_column_constraints(func): + def _column_cp(self, table, column_old, column_new, *args, **opts): + db_name = self._get_setting('NAME') + self._set_cache(table, column_new, value=self.lookup_constraint(db_name, table, column_old)) + return func(self, table, column_old, column_new, *args, **opts) + return _column_cp + + +class INVALID(Exception): + def __repr__(self): + return 'INVALID' + + +class DryRunError(ValueError): + pass + + +class DatabaseOperations(object): + """ + Generic SQL implementation of the DatabaseOperations. + Some of this code comes from Django Evolution. + """ + + alter_string_set_type = 'ALTER COLUMN %(column)s TYPE %(type)s' + alter_string_set_null = 'ALTER COLUMN %(column)s DROP NOT NULL' + alter_string_drop_null = 'ALTER COLUMN %(column)s SET NOT NULL' + delete_check_sql = 'ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s' + add_column_string = 'ALTER TABLE %s ADD COLUMN %s;' + delete_unique_sql = "ALTER TABLE %s DROP CONSTRAINT %s" + delete_foreign_key_sql = 'ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s' + create_table_sql = 'CREATE TABLE %(table)s (%(columns)s)' + max_index_name_length = 63 + drop_index_string = 'DROP INDEX %(index_name)s' + delete_column_string = 'ALTER TABLE %s DROP COLUMN %s CASCADE;' + create_primary_key_string = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s PRIMARY KEY (%(columns)s)" + delete_primary_key_sql = "ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s" + add_check_constraint_fragment = "ADD CONSTRAINT %(constraint)s CHECK (%(check)s)" + rename_table_sql = "ALTER TABLE %s RENAME TO %s;" + backend_name = None + default_schema_name = "public" + + # Features + allows_combined_alters = True + supports_foreign_keys = True + has_check_constraints = True + has_booleans = True + raises_default_errors = True + + @cached_property + def has_ddl_transactions(self): + """ + Tests the database using feature detection to see if it has + transactional DDL support. 
+ """ + self._possibly_initialise() + connection = self._get_connection() + if hasattr(connection.features, "confirm") and not connection.features._confirmed: + connection.features.confirm() + # Django 1.3's MySQLdb backend doesn't raise DatabaseError + exceptions = (DatabaseError, ) + try: + from MySQLdb import OperationalError + exceptions += (OperationalError, ) + except ImportError: + pass + # Now do the test + if getattr(connection.features, 'supports_transactions', True): + cursor = connection.cursor() + self.start_transaction() + cursor.execute('CREATE TABLE DDL_TRANSACTION_TEST (X INT)') + self.rollback_transaction() + try: + try: + cursor.execute('CREATE TABLE DDL_TRANSACTION_TEST (X INT)') + except exceptions: + return False + else: + return True + finally: + cursor.execute('DROP TABLE DDL_TRANSACTION_TEST') + else: + return False + + def __init__(self, db_alias): + self.debug = False + self.deferred_sql = [] + self.dry_run = False + self.pending_transactions = 0 + self.pending_create_signals = [] + self.db_alias = db_alias + self._constraint_cache = {} + self._initialised = False + + def lookup_constraint(self, db_name, table_name, column_name=None): + """ return a set() of constraints for db_name.table_name.column_name """ + def _lookup(): + table = self._constraint_cache[db_name][table_name] + if table is INVALID: + raise INVALID + elif column_name is None: + return list(table.items()) + else: + return table[column_name] + + try: + ret = _lookup() + return ret + except INVALID: + del self._constraint_cache[db_name][table_name] + self._fill_constraint_cache(db_name, table_name) + except KeyError: + if self._is_valid_cache(db_name, table_name): + return [] + self._fill_constraint_cache(db_name, table_name) + + return self.lookup_constraint(db_name, table_name, column_name) + + def _set_cache(self, table_name, column_name=None, value=INVALID): + db_name = self._get_setting('NAME') + try: + if column_name is not None: + self._constraint_cache[db_name][table_name][column_name] = value + else: + self._constraint_cache[db_name][table_name] = value + except (LookupError, TypeError): + pass + + def _is_valid_cache(self, db_name, table_name): + # we cache per-table so if the table is there it is valid + try: + return self._constraint_cache[db_name][table_name] is not INVALID + except KeyError: + return False + + def _is_multidb(self): + try: + from django.db import connections + connections # Prevents "unused import" warning + except ImportError: + return False + else: + return True + + def _get_connection(self): + """ + Returns a django connection for a given DB Alias + """ + if self._is_multidb(): + from django.db import connections + return connections[self.db_alias] + else: + from django.db import connection + return connection + + def _get_setting(self, setting_name): + """ + Allows code to get a setting (like, for example, STORAGE_ENGINE) + """ + setting_name = setting_name.upper() + connection = self._get_connection() + if self._is_multidb(): + # Django 1.2 and above + return connection.settings_dict[setting_name] + else: + # Django 1.1 and below + return getattr(settings, "DATABASE_%s" % setting_name) + + def _has_setting(self, setting_name): + """ + Existence-checking version of _get_setting. 
+ """ + try: + self._get_setting(setting_name) + except (KeyError, AttributeError): + return False + else: + return True + + def _get_schema_name(self): + try: + return self._get_setting('schema') + except (KeyError, AttributeError): + return self.default_schema_name + + def _possibly_initialise(self): + if not self._initialised: + self.connection_init() + self._initialised = True + + def connection_init(self): + """ + Run before any SQL to let database-specific config be sent as a command, + e.g. which storage engine (MySQL) or transaction serialisability level. + """ + pass + + def quote_name(self, name): + """ + Uses the database backend to quote the given table/column name. + """ + return self._get_connection().ops.quote_name(name) + + def _print_sql_error(self, e, sql, params=[]): + print('FATAL ERROR - The following SQL query failed: %s' % sql, file=sys.stderr) + print('The error was: %s' % e, file=sys.stderr) + + def execute(self, sql, params=[], print_all_errors=True): + """ + Executes the given SQL statement, with optional parameters. + If the instance's debug attribute is True, prints out what it executes. + """ + + self._possibly_initialise() + + cursor = self._get_connection().cursor() + if self.debug: + print(" = %s" % sql, params) + + if self.dry_run: + return [] + + get_logger().debug(text_type('execute "%s" with params "%s"' % (sql, params))) + + try: + cursor.execute(sql, params) + except DatabaseError as e: + if print_all_errors: + self._print_sql_error(e, sql, params) + raise + + try: + return cursor.fetchall() + except: + return [] + + def execute_many(self, sql, regex=r"(?mx) ([^';]* (?:'[^']*'[^';]*)*)", comment_regex=r"(?mx) (?:^\s*$)|(?:--.*$)"): + """ + Takes a SQL file and executes it as many separate statements. + (Some backends, such as Postgres, don't work otherwise.) + """ + # Be warned: This function is full of dark magic. Make sure you really + # know regexes before trying to edit it. + # First, strip comments + sql = "\n".join([x.strip().replace("%", "%%") for x in re.split(comment_regex, sql) if x.strip()]) + # Now execute each statement + for st in re.split(regex, sql)[1:][::2]: + self.execute(st) + + def add_deferred_sql(self, sql): + """ + Add a SQL statement to the deferred list, that won't be executed until + this instance's execute_deferred_sql method is run. + """ + self.deferred_sql.append(sql) + + def execute_deferred_sql(self): + """ + Executes all deferred SQL, resetting the deferred_sql list + """ + for sql in self.deferred_sql: + self.execute(sql) + + self.deferred_sql = [] + + def clear_deferred_sql(self): + """ + Resets the deferred_sql list to empty. + """ + self.deferred_sql = [] + + def clear_run_data(self, pending_creates = None): + """ + Resets variables to how they should be before a run. Used for dry runs. + If you want, pass in an old panding_creates to reset to. + """ + self.clear_deferred_sql() + self.pending_create_signals = pending_creates or [] + + def get_pending_creates(self): + return self.pending_create_signals + + @invalidate_table_constraints + def create_table(self, table_name, fields): + """ + Creates the table 'table_name'. 'fields' is a tuple of fields, + each repsented by a 2-part tuple of field name and a + django.db.models.fields.Field object + """ + + if len(table_name) > 63: + print(" ! 
+            print("   ! WARNING: You have a table name longer than 63 characters; this will not fully work on PostgreSQL or MySQL.")
+
+        # avoid default values in CREATE TABLE statements (#925)
+        for field_name, field in fields:
+            field._suppress_default = True
+
+        columns = [
+            self.column_sql(table_name, field_name, field)
+            for field_name, field in fields
+        ]
+
+        self.execute(self.create_table_sql % {
+            "table": self.quote_name(table_name),
+            "columns": ', '.join([col for col in columns if col]),
+        })
+
+    add_table = alias('create_table')  # Alias for consistency's sake
+
+    @invalidate_table_constraints
+    def rename_table(self, old_table_name, table_name):
+        """
+        Renames the table 'old_table_name' to 'table_name'.
+        """
+        if old_table_name == table_name:
+            # Short-circuit out.
+            return
+        params = (self.quote_name(old_table_name), self.quote_name(table_name))
+        self.execute(self.rename_table_sql % params)
+        # Invalidate the not-yet-indexed table
+        self._set_cache(table_name, value=INVALID)
+
+    @invalidate_table_constraints
+    def delete_table(self, table_name, cascade=True):
+        """
+        Deletes the table 'table_name'.
+        """
+        params = (self.quote_name(table_name), )
+        if cascade:
+            self.execute('DROP TABLE %s CASCADE;' % params)
+        else:
+            self.execute('DROP TABLE %s;' % params)
+
+    drop_table = alias('delete_table')
+
+    @invalidate_table_constraints
+    def clear_table(self, table_name):
+        """
+        Deletes all rows from 'table_name'.
+        """
+        params = (self.quote_name(table_name), )
+        self.execute('DELETE FROM %s;' % params)
+
+    @invalidate_table_constraints
+    def add_column(self, table_name, name, field, keep_default=True):
+        """
+        Adds the column 'name' to the table 'table_name'.
+        Uses the 'field' parameter, a django.db.models.fields.Field instance,
+        to generate the necessary sql
+
+        @param table_name: The name of the table to add the column to
+        @param name: The name of the column to add
+        @param field: The field to use
+        """
+        sql = self.column_sql(table_name, name, field)
+        if sql:
+            params = (
+                self.quote_name(table_name),
+                sql,
+            )
+            sql = self.add_column_string % params
+            self.execute(sql)
+
+            # Now, drop the default if we need to
+            if field.default is not None:
+                field.default = NOT_PROVIDED
+                self.alter_column(table_name, name, field, explicit_name=False, ignore_constraints=True)
+
+    def _db_type_for_alter_column(self, field):
+        """
+        Returns a field's type suitable for ALTER COLUMN.
+        By default it just returns field.db_type().
+        To be overridden by backend-specific subclasses
+        @param field: The field to generate type for
+        """
+        try:
+            return field.db_type(connection=self._get_connection())
+        except TypeError:
+            return field.db_type()
+
+    def _alter_add_column_mods(self, field, name, params, sqls):
+        """
+        Subcommand of alter_column that modifies column definitions beyond the
+        type string -- e.g. adding constraints where they cannot be specified
+        as part of the type (overrideable)
+        """
+        pass
+
+    def _alter_set_defaults(self, field, name, params, sqls):
+        "Subcommand of alter_column that sets default values (overrideable)"
+        # Next, set any default
+        if not field.null and field.has_default():
+            default = field.get_db_prep_save(field.get_default(), connection=self._get_connection())
+            sqls.append(('ALTER COLUMN %s SET DEFAULT %%s ' % (self.quote_name(name),), [default]))
+        else:
+            sqls.append(('ALTER COLUMN %s DROP DEFAULT' % (self.quote_name(name),), []))
+
+    def _update_nulls_to_default(self, params, field):
+        "Subcommand of alter_column that updates nulls to default value (overrideable)"
+        default = field.get_db_prep_save(field.get_default(), connection=self._get_connection())
+        self.execute('UPDATE %(table_name)s SET %(column)s=%%s WHERE %(column)s IS NULL' % params, [default])
+
+    @invalidate_table_constraints
+    def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False):
+        """
+        Alters the given column name so it will match the given field.
+        Note that conversion between the two by the database must be possible.
+        Will not automatically add _id by default; to have this behaviour, pass
+        explicit_name=False.
+
+        @param table_name: The name of the table to add the column to
+        @param name: The name of the column to alter
+        @param field: The new field definition to use
+        """
+
+        if self.dry_run:
+            if self.debug:
+                print('   - no dry run output for alter_column() due to dynamic DDL, sorry')
+            return
+
+        # hook for the field to do any resolution prior to its attributes being queried
+        if hasattr(field, 'south_init'):
+            field.south_init()
+
+        # Add _id or whatever if we need to
+        field.set_attributes_from_name(name)
+        if not explicit_name:
+            name = field.column
+        else:
+            field.column = name
+
+        if not ignore_constraints:
+            # Drop all check constraints. Note that constraints will be added back
+            # with self.alter_string_set_type and self.alter_string_drop_null.
+            if self.has_check_constraints:
+                check_constraints = self._constraints_affecting_columns(table_name, [name], "CHECK")
+                for constraint in check_constraints:
+                    self.execute(self.delete_check_sql % {
+                        'table': self.quote_name(table_name),
+                        'constraint': self.quote_name(constraint),
+                    })
+
+            # Drop all foreign key constraints
+            try:
+                self.delete_foreign_key(table_name, name)
+            except ValueError:
+                # There weren't any
+                pass
+
+        # First, change the type
+        params = {
+            "column": self.quote_name(name),
+            "type": self._db_type_for_alter_column(field),
+            "table_name": self.quote_name(table_name)
+        }
+
+        # SQLs is a list of (SQL, values) pairs.
+        sqls = []
+
+        # Only alter the column if it has a type (Geometry ones sometimes don't)
+        if params["type"] is not None:
+            sqls.append((self.alter_string_set_type % params, []))
+
+        # Add any field- and backend-specific modifications
+        self._alter_add_column_mods(field, name, params, sqls)
+        # Next, nullity
+        if field.null or field.has_default():
+            sqls.append((self.alter_string_set_null % params, []))
+        else:
+            sqls.append((self.alter_string_drop_null % params, []))
+
+        # Do defaults
+        self._alter_set_defaults(field, name, params, sqls)
+
+        # Actually change the column (step 1 -- Nullity may need to be fixed)
+        if self.allows_combined_alters:
+            sqls, values = zip(*sqls)
+            self.execute(
+                "ALTER TABLE %s %s;" % (self.quote_name(table_name), ", ".join(sqls)),
+                flatten(values),
+            )
+        else:
+            # Databases like e.g.
MySQL don't like more than one alter at once. + for sql, values in sqls: + self.execute("ALTER TABLE %s %s;" % (self.quote_name(table_name), sql), values) + + if not field.null and field.has_default(): + # Final fixes + self._update_nulls_to_default(params, field) + self.execute("ALTER TABLE %s %s;" % (self.quote_name(table_name), self.alter_string_drop_null % params), []) + + if not ignore_constraints: + # Add back FK constraints if needed + if field.rel and self.supports_foreign_keys: + self.execute( + self.foreign_key_sql( + table_name, + field.column, + field.rel.to._meta.db_table, + field.rel.to._meta.get_field(field.rel.field_name).column + ) + ) + + def _fill_constraint_cache(self, db_name, table_name): + + schema = self._get_schema_name() + ifsc_tables = ["constraint_column_usage", "key_column_usage"] + + self._constraint_cache.setdefault(db_name, {}) + self._constraint_cache[db_name][table_name] = {} + + for ifsc_table in ifsc_tables: + rows = self.execute(""" + SELECT kc.constraint_name, kc.column_name, c.constraint_type + FROM information_schema.%s AS kc + JOIN information_schema.table_constraints AS c ON + kc.table_schema = c.table_schema AND + kc.table_name = c.table_name AND + kc.constraint_name = c.constraint_name + WHERE + kc.table_schema = %%s AND + kc.table_name = %%s + """ % ifsc_table, [schema, table_name]) + for constraint, column, kind in rows: + self._constraint_cache[db_name][table_name].setdefault(column, set()) + self._constraint_cache[db_name][table_name][column].add((kind, constraint)) + return + + def _constraints_affecting_columns(self, table_name, columns, type="UNIQUE"): + """ + Gets the names of the constraints affecting the given columns. + If columns is None, returns all constraints of the type on the table. + """ + if self.dry_run: + raise DryRunError("Cannot get constraints for columns.") + + if columns is not None: + columns = set(map(lambda s: s.lower(), columns)) + + db_name = self._get_setting('NAME') + + cnames = {} + for col, constraints in self.lookup_constraint(db_name, table_name): + for kind, cname in constraints: + if kind == type: + cnames.setdefault(cname, set()) + cnames[cname].add(col.lower()) + + for cname, cols in cnames.items(): + if cols == columns or columns is None: + yield cname + + @invalidate_table_constraints + def create_unique(self, table_name, columns): + """ + Creates a UNIQUE constraint on the columns on the given table. + """ + + if not isinstance(columns, (list, tuple)): + columns = [columns] + + name = self.create_index_name(table_name, columns, suffix="_uniq") + + cols = ", ".join(map(self.quote_name, columns)) + self.execute("ALTER TABLE %s ADD CONSTRAINT %s UNIQUE (%s)" % ( + self.quote_name(table_name), + self.quote_name(name), + cols, + )) + return name + + @invalidate_table_constraints + def delete_unique(self, table_name, columns): + """ + Deletes a UNIQUE constraint on precisely the columns on the given table. + """ + + if not isinstance(columns, (list, tuple)): + columns = [columns] + + # Dry runs mean we can't do anything. 
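+        # Illustrative aside, not part of South: a migration would typically
+        # drive the create_unique()/delete_unique() pair like this (table and
+        # column names invented for the example):
+        #
+        #     from south.db import db
+        #     name = db.create_unique('app_book', ['title', 'author_id'])
+        #     db.delete_unique('app_book', ['title', 'author_id'])
+        #
+        # create_unique() returns the constraint name it generated, while
+        # delete_unique() rediscovers the name through the constraint cache.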
+        if self.dry_run:
+            if self.debug:
+                print('   - no dry run output for delete_unique_column() due to dynamic DDL, sorry')
+            return
+
+        constraints = list(self._constraints_affecting_columns(table_name, columns))
+        if not constraints:
+            raise ValueError("Cannot find a UNIQUE constraint on table %s, columns %r" % (table_name, columns))
+        for constraint in constraints:
+            self.execute(self.delete_unique_sql % (
+                self.quote_name(table_name),
+                self.quote_name(constraint),
+            ))
+
+    def column_sql(self, table_name, field_name, field, tablespace='', with_name=True, field_prepared=False):
+        """
+        Creates the SQL snippet for a column. Used by add_column and add_table.
+        """
+
+        # If the field hasn't already been told its attribute name, do so.
+        if not field_prepared:
+            field.set_attributes_from_name(field_name)
+
+        # hook for the field to do any resolution prior to its attributes being queried
+        if hasattr(field, 'south_init'):
+            field.south_init()
+
+        # Possible hook to fiddle with the fields (e.g. defaults & TEXT on MySQL)
+        field = self._field_sanity(field)
+
+        try:
+            sql = field.db_type(connection=self._get_connection())
+        except TypeError:
+            sql = field.db_type()
+
+        if sql:
+
+            # Some callers, like the sqlite stuff, just want the extended type.
+            if with_name:
+                field_output = [self.quote_name(field.column), sql]
+            else:
+                field_output = [sql]
+
+            field_output.append('%sNULL' % (not field.null and 'NOT ' or ''))
+            if field.primary_key:
+                field_output.append('PRIMARY KEY')
+            elif field.unique:
+                # Just use UNIQUE (no indexes any more, we have delete_unique)
+                field_output.append('UNIQUE')
+
+            tablespace = field.db_tablespace or tablespace
+            if tablespace and getattr(self._get_connection().features, "supports_tablespaces", False) and field.unique:
+                # We must specify the index tablespace inline, because we
+                # won't be generating a CREATE INDEX statement for this field.
+                field_output.append(self._get_connection().ops.tablespace_sql(tablespace, inline=True))
+
+            sql = ' '.join(field_output)
+            sqlparams = ()
+            # if the field is "NOT NULL" and a default value is provided, create the column with it
+            # this allows the addition of a NOT NULL field to a table with existing rows
+            if not getattr(field, '_suppress_default', False):
+                if field.has_default():
+                    default = field.get_default()
+                    # If the default is actually None, don't add a default term
+                    if default is not None:
+                        # If the default is a callable, then call it!
+                        if callable(default):
+                            default = default()
+
+                        default = field.get_db_prep_save(default, connection=self._get_connection())
+                        default = self._default_value_workaround(default)
+                        # Now do some very cheap quoting. TODO: Redesign return values to avoid this.
+                        if isinstance(default, string_types):
+                            default = "'%s'" % default.replace("'", "''")
+                        # Escape any % signs in the output (bug #317)
+                        if isinstance(default, string_types):
+                            default = default.replace("%", "%%")
+                        # Add it in
+                        sql += " DEFAULT %s"
+                        sqlparams = (default,)
+                elif (not field.null and field.blank) or (field.get_default() == ''):
+                    if field.empty_strings_allowed and self._get_connection().features.interprets_empty_strings_as_nulls:
+                        sql += " DEFAULT ''"
+                    # Error here would be nice, but doesn't seem to play fair.
+ #else: + # raise ValueError("Attempting to add a non null column that isn't character based without an explicit default value.") + + if field.rel and self.supports_foreign_keys: + self.add_deferred_sql( + self.foreign_key_sql( + table_name, + field.column, + field.rel.to._meta.db_table, + field.rel.to._meta.get_field(field.rel.field_name).column + ) + ) + + # Things like the contrib.gis module fields have this in 1.1 and below + if hasattr(field, 'post_create_sql'): + for stmt in field.post_create_sql(no_style(), table_name): + self.add_deferred_sql(stmt) + + # In 1.2 and above, you have to ask the DatabaseCreation stuff for it. + # This also creates normal indexes in 1.1. + if hasattr(self._get_connection().creation, "sql_indexes_for_field"): + # Make a fake model to pass in, with only db_table + model = self.mock_model("FakeModelForGISCreation", table_name) + for stmt in self._get_connection().creation.sql_indexes_for_field(model, field, no_style()): + self.add_deferred_sql(stmt) + + if sql: + return sql % sqlparams + else: + return None + + def _field_sanity(self, field): + """ + Placeholder for DBMS-specific field alterations (some combos aren't valid, + e.g. DEFAULT and TEXT on MySQL) + """ + return field + + def _default_value_workaround(self, value): + """ + DBMS-specific value alterations (this really works around + missing functionality in Django backends) + """ + if isinstance(value, bool) and not self.has_booleans: + return int(value) + else: + return value + + def foreign_key_sql(self, from_table_name, from_column_name, to_table_name, to_column_name): + """ + Generates a full SQL statement to add a foreign key constraint + """ + constraint_name = '%s_refs_%s_%s' % (from_column_name, to_column_name, self._digest(from_table_name, to_table_name)) + return 'ALTER TABLE %s ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s)%s;' % ( + self.quote_name(from_table_name), + self.quote_name(self.shorten_name(constraint_name)), + self.quote_name(from_column_name), + self.quote_name(to_table_name), + self.quote_name(to_column_name), + self._get_connection().ops.deferrable_sql() # Django knows this + ) + + @invalidate_table_constraints + def delete_foreign_key(self, table_name, column): + """ + Drop a foreign key constraint + """ + if self.dry_run: + if self.debug: + print(' - no dry run output for delete_foreign_key() due to dynamic DDL, sorry') + return # We can't look at the DB to get the constraints + constraints = self._find_foreign_constraints(table_name, column) + if not constraints: + raise ValueError("Cannot find a FOREIGN KEY constraint on table %s, column %s" % (table_name, column)) + for constraint_name in constraints: + self.execute(self.delete_foreign_key_sql % { + "table": self.quote_name(table_name), + "constraint": self.quote_name(constraint_name), + }) + + drop_foreign_key = alias('delete_foreign_key') + + def _find_foreign_constraints(self, table_name, column_name=None): + constraints = self._constraints_affecting_columns( + table_name, [column_name], "FOREIGN KEY") + + primary_key_columns = self._find_primary_key_columns(table_name) + + if len(primary_key_columns) > 1: + # Composite primary keys cannot be referenced by a foreign key + return list(constraints) + else: + primary_key_columns.add(column_name) + recursive_constraints = set(self._constraints_affecting_columns( + table_name, primary_key_columns, "FOREIGN KEY")) + return list(recursive_constraints.union(constraints)) + + def _digest(self, *args): + """ + Use 
django.db.backends.creation.BaseDatabaseCreation._digest + to create index name in Django style. An evil hack :( + """ + if not hasattr(self, '_django_db_creation'): + self._django_db_creation = BaseDatabaseCreation(self._get_connection()) + return self._django_db_creation._digest(*args) + + def shorten_name(self, name): + return truncate_name(name, self._get_connection().ops.max_name_length()) + + def create_index_name(self, table_name, column_names, suffix=""): + """ + Generate a unique name for the index + """ + + # If there is just one column in the index, use a default algorithm from Django + if len(column_names) == 1 and not suffix: + return self.shorten_name( + '%s_%s' % (table_name, self._digest(column_names[0])) + ) + + # Else generate the name for the index by South + table_name = table_name.replace('"', '').replace('.', '_') + index_unique_name = '_%x' % abs(hash((table_name, ','.join(column_names)))) + + # If the index name is too long, truncate it + index_name = ('%s_%s%s%s' % (table_name, column_names[0], index_unique_name, suffix)).replace('"', '').replace('.', '_') + if len(index_name) > self.max_index_name_length: + part = ('_%s%s%s' % (column_names[0], index_unique_name, suffix)) + index_name = '%s%s' % (table_name[:(self.max_index_name_length - len(part))], part) + + return index_name + + def create_index_sql(self, table_name, column_names, unique=False, db_tablespace=''): + """ + Generates a create index statement on 'table_name' for a list of 'column_names' + """ + if not column_names: + print("No column names supplied on which to create an index") + return '' + + connection = self._get_connection() + if db_tablespace and connection.features.supports_tablespaces: + tablespace_sql = ' ' + connection.ops.tablespace_sql(db_tablespace) + else: + tablespace_sql = '' + + index_name = self.create_index_name(table_name, column_names) + return 'CREATE %sINDEX %s ON %s (%s)%s;' % ( + unique and 'UNIQUE ' or '', + self.quote_name(index_name), + self.quote_name(table_name), + ','.join([self.quote_name(field) for field in column_names]), + tablespace_sql + ) + + @invalidate_table_constraints + def create_index(self, table_name, column_names, unique=False, db_tablespace=''): + """ Executes a create index statement """ + sql = self.create_index_sql(table_name, column_names, unique, db_tablespace) + self.execute(sql) + + @invalidate_table_constraints + def delete_index(self, table_name, column_names, db_tablespace=''): + """ + Deletes an index created with create_index. + This is possible using only columns due to the deterministic + index naming function which relies on column names. + """ + if isinstance(column_names, string_types): + column_names = [column_names] + name = self.create_index_name(table_name, column_names) + sql = self.drop_index_string % { + "index_name": self.quote_name(name), + "table_name": self.quote_name(table_name), + } + self.execute(sql) + + drop_index = alias('delete_index') + + @delete_column_constraints + def delete_column(self, table_name, name): + """ + Deletes the column 'column_name' from the table 'table_name'. + """ + params = (self.quote_name(table_name), self.quote_name(name)) + self.execute(self.delete_column_string % params, []) + + drop_column = alias('delete_column') + + def rename_column(self, table_name, old, new): + """ + Renames the column 'old' from the table 'table_name' to 'new'. 
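+
+        (Illustrative note from the editor, not South's docstring: concrete
+        backends override this -- for example the PostgreSQL backend later in
+        this patch issues ALTER TABLE <table> RENAME COLUMN <old> TO <new>.)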
+ """ + raise NotImplementedError("rename_column has no generic SQL syntax") + + @invalidate_table_constraints + def delete_primary_key(self, table_name): + """ + Drops the old primary key. + """ + # Dry runs mean we can't do anything. + if self.dry_run: + if self.debug: + print(' - no dry run output for delete_primary_key() due to dynamic DDL, sorry') + return + + constraints = list(self._constraints_affecting_columns(table_name, None, type="PRIMARY KEY")) + if not constraints: + raise ValueError("Cannot find a PRIMARY KEY constraint on table %s" % (table_name,)) + + for constraint in constraints: + self.execute(self.delete_primary_key_sql % { + "table": self.quote_name(table_name), + "constraint": self.quote_name(constraint), + }) + + drop_primary_key = alias('delete_primary_key') + + @invalidate_table_constraints + def create_primary_key(self, table_name, columns): + """ + Creates a new primary key on the specified columns. + """ + if not isinstance(columns, (list, tuple)): + columns = [columns] + self.execute(self.create_primary_key_string % { + "table": self.quote_name(table_name), + "constraint": self.quote_name(table_name + "_pkey"), + "columns": ", ".join(map(self.quote_name, columns)), + }) + + def _find_primary_key_columns(self, table_name): + """ + Find all columns of the primary key of the specified table + """ + db_name = self._get_setting('NAME') + + primary_key_columns = set() + for col, constraints in self.lookup_constraint(db_name, table_name): + for kind, cname in constraints: + if kind == 'PRIMARY KEY': + primary_key_columns.add(col.lower()) + + return primary_key_columns + + def start_transaction(self): + """ + Makes sure the following commands are inside a transaction. + Must be followed by a (commit|rollback)_transaction call. + """ + if self.dry_run: + self.pending_transactions += 1 + transaction.commit_unless_managed(using=self.db_alias) + transaction.enter_transaction_management(using=self.db_alias) + transaction.managed(True, using=self.db_alias) + + def commit_transaction(self): + """ + Commits the current transaction. + Must be preceded by a start_transaction call. + """ + if self.dry_run: + return + transaction.commit(using=self.db_alias) + transaction.leave_transaction_management(using=self.db_alias) + + def rollback_transaction(self): + """ + Rolls back the current transaction. + Must be preceded by a start_transaction call. + """ + if self.dry_run: + self.pending_transactions -= 1 + transaction.rollback(using=self.db_alias) + transaction.leave_transaction_management(using=self.db_alias) + + def rollback_transactions_dry_run(self): + """ + Rolls back all pending_transactions during this dry run. + """ + if not self.dry_run: + return + while self.pending_transactions > 0: + self.rollback_transaction() + if transaction.is_dirty(using=self.db_alias): + # Force an exception, if we're still in a dirty transaction. + # This means we are missing a COMMIT/ROLLBACK. + transaction.leave_transaction_management(using=self.db_alias) + + def send_create_signal(self, app_label, model_names): + self.pending_create_signals.append((app_label, model_names)) + + def send_pending_create_signals(self, verbosity=0, interactive=False): + # Group app_labels together + signals = SortedDict() + for (app_label, model_names) in self.pending_create_signals: + try: + signals[app_label].extend(model_names) + except KeyError: + signals[app_label] = list(model_names) + # Send only one signal per app. 
+        for (app_label, model_names) in signals.items():
+            self.really_send_create_signal(app_label, list(set(model_names)),
+                                           verbosity=verbosity,
+                                           interactive=interactive)
+        self.pending_create_signals = []
+
+    def really_send_create_signal(self, app_label, model_names,
+                                  verbosity=0, interactive=False):
+        """
+        Sends a post_syncdb signal for the model specified.
+
+        If the model is not found (perhaps it's been deleted?),
+        no signal is sent.
+
+        TODO: The behavior of django.contrib.* apps seems flawed in that
+        they don't respect created_models. Rather, they blindly execute
+        over all models within the app sending the signal. This is a
+        patch we should push Django to make. For now, this should work.
+        """
+
+        if self.debug:
+            print(" - Sending post_syncdb signal for %s: %s" % (app_label, model_names))
+
+        app = models.get_app(app_label)
+        if not app:
+            return
+
+        created_models = []
+        for model_name in model_names:
+            model = models.get_model(app_label, model_name)
+            if model:
+                created_models.append(model)
+
+        if created_models:
+
+            if hasattr(dispatcher, "send"):
+                # Older djangos
+                dispatcher.send(signal=models.signals.post_syncdb, sender=app,
+                                app=app, created_models=created_models,
+                                verbosity=verbosity, interactive=interactive)
+            else:
+                if self._is_multidb():
+                    # Django 1.2+
+                    models.signals.post_syncdb.send(
+                        sender=app,
+                        app=app,
+                        created_models=created_models,
+                        verbosity=verbosity,
+                        interactive=interactive,
+                        db=self.db_alias,
+                    )
+                else:
+                    # Django 1.1 - 1.0
+                    models.signals.post_syncdb.send(
+                        sender=app,
+                        app=app,
+                        created_models=created_models,
+                        verbosity=verbosity,
+                        interactive=interactive,
+                    )
+
+    def mock_model(self, model_name, db_table, db_tablespace='',
+                   pk_field_name='id', pk_field_type=models.AutoField,
+                   pk_field_args=[], pk_field_kwargs={}):
+        """
+        Generates a MockModel class that provides enough information
+        to be used by a foreign key/many-to-many relationship.
+
+        Migrations should prefer to use these rather than actual models
+        as models could get deleted over time, but these can remain in
+        migration files forever.
+
+        Deprecated.
+        """
+        class MockOptions(object):
+            def __init__(self):
+                self.db_table = db_table
+                self.db_tablespace = db_tablespace or settings.DEFAULT_TABLESPACE
+                self.object_name = model_name
+                self.module_name = model_name.lower()
+
+                if pk_field_type == models.AutoField:
+                    pk_field_kwargs['primary_key'] = True
+
+                self.pk = pk_field_type(*pk_field_args, **pk_field_kwargs)
+                self.pk.set_attributes_from_name(pk_field_name)
+                self.abstract = False
+
+            def get_field_by_name(self, field_name):
+                # we only care about the pk field
+                return (self.pk, self.model, True, False)
+
+            def get_field(self, name):
+                # we only care about the pk field
+                return self.pk
+
+        class MockModel(object):
+            _meta = None
+
+        # We need to return an actual class object here, not an instance
+        MockModel._meta = MockOptions()
+        MockModel._meta.model = MockModel
+        return MockModel
+
+    def _db_positive_type_for_alter_column(self, klass, field):
+        """
+        A helper for subclasses overriding _db_type_for_alter_column:
+        Remove the check constraint from the type string for PositiveInteger
+        and PositiveSmallInteger fields.
+ @param klass: The type of the child (required to allow this to be used when it is subclassed) + @param field: The field to generate type for + """ + super_result = super(klass, self)._db_type_for_alter_column(field) + if isinstance(field, (models.PositiveSmallIntegerField, models.PositiveIntegerField)): + return super_result.split(" ", 1)[0] + return super_result + + def _alter_add_positive_check(self, klass, field, name, params, sqls): + """ + A helper for subclasses overriding _alter_add_column_mods: + Add a check constraint verifying positivity to PositiveInteger and + PositiveSmallInteger fields. + """ + super(klass, self)._alter_add_column_mods(field, name, params, sqls) + if isinstance(field, (models.PositiveSmallIntegerField, models.PositiveIntegerField)): + uniq_hash = abs(hash(tuple(params.values()))) + d = dict( + constraint = "CK_%s_PSTV_%s" % (name, hex(uniq_hash)[2:]), + check = "%s >= 0" % self.quote_name(name)) + sqls.append((self.add_check_constraint_fragment % d, [])) + + +# Single-level flattening of lists +def flatten(ls): + nl = [] + for l in ls: + nl += l + return nl diff --git a/awx/lib/site-packages/south/db/mysql.py b/awx/lib/site-packages/south/db/mysql.py new file mode 100644 index 0000000000..f3f8cb1a69 --- /dev/null +++ b/awx/lib/site-packages/south/db/mysql.py @@ -0,0 +1,283 @@ +# MySQL-specific implementations for south +# Original author: Andrew Godwin +# Patches by: F. Gabriel Gosselin <gabrielNOSPAM@evidens.ca> + +from south.db import generic +from south.db.generic import DryRunError, INVALID +from south.logger import get_logger + + +def delete_column_constraints(func): + """ + Decorates column operation functions for MySQL. + Deletes the constraints from the database and clears local cache. + """ + def _column_rm(self, table_name, column_name, *args, **opts): + # Delete foreign key constraints + try: + self.delete_foreign_key(table_name, column_name) + except ValueError: + pass # If no foreign key on column, OK because it checks first + # Delete constraints referring to this column + try: + reverse = self._lookup_reverse_constraint(table_name, column_name) + for cname, rtable, rcolumn in reverse: + self.delete_foreign_key(rtable, rcolumn) + except DryRunError: + pass + return func(self, table_name, column_name, *args, **opts) + return _column_rm + + +def copy_column_constraints(func): + """ + Decorates column operation functions for MySQL. + Determines existing constraints and copies them to a new column + """ + def _column_cp(self, table_name, column_old, column_new, *args, **opts): + # Copy foreign key constraint + try: + constraint = self._find_foreign_constraints(table_name, column_old)[0] + (ftable, fcolumn) = self._lookup_constraint_references(table_name, constraint) + if ftable and fcolumn: + fk_sql = self.foreign_key_sql( + table_name, column_new, ftable, fcolumn) + get_logger().debug("Foreign key SQL: " + fk_sql) + self.add_deferred_sql(fk_sql) + except IndexError: + pass # No constraint exists so ignore + except DryRunError: + pass + # Copy constraints referring to this column + try: + reverse = self._lookup_reverse_constraint(table_name, column_old) + for cname, rtable, rcolumn in reverse: + fk_sql = self.foreign_key_sql( + rtable, rcolumn, table_name, column_new) + self.add_deferred_sql(fk_sql) + except DryRunError: + pass + return func(self, table_name, column_old, column_new, *args, **opts) + return _column_cp + + +def invalidate_table_constraints(func): + """ + For MySQL we grab all table constraints simultaneously, so this is + effective. 
+    It further solves the issues of invalidating referred table constraints.
+    """
+    def _cache_clear(self, table, *args, **opts):
+        db_name = self._get_setting('NAME')
+        if db_name in self._constraint_cache:
+            del self._constraint_cache[db_name]
+        if db_name in self._reverse_cache:
+            del self._reverse_cache[db_name]
+        if db_name in self._constraint_references:
+            del self._constraint_references[db_name]
+        return func(self, table, *args, **opts)
+    return _cache_clear
+
+
+class DatabaseOperations(generic.DatabaseOperations):
+    """
+    MySQL implementation of database operations.
+
+    MySQL has no DDL transaction support. This can confuse people when they
+    ask how to roll back - hence the dry runs, etc., found in the migration
+    code.
+    """
+
+    backend_name = "mysql"
+    alter_string_set_type = ''
+    alter_string_set_null = 'MODIFY %(column)s %(type)s NULL;'
+    alter_string_drop_null = 'MODIFY %(column)s %(type)s NOT NULL;'
+    drop_index_string = 'DROP INDEX %(index_name)s ON %(table_name)s'
+    delete_primary_key_sql = "ALTER TABLE %(table)s DROP PRIMARY KEY"
+    delete_foreign_key_sql = "ALTER TABLE %(table)s DROP FOREIGN KEY %(constraint)s"
+    delete_unique_sql = "ALTER TABLE %s DROP INDEX %s"
+    rename_table_sql = "RENAME TABLE %s TO %s;"
+
+    allows_combined_alters = False
+    has_check_constraints = False
+    raises_default_errors = False
+
+    geom_types = ['geometry', 'point', 'linestring', 'polygon']
+    text_types = ['text', 'blob']
+
+    def __init__(self, db_alias):
+        self._constraint_references = {}
+        self._reverse_cache = {}
+        super(DatabaseOperations, self).__init__(db_alias)
+        if self._has_setting('STORAGE_ENGINE') and self._get_setting('STORAGE_ENGINE'):
+            self.create_table_sql = self.create_table_sql + ' ENGINE=%s' % self._get_setting('STORAGE_ENGINE')
+
+    def _is_valid_cache(self, db_name, table_name):
+        cache = self._constraint_cache
+        # we cache the whole db so if there are any tables table_name is valid
+        return db_name in cache and cache[db_name].get(table_name, None) is not INVALID
+
+    def _fill_constraint_cache(self, db_name, table_name):
+        # for MySQL grab all constraints for this database. It's just as cheap as a single column.
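+        # Illustrative aside, not part of South: once filled, the caches are
+        # shaped roughly like this (table, column and constraint names invented):
+        #
+        #     _constraint_cache[db]['app_book']['author_id']
+        #         == {('FOREIGN KEY', 'book_author_id_refs_id_1f2e3d')}
+        #     _constraint_references[db][('app_book', 'book_author_id_refs_id_1f2e3d')]
+        #         == ('app_author', 'id')
+        #
+        # which is what copy_column_constraints/delete_column_constraints rely on.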
+ self._constraint_cache[db_name] = {} + self._constraint_cache[db_name][table_name] = {} + self._reverse_cache[db_name] = {} + self._constraint_references[db_name] = {} + + name_query = """ + SELECT kc.`constraint_name`, kc.`column_name`, kc.`table_name`, + kc.`referenced_table_name`, kc.`referenced_column_name` + FROM information_schema.key_column_usage AS kc + WHERE + kc.table_schema = %s + """ + rows = self.execute(name_query, [db_name]) + if not rows: + return + cnames = {} + for constraint, column, table, ref_table, ref_column in rows: + key = (table, constraint) + cnames.setdefault(key, set()) + cnames[key].add((column, ref_table, ref_column)) + + type_query = """ + SELECT c.constraint_name, c.table_name, c.constraint_type + FROM information_schema.table_constraints AS c + WHERE + c.table_schema = %s + """ + rows = self.execute(type_query, [db_name]) + for constraint, table, kind in rows: + key = (table, constraint) + self._constraint_cache[db_name].setdefault(table, {}) + try: + cols = cnames[key] + except KeyError: + cols = set() + for column_set in cols: + (column, ref_table, ref_column) = column_set + self._constraint_cache[db_name][table].setdefault(column, set()) + if kind == 'FOREIGN KEY': + self._constraint_cache[db_name][table][column].add((kind, + constraint)) + # Create constraint lookup, see constraint_references + self._constraint_references[db_name][(table, + constraint)] = (ref_table, ref_column) + # Create reverse table lookup, reverse_lookup + self._reverse_cache[db_name].setdefault(ref_table, {}) + self._reverse_cache[db_name][ref_table].setdefault(ref_column, + set()) + self._reverse_cache[db_name][ref_table][ref_column].add( + (constraint, table, column)) + else: + self._constraint_cache[db_name][table][column].add((kind, + constraint)) + + def connection_init(self): + """ + Run before any SQL to let database-specific config be sent as a command, + e.g. which storage engine (MySQL) or transaction serialisability level. + """ + cursor = self._get_connection().cursor() + if self._has_setting('STORAGE_ENGINE') and self._get_setting('STORAGE_ENGINE'): + cursor.execute("SET storage_engine=%s;" % self._get_setting('STORAGE_ENGINE')) + + def start_transaction(self): + super(DatabaseOperations, self).start_transaction() + self.execute("SET FOREIGN_KEY_CHECKS=0;") + + @copy_column_constraints + @delete_column_constraints + @invalidate_table_constraints + def rename_column(self, table_name, old, new): + if old == new or self.dry_run: + return [] + + rows = [x for x in self.execute('DESCRIBE %s' % (self.quote_name(table_name),)) if x[0] == old] + + if not rows: + raise ValueError("No column '%s' in '%s'." 
% (old, table_name)) + + params = ( + self.quote_name(table_name), + self.quote_name(old), + self.quote_name(new), + rows[0][1], + rows[0][2] == "YES" and "NULL" or "NOT NULL", + rows[0][4] and "DEFAULT " or "", + rows[0][4] and "%s" or "", + rows[0][5] or "", + ) + + sql = 'ALTER TABLE %s CHANGE COLUMN %s %s %s %s %s %s %s;' % params + + if rows[0][4]: + self.execute(sql, (rows[0][4],)) + else: + self.execute(sql) + + @delete_column_constraints + def delete_column(self, table_name, name): + super(DatabaseOperations, self).delete_column(table_name, name) + + @invalidate_table_constraints + def rename_table(self, old_table_name, table_name): + super(DatabaseOperations, self).rename_table(old_table_name, + table_name) + + @invalidate_table_constraints + def delete_table(self, table_name): + super(DatabaseOperations, self).delete_table(table_name) + + def _lookup_constraint_references(self, table_name, cname): + """ + Provided an existing table and constraint, returns tuple of (foreign + table, column) + """ + db_name = self._get_setting('NAME') + try: + return self._constraint_references[db_name][(table_name, cname)] + except KeyError: + return None + + def _lookup_reverse_constraint(self, table_name, column_name=None): + """Look for the column referenced by a foreign constraint""" + db_name = self._get_setting('NAME') + if self.dry_run: + raise DryRunError("Cannot get constraints for columns.") + + if not self._is_valid_cache(db_name, table_name): + # Piggy-back on lookup_constraint, ensures cache exists + self.lookup_constraint(db_name, table_name) + + try: + table = self._reverse_cache[db_name][table_name] + if column_name == None: + return [(y, tuple(y)) for x, y in table.items()] + else: + return tuple(table[column_name]) + except KeyError: + return [] + + def _field_sanity(self, field): + """ + This particular override stops us sending DEFAULTs for BLOB/TEXT columns. + """ + # MySQL does not support defaults for geometry columns also + type = self._db_type_for_alter_column(field).lower() + is_geom = True in [type.find(t) > -1 for t in self.geom_types] + is_text = True in [type.find(t) > -1 for t in self.text_types] + + if is_geom or is_text: + field._suppress_default = True + return field + + def _alter_set_defaults(self, field, name, params, sqls): + """ + MySQL does not support defaults on text or blob columns. + """ + type = params['type'] + # MySQL does not support defaults for geometry columns also + is_geom = True in [type.find(t) > -1 for t in self.geom_types] + is_text = True in [type.find(t) > -1 for t in self.text_types] + if not is_geom and not is_text: + super(DatabaseOperations, self)._alter_set_defaults(field, name, params, sqls) diff --git a/awx/lib/site-packages/south/db/oracle.py b/awx/lib/site-packages/south/db/oracle.py new file mode 100644 index 0000000000..6e002945ff --- /dev/null +++ b/awx/lib/site-packages/south/db/oracle.py @@ -0,0 +1,318 @@ +from __future__ import print_function + +import os.path +import sys +import re +import warnings +import cx_Oracle + + +from django.db import connection, models +from django.db.backends.util import truncate_name +from django.core.management.color import no_style +from django.db.models.fields import NOT_PROVIDED +from django.db.utils import DatabaseError + +# In revision r16016 function get_sequence_name has been transformed into +# method of DatabaseOperations class. To make code backward-compatible we +# need to handle both situations. 
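+# Illustrative aside, not part of South: the fallback below keeps both call
+# styles working, depending on the Django revision in use:
+#
+#     original_get_sequence_name('app_book')           # older: module-level function
+#     connection.ops._get_sequence_name('app_book')    # newer: method on DatabaseOperations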
+try: + from django.db.backends.oracle.base import get_sequence_name\ + as original_get_sequence_name +except ImportError: + original_get_sequence_name = None + +from south.db import generic + +class DatabaseOperations(generic.DatabaseOperations): + """ + Oracle implementation of database operations. + """ + backend_name = 'oracle' + + alter_string_set_type = 'ALTER TABLE %(table_name)s MODIFY %(column)s %(type)s %(nullity)s;' + alter_string_set_default = 'ALTER TABLE %(table_name)s MODIFY %(column)s DEFAULT %(default)s;' + alter_string_update_nulls_to_default = \ + 'UPDATE %(table_name)s SET %(column)s = %(default)s WHERE %(column)s IS NULL;' + add_column_string = 'ALTER TABLE %s ADD %s;' + delete_column_string = 'ALTER TABLE %s DROP COLUMN %s;' + add_constraint_string = 'ALTER TABLE %(table_name)s ADD CONSTRAINT %(constraint)s %(clause)s' + + allows_combined_alters = False + has_booleans = False + + constraints_dict = { + 'P': 'PRIMARY KEY', + 'U': 'UNIQUE', + 'C': 'CHECK', + 'R': 'FOREIGN KEY' + } + + def get_sequence_name(self, table_name): + if original_get_sequence_name is None: + return self._get_connection().ops._get_sequence_name(table_name) + else: + return original_get_sequence_name(table_name) + + #TODO: This will cause very obscure bugs if anyone uses a column name or string value + # that looks like a column definition (with 'CHECK', 'DEFAULT' and/or 'NULL' in it) + # e.g. "CHECK MATE" varchar(10) DEFAULT 'NULL' + def adj_column_sql(self, col): + # Syntax fixes -- Oracle is picky about clause order + col = re.sub('(?P<constr>CHECK \(.*\))(?P<any>.*)(?P<default>DEFAULT \d+)', + lambda mo: '%s %s%s'%(mo.group('default'), mo.group('constr'), mo.group('any')), col) #syntax fix for boolean/integer field only + col = re.sub('(?P<not_null>(NOT )?NULL) (?P<misc>(.* )?)(?P<default>DEFAULT.+)', + lambda mo: '%s %s %s'%(mo.group('default'),mo.group('not_null'),mo.group('misc') or ''), col) #fix order of NULL/NOT NULL and DEFAULT + return col + + def check_meta(self, table_name): + return table_name in [ m._meta.db_table for m in models.get_models() ] #caching provided by Django + + def normalize_name(self, name): + """ + Get the properly shortened and uppercased identifier as returned by quote_name(), but without the actual quotes. + """ + nn = self.quote_name(name) + if nn[0] == '"' and nn[-1] == '"': + nn = nn[1:-1] + return nn + + @generic.invalidate_table_constraints + def create_table(self, table_name, fields): + qn = self.quote_name(table_name) + columns = [] + autoinc_sql = '' + + + for field_name, field in fields: + + # avoid default values in CREATE TABLE statements (#925) + field._suppress_default = True + + col = self.column_sql(table_name, field_name, field) + if not col: + continue + col = self.adj_column_sql(col) + + columns.append(col) + if isinstance(field, models.AutoField): + autoinc_sql = connection.ops.autoinc_sql(table_name, field_name) + + sql = 'CREATE TABLE %s (%s);' % (qn, ', '.join([col for col in columns])) + self.execute(sql) + if autoinc_sql: + self.execute(autoinc_sql[0]) + self.execute(autoinc_sql[1]) + + @generic.invalidate_table_constraints + def delete_table(self, table_name, cascade=True): + qn = self.quote_name(table_name) + + # Note: PURGE is not valid syntax for Oracle 9i (it was added in 10) + if cascade: + self.execute('DROP TABLE %s CASCADE CONSTRAINTS;' % qn) + else: + self.execute('DROP TABLE %s;' % qn) + + # If the table has an AutoField a sequence was created. 
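+        # Illustrative aside, not part of South: the PL/SQL block below only
+        # drops the sequence if USER_CATALOG still lists it, so dropping a
+        # table that never had an AutoField is a harmless no-op.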
+ sequence_sql = """ +DECLARE + i INTEGER; +BEGIN + SELECT COUNT(*) INTO i FROM USER_CATALOG + WHERE TABLE_NAME = '%(sq_name)s' AND TABLE_TYPE = 'SEQUENCE'; + IF i = 1 THEN + EXECUTE IMMEDIATE 'DROP SEQUENCE "%(sq_name)s"'; + END IF; +END; +/""" % {'sq_name': self.get_sequence_name(table_name)} + self.execute(sequence_sql) + + @generic.invalidate_table_constraints + def alter_column(self, table_name, name, field, explicit_name=True): + + if self.dry_run: + if self.debug: + print(' - no dry run output for alter_column() due to dynamic DDL, sorry') + return + + qn = self.quote_name(table_name) + + # hook for the field to do any resolution prior to it's attributes being queried + if hasattr(field, 'south_init'): + field.south_init() + field = self._field_sanity(field) + + # Add _id or whatever if we need to + field.set_attributes_from_name(name) + if not explicit_name: + name = field.column + qn_col = self.quote_name(name) + + # First, change the type + # This will actually also add any CHECK constraints needed, + # since e.g. 'type' for a BooleanField is 'NUMBER(1) CHECK (%(qn_column)s IN (0,1))' + params = { + 'table_name':qn, + 'column': qn_col, + 'type': self._db_type_for_alter_column(field), + 'nullity': 'NOT NULL', + 'default': 'NULL' + } + if field.null: + params['nullity'] = 'NULL' + + sql_templates = [ + (self.alter_string_set_type, params), + (self.alter_string_set_default, params), + ] + if not field.null and field.has_default(): + # Use default for rows that had nulls. To support the case where + # the new default does not fit the old type, we need to first change + # the column type to the new type, but null=True; then set the default; + # then complete the type change. + def change_params(**kw): + "A little helper for non-destructively changing the params" + p = params.copy() + p.update(kw) + return p + sql_templates[:0] = [ + (self.alter_string_set_type, change_params(nullity='NULL')), + (self.alter_string_update_nulls_to_default, change_params(default=self._default_value_workaround(field.get_default()))), + ] + + + # drop CHECK constraints. Make sure this is executed before the ALTER TABLE statements + # generated above, since those statements recreate the constraints we delete here. + check_constraints = self._constraints_affecting_columns(table_name, [name], "CHECK") + for constraint in check_constraints: + self.execute(self.delete_check_sql % { + 'table': self.quote_name(table_name), + 'constraint': self.quote_name(constraint), + }) + + for sql_template, params in sql_templates: + try: + self.execute(sql_template % params, print_all_errors=False) + except DatabaseError as exc: + description = str(exc) + # Oracle complains if a column is already NULL/NOT NULL + if 'ORA-01442' in description or 'ORA-01451' in description: + # so we just drop NULL/NOT NULL part from target sql and retry + params['nullity'] = '' + sql = sql_template % params + self.execute(sql) + # Oracle also has issues if we try to change a regular column + # to a LOB or vice versa (also REF, object, VARRAY or nested + # table, but these don't come up much in Django apps) + elif 'ORA-22858' in description or 'ORA-22859' in description: + self._alter_column_lob_workaround(table_name, name, field) + else: + self._print_sql_error(exc, sql_template % params) + raise + + def _alter_column_lob_workaround(self, table_name, name, field): + """ + Oracle refuses to change a column type from/to LOB to/from a regular + column. In Django, this shows up when the field is changed from/to + a TextField. 
+ What we need to do instead is: + - Rename the original column + - Add the desired field as new + - Update the table to transfer values from old to new + - Drop old column + """ + renamed = self._generate_temp_name(name) + self.rename_column(table_name, name, renamed) + self.add_column(table_name, name, field, keep_default=False) + self.execute("UPDATE %s set %s=%s" % ( + self.quote_name(table_name), + self.quote_name(name), + self.quote_name(renamed), + )) + self.delete_column(table_name, renamed) + + def _generate_temp_name(self, for_name): + suffix = hex(hash(for_name)).upper()[1:] + return self.normalize_name(for_name + "_" + suffix) + + @generic.copy_column_constraints #TODO: Appears to be nulled by the delete decorator below... + @generic.delete_column_constraints + def rename_column(self, table_name, old, new): + if old == new: + # Short-circuit out + return [] + self.execute('ALTER TABLE %s RENAME COLUMN %s TO %s;' % ( + self.quote_name(table_name), + self.quote_name(old), + self.quote_name(new), + )) + + @generic.invalidate_table_constraints + def add_column(self, table_name, name, field, keep_default=False): + sql = self.column_sql(table_name, name, field) + sql = self.adj_column_sql(sql) + + if sql: + params = ( + self.quote_name(table_name), + sql + ) + sql = self.add_column_string % params + self.execute(sql) + + # Now, drop the default if we need to + if not keep_default and field.default is not None: + field.default = NOT_PROVIDED + self.alter_column(table_name, name, field, explicit_name=False) + + def delete_column(self, table_name, name): + return super(DatabaseOperations, self).delete_column(self.quote_name(table_name), name) + + def lookup_constraint(self, db_name, table_name, column_name=None): + if column_name: + # Column names in the constraint cache come from the database, + # make sure we use the properly shortened/uppercased version + # for lookup. + column_name = self.normalize_name(column_name) + return super(DatabaseOperations, self).lookup_constraint(db_name, table_name, column_name) + + def _constraints_affecting_columns(self, table_name, columns, type="UNIQUE"): + if columns: + columns = [self.normalize_name(c) for c in columns] + return super(DatabaseOperations, self)._constraints_affecting_columns(table_name, columns, type) + + def _field_sanity(self, field): + """ + This particular override stops us sending DEFAULTs for BooleanField. 
+ """ + if isinstance(field, models.BooleanField) and field.has_default(): + field.default = int(field.to_python(field.get_default())) + return field + + def _default_value_workaround(self, value): + from datetime import date,time,datetime + if isinstance(value, (date,time,datetime)): + return "'%s'" % value + else: + return super(DatabaseOperations, self)._default_value_workaround(value) + + def _fill_constraint_cache(self, db_name, table_name): + self._constraint_cache.setdefault(db_name, {}) + self._constraint_cache[db_name][table_name] = {} + + rows = self.execute(""" + SELECT user_cons_columns.constraint_name, + user_cons_columns.column_name, + user_constraints.constraint_type + FROM user_constraints + JOIN user_cons_columns ON + user_constraints.table_name = user_cons_columns.table_name AND + user_constraints.constraint_name = user_cons_columns.constraint_name + WHERE user_constraints.table_name = '%s' + """ % self.normalize_name(table_name)) + + for constraint, column, kind in rows: + self._constraint_cache[db_name][table_name].setdefault(column, set()) + self._constraint_cache[db_name][table_name][column].add((self.constraints_dict[kind], constraint)) + return diff --git a/awx/lib/site-packages/south/db/postgresql_psycopg2.py b/awx/lib/site-packages/south/db/postgresql_psycopg2.py new file mode 100644 index 0000000000..d6c63c47a3 --- /dev/null +++ b/awx/lib/site-packages/south/db/postgresql_psycopg2.py @@ -0,0 +1,96 @@ +from __future__ import print_function + +import uuid +from django.db.backends.util import truncate_name +from south.db import generic + + +class DatabaseOperations(generic.DatabaseOperations): + + """ + PsycoPG2 implementation of database operations. + """ + + backend_name = "postgres" + + def create_index_name(self, table_name, column_names, suffix=""): + """ + Generate a unique name for the index + + Django's logic for naming field indexes is different in the + postgresql_psycopg2 backend, so we follow that for single-column + indexes. + """ + + if len(column_names) == 1: + return truncate_name( + '%s_%s%s' % (table_name, column_names[0], suffix), + self._get_connection().ops.max_name_length() + ) + return super(DatabaseOperations, self).create_index_name(table_name, column_names, suffix) + + @generic.copy_column_constraints + @generic.delete_column_constraints + def rename_column(self, table_name, old, new): + if old == new: + # Short-circuit out + return [] + self.execute('ALTER TABLE %s RENAME COLUMN %s TO %s;' % ( + self.quote_name(table_name), + self.quote_name(old), + self.quote_name(new), + )) + + @generic.invalidate_table_constraints + def rename_table(self, old_table_name, table_name): + "will rename the table and an associated ID sequence and primary key index" + # First, rename the table + generic.DatabaseOperations.rename_table(self, old_table_name, table_name) + # Then, try renaming the ID sequence + # (if you're using other AutoFields... your problem, unfortunately) + + if self.execute( + """ + SELECT 1 + FROM information_schema.sequences + WHERE sequence_name = %s + """, + [old_table_name + '_id_seq'] + ): + generic.DatabaseOperations.rename_table(self, old_table_name + "_id_seq", table_name + "_id_seq") + + # Rename primary key index, will not rename other indices on + # the table that are used by django (e.g. foreign keys). Until + # figure out how, you need to do this yourself. 
+ + pkey_index_names = self.execute( + """ + SELECT pg_index.indexrelid::regclass + FROM pg_index, pg_attribute + WHERE + indrelid = %s::regclass AND + pg_attribute.attrelid = indrelid AND + pg_attribute.attnum = any(pg_index.indkey) + AND indisprimary + """, + [table_name] + ) + if old_table_name + "_pkey" in pkey_index_names: + generic.DatabaseOperations.rename_table(self, old_table_name + "_pkey", table_name + "_pkey") + + def rename_index(self, old_index_name, index_name): + "Rename an index individually" + generic.DatabaseOperations.rename_table(self, old_index_name, index_name) + + def _default_value_workaround(self, value): + "Support for UUIDs on psql" + if isinstance(value, uuid.UUID): + return str(value) + else: + return super(DatabaseOperations, self)._default_value_workaround(value) + + def _db_type_for_alter_column(self, field): + return self._db_positive_type_for_alter_column(DatabaseOperations, field) + + def _alter_add_column_mods(self, field, name, params, sqls): + return self._alter_add_positive_check(DatabaseOperations, field, name, params, sqls) diff --git a/awx/lib/site-packages/south/db/sql_server/__init__.py b/awx/lib/site-packages/south/db/sql_server/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/south/db/sql_server/pyodbc.py b/awx/lib/site-packages/south/db/sql_server/pyodbc.py new file mode 100644 index 0000000000..1b200ad13b --- /dev/null +++ b/awx/lib/site-packages/south/db/sql_server/pyodbc.py @@ -0,0 +1,447 @@ +from datetime import date, datetime, time +from warnings import warn +from django.db import models +from django.db.models import fields +from south.db import generic +from south.db.generic import delete_column_constraints, invalidate_table_constraints, copy_column_constraints +from south.exceptions import ConstraintDropped +from south.utils.py3 import string_types +try: + from django.utils.encoding import smart_text # Django >= 1.5 +except ImportError: + from django.utils.encoding import smart_unicode as smart_text # Django < 1.5 +from django.core.management.color import no_style + +class DatabaseOperations(generic.DatabaseOperations): + """ + django-pyodbc (sql_server.pyodbc) implementation of database operations. 
+ """ + + backend_name = "pyodbc" + + add_column_string = 'ALTER TABLE %s ADD %s;' + alter_string_set_type = 'ALTER COLUMN %(column)s %(type)s' + alter_string_set_null = 'ALTER COLUMN %(column)s %(type)s NULL' + alter_string_drop_null = 'ALTER COLUMN %(column)s %(type)s NOT NULL' + + allows_combined_alters = False + + drop_index_string = 'DROP INDEX %(index_name)s ON %(table_name)s' + drop_constraint_string = 'ALTER TABLE %(table_name)s DROP CONSTRAINT %(constraint_name)s' + delete_column_string = 'ALTER TABLE %s DROP COLUMN %s' + + #create_check_constraint_sql = "ALTER TABLE %(table)s " + \ + # generic.DatabaseOperations.add_check_constraint_fragment + create_foreign_key_sql = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s " + \ + "FOREIGN KEY (%(column)s) REFERENCES %(target)s" + create_unique_sql = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s UNIQUE (%(columns)s)" + + + default_schema_name = "dbo" + + has_booleans = False + + + @delete_column_constraints + def delete_column(self, table_name, name): + q_table_name, q_name = (self.quote_name(table_name), self.quote_name(name)) + + # Zap the constraints + for const in self._find_constraints_for_column(table_name,name): + params = {'table_name':q_table_name, 'constraint_name': const} + sql = self.drop_constraint_string % params + self.execute(sql, []) + + # Zap the indexes + for ind in self._find_indexes_for_column(table_name,name): + params = {'table_name':q_table_name, 'index_name': ind} + sql = self.drop_index_string % params + self.execute(sql, []) + + # Zap default if exists + drop_default = self.drop_column_default_sql(table_name, name) + if drop_default: + sql = "ALTER TABLE [%s] %s" % (table_name, drop_default) + self.execute(sql, []) + + # Finally zap the column itself + self.execute(self.delete_column_string % (q_table_name, q_name), []) + + def _find_indexes_for_column(self, table_name, name): + "Find the indexes that apply to a column, needed when deleting" + + sql = """ + SELECT si.name, si.id, sik.colid, sc.name + FROM dbo.sysindexes si WITH (NOLOCK) + INNER JOIN dbo.sysindexkeys sik WITH (NOLOCK) + ON sik.id = si.id + AND sik.indid = si.indid + INNER JOIN dbo.syscolumns sc WITH (NOLOCK) + ON si.id = sc.id + AND sik.colid = sc.colid + WHERE si.indid !=0 + AND si.id = OBJECT_ID('%s') + AND sc.name = '%s' + """ + idx = self.execute(sql % (table_name, name), []) + return [i[0] for i in idx] + + + def _find_constraints_for_column(self, table_name, name, just_names=True): + """ + Find the constraints that apply to a column, needed when deleting. Defaults not included. + This is more general than the parent _constraints_affecting_columns, as on MSSQL this + includes PK and FK constraints. 
+ """ + + sql = """ + SELECT CC.[CONSTRAINT_NAME] + ,TC.[CONSTRAINT_TYPE] + ,CHK.[CHECK_CLAUSE] + ,RFD.TABLE_SCHEMA + ,RFD.TABLE_NAME + ,RFD.COLUMN_NAME + -- used for normalized names + ,CC.TABLE_NAME + ,CC.COLUMN_NAME + FROM [INFORMATION_SCHEMA].[TABLE_CONSTRAINTS] TC + JOIN INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE CC + ON TC.CONSTRAINT_CATALOG = CC.CONSTRAINT_CATALOG + AND TC.CONSTRAINT_SCHEMA = CC.CONSTRAINT_SCHEMA + AND TC.CONSTRAINT_NAME = CC.CONSTRAINT_NAME + LEFT JOIN INFORMATION_SCHEMA.CHECK_CONSTRAINTS CHK + ON CHK.CONSTRAINT_CATALOG = CC.CONSTRAINT_CATALOG + AND CHK.CONSTRAINT_SCHEMA = CC.CONSTRAINT_SCHEMA + AND CHK.CONSTRAINT_NAME = CC.CONSTRAINT_NAME + AND 'CHECK' = TC.CONSTRAINT_TYPE + LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS REF + ON REF.CONSTRAINT_CATALOG = CC.CONSTRAINT_CATALOG + AND REF.CONSTRAINT_SCHEMA = CC.CONSTRAINT_SCHEMA + AND REF.CONSTRAINT_NAME = CC.CONSTRAINT_NAME + AND 'FOREIGN KEY' = TC.CONSTRAINT_TYPE + LEFT JOIN INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE RFD + ON RFD.CONSTRAINT_CATALOG = REF.UNIQUE_CONSTRAINT_CATALOG + AND RFD.CONSTRAINT_SCHEMA = REF.UNIQUE_CONSTRAINT_SCHEMA + AND RFD.CONSTRAINT_NAME = REF.UNIQUE_CONSTRAINT_NAME + WHERE CC.CONSTRAINT_CATALOG = CC.TABLE_CATALOG + AND CC.CONSTRAINT_SCHEMA = CC.TABLE_SCHEMA + AND CC.TABLE_CATALOG = %s + AND CC.TABLE_SCHEMA = %s + AND CC.TABLE_NAME = %s + AND CC.COLUMN_NAME = %s + """ + db_name = self._get_setting('name') + schema_name = self._get_schema_name() + table = self.execute(sql, [db_name, schema_name, table_name, name]) + + if just_names: + return [r[0] for r in table] + + all = {} + for r in table: + cons_name, type = r[:2] + if type=='PRIMARY KEY' or type=='UNIQUE': + cons = all.setdefault(cons_name, (type,[])) + sql = ''' + SELECT COLUMN_NAME + FROM INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE RFD + WHERE RFD.CONSTRAINT_CATALOG = %s + AND RFD.CONSTRAINT_SCHEMA = %s + AND RFD.TABLE_NAME = %s + AND RFD.CONSTRAINT_NAME = %s + ''' + columns = self.execute(sql, [db_name, schema_name, table_name, cons_name]) + cons[1].extend(col for col, in columns) + elif type=='CHECK': + cons = (type, r[2]) + elif type=='FOREIGN KEY': + if cons_name in all: + raise NotImplementedError("Multiple-column foreign keys are not supported") + else: + cons = (type, r[3:6]) + else: + raise NotImplementedError("Don't know how to handle constraints of type "+ type) + all[cons_name] = cons + return all + + @invalidate_table_constraints + def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False): + """ + Alters the given column name so it will match the given field. + Note that conversion between the two by the database must be possible. + Will not automatically add _id by default; to have this behavour, pass + explicit_name=False. + + @param table_name: The name of the table to add the column to + @param name: The name of the column to alter + @param field: The new field definition to use + """ + self._fix_field_definition(field) + + if not ignore_constraints: + qn = self.quote_name + sch = qn(self._get_schema_name()) + tab = qn(table_name) + table = ".".join([sch, tab]) + try: + self.delete_foreign_key(table_name, name) + except ValueError: + # no FK constraint on this field. That's OK. 
+                pass
+            constraints = self._find_constraints_for_column(table_name, name, False)
+            for constraint in constraints.keys():
+                params = dict(table_name = table,
+                              constraint_name = qn(constraint))
+                sql = self.drop_constraint_string % params
+                self.execute(sql, [])
+
+        ret_val = super(DatabaseOperations, self).alter_column(table_name, name, field, explicit_name, ignore_constraints=True)
+
+        if not ignore_constraints:
+            for cname, (ctype, args) in constraints.items():
+                params = dict(table = table,
+                              constraint = qn(cname))
+                if ctype=='UNIQUE':
+                    params['columns'] = ", ".join(map(qn, args))
+                    sql = self.create_unique_sql % params
+                elif ctype=='PRIMARY KEY':
+                    params['columns'] = ", ".join(map(qn, args))
+                    sql = self.create_primary_key_string % params
+                elif ctype=='FOREIGN KEY':
+                    # Foreign keys taken care of below
+                    continue
+                    #target = "%s.%s(%s)" % tuple(map(qn, args))
+                    #params.update(column = qn(name), target = target)
+                    #sql = self.create_foreign_key_sql % params
+                elif ctype=='CHECK':
+                    warn(ConstraintDropped("CHECK " + args, table_name, name))
+                    continue
+                    #TODO: Some check constraints should be restored; but not before the generic
+                    #      backend restores them.
+                    #params['check'] = args
+                    #sql = self.create_check_constraint_sql % params
+                else:
+                    raise NotImplementedError("Don't know how to handle constraints of type " + ctype)
+                self.execute(sql, [])
+            # Create foreign key if necessary
+            if field.rel and self.supports_foreign_keys:
+                self.execute(
+                    self.foreign_key_sql(
+                        table_name,
+                        field.column,
+                        field.rel.to._meta.db_table,
+                        field.rel.to._meta.get_field(field.rel.field_name).column
+                    )
+                )
+            model = self.mock_model("FakeModelForIndexCreation", table_name)
+            for stmt in self._get_connection().creation.sql_indexes_for_field(model, field, no_style()):
+                self.execute(stmt)
+
+        return ret_val
+
+    def _alter_set_defaults(self, field, name, params, sqls):
+        "Subcommand of alter_column that sets default values (overrideable)"
+        # First drop the current default if one exists
+        table_name = self.quote_name(params['table_name'])
+        drop_default = self.drop_column_default_sql(table_name, name)
+        if drop_default:
+            sqls.append((drop_default, []))
+
+        # Next, set any default
+        if field.has_default():
+            default = field.get_default()
+            literal = self._value_to_unquoted_literal(field, default)
+            sqls.append(('ADD DEFAULT %s for %s' % (self._quote_string(literal), self.quote_name(name),), []))
+
+    def _value_to_unquoted_literal(self, field, value):
+        # Start with the field's own translation
+        conn = self._get_connection()
+        value = field.get_db_prep_save(value, connection=conn)
+        # This is still a Python object -- nobody expects to need a literal.
+        if isinstance(value, string_types):
+            return smart_text(value)
+        elif isinstance(value, (date, time, datetime)):
+            return value.isoformat()
+        else:
+            #TODO: Anybody else needs special translations?
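+            # Hedged example (not upstream code): any other type falls through
+            # to plain str(), e.g. a Decimal default of Decimal('3.50') becomes
+            # the literal text 3.50 for the ADD DEFAULT clause built above.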
+            return str(value)
+
+    def _default_value_workaround(self, value):
+        if isinstance(value, (date, time, datetime)):
+            return value.isoformat()
+        else:
+            return super(DatabaseOperations, self)._default_value_workaround(value)
+
+    def _quote_string(self, s):
+        return "'" + s.replace("'", "''") + "'"
+
+    def drop_column_default_sql(self, table_name, name, q_name=None):
+        "MSSQL specific drop default, which is a pain"
+        sql = """
+        SELECT object_name(cdefault)
+        FROM syscolumns
+        WHERE id = object_id('%s')
+        AND name = '%s'
+        """
+        cons = self.execute(sql % (table_name, name), [])
+        if cons and cons[0] and cons[0][0]:
+            return "DROP CONSTRAINT %s" % cons[0][0]
+        return None
+
+    def _fix_field_definition(self, field):
+        if isinstance(field, (fields.BooleanField, fields.NullBooleanField)):
+            if field.default == True:
+                field.default = 1
+            if field.default == False:
+                field.default = 0
+
+    # This is copied from South's generic add_column, with two modifications:
+    # 1) The sql-server-specific call to _fix_field_definition
+    # 2) Removing a default, when needed, by calling drop_default and not the more general alter_column
+    @invalidate_table_constraints
+    def add_column(self, table_name, name, field, keep_default=False):
+        """
+        Adds the column 'name' to the table 'table_name'.
+        Uses the 'field' parameter, a django.db.models.fields.Field instance,
+        to generate the necessary sql.
+
+        @param table_name: The name of the table to add the column to
+        @param name: The name of the column to add
+        @param field: The field to use
+        """
+        self._fix_field_definition(field)
+        sql = self.column_sql(table_name, name, field)
+        if sql:
+            params = (
+                self.quote_name(table_name),
+                sql,
+            )
+            sql = self.add_column_string % params
+            self.execute(sql)
+
+            # Now, drop the default if we need to
+            if not keep_default and field.default is not None:
+                field.default = fields.NOT_PROVIDED
+                #self.alter_column(table_name, name, field, explicit_name=False, ignore_constraints=True)
+                self.drop_default(table_name, name, field)
+
+    @invalidate_table_constraints
+    def drop_default(self, table_name, name, field):
+        fragment = self.drop_column_default_sql(table_name, name)
+        if fragment:
+            table_name = self.quote_name(table_name)
+            sql = " ".join(["ALTER TABLE", table_name, fragment])
+            self.execute(sql)
+
+    @invalidate_table_constraints
+    def create_table(self, table_name, field_defs):
+        # Tweak stuff as needed
+        for _, f in field_defs:
+            self._fix_field_definition(f)
+
+        # Run
+        super(DatabaseOperations, self).create_table(table_name, field_defs)
+
+    def _find_referencing_fks(self, table_name):
+        "MSSQL does not support cascading FKs when dropping tables, so we have to find and drop the referencing FKs ourselves."
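+        # Sketch of how this is used (hedged note, not upstream code):
+        # delete_table(cascade=True) below runs this query, then drops each
+        # referencing FK constraint before issuing the DROP TABLE.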
+
+        # FK -- Foreign Keys
+        # UCTU -- Unique Constraints Table Usage
+        # FKTU -- Foreign Key Table Usage
+        # (last two are both really CONSTRAINT_TABLE_USAGE, different join conditions)
+        sql = """
+        SELECT FKTU.TABLE_SCHEMA as REFING_TABLE_SCHEMA,
+               FKTU.TABLE_NAME as REFING_TABLE_NAME,
+               FK.[CONSTRAINT_NAME] as FK_NAME
+        FROM [INFORMATION_SCHEMA].[REFERENTIAL_CONSTRAINTS] FK
+        JOIN [INFORMATION_SCHEMA].[CONSTRAINT_TABLE_USAGE] UCTU
+          ON FK.UNIQUE_CONSTRAINT_CATALOG = UCTU.CONSTRAINT_CATALOG and
+             FK.UNIQUE_CONSTRAINT_NAME = UCTU.CONSTRAINT_NAME and
+             FK.UNIQUE_CONSTRAINT_SCHEMA = UCTU.CONSTRAINT_SCHEMA
+        JOIN [INFORMATION_SCHEMA].[CONSTRAINT_TABLE_USAGE] FKTU
+          ON FK.CONSTRAINT_CATALOG = FKTU.CONSTRAINT_CATALOG and
+             FK.CONSTRAINT_NAME = FKTU.CONSTRAINT_NAME and
+             FK.CONSTRAINT_SCHEMA = FKTU.CONSTRAINT_SCHEMA
+        WHERE FK.CONSTRAINT_CATALOG = %s
+          AND UCTU.TABLE_SCHEMA = %s -- REFD_TABLE_SCHEMA
+          AND UCTU.TABLE_NAME = %s -- REFD_TABLE_NAME
+        """
+        db_name = self._get_setting('name')
+        schema_name = self._get_schema_name()
+        return self.execute(sql, [db_name, schema_name, table_name])
+
+    @invalidate_table_constraints
+    def delete_table(self, table_name, cascade=True):
+        """
+        Deletes the table 'table_name'.
+        """
+        if cascade:
+            refing = self._find_referencing_fks(table_name)
+            for schema, table, constraint in refing:
+                table = ".".join(map(self.quote_name, [schema, table]))
+                params = dict(table_name = table,
+                              constraint_name = self.quote_name(constraint))
+                sql = self.drop_constraint_string % params
+                self.execute(sql, [])
+            cascade = False
+        super(DatabaseOperations, self).delete_table(table_name, cascade)
+
+    @copy_column_constraints
+    @delete_column_constraints
+    def rename_column(self, table_name, old, new):
+        """
+        Renames the column of 'table_name' from 'old' to 'new'.
+        WARNING - This isn't transactional on MSSQL!
+        """
+        if old == new:
+            # No Operation
+            return
+        # Examples on the MS site show the table name not being quoted...
+        params = (table_name, self.quote_name(old), self.quote_name(new))
+        self.execute("EXEC sp_rename '%s.%s', %s, 'COLUMN'" % params)
+
+    @invalidate_table_constraints
+    def rename_table(self, old_table_name, table_name):
+        """
+        Renames the table 'old_table_name' to 'table_name'.
+        WARNING - This isn't transactional on MSSQL!
+ """ + if old_table_name == table_name: + # No Operation + return + params = (self.quote_name(old_table_name), self.quote_name(table_name)) + self.execute('EXEC sp_rename %s, %s' % params) + + def _db_type_for_alter_column(self, field): + return self._db_positive_type_for_alter_column(DatabaseOperations, field) + + def _alter_add_column_mods(self, field, name, params, sqls): + return self._alter_add_positive_check(DatabaseOperations, field, name, params, sqls) + + @invalidate_table_constraints + def delete_foreign_key(self, table_name, column): + super(DatabaseOperations, self).delete_foreign_key(table_name, column) + # A FK also implies a non-unique index + find_index_sql = """ + SELECT i.name -- s.name, t.name, c.name + FROM sys.tables t + INNER JOIN sys.schemas s ON t.schema_id = s.schema_id + INNER JOIN sys.indexes i ON i.object_id = t.object_id + INNER JOIN sys.index_columns ic ON ic.object_id = t.object_id + INNER JOIN sys.columns c ON c.object_id = t.object_id + AND ic.column_id = c.column_id + WHERE i.is_unique=0 AND i.is_primary_key=0 AND i.is_unique_constraint=0 + AND s.name = %s + AND t.name = %s + AND c.name = %s + """ + schema = self._get_schema_name() + indexes = self.execute(find_index_sql, [schema, table_name, column]) + qn = self.quote_name + for index in (i[0] for i in indexes if i[0]): # "if i[0]" added because an empty name may return + self.execute("DROP INDEX %s on %s.%s" % (qn(index), qn(schema), qn(table_name) )) + diff --git a/awx/lib/site-packages/south/db/sqlite3.py b/awx/lib/site-packages/south/db/sqlite3.py new file mode 100644 index 0000000000..db45511456 --- /dev/null +++ b/awx/lib/site-packages/south/db/sqlite3.py @@ -0,0 +1,272 @@ +from south.db import generic + + +class DatabaseOperations(generic.DatabaseOperations): + + """ + SQLite3 implementation of database operations. + """ + + backend_name = "sqlite3" + + # SQLite ignores several constraints. I wish I could. + supports_foreign_keys = False + has_check_constraints = False + has_booleans = False + + def add_column(self, table_name, name, field, *args, **kwds): + """ + Adds a column. + """ + # If it's not nullable, and has no default, raise an error (SQLite is picky) + if (not field.null and + (not field.has_default() or field.get_default() is None) and + not field.empty_strings_allowed): + raise ValueError("You cannot add a null=False column without a default value.") + # Initialise the field. + field.set_attributes_from_name(name) + # We add columns by remaking the table; even though SQLite supports + # adding columns, it doesn't support adding PRIMARY KEY or UNIQUE cols. 
+        # We define the field with no default; the default is instead used
+        # below to fill in the column on the remade table
+        field_default = None
+        if not getattr(field, '_suppress_default', False):
+            default = field.get_default()
+            if default is not None and default!='':
+                field_default = "'%s'" % field.get_db_prep_save(default, connection=self._get_connection())
+        field._suppress_default = True
+        self._remake_table(table_name, added={
+            field.column: (self._column_sql_for_create(table_name, name, field, False), field_default)
+        })
+
+    def _get_full_table_description(self, connection, cursor, table_name):
+        cursor.execute('PRAGMA table_info(%s)' % connection.ops.quote_name(table_name))
+        # cid, name, type, notnull, dflt_value, pk
+        return [{'name': field[1],
+                 'type': field[2],
+                 'null_ok': not field[3],
+                 'dflt_value': field[4],
+                 'pk': field[5]     # undocumented
+                 } for field in cursor.fetchall()]
+
+    @generic.invalidate_table_constraints
+    def _remake_table(self, table_name, added={}, renames={}, deleted=[], altered={}, primary_key_override=None, uniques_deleted=[]):
+        """
+        Given a table and sets of changes (additions, renames, deletions,
+        alterations), recreates it with the modified schema.
+        """
+        # Dry runs get skipped completely
+        if self.dry_run:
+            return
+        # Temporary table's name
+        temp_name = "_south_new_" + table_name
+        # Work out the (possibly new) definitions of each column
+        definitions = {}
+        cursor = self._get_connection().cursor()
+        # Get the index descriptions
+        indexes = self._get_connection().introspection.get_indexes(cursor, table_name)
+        multi_indexes = self._get_multi_indexes(table_name)
+        # Work out new column defs.
+        for column_info in self._get_full_table_description(self._get_connection(), cursor, table_name):
+            name = column_info['name']
+            if name in deleted:
+                continue
+            # Get the type, ignoring PRIMARY KEY (we need to be consistent)
+            type = column_info['type'].replace("PRIMARY KEY", "")
+            # Add on primary key, not null or unique if needed.
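+            # Hedged illustration (not upstream code): after the checks below,
+            # an AutoField column ends up as e.g. "integer PRIMARY KEY" and a
+            # required unique name column as "varchar(255) NOT NULL UNIQUE".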
+ if (primary_key_override and primary_key_override == name) or \ + (not primary_key_override and name in indexes and + indexes[name]['primary_key']): + type += " PRIMARY KEY" + elif not column_info['null_ok']: + type += " NOT NULL" + if (name in indexes and indexes[name]['unique'] and + name not in uniques_deleted): + type += " UNIQUE" + if column_info['dflt_value'] is not None: + type += " DEFAULT " + column_info['dflt_value'] + # Deal with a rename + if name in renames: + name = renames[name] + # Add to the defs + definitions[name] = type + # Add on altered columns + for name, type in altered.items(): + if (primary_key_override and primary_key_override == name) or \ + (not primary_key_override and name in indexes and + indexes[name]['primary_key']): + type += " PRIMARY KEY" + if (name in indexes and indexes[name]['unique'] and + name not in uniques_deleted): + type += " UNIQUE" + definitions[name] = type + # Add on the new columns + for name, (type,_) in added.items(): + if (primary_key_override and primary_key_override == name): + type += " PRIMARY KEY" + definitions[name] = type + # Alright, Make the table + self.execute("CREATE TABLE %s (%s)" % ( + self.quote_name(temp_name), + ", ".join(["%s %s" % (self.quote_name(cname), ctype) for cname, ctype in definitions.items()]), + )) + # Copy over the data + self._copy_data(table_name, temp_name, renames, added) + # Delete the old table, move our new one over it + self.delete_table(table_name) + self.rename_table(temp_name, table_name) + # Recreate multi-valued indexes + # We can't do that before since it's impossible to rename indexes + # and index name scope is global + self._make_multi_indexes(table_name, multi_indexes, renames=renames, deleted=deleted, uniques_deleted=uniques_deleted) + + def _copy_data(self, src, dst, field_renames={}, added={}): + "Used to copy data into a new table" + # Make a list of all the fields to select + cursor = self._get_connection().cursor() + src_fields = [column_info[0] for column_info in self._get_connection().introspection.get_table_description(cursor, src)] + dst_fields = [column_info[0] for column_info in self._get_connection().introspection.get_table_description(cursor, dst)] + src_fields_new = [] + dst_fields_new = [] + for field in src_fields: + if field in field_renames: + dst_fields_new.append(self.quote_name(field_renames[field])) + elif field in dst_fields: + dst_fields_new.append(self.quote_name(field)) + else: + continue + src_fields_new.append(self.quote_name(field)) + for field, (_,default) in added.items(): + if default is not None and default!='': + field = self.quote_name(field) + src_fields_new.append("%s as %s" % (default, field)) + dst_fields_new.append(field) + # Copy over the data + self.execute("INSERT INTO %s (%s) SELECT %s FROM %s;" % ( + self.quote_name(dst), + ', '.join(dst_fields_new), + ', '.join(src_fields_new), + self.quote_name(src), + )) + + def _create_unique(self, table_name, columns): + self.execute("CREATE UNIQUE INDEX %s ON %s(%s);" % ( + self.quote_name('%s_%s' % (table_name, '__'.join(columns))), + self.quote_name(table_name), + ', '.join(self.quote_name(c) for c in columns), + )) + + def _get_multi_indexes(self, table_name): + indexes = [] + cursor = self._get_connection().cursor() + cursor.execute('PRAGMA index_list(%s)' % self.quote_name(table_name)) + # seq, name, unique + for index, unique in [(field[1], field[2]) for field in cursor.fetchall()]: + if not unique: + continue + cursor.execute('PRAGMA index_info(%s)' % self.quote_name(index)) + info = 
cursor.fetchall()
+            if len(info) == 1:
+                continue
+            columns = []
+            for field in info:
+                columns.append(field[2])
+            indexes.append(columns)
+        return indexes
+
+    def _make_multi_indexes(self, table_name, indexes, deleted=[], renames={}, uniques_deleted=[]):
+        for index in indexes:
+            columns = []
+
+            for name in index:
+                # Handle deletion
+                if name in deleted:
+                    columns = []
+                    break
+
+                # Handle renames
+                if name in renames:
+                    name = renames[name]
+                columns.append(name)
+
+            if columns and set(columns) != set(uniques_deleted):
+                self._create_unique(table_name, columns)
+
+    def _column_sql_for_create(self, table_name, name, field, explicit_name=True):
+        "Given a field and its name, returns the full type for the CREATE TABLE (without unique/pk)"
+        field.set_attributes_from_name(name)
+        if not explicit_name:
+            name = field.db_column
+        else:
+            field.column = name
+        sql = self.column_sql(table_name, name, field, with_name=False, field_prepared=True)
+        # Remove keywords we don't want (this should be type only, not constraint)
+        if sql:
+            sql = sql.replace("PRIMARY KEY", "")
+        return sql
+
+    def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False):
+        """
+        Changes a column's SQL definition.
+
+        Note that this sqlite3 implementation ignores the ignore_constraints argument.
+        The argument is accepted for API compatibility with the generic
+        DatabaseOperations.alter_column() method.
+        """
+        # Change nulls to default if needed
+        if not field.null and field.has_default():
+            params = {
+                "column": self.quote_name(name),
+                "table_name": self.quote_name(table_name)
+            }
+            self._update_nulls_to_default(params, field)
+        # Remake the table correctly
+        field._suppress_default = True
+        self._remake_table(table_name, altered={
+            name: self._column_sql_for_create(table_name, name, field, explicit_name),
+        })
+
+    def delete_column(self, table_name, column_name):
+        """
+        Deletes a column.
+        """
+        self._remake_table(table_name, deleted=[column_name])
+
+    def rename_column(self, table_name, old, new):
+        """
+        Renames a column from one name to another.
+        """
+        self._remake_table(table_name, renames={old: new})
+
+    def create_unique(self, table_name, columns):
+        """
+        Creates a unique index on the given columns.
+        """
+        self._create_unique(table_name, columns)
+
+    def delete_unique(self, table_name, columns):
+        """
+        Deletes a unique index.
+        """
+        self._remake_table(table_name, uniques_deleted=columns)
+
+    def create_primary_key(self, table_name, columns):
+        if not isinstance(columns, (list, tuple)):
+            columns = [columns]
+        assert len(columns) == 1, "SQLite backend does not support multi-column primary keys"
+        self._remake_table(table_name, primary_key_override=columns[0])
+
+    # Not implemented yet.
+    def delete_primary_key(self, table_name):
+        # By passing True in, we make sure we wipe all existing PKs.
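+        # Hedged note (not upstream code): True never compares equal to any
+        # column name, so no column in the remade table is given PRIMARY KEY --
+        # that is how the override below wipes the existing PK.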
+ self._remake_table(table_name, primary_key_override=True) + + # No cascades on deletes + def delete_table(self, table_name, cascade=True): + generic.DatabaseOperations.delete_table(self, table_name, False) + + def _default_value_workaround(self, default): + if default == True: + default = 1 + elif default == False: + default = 0 + return default diff --git a/awx/lib/site-packages/south/exceptions.py b/awx/lib/site-packages/south/exceptions.py new file mode 100644 index 0000000000..e501d91861 --- /dev/null +++ b/awx/lib/site-packages/south/exceptions.py @@ -0,0 +1,155 @@ +from __future__ import print_function + +from traceback import format_exception, format_exc + +class SouthError(RuntimeError): + pass + +class SouthWarning(RuntimeWarning): + pass + +class BrokenMigration(SouthError): + def __init__(self, migration, exc_info): + self.migration = migration + self.exc_info = exc_info + if self.exc_info: + self.traceback = ''.join(format_exception(*self.exc_info)) + else: + self.traceback = format_exc() + + def __str__(self): + return ("While loading migration '%(migration)s':\n" + '%(traceback)s' % self.__dict__) + + +class UnknownMigration(BrokenMigration): + def __str__(self): + return ("Migration '%(migration)s' probably doesn't exist.\n" + '%(traceback)s' % self.__dict__) + + +class InvalidMigrationModule(SouthError): + def __init__(self, application, module): + self.application = application + self.module = module + + def __str__(self): + return ('The migration module specified for %(application)s, %(module)r, is invalid; the parent module does not exist.' % self.__dict__) + + +class NoMigrations(SouthError): + def __init__(self, application): + self.application = application + + def __str__(self): + return "Application '%(application)s' has no migrations." % self.__dict__ + + +class MultiplePrefixMatches(SouthError): + def __init__(self, prefix, matches): + self.prefix = prefix + self.matches = matches + + def __str__(self): + self.matches_list = "\n ".join([str(m) for m in self.matches]) + return ("Prefix '%(prefix)s' matches more than one migration:\n" + " %(matches_list)s") % self.__dict__ + + +class GhostMigrations(SouthError): + def __init__(self, ghosts): + self.ghosts = ghosts + + def __str__(self): + self.ghosts_list = "\n ".join([str(m) for m in self.ghosts]) + return ("\n\n ! These migrations are in the database but not on disk:\n" + " %(ghosts_list)s\n" + " ! I'm not trusting myself; either fix this yourself by fiddling\n" + " ! with the south_migrationhistory table, or pass --delete-ghost-migrations\n" + " ! to South to have it delete ALL of these records (this may not be good).") % self.__dict__ + + +class CircularDependency(SouthError): + def __init__(self, trace): + self.trace = trace + + def __str__(self): + trace = " -> ".join([str(s) for s in self.trace]) + return ("Found circular dependency:\n" + " %s") % trace + + +class InconsistentMigrationHistory(SouthError): + def __init__(self, problems): + self.problems = problems + + def __str__(self): + return ('Inconsistent migration history\n' + 'The following options are available:\n' + ' --merge: will just attempt the migration ignoring any potential dependency conflicts.') + + +class DependsOnHigherMigration(SouthError): + def __init__(self, migration, depends_on): + self.migration = migration + self.depends_on = depends_on + + def __str__(self): + return "Lower migration '%(migration)s' depends on a higher migration '%(depends_on)s' in the same app." 
% self.__dict__
+
+
+class DependsOnUnknownMigration(SouthError):
+    def __init__(self, migration, depends_on):
+        self.migration = migration
+        self.depends_on = depends_on
+
+    def __str__(self):
+        return "Migration '%(migration)s' depends on unknown migration '%(depends_on)s'." % self.__dict__
+
+
+class DependsOnUnmigratedApplication(SouthError):
+    def __init__(self, migration, application):
+        self.migration = migration
+        self.application = application
+
+    def __str__(self):
+        return "Migration '%(migration)s' depends on unmigrated application '%(application)s'." % self.__dict__
+
+
+class FailedDryRun(SouthError):
+    def __init__(self, migration, exc_info):
+        self.migration = migration
+        self.name = migration.name()
+        self.exc_info = exc_info
+        self.traceback = ''.join(format_exception(*self.exc_info))
+
+    def __str__(self):
+        return (" ! Error found during dry run of '%(name)s'! Aborting.\n"
+                "%(traceback)s") % self.__dict__
+
+
+class ORMBaseNotIncluded(SouthError):
+    """Raised when a frozen model has something in _ormbases which isn't frozen."""
+    pass
+
+
+class UnfreezeMeLater(Exception):
+    """An exception which tells the ORM unfreezer to postpone this model."""
+    pass
+
+
+class ImpossibleORMUnfreeze(SouthError):
+    """Raised if the ORM can't manage to unfreeze all the models in a linear fashion."""
+    pass
+
+class ConstraintDropped(SouthWarning):
+    def __init__(self, constraint, table, column=None):
+        self.table = table
+        if column:
+            self.column = ".%s" % column
+        else:
+            self.column = ""
+        self.constraint = constraint
+
+    def __str__(self):
+        return "Constraint %(constraint)s was dropped from %(table)s%(column)s -- was this intended?" % self.__dict__
diff --git a/awx/lib/site-packages/south/hacks/__init__.py b/awx/lib/site-packages/south/hacks/__init__.py
new file mode 100644
index 0000000000..8f28503ed6
--- /dev/null
+++ b/awx/lib/site-packages/south/hacks/__init__.py
@@ -0,0 +1,10 @@
+"""
+The hacks module encapsulates all the horrible things that play with Django
+internals in one, evil place.
+This top file will automagically expose the correct Hacks class.
+"""
+
+# Currently, these work for 1.0 and 1.1.
+from south.hacks.django_1_0 import Hacks
+
+hacks = Hacks()
\ No newline at end of file
diff --git a/awx/lib/site-packages/south/hacks/django_1_0.py b/awx/lib/site-packages/south/hacks/django_1_0.py
new file mode 100644
index 0000000000..00d0a8bab3
--- /dev/null
+++ b/awx/lib/site-packages/south/hacks/django_1_0.py
@@ -0,0 +1,109 @@
+"""
+Hacks for the Django 1.0/1.0.2 releases.
+"""
+
+from django.conf import settings
+from django.db.backends.creation import BaseDatabaseCreation
+from django.db.models.loading import cache
+from django.core import management
+from django.core.management.commands.flush import Command as FlushCommand
+from django.utils.datastructures import SortedDict
+
+from south.utils.py3 import string_types
+
+class SkipFlushCommand(FlushCommand):
+    def handle_noargs(self, **options):
+        # no-op to avoid calling flush
+        return
+
+class Hacks:
+
+    def set_installed_apps(self, apps):
+        """
+        Sets Django's INSTALLED_APPS setting to be effectively the list passed in.
+        """
+
+        # Make sure it's a list.
+        apps = list(apps)
+
+        # Make sure it contains strings
+        if apps:
+            assert isinstance(apps[0], string_types), "The argument to set_installed_apps must be a list of strings."
+
+        # Monkeypatch in!
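+        # Hedged usage sketch (not upstream code; app names illustrative):
+        #   hacks.set_installed_apps(["django.contrib.auth", "fakeapp"])
+        #   ...  # run against the reduced app list
+        #   hacks.reset_installed_apps()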
+ settings.INSTALLED_APPS, settings.OLD_INSTALLED_APPS = ( + apps, + settings.INSTALLED_APPS, + ) + self._redo_app_cache() + + + def reset_installed_apps(self): + """ + Undoes the effect of set_installed_apps. + """ + settings.INSTALLED_APPS = settings.OLD_INSTALLED_APPS + self._redo_app_cache() + + + def _redo_app_cache(self): + """ + Used to repopulate AppCache after fiddling with INSTALLED_APPS. + """ + cache.loaded = False + cache.handled = {} + cache.postponed = [] + cache.app_store = SortedDict() + cache.app_models = SortedDict() + cache.app_errors = {} + cache._populate() + + + def clear_app_cache(self): + """ + Clears the contents of AppCache to a blank state, so new models + from the ORM can be added. + """ + self.old_app_models, cache.app_models = cache.app_models, {} + + + def unclear_app_cache(self): + """ + Reversed the effects of clear_app_cache. + """ + cache.app_models = self.old_app_models + cache._get_models_cache = {} + + + def repopulate_app_cache(self): + """ + Rebuilds AppCache with the real model definitions. + """ + cache._populate() + + def store_app_cache_state(self): + self.stored_app_cache_state = dict(**cache.__dict__) + + def restore_app_cache_state(self): + cache.__dict__ = self.stored_app_cache_state + + def patch_flush_during_test_db_creation(self): + """ + Patches BaseDatabaseCreation.create_test_db to not flush database + """ + + def patch(f): + def wrapper(*args, **kwargs): + # hold onto the original and replace flush command with a no-op + original_flush_command = management._commands['flush'] + try: + management._commands['flush'] = SkipFlushCommand() + # run create_test_db + return f(*args, **kwargs) + finally: + # unpatch flush back to the original + management._commands['flush'] = original_flush_command + return wrapper + + BaseDatabaseCreation.create_test_db = patch(BaseDatabaseCreation.create_test_db) + diff --git a/awx/lib/site-packages/south/introspection_plugins/__init__.py b/awx/lib/site-packages/south/introspection_plugins/__init__.py new file mode 100644 index 0000000000..38262b52fb --- /dev/null +++ b/awx/lib/site-packages/south/introspection_plugins/__init__.py @@ -0,0 +1,11 @@ +# This module contains built-in introspector plugins for various common +# Django apps. + +# These imports trigger the lower-down files +import south.introspection_plugins.geodjango +import south.introspection_plugins.django_audit_log +import south.introspection_plugins.django_tagging +import south.introspection_plugins.django_taggit +import south.introspection_plugins.django_objectpermissions +import south.introspection_plugins.annoying_autoonetoone + diff --git a/awx/lib/site-packages/south/introspection_plugins/annoying_autoonetoone.py b/awx/lib/site-packages/south/introspection_plugins/annoying_autoonetoone.py new file mode 100644 index 0000000000..d61304f3be --- /dev/null +++ b/awx/lib/site-packages/south/introspection_plugins/annoying_autoonetoone.py @@ -0,0 +1,11 @@ +from django.conf import settings +from south.modelsinspector import add_introspection_rules + +if 'annoying' in settings.INSTALLED_APPS: + try: + from annoying.fields import AutoOneToOneField + except ImportError: + pass + else: + #django-annoying's AutoOneToOneField is essentially a OneToOneField. 
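+        # Hedged note (not upstream code): the empty rules list adds no new
+        # introspection rules; matching the regex just tells South the field
+        # can be frozen like its OneToOneField parent.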
+ add_introspection_rules([], ["^annoying\.fields\.AutoOneToOneField"]) diff --git a/awx/lib/site-packages/south/introspection_plugins/django_audit_log.py b/awx/lib/site-packages/south/introspection_plugins/django_audit_log.py new file mode 100644 index 0000000000..b874428e6d --- /dev/null +++ b/awx/lib/site-packages/south/introspection_plugins/django_audit_log.py @@ -0,0 +1,30 @@ +""" +South introspection rules for django-audit-log +""" + +from django.contrib.auth.models import User +from django.conf import settings +from south.modelsinspector import add_introspection_rules + +if "audit_log" in settings.INSTALLED_APPS: + try: + # Try and import the field so we can see if audit_log is available + from audit_log.models import fields + + # Make sure the `to` and `null` parameters will be ignored + rules = [( + (fields.LastUserField,), + [], + { + 'to': ['rel.to', {'default': User}], + 'null': ['null', {'default': True}], + }, + )] + + # Add the rules for the `LastUserField` + add_introspection_rules( + rules, + ['^audit_log\.models\.fields\.LastUserField'], + ) + except ImportError: + pass diff --git a/awx/lib/site-packages/south/introspection_plugins/django_objectpermissions.py b/awx/lib/site-packages/south/introspection_plugins/django_objectpermissions.py new file mode 100644 index 0000000000..42b353b53a --- /dev/null +++ b/awx/lib/site-packages/south/introspection_plugins/django_objectpermissions.py @@ -0,0 +1,16 @@ +""" +South introspection rules for django-objectpermissions +""" + +from django.conf import settings +from south.modelsinspector import add_ignored_fields + +if 'objectpermissions' in settings.INSTALLED_APPS: + try: + from objectpermissions.models import UserPermissionRelation, GroupPermissionRelation + except ImportError: + pass + else: + add_ignored_fields(["^objectpermissions\.models\.UserPermissionRelation", + "^objectpermissions\.models\.GroupPermissionRelation"]) + diff --git a/awx/lib/site-packages/south/introspection_plugins/django_tagging.py b/awx/lib/site-packages/south/introspection_plugins/django_tagging.py new file mode 100644 index 0000000000..c02e5294de --- /dev/null +++ b/awx/lib/site-packages/south/introspection_plugins/django_tagging.py @@ -0,0 +1,24 @@ +from south.modelsinspector import add_introspection_rules +from django.conf import settings + +if "tagging" in settings.INSTALLED_APPS: + try: + from tagging.fields import TagField + except ImportError: + pass + else: + rules = [ + ( + (TagField, ), + [], + { + "blank": ["blank", {"default": True}], + "max_length": ["max_length", {"default": 255}], + }, + ), + ] + add_introspection_rules(rules, ["^tagging\.fields",]) + +if "tagging_autocomplete" in settings.INSTALLED_APPS: + add_introspection_rules([], ["^tagging_autocomplete\.models\.TagAutocompleteField"]) + diff --git a/awx/lib/site-packages/south/introspection_plugins/django_taggit.py b/awx/lib/site-packages/south/introspection_plugins/django_taggit.py new file mode 100644 index 0000000000..aded23fa81 --- /dev/null +++ b/awx/lib/site-packages/south/introspection_plugins/django_taggit.py @@ -0,0 +1,14 @@ +""" +South introspection rules for django-taggit +""" + +from django.conf import settings +from south.modelsinspector import add_ignored_fields + +if 'taggit' in settings.INSTALLED_APPS: + try: + from taggit.managers import TaggableManager + except ImportError: + pass + else: + add_ignored_fields(["^taggit\.managers"]) diff --git a/awx/lib/site-packages/south/introspection_plugins/django_timezones.py 
b/awx/lib/site-packages/south/introspection_plugins/django_timezones.py new file mode 100644 index 0000000000..d4b573d89c --- /dev/null +++ b/awx/lib/site-packages/south/introspection_plugins/django_timezones.py @@ -0,0 +1,21 @@ +from south.modelsinspector import add_introspection_rules +from django.conf import settings + +if "timezones" in settings.INSTALLED_APPS: + try: + from timezones.fields import TimeZoneField + except ImportError: + pass + else: + rules = [ + ( + (TimeZoneField, ), + [], + { + "blank": ["blank", {"default": True}], + "max_length": ["max_length", {"default": 100}], + }, + ), + ] + add_introspection_rules(rules, ["^timezones\.fields",]) + diff --git a/awx/lib/site-packages/south/introspection_plugins/geodjango.py b/awx/lib/site-packages/south/introspection_plugins/geodjango.py new file mode 100644 index 0000000000..bece1c9f56 --- /dev/null +++ b/awx/lib/site-packages/south/introspection_plugins/geodjango.py @@ -0,0 +1,45 @@ +""" +GeoDjango introspection rules +""" + +import django +from django.conf import settings + +from south.modelsinspector import add_introspection_rules + +has_gis = "django.contrib.gis" in settings.INSTALLED_APPS + +if has_gis: + # Alright,import the field + from django.contrib.gis.db.models.fields import GeometryField + + # Make some introspection rules + if django.VERSION[0] == 1 and django.VERSION[1] >= 1: + # Django 1.1's gis module renamed these. + rules = [ + ( + (GeometryField, ), + [], + { + "srid": ["srid", {"default": 4326}], + "spatial_index": ["spatial_index", {"default": True}], + "dim": ["dim", {"default": 2}], + "geography": ["geography", {"default": False}], + }, + ), + ] + else: + rules = [ + ( + (GeometryField, ), + [], + { + "srid": ["_srid", {"default": 4326}], + "spatial_index": ["_index", {"default": True}], + "dim": ["_dim", {"default": 2}], + }, + ), + ] + + # Install them + add_introspection_rules(rules, ["^django\.contrib\.gis"]) \ No newline at end of file diff --git a/awx/lib/site-packages/south/logger.py b/awx/lib/site-packages/south/logger.py new file mode 100644 index 0000000000..2caae3a9f6 --- /dev/null +++ b/awx/lib/site-packages/south/logger.py @@ -0,0 +1,38 @@ +import sys +import logging +from django.conf import settings + +# Create a dummy handler to use for now. +class NullHandler(logging.Handler): + def emit(self, record): + pass + +def get_logger(): + "Attach a file handler to the logger if there isn't one already." + debug_on = getattr(settings, "SOUTH_LOGGING_ON", False) + logging_file = getattr(settings, "SOUTH_LOGGING_FILE", False) + + if debug_on: + if logging_file: + if len(_logger.handlers) < 2: + _logger.addHandler(logging.FileHandler(logging_file)) + _logger.setLevel(logging.DEBUG) + else: + raise IOError("SOUTH_LOGGING_ON is True. You also need a SOUTH_LOGGING_FILE setting.") + + return _logger + +def close_logger(): + "Closes the logger handler for the file, so we can remove the file after a test." 
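+    # Hedged note (not upstream code): iterate over a copy of the handler
+    # list, since removeHandler() mutates _logger.handlers mid-loop.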
+    for handler in _logger.handlers[:]:
+        _logger.removeHandler(handler)
+        if isinstance(handler, logging.FileHandler):
+            handler.close()
+
+def init_logger():
+    "Initialize the south logger"
+    logger = logging.getLogger("south")
+    logger.addHandler(NullHandler())
+    return logger
+
+_logger = init_logger()
diff --git a/awx/lib/site-packages/south/management/__init__.py b/awx/lib/site-packages/south/management/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/awx/lib/site-packages/south/management/commands/__init__.py b/awx/lib/site-packages/south/management/commands/__init__.py
new file mode 100644
index 0000000000..da218eb240
--- /dev/null
+++ b/awx/lib/site-packages/south/management/commands/__init__.py
@@ -0,0 +1,40 @@
+
+# Common framework for syncdb actions
+
+import copy
+
+from django.core import management
+from django.conf import settings
+
+# Make sure the template loader cache is fixed _now_ (#448)
+import django.template.loaders.app_directories
+
+from south.hacks import hacks
+from south.management.commands.syncdb import Command as SyncCommand
+
+class MigrateAndSyncCommand(SyncCommand):
+    """Used for situations where "syncdb" is called by test frameworks."""
+
+    option_list = copy.deepcopy(SyncCommand.option_list)
+
+    for opt in option_list:
+        if "--migrate" == opt.get_opt_string():
+            opt.default = True
+            break
+
+def patch_for_test_db_setup():
+    # Load the commands cache
+    management.get_commands()
+    # Repoint to the correct version of syncdb
+    if hasattr(settings, "SOUTH_TESTS_MIGRATE") and not settings.SOUTH_TESTS_MIGRATE:
+        # point at the core syncdb command when creating tests
+        # tests should always be up to date with the most recent model structure
+        management._commands['syncdb'] = 'django.core'
+    else:
+        management._commands['syncdb'] = MigrateAndSyncCommand()
+    # Avoid flushing data migrations.
+    # http://code.djangoproject.com/ticket/14661 introduced a change that flushed
+    # custom sql during test database creation (thus flushing the data migrations).
+    # We patch flush to be a no-op during create_test_db, but still allow flushing
+    # after each test for non-transactional backends.
+    hacks.patch_flush_during_test_db_creation()
diff --git a/awx/lib/site-packages/south/management/commands/convert_to_south.py b/awx/lib/site-packages/south/management/commands/convert_to_south.py
new file mode 100644
index 0000000000..658ed482fc
--- /dev/null
+++ b/awx/lib/site-packages/south/management/commands/convert_to_south.py
@@ -0,0 +1,95 @@
+"""
+Quick conversion command module.
+""" + +from __future__ import print_function + +from optparse import make_option +import sys + +from django.core.management.base import BaseCommand +from django.core.management.color import no_style +from django.conf import settings +from django.db import models +from django.core import management +from django.core.exceptions import ImproperlyConfigured + +from south.migration import Migrations +from south.hacks import hacks +from south.exceptions import NoMigrations + +class Command(BaseCommand): + + option_list = BaseCommand.option_list + if '--verbosity' not in [opt.get_opt_string() for opt in BaseCommand.option_list]: + option_list += ( + make_option('--verbosity', action='store', dest='verbosity', default='1', + type='choice', choices=['0', '1', '2'], + help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'), + ) + option_list += ( + make_option('--delete-ghost-migrations', action='store_true', dest='delete_ghosts', default=False, + help="Tells South to delete any 'ghost' migrations (ones in the database but not on disk)."), + make_option('--ignore-ghost-migrations', action='store_true', dest='ignore_ghosts', default=False, + help="Tells South to ignore any 'ghost' migrations (ones in the database but not on disk) and continue to apply new migrations."), + ) + + help = "Quickly converts the named application to use South if it is currently using syncdb." + + def handle(self, app=None, *args, **options): + + # Make sure we have an app + if not app: + print("Please specify an app to convert.") + return + + # See if the app exists + app = app.split(".")[-1] + try: + app_module = models.get_app(app) + except ImproperlyConfigured: + print("There is no enabled application matching '%s'." % app) + return + + # Try to get its list of models + model_list = models.get_models(app_module) + if not model_list: + print("This application has no models; this command is for applications that already have models syncdb'd.") + print("Make some models, and then use ./manage.py schemamigration %s --initial instead." % app) + return + + # Ask South if it thinks it's already got migrations + try: + Migrations(app) + except NoMigrations: + pass + else: + print("This application is already managed by South.") + return + + # Finally! It seems we've got a candidate, so do the two-command trick + verbosity = int(options.get('verbosity', 0)) + management.call_command("schemamigration", app, initial=True, verbosity=verbosity) + + # Now, we need to re-clean and sanitise appcache + hacks.clear_app_cache() + hacks.repopulate_app_cache() + + # And also clear our cached Migration classes + Migrations._clear_cache() + + # Now, migrate + management.call_command( + "migrate", + app, + "0001", + fake=True, + verbosity=verbosity, + ignore_ghosts=options.get("ignore_ghosts", False), + delete_ghosts=options.get("delete_ghosts", False), + ) + + print() + print("App '%s' converted. Note that South assumed the application's models matched the database" % app) + print("(i.e. 
you haven't changed it since last syncdb); if you have, you should delete the %s/migrations" % app) + print("directory, revert models.py so it matches the database, and try again.") diff --git a/awx/lib/site-packages/south/management/commands/datamigration.py b/awx/lib/site-packages/south/management/commands/datamigration.py new file mode 100644 index 0000000000..08c1d0891c --- /dev/null +++ b/awx/lib/site-packages/south/management/commands/datamigration.py @@ -0,0 +1,128 @@ +""" +Data migration creation command +""" + +from __future__ import print_function + +import sys +import os +import re +from optparse import make_option + +try: + set +except NameError: + from sets import Set as set + +from django.core.management.base import BaseCommand +from django.core.management.color import no_style +from django.db import models +from django.conf import settings + +from south.migration import Migrations +from south.exceptions import NoMigrations +from south.creator import freezer + +class Command(BaseCommand): + option_list = BaseCommand.option_list + ( + make_option('--freeze', action='append', dest='freeze_list', type='string', + help='Freeze the specified app(s). Provide an app name with each; use the option multiple times for multiple apps'), + make_option('--stdout', action='store_true', dest='stdout', default=False, + help='Print the migration to stdout instead of writing it to a file.'), + ) + help = "Creates a new template data migration for the given app" + usage_str = "Usage: ./manage.py datamigration appname migrationname [--stdout] [--freeze appname]" + + def handle(self, app=None, name="", freeze_list=None, stdout=False, verbosity=1, **options): + + # Any supposed lists that are None become empty lists + freeze_list = freeze_list or [] + + # --stdout means name = - + if stdout: + name = "-" + + # Only allow valid names + if re.search('[^_\w]', name) and name != "-": + self.error("Migration names should contain only alphanumeric characters and underscores.") + + # if not name, there's an error + if not name: + self.error("You must provide a name for this migration\n" + self.usage_str) + + if not app: + self.error("You must provide an app to create a migration for.\n" + self.usage_str) + + # Get the Migrations for this app (creating the migrations dir if needed) + migrations = Migrations(app, force_creation=True, verbose_creation=verbosity > 0) + + # See what filename is next in line. We assume they use numbers. + new_filename = migrations.next_filename(name) + + # Work out which apps to freeze + apps_to_freeze = self.calc_frozen_apps(migrations, freeze_list) + + # So, what's in this file, then? + file_contents = MIGRATION_TEMPLATE % { + "frozen_models": freezer.freeze_apps_to_string(apps_to_freeze), + "complete_apps": apps_to_freeze and "complete_apps = [%s]" % (", ".join(map(repr, apps_to_freeze))) or "" + } + + # - is a special name which means 'print to stdout' + if name == "-": + print(file_contents) + # Write the migration file if the name isn't - + else: + fp = open(os.path.join(migrations.migrations_dir(), new_filename), "w") + fp.write(file_contents) + fp.close() + print("Created %s." % new_filename, file=sys.stderr) + + def calc_frozen_apps(self, migrations, freeze_list): + """ + Works out, from the current app, settings, and the command line options, + which apps should be frozen. + """ + apps_to_freeze = [] + for to_freeze in freeze_list: + if "." in to_freeze: + self.error("You cannot freeze %r; you must provide an app label, like 'auth' or 'books'." 
% to_freeze) + # Make sure it's a real app + if not models.get_app(to_freeze): + self.error("You cannot freeze %r; it's not an installed app." % to_freeze) + # OK, it's fine + apps_to_freeze.append(to_freeze) + if getattr(settings, 'SOUTH_AUTO_FREEZE_APP', True): + apps_to_freeze.append(migrations.app_label()) + return apps_to_freeze + + def error(self, message, code=1): + """ + Prints the error, and exits with the given code. + """ + print(message, file=sys.stderr) + sys.exit(code) + + +MIGRATION_TEMPLATE = """# -*- coding: utf-8 -*- +import datetime +from south.db import db +from south.v2 import DataMigration +from django.db import models + +class Migration(DataMigration): + + def forwards(self, orm): + "Write your forwards methods here." + # Note: Don't use "from appname.models import ModelName". + # Use orm.ModelName to refer to models in this application, + # and orm['appname.ModelName'] for models in other applications. + + def backwards(self, orm): + "Write your backwards methods here." + + models = %(frozen_models)s + + %(complete_apps)s + symmetrical = True +""" diff --git a/awx/lib/site-packages/south/management/commands/graphmigrations.py b/awx/lib/site-packages/south/management/commands/graphmigrations.py new file mode 100644 index 0000000000..6ff1e479be --- /dev/null +++ b/awx/lib/site-packages/south/management/commands/graphmigrations.py @@ -0,0 +1,63 @@ +""" +Outputs a graphviz dot file of the dependencies. +""" + +from __future__ import print_function + +from optparse import make_option +import re +import textwrap + +from django.core.management.base import BaseCommand +from django.core.management.color import no_style + +from south.migration import Migrations, all_migrations + +class Command(BaseCommand): + + help = "Outputs a GraphViz dot file of all migration dependencies to stdout." + + def handle(self, **options): + + # Resolve dependencies + Migrations.calculate_dependencies() + + colors = [ 'crimson', 'darkgreen', 'darkgoldenrod', 'navy', + 'brown', 'darkorange', 'aquamarine' , 'blueviolet' ] + color_index = 0 + wrapper = textwrap.TextWrapper(width=40) + + print("digraph G {") + + # Group each app in a subgraph + for migrations in all_migrations(): + print(" subgraph %s {" % migrations.app_label()) + print(" node [color=%s];" % colors[color_index]) + for migration in migrations: + # Munge the label - text wrap and change _ to spaces + label = "%s - %s" % ( + migration.app_label(), migration.name()) + label = re.sub(r"_+", " ", label) + label= "\\n".join(wrapper.wrap(label)) + print(' "%s.%s" [label="%s"];' % ( + migration.app_label(), migration.name(), label)) + print(" }") + color_index = (color_index + 1) % len(colors) + + # For every migration, print its links. + for migrations in all_migrations(): + for migration in migrations: + for other in migration.dependencies: + # Added weight tends to keep migrations from the same app + # in vertical alignment + attrs = "[weight=2.0]" + # But the more interesting edges are those between apps + if other.app_label() != migration.app_label(): + attrs = "[style=bold]" + print(' "%s.%s" -> "%s.%s" %s;' % ( + other.app_label(), other.name(), + migration.app_label(), migration.name(), + attrs + )) + + print("}"); diff --git a/awx/lib/site-packages/south/management/commands/migrate.py b/awx/lib/site-packages/south/management/commands/migrate.py new file mode 100644 index 0000000000..693dbb7b6a --- /dev/null +++ b/awx/lib/site-packages/south/management/commands/migrate.py @@ -0,0 +1,264 @@ +""" +Migrate management command. 
+""" + +from __future__ import print_function + +import os.path, re, sys +from functools import reduce +from optparse import make_option + +from django.core.management.base import BaseCommand +from django.conf import settings +from django.utils.importlib import import_module + +from south import migration +from south.migration import Migrations +from south.exceptions import NoMigrations +from south.db import DEFAULT_DB_ALIAS + +class Command(BaseCommand): + option_list = BaseCommand.option_list + ( + make_option('--all', action='store_true', dest='all_apps', default=False, + help='Run the specified migration for all apps.'), + make_option('--list', action='store_true', dest='show_list', default=False, + help='List migrations noting those that have been applied'), + make_option('--changes', action='store_true', dest='show_changes', default=False, + help='List changes for migrations'), + make_option('--skip', action='store_true', dest='skip', default=False, + help='Will skip over out-of-order missing migrations'), + make_option('--merge', action='store_true', dest='merge', default=False, + help='Will run out-of-order missing migrations as they are - no rollbacks.'), + make_option('--no-initial-data', action='store_true', dest='no_initial_data', default=False, + help='Skips loading initial data if specified.'), + make_option('--fake', action='store_true', dest='fake', default=False, + help="Pretends to do the migrations, but doesn't actually execute them."), + make_option('--db-dry-run', action='store_true', dest='db_dry_run', default=False, + help="Doesn't execute the SQL generated by the db methods, and doesn't store a record that the migration(s) occurred. Useful to test migrations before applying them."), + make_option('--delete-ghost-migrations', action='store_true', dest='delete_ghosts', default=False, + help="Tells South to delete any 'ghost' migrations (ones in the database but not on disk)."), + make_option('--ignore-ghost-migrations', action='store_true', dest='ignore_ghosts', default=False, + help="Tells South to ignore any 'ghost' migrations (ones in the database but not on disk) and continue to apply new migrations."), + make_option('--noinput', action='store_false', dest='interactive', default=True, + help='Tells Django to NOT prompt the user for input of any kind.'), + make_option('--database', action='store', dest='database', + default=DEFAULT_DB_ALIAS, help='Nominates a database to synchronize. ' + 'Defaults to the "default" database.'), + ) + if '--verbosity' not in [opt.get_opt_string() for opt in BaseCommand.option_list]: + option_list += ( + make_option('--verbosity', action='store', dest='verbosity', default='1', + type='choice', choices=['0', '1', '2'], + help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'), + ) + help = "Runs migrations for all apps." + args = "[appname] [migrationname|zero] [--all] [--list] [--skip] [--merge] [--no-initial-data] [--fake] [--db-dry-run] [--database=dbalias]" + + def handle(self, app=None, target=None, skip=False, merge=False, backwards=False, fake=False, db_dry_run=False, show_list=False, show_changes=False, database=DEFAULT_DB_ALIAS, delete_ghosts=False, ignore_ghosts=False, **options): + + # NOTE: THIS IS DUPLICATED FROM django.core.management.commands.syncdb + # This code imports any module named 'management' in INSTALLED_APPS. 
+        # The 'management' module is the preferred way of listening to post_syncdb
+        # signals, and since we're sending those out with create_table migrations,
+        # we need apps to behave correctly.
+        for app_name in settings.INSTALLED_APPS:
+            try:
+                import_module('.management', app_name)
+            except ImportError as exc:
+                msg = exc.args[0]
+                if not msg.startswith('No module named') or 'management' not in msg:
+                    raise
+        # END DJANGO DUPE CODE
+
+        # if all_apps flag is set, shift app over to target
+        if options.get('all_apps', False):
+            target = app
+            app = None
+
+        # Migrate each app
+        if app:
+            try:
+                apps = [Migrations(app)]
+            except NoMigrations:
+                print("The app '%s' does not appear to use migrations." % app)
+                print("./manage.py migrate " + self.args)
+                return
+        else:
+            apps = list(migration.all_migrations())
+
+        # Do we need to show the list of migrations?
+        if show_list and apps:
+            list_migrations(apps, database, **options)
+
+        if show_changes and apps:
+            show_migration_changes(apps)
+
+        if not (show_list or show_changes):
+
+            for app in apps:
+                result = migration.migrate_app(
+                    app,
+                    target_name = target,
+                    fake = fake,
+                    db_dry_run = db_dry_run,
+                    verbosity = int(options.get('verbosity', 0)),
+                    interactive = options.get('interactive', True),
+                    load_initial_data = not options.get('no_initial_data', False),
+                    merge = merge,
+                    skip = skip,
+                    database = database,
+                    delete_ghosts = delete_ghosts,
+                    ignore_ghosts = ignore_ghosts,
+                )
+                if result is False:
+                    sys.exit(1) # Migration failed, so the command fails.
+
+
+def list_migrations(apps, database = DEFAULT_DB_ALIAS, **options):
+    """
+    Prints a list of all available migrations, and which ones are currently applied.
+    Accepts a list of Migrations instances.
+    """
+    from south.models import MigrationHistory
+    applied_migrations = MigrationHistory.objects.filter(app_name__in=[app.app_label() for app in apps])
+    if database != DEFAULT_DB_ALIAS:
+        applied_migrations = applied_migrations.using(database)
+    applied_migrations_lookup = dict(('%s.%s' % (mi.app_name, mi.migration), mi) for mi in applied_migrations)
+
+    print()
+    for app in apps:
+        print(" " + app.app_label())
+        # Get the migrations object
+        for migration in app:
+            full_name = migration.app_label() + "." + migration.name()
+            if full_name in applied_migrations_lookup:
+                applied_migration = applied_migrations_lookup[full_name]
+                print(format_migration_list_item(migration.name(), applied=applied_migration.applied, **options))
+            else:
+                print(format_migration_list_item(migration.name(), applied=False, **options))
+    print()

+def show_migration_changes(apps):
+    """
+    Prints the changes made by each migration, computed by diffing successive
+    frozen model definitions.
+    Accepts a list of Migrations instances.
+ + Much simpler, less clear, and much less robust version: + grep "ing " migrations/*.py + """ + for app in apps: + print(app.app_label()) + # Get the migrations objects + migrations = [migration for migration in app] + # we use reduce to compare models in pairs, not to generate a value + reduce(diff_migrations, migrations) + +def format_migration_list_item(name, applied=True, **options): + if applied: + if int(options.get('verbosity')) >= 2: + return ' (*) %-80s (applied %s)' % (name, applied) + else: + return ' (*) %s' % name + else: + return ' ( ) %s' % name + +def diff_migrations(migration1, migration2): + + def model_name(models, model): + return models[model].get('Meta', {}).get('object_name', model) + + def field_name(models, model, field): + return '%s.%s' % (model_name(models, model), field) + + print(" " + migration2.name()) + + models1 = migration1.migration_class().models + models2 = migration2.migration_class().models + + # find new models + for model in models2.keys(): + if not model in models1.keys(): + print(' added model %s' % model_name(models2, model)) + + # find removed models + for model in models1.keys(): + if not model in models2.keys(): + print(' removed model %s' % model_name(models1, model)) + + # compare models + for model in models1: + if model in models2: + + # find added fields + for field in models2[model]: + if not field in models1[model]: + print(' added field %s' % field_name(models2, model, field)) + + # find removed fields + for field in models1[model]: + if not field in models2[model]: + print(' removed field %s' % field_name(models1, model, field)) + + # compare fields + for field in models1[model]: + if field in models2[model]: + + name = field_name(models1, model, field) + + # compare field attributes + field_value1 = models1[model][field] + field_value2 = models2[model][field] + + # if a field has become a class, or vice versa + if type(field_value1) != type(field_value2): + print(' type of %s changed from %s to %s' % ( + name, field_value1, field_value2)) + + # if class + elif isinstance(field_value1, dict): + # print ' %s is a class' % name + pass + + # else regular field + else: + + type1, attr_list1, field_attrs1 = models1[model][field] + type2, attr_list2, field_attrs2 = models2[model][field] + + if type1 != type2: + print(' %s type changed from %s to %s' % ( + name, type1, type2)) + + if attr_list1 != []: + print(' %s list %s is not []' % ( + name, attr_list1)) + if attr_list2 != []: + print(' %s list %s is not []' % ( + name, attr_list2)) + if attr_list1 != attr_list2: + print(' %s list changed from %s to %s' % ( + name, attr_list1, attr_list2)) + + # find added field attributes + for attr in field_attrs2: + if not attr in field_attrs1: + print(' added %s attribute %s=%s' % ( + name, attr, field_attrs2[attr])) + + # find removed field attributes + for attr in field_attrs1: + if not attr in field_attrs2: + print(' removed attribute %s(%s=%s)' % ( + name, attr, field_attrs1[attr])) + + # compare field attributes + for attr in field_attrs1: + if attr in field_attrs2: + + value1 = field_attrs1[attr] + value2 = field_attrs2[attr] + if value1 != value2: + print(' %s attribute %s changed from %s to %s' % ( + name, attr, value1, value2)) + + return migration2 diff --git a/awx/lib/site-packages/south/management/commands/migrationcheck.py b/awx/lib/site-packages/south/management/commands/migrationcheck.py new file mode 100644 index 0000000000..f498d0b312 --- /dev/null +++ b/awx/lib/site-packages/south/management/commands/migrationcheck.py @@ -0,0 
+1,67 @@ +from django.core.exceptions import ImproperlyConfigured +from django.core.management import call_command, CommandError +from django.core.management.base import BaseCommand +from django.conf import settings +from django.db.models import loading +from django.test import simple + +from south.migration import Migrations +from south.exceptions import NoMigrations +from south.hacks import hacks + +class Command(BaseCommand): + help = "Runs migrations for each app in turn, detecting missing depends_on values." + usage_str = "Usage: ./manage.py migrationcheck" + + def handle(self, check_app_name=None, **options): + runner = simple.DjangoTestSuiteRunner(verbosity=0) + err_msg = "Failed to migrate %s; see output for hints at missing dependencies:\n" + hacks.patch_flush_during_test_db_creation() + failures = 0 + if check_app_name is None: + app_names = settings.INSTALLED_APPS + else: + app_names = [check_app_name] + for app_name in app_names: + app_label = app_name.split(".")[-1] + if app_name == 'south': + continue + + try: + Migrations(app_name) + except (NoMigrations, ImproperlyConfigured): + continue + app = loading.get_app(app_label) + + verbosity = int(options.get('verbosity', 1)) + if verbosity >= 1: + self.stderr.write("processing %s\n" % app_name) + + old_config = runner.setup_databases() + try: + call_command('migrate', app_label, noinput=True, verbosity=verbosity) + for model in loading.get_models(app): + dummy = model._default_manager.exists() + except (KeyboardInterrupt, SystemExit): + raise + except Exception as e: + failures += 1 + if verbosity >= 1: + self.stderr.write(err_msg % app_name) + self.stderr.write("%s\n" % e) + finally: + runner.teardown_databases(old_config) + if failures > 0: + raise CommandError("Missing depends_on found in %s app(s)." % failures) + self.stderr.write("No missing depends_on found.\n") +# +#for each app: +# start with blank db. +# syncdb only south (and contrib?) +# +# migrate a single app all the way up. any errors is missing depends_on. +# for all models of that app, try the default manager: +# from django.db.models import loading +# for m in loading.get_models(loading.get_app('a')): +# m._default_manager.exists() +# Any error is also a missing depends on. diff --git a/awx/lib/site-packages/south/management/commands/schemamigration.py b/awx/lib/site-packages/south/management/commands/schemamigration.py new file mode 100644 index 0000000000..e29fc620b6 --- /dev/null +++ b/awx/lib/site-packages/south/management/commands/schemamigration.py @@ -0,0 +1,226 @@ +""" +Startmigration command, version 2. +""" + +from __future__ import print_function + +import sys +import os +import re +import string +import random +import inspect +from optparse import make_option + +try: + set +except NameError: + from sets import Set as set + +from django.core.management.base import BaseCommand +from django.core.management.color import no_style +from django.core.exceptions import ImproperlyConfigured +from django.db import models +from django.conf import settings + +from south.migration import Migrations, migrate_app +from south.models import MigrationHistory +from south.exceptions import NoMigrations +from south.creator import changes, actions, freezer +from south.management.commands.datamigration import Command as DataCommand + +class Command(DataCommand): + option_list = DataCommand.option_list + ( + make_option('--add-model', action='append', dest='added_model_list', type='string', + help='Generate a Create Table migration for the specified model. 
Add multiple models to this migration with subsequent --add-model parameters.'),
+        make_option('--add-field', action='append', dest='added_field_list', type='string',
+            help='Generate an Add Column migration for the specified modelname.fieldname - you can use this multiple times to add more than one column.'),
+        make_option('--add-index', action='append', dest='added_index_list', type='string',
+            help='Generate an Add Index migration for the specified modelname.fieldname - you can use this multiple times to add more than one column.'),
+        make_option('--initial', action='store_true', dest='initial', default=False,
+            help='Generate the initial schema for the app.'),
+        make_option('--auto', action='store_true', dest='auto', default=False,
+            help='Attempt to automatically detect differences from the last migration.'),
+        make_option('--empty', action='store_true', dest='empty', default=False,
+            help='Make a blank migration.'),
+        make_option('--update', action='store_true', dest='update', default=False,
+            help='Update the most recent migration instead of creating a new one. Roll back this migration if it is already applied.'),
+    )
+    help = "Creates a new template schema migration for the given app"
+    usage_str = "Usage: ./manage.py schemamigration appname migrationname [--empty] [--initial] [--auto] [--add-model ModelName] [--add-field ModelName.field_name] [--stdout]"
+
+    def handle(self, app=None, name="", added_model_list=None, added_field_list=None, freeze_list=None, initial=False, auto=False, stdout=False, added_index_list=None, verbosity=1, empty=False, update=False, **options):
+
+        # Any supposed lists that are None become empty lists
+        added_model_list = added_model_list or []
+        added_field_list = added_field_list or []
+        added_index_list = added_index_list or []
+        freeze_list = freeze_list or []
+
+        # --stdout means name = -
+        if stdout:
+            name = "-"
+
+        # Only allow valid names
+        if re.search(r'[^_\w]', name) and name != "-":
+            self.error("Migration names should contain only alphanumeric characters and underscores.")
+
+        # Make sure options are compatible
+        if initial and (added_model_list or added_field_list or auto):
+            self.error("You cannot use --initial and other options together\n" + self.usage_str)
+
+        if auto and (added_model_list or added_field_list or initial):
+            self.error("You cannot use --auto and other options together\n" + self.usage_str)
+
+        if not app:
+            self.error("You must provide an app to create a migration for.\n" + self.usage_str)
+
+        # See if the app exists
+        app = app.split(".")[-1]
+        try:
+            app_module = models.get_app(app)
+        except ImproperlyConfigured:
+            print("There is no enabled application matching '%s'." % app)
+            return
+
+        # Get the Migrations for this app (creating the migrations dir if needed)
+        migrations = Migrations(app, force_creation=True, verbose_creation=int(verbosity) > 0)
+
+        # What actions do we need to do?
+        if auto:
+            # Get the old migration
+            try:
+                last_migration = migrations[-2 if update else -1]
+            except IndexError:
+                self.error("You cannot use --auto on an app with no migrations. Try --initial.")
+            # Make sure it has stored models
+            if migrations.app_label() not in getattr(last_migration.migration_class(), "complete_apps", []):
+                self.error("You cannot use automatic detection, since the previous migration does not have this whole app frozen.\nEither make migrations using '--freeze %s' or set 'SOUTH_AUTO_FREEZE_APP = True' in your settings.py." % migrations.app_label())
+            # Alright, construct two model dicts to run the differ on.
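+            # To illustrate the shape being compared (a sketch using a
+            # hypothetical 'blog' app, not part of this changeset): both dicts
+            # map "app.modelname" keys to frozen definitions such as
+            #     {"blog.post": {
+            #         "Meta": {"object_name": "Post"},
+            #         "id": ("django.db.models.fields.AutoField", [], {"primary_key": "True"}),
+            #         "title": ("django.db.models.fields.CharField", [], {"max_length": "100"}),
+            #     }}
+            # and AutoChanges derives the forwards/backwards actions from the
+            # differences between the two.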
+ old_defs = dict( + (k, v) for k, v in last_migration.migration_class().models.items() + if k.split(".")[0] == migrations.app_label() + ) + new_defs = dict( + (k, v) for k, v in freezer.freeze_apps([migrations.app_label()]).items() + if k.split(".")[0] == migrations.app_label() + ) + change_source = changes.AutoChanges( + migrations = migrations, + old_defs = old_defs, + old_orm = last_migration.orm(), + new_defs = new_defs, + ) + + elif initial: + # Do an initial migration + change_source = changes.InitialChanges(migrations) + + else: + # Read the commands manually off of the arguments + if (added_model_list or added_field_list or added_index_list): + change_source = changes.ManualChanges( + migrations, + added_model_list, + added_field_list, + added_index_list, + ) + elif empty: + change_source = None + else: + print("You have not passed any of --initial, --auto, --empty, --add-model, --add-field or --add-index.", file=sys.stderr) + sys.exit(1) + + # Validate this so we can access the last migration without worrying + if update and not migrations: + self.error("You cannot use --update on an app with no migrations.") + + # if not name, there's an error + if not name: + if change_source: + name = change_source.suggest_name() + if update: + name = re.sub(r'^\d{4}_', '', migrations[-1].name()) + if not name: + self.error("You must provide a name for this migration\n" + self.usage_str) + + # Get the actions, and then insert them into the actions lists + forwards_actions = [] + backwards_actions = [] + if change_source: + for action_name, params in change_source.get_changes(): + # Run the correct Action class + try: + action_class = getattr(actions, action_name) + except AttributeError: + raise ValueError("Invalid action name from source: %s" % action_name) + else: + action = action_class(**params) + action.add_forwards(forwards_actions) + action.add_backwards(backwards_actions) + print(action.console_line(), file=sys.stderr) + + # Nowt happen? That's not good for --auto. + if auto and not forwards_actions: + self.error("Nothing seems to have changed.") + + # Work out which apps to freeze + apps_to_freeze = self.calc_frozen_apps(migrations, freeze_list) + + # So, what's in this file, then? + file_contents = MIGRATION_TEMPLATE % { + "forwards": "\n".join(forwards_actions or [" pass"]), + "backwards": "\n".join(backwards_actions or [" pass"]), + "frozen_models": freezer.freeze_apps_to_string(apps_to_freeze), + "complete_apps": apps_to_freeze and "complete_apps = [%s]" % (", ".join(map(repr, apps_to_freeze))) or "" + } + + # Deal with update mode as late as possible, avoid a rollback as long + # as something else can go wrong. + if update: + last_migration = migrations[-1] + if MigrationHistory.objects.filter(applied__isnull=False, app_name=app, migration=last_migration.name()): + print("Migration to be updated, %s, is already applied, rolling it back now..." % last_migration.name(), file=sys.stderr) + migrate_app(migrations, 'current-1', verbosity=verbosity) + for ext in ('py', 'pyc'): + old_filename = "%s.%s" % (os.path.join(migrations.migrations_dir(), last_migration.filename), ext) + if os.path.isfile(old_filename): + os.unlink(old_filename) + migrations.remove(last_migration) + + # See what filename is next in line. We assume they use numbers. 
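+        # For example, if the migrations directory already holds
+        # 0001_initial.py and 0002_auto__add_author.py (hypothetical names),
+        # next_filename() returns "0003_<name>.py"; see
+        # Migrations.next_filename in south/migration/base.py below.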
+ new_filename = migrations.next_filename(name) + + # - is a special name which means 'print to stdout' + if name == "-": + print(file_contents) + # Write the migration file if the name isn't - + else: + fp = open(os.path.join(migrations.migrations_dir(), new_filename), "w") + fp.write(file_contents) + fp.close() + verb = 'Updated' if update else 'Created' + if empty: + print("%s %s. You must now edit this migration and add the code for each direction." % (verb, new_filename), file=sys.stderr) + else: + print("%s %s. You can now apply this migration with: ./manage.py migrate %s" % (verb, new_filename, app), file=sys.stderr) + + +MIGRATION_TEMPLATE = """# -*- coding: utf-8 -*- +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + + +class Migration(SchemaMigration): + + def forwards(self, orm): +%(forwards)s + + def backwards(self, orm): +%(backwards)s + + models = %(frozen_models)s + + %(complete_apps)s""" diff --git a/awx/lib/site-packages/south/management/commands/startmigration.py b/awx/lib/site-packages/south/management/commands/startmigration.py new file mode 100644 index 0000000000..e4fcf458c9 --- /dev/null +++ b/awx/lib/site-packages/south/management/commands/startmigration.py @@ -0,0 +1,33 @@ +""" +Now-obsolete startmigration command. +""" + +from __future__ import print_function + +from optparse import make_option + +from django.core.management.base import BaseCommand +from django.core.management.color import no_style + +class Command(BaseCommand): + option_list = BaseCommand.option_list + ( + make_option('--model', action='append', dest='added_model_list', type='string', + help='Generate a Create Table migration for the specified model. Add multiple models to this migration with subsequent --add-model parameters.'), + make_option('--add-field', action='append', dest='added_field_list', type='string', + help='Generate an Add Column migration for the specified modelname.fieldname - you can use this multiple times to add more than one column.'), + make_option('--add-index', action='append', dest='added_index_list', type='string', + help='Generate an Add Index migration for the specified modelname.fieldname - you can use this multiple times to add more than one column.'), + make_option('--initial', action='store_true', dest='initial', default=False, + help='Generate the initial schema for the app.'), + make_option('--auto', action='store_true', dest='auto', default=False, + help='Attempt to automatically detect differences from the last migration.'), + make_option('--freeze', action='append', dest='freeze_list', type='string', + help='Freeze the specified model(s). 
Pass in either an app name (to freeze the whole app) or a single model, as appname.modelname.'), + make_option('--stdout', action='store_true', dest='stdout', default=False, + help='Print the migration to stdout instead of writing it to a file.'), + ) + help = "Deprecated command" + + def handle(self, app=None, name="", added_model_list=None, added_field_list=None, initial=False, freeze_list=None, auto=False, stdout=False, added_index_list=None, **options): + + print("The 'startmigration' command is now deprecated; please use the new 'schemamigration' and 'datamigration' commands.") diff --git a/awx/lib/site-packages/south/management/commands/syncdb.py b/awx/lib/site-packages/south/management/commands/syncdb.py new file mode 100644 index 0000000000..702085b194 --- /dev/null +++ b/awx/lib/site-packages/south/management/commands/syncdb.py @@ -0,0 +1,113 @@ +""" +Overridden syncdb command +""" + +from __future__ import print_function + +import sys +from optparse import make_option + +from django.core.management.base import NoArgsCommand, BaseCommand +from django.core.management.color import no_style +from django.utils.datastructures import SortedDict +from django.core.management.commands import syncdb +from django.conf import settings +from django.db import models +from django.db.models.loading import cache +from django.core import management + +from south.db import dbs +from south import migration +from south.exceptions import NoMigrations + +def get_app_label(app): + return '.'.join( app.__name__.split('.')[0:-1] ) + +class Command(NoArgsCommand): + option_list = syncdb.Command.option_list + ( + make_option('--migrate', action='store_true', dest='migrate', default=False, + help='Tells South to also perform migrations after the sync. Default for during testing, and other internal calls.'), + make_option('--all', action='store_true', dest='migrate_all', default=False, + help='Makes syncdb work on all apps, even migrated ones. Be careful!'), + ) + if '--verbosity' not in [opt.get_opt_string() for opt in syncdb.Command.option_list]: + option_list += ( + make_option('--verbosity', action='store', dest='verbosity', default='1', + type='choice', choices=['0', '1', '2'], + help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'), + ) + help = "Create the database tables for all apps in INSTALLED_APPS whose tables haven't already been created, except those which use migrations." + + def handle_noargs(self, migrate_all=False, **options): + + # Import the 'management' module within each installed app, to register + # dispatcher events. + # This is copied from Django, to fix bug #511. 
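+        # (For instance, importing 'django.contrib.auth.management' is what
+        # connects the post_syncdb handler that creates Permission rows;
+        # skipping this import would silently lose such side effects.)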
+ try: + from django.utils.importlib import import_module + except ImportError: + pass # TODO: Remove, only for Django1.0 + else: + for app_name in settings.INSTALLED_APPS: + try: + import_module('.management', app_name) + except ImportError as exc: + msg = exc.args[0] + if not msg.startswith('No module named') or 'management' not in msg: + raise + + # Work out what uses migrations and so doesn't need syncing + apps_needing_sync = [] + apps_migrated = [] + for app in models.get_apps(): + app_label = get_app_label(app) + if migrate_all: + apps_needing_sync.append(app_label) + else: + try: + migrations = migration.Migrations(app_label) + except NoMigrations: + # It needs syncing + apps_needing_sync.append(app_label) + else: + # This is a migrated app, leave it + apps_migrated.append(app_label) + verbosity = int(options.get('verbosity', 0)) + + # Run syncdb on only the ones needed + if verbosity: + print("Syncing...") + + old_installed, settings.INSTALLED_APPS = settings.INSTALLED_APPS, apps_needing_sync + old_app_store, cache.app_store = cache.app_store, SortedDict([ + (k, v) for (k, v) in cache.app_store.items() + if get_app_label(k) in apps_needing_sync + ]) + + # This will allow the setting of the MySQL storage engine, for example. + for db in dbs.values(): + db.connection_init() + + # OK, run the actual syncdb + syncdb.Command().execute(**options) + + settings.INSTALLED_APPS = old_installed + cache.app_store = old_app_store + + # Migrate if needed + if options.get('migrate', True): + if verbosity: + print("Migrating...") + management.call_command('migrate', **options) + + # Be obvious about what we did + if verbosity: + print("\nSynced:\n > %s" % "\n > ".join(apps_needing_sync)) + + if options.get('migrate', True): + if verbosity: + print("\nMigrated:\n - %s" % "\n - ".join(apps_migrated)) + else: + if verbosity: + print("\nNot synced (use migrations):\n - %s" % "\n - ".join(apps_migrated)) + print("(use ./manage.py migrate to migrate these)") diff --git a/awx/lib/site-packages/south/management/commands/test.py b/awx/lib/site-packages/south/management/commands/test.py new file mode 100644 index 0000000000..990178637a --- /dev/null +++ b/awx/lib/site-packages/south/management/commands/test.py @@ -0,0 +1,8 @@ +from django.core.management.commands import test + +from south.management.commands import patch_for_test_db_setup + +class Command(test.Command): + def handle(self, *args, **kwargs): + patch_for_test_db_setup() + super(Command, self).handle(*args, **kwargs) diff --git a/awx/lib/site-packages/south/management/commands/testserver.py b/awx/lib/site-packages/south/management/commands/testserver.py new file mode 100644 index 0000000000..3c3c4b5ecc --- /dev/null +++ b/awx/lib/site-packages/south/management/commands/testserver.py @@ -0,0 +1,8 @@ +from django.core.management.commands import testserver + +from south.management.commands import patch_for_test_db_setup + +class Command(testserver.Command): + def handle(self, *args, **kwargs): + patch_for_test_db_setup() + super(Command, self).handle(*args, **kwargs) diff --git a/awx/lib/site-packages/south/migration/__init__.py b/awx/lib/site-packages/south/migration/__init__.py new file mode 100644 index 0000000000..221ed071de --- /dev/null +++ b/awx/lib/site-packages/south/migration/__init__.py @@ -0,0 +1,235 @@ +""" +Main migration logic. 
+""" + +from __future__ import print_function + +import sys + +from django.core.exceptions import ImproperlyConfigured + +import south.db +from south import exceptions +from south.models import MigrationHistory +from south.db import db, DEFAULT_DB_ALIAS +from south.migration.migrators import (Backwards, Forwards, + DryRunMigrator, FakeMigrator, + LoadInitialDataMigrator) +from south.migration.base import Migration, Migrations +from south.migration.utils import SortedSet +from south.migration.base import all_migrations +from south.signals import pre_migrate, post_migrate + + +def to_apply(forwards, done): + return [m for m in forwards if m not in done] + +def to_unapply(backwards, done): + return [m for m in backwards if m in done] + +def problems(pending, done): + last = None + if not pending: + raise StopIteration() + for migration in pending: + if migration in done: + last = migration + continue + if last and migration not in done: + yield last, migration + +def forwards_problems(pending, done, verbosity): + """ + Takes the list of linearised pending migrations, and the set of done ones, + and returns the list of problems, if any. + """ + return inner_problem_check(problems(reversed(pending), done), done, verbosity) + +def backwards_problems(pending, done, verbosity): + return inner_problem_check(problems(pending, done), done, verbosity) + +def inner_problem_check(problems, done, verbosity): + "Takes a set of possible problems and gets the actual issues out of it." + result = [] + for last, migration in problems: + checked = set([]) + # 'Last' is the last applied migration. Step back from it until we + # either find nothing wrong, or we find something. + to_check = list(last.dependencies) + while to_check: + checking = to_check.pop() + if checking in checked: + continue + checked.add(checking) + + if checking not in done: + # That's bad. Error. + if verbosity: + print((" ! Migration %s should not have been applied " + "before %s but was." % (last, checking))) + result.append((last, checking)) + else: + to_check.extend(checking.dependencies) + return result + +def check_migration_histories(histories, delete_ghosts=False, ignore_ghosts=False): + "Checks that there's no 'ghost' migrations in the database." + exists = SortedSet() + ghosts = [] + for h in histories: + try: + m = h.get_migration() + m.migration() + except exceptions.UnknownMigration: + ghosts.append(h) + except ImproperlyConfigured: + pass # Ignore missing applications + else: + exists.add(m) + if ghosts: + # They may want us to delete ghosts. + if delete_ghosts: + for h in ghosts: + h.delete() + elif not ignore_ghosts: + raise exceptions.GhostMigrations(ghosts) + return exists + +def get_dependencies(target, migrations): + forwards = list + backwards = list + if target is None: + backwards = migrations[0].backwards_plan + else: + forwards = target.forwards_plan + # When migrating backwards we want to remove up to and + # including the next migration up in this app (not the next + # one, that includes other apps) + migration_before_here = target.next() + if migration_before_here: + backwards = migration_before_here.backwards_plan + return forwards, backwards + +def get_direction(target, applied, migrations, verbosity, interactive): + # Get the forwards and reverse dependencies for this target + forwards, backwards = get_dependencies(target, migrations) + # Is the whole forward branch applied? 
+    problems = None
+    forwards = forwards()
+    workplan = to_apply(forwards, applied)
+    if not workplan:
+        # If they're all applied, we only know it's not backwards
+        direction = None
+    else:
+        # If the remaining migrations are strictly a right segment of
+        # the forwards trace, we just need to go forwards to our
+        # target (and check for badness)
+        problems = forwards_problems(forwards, applied, verbosity)
+        direction = Forwards(verbosity=verbosity, interactive=interactive)
+    if not problems:
+        # What about the whole backward trace then?
+        backwards = backwards()
+        missing_backwards = to_apply(backwards, applied)
+        if missing_backwards != backwards:
+            # If what's missing is a strict left segment of backwards (i.e.
+            # all the higher migrations) then we need to go backwards
+            workplan = to_unapply(backwards, applied)
+            problems = backwards_problems(backwards, applied, verbosity)
+            direction = Backwards(verbosity=verbosity, interactive=interactive)
+    return direction, problems, workplan
+
+def get_migrator(direction, db_dry_run, fake, load_initial_data):
+    if not direction:
+        return direction
+    if db_dry_run:
+        direction = DryRunMigrator(migrator=direction, ignore_fail=False)
+    elif fake:
+        direction = FakeMigrator(migrator=direction)
+    elif load_initial_data:
+        direction = LoadInitialDataMigrator(migrator=direction)
+    return direction
+
+def get_unapplied_migrations(migrations, applied_migrations):
+    applied_migration_names = ['%s.%s' % (mi.app_name,mi.migration) for mi in applied_migrations]
+
+    for migration in migrations:
+        is_applied = '%s.%s' % (migration.app_label(), migration.name()) in applied_migration_names
+        if not is_applied:
+            yield migration
+
+def migrate_app(migrations, target_name=None, merge=False, fake=False, db_dry_run=False, yes=False, verbosity=0, load_initial_data=False, skip=False, database=DEFAULT_DB_ALIAS, delete_ghosts=False, ignore_ghosts=False, interactive=False):
+    app_label = migrations.app_label()
+
+    verbosity = int(verbosity)
+    # Fire off the pre-migrate signal
+    pre_migrate.send(None, app=app_label)
+
+    # If there aren't any, quit quizzically
+    if not migrations:
+        print("? You have no migrations for the '%s' app. You might want some." % app_label)
+        return
+
+    # Load the entire dependency graph
+    Migrations.calculate_dependencies()
+
+    # Check there's no strange ones in the database
+    applied_all = MigrationHistory.objects.filter(applied__isnull=False).order_by('applied').using(database)
+    applied = applied_all.filter(app_name=app_label).using(database)
+    south.db.db = south.db.dbs[database]
+    Migrations.invalidate_all_modules()
+
+    south.db.db.debug = (verbosity > 1)
+
+    if target_name == 'current-1':
+        if applied.count() > 1:
+            previous_migration = applied[applied.count() - 2]
+            if verbosity:
+                print('previous_migration: %s (applied: %s)' % (previous_migration.migration, previous_migration.applied))
+            target_name = previous_migration.migration
+        else:
+            if verbosity:
+                print('previous_migration: zero')
+            target_name = 'zero'
+    elif target_name == 'current+1':
+        try:
+            first_unapplied_migration = next(get_unapplied_migrations(migrations, applied))
+            target_name = first_unapplied_migration.name()
+        except StopIteration:
+            target_name = None
+
+    applied_all = check_migration_histories(applied_all, delete_ghosts, ignore_ghosts)
+
+    # Guess the target_name
+    target = migrations.guess_migration(target_name)
+    if verbosity:
+        if target_name not in ('zero', None) and target.name() != target_name:
+            print(" - Soft matched migration %s to %s."
% (target_name, + target.name())) + print("Running migrations for %s:" % app_label) + + # Get the forwards and reverse dependencies for this target + direction, problems, workplan = get_direction(target, applied_all, migrations, + verbosity, interactive) + if problems and not (merge or skip): + raise exceptions.InconsistentMigrationHistory(problems) + + # Perform the migration + migrator = get_migrator(direction, db_dry_run, fake, load_initial_data) + if migrator: + migrator.print_title(target) + success = migrator.migrate_many(target, workplan, database) + # Finally, fire off the post-migrate signal + if success: + post_migrate.send(None, app=app_label) + else: + if verbosity: + # Say there's nothing. + print('- Nothing to migrate.') + # If we have initial data enabled, and we're at the most recent + # migration, do initial data. + # Note: We use a fake Forwards() migrator here. It's never used really. + if load_initial_data: + migrator = LoadInitialDataMigrator(migrator=Forwards(verbosity=verbosity)) + migrator.load_initial_data(target, db=database) + # Send signal. + post_migrate.send(None, app=app_label) diff --git a/awx/lib/site-packages/south/migration/base.py b/awx/lib/site-packages/south/migration/base.py new file mode 100644 index 0000000000..8bd6a5a69f --- /dev/null +++ b/awx/lib/site-packages/south/migration/base.py @@ -0,0 +1,440 @@ +from __future__ import print_function + +from collections import deque +import datetime +from imp import reload +import os +import re +import sys + +from django.core.exceptions import ImproperlyConfigured +from django.db import models +from django.conf import settings +from django.utils import importlib + +from south import exceptions +from south.migration.utils import depends, dfs, flatten, get_app_label +from south.orm import FakeORM +from south.utils import memoize, ask_for_it_by_name, datetime_utils +from south.migration.utils import app_label_to_app_module +from south.utils.py3 import string_types, with_metaclass + +def all_migrations(applications=None): + """ + Returns all Migrations for all `applications` that are migrated. + """ + if applications is None: + applications = models.get_apps() + for model_module in applications: + # The app they've passed is the models module - go up one level + app_path = ".".join(model_module.__name__.split(".")[:-1]) + app = ask_for_it_by_name(app_path) + try: + yield Migrations(app) + except exceptions.NoMigrations: + pass + + +def application_to_app_label(application): + "Works out the app label from either the app label, the app name, or the module" + if isinstance(application, string_types): + app_label = application.split('.')[-1] + else: + app_label = application.__name__.split('.')[-1] + return app_label + + +class MigrationsMetaclass(type): + + """ + Metaclass which ensures there is only one instance of a Migrations for + any given app. + """ + + def __init__(self, name, bases, dict): + super(MigrationsMetaclass, self).__init__(name, bases, dict) + self.instances = {} + + def __call__(self, application, **kwds): + + app_label = application_to_app_label(application) + + # If we don't already have an instance, make one + if app_label not in self.instances: + self.instances[app_label] = super(MigrationsMetaclass, self).__call__(app_label_to_app_module(app_label), **kwds) + + return self.instances[app_label] + + def _clear_cache(self): + "Clears the cache of Migration objects." 
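+        # __call__ above memoizes one instance per app label (so, for a
+        # hypothetical label, Migrations('books') is Migrations('books'));
+        # emptying self.instances forces fresh instances on the next call.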
+ self.instances = {} + + +class Migrations(with_metaclass(MigrationsMetaclass, list)): + """ + Holds a list of Migration objects for a particular app. + """ + + if getattr(settings, "SOUTH_USE_PYC", False): + MIGRATION_FILENAME = re.compile(r'(?!__init__)' # Don't match __init__.py + r'[0-9a-zA-Z_]*' # Don't match dotfiles, or names with dots/invalid chars in them + r'(\.pyc?)?$') # Match .py or .pyc files, or module dirs + else: + MIGRATION_FILENAME = re.compile(r'(?!__init__)' # Don't match __init__.py + r'[0-9a-zA-Z_]*' # Don't match dotfiles, or names with dots/invalid chars in them + r'(\.py)?$') # Match only .py files, or module dirs + + def __init__(self, application, force_creation=False, verbose_creation=True): + "Constructor. Takes the module of the app, NOT its models (like get_app returns)" + self._cache = {} + self.set_application(application, force_creation, verbose_creation) + + def create_migrations_directory(self, verbose=True): + "Given an application, ensures that the migrations directory is ready." + migrations_dir = self.migrations_dir() + # Make the directory if it's not already there + if not os.path.isdir(migrations_dir): + if verbose: + print("Creating migrations directory at '%s'..." % migrations_dir) + os.mkdir(migrations_dir) + # Same for __init__.py + init_path = os.path.join(migrations_dir, "__init__.py") + if not os.path.isfile(init_path): + # Touch the init py file + if verbose: + print("Creating __init__.py in '%s'..." % migrations_dir) + open(init_path, "w").close() + + def migrations_dir(self): + """ + Returns the full path of the migrations directory. + If it doesn't exist yet, returns where it would exist, based on the + app's migrations module (defaults to app.migrations) + """ + module_path = self.migrations_module() + try: + module = importlib.import_module(module_path) + except ImportError: + # There's no migrations module made yet; guess! + try: + parent = importlib.import_module(".".join(module_path.split(".")[:-1])) + except ImportError: + # The parent doesn't even exist, that's an issue. + raise exceptions.InvalidMigrationModule( + application = self.application.__name__, + module = module_path, + ) + else: + # Good guess. + return os.path.join(os.path.dirname(parent.__file__), module_path.split(".")[-1]) + else: + # Get directory directly + return os.path.dirname(module.__file__) + + def migrations_module(self): + "Returns the module name of the migrations module for this" + app_label = application_to_app_label(self.application) + if hasattr(settings, "SOUTH_MIGRATION_MODULES"): + if app_label in settings.SOUTH_MIGRATION_MODULES: + # There's an override. + return settings.SOUTH_MIGRATION_MODULES[app_label] + return self._application.__name__ + '.migrations' + + def get_application(self): + return self._application + + def set_application(self, application, force_creation=False, verbose_creation=True): + """ + Called when the application for this Migrations is set. + Imports the migrations module object, and throws a paddy if it can't. 
+ """ + self._application = application + if not hasattr(application, 'migrations'): + try: + module = importlib.import_module(self.migrations_module()) + self._migrations = application.migrations = module + except ImportError: + if force_creation: + self.create_migrations_directory(verbose_creation) + module = importlib.import_module(self.migrations_module()) + self._migrations = application.migrations = module + else: + raise exceptions.NoMigrations(application) + self._load_migrations_module(application.migrations) + + application = property(get_application, set_application) + + def _load_migrations_module(self, module): + self._migrations = module + filenames = [] + dirname = self.migrations_dir() + for f in os.listdir(dirname): + if self.MIGRATION_FILENAME.match(os.path.basename(f)): + full_path = os.path.join(dirname, f) + # If it's a .pyc file, only append if the .py isn't already around + if f.endswith(".pyc") and (os.path.isfile(full_path[:-1])): + continue + # If it's a module directory, only append if it contains __init__.py[c]. + if os.path.isdir(full_path): + if not (os.path.isfile(os.path.join(full_path, "__init__.py")) or \ + (getattr(settings, "SOUTH_USE_PYC", False) and \ + os.path.isfile(os.path.join(full_path, "__init__.pyc")))): + continue + filenames.append(f) + filenames.sort() + self.extend(self.migration(f) for f in filenames) + + def migration(self, filename): + name = Migration.strip_filename(filename) + if name not in self._cache: + self._cache[name] = Migration(self, name) + return self._cache[name] + + def __getitem__(self, value): + if isinstance(value, string_types): + return self.migration(value) + return super(Migrations, self).__getitem__(value) + + def _guess_migration(self, prefix): + prefix = Migration.strip_filename(prefix) + matches = [m for m in self if m.name().startswith(prefix)] + if len(matches) == 1: + return matches[0] + elif len(matches) > 1: + raise exceptions.MultiplePrefixMatches(prefix, matches) + else: + raise exceptions.UnknownMigration(prefix, None) + + def guess_migration(self, target_name): + if target_name == 'zero' or not self: + return + elif target_name is None: + return self[-1] + else: + return self._guess_migration(prefix=target_name) + + def app_label(self): + return self._application.__name__.split('.')[-1] + + def full_name(self): + return self._migrations.__name__ + + @classmethod + def calculate_dependencies(cls, force=False): + "Goes through all the migrations, and works out the dependencies." + if getattr(cls, "_dependencies_done", False) and not force: + return + for migrations in all_migrations(): + for migration in migrations: + migration.calculate_dependencies() + cls._dependencies_done = True + + @staticmethod + def invalidate_all_modules(): + "Goes through all the migrations, and invalidates all cached modules." + for migrations in all_migrations(): + for migration in migrations: + migration.invalidate_module() + + def next_filename(self, name): + "Returns the fully-formatted filename of what a new migration 'name' would be" + highest_number = 0 + for migration in self: + try: + number = int(migration.name().split("_")[0]) + highest_number = max(highest_number, number) + except ValueError: + pass + # Work out the new filename + return "%04i_%s.py" % ( + highest_number + 1, + name, + ) + + +class Migration(object): + + """ + Class which represents a particular migration file on-disk. + """ + + def __init__(self, migrations, filename): + """ + Returns the migration class implied by 'filename'. 
+ """ + self.migrations = migrations + self.filename = filename + self.dependencies = set() + self.dependents = set() + + def __str__(self): + return self.app_label() + ':' + self.name() + + def __repr__(self): + return '<Migration: %s>' % str(self) + + def __eq__(self, other): + return self.app_label() == other.app_label() and self.name() == other.name() + + def __hash__(self): + return hash(str(self)) + + def app_label(self): + return self.migrations.app_label() + + @staticmethod + def strip_filename(filename): + return os.path.splitext(os.path.basename(filename))[0] + + def name(self): + return self.strip_filename(os.path.basename(self.filename)) + + def full_name(self): + return self.migrations.full_name() + '.' + self.name() + + def migration(self): + "Tries to load the actual migration module" + full_name = self.full_name() + try: + migration = sys.modules[full_name] + except KeyError: + try: + migration = __import__(full_name, {}, {}, ['Migration']) + except ImportError as e: + raise exceptions.UnknownMigration(self, sys.exc_info()) + except Exception as e: + raise exceptions.BrokenMigration(self, sys.exc_info()) + # Override some imports + migration._ = lambda x: x # Fake i18n + migration.datetime = datetime_utils + return migration + migration = memoize(migration) + + def migration_class(self): + "Returns the Migration class from the module" + return self.migration().Migration + + def migration_instance(self): + "Instantiates the migration_class" + return self.migration_class()() + migration_instance = memoize(migration_instance) + + def previous(self): + "Returns the migration that comes before this one in the sequence." + index = self.migrations.index(self) - 1 + if index < 0: + return None + return self.migrations[index] + previous = memoize(previous) + + def next(self): + "Returns the migration that comes after this one in the sequence." + index = self.migrations.index(self) + 1 + if index >= len(self.migrations): + return None + return self.migrations[index] + next = memoize(next) + + def _get_dependency_objects(self, attrname): + """ + Given the name of an attribute (depends_on or needed_by), either yields + a list of migration objects representing it, or errors out. + """ + for app, name in getattr(self.migration_class(), attrname, []): + try: + migrations = Migrations(app) + except ImproperlyConfigured: + raise exceptions.DependsOnUnmigratedApplication(self, app) + migration = migrations.migration(name) + try: + migration.migration() + except exceptions.UnknownMigration: + raise exceptions.DependsOnUnknownMigration(self, migration) + if migration.is_before(self) == False: + raise exceptions.DependsOnHigherMigration(self, migration) + yield migration + + def calculate_dependencies(self): + """ + Loads dependency info for this migration, and stores it in itself + and any other relevant migrations. + """ + # Normal deps first + for migration in self._get_dependency_objects("depends_on"): + self.dependencies.add(migration) + migration.dependents.add(self) + # And reverse deps + for migration in self._get_dependency_objects("needed_by"): + self.dependents.add(migration) + migration.dependencies.add(self) + # And implicit ordering deps + previous = self.previous() + if previous: + self.dependencies.add(previous) + previous.dependents.add(self) + + def invalidate_module(self): + """ + Removes the cached version of this migration's module import, so we + have to re-import it. Used when south.db.db changes. 
+ """ + reload(self.migration()) + self.migration._invalidate() + + def forwards(self): + return self.migration_instance().forwards + + def backwards(self): + return self.migration_instance().backwards + + def forwards_plan(self): + """ + Returns a list of Migration objects to be applied, in order. + + This list includes `self`, which will be applied last. + """ + return depends(self, lambda x: x.dependencies) + + def _backwards_plan(self): + return depends(self, lambda x: x.dependents) + + def backwards_plan(self): + """ + Returns a list of Migration objects to be unapplied, in order. + + This list includes `self`, which will be unapplied last. + """ + return list(self._backwards_plan()) + + def is_before(self, other): + if self.migrations == other.migrations: + if self.filename < other.filename: + return True + return False + + def is_after(self, other): + if self.migrations == other.migrations: + if self.filename > other.filename: + return True + return False + + def prev_orm(self): + if getattr(self.migration_class(), 'symmetrical', False): + return self.orm() + previous = self.previous() + if previous is None: + # First migration? The 'previous ORM' is empty. + return FakeORM(None, self.app_label()) + return previous.orm() + prev_orm = memoize(prev_orm) + + def orm(self): + return FakeORM(self.migration_class(), self.app_label()) + orm = memoize(orm) + + def no_dry_run(self): + migration_class = self.migration_class() + try: + return migration_class.no_dry_run + except AttributeError: + return False diff --git a/awx/lib/site-packages/south/migration/migrators.py b/awx/lib/site-packages/south/migration/migrators.py new file mode 100644 index 0000000000..1be895dcf3 --- /dev/null +++ b/awx/lib/site-packages/south/migration/migrators.py @@ -0,0 +1,357 @@ +from __future__ import print_function + +from copy import copy, deepcopy +import datetime +import inspect +import sys +import traceback +try: + from cStringIO import StringIO # python 2 +except ImportError: + from io import StringIO # python 3 + +from django.core.management import call_command +from django.core.management.commands import loaddata +from django.db import models + +import south.db +from south import exceptions +from south.db import DEFAULT_DB_ALIAS +from south.models import MigrationHistory +from south.signals import ran_migration + + +class Migrator(object): + def __init__(self, verbosity=0, interactive=False): + self.verbosity = int(verbosity) + self.interactive = bool(interactive) + + @staticmethod + def title(target): + raise NotImplementedError() + + def print_title(self, target): + if self.verbosity: + print(self.title(target)) + + @staticmethod + def status(target): + raise NotImplementedError() + + def print_status(self, migration): + status = self.status(migration) + if self.verbosity and status: + print(status) + + @staticmethod + def orm(migration): + raise NotImplementedError() + + def backwards(self, migration): + return self._wrap_direction(migration.backwards(), migration.prev_orm()) + + def direction(self, migration): + raise NotImplementedError() + + @staticmethod + def _wrap_direction(direction, orm): + args = inspect.getargspec(direction) + if len(args[0]) == 1: + # Old migration, no ORM should be passed in + return direction + return (lambda: direction(orm)) + + @staticmethod + def record(migration, database): + raise NotImplementedError() + + def run_migration_error(self, migration, extra_info=''): + return ( + ' ! Error found during real run of migration! Aborting.\n' + '\n' + ' ! 
Since you have a database that does not support running\n' + ' ! schema-altering statements in transactions, we have had \n' + ' ! to leave it in an interim state between migrations.\n' + '%s\n' + ' ! The South developers regret this has happened, and would\n' + ' ! like to gently persuade you to consider a slightly\n' + ' ! easier-to-deal-with DBMS (one that supports DDL transactions)\n' + ' ! NOTE: The error which caused the migration to fail is further up.' + ) % extra_info + + def run_migration(self, migration, database): + migration_function = self.direction(migration) + south.db.db.start_transaction() + try: + migration_function() + south.db.db.execute_deferred_sql() + if not isinstance(getattr(self, '_wrapper', self), DryRunMigrator): + # record us as having done this in the same transaction, + # since we're not in a dry run + self.record(migration, database) + except: + south.db.db.rollback_transaction() + if not south.db.db.has_ddl_transactions: + print(self.run_migration_error(migration)) + print("Error in migration: %s" % migration) + raise + else: + try: + south.db.db.commit_transaction() + except: + print("Error during commit in migration: %s" % migration) + raise + + + def run(self, migration, database): + # Get the correct ORM. + south.db.db.current_orm = self.orm(migration) + # If we're not already in a dry run, and the database doesn't support + # running DDL inside a transaction, *cough*MySQL*cough* then do a dry + # run first. + if not isinstance(getattr(self, '_wrapper', self), DryRunMigrator): + if not south.db.db.has_ddl_transactions: + dry_run = DryRunMigrator(migrator=self, ignore_fail=False) + dry_run.run_migration(migration, database) + return self.run_migration(migration, database) + + + def send_ran_migration(self, migration): + ran_migration.send(None, + app=migration.app_label(), + migration=migration, + method=self.__class__.__name__.lower()) + + def migrate(self, migration, database): + """ + Runs the specified migration forwards/backwards, in order. + """ + app = migration.migrations._migrations + migration_name = migration.name() + self.print_status(migration) + result = self.run(migration, database) + self.send_ran_migration(migration) + return result + + def migrate_many(self, target, migrations, database): + raise NotImplementedError() + + +class MigratorWrapper(object): + def __init__(self, migrator, *args, **kwargs): + self._migrator = copy(migrator) + attributes = dict([(k, getattr(self, k)) + for k in self.__class__.__dict__ + if not k.startswith('__')]) + self._migrator.__dict__.update(attributes) + self._migrator.__dict__['_wrapper'] = self + + def __getattr__(self, name): + return getattr(self._migrator, name) + + +class DryRunMigrator(MigratorWrapper): + def __init__(self, ignore_fail=True, *args, **kwargs): + super(DryRunMigrator, self).__init__(*args, **kwargs) + self._ignore_fail = ignore_fail + + def _run_migration(self, migration): + if migration.no_dry_run(): + if self.verbosity: + print(" - Migration '%s' is marked for no-dry-run." 
% migration) + return + south.db.db.dry_run = True + # preserve the constraint cache as it can be mutated by the dry run + constraint_cache = deepcopy(south.db.db._constraint_cache) + if self._ignore_fail: + south.db.db.debug, old_debug = False, south.db.db.debug + pending_creates = south.db.db.get_pending_creates() + south.db.db.start_transaction() + migration_function = self.direction(migration) + try: + try: + migration_function() + south.db.db.execute_deferred_sql() + except: + raise exceptions.FailedDryRun(migration, sys.exc_info()) + finally: + south.db.db.rollback_transactions_dry_run() + if self._ignore_fail: + south.db.db.debug = old_debug + south.db.db.clear_run_data(pending_creates) + south.db.db.dry_run = False + # restore the preserved constraint cache from before dry run was + # executed + south.db.db._constraint_cache = constraint_cache + + def run_migration(self, migration, database): + try: + self._run_migration(migration) + except exceptions.FailedDryRun: + if self._ignore_fail: + return False + raise + + def send_ran_migration(self, *args, **kwargs): + pass + + +class FakeMigrator(MigratorWrapper): + def run(self, migration, database): + # Don't actually run, just record as if ran + self.record(migration, database) + if self.verbosity: + print(' (faked)') + + def send_ran_migration(self, *args, **kwargs): + pass + + +class LoadInitialDataMigrator(MigratorWrapper): + + def load_initial_data(self, target, db='default'): + if target is None or target != target.migrations[-1]: + return + # Load initial data, if we ended up at target + if self.verbosity: + print(" - Loading initial data for %s." % target.app_label()) + # Override Django's get_apps call temporarily to only load from the + # current app + old_get_apps = models.get_apps + new_get_apps = lambda: [models.get_app(target.app_label())] + models.get_apps = new_get_apps + loaddata.get_apps = new_get_apps + try: + call_command('loaddata', 'initial_data', verbosity=self.verbosity, database=db) + finally: + models.get_apps = old_get_apps + loaddata.get_apps = old_get_apps + + def migrate_many(self, target, migrations, database): + migrator = self._migrator + result = migrator.__class__.migrate_many(migrator, target, migrations, database) + if result: + self.load_initial_data(target, db=database) + return True + + +class Forwards(Migrator): + """ + Runs the specified migration forwards, in order. + """ + torun = 'forwards' + + @staticmethod + def title(target): + if target is not None: + return " - Migrating forwards to %s." % target.name() + else: + assert False, "You cannot migrate forwards to zero." + + @staticmethod + def status(migration): + return ' > %s' % migration + + @staticmethod + def orm(migration): + return migration.orm() + + def forwards(self, migration): + return self._wrap_direction(migration.forwards(), migration.orm()) + + direction = forwards + + @staticmethod + def record(migration, database): + # Record us as having done this + record = MigrationHistory.for_migration(migration, database) + try: + from django.utils.timezone import now + record.applied = now() + except ImportError: + record.applied = datetime.datetime.utcnow() + if database != DEFAULT_DB_ALIAS: + record.save(using=database) + else: + # Django 1.1 and below always go down this branch. 
+ record.save() + + def format_backwards(self, migration): + if migration.no_dry_run(): + return " (migration cannot be dry-run; cannot discover commands)" + old_debug, old_dry_run = south.db.db.debug, south.db.db.dry_run + south.db.db.debug = south.db.db.dry_run = True + stdout = sys.stdout + sys.stdout = StringIO() + try: + try: + self.backwards(migration)() + return sys.stdout.getvalue() + except: + raise + finally: + south.db.db.debug, south.db.db.dry_run = old_debug, old_dry_run + sys.stdout = stdout + + def run_migration_error(self, migration, extra_info=''): + extra_info = ('\n' + '! You *might* be able to recover with:' + '%s' + '%s' % + (self.format_backwards(migration), extra_info)) + return super(Forwards, self).run_migration_error(migration, extra_info) + + def migrate_many(self, target, migrations, database): + try: + for migration in migrations: + result = self.migrate(migration, database) + if result is False: # The migrations errored, but nicely. + return False + finally: + # Call any pending post_syncdb signals + south.db.db.send_pending_create_signals(verbosity=self.verbosity, + interactive=self.interactive) + return True + + +class Backwards(Migrator): + """ + Runs the specified migration backwards, in order. + """ + torun = 'backwards' + + @staticmethod + def title(target): + if target is None: + return " - Migrating backwards to zero state." + else: + return " - Migrating backwards to just after %s." % target.name() + + @staticmethod + def status(migration): + return ' < %s' % migration + + @staticmethod + def orm(migration): + return migration.prev_orm() + + direction = Migrator.backwards + + @staticmethod + def record(migration, database): + # Record us as having not done this + record = MigrationHistory.for_migration(migration, database) + if record.id is not None: + if database != DEFAULT_DB_ALIAS: + record.delete(using=database) + else: + # Django 1.1 always goes down here + record.delete() + + def migrate_many(self, target, migrations, database): + for migration in migrations: + self.migrate(migration, database) + return True + + + diff --git a/awx/lib/site-packages/south/migration/utils.py b/awx/lib/site-packages/south/migration/utils.py new file mode 100644 index 0000000000..68b91645ac --- /dev/null +++ b/awx/lib/site-packages/south/migration/utils.py @@ -0,0 +1,94 @@ +import sys +from collections import deque + +from django.utils.datastructures import SortedDict +from django.db import models + +from south import exceptions + + +class SortedSet(SortedDict): + def __init__(self, data=tuple()): + self.extend(data) + + def __str__(self): + return "SortedSet(%s)" % list(self) + + def add(self, value): + self[value] = True + + def remove(self, value): + del self[value] + + def extend(self, iterable): + [self.add(k) for k in iterable] + + +def get_app_label(app): + """ + Returns the _internal_ app label for the given app module. + i.e. 
for <module django.contrib.auth.models> will return 'auth' + """ + return app.__name__.split('.')[-2] + + +def app_label_to_app_module(app_label): + """ + Given the app label, returns the module of the app itself (unlike models.get_app, + which returns the models module) + """ + # Get the models module + app = models.get_app(app_label) + module_name = ".".join(app.__name__.split(".")[:-1]) + try: + module = sys.modules[module_name] + except KeyError: + __import__(module_name, {}, {}, ['']) + module = sys.modules[module_name] + return module + + +def flatten(*stack): + stack = deque(stack) + while stack: + try: + x = next(stack[0]) + except TypeError: + stack[0] = iter(stack[0]) + x = next(stack[0]) + except StopIteration: + stack.popleft() + continue + if hasattr(x, '__iter__') and not isinstance(x, str): + stack.appendleft(x) + else: + yield x + +dependency_cache = {} + +def _dfs(start, get_children, path): + if (start, get_children) in dependency_cache: + return dependency_cache[(start, get_children)] + + results = [] + if start in path: + raise exceptions.CircularDependency(path[path.index(start):] + [start]) + path.append(start) + results.append(start) + children = sorted(get_children(start), key=lambda x: str(x)) + + # We need to apply all the migrations this one depends on + for n in children: + results = _dfs(n, get_children, path) + results + + path.pop() + + results = list(SortedSet(results)) + dependency_cache[(start, get_children)] = results + return results + +def dfs(start, get_children): + return _dfs(start, get_children, []) + +def depends(start, get_children): + return dfs(start, get_children) diff --git a/awx/lib/site-packages/south/models.py b/awx/lib/site-packages/south/models.py new file mode 100644 index 0000000000..8239d618d6 --- /dev/null +++ b/awx/lib/site-packages/south/models.py @@ -0,0 +1,37 @@ +from django.db import models +from south.db import DEFAULT_DB_ALIAS + +class MigrationHistory(models.Model): + app_name = models.CharField(max_length=255) + migration = models.CharField(max_length=255) + applied = models.DateTimeField(blank=True) + + @classmethod + def for_migration(cls, migration, database): + try: + # Switch on multi-db-ness + if database != DEFAULT_DB_ALIAS: + # Django 1.2 + objects = cls.objects.using(database) + else: + # Django <= 1.1 + objects = cls.objects + return objects.get( + app_name=migration.app_label(), + migration=migration.name(), + ) + except cls.DoesNotExist: + return cls( + app_name=migration.app_label(), + migration=migration.name(), + ) + + def get_migrations(self): + from south.migration.base import Migrations + return Migrations(self.app_name) + + def get_migration(self): + return self.get_migrations().migration(self.migration) + + def __str__(self): + return "<%s: %s>" % (self.app_name, self.migration) diff --git a/awx/lib/site-packages/south/modelsinspector.py b/awx/lib/site-packages/south/modelsinspector.py new file mode 100644 index 0000000000..115764d788 --- /dev/null +++ b/awx/lib/site-packages/south/modelsinspector.py @@ -0,0 +1,463 @@ +""" +Like the old south.modelsparser, but using introspection where possible +rather than direct inspection of models.py. 
+""" + +from __future__ import print_function + +import datetime +import re +import decimal + +from south.utils import get_attribute, auto_through +from south.utils.py3 import text_type + +from django.db import models +from django.db.models.base import ModelBase, Model +from django.db.models.fields import NOT_PROVIDED +from django.conf import settings +from django.utils.functional import Promise +from django.contrib.contenttypes import generic +from django.utils.datastructures import SortedDict +from django.utils import datetime_safe + +NOISY = False + +try: + from django.utils import timezone +except ImportError: + timezone = False + + +# Define any converter functions first to prevent NameErrors + +def convert_on_delete_handler(value): + django_db_models_module = 'models' # relative to standard import 'django.db' + if hasattr(models, "PROTECT"): + if value in (models.CASCADE, models.PROTECT, models.DO_NOTHING, models.SET_DEFAULT): + # straightforward functions + return '%s.%s' % (django_db_models_module, value.__name__) + else: + # This is totally dependent on the implementation of django.db.models.deletion.SET + func_name = getattr(value, '__name__', None) + if func_name == 'set_on_delete': + # we must inspect the function closure to see what parameters were passed in + closure_contents = value.__closure__[0].cell_contents + if closure_contents is None: + return "%s.SET_NULL" % (django_db_models_module) + # simple function we can perhaps cope with: + elif hasattr(closure_contents, '__call__'): + raise ValueError("South does not support on_delete with SET(function) as values.") + else: + # Attempt to serialise the value + return "%s.SET(%s)" % (django_db_models_module, value_clean(closure_contents)) + raise ValueError("%s was not recognized as a valid model deletion handler. Possible values: %s." % (value, ', '.join(f.__name__ for f in (models.CASCADE, models.PROTECT, models.SET, models.SET_NULL, models.SET_DEFAULT, models.DO_NOTHING)))) + else: + raise ValueError("on_delete argument encountered in Django version that does not support it") + +# Gives information about how to introspect certain fields. +# This is a list of triples; the first item is a list of fields it applies to, +# (note that isinstance is used, so superclasses are perfectly valid here) +# the second is a list of positional argument descriptors, and the third +# is a list of keyword argument descriptors. +# Descriptors are of the form: +# [attrname, options] +# Where attrname is the attribute on the field to get the value from, and options +# is an optional dict. +# +# The introspector uses the combination of all matching entries, in order. 
+ +introspection_details = [ + ( + (models.Field, ), + [], + { + "null": ["null", {"default": False}], + "blank": ["blank", {"default": False, "ignore_if":"primary_key"}], + "primary_key": ["primary_key", {"default": False}], + "max_length": ["max_length", {"default": None}], + "unique": ["_unique", {"default": False}], + "db_index": ["db_index", {"default": False}], + "default": ["default", {"default": NOT_PROVIDED, "ignore_dynamics": True}], + "db_column": ["db_column", {"default": None}], + "db_tablespace": ["db_tablespace", {"default": settings.DEFAULT_INDEX_TABLESPACE}], + }, + ), + ( + (models.ForeignKey, models.OneToOneField), + [], + dict([ + ("to", ["rel.to", {}]), + ("to_field", ["rel.field_name", {"default_attr": "rel.to._meta.pk.name"}]), + ("related_name", ["rel.related_name", {"default": None}]), + ("db_index", ["db_index", {"default": True}]), + ("on_delete", ["rel.on_delete", {"default": getattr(models, "CASCADE", None), "is_django_function": True, "converter": convert_on_delete_handler, "ignore_missing": True}]) + ]) + ), + ( + (models.ManyToManyField,), + [], + { + "to": ["rel.to", {}], + "symmetrical": ["rel.symmetrical", {"default": True}], + "related_name": ["rel.related_name", {"default": None}], + "db_table": ["db_table", {"default": None}], + # TODO: Kind of ugly to add this one-time-only option + "through": ["rel.through", {"ignore_if_auto_through": True}], + }, + ), + ( + (models.DateField, models.TimeField), + [], + { + "auto_now": ["auto_now", {"default": False}], + "auto_now_add": ["auto_now_add", {"default": False}], + }, + ), + ( + (models.DecimalField, ), + [], + { + "max_digits": ["max_digits", {"default": None}], + "decimal_places": ["decimal_places", {"default": None}], + }, + ), + ( + (models.SlugField, ), + [], + { + "db_index": ["db_index", {"default": True}], + }, + ), + ( + (models.BooleanField, ), + [], + { + "default": ["default", {"default": NOT_PROVIDED, "converter": bool}], + "blank": ["blank", {"default": True, "ignore_if":"primary_key"}], + }, + ), + ( + (models.FilePathField, ), + [], + { + "path": ["path", {"default": ''}], + "match": ["match", {"default": None}], + "recursive": ["recursive", {"default": False}], + }, + ), + ( + (generic.GenericRelation, ), + [], + { + "to": ["rel.to", {}], + "symmetrical": ["rel.symmetrical", {"default": True}], + "object_id_field": ["object_id_field_name", {"default": "object_id"}], + "content_type_field": ["content_type_field_name", {"default": "content_type"}], + "blank": ["blank", {"default": True}], + }, + ), +] + +# Regexes of allowed field full paths +allowed_fields = [ + "^django\.db", + "^django\.contrib\.contenttypes\.generic", + "^django\.contrib\.localflavor", +] + +# Regexes of ignored fields (custom fields which look like fields, but have no column behind them) +ignored_fields = [ + "^django\.contrib\.contenttypes\.generic\.GenericRelation", + "^django\.contrib\.contenttypes\.generic\.GenericForeignKey", +] + +# Similar, but for Meta, so just the inner level (kwds). 
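+# (For example, a model Book in an app "shop" with no explicit db_table gets
+# the computed default "shop_book" via default_attr_concat, so db_table is
+# only frozen when it differs from that value.)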
+meta_details = { + "db_table": ["db_table", {"default_attr_concat": ["%s_%s", "app_label", "module_name"]}], + "db_tablespace": ["db_tablespace", {"default": settings.DEFAULT_TABLESPACE}], + "unique_together": ["unique_together", {"default": []}], + "index_together": ["index_together", {"default": [], "ignore_missing": True}], + "ordering": ["ordering", {"default": []}], + "proxy": ["proxy", {"default": False, "ignore_missing": True}], +} + + +def add_introspection_rules(rules=[], patterns=[]): + "Allows you to add some introspection rules at runtime, e.g. for 3rd party apps." + assert isinstance(rules, (list, tuple)) + assert isinstance(patterns, (list, tuple)) + allowed_fields.extend(patterns) + introspection_details.extend(rules) + + +def add_ignored_fields(patterns): + "Allows you to add some ignore field patterns." + assert isinstance(patterns, (list, tuple)) + ignored_fields.extend(patterns) + + +def can_ignore(field): + """ + Returns True if we know for certain that we can ignore this field, False + otherwise. + """ + full_name = "%s.%s" % (field.__class__.__module__, field.__class__.__name__) + for regex in ignored_fields: + if re.match(regex, full_name): + return True + return False + + +def can_introspect(field): + """ + Returns True if we are allowed to introspect this field, False otherwise. + ('allowed' means 'in core'. Custom fields can declare they are introspectable + by the default South rules by adding the attribute _south_introspects = True.) + """ + # Check for special attribute + if hasattr(field, "_south_introspects") and field._south_introspects: + return True + # Check it's an introspectable field + full_name = "%s.%s" % (field.__class__.__module__, field.__class__.__name__) + for regex in allowed_fields: + if re.match(regex, full_name): + return True + return False + + +def matching_details(field): + """ + Returns the union of all matching entries in introspection_details for the field. + """ + our_args = [] + our_kwargs = {} + for classes, args, kwargs in introspection_details: + if any([isinstance(field, x) for x in classes]): + our_args.extend(args) + our_kwargs.update(kwargs) + return our_args, our_kwargs + + +class IsDefault(Exception): + """ + Exception for when a field contains its default value. + """ + + +def get_value(field, descriptor): + """ + Gets an attribute value from a Field instance and formats it. + """ + attrname, options = descriptor + # If the options say it's not a attribute name but a real value, use that. + if options.get('is_value', False): + value = attrname + else: + try: + value = get_attribute(field, attrname) + except AttributeError: + if options.get("ignore_missing", False): + raise IsDefault + else: + raise + + # Lazy-eval functions get eval'd. + if isinstance(value, Promise): + value = text_type(value) + # If the value is the same as the default, omit it for clarity + if "default" in options and value == options['default']: + raise IsDefault + # If there's an ignore_if, use it + if "ignore_if" in options: + if get_attribute(field, options['ignore_if']): + raise IsDefault + # If there's an ignore_if_auto_through which is True, use it + if options.get("ignore_if_auto_through", False): + if auto_through(field): + raise IsDefault + # Some default values need to be gotten from an attribute too. + if "default_attr" in options: + default_value = get_attribute(field, options['default_attr']) + if value == default_value: + raise IsDefault + # Some are made from a formatting string and several attrs (e.g. 
db_table) + if "default_attr_concat" in options: + format, attrs = options['default_attr_concat'][0], options['default_attr_concat'][1:] + default_value = format % tuple(map(lambda x: get_attribute(field, x), attrs)) + if value == default_value: + raise IsDefault + # Clean and return the value + return value_clean(value, options) + + +def value_clean(value, options={}): + "Takes a value and cleans it up (so e.g. it has timezone working right)" + # Lazy-eval functions get eval'd. + if isinstance(value, Promise): + value = text_type(value) + # Callables get called. + if not options.get('is_django_function', False) and callable(value) and not isinstance(value, ModelBase): + # Datetime.datetime.now is special, as we can access it from the eval + # context (and because it changes all the time; people will file bugs otherwise). + if value == datetime.datetime.now: + return "datetime.datetime.now" + elif value == datetime.datetime.utcnow: + return "datetime.datetime.utcnow" + elif value == datetime.date.today: + return "datetime.date.today" + # In case we use Django's own now function, revert to datetime's + # original one since we'll deal with timezones on our own. + elif timezone and value == timezone.now: + return "datetime.datetime.now" + # All other callables get called. + value = value() + # Models get their own special repr() + if isinstance(value, ModelBase): + # If it's a proxy model, follow it back to its non-proxy parent + if getattr(value._meta, "proxy", False): + value = value._meta.proxy_for_model + return "orm['%s.%s']" % (value._meta.app_label, value._meta.object_name) + # As do model instances + if isinstance(value, Model): + if options.get("ignore_dynamics", False): + raise IsDefault + return "orm['%s.%s'].objects.get(pk=%r)" % (value.__class__._meta.app_label, value.__class__._meta.object_name, value.pk) + # Make sure Decimal is converted down into a string + if isinstance(value, decimal.Decimal): + value = str(value) + # in case the value is timezone aware + datetime_types = ( + datetime.datetime, + datetime.time, + datetime_safe.datetime, + ) + if (timezone and isinstance(value, datetime_types) and + getattr(settings, 'USE_TZ', False) and + value is not None and timezone.is_aware(value)): + default_timezone = timezone.get_default_timezone() + value = timezone.make_naive(value, default_timezone) + # datetime_safe has an improper repr value + if isinstance(value, datetime_safe.datetime): + value = datetime.datetime(*value.utctimetuple()[:7]) + # converting a date value to a datetime to be able to handle + # timezones later gracefully + elif isinstance(value, (datetime.date, datetime_safe.date)): + value = datetime.datetime(*value.timetuple()[:3]) + # Now, apply the converter func if there is one + if "converter" in options: + value = options['converter'](value) + # Return the final value + if options.get('is_django_function', False): + return value + else: + return repr(value) + + +def introspector(field): + """ + Given a field, introspects its definition triple. + """ + arg_defs, kwarg_defs = matching_details(field) + args = [] + kwargs = {} + # For each argument, use the descriptor to get the real value. + for defn in arg_defs: + try: + args.append(get_value(field, defn)) + except IsDefault: + pass + for kwd, defn in kwarg_defs.items(): + try: + kwargs[kwd] = get_value(field, defn) + except IsDefault: + pass + return args, kwargs + + +def get_model_fields(model, m2m=False): + """ + Given a model class, returns a dict of {field_name: field_triple} defs. 
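+    A "field triple" is (full.field.class.path, [args], {kwargs}) as produced
+    by introspector(); the value is None for fields that can neither be
+    ignored nor introspected.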
+ """ + + field_defs = SortedDict() + inherited_fields = {} + + # Go through all bases (that are themselves models, but not Model) + for base in model.__bases__: + if hasattr(base, '_meta') and issubclass(base, models.Model): + if not base._meta.abstract: + # Looks like we need their fields, Ma. + inherited_fields.update(get_model_fields(base)) + + # Now, go through all the fields and try to get their definition + source = model._meta.local_fields[:] + if m2m: + source += model._meta.local_many_to_many + + for field in source: + # Can we ignore it completely? + if can_ignore(field): + continue + # Does it define a south_field_triple method? + if hasattr(field, "south_field_triple"): + if NOISY: + print(" ( Nativing field: %s" % field.name) + field_defs[field.name] = field.south_field_triple() + # Can we introspect it? + elif can_introspect(field): + # Get the full field class path. + field_class = field.__class__.__module__ + "." + field.__class__.__name__ + # Run this field through the introspector + args, kwargs = introspector(field) + # Workaround for Django bug #13987 + if model._meta.pk.column == field.column and 'primary_key' not in kwargs: + kwargs['primary_key'] = True + # That's our definition! + field_defs[field.name] = (field_class, args, kwargs) + # Shucks, no definition! + else: + if NOISY: + print(" ( Nodefing field: %s" % field.name) + field_defs[field.name] = None + + # If they've used the horrific hack that is order_with_respect_to, deal with + # it. + if model._meta.order_with_respect_to: + field_defs['_order'] = ("django.db.models.fields.IntegerField", [], {"default": "0"}) + + return field_defs + + +def get_model_meta(model): + """ + Given a model class, will return the dict representing the Meta class. + """ + + # Get the introspected attributes + meta_def = {} + for kwd, defn in meta_details.items(): + try: + meta_def[kwd] = get_value(model._meta, defn) + except IsDefault: + pass + + # Also, add on any non-abstract model base classes. + # This is called _ormbases as the _bases variable was previously used + # for a list of full class paths to bases, so we can't conflict. + for base in model.__bases__: + if hasattr(base, '_meta') and issubclass(base, models.Model): + if not base._meta.abstract: + # OK, that matches our terms. + if "_ormbases" not in meta_def: + meta_def['_ormbases'] = [] + meta_def['_ormbases'].append("%s.%s" % ( + base._meta.app_label, + base._meta.object_name, + )) + + return meta_def + + +# Now, load the built-in South introspection plugins +import south.introspection_plugins diff --git a/awx/lib/site-packages/south/orm.py b/awx/lib/site-packages/south/orm.py new file mode 100644 index 0000000000..1e5d56ed79 --- /dev/null +++ b/awx/lib/site-packages/south/orm.py @@ -0,0 +1,403 @@ +""" +South's fake ORM; lets you not have to write SQL inside migrations. +Roughly emulates the real Django ORM, to a point. +""" + +from __future__ import print_function + +import inspect + +from django.db import models +from django.db.models.loading import cache +from django.core.exceptions import ImproperlyConfigured + +from south.db import db +from south.utils import ask_for_it_by_name, datetime_utils +from south.hacks import hacks +from south.exceptions import UnfreezeMeLater, ORMBaseNotIncluded, ImpossibleORMUnfreeze +from south.utils.py3 import string_types + + +class ModelsLocals(object): + + """ + Custom dictionary-like class to be locals(); + falls back to lowercase search for items that don't exist + (because we store model names as lowercase). 
+ """ + + def __init__(self, data): + self.data = data + + def __getitem__(self, key): + try: + return self.data[key] + except KeyError: + return self.data[key.lower()] + + +# Stores already-created ORMs. +_orm_cache = {} + +def FakeORM(*args): + """ + Creates a Fake Django ORM. + This is actually a memoised constructor; the real class is _FakeORM. + """ + if not args in _orm_cache: + _orm_cache[args] = _FakeORM(*args) + return _orm_cache[args] + + +class LazyFakeORM(object): + """ + In addition to memoising the ORM call, this function lazily generates them + for a Migration class. Assign the result of this to (for example) + .orm, and as soon as .orm is accessed the ORM will be created. + """ + + def __init__(self, *args): + self._args = args + self.orm = None + + def __get__(self, obj, type=None): + if not self.orm: + self.orm = FakeORM(*self._args) + return self.orm + + +class _FakeORM(object): + + """ + Simulates the Django ORM at some point in time, + using a frozen definition on the Migration class. + """ + + def __init__(self, cls, app): + self.default_app = app + self.cls = cls + # Try loading the models off the migration class; default to no models. + self.models = {} + try: + self.models_source = cls.models + except AttributeError: + return + + # Start a 'new' AppCache + hacks.clear_app_cache() + + # Now, make each model's data into a FakeModel + # We first make entries for each model that are just its name + # This allows us to have circular model dependency loops + model_names = [] + for name, data in self.models_source.items(): + # Make sure there's some kind of Meta + if "Meta" not in data: + data['Meta'] = {} + try: + app_label, model_name = name.split(".", 1) + except ValueError: + app_label = self.default_app + model_name = name + + # If there's an object_name in the Meta, use it and remove it + if "object_name" in data['Meta']: + model_name = data['Meta']['object_name'] + del data['Meta']['object_name'] + + name = "%s.%s" % (app_label, model_name) + self.models[name.lower()] = name + model_names.append((name.lower(), app_label, model_name, data)) + + # Loop until model_names is entry, or hasn't shrunk in size since + # last iteration. + # The make_model method can ask to postpone a model; it's then pushed + # to the back of the queue. Because this is currently only used for + # inheritance, it should thus theoretically always decrease by one. + last_size = None + while model_names: + # First, make sure we've shrunk. + if len(model_names) == last_size: + raise ImpossibleORMUnfreeze() + last_size = len(model_names) + # Make one run through + postponed_model_names = [] + for name, app_label, model_name, data in model_names: + try: + self.models[name] = self.make_model(app_label, model_name, data) + except UnfreezeMeLater: + postponed_model_names.append((name, app_label, model_name, data)) + # Reset + model_names = postponed_model_names + + # And perform the second run to iron out any circular/backwards depends. + self.retry_failed_fields() + + # Force evaluation of relations on the models now + for model in self.models.values(): + model._meta.get_all_field_names() + + # Reset AppCache + hacks.unclear_app_cache() + + + def __iter__(self): + return iter(self.models.values()) + + + def __getattr__(self, key): + fullname = (self.default_app+"."+key).lower() + try: + return self.models[fullname] + except KeyError: + raise AttributeError("The model '%s' from the app '%s' is not available in this migration. 
(Did you use orm.ModelName, not orm['app.ModelName']?)" % (key, self.default_app)) + + + def __getitem__(self, key): + # Detect if they asked for a field on a model or not. + if ":" in key: + key, fname = key.split(":") + else: + fname = None + # Now, try getting the model + key = key.lower() + try: + model = self.models[key] + except KeyError: + try: + app, model = key.split(".", 1) + except ValueError: + raise KeyError("The model '%s' is not in appname.modelname format." % key) + else: + raise KeyError("The model '%s' from the app '%s' is not available in this migration." % (model, app)) + # If they asked for a field, get it. + if fname: + return model._meta.get_field_by_name(fname)[0] + else: + return model + + + def eval_in_context(self, code, app, extra_imports={}): + "Evaluates the given code in the context of the migration file." + + # Drag in the migration module's locals (hopefully including models.py) + fake_locals = dict(inspect.getmodule(self.cls).__dict__) + + # Remove all models from that (i.e. from modern models.py), to stop pollution + for key, value in fake_locals.items(): + if isinstance(value, type) and issubclass(value, models.Model) and hasattr(value, "_meta"): + del fake_locals[key] + + # We add our models into the locals for the eval + fake_locals.update(dict([ + (name.split(".")[-1], model) + for name, model in self.models.items() + ])) + + # Make sure the ones for this app override. + fake_locals.update(dict([ + (name.split(".")[-1], model) + for name, model in self.models.items() + if name.split(".")[0] == app + ])) + + # Ourselves as orm, to allow non-fail cross-app referencing + fake_locals['orm'] = self + + # And a fake _ function + fake_locals['_'] = lambda x: x + + # Datetime; there should be no datetime direct accesses + fake_locals['datetime'] = datetime_utils + + # Now, go through the requested imports and import them. + for name, value in extra_imports.items(): + # First, try getting it out of locals. + parts = value.split(".") + try: + obj = fake_locals[parts[0]] + for part in parts[1:]: + obj = getattr(obj, part) + except (KeyError, AttributeError): + pass + else: + fake_locals[name] = obj + continue + # OK, try to import it directly + try: + fake_locals[name] = ask_for_it_by_name(value) + except ImportError: + if name == "SouthFieldClass": + raise ValueError("Cannot import the required field '%s'" % value) + else: + print("WARNING: Cannot import '%s'" % value) + + # Use ModelsLocals to make lookups work right for CapitalisedModels + fake_locals = ModelsLocals(fake_locals) + + return eval(code, globals(), fake_locals) + + + def make_meta(self, app, model, data, stub=False): + "Makes a Meta class out of a dict of eval-able arguments." + results = {'app_label': app} + for key, code in data.items(): + # Some things we never want to use. + if key in ["_bases", "_ormbases"]: + continue + # Some things we don't want with stubs. + if stub and key in ["order_with_respect_to"]: + continue + # OK, add it. + try: + results[key] = self.eval_in_context(code, app) + except (NameError, AttributeError) as e: + raise ValueError("Cannot successfully create meta field '%s' for model '%s.%s': %s." % ( + key, app, model, e + )) + return type("Meta", tuple(), results) + + + def make_model(self, app, name, data): + "Makes a Model class out of the given app name, model name and pickled data." + + # Extract any bases out of Meta + if "_ormbases" in data['Meta']: + # Make sure everything we depend on is done already; otherwise, wait. 
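+        # (A base still stored as a plain string in self.models has not been
+        # unfrozen yet; raising UnfreezeMeLater pushes this model to the back
+        # of the queue, and __init__ keeps retrying until the queue stops
+        # shrinking, at which point ImpossibleORMUnfreeze is raised instead.)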
+ for key in data['Meta']['_ormbases']: + key = key.lower() + if key not in self.models: + raise ORMBaseNotIncluded("Cannot find ORM base %s" % key) + elif isinstance(self.models[key], string_types): + # Then the other model hasn't been unfrozen yet. + # We postpone ourselves; the situation will eventually resolve. + raise UnfreezeMeLater() + bases = [self.models[key.lower()] for key in data['Meta']['_ormbases']] + # Perhaps the old style? + elif "_bases" in data['Meta']: + bases = map(ask_for_it_by_name, data['Meta']['_bases']) + # Ah, bog standard, then. + else: + bases = [models.Model] + + # Turn the Meta dict into a basic class + meta = self.make_meta(app, name, data['Meta'], data.get("_stub", False)) + + failed_fields = {} + fields = {} + stub = False + + # Now, make some fields! + for fname, params in data.items(): + # If it's the stub marker, ignore it. + if fname == "_stub": + stub = bool(params) + continue + elif fname == "Meta": + continue + elif not params: + raise ValueError("Field '%s' on model '%s.%s' has no definition." % (fname, app, name)) + elif isinstance(params, string_types): + # It's a premade definition string! Let's hope it works... + code = params + extra_imports = {} + else: + # If there's only one parameter (backwards compat), make it 3. + if len(params) == 1: + params = (params[0], [], {}) + # There should be 3 parameters. Code is a tuple of (code, what-to-import) + if len(params) == 3: + code = "SouthFieldClass(%s)" % ", ".join( + params[1] + + ["%s=%s" % (n, v) for n, v in params[2].items()] + ) + extra_imports = {"SouthFieldClass": params[0]} + else: + raise ValueError("Field '%s' on model '%s.%s' has a weird definition length (should be 1 or 3 items)." % (fname, app, name)) + + try: + # Execute it in a probably-correct context. + field = self.eval_in_context(code, app, extra_imports) + except (NameError, AttributeError, AssertionError, KeyError): + # It might rely on other models being around. Add it to the + # model for the second pass. + failed_fields[fname] = (code, extra_imports) + else: + fields[fname] = field + + # Find the app in the Django core, and get its module + more_kwds = {} + try: + app_module = models.get_app(app) + more_kwds['__module__'] = app_module.__name__ + except ImproperlyConfigured: + # The app this belonged to has vanished, but thankfully we can still + # make a mock model, so ignore the error. + more_kwds['__module__'] = '_south_mock' + + more_kwds['Meta'] = meta + + # Make our model + fields.update(more_kwds) + + model = type( + str(name), + tuple(bases), + fields, + ) + + # If this is a stub model, change Objects to a whiny class + if stub: + model.objects = WhinyManager() + # Also, make sure they can't instantiate it + model.__init__ = whiny_method + else: + model.objects = NoDryRunManager(model.objects) + + if failed_fields: + model._failed_fields = failed_fields + + return model + + def retry_failed_fields(self): + "Tries to re-evaluate the _failed_fields for each model." + for modelkey, model in self.models.items(): + app, modelname = modelkey.split(".", 1) + if hasattr(model, "_failed_fields"): + for fname, (code, extra_imports) in model._failed_fields.items(): + try: + field = self.eval_in_context(code, app, extra_imports) + except (NameError, AttributeError, AssertionError, KeyError) as e: + # It's failed again. Complain. + raise ValueError("Cannot successfully create field '%s' for model '%s': %s." % ( + fname, modelname, e + )) + else: + # Startup that field. 
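+                    # (add_to_class runs the field's contribute_to_class,
+                    # registering it on model._meta exactly as a declaration
+                    # in the class body would have.)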
+ model.add_to_class(fname, field) + + +class WhinyManager(object): + "A fake manager that whines whenever you try to touch it. For stub models." + + def __getattr__(self, key): + raise AttributeError("You cannot use items from a stub model.") + + +class NoDryRunManager(object): + """ + A manager that always proxies through to the real manager, + unless a dry run is in progress. + """ + + def __init__(self, real): + self.real = real + + def __getattr__(self, name): + if db.dry_run: + raise AttributeError("You are in a dry run, and cannot access the ORM.\nWrap ORM sections in 'if not db.dry_run:', or if the whole migration is only a data migration, set no_dry_run = True on the Migration class.") + return getattr(self.real, name) + + +def whiny_method(*a, **kw): + raise ValueError("You cannot instantiate a stub model.") diff --git a/awx/lib/site-packages/south/signals.py b/awx/lib/site-packages/south/signals.py new file mode 100644 index 0000000000..f2938d5225 --- /dev/null +++ b/awx/lib/site-packages/south/signals.py @@ -0,0 +1,24 @@ +""" +South-specific signals +""" + +from django.dispatch import Signal +from django.conf import settings + +# Sent at the start of the migration of an app +pre_migrate = Signal(providing_args=["app"]) + +# Sent after each successful migration of an app +post_migrate = Signal(providing_args=["app"]) + +# Sent after each run of a particular migration in a direction +ran_migration = Signal(providing_args=["app","migration","method"]) + +# Compatibility code for django.contrib.auth +# Is causing strange errors, removing for now (we might need to fix up orm first) +#if 'django.contrib.auth' in settings.INSTALLED_APPS: + #def create_permissions_compat(app, **kwargs): + #from django.db.models import get_app + #from django.contrib.auth.management import create_permissions + #create_permissions(get_app(app), (), 0) + #post_migrate.connect(create_permissions_compat) diff --git a/awx/lib/site-packages/south/tests/__init__.py b/awx/lib/site-packages/south/tests/__init__.py new file mode 100644 index 0000000000..3dd9c210a7 --- /dev/null +++ b/awx/lib/site-packages/south/tests/__init__.py @@ -0,0 +1,108 @@ +from __future__ import print_function + +#import unittest +import os +import sys +from functools import wraps +from django.conf import settings +from south.hacks import hacks + +# Make sure skipping tests is available. +try: + # easiest and best is unittest included in Django>=1.3 + from django.utils import unittest +except ImportError: + # earlier django... use unittest from stdlib + import unittest +# however, skipUnless was only added in Python 2.7; +# if not available, we need to do something else +try: + skipUnless = unittest.skipUnless #@UnusedVariable +except AttributeError: + def skipUnless(condition, message): + def decorator(testfunc): + @wraps(testfunc) + def wrapper(self): + if condition: + # Apply method + testfunc(self) + else: + # The skip exceptions are not available either... 
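+                    # so just report the skip and return without running
+                    # the test body; unittest then records it as a pass.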
+ print("Skipping", testfunc.__name__,"--", message) + return wrapper + return decorator + +# ditto for skipIf +try: + skipIf = unittest.skipIf #@UnusedVariable +except AttributeError: + def skipIf(condition, message): + def decorator(testfunc): + @wraps(testfunc) + def wrapper(self): + if condition: + print("Skipping", testfunc.__name__,"--", message) + else: + # Apply method + testfunc(self) + return wrapper + return decorator + +# Add the tests directory so fakeapp is on sys.path +test_root = os.path.dirname(__file__) +sys.path.append(test_root) + +# Note: the individual test files are imported below this. + +class Monkeypatcher(unittest.TestCase): + + """ + Base test class for tests that play with the INSTALLED_APPS setting at runtime. + """ + + def create_fake_app(self, name): + + class Fake: + pass + + fake = Fake() + fake.__name__ = name + try: + fake.migrations = __import__(name + ".migrations", {}, {}, ['migrations']) + except ImportError: + pass + return fake + + def setUp(self): + """ + Changes the Django environment so we can run tests against our test apps. + """ + if hasattr(self, 'installed_apps'): + hacks.store_app_cache_state() + hacks.set_installed_apps(self.installed_apps) + # Make sure dependencies are calculated for new apps + Migrations._dependencies_done = False + + def tearDown(self): + """ + Undoes what setUp did. + """ + if hasattr(self, 'installed_apps'): + hacks.reset_installed_apps() + hacks.restore_app_cache_state() + + +# Try importing all tests if asked for (then we can run 'em) +try: + skiptest = settings.SKIP_SOUTH_TESTS +except: + skiptest = True + +if not skiptest: + from south.tests.db import * + from south.tests.db_mysql import * + from south.tests.logic import * + from south.tests.autodetection import * + from south.tests.logger import * + from south.tests.inspector import * + from south.tests.freezer import * diff --git a/awx/lib/site-packages/south/tests/autodetection.py b/awx/lib/site-packages/south/tests/autodetection.py new file mode 100644 index 0000000000..dd66103ea5 --- /dev/null +++ b/awx/lib/site-packages/south/tests/autodetection.py @@ -0,0 +1,353 @@ +from south.tests import unittest + +from south.creator.changes import AutoChanges, InitialChanges +from south.migration.base import Migrations +from south.tests import Monkeypatcher +from south.creator import freezer +from south.orm import FakeORM +from south.v2 import SchemaMigration + +class TestComparison(unittest.TestCase): + + """ + Tests the comparison methods of startmigration. + """ + + def test_no_change(self): + "Test with a completely unchanged definition." + + self.assertEqual( + AutoChanges.different_attributes( + ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['southdemo.Lizard']"}), + ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['southdemo.Lizard']"}), + ), + False, + ) + + self.assertEqual( + AutoChanges.different_attributes( + ('django.db.models.fields.related.ForeignKey', ['ohhai', 'there'], {'to': "somewhere", "from": "there"}), + ('django.db.models.fields.related.ForeignKey', ['ohhai', 'there'], {"from": "there", 'to': "somewhere"}), + ), + False, + ) + + + def test_pos_change(self): + "Test with a changed positional argument." 
+ + self.assertEqual( + AutoChanges.different_attributes( + ('django.db.models.fields.CharField', ['hi'], {'to': "foo"}), + ('django.db.models.fields.CharField', [], {'to': "foo"}), + ), + True, + ) + + self.assertEqual( + AutoChanges.different_attributes( + ('django.db.models.fields.CharField', [], {'to': "foo"}), + ('django.db.models.fields.CharField', ['bye'], {'to': "foo"}), + ), + True, + ) + + self.assertEqual( + AutoChanges.different_attributes( + ('django.db.models.fields.CharField', ['pi'], {'to': "foo"}), + ('django.db.models.fields.CharField', ['pi'], {'to': "foo"}), + ), + False, + ) + + self.assertEqual( + AutoChanges.different_attributes( + ('django.db.models.fields.CharField', ['pisdadad'], {'to': "foo"}), + ('django.db.models.fields.CharField', ['pi'], {'to': "foo"}), + ), + True, + ) + + self.assertEqual( + AutoChanges.different_attributes( + ('django.db.models.fields.CharField', ['hi'], {}), + ('django.db.models.fields.CharField', [], {}), + ), + True, + ) + + self.assertEqual( + AutoChanges.different_attributes( + ('django.db.models.fields.CharField', [], {}), + ('django.db.models.fields.CharField', ['bye'], {}), + ), + True, + ) + + self.assertEqual( + AutoChanges.different_attributes( + ('django.db.models.fields.CharField', ['pi'], {}), + ('django.db.models.fields.CharField', ['pi'], {}), + ), + False, + ) + + self.assertEqual( + AutoChanges.different_attributes( + ('django.db.models.fields.CharField', ['pi'], {}), + ('django.db.models.fields.CharField', ['45fdfdf'], {}), + ), + True, + ) + + + def test_kwd_change(self): + "Test a changed keyword argument." + + self.assertEqual( + AutoChanges.different_attributes( + ('django.db.models.fields.CharField', ['pi'], {'to': "foo"}), + ('django.db.models.fields.CharField', ['pi'], {'to': "blue"}), + ), + True, + ) + + self.assertEqual( + AutoChanges.different_attributes( + ('django.db.models.fields.CharField', [], {'to': "foo"}), + ('django.db.models.fields.CharField', [], {'to': "blue"}), + ), + True, + ) + + self.assertEqual( + AutoChanges.different_attributes( + ('django.db.models.fields.CharField', ['b'], {'to': "foo"}), + ('django.db.models.fields.CharField', ['b'], {'to': "blue"}), + ), + True, + ) + + self.assertEqual( + AutoChanges.different_attributes( + ('django.db.models.fields.CharField', [], {'to': "foo"}), + ('django.db.models.fields.CharField', [], {}), + ), + True, + ) + + self.assertEqual( + AutoChanges.different_attributes( + ('django.db.models.fields.CharField', ['a'], {'to': "foo"}), + ('django.db.models.fields.CharField', ['a'], {}), + ), + True, + ) + + self.assertEqual( + AutoChanges.different_attributes( + ('django.db.models.fields.CharField', [], {}), + ('django.db.models.fields.CharField', [], {'to': "foo"}), + ), + True, + ) + + self.assertEqual( + AutoChanges.different_attributes( + ('django.db.models.fields.CharField', ['a'], {}), + ('django.db.models.fields.CharField', ['a'], {'to': "foo"}), + ), + True, + ) + + + + def test_backcompat_nochange(self): + "Test that the backwards-compatible comparison is working" + + self.assertEqual( + AutoChanges.different_attributes( + ('models.CharField', [], {}), + ('django.db.models.fields.CharField', [], {}), + ), + False, + ) + + self.assertEqual( + AutoChanges.different_attributes( + ('models.CharField', ['ack'], {}), + ('django.db.models.fields.CharField', ['ack'], {}), + ), + False, + ) + + self.assertEqual( + AutoChanges.different_attributes( + ('models.CharField', [], {'to':'b'}), + ('django.db.models.fields.CharField', [], {'to':'b'}), + ), + False, +
) + + self.assertEqual( + AutoChanges.different_attributes( + ('models.CharField', ['hah'], {'to':'you'}), + ('django.db.models.fields.CharField', ['hah'], {'to':'you'}), + ), + False, + ) + + self.assertEqual( + AutoChanges.different_attributes( + ('models.CharField', ['hah'], {'to':'you'}), + ('django.db.models.fields.CharField', ['hah'], {'to':'heh'}), + ), + True, + ) + + self.assertEqual( + AutoChanges.different_attributes( + ('models.CharField', ['hah'], {}), + ('django.db.models.fields.CharField', [], {'to':"orm['appname.hah']"}), + ), + False, + ) + + self.assertEqual( + AutoChanges.different_attributes( + ('models.CharField', ['hah'], {}), + ('django.db.models.fields.CharField', [], {'to':'hah'}), + ), + True, + ) + + self.assertEqual( + AutoChanges.different_attributes( + ('models.CharField', ['hah'], {}), + ('django.db.models.fields.CharField', [], {'to':'rrr'}), + ), + True, + ) + + self.assertEqual( + AutoChanges.different_attributes( + ('models.CharField', ['hah'], {}), + ('django.db.models.fields.IntField', [], {'to':'hah'}), + ), + True, + ) + +class TestNonManagedIgnored(Monkeypatcher): + + installed_apps = ["non_managed"] + + full_defs = { + 'non_managed.legacy': { + 'Meta': {'object_name': 'Legacy', 'db_table': "'legacy_table'", 'managed': 'False'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True'}), + 'size': ('django.db.models.fields.IntegerField', [], {}) + } + } + + def test_not_added_init(self): + + migrations = Migrations("non_managed") + changes = InitialChanges(migrations) + change_list = changes.get_changes() + if list(change_list): + self.fail("Initial migration creates table for non-managed model") + + def test_not_added_auto(self): + + empty_defs = { } + class EmptyMigration(SchemaMigration): + "Serves as fake previous migration" + + def forwards(self, orm): + pass + + def backwards(self, orm): + pass + + models = empty_defs + + complete_apps = ['non_managed'] + + migrations = Migrations("non_managed") + empty_orm = FakeORM(EmptyMigration, "non_managed") + changes = AutoChanges( + migrations = migrations, + old_defs = empty_defs, + old_orm = empty_orm, + new_defs = self.full_defs, + ) + change_list = changes.get_changes() + if list(change_list): + self.fail("Auto migration creates table for non-managed model") + + def test_not_deleted_auto(self): + + empty_defs = { } + old_defs = freezer.freeze_apps(["non_managed"]) + class InitialMigration(SchemaMigration): + "Serves as fake previous migration" + + def forwards(self, orm): + pass + + def backwards(self, orm): + pass + + models = self.full_defs + + complete_apps = ['non_managed'] + + migrations = Migrations("non_managed") + initial_orm = FakeORM(InitialMigration, "non_managed") + changes = AutoChanges( + migrations = migrations, + old_defs = self.full_defs, + old_orm = initial_orm, + new_defs = empty_defs, + ) + change_list = changes.get_changes() + if list(change_list): + self.fail("Auto migration deletes table for non-managed model") + + def test_not_modified_auto(self): + + fake_defs = { + 'non_managed.legacy': { + 'Meta': {'object_name': 'Legacy', 'db_table': "'legacy_table'", 'managed': 'False'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True'}), + #'size': ('django.db.models.fields.IntegerField', [], {}) # The "change" is the addition of this field + } + } + class 
InitialMigration(SchemaMigration): + "Serves as fake previous migration" + + def forwards(self, orm): + pass + + def backwards(self, orm): + pass + + models = fake_defs + + complete_apps = ['non_managed'] + + from non_managed import models as dummy_import_to_force_loading_models # TODO: Does needing this indicate a bug in Monkeypatcher? + + migrations = Migrations("non_managed") + initial_orm = FakeORM(InitialMigration, "non_managed") + changes = AutoChanges( + migrations = migrations, + old_defs = fake_defs, + old_orm = initial_orm, + new_defs = self.full_defs + ) + change_list = changes.get_changes() + if list(change_list): + self.fail("Auto migration changes table for non-managed model") diff --git a/awx/lib/site-packages/south/tests/brokenapp/__init__.py b/awx/lib/site-packages/south/tests/brokenapp/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/south/tests/brokenapp/migrations/0001_depends_on_unmigrated.py b/awx/lib/site-packages/south/tests/brokenapp/migrations/0001_depends_on_unmigrated.py new file mode 100644 index 0000000000..d53f8364f7 --- /dev/null +++ b/awx/lib/site-packages/south/tests/brokenapp/migrations/0001_depends_on_unmigrated.py @@ -0,0 +1,13 @@ +from south.db import db +from django.db import models + +class Migration: + + depends_on = [('unknown', '0001_initial')] + + def forwards(self): + pass + + def backwards(self): + pass + diff --git a/awx/lib/site-packages/south/tests/brokenapp/migrations/0002_depends_on_unknown.py b/awx/lib/site-packages/south/tests/brokenapp/migrations/0002_depends_on_unknown.py new file mode 100644 index 0000000000..389af80664 --- /dev/null +++ b/awx/lib/site-packages/south/tests/brokenapp/migrations/0002_depends_on_unknown.py @@ -0,0 +1,13 @@ +from south.db import db +from django.db import models + +class Migration: + + depends_on = [('fakeapp', '9999_unknown')] + + def forwards(self): + pass + + def backwards(self): + pass + diff --git a/awx/lib/site-packages/south/tests/brokenapp/migrations/0003_depends_on_higher.py b/awx/lib/site-packages/south/tests/brokenapp/migrations/0003_depends_on_higher.py new file mode 100644 index 0000000000..319069ba88 --- /dev/null +++ b/awx/lib/site-packages/south/tests/brokenapp/migrations/0003_depends_on_higher.py @@ -0,0 +1,13 @@ +from south.db import db +from django.db import models + +class Migration: + + depends_on = [('brokenapp', '0004_higher')] + + def forwards(self): + pass + + def backwards(self): + pass + diff --git a/awx/lib/site-packages/south/tests/brokenapp/migrations/0004_higher.py b/awx/lib/site-packages/south/tests/brokenapp/migrations/0004_higher.py new file mode 100644 index 0000000000..d27ed3a172 --- /dev/null +++ b/awx/lib/site-packages/south/tests/brokenapp/migrations/0004_higher.py @@ -0,0 +1,11 @@ +from south.db import db +from django.db import models + +class Migration: + + def forwards(self): + pass + + def backwards(self): + pass + diff --git a/awx/lib/site-packages/south/tests/brokenapp/migrations/__init__.py b/awx/lib/site-packages/south/tests/brokenapp/migrations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/south/tests/brokenapp/models.py b/awx/lib/site-packages/south/tests/brokenapp/models.py new file mode 100644 index 0000000000..a7d84dced4 --- /dev/null +++ b/awx/lib/site-packages/south/tests/brokenapp/models.py @@ -0,0 +1,55 @@ +# -*- coding: UTF-8 -*- + +from django.db import models +from django.contrib.auth.models import User as UserAlias + +def default_func(): + return
"yays" + +# An empty case. +class Other1(models.Model): pass + +# Nastiness. +class HorribleModel(models.Model): + "A model to test the edge cases of model parsing" + + ZERO, ONE = range(2) + + # First, some nice fields + name = models.CharField(max_length=255) + short_name = models.CharField(max_length=50) + slug = models.SlugField(unique=True) + + # A ForeignKey, to a model above, and then below + o1 = models.ForeignKey(Other1) + o2 = models.ForeignKey('Other2') + + # Now to something outside + user = models.ForeignKey(UserAlias, related_name="horribles") + + # Unicode! + code = models.CharField(max_length=25, default="↑↑↓↓←→←→BA") + + # Odd defaults! + class_attr = models.IntegerField(default=ZERO) + func = models.CharField(max_length=25, default=default_func) + + # Time to get nasty. Define a non-field choices, and use it + choices = [('hello', '1'), ('world', '2')] + choiced = models.CharField(max_length=20, choices=choices) + + class Meta: + db_table = "my_fave" + verbose_name = "Dr. Strangelove," + \ + """or how I learned to stop worrying +and love the bomb""" + + # Now spread over multiple lines + multiline = \ + models.TextField( + ) + +# Special case. +class Other2(models.Model): + # Try loading a field without a newline after it (inspect hates this) + close_but_no_cigar = models.PositiveIntegerField(primary_key=True) \ No newline at end of file diff --git a/awx/lib/site-packages/south/tests/circular_a/__init__.py b/awx/lib/site-packages/south/tests/circular_a/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/south/tests/circular_a/migrations/0001_first.py b/awx/lib/site-packages/south/tests/circular_a/migrations/0001_first.py new file mode 100644 index 0000000000..b0d90ebcb0 --- /dev/null +++ b/awx/lib/site-packages/south/tests/circular_a/migrations/0001_first.py @@ -0,0 +1,13 @@ +from south.db import db +from django.db import models + +class Migration: + + depends_on = [('circular_b', '0001_first')] + + def forwards(self): + pass + + def backwards(self): + pass + diff --git a/awx/lib/site-packages/south/tests/circular_a/migrations/__init__.py b/awx/lib/site-packages/south/tests/circular_a/migrations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/south/tests/circular_a/models.py b/awx/lib/site-packages/south/tests/circular_a/models.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/south/tests/circular_b/__init__.py b/awx/lib/site-packages/south/tests/circular_b/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/south/tests/circular_b/migrations/0001_first.py b/awx/lib/site-packages/south/tests/circular_b/migrations/0001_first.py new file mode 100644 index 0000000000..b11b120971 --- /dev/null +++ b/awx/lib/site-packages/south/tests/circular_b/migrations/0001_first.py @@ -0,0 +1,13 @@ +from south.db import db +from django.db import models + +class Migration: + + depends_on = [('circular_a', '0001_first')] + + def forwards(self): + pass + + def backwards(self): + pass + diff --git a/awx/lib/site-packages/south/tests/circular_b/migrations/__init__.py b/awx/lib/site-packages/south/tests/circular_b/migrations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/south/tests/circular_b/models.py b/awx/lib/site-packages/south/tests/circular_b/models.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/south/tests/db.py 
b/awx/lib/site-packages/south/tests/db.py new file mode 100644 index 0000000000..90f62fc0be --- /dev/null +++ b/awx/lib/site-packages/south/tests/db.py @@ -0,0 +1,952 @@ +import datetime +from warnings import filterwarnings + +from south.db import db, generic +from django.db import connection, models, IntegrityError as DjangoIntegrityError + +from south.tests import unittest, skipIf, skipUnless +from south.utils.py3 import text_type, with_metaclass + +# Create a list of error classes from the various database libraries +errors = [] +try: + from psycopg2 import ProgrammingError + errors.append(ProgrammingError) +except ImportError: + pass +errors = tuple(errors) + +# On SQL Server, the backend's IntegrityError is not (a subclass of) Django's. +try: + from sql_server.pyodbc.base import IntegrityError as SQLServerIntegrityError + IntegrityError = (DjangoIntegrityError, SQLServerIntegrityError) +except ImportError: + IntegrityError = DjangoIntegrityError + +try: + from south.db import mysql +except ImportError: + mysql = None + + +class TestOperations(unittest.TestCase): + + """ + Tests if the various DB abstraction calls work. + Can only test a limited amount due to DB differences. + """ + + def setUp(self): + db.debug = False + try: + import MySQLdb + except ImportError: + pass + else: + filterwarnings('ignore', category=MySQLdb.Warning) + db.clear_deferred_sql() + db.start_transaction() + + def tearDown(self): + db.rollback_transaction() + + def test_create(self): + """ + Test creation of tables. + """ + cursor = connection.cursor() + # It needs to take at least 2 args + self.assertRaises(TypeError, db.create_table) + self.assertRaises(TypeError, db.create_table, "test1") + # Empty tables (i.e. no columns) are not fine, so make at least 1 + db.create_table("test1", [('email_confirmed', models.BooleanField(default=False))]) + # And should exist + cursor.execute("SELECT * FROM test1") + # Make sure we can't do the same query on an empty table + try: + cursor.execute("SELECT * FROM nottheretest1") + except: + pass + else: + self.fail("Non-existent table could be selected!") + + @skipUnless(db.raises_default_errors, 'This database does not raise errors on missing defaults.') + def test_create_default(self): + """ + Test creation of tables, make sure defaults are not left in the database + """ + db.create_table("test_create_default", [('a', models.IntegerField()), + ('b', models.IntegerField(default=17))]) + cursor = connection.cursor() + self.assertRaises(IntegrityError, cursor.execute, "INSERT INTO test_create_default(a) VALUES (17)") + + def test_delete(self): + """ + Test deletion of tables. + """ + cursor = connection.cursor() + db.create_table("test_deltable", [('email_confirmed', models.BooleanField(default=False))]) + db.delete_table("test_deltable") + # Make sure it went + try: + cursor.execute("SELECT * FROM test_deltable") + except: + pass + else: + self.fail("Just-deleted table could be selected!") + + def test_nonexistent_delete(self): + """ + Test deletion of nonexistent tables. 
+ """ + try: + db.delete_table("test_nonexistdeltable") + except: + pass + else: + self.fail("Non-existent table could be deleted!") + + def test_foreign_keys(self): + """ + Tests foreign key creation, especially uppercase (see #61) + """ + Test = db.mock_model(model_name='Test', db_table='test5a', + db_tablespace='', pk_field_name='ID', + pk_field_type=models.AutoField, pk_field_args=[]) + db.create_table("test5a", [('ID', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True))]) + db.create_table("test5b", [ + ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)), + ('UNIQUE', models.ForeignKey(Test)), + ]) + db.execute_deferred_sql() + + @skipUnless(db.supports_foreign_keys, 'Foreign keys can only be deleted on ' + 'engines that support them.') + def test_recursive_foreign_key_delete(self): + """ + Test that recursive foreign keys are deleted correctly (see #1065) + """ + Test = db.mock_model(model_name='Test', db_table='test_rec_fk_del', + db_tablespace='', pk_field_name='id', + pk_field_type=models.AutoField, pk_field_args=[]) + db.create_table('test_rec_fk_del', [ + ('id', models.AutoField(primary_key=True, auto_created=True)), + ('fk', models.ForeignKey(Test)), + ]) + db.execute_deferred_sql() + db.delete_foreign_key('test_rec_fk_del', 'fk_id') + + def test_rename(self): + """ + Test column renaming + """ + cursor = connection.cursor() + db.create_table("test_rn", [('spam', models.BooleanField(default=False))]) + # Make sure we can select the column + cursor.execute("SELECT spam FROM test_rn") + # Rename it + db.rename_column("test_rn", "spam", "eggs") + cursor.execute("SELECT eggs FROM test_rn") + db.commit_transaction() + db.start_transaction() + try: + cursor.execute("SELECT spam FROM test_rn") + except: + pass + else: + self.fail("Just-renamed column could be selected!") + db.rollback_transaction() + db.delete_table("test_rn") + db.start_transaction() + + def test_dry_rename(self): + """ + Test column renaming while --dry-run is turned on (should do nothing) + See ticket #65 + """ + cursor = connection.cursor() + db.create_table("test_drn", [('spam', models.BooleanField(default=False))]) + # Make sure we can select the column + cursor.execute("SELECT spam FROM test_drn") + # Rename it + db.dry_run = True + db.rename_column("test_drn", "spam", "eggs") + db.dry_run = False + cursor.execute("SELECT spam FROM test_drn") + db.commit_transaction() + db.start_transaction() + try: + cursor.execute("SELECT eggs FROM test_drn") + except: + pass + else: + self.fail("Dry-renamed new column could be selected!") + db.rollback_transaction() + db.delete_table("test_drn") + db.start_transaction() + + def test_table_rename(self): + """ + Test column renaming + """ + cursor = connection.cursor() + db.create_table("testtr", [('spam', models.BooleanField(default=False))]) + # Make sure we can select the column + cursor.execute("SELECT spam FROM testtr") + # Rename it + db.rename_table("testtr", "testtr2") + cursor.execute("SELECT spam FROM testtr2") + db.commit_transaction() + db.start_transaction() + try: + cursor.execute("SELECT spam FROM testtr") + except: + pass + else: + self.fail("Just-renamed column could be selected!") + db.rollback_transaction() + db.delete_table("testtr2") + db.start_transaction() + + def test_percents_in_defaults(self): + """ + Test that % in a default gets escaped to %%. 
+ """ + try: + db.create_table("testpind", [('cf', models.CharField(max_length=255, default="It should be 2%!"))]) + except IndexError: + self.fail("% was not properly escaped in column SQL.") + db.delete_table("testpind") + + def test_index(self): + """ + Test the index operations + """ + db.create_table("test3", [ + ('SELECT', models.BooleanField(default=False)), + ('eggs', models.IntegerField(unique=True)), + ]) + db.execute_deferred_sql() + # Add an index on that column + db.create_index("test3", ["SELECT"]) + # Add another index on two columns + db.create_index("test3", ["SELECT", "eggs"]) + # Delete them both + db.delete_index("test3", ["SELECT"]) + db.delete_index("test3", ["SELECT", "eggs"]) + # Delete the unique index/constraint + if db.backend_name != "sqlite3": + db.delete_unique("test3", ["eggs"]) + db.delete_table("test3") + + def test_primary_key(self): + """ + Test the primary key operations + """ + + db.create_table("test_pk", [ + ('id', models.IntegerField(primary_key=True)), + ('new_pkey', models.IntegerField()), + ('eggs', models.IntegerField(unique=True)), + ]) + db.execute_deferred_sql() + # Remove the default primary key, and make eggs it + db.delete_primary_key("test_pk") + db.create_primary_key("test_pk", "new_pkey") + # Try inserting a now-valid row pair + db.execute("INSERT INTO test_pk (id, new_pkey, eggs) VALUES (1, 2, 3)") + db.execute("INSERT INTO test_pk (id, new_pkey, eggs) VALUES (1, 3, 4)") + db.delete_table("test_pk") + + def test_primary_key_implicit(self): + """ + Tests that changing primary key implicitly fails. + """ + db.create_table("test_pki", [ + ('id', models.IntegerField(primary_key=True)), + ('new_pkey', models.IntegerField()), + ('eggs', models.IntegerField(unique=True)), + ]) + db.execute_deferred_sql() + # Fiddle with alter_column to attempt to make it remove the primary key + db.alter_column("test_pki", "id", models.IntegerField()) + db.alter_column("test_pki", "new_pkey", models.IntegerField(primary_key=True)) + # Try inserting a should-be-valid row pair + db.execute("INSERT INTO test_pki (id, new_pkey, eggs) VALUES (1, 2, 3)") + db.execute("INSERT INTO test_pki (id, new_pkey, eggs) VALUES (2, 2, 4)") + db.delete_table("test_pki") + + def test_add_columns(self): + """ + Test adding columns + """ + db.create_table("test_addc", [ + ('spam', models.BooleanField(default=False)), + ('eggs', models.IntegerField()), + ]) + # Add a column + db.add_column("test_addc", "add1", models.IntegerField(default=3)) + User = db.mock_model(model_name='User', db_table='auth_user', db_tablespace='', pk_field_name='id', pk_field_type=models.AutoField, pk_field_args=[], pk_field_kwargs={}) + # insert some data so we can test the default value of the added fkey + db.execute("INSERT INTO test_addc (spam, eggs, add1) VALUES (%s, 1, 2)", [False]) + db.add_column("test_addc", "user", models.ForeignKey(User, null=True)) + db.execute_deferred_sql() + # try selecting from the user_id column to make sure it was actually created + val = db.execute("SELECT user_id FROM test_addc")[0][0] + self.assertEquals(val, None) + db.delete_column("test_addc", "add1") + # make sure adding an indexed field works + db.add_column("test_addc", "add2", models.CharField(max_length=15, db_index=True, default='pi')) + db.execute_deferred_sql() + db.delete_table("test_addc") + + def test_delete_columns(self): + """ + Test deleting columns + """ + db.create_table("test_delc", [ + ('spam', models.BooleanField(default=False)), + ('eggs', models.IntegerField(db_index=True, unique=True)), + ]) + 
db.delete_column("test_delc", "eggs") + + def test_add_nullbool_column(self): + """ + Test adding NullBoolean columns + """ + db.create_table("test_addnbc", [ + ('spam', models.BooleanField(default=False)), + ('eggs', models.IntegerField()), + ]) + # Add a column + db.add_column("test_addnbc", "add1", models.NullBooleanField()) + # Add a column with a default + db.add_column("test_addnbc", "add2", models.NullBooleanField(default=True)) + # insert some data so we can test the default values of the added column + db.execute("INSERT INTO test_addnbc (spam, eggs) VALUES (%s, 1)", [False]) + # try selecting from the new columns to make sure they were properly created + false, null1, null2 = db.execute("SELECT spam,add1,add2 FROM test_addnbc")[0][0:3] + self.assertIsNone(null1, "Null boolean field with no value inserted returns non-null") + self.assertIsNone(null2, "Null boolean field (added with default) with no value inserted returns non-null") + self.assertEquals(false, False) + db.delete_table("test_addnbc") + + def test_alter_columns(self): + """ + Test altering columns + """ + db.create_table("test_alterc", [ + ('spam', models.BooleanField(default=False)), + ('eggs', models.IntegerField()), + ]) + db.execute_deferred_sql() + # Change eggs to be a FloatField + db.alter_column("test_alterc", "eggs", models.FloatField()) + db.execute_deferred_sql() + db.delete_table("test_alterc") + db.execute_deferred_sql() + + def test_alter_char_default(self): + """ + Test altering column defaults with char fields + """ + db.create_table("test_altercd", [ + ('spam', models.CharField(max_length=30)), + ('eggs', models.IntegerField()), + ]) + # Change spam default + db.alter_column("test_altercd", "spam", models.CharField(max_length=30, default="loof", null=True)) + # Assert the default is not in the database + db.execute("INSERT INTO test_altercd (eggs) values (12)") + null = db.execute("SELECT spam FROM test_altercd")[0][0] + self.assertFalse(null, "Default for char field was installed into database") + + def test_mysql_defaults(self): + """ + Test MySQL default handling for BLOB and TEXT. + """ + db.create_table("test_altermyd", [ + ('spam', models.BooleanField(default=False)), + ('eggs', models.TextField()), + ]) + # Change eggs to be a FloatField + db.alter_column("test_altermyd", "eggs", models.TextField(null=True)) + db.delete_table("test_altermyd") + + def test_alter_column_postgres_multiword(self): + """ + Tests altering columns with multiple words in Postgres types (issue #125) + e.g. 
'datetime with time zone', look at django/db/backends/postgresql/creation.py + """ + db.create_table("test_multiword", [ + ('col_datetime', models.DateTimeField(null=True)), + ('col_integer', models.PositiveIntegerField(null=True)), + ('col_smallint', models.PositiveSmallIntegerField(null=True)), + ('col_float', models.FloatField(null=True)), + ]) + + # test if 'double precision' is preserved + db.alter_column('test_multiword', 'col_float', models.FloatField('float', null=True)) + + # test if 'CHECK ("%(column)s" >= 0)' is stripped + db.alter_column('test_multiword', 'col_integer', models.PositiveIntegerField(null=True)) + db.alter_column('test_multiword', 'col_smallint', models.PositiveSmallIntegerField(null=True)) + + # test if 'with timezone' is preserved + if db.backend_name == "postgres": + db.execute("INSERT INTO test_multiword (col_datetime) VALUES ('2009-04-24 14:20:55+02')") + db.alter_column('test_multiword', 'col_datetime', models.DateTimeField(auto_now=True)) + assert db.execute("SELECT col_datetime = '2009-04-24 14:20:55+02' FROM test_multiword")[0][0] + + db.delete_table("test_multiword") + + @skipUnless(db.has_check_constraints, 'Only applies to databases that ' + 'support CHECK constraints.') + def test_alter_constraints(self): + """ + Tests that going from a PositiveIntegerField to an IntegerField drops + the constraint on the database. + """ + # Make the test table + db.create_table("test_alterc", [ + ('num', models.PositiveIntegerField()), + ]) + db.execute_deferred_sql() + # Add in some test values + db.execute("INSERT INTO test_alterc (num) VALUES (1)") + db.execute("INSERT INTO test_alterc (num) VALUES (2)") + # Ensure that adding a negative number is bad + db.commit_transaction() + db.start_transaction() + try: + db.execute("INSERT INTO test_alterc (num) VALUES (-3)") + except: + db.rollback_transaction() + else: + self.fail("Could insert a negative integer into a PositiveIntegerField.") + # Alter it to a normal IntegerField + db.alter_column("test_alterc", "num", models.IntegerField()) + db.execute_deferred_sql() + # It should now work + db.execute("INSERT INTO test_alterc (num) VALUES (-3)") + db.delete_table("test_alterc") + # We need to match up for tearDown + db.start_transaction() + + @skipIf(db.backend_name == "sqlite3", "SQLite backend doesn't support this " + "yet.") + def test_unique(self): + """ + Tests creating/deleting unique constraints. 
+ """ + db.create_table("test_unique2", [ + ('id', models.AutoField(primary_key=True)), + ]) + db.create_table("test_unique", [ + ('spam', models.BooleanField(default=False)), + ('eggs', models.IntegerField()), + ('ham', models.ForeignKey(db.mock_model('Unique2', 'test_unique2'))), + ]) + db.execute_deferred_sql() + # Add a constraint + db.create_unique("test_unique", ["spam"]) + db.execute_deferred_sql() + # Shouldn't do anything during dry-run + db.dry_run = True + db.delete_unique("test_unique", ["spam"]) + db.dry_run = False + db.delete_unique("test_unique", ["spam"]) + db.create_unique("test_unique", ["spam"]) + # Special preparations for Sql Server + if db.backend_name == "pyodbc": + db.execute("SET IDENTITY_INSERT test_unique2 ON;") + db.execute("INSERT INTO test_unique2 (id) VALUES (1)") + db.execute("INSERT INTO test_unique2 (id) VALUES (2)") + db.commit_transaction() + db.start_transaction() + + + # Test it works + TRUE = (True,) + FALSE = (False,) + db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 0, 1)", TRUE) + db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 1, 2)", FALSE) + try: + db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 2, 1)", FALSE) + except: + db.rollback_transaction() + else: + self.fail("Could insert non-unique item.") + + # Drop that, add one only on eggs + db.delete_unique("test_unique", ["spam"]) + db.execute("DELETE FROM test_unique") + db.create_unique("test_unique", ["eggs"]) + db.start_transaction() + + # Test similarly + db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 0, 1)", TRUE) + db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 1, 2)", FALSE) + try: + db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 1, 1)", TRUE) + except: + db.rollback_transaction() + else: + self.fail("Could insert non-unique item.") + + # Drop those, test combined constraints + db.delete_unique("test_unique", ["eggs"]) + db.execute("DELETE FROM test_unique") + db.create_unique("test_unique", ["spam", "eggs", "ham_id"]) + db.start_transaction() + # Test similarly + db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 0, 1)", TRUE) + db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 1, 1)", FALSE) + try: + db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 0, 1)", TRUE) + except: + db.rollback_transaction() + else: + self.fail("Could insert non-unique pair.") + db.delete_unique("test_unique", ["spam", "eggs", "ham_id"]) + db.start_transaction() + + def test_alter_unique(self): + """ + Tests that unique constraints are not affected when + altering columns (that's handled by create_/delete_unique) + """ + db.create_table("test_alter_unique", [ + ('spam', models.IntegerField()), + ('eggs', models.IntegerField(unique=True)), + ]) + db.execute_deferred_sql() + + # Make sure the unique constraint is created + db.execute('INSERT INTO test_alter_unique (spam, eggs) VALUES (0, 42)') + db.commit_transaction() + db.start_transaction() + try: + db.execute("INSERT INTO test_alter_unique (spam, eggs) VALUES (1, 42)") + except: + pass + else: + self.fail("Could insert the same integer twice into a unique field.") + db.rollback_transaction() + + # Alter without unique=True (should not affect anything) + db.alter_column("test_alter_unique", "eggs", models.IntegerField()) + + # Insertion should still fail + db.start_transaction() + try: + db.execute("INSERT INTO test_alter_unique (spam, eggs) VALUES (1, 42)") + except: + pass + 
else: + self.fail("Could insert the same integer twice into a unique field after alter_column with unique=False.") + db.rollback_transaction() + + # Delete the unique index/constraint + if db.backend_name != "sqlite3": + db.delete_unique("test_alter_unique", ["eggs"]) + db.delete_table("test_alter_unique") + db.start_transaction() + + # Test multi-field constraint + db.create_table("test_alter_unique2", [ + ('spam', models.IntegerField()), + ('eggs', models.IntegerField()), + ]) + db.create_unique('test_alter_unique2', ('spam', 'eggs')) + db.execute_deferred_sql() + db.execute('INSERT INTO test_alter_unique2 (spam, eggs) VALUES (0, 42)') + db.commit_transaction() + # Verify that constraint works + db.start_transaction() + try: + db.execute("INSERT INTO test_alter_unique2 (spam, eggs) VALUES (1, 42)") + except: + self.fail("Looks like multi-field unique constraint applied to only one field.") + db.start_transaction() + db.rollback_transaction() + try: + db.execute("INSERT INTO test_alter_unique2 (spam, eggs) VALUES (0, 43)") + except: + self.fail("Looks like multi-field unique constraint applied to only one field.") + db.rollback_transaction() + db.start_transaction() + try: + db.execute("INSERT INTO test_alter_unique2 (spam, eggs) VALUES (0, 42)") + except: + pass + else: + self.fail("Could insert the same integer twice into a unique field.") + db.rollback_transaction() + # Altering one column should not drop or modify multi-column constraint + db.alter_column("test_alter_unique2", "eggs", models.CharField(max_length=10)) + db.start_transaction() + try: + db.execute("INSERT INTO test_alter_unique2 (spam, eggs) VALUES (1, 42)") + except: + self.fail("Altering one column broken multi-column unique constraint.") + db.start_transaction() + db.rollback_transaction() + try: + db.execute("INSERT INTO test_alter_unique2 (spam, eggs) VALUES (0, 43)") + except: + self.fail("Altering one column broken multi-column unique constraint.") + db.rollback_transaction() + db.start_transaction() + try: + db.execute("INSERT INTO test_alter_unique2 (spam, eggs) VALUES (0, 42)") + except: + pass + else: + self.fail("Could insert the same integer twice into a unique field after alter_column with unique=False.") + db.rollback_transaction() + db.delete_table("test_alter_unique2") + db.start_transaction() + + def test_capitalised_constraints(self): + """ + Under PostgreSQL at least, capitalised constraints must be quoted. + """ + db.create_table("test_capconst", [ + ('SOMECOL', models.PositiveIntegerField(primary_key=True)), + ]) + # Alter it so it's not got the check constraint + db.alter_column("test_capconst", "SOMECOL", models.IntegerField()) + + def test_text_default(self): + """ + MySQL cannot have blank defaults on TEXT columns. 
+ """ + db.create_table("test_textdef", [ + ('textcol', models.TextField(blank=True)), + ]) + + def test_text_to_char(self): + """ + On Oracle, you can't simply ALTER TABLE MODIFY a textfield to a charfield + """ + value = "kawabanga" + db.create_table("test_text_to_char", [ + ('textcol', models.TextField()), + ]) + db.execute_deferred_sql() + db.execute("INSERT INTO test_text_to_char VALUES (%s)", [value]) + db.alter_column("test_text_to_char", "textcol", models.CharField(max_length=100)) + db.execute_deferred_sql() + after = db.execute("select * from test_text_to_char")[0][0] + self.assertEqual(value, after, "Change from text to char altered value [ %r != %r ]" % (value, after)) + + def test_char_to_text(self): + """ + On Oracle, you can't simply ALTER TABLE MODIFY a charfield to a textfield either + """ + value = "agnabawak" + db.create_table("test_char_to_text", [ + ('textcol', models.CharField(max_length=100)), + ]) + db.execute_deferred_sql() + db.execute("INSERT INTO test_char_to_text VALUES (%s)", [value]) + db.alter_column("test_char_to_text", "textcol", models.TextField()) + db.execute_deferred_sql() + after = db.execute("select * from test_char_to_text")[0][0] + after = text_type(after) # Oracle text fields return a sort of lazy string -- force evaluation + self.assertEqual(value, after, "Change from char to text altered value [ %r != %r ]" % (value, after)) + + @skipUnless(db.raises_default_errors, 'This database does not raise errors on missing defaults.') + def test_datetime_default(self): + """ + Test that defaults are correctly not created for datetime columns + """ + end_of_world = datetime.datetime(2012, 12, 21, 0, 0, 1) + + try: + from django.utils import timezone + except ImportError: + pass + else: + from django.conf import settings + if getattr(settings, 'USE_TZ', False): + end_of_world = end_of_world.replace(tzinfo=timezone.utc) + + db.create_table("test_datetime_def", [ + ('col0', models.IntegerField(null=True)), + ('col1', models.DateTimeField(default=end_of_world)), + ('col2', models.DateTimeField(null=True)), + ]) + db.execute_deferred_sql() + # insert a row + db.execute("INSERT INTO test_datetime_def (col0, col1, col2) values (null,%s,null)", [end_of_world]) + db.alter_column("test_datetime_def", "col2", models.DateTimeField(default=end_of_world)) + db.add_column("test_datetime_def", "col3", models.DateTimeField(default=end_of_world)) + db.execute_deferred_sql() + db.commit_transaction() + # In the single existing row, we now expect col1=col2=col3=end_of_world... 
+ db.start_transaction() + ends = db.execute("select col1,col2,col3 from test_datetime_def")[0] + self.failUnlessEqual(len(ends), 3) + for e in ends: + self.failUnlessEqual(e, end_of_world) + db.commit_transaction() + # ...but there should not be a default in the database for col1 or col3 + for cols in ["col1,col2", "col2,col3"]: + db.start_transaction() + statement = "insert into test_datetime_def (col0,%s) values (null,%%s,%%s)" % cols + self.assertRaises( + IntegrityError, + db.execute, statement, [end_of_world, end_of_world] + ) + db.rollback_transaction() + + db.start_transaction() # To preserve the sanity and semantics of this test class + + def test_add_unique_fk(self): + """ + Test adding a ForeignKey with unique=True or a OneToOneField + """ + db.create_table("test_add_unique_fk", [ + ('spam', models.BooleanField(default=False)) + ]) + + db.add_column("test_add_unique_fk", "mock1", models.ForeignKey(db.mock_model('Mock', 'mock'), null=True, unique=True)) + db.add_column("test_add_unique_fk", "mock2", models.OneToOneField(db.mock_model('Mock', 'mock'), null=True)) + + db.delete_table("test_add_unique_fk") + + @skipUnless(db.has_check_constraints, 'Only applies to databases that ' + 'support CHECK constraints.') + def test_column_constraint(self): + """ + Tests that the value constraint of PositiveIntegerField is enforced on + the database level. + """ + db.create_table("test_column_constraint", [ + ('spam', models.PositiveIntegerField()), + ]) + db.execute_deferred_sql() + + # Make sure we can't insert negative values + db.commit_transaction() + db.start_transaction() + try: + db.execute("INSERT INTO test_column_constraint VALUES (-42)") + except: + pass + else: + self.fail("Could insert a negative value into a PositiveIntegerField.") + db.rollback_transaction() + + # remove constraint + db.alter_column("test_column_constraint", "spam", models.IntegerField()) + db.execute_deferred_sql() + # make sure the insertion works now + db.execute('INSERT INTO test_column_constraint VALUES (-42)') + db.execute('DELETE FROM test_column_constraint') + + # add it back again + db.alter_column("test_column_constraint", "spam", models.PositiveIntegerField()) + db.execute_deferred_sql() + # it should fail again + db.start_transaction() + try: + db.execute("INSERT INTO test_column_constraint VALUES (-42)") + except: + pass + else: + self.fail("Could insert a negative value after changing an IntegerField to a PositiveIntegerField.") + db.rollback_transaction() + + db.delete_table("test_column_constraint") + db.start_transaction() + + def test_sql_defaults(self): + """ + Test that sql default value is correct for non-string field types. + Datetimes are handled in test_datetime_default. 
+ """ + + class CustomField(with_metaclass(models.SubfieldBase, models.CharField)): + description = 'CustomField' + def get_default(self): + if self.has_default(): + if callable(self.default): + return self.default() + return self.default + return super(CustomField, self).get_default() + def get_prep_value(self, value): + if not value: + return value + return ','.join(map(str, value)) + def to_python(self, value): + if not value or isinstance(value, list): + return value + return list(map(int, value.split(','))) + + false_value = db.has_booleans and 'False' or '0' + defaults = ( + (models.CharField(default='sukasuka'), 'DEFAULT \'sukasuka'), + (models.BooleanField(default=False), 'DEFAULT %s' % false_value), + (models.IntegerField(default=42), 'DEFAULT 42'), + (CustomField(default=[2012, 2018, 2021, 2036]), 'DEFAULT \'2012,2018,2021,2036') + ) + for field, sql_test_str in defaults: + sql = db.column_sql('fish', 'YAAAAAAZ', field) + if sql_test_str not in sql: + self.fail("default sql value was not properly generated for field %r.\nSql was %s" % (field, sql)) + + def test_make_added_foreign_key_not_null(self): + # Table for FK to target + User = db.mock_model(model_name='User', db_table='auth_user', db_tablespace='', pk_field_name='id', pk_field_type=models.AutoField, pk_field_args=[], pk_field_kwargs={}) + # Table with no foreign key + db.create_table("test_fk", [ + ('eggs', models.IntegerField()), + ]) + db.execute_deferred_sql() + + # Add foreign key + db.add_column("test_fk", 'foreik', models.ForeignKey(User, null=True)) + db.execute_deferred_sql() + + # Make the FK null + db.alter_column("test_fk", "foreik_id", models.ForeignKey(User)) + db.execute_deferred_sql() + +class TestCacheGeneric(unittest.TestCase): + base_ops_cls = generic.DatabaseOperations + def setUp(self): + class CacheOps(self.base_ops_cls): + def __init__(self): + self._constraint_cache = {} + self.cache_filled = 0 + self.settings = {'NAME': 'db'} + + def _fill_constraint_cache(self, db, table): + self.cache_filled += 1 + self._constraint_cache.setdefault(db, {}) + self._constraint_cache[db].setdefault(table, {}) + + @generic.invalidate_table_constraints + def clear_con(self, table): + pass + + @generic.copy_column_constraints + def cp_column(self, table, column_old, column_new): + pass + + @generic.delete_column_constraints + def rm_column(self, table, column): + pass + + @generic.copy_column_constraints + @generic.delete_column_constraints + def mv_column(self, table, column_old, column_new): + pass + + def _get_setting(self, attr): + return self.settings[attr] + self.CacheOps = CacheOps + + def test_cache(self): + ops = self.CacheOps() + self.assertEqual(0, ops.cache_filled) + self.assertFalse(ops.lookup_constraint('db', 'table')) + self.assertEqual(1, ops.cache_filled) + self.assertFalse(ops.lookup_constraint('db', 'table')) + self.assertEqual(1, ops.cache_filled) + ops.clear_con('table') + self.assertEqual(1, ops.cache_filled) + self.assertFalse(ops.lookup_constraint('db', 'table')) + self.assertEqual(2, ops.cache_filled) + self.assertFalse(ops.lookup_constraint('db', 'table', 'column')) + self.assertEqual(2, ops.cache_filled) + + cache = ops._constraint_cache + cache['db']['table']['column'] = 'constraint' + self.assertEqual('constraint', ops.lookup_constraint('db', 'table', 'column')) + self.assertEqual([('column', 'constraint')], ops.lookup_constraint('db', 'table')) + self.assertEqual(2, ops.cache_filled) + + # invalidate_table_constraints + ops.clear_con('new_table') + self.assertEqual('constraint', 
ops.lookup_constraint('db', 'table', 'column')) + self.assertEqual(2, ops.cache_filled) + + self.assertFalse(ops.lookup_constraint('db', 'new_table')) + self.assertEqual(3, ops.cache_filled) + + # delete_column_constraints + cache['db']['table']['column'] = 'constraint' + self.assertEqual('constraint', ops.lookup_constraint('db', 'table', 'column')) + ops.rm_column('table', 'column') + self.assertEqual([], ops.lookup_constraint('db', 'table', 'column')) + self.assertEqual([], ops.lookup_constraint('db', 'table', 'noexist_column')) + + # copy_column_constraints + cache['db']['table']['column'] = 'constraint' + self.assertEqual('constraint', ops.lookup_constraint('db', 'table', 'column')) + ops.cp_column('table', 'column', 'column_new') + self.assertEqual('constraint', ops.lookup_constraint('db', 'table', 'column_new')) + self.assertEqual('constraint', ops.lookup_constraint('db', 'table', 'column')) + + # copy + delete + cache['db']['table']['column'] = 'constraint' + self.assertEqual('constraint', ops.lookup_constraint('db', 'table', 'column')) + ops.mv_column('table', 'column', 'column_new') + self.assertEqual('constraint', ops.lookup_constraint('db', 'table', 'column_new')) + self.assertEqual([], ops.lookup_constraint('db', 'table', 'column')) + + def test_valid(self): + ops = self.CacheOps() + # none of these should vivify a table into a valid state + self.assertFalse(ops._is_valid_cache('db', 'table')) + self.assertFalse(ops._is_valid_cache('db', 'table')) + ops.clear_con('table') + self.assertFalse(ops._is_valid_cache('db', 'table')) + ops.rm_column('table', 'column') + self.assertFalse(ops._is_valid_cache('db', 'table')) + + # these should change the cache state + ops.lookup_constraint('db', 'table') + self.assertTrue(ops._is_valid_cache('db', 'table')) + ops.lookup_constraint('db', 'table', 'column') + self.assertTrue(ops._is_valid_cache('db', 'table')) + ops.clear_con('table') + self.assertFalse(ops._is_valid_cache('db', 'table')) + + def test_valid_implementation(self): + # generic fills the cache on a per-table basis + ops = self.CacheOps() + self.assertFalse(ops._is_valid_cache('db', 'table')) + self.assertFalse(ops._is_valid_cache('db', 'other_table')) + ops.lookup_constraint('db', 'table') + self.assertTrue(ops._is_valid_cache('db', 'table')) + self.assertFalse(ops._is_valid_cache('db', 'other_table')) + ops.lookup_constraint('db', 'other_table') + self.assertTrue(ops._is_valid_cache('db', 'table')) + self.assertTrue(ops._is_valid_cache('db', 'other_table')) + ops.clear_con('table') + self.assertFalse(ops._is_valid_cache('db', 'table')) + self.assertTrue(ops._is_valid_cache('db', 'other_table')) + +if mysql: + class TestCacheMysql(TestCacheGeneric): + base_ops_cls = mysql.DatabaseOperations + + def test_valid_implementation(self): + # mysql fills the cache on a per-db basis + ops = self.CacheOps() + self.assertFalse(ops._is_valid_cache('db', 'table')) + self.assertFalse(ops._is_valid_cache('db', 'other_table')) + ops.lookup_constraint('db', 'table') + self.assertTrue(ops._is_valid_cache('db', 'table')) + self.assertTrue(ops._is_valid_cache('db', 'other_table')) + ops.lookup_constraint('db', 'other_table') + self.assertTrue(ops._is_valid_cache('db', 'table')) + self.assertTrue(ops._is_valid_cache('db', 'other_table')) + ops.clear_con('table') + self.assertFalse(ops._is_valid_cache('db', 'table')) + self.assertTrue(ops._is_valid_cache('db', 'other_table')) diff --git a/awx/lib/site-packages/south/tests/db_mysql.py b/awx/lib/site-packages/south/tests/db_mysql.py new file mode 
100644 index 0000000000..e83596c463 --- /dev/null +++ b/awx/lib/site-packages/south/tests/db_mysql.py @@ -0,0 +1,164 @@ +# Additional MySQL-specific tests +# Written by: F. Gabriel Gosselin <gabrielNOSPAM@evidens.ca> +# Based on tests by: aarranz +from south.tests import unittest, skipUnless + + +from south.db import db, generic, mysql +from django.db import connection, models + +from south.utils.py3 import with_metaclass + + +# A class decoration may be used in lieu of this when Python 2.5 is the +# minimum. +class TestMySQLOperationsMeta(type): + + def __new__(mcs, name, bases, dict_): + decorator = skipUnless(db.backend_name == "mysql", 'MySQL-specific tests') + + for key, method in dict_.items(): + if key.startswith('test'): + dict_[key] = decorator(method) + + return type.__new__(mcs, name, bases, dict_) + +class TestMySQLOperations(with_metaclass(TestMySQLOperationsMeta, unittest.TestCase)): + """MySQL-specific tests""" + + def setUp(self): + db.debug = False + db.clear_deferred_sql() + + def tearDown(self): + pass + + def _create_foreign_tables(self, main_name, reference_name): + # Create foreign table and model + Foreign = db.mock_model(model_name='Foreign', db_table=reference_name, + db_tablespace='', pk_field_name='id', + pk_field_type=models.AutoField, + pk_field_args=[]) + db.create_table(reference_name, [ + ('id', models.AutoField(primary_key=True)), + ]) + # Create table with foreign key + db.create_table(main_name, [ + ('id', models.AutoField(primary_key=True)), + ('foreign', models.ForeignKey(Foreign)), + ]) + return Foreign + + def test_constraint_references(self): + """Tests that referred table is reported accurately""" + main_table = 'test_cns_ref' + reference_table = 'test_cr_foreign' + db.start_transaction() + self._create_foreign_tables(main_table, reference_table) + db.execute_deferred_sql() + constraint = db._find_foreign_constraints(main_table, 'foreign_id')[0] + references = db._lookup_constraint_references(main_table, constraint) + self.assertEquals((reference_table, 'id'), references) + db.delete_table(main_table) + db.delete_table(reference_table) + + def test_reverse_column_constraint(self): + """Tests that referred column in a foreign key (ex. 
id) is found""" + main_table = 'test_reverse_ref' + reference_table = 'test_rr_foreign' + db.start_transaction() + self._create_foreign_tables(main_table, reference_table) + db.execute_deferred_sql() + inverse = db._lookup_reverse_constraint(reference_table, 'id') + (cname, rev_table, rev_column) = inverse[0] + self.assertEquals(main_table, rev_table) + self.assertEquals('foreign_id', rev_column) + db.delete_table(main_table) + db.delete_table(reference_table) + + def test_delete_fk_column(self): + main_table = 'test_drop_foreign' + ref_table = 'test_df_ref' + self._create_foreign_tables(main_table, ref_table) + db.execute_deferred_sql() + constraints = db._find_foreign_constraints(main_table, 'foreign_id') + self.assertEquals(len(constraints), 1) + db.delete_column(main_table, 'foreign_id') + constraints = db._find_foreign_constraints(main_table, 'foreign_id') + self.assertEquals(len(constraints), 0) + db.delete_table(main_table) + db.delete_table(ref_table) + + def test_rename_fk_column(self): + main_table = 'test_rename_foreign' + ref_table = 'test_rf_ref' + self._create_foreign_tables(main_table, ref_table) + db.execute_deferred_sql() + constraints = db._find_foreign_constraints(main_table, 'foreign_id') + self.assertEquals(len(constraints), 1) + db.rename_column(main_table, 'foreign_id', 'reference_id') + db.execute_deferred_sql() #Create constraints + constraints = db._find_foreign_constraints(main_table, 'reference_id') + self.assertEquals(len(constraints), 1) + db.delete_table(main_table) + db.delete_table(ref_table) + + def test_rename_fk_inbound(self): + """ + Tests that the column referred to by an external column can be renamed. + Edge case, but also useful as stepping stone to renaming tables. + """ + main_table = 'test_rename_fk_inbound' + ref_table = 'test_rfi_ref' + self._create_foreign_tables(main_table, ref_table) + db.execute_deferred_sql() + constraints = db._lookup_reverse_constraint(ref_table, 'id') + self.assertEquals(len(constraints), 1) + db.rename_column(ref_table, 'id', 'rfi_id') + db.execute_deferred_sql() #Create constraints + constraints = db._lookup_reverse_constraint(ref_table, 'rfi_id') + self.assertEquals(len(constraints), 1) + cname = db._find_foreign_constraints(main_table, 'foreign_id')[0] + (rtable, rcolumn) = db._lookup_constraint_references(main_table, cname) + self.assertEquals(rcolumn, 'rfi_id') + db.delete_table(main_table) + db.delete_table(ref_table) + + def test_rename_constrained_table(self): + """Renames a table with a foreign key column (towards another table)""" + main_table = 'test_rn_table' + ref_table = 'test_rt_ref' + renamed_table = 'test_renamed_table' + self._create_foreign_tables(main_table, ref_table) + db.execute_deferred_sql() + constraints = db._find_foreign_constraints(main_table, 'foreign_id') + self.assertEquals(len(constraints), 1) + db.rename_table(main_table, renamed_table) + db.execute_deferred_sql() #Create constraints + constraints = db._find_foreign_constraints(renamed_table, 'foreign_id') + self.assertEquals(len(constraints), 1) + (rtable, rcolumn) = db._lookup_constraint_references( + renamed_table, constraints[0]) + self.assertEquals(rcolumn, 'id') + db.delete_table(renamed_table) + db.delete_table(ref_table) + + def test_renamed_referenced_table(self): + """Rename a table referred to in a foreign key""" + main_table = 'test_rn_refd_table' + ref_table = 'test_rrt_ref' + renamed_table = 'test_renamed_ref' + self._create_foreign_tables(main_table, ref_table) + db.execute_deferred_sql() + constraints = 
db._lookup_reverse_constraint(ref_table) + self.assertEquals(len(constraints), 1) + db.rename_table(ref_table, renamed_table) + db.execute_deferred_sql() #Create constraints + constraints = db._find_foreign_constraints(main_table, 'foreign_id') + self.assertEquals(len(constraints), 1) + (rtable, rcolumn) = db._lookup_constraint_references( + main_table, constraints[0]) + self.assertEquals(renamed_table, rtable) + db.delete_table(main_table) + db.delete_table(renamed_table) + diff --git a/awx/lib/site-packages/south/tests/deps_a/__init__.py b/awx/lib/site-packages/south/tests/deps_a/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/south/tests/deps_a/migrations/0001_a.py b/awx/lib/site-packages/south/tests/deps_a/migrations/0001_a.py new file mode 100644 index 0000000000..d27ed3a172 --- /dev/null +++ b/awx/lib/site-packages/south/tests/deps_a/migrations/0001_a.py @@ -0,0 +1,11 @@ +from south.db import db +from django.db import models + +class Migration: + + def forwards(self): + pass + + def backwards(self): + pass + diff --git a/awx/lib/site-packages/south/tests/deps_a/migrations/0002_a.py b/awx/lib/site-packages/south/tests/deps_a/migrations/0002_a.py new file mode 100644 index 0000000000..d27ed3a172 --- /dev/null +++ b/awx/lib/site-packages/south/tests/deps_a/migrations/0002_a.py @@ -0,0 +1,11 @@ +from south.db import db +from django.db import models + +class Migration: + + def forwards(self): + pass + + def backwards(self): + pass + diff --git a/awx/lib/site-packages/south/tests/deps_a/migrations/0003_a.py b/awx/lib/site-packages/south/tests/deps_a/migrations/0003_a.py new file mode 100644 index 0000000000..d27ed3a172 --- /dev/null +++ b/awx/lib/site-packages/south/tests/deps_a/migrations/0003_a.py @@ -0,0 +1,11 @@ +from south.db import db +from django.db import models + +class Migration: + + def forwards(self): + pass + + def backwards(self): + pass + diff --git a/awx/lib/site-packages/south/tests/deps_a/migrations/0004_a.py b/awx/lib/site-packages/south/tests/deps_a/migrations/0004_a.py new file mode 100644 index 0000000000..e5c29771c8 --- /dev/null +++ b/awx/lib/site-packages/south/tests/deps_a/migrations/0004_a.py @@ -0,0 +1,13 @@ +from south.db import db +from django.db import models + +class Migration: + + depends_on = [('deps_b', '0003_b')] + + def forwards(self): + pass + + def backwards(self): + pass + diff --git a/awx/lib/site-packages/south/tests/deps_a/migrations/0005_a.py b/awx/lib/site-packages/south/tests/deps_a/migrations/0005_a.py new file mode 100644 index 0000000000..d27ed3a172 --- /dev/null +++ b/awx/lib/site-packages/south/tests/deps_a/migrations/0005_a.py @@ -0,0 +1,11 @@ +from south.db import db +from django.db import models + +class Migration: + + def forwards(self): + pass + + def backwards(self): + pass + diff --git a/awx/lib/site-packages/south/tests/deps_a/migrations/__init__.py b/awx/lib/site-packages/south/tests/deps_a/migrations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/south/tests/deps_a/models.py b/awx/lib/site-packages/south/tests/deps_a/models.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/south/tests/deps_b/__init__.py b/awx/lib/site-packages/south/tests/deps_b/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/south/tests/deps_b/migrations/0001_b.py b/awx/lib/site-packages/south/tests/deps_b/migrations/0001_b.py new file mode 100644 index 0000000000..d27ed3a172 --- 
/dev/null +++ b/awx/lib/site-packages/south/tests/deps_b/migrations/0001_b.py @@ -0,0 +1,11 @@ +from south.db import db +from django.db import models + +class Migration: + + def forwards(self): + pass + + def backwards(self): + pass + diff --git a/awx/lib/site-packages/south/tests/deps_b/migrations/0002_b.py b/awx/lib/site-packages/south/tests/deps_b/migrations/0002_b.py new file mode 100644 index 0000000000..459ea5dd94 --- /dev/null +++ b/awx/lib/site-packages/south/tests/deps_b/migrations/0002_b.py @@ -0,0 +1,13 @@ +from south.db import db +from django.db import models + +class Migration: + + depends_on = [('deps_a', '0002_a')] + + def forwards(self): + pass + + def backwards(self): + pass + diff --git a/awx/lib/site-packages/south/tests/deps_b/migrations/0003_b.py b/awx/lib/site-packages/south/tests/deps_b/migrations/0003_b.py new file mode 100644 index 0000000000..1692888ed0 --- /dev/null +++ b/awx/lib/site-packages/south/tests/deps_b/migrations/0003_b.py @@ -0,0 +1,13 @@ +from south.db import db +from django.db import models + +class Migration: + + depends_on = [('deps_a', '0003_a')] + + def forwards(self): + pass + + def backwards(self): + pass + diff --git a/awx/lib/site-packages/south/tests/deps_b/migrations/0004_b.py b/awx/lib/site-packages/south/tests/deps_b/migrations/0004_b.py new file mode 100644 index 0000000000..d27ed3a172 --- /dev/null +++ b/awx/lib/site-packages/south/tests/deps_b/migrations/0004_b.py @@ -0,0 +1,11 @@ +from south.db import db +from django.db import models + +class Migration: + + def forwards(self): + pass + + def backwards(self): + pass + diff --git a/awx/lib/site-packages/south/tests/deps_b/migrations/0005_b.py b/awx/lib/site-packages/south/tests/deps_b/migrations/0005_b.py new file mode 100644 index 0000000000..d27ed3a172 --- /dev/null +++ b/awx/lib/site-packages/south/tests/deps_b/migrations/0005_b.py @@ -0,0 +1,11 @@ +from south.db import db +from django.db import models + +class Migration: + + def forwards(self): + pass + + def backwards(self): + pass + diff --git a/awx/lib/site-packages/south/tests/deps_b/migrations/__init__.py b/awx/lib/site-packages/south/tests/deps_b/migrations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/south/tests/deps_b/models.py b/awx/lib/site-packages/south/tests/deps_b/models.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/south/tests/deps_c/__init__.py b/awx/lib/site-packages/south/tests/deps_c/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/south/tests/deps_c/migrations/0001_c.py b/awx/lib/site-packages/south/tests/deps_c/migrations/0001_c.py new file mode 100644 index 0000000000..d27ed3a172 --- /dev/null +++ b/awx/lib/site-packages/south/tests/deps_c/migrations/0001_c.py @@ -0,0 +1,11 @@ +from south.db import db +from django.db import models + +class Migration: + + def forwards(self): + pass + + def backwards(self): + pass + diff --git a/awx/lib/site-packages/south/tests/deps_c/migrations/0002_c.py b/awx/lib/site-packages/south/tests/deps_c/migrations/0002_c.py new file mode 100644 index 0000000000..d27ed3a172 --- /dev/null +++ b/awx/lib/site-packages/south/tests/deps_c/migrations/0002_c.py @@ -0,0 +1,11 @@ +from south.db import db +from django.db import models + +class Migration: + + def forwards(self): + pass + + def backwards(self): + pass + diff --git a/awx/lib/site-packages/south/tests/deps_c/migrations/0003_c.py b/awx/lib/site-packages/south/tests/deps_c/migrations/0003_c.py 
new file mode 100644 index 0000000000..d27ed3a172 --- /dev/null +++ b/awx/lib/site-packages/south/tests/deps_c/migrations/0003_c.py @@ -0,0 +1,11 @@ +from south.db import db +from django.db import models + +class Migration: + + def forwards(self): + pass + + def backwards(self): + pass + diff --git a/awx/lib/site-packages/south/tests/deps_c/migrations/0004_c.py b/awx/lib/site-packages/south/tests/deps_c/migrations/0004_c.py new file mode 100644 index 0000000000..d27ed3a172 --- /dev/null +++ b/awx/lib/site-packages/south/tests/deps_c/migrations/0004_c.py @@ -0,0 +1,11 @@ +from south.db import db +from django.db import models + +class Migration: + + def forwards(self): + pass + + def backwards(self): + pass + diff --git a/awx/lib/site-packages/south/tests/deps_c/migrations/0005_c.py b/awx/lib/site-packages/south/tests/deps_c/migrations/0005_c.py new file mode 100644 index 0000000000..459ea5dd94 --- /dev/null +++ b/awx/lib/site-packages/south/tests/deps_c/migrations/0005_c.py @@ -0,0 +1,13 @@ +from south.db import db +from django.db import models + +class Migration: + + depends_on = [('deps_a', '0002_a')] + + def forwards(self): + pass + + def backwards(self): + pass + diff --git a/awx/lib/site-packages/south/tests/deps_c/migrations/__init__.py b/awx/lib/site-packages/south/tests/deps_c/migrations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/south/tests/deps_c/models.py b/awx/lib/site-packages/south/tests/deps_c/models.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/south/tests/emptyapp/__init__.py b/awx/lib/site-packages/south/tests/emptyapp/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/south/tests/emptyapp/migrations/__init__.py b/awx/lib/site-packages/south/tests/emptyapp/migrations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/south/tests/emptyapp/models.py b/awx/lib/site-packages/south/tests/emptyapp/models.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/south/tests/fakeapp/__init__.py b/awx/lib/site-packages/south/tests/fakeapp/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/south/tests/fakeapp/migrations/0001_spam.py b/awx/lib/site-packages/south/tests/fakeapp/migrations/0001_spam.py new file mode 100644 index 0000000000..9739648a22 --- /dev/null +++ b/awx/lib/site-packages/south/tests/fakeapp/migrations/0001_spam.py @@ -0,0 +1,17 @@ +from south.db import db +from django.db import models + +class Migration: + + def forwards(self): + # Model 'Spam' + db.create_table("southtest_spam", ( + ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)), + ('weight', models.FloatField()), + ('expires', models.DateTimeField()), + ('name', models.CharField(max_length=255)) + )) + + def backwards(self): + db.delete_table("southtest_spam") + diff --git a/awx/lib/site-packages/south/tests/fakeapp/migrations/0002_eggs.py b/awx/lib/site-packages/south/tests/fakeapp/migrations/0002_eggs.py new file mode 100644 index 0000000000..3ec83999fe --- /dev/null +++ b/awx/lib/site-packages/south/tests/fakeapp/migrations/0002_eggs.py @@ -0,0 +1,20 @@ +from south.db import db +from django.db import models + +class Migration: + + def forwards(self): + + Spam = db.mock_model(model_name='Spam', db_table='southtest_spam', db_tablespace='', pk_field_name='id', pk_field_type=models.AutoField) + + 
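+ # (db.mock_model builds a lightweight stand-in for the historical Spam + # model, just enough for the ForeignKey below to resolve its target + # table without importing the real, possibly since-changed, model.)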
db.create_table("southtest_eggs", ( + ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)), + ('size', models.FloatField()), + ('quantity', models.IntegerField()), + ('spam', models.ForeignKey(Spam)), + )) + + def backwards(self): + + db.delete_table("southtest_eggs") + diff --git a/awx/lib/site-packages/south/tests/fakeapp/migrations/0003_alter_spam.py b/awx/lib/site-packages/south/tests/fakeapp/migrations/0003_alter_spam.py new file mode 100644 index 0000000000..39126c279b --- /dev/null +++ b/awx/lib/site-packages/south/tests/fakeapp/migrations/0003_alter_spam.py @@ -0,0 +1,18 @@ +from south.db import db +from django.db import models + +class Migration: + + def forwards(self): + + db.alter_column("southtest_spam", 'weight', models.FloatField(null=True)) + + def backwards(self): + + db.alter_column("southtest_spam", 'weight', models.FloatField()) + + models = { + "fakeapp.bug135": { + 'date': ('models.DateTimeField', [], {'default': 'datetime.datetime(2009, 5, 6, 15, 33, 15, 780013)'}), + } + } diff --git a/awx/lib/site-packages/south/tests/fakeapp/migrations/__init__.py b/awx/lib/site-packages/south/tests/fakeapp/migrations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/south/tests/fakeapp/models.py b/awx/lib/site-packages/south/tests/fakeapp/models.py new file mode 100644 index 0000000000..cc39eb74c0 --- /dev/null +++ b/awx/lib/site-packages/south/tests/fakeapp/models.py @@ -0,0 +1,111 @@ +# -*- coding: UTF-8 -*- + +from django.db import models +from django.contrib.auth.models import User as UserAlias + +from south.modelsinspector import add_introspection_rules + +on_delete_is_available = hasattr(models, "PROTECT") # models here is django.db.models + +def default_func(): + return "yays" + +# An empty case. +class Other1(models.Model): pass + +# Another one +class Other3(models.Model): pass +def get_sentinel_object(): + """ + A function to return the object to be used in place of any deleted object, + when using the SET option for on_delete. + """ + # Create a new one, so we always have an instance to test with. Can't work! + return Other3() + +# Nastiness. 
+class HorribleModel(models.Model): + "A model to test the edge cases of model parsing" + + ZERO, ONE = 0, 1 + + # First, some nice fields + name = models.CharField(max_length=255) + short_name = models.CharField(max_length=50) + slug = models.SlugField(unique=True) + + # A ForeignKey, to a model above, and then below + o1 = models.ForeignKey(Other1) + o2 = models.ForeignKey('Other2') + + if on_delete_is_available: + o_set_null_on_delete = models.ForeignKey('Other3', null=True, on_delete=models.SET_NULL) + o_cascade_delete = models.ForeignKey('Other3', null=True, on_delete=models.CASCADE, related_name="cascademe") + o_protect = models.ForeignKey('Other3', null=True, on_delete=models.PROTECT, related_name="dontcascademe") + o_default_on_delete = models.ForeignKey('Other3', null=True, default=1, on_delete=models.SET_DEFAULT, related_name="setmedefault") + o_set_on_delete_function = models.ForeignKey('Other3', null=True, default=1, on_delete=models.SET(get_sentinel_object), related_name="setsentinel") + o_set_on_delete_value = models.ForeignKey('Other3', null=True, default=1, on_delete=models.SET(get_sentinel_object()), related_name="setsentinelwithactualvalue") # dubious case + o_no_action_on_delete = models.ForeignKey('Other3', null=True, default=1, on_delete=models.DO_NOTHING, related_name="deletemeatyourperil") + + + # Now to something outside + user = models.ForeignKey(UserAlias, related_name="horribles") + + # Unicode! + code = models.CharField(max_length=25, default="↑↑↓↓←→←→BA") + + # Odd defaults! + class_attr = models.IntegerField(default=ZERO) + func = models.CharField(max_length=25, default=default_func) + + # Time to get nasty. Define a non-field choices, and use it + choices = [('hello', '1'), ('world', '2')] + choiced = models.CharField(max_length=20, choices=choices) + + class Meta: + db_table = "my_fave" + verbose_name = "Dr. Strangelove," + \ + """or how I learned to stop worrying +and love the bomb""" + + # Now spread over multiple lines + multiline = \ + models.TextField( + ) + +# Special case. 
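+ # (Other2 is declared after HorribleModel on purpose: the o2 field above + # names it only via the lazy string 'Other2', so the inspector has to + # cope with a forward reference.)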
+class Other2(models.Model): + # Try loading a field without a newline after it (inspect hates this) + close_but_no_cigar = models.PositiveIntegerField(primary_key=True) + +class CustomField(models.IntegerField): + def __init__(self, an_other_model, **kwargs): + super(CustomField, self).__init__(**kwargs) + self.an_other_model = an_other_model + +add_introspection_rules([ + ( + [CustomField], + [], + {'an_other_model': ('an_other_model', {})}, + ), +], ['^south\.tests\.fakeapp\.models\.CustomField']) + +class BaseModel(models.Model): + pass + +class SubModel(BaseModel): + others = models.ManyToManyField(Other1) + custom = CustomField(Other2) + +class CircularA(models.Model): + c = models.ForeignKey('CircularC') + +class CircularB(models.Model): + a = models.ForeignKey(CircularA) + +class CircularC(models.Model): + b = models.ForeignKey(CircularB) + +class Recursive(models.Model): + self = models.ForeignKey('self') diff --git a/awx/lib/site-packages/south/tests/freezer.py b/awx/lib/site-packages/south/tests/freezer.py new file mode 100644 index 0000000000..82c44024af --- /dev/null +++ b/awx/lib/site-packages/south/tests/freezer.py @@ -0,0 +1,15 @@ +from south.tests import unittest + +from south.creator.freezer import model_dependencies +from south.tests.fakeapp import models + +class TestFreezer(unittest.TestCase): + def test_dependencies(self): + self.assertEqual(set(model_dependencies(models.SubModel)), + set([models.BaseModel, models.Other1, models.Other2])) + + self.assertEqual(set(model_dependencies(models.CircularA)), + set([models.CircularA, models.CircularB, models.CircularC])) + + self.assertEqual(set(model_dependencies(models.Recursive)), + set([models.Recursive])) diff --git a/awx/lib/site-packages/south/tests/inspector.py b/awx/lib/site-packages/south/tests/inspector.py new file mode 100644 index 0000000000..dcd6d57de1 --- /dev/null +++ b/awx/lib/site-packages/south/tests/inspector.py @@ -0,0 +1,109 @@ + +from south.tests import Monkeypatcher, skipUnless +from south.modelsinspector import (convert_on_delete_handler, get_value, + IsDefault, models, value_clean) + +from fakeapp.models import HorribleModel, get_sentinel_object + + +on_delete_is_available = hasattr(models, "PROTECT") # models here is django.db.models +skipUnlessOnDeleteAvailable = skipUnless(on_delete_is_available, "not testing on_delete -- not available on Django<1.3") + +class TestModelInspector(Monkeypatcher): + + """ + Tests if the various parts of the modelinspector work. + """ + + def test_get_value(self): + + # Let's start nicely. 
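+ # (On the Django versions South supports, _meta.get_field_by_name + # returns a (field, model, direct, m2m) tuple, hence the [0] below.)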
+ name = HorribleModel._meta.get_field_by_name("name")[0] + slug = HorribleModel._meta.get_field_by_name("slug")[0] + user = HorribleModel._meta.get_field_by_name("user")[0] + + # Simple int retrieval + self.assertEqual( + get_value(name, ["max_length", {}]), + "255", + ) + + # Bool retrieval + self.assertEqual( + get_value(slug, ["unique", {}]), + "True", + ) + + # String retrieval + self.assertEqual( + get_value(user, ["rel.related_name", {}]), + "'horribles'", + ) + + # Default triggering + self.assertEqual( + get_value(slug, ["unique", {"default": False}]), + "True", + ) + self.assertRaises( + IsDefault, + get_value, + slug, + ["unique", {"default": True}], + ) + + @skipUnlessOnDeleteAvailable + def test_get_value_on_delete(self): + + # First validate the FK fields with on_delete options + o_set_null_on_delete = HorribleModel._meta.get_field_by_name("o_set_null_on_delete")[0] + o_cascade_delete = HorribleModel._meta.get_field_by_name("o_cascade_delete")[0] + o_protect = HorribleModel._meta.get_field_by_name("o_protect")[0] + o_default_on_delete = HorribleModel._meta.get_field_by_name("o_default_on_delete")[0] + o_set_on_delete_function = HorribleModel._meta.get_field_by_name("o_set_on_delete_function")[0] + o_set_on_delete_value = HorribleModel._meta.get_field_by_name("o_set_on_delete_value")[0] + o_no_action_on_delete = HorribleModel._meta.get_field_by_name("o_no_action_on_delete")[0] + # TODO this is repeated from the introspection_details in modelsinspector: + # better to refactor that so we can reference these settings, in case they + # must change at some point. + on_delete = ["rel.on_delete", {"default": models.CASCADE, "is_django_function": True, "converter": convert_on_delete_handler, }] + + # Foreign Key cascade update/delete + self.assertRaises( + IsDefault, + get_value, + o_cascade_delete, + on_delete, + ) + self.assertEqual( + get_value(o_protect, on_delete), + "models.PROTECT", + ) + self.assertEqual( + get_value(o_no_action_on_delete, on_delete), + "models.DO_NOTHING", + ) + self.assertEqual( + get_value(o_set_null_on_delete, on_delete), + "models.SET_NULL", + ) + self.assertEqual( + get_value(o_default_on_delete, on_delete), + "models.SET_DEFAULT", + ) + # For now o_set_on_delete raises, see modelsinspector.py + #self.assertEqual( + # get_value(o_set_on_delete_function, on_delete), + # "models.SET(get_sentinel_object)", + #) + self.assertRaises( + ValueError, + get_value, + o_set_on_delete_function, + on_delete, + ) + self.assertEqual( + get_value(o_set_on_delete_value, on_delete), + "models.SET(%s)" % value_clean(get_sentinel_object()), + ) + \ No newline at end of file diff --git a/awx/lib/site-packages/south/tests/logger.py b/awx/lib/site-packages/south/tests/logger.py new file mode 100644 index 0000000000..78d159dc86 --- /dev/null +++ b/awx/lib/site-packages/south/tests/logger.py @@ -0,0 +1,82 @@ +import io +import logging +import os +import tempfile +from south.tests import unittest +import sys + +from django.conf import settings +from django.db import connection, models + +from south.db import db +from south.logger import close_logger + +class TestLogger(unittest.TestCase): + + """ + Tests if the logging is working reasonably. Some tests ignored if you don't + have write permission to the disk. + """ + + def setUp(self): + db.debug = False + self.test_path = tempfile.mkstemp(suffix=".south.log")[1] + + def test_db_execute_logging_nofile(self): + "Does logging degrade nicely if SOUTH_LOGGING_ON not set?" 
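+ # (With SOUTH_LOGGING_ON off, south.logger should fall back to its null + # handler, so this create_table is expected to neither raise nor write + # a log file.)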
+ settings.SOUTH_LOGGING_ON = False # this needs to be set to False + # to avoid issues where other tests + # set this to True. settings is shared + # between these tests. + db.create_table("test9", [('email_confirmed', models.BooleanField(default=False))]) + + def test_db_execute_logging_off_with_basic_config(self): + """ + Does the south logger avoid outputting debug information with + south logging turned off and python logging configured with + a basic config? + """ + settings.SOUTH_LOGGING_ON = False + + # Set root logger to capture WARNING and worse + logging_stream = io.StringIO() + logging.basicConfig(stream=logging_stream, level=logging.WARNING) + + db.create_table("test12", [('email_confirmed', models.BooleanField(default=False))]) + + # since south logging is off, and our root logger is at WARNING + # we should not find DEBUG info in the log + self.assertEqual(logging_stream.getvalue(), '') + + def test_db_execute_logging_validfile(self): + "Does logging work when passing in a valid file?" + settings.SOUTH_LOGGING_ON = True + settings.SOUTH_LOGGING_FILE = self.test_path + # Check to see if we can make the logfile + try: + fh = open(self.test_path, "w") + except IOError: + # Permission was denied, ignore the test. + return + else: + fh.close() + # Do an action which logs + db.create_table("test10", [('email_confirmed', models.BooleanField(default=False))]) + # Close the logged file + close_logger() + try: + os.remove(self.test_path) + except: + # It's a tempfile, it's not vital we remove it. + pass + + def test_db_execute_logging_missingfilename(self): + "Does logging raise an error if there is a missing filename?" + settings.SOUTH_LOGGING_ON = True + settings.SOUTH_LOGGING_FILE = None + self.assertRaises( + IOError, + db.create_table, + "test11", + [('email_confirmed', models.BooleanField(default=False))], + ) diff --git a/awx/lib/site-packages/south/tests/logic.py b/awx/lib/site-packages/south/tests/logic.py new file mode 100644 index 0000000000..2b21cef6e2 --- /dev/null +++ b/awx/lib/site-packages/south/tests/logic.py @@ -0,0 +1,902 @@ +from south.tests import unittest + +import datetime +import sys +try: + set # builtin, python >=2.4 +except NameError: + from sets import Set as set # in stdlib, python >=2.3 + +from south import exceptions +from south.migration import migrate_app +from south.migration.base import all_migrations, Migrations +from south.creator.changes import ManualChanges +from south.migration.utils import depends, flatten, get_app_label +from south.models import MigrationHistory +from south.tests import Monkeypatcher +from south.db import db + + + +class TestBrokenMigration(Monkeypatcher): + installed_apps = ["fakeapp", "otherfakeapp", "brokenapp"] + + def test_broken_dependencies(self): + self.assertRaises( + exceptions.DependsOnUnmigratedApplication, + Migrations.calculate_dependencies, + force=True, + ) + #depends_on_unknown = self.brokenapp['0002_depends_on_unknown'] + #self.assertRaises(exceptions.DependsOnUnknownMigration, + # depends_on_unknown.dependencies) + #depends_on_higher = self.brokenapp['0003_depends_on_higher'] + #self.assertRaises(exceptions.DependsOnHigherMigration, + # depends_on_higher.dependencies) + + +class TestMigration(Monkeypatcher): + installed_apps = ["fakeapp", "otherfakeapp"] + + def setUp(self): + super(TestMigration, self).setUp() + self.fakeapp = Migrations('fakeapp') + self.otherfakeapp = Migrations('otherfakeapp') + Migrations.calculate_dependencies(force=True) + + def test_str(self): + migrations = [str(m) for m in 
self.fakeapp] + self.assertEqual(['fakeapp:0001_spam', + 'fakeapp:0002_eggs', + 'fakeapp:0003_alter_spam'], + migrations) + + def test_repr(self): + migrations = [repr(m) for m in self.fakeapp] + self.assertEqual(['<Migration: fakeapp:0001_spam>', + '<Migration: fakeapp:0002_eggs>', + '<Migration: fakeapp:0003_alter_spam>'], + migrations) + + def test_app_label(self): + self.assertEqual(['fakeapp', 'fakeapp', 'fakeapp'], + [m.app_label() for m in self.fakeapp]) + + def test_name(self): + self.assertEqual(['0001_spam', '0002_eggs', '0003_alter_spam'], + [m.name() for m in self.fakeapp]) + + def test_full_name(self): + self.assertEqual(['fakeapp.migrations.0001_spam', + 'fakeapp.migrations.0002_eggs', + 'fakeapp.migrations.0003_alter_spam'], + [m.full_name() for m in self.fakeapp]) + + def test_migration(self): + # Can't use vanilla import, modules beginning with numbers aren't in grammar + M1 = __import__("fakeapp.migrations.0001_spam", {}, {}, ['Migration']).Migration + M2 = __import__("fakeapp.migrations.0002_eggs", {}, {}, ['Migration']).Migration + M3 = __import__("fakeapp.migrations.0003_alter_spam", {}, {}, ['Migration']).Migration + self.assertEqual([M1, M2, M3], + [m.migration().Migration for m in self.fakeapp]) + self.assertRaises(exceptions.UnknownMigration, + self.fakeapp['9999_unknown'].migration) + + def test_previous(self): + self.assertEqual([None, + self.fakeapp['0001_spam'], + self.fakeapp['0002_eggs']], + [m.previous() for m in self.fakeapp]) + + def test_dependencies(self): + "Test that the dependency detection works." + self.assertEqual([ + set([]), + set([self.fakeapp['0001_spam']]), + set([self.fakeapp['0002_eggs']]) + ], + [m.dependencies for m in self.fakeapp], + ) + self.assertEqual([ + set([self.fakeapp['0001_spam']]), + set([self.otherfakeapp['0001_first']]), + set([ + self.otherfakeapp['0002_second'], + self.fakeapp['0003_alter_spam'], + ]) + ], + [m.dependencies for m in self.otherfakeapp], + ) + + def test_forwards_plan(self): + self.assertEqual([ + [self.fakeapp['0001_spam']], + [ + self.fakeapp['0001_spam'], + self.fakeapp['0002_eggs'] + ], + [ + self.fakeapp['0001_spam'], + self.fakeapp['0002_eggs'], + self.fakeapp['0003_alter_spam'], + ] + ], + [m.forwards_plan() for m in self.fakeapp], + ) + self.assertEqual([ + [ + self.fakeapp['0001_spam'], + self.otherfakeapp['0001_first'] + ], + [ + self.fakeapp['0001_spam'], + self.otherfakeapp['0001_first'], + self.otherfakeapp['0002_second'] + ], + [ + self.fakeapp['0001_spam'], + self.otherfakeapp['0001_first'], + self.otherfakeapp['0002_second'], + self.fakeapp['0002_eggs'], + self.fakeapp['0003_alter_spam'], + self.otherfakeapp['0003_third'], + ] + ], + [m.forwards_plan() for m in self.otherfakeapp], + ) + + def test_is_before(self): + F1 = self.fakeapp['0001_spam'] + F2 = self.fakeapp['0002_eggs'] + F3 = self.fakeapp['0003_alter_spam'] + O1 = self.otherfakeapp['0001_first'] + O2 = self.otherfakeapp['0002_second'] + O3 = self.otherfakeapp['0003_third'] + self.assertTrue(F1.is_before(F2)) + self.assertTrue(F1.is_before(F3)) + self.assertTrue(F2.is_before(F3)) + self.assertEqual(O3.is_before(O1), False) + self.assertEqual(O3.is_before(O2), False) + self.assertEqual(O2.is_before(O2), False) + self.assertEqual(O2.is_before(O1), False) + self.assertEqual(F2.is_before(O1), None) + self.assertEqual(F2.is_before(O2), None) + self.assertEqual(F2.is_before(O3), None) + + +class TestMigrationDependencies(Monkeypatcher): + installed_apps = ['deps_a', 'deps_b', 'deps_c'] + + def setUp(self): + super(TestMigrationDependencies, 
self).setUp() + self.deps_a = Migrations('deps_a') + self.deps_b = Migrations('deps_b') + self.deps_c = Migrations('deps_c') + Migrations.calculate_dependencies(force=True) + + def test_dependencies(self): + self.assertEqual( + [ + set([]), + set([self.deps_a['0001_a']]), + set([self.deps_a['0002_a']]), + set([ + self.deps_a['0003_a'], + self.deps_b['0003_b'], + ]), + set([self.deps_a['0004_a']]), + ], + [m.dependencies for m in self.deps_a], + ) + self.assertEqual( + [ + set([]), + set([ + self.deps_b['0001_b'], + self.deps_a['0002_a'] + ]), + set([ + self.deps_b['0002_b'], + self.deps_a['0003_a'] + ]), + set([self.deps_b['0003_b']]), + set([self.deps_b['0004_b']]), + ], + [m.dependencies for m in self.deps_b], + ) + self.assertEqual( + [ + set([]), + set([self.deps_c['0001_c']]), + set([self.deps_c['0002_c']]), + set([self.deps_c['0003_c']]), + set([ + self.deps_c['0004_c'], + self.deps_a['0002_a'] + ]), + ], + [m.dependencies for m in self.deps_c], + ) + + def test_dependents(self): + self.assertEqual([set([self.deps_a['0002_a']]), + set([self.deps_c['0005_c'], + self.deps_b['0002_b'], + self.deps_a['0003_a']]), + set([self.deps_b['0003_b'], + self.deps_a['0004_a']]), + set([self.deps_a['0005_a']]), + set([])], + [m.dependents for m in self.deps_a]) + self.assertEqual([set([self.deps_b['0002_b']]), + set([self.deps_b['0003_b']]), + set([self.deps_b['0004_b'], + self.deps_a['0004_a']]), + set([self.deps_b['0005_b']]), + set([])], + [m.dependents for m in self.deps_b]) + self.assertEqual([set([self.deps_c['0002_c']]), + set([self.deps_c['0003_c']]), + set([self.deps_c['0004_c']]), + set([self.deps_c['0005_c']]), + set([])], + [m.dependents for m in self.deps_c]) + + def test_forwards_plan(self): + self.assertEqual([[self.deps_a['0001_a']], + [self.deps_a['0001_a'], + self.deps_a['0002_a']], + [self.deps_a['0001_a'], + self.deps_a['0002_a'], + self.deps_a['0003_a']], + [self.deps_b['0001_b'], + self.deps_a['0001_a'], + self.deps_a['0002_a'], + self.deps_b['0002_b'], + self.deps_a['0003_a'], + self.deps_b['0003_b'], + self.deps_a['0004_a']], + [self.deps_b['0001_b'], + self.deps_a['0001_a'], + self.deps_a['0002_a'], + self.deps_b['0002_b'], + self.deps_a['0003_a'], + self.deps_b['0003_b'], + self.deps_a['0004_a'], + self.deps_a['0005_a']]], + [m.forwards_plan() for m in self.deps_a]) + self.assertEqual([[self.deps_b['0001_b']], + [self.deps_b['0001_b'], + self.deps_a['0001_a'], + self.deps_a['0002_a'], + self.deps_b['0002_b']], + [self.deps_b['0001_b'], + self.deps_a['0001_a'], + self.deps_a['0002_a'], + self.deps_b['0002_b'], + self.deps_a['0003_a'], + self.deps_b['0003_b']], + [self.deps_b['0001_b'], + self.deps_a['0001_a'], + self.deps_a['0002_a'], + self.deps_b['0002_b'], + self.deps_a['0003_a'], + self.deps_b['0003_b'], + self.deps_b['0004_b']], + [self.deps_b['0001_b'], + self.deps_a['0001_a'], + self.deps_a['0002_a'], + self.deps_b['0002_b'], + self.deps_a['0003_a'], + self.deps_b['0003_b'], + self.deps_b['0004_b'], + self.deps_b['0005_b']]], + [m.forwards_plan() for m in self.deps_b]) + self.assertEqual([[self.deps_c['0001_c']], + [self.deps_c['0001_c'], + self.deps_c['0002_c']], + [self.deps_c['0001_c'], + self.deps_c['0002_c'], + self.deps_c['0003_c']], + [self.deps_c['0001_c'], + self.deps_c['0002_c'], + self.deps_c['0003_c'], + self.deps_c['0004_c']], + [self.deps_c['0001_c'], + self.deps_c['0002_c'], + self.deps_c['0003_c'], + self.deps_c['0004_c'], + self.deps_a['0001_a'], + self.deps_a['0002_a'], + self.deps_c['0005_c']]], + [m.forwards_plan() for m in self.deps_c]) + + def 
test_backwards_plan(self): + self.assertEqual([ + [ + self.deps_c['0005_c'], + self.deps_b['0005_b'], + self.deps_b['0004_b'], + self.deps_a['0005_a'], + self.deps_a['0004_a'], + self.deps_b['0003_b'], + self.deps_b['0002_b'], + self.deps_a['0003_a'], + self.deps_a['0002_a'], + self.deps_a['0001_a'], + ], + [ + self.deps_c['0005_c'], + self.deps_b['0005_b'], + self.deps_b['0004_b'], + self.deps_a['0005_a'], + self.deps_a['0004_a'], + self.deps_b['0003_b'], + self.deps_b['0002_b'], + self.deps_a['0003_a'], + self.deps_a['0002_a'], + ], + [ + self.deps_b['0005_b'], + self.deps_b['0004_b'], + self.deps_a['0005_a'], + self.deps_a['0004_a'], + self.deps_b['0003_b'], + self.deps_a['0003_a'], + ], + [ + self.deps_a['0005_a'], + self.deps_a['0004_a'], + ], + [ + self.deps_a['0005_a'], + ] + ], [m.backwards_plan() for m in self.deps_a]) + self.assertEqual([ + [ + self.deps_b['0005_b'], + self.deps_b['0004_b'], + self.deps_a['0005_a'], + self.deps_a['0004_a'], + self.deps_b['0003_b'], + self.deps_b['0002_b'], + self.deps_b['0001_b'], + ], + [ + self.deps_b['0005_b'], + self.deps_b['0004_b'], + self.deps_a['0005_a'], + self.deps_a['0004_a'], + self.deps_b['0003_b'], + self.deps_b['0002_b'], + ], + [ + self.deps_b['0005_b'], + self.deps_b['0004_b'], + self.deps_a['0005_a'], + self.deps_a['0004_a'], + self.deps_b['0003_b'], + ], + [ + self.deps_b['0005_b'], + self.deps_b['0004_b'], + ], + [ + self.deps_b['0005_b'], + ], + ], [m.backwards_plan() for m in self.deps_b]) + self.assertEqual([ + [ + self.deps_c['0005_c'], + self.deps_c['0004_c'], + self.deps_c['0003_c'], + self.deps_c['0002_c'], + self.deps_c['0001_c'], + ], + [ + self.deps_c['0005_c'], + self.deps_c['0004_c'], + self.deps_c['0003_c'], + self.deps_c['0002_c'], + ], + [ + self.deps_c['0005_c'], + self.deps_c['0004_c'], + self.deps_c['0003_c'], + ], + [ + self.deps_c['0005_c'], + self.deps_c['0004_c'], + ], + [self.deps_c['0005_c']] + ], [m.backwards_plan() for m in self.deps_c]) + + +class TestCircularDependencies(Monkeypatcher): + installed_apps = ["circular_a", "circular_b"] + + def test_plans(self): + Migrations.calculate_dependencies(force=True) + circular_a = Migrations('circular_a') + circular_b = Migrations('circular_b') + self.assertRaises( + exceptions.CircularDependency, + circular_a[-1].forwards_plan, + ) + self.assertRaises( + exceptions.CircularDependency, + circular_b[-1].forwards_plan, + ) + self.assertRaises( + exceptions.CircularDependency, + circular_a[-1].backwards_plan, + ) + self.assertRaises( + exceptions.CircularDependency, + circular_b[-1].backwards_plan, + ) + + +class TestMigrations(Monkeypatcher): + installed_apps = ["fakeapp", "otherfakeapp"] + + def test_all(self): + + M1 = Migrations(__import__("fakeapp", {}, {}, [''])) + M2 = Migrations(__import__("otherfakeapp", {}, {}, [''])) + + self.assertEqual( + [M1, M2], + list(all_migrations()), + ) + + def test(self): + + M1 = Migrations(__import__("fakeapp", {}, {}, [''])) + + self.assertEqual(M1, Migrations("fakeapp")) + self.assertEqual(M1, Migrations(self.create_fake_app("fakeapp"))) + + def test_application(self): + fakeapp = Migrations("fakeapp") + application = __import__("fakeapp", {}, {}, ['']) + self.assertEqual(application, fakeapp.application) + + def test_migration(self): + # Can't use vanilla import, modules beginning with numbers aren't in grammar + M1 = __import__("fakeapp.migrations.0001_spam", {}, {}, ['Migration']).Migration + M2 = __import__("fakeapp.migrations.0002_eggs", {}, {}, ['Migration']).Migration + migration = Migrations('fakeapp') + 
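+        # A Migrations instance is indexable by migration name (and by
+        # position), which is what the lookups below rely on.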
self.assertEqual(M1, migration['0001_spam'].migration().Migration) + self.assertEqual(M2, migration['0002_eggs'].migration().Migration) + self.assertRaises(exceptions.UnknownMigration, + migration['0001_jam'].migration) + + def test_guess_migration(self): + # Can't use vanilla import, modules beginning with numbers aren't in grammar + M1 = __import__("fakeapp.migrations.0001_spam", {}, {}, ['Migration']).Migration + migration = Migrations('fakeapp') + self.assertEqual(M1, migration.guess_migration("0001_spam").migration().Migration) + self.assertEqual(M1, migration.guess_migration("0001_spa").migration().Migration) + self.assertEqual(M1, migration.guess_migration("0001_sp").migration().Migration) + self.assertEqual(M1, migration.guess_migration("0001_s").migration().Migration) + self.assertEqual(M1, migration.guess_migration("0001_").migration().Migration) + self.assertEqual(M1, migration.guess_migration("0001").migration().Migration) + self.assertRaises(exceptions.UnknownMigration, + migration.guess_migration, "0001-spam") + self.assertRaises(exceptions.MultiplePrefixMatches, + migration.guess_migration, "000") + self.assertRaises(exceptions.MultiplePrefixMatches, + migration.guess_migration, "") + self.assertRaises(exceptions.UnknownMigration, + migration.guess_migration, "0001_spams") + self.assertRaises(exceptions.UnknownMigration, + migration.guess_migration, "0001_jam") + + def test_app_label(self): + names = ['fakeapp', 'otherfakeapp'] + self.assertEqual(names, + [Migrations(n).app_label() for n in names]) + + def test_full_name(self): + names = ['fakeapp', 'otherfakeapp'] + self.assertEqual([n + '.migrations' for n in names], + [Migrations(n).full_name() for n in names]) + + +class TestMigrationLogic(Monkeypatcher): + + """ + Tests if the various logic functions in migration actually work. + """ + + installed_apps = ["fakeapp", "otherfakeapp"] + + def setUp(self): + super(TestMigrationLogic, self).setUp() + MigrationHistory.objects.all().delete() + + def assertListEqual(self, list1, list2, msg=None): + list1 = set(list1) + list2 = set(list2) + return self.assert_(list1 == list2, "%s is not equal to %s" % (list1, list2)) + + def test_find_ghost_migrations(self): + pass + + def test_apply_migrations(self): + migrations = Migrations("fakeapp") + + # We should start with no migrations + self.assertEqual(list(MigrationHistory.objects.all()), []) + + # Apply them normally + migrate_app(migrations, target_name=None, fake=False, + load_initial_data=True) + + # We should finish with all migrations + self.assertListEqual( + (("fakeapp", "0001_spam"), + ("fakeapp", "0002_eggs"), + ("fakeapp", "0003_alter_spam"),), + MigrationHistory.objects.values_list("app_name", "migration"), + ) + + # Now roll them backwards + migrate_app(migrations, target_name="zero", fake=False) + + # Finish with none + self.assertEqual(list(MigrationHistory.objects.all()), []) + + + def test_migration_merge_forwards(self): + migrations = Migrations("fakeapp") + + # We should start with no migrations + self.assertEqual(list(MigrationHistory.objects.all()), []) + + # Insert one in the wrong order + MigrationHistory.objects.create(app_name = "fakeapp", + migration = "0002_eggs", + applied = datetime.datetime.now()) + + # Did it go in? 
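+        # (The history table should now hold exactly the one
+        # out-of-order 0002_eggs row.)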
+ self.assertListEqual( + (("fakeapp", "0002_eggs"),), + MigrationHistory.objects.values_list("app_name", "migration"), + ) + + # Apply them normally + self.assertRaises(exceptions.InconsistentMigrationHistory, + migrate_app, + migrations, target_name=None, fake=False) + self.assertRaises(exceptions.InconsistentMigrationHistory, + migrate_app, + migrations, target_name='zero', fake=False) + try: + migrate_app(migrations, target_name=None, fake=False) + except exceptions.InconsistentMigrationHistory as e: + self.assertEqual( + [ + ( + migrations['0002_eggs'], + migrations['0001_spam'], + ) + ], + e.problems, + ) + try: + migrate_app(migrations, target_name="zero", fake=False) + except exceptions.InconsistentMigrationHistory as e: + self.assertEqual( + [ + ( + migrations['0002_eggs'], + migrations['0001_spam'], + ) + ], + e.problems, + ) + + # Nothing should have changed (no merge mode!) + self.assertListEqual( + (("fakeapp", "0002_eggs"),), + MigrationHistory.objects.values_list("app_name", "migration"), + ) + + # Apply with merge + migrate_app(migrations, target_name=None, merge=True, fake=False) + + # We should finish with all migrations + self.assertListEqual( + (("fakeapp", "0001_spam"), + ("fakeapp", "0002_eggs"), + ("fakeapp", "0003_alter_spam"),), + MigrationHistory.objects.values_list("app_name", "migration"), + ) + + # Now roll them backwards + migrate_app(migrations, target_name="0002", fake=False) + migrate_app(migrations, target_name="0001", fake=True) + migrate_app(migrations, target_name="zero", fake=False) + + # Finish with none + self.assertEqual(list(MigrationHistory.objects.all()), []) + + def test_alter_column_null(self): + + def null_ok(eat_exception=True): + from django.db import connection, transaction + # the DBAPI introspection module fails on postgres NULLs. + cursor = connection.cursor() + + # SQLite has weird now() + if db.backend_name == "sqlite3": + now_func = "DATETIME('NOW')" + # So does SQLServer... should we be using a backend attribute? 
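+            # Each backend spells "current timestamp" differently, so the
+            # INSERT below interpolates a per-backend now() expression.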
+ elif db.backend_name == "pyodbc": + now_func = "GETDATE()" + elif db.backend_name == "oracle": + now_func = "SYSDATE" + else: + now_func = "NOW()" + + try: + if db.backend_name == "pyodbc": + cursor.execute("SET IDENTITY_INSERT southtest_spam ON;") + cursor.execute("INSERT INTO southtest_spam (id, weight, expires, name) VALUES (100, NULL, %s, 'whatever');" % now_func) + except: + if eat_exception: + transaction.rollback() + return False + else: + raise + else: + cursor.execute("DELETE FROM southtest_spam") + transaction.commit() + return True + + MigrationHistory.objects.all().delete() + migrations = Migrations("fakeapp") + + # by default name is NOT NULL + migrate_app(migrations, target_name="0002", fake=False) + self.failIf(null_ok()) + self.assertListEqual( + (("fakeapp", "0001_spam"), + ("fakeapp", "0002_eggs"),), + MigrationHistory.objects.values_list("app_name", "migration"), + ) + + # after 0003, it should be NULL + migrate_app(migrations, target_name="0003", fake=False) + self.assert_(null_ok(False)) + self.assertListEqual( + (("fakeapp", "0001_spam"), + ("fakeapp", "0002_eggs"), + ("fakeapp", "0003_alter_spam"),), + MigrationHistory.objects.values_list("app_name", "migration"), + ) + + # make sure it is NOT NULL again + migrate_app(migrations, target_name="0002", fake=False) + self.failIf(null_ok(), 'weight not null after migration') + self.assertListEqual( + (("fakeapp", "0001_spam"), + ("fakeapp", "0002_eggs"),), + MigrationHistory.objects.values_list("app_name", "migration"), + ) + + # finish with no migrations, otherwise other tests fail... + migrate_app(migrations, target_name="zero", fake=False) + self.assertEqual(list(MigrationHistory.objects.all()), []) + + def test_dependencies(self): + + fakeapp = Migrations("fakeapp") + otherfakeapp = Migrations("otherfakeapp") + + # Test a simple path + self.assertEqual([fakeapp['0001_spam'], + fakeapp['0002_eggs'], + fakeapp['0003_alter_spam']], + fakeapp['0003_alter_spam'].forwards_plan()) + + # And a complex one. 
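+        # otherfakeapp's 0003_third depends on fakeapp's 0003_alter_spam, so
+        # the expected plan interleaves both apps in dependency order.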
+ self.assertEqual( + [ + fakeapp['0001_spam'], + otherfakeapp['0001_first'], + otherfakeapp['0002_second'], + fakeapp['0002_eggs'], + fakeapp['0003_alter_spam'], + otherfakeapp['0003_third'] + ], + otherfakeapp['0003_third'].forwards_plan(), + ) + + +class TestMigrationUtils(Monkeypatcher): + installed_apps = ["fakeapp", "otherfakeapp"] + + def test_get_app_label(self): + self.assertEqual( + "southtest", + get_app_label(self.create_fake_app("southtest.models")), + ) + self.assertEqual( + "baz", + get_app_label(self.create_fake_app("foo.bar.baz.models")), + ) + +class TestUtils(unittest.TestCase): + + def test_flatten(self): + self.assertEqual([], list(flatten(iter([])))) + self.assertEqual([], list(flatten(iter([iter([]), ])))) + self.assertEqual([1], list(flatten(iter([1])))) + self.assertEqual([1, 2], list(flatten(iter([1, 2])))) + self.assertEqual([1, 2], list(flatten(iter([iter([1]), 2])))) + self.assertEqual([1, 2], list(flatten(iter([iter([1, 2])])))) + self.assertEqual([1, 2, 3], list(flatten(iter([iter([1, 2]), 3])))) + self.assertEqual([1, 2, 3], + list(flatten(iter([iter([1]), iter([2]), 3])))) + self.assertEqual([1, 2, 3], + list(flatten([[1], [2], 3]))) + + def test_depends(self): + graph = {'A1': []} + self.assertEqual(['A1'], + depends('A1', lambda n: graph[n])) + graph = {'A1': [], + 'A2': ['A1'], + 'A3': ['A2']} + self.assertEqual(['A1', 'A2', 'A3'], + depends('A3', lambda n: graph[n])) + graph = {'A1': [], + 'A2': ['A1'], + 'A3': ['A2', 'A1']} + self.assertEqual(['A1', 'A2', 'A3'], + depends('A3', lambda n: graph[n])) + graph = {'A1': [], + 'A2': ['A1'], + 'A3': ['A2', 'A1', 'B1'], + 'B1': []} + self.assertEqual( + ['B1', 'A1', 'A2', 'A3'], + depends('A3', lambda n: graph[n]), + ) + graph = {'A1': [], + 'A2': ['A1'], + 'A3': ['A2', 'A1', 'B2'], + 'B1': [], + 'B2': ['B1']} + self.assertEqual( + ['B1', 'B2', 'A1', 'A2', 'A3'], + depends('A3', lambda n: graph[n]), + ) + graph = {'A1': [], + 'A2': ['A1', 'B1'], + 'A3': ['A2'], + 'B1': ['A1']} + self.assertEqual(['A1', 'B1', 'A2', 'A3'], + depends('A3', lambda n: graph[n])) + graph = {'A1': [], + 'A2': ['A1'], + 'A3': ['A2', 'A1', 'B2'], + 'B1': [], + 'B2': ['B1', 'C1'], + 'C1': ['B1']} + self.assertEqual( + ['B1', 'C1', 'B2', 'A1', 'A2', 'A3'], + depends('A3', lambda n: graph[n]), + ) + graph = {'A1': [], + 'A2': ['A1'], + 'A3': ['A2', 'B2', 'A1', 'C1'], + 'B1': ['A1'], + 'B2': ['B1', 'C2', 'A1'], + 'C1': ['B1'], + 'C2': ['C1', 'A1'], + 'C3': ['C2']} + self.assertEqual( + ['A1', 'B1', 'C1', 'C2', 'B2', 'A2', 'A3'], + depends('A3', lambda n: graph[n]), + ) + + def assertCircularDependency(self, trace, target, graph): + "Custom assertion that checks a circular dependency is detected correctly." 
+ self.assertRaises( + exceptions.CircularDependency, + depends, + target, + lambda n: graph[n], + ) + try: + depends(target, lambda n: graph[n]) + except exceptions.CircularDependency as e: + self.assertEqual(trace, e.trace) + + def test_depends_cycle(self): + graph = {'A1': ['A1']} + self.assertCircularDependency( + ['A1', 'A1'], + 'A1', + graph, + ) + graph = {'A1': [], + 'A2': ['A1', 'A2'], + 'A3': ['A2']} + self.assertCircularDependency( + ['A2', 'A2'], + 'A3', + graph, + ) + graph = {'A1': [], + 'A2': ['A1'], + 'A3': ['A2', 'A3'], + 'A4': ['A3']} + self.assertCircularDependency( + ['A3', 'A3'], + 'A4', + graph, + ) + graph = {'A1': ['B1'], + 'B1': ['A1']} + self.assertCircularDependency( + ['A1', 'B1', 'A1'], + 'A1', + graph, + ) + graph = {'A1': [], + 'A2': ['A1', 'B2'], + 'A3': ['A2'], + 'B1': [], + 'B2': ['B1', 'A2'], + 'B3': ['B2']} + self.assertCircularDependency( + ['A2', 'B2', 'A2'], + 'A3', + graph, + ) + graph = {'A1': [], + 'A2': ['A1', 'B3'], + 'A3': ['A2'], + 'B1': [], + 'B2': ['B1', 'A2'], + 'B3': ['B2']} + self.assertCircularDependency( + ['A2', 'B3', 'B2', 'A2'], + 'A3', + graph, + ) + graph = {'A1': [], + 'A2': ['A1'], + 'A3': ['A2', 'B2'], + 'A4': ['A3'], + 'B1': ['A3'], + 'B2': ['B1']} + self.assertCircularDependency( + ['A3', 'B2', 'B1', 'A3'], + 'A4', + graph, + ) + +class TestManualChanges(Monkeypatcher): + installed_apps = ["fakeapp", "otherfakeapp"] + + def test_suggest_name(self): + migrations = Migrations('fakeapp') + change = ManualChanges(migrations, + [], + ['fakeapp.slug'], + []) + self.assertEquals(change.suggest_name(), + 'add_field_fakeapp_slug') + + change = ManualChanges(migrations, + [], + [], + ['fakeapp.slug']) + self.assertEquals(change.suggest_name(), + 'add_index_fakeapp_slug') diff --git a/awx/lib/site-packages/south/tests/non_managed/__init__.py b/awx/lib/site-packages/south/tests/non_managed/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/south/tests/non_managed/migrations/__init__.py b/awx/lib/site-packages/south/tests/non_managed/migrations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/south/tests/non_managed/models.py b/awx/lib/site-packages/south/tests/non_managed/models.py new file mode 100644 index 0000000000..e520d94b70 --- /dev/null +++ b/awx/lib/site-packages/south/tests/non_managed/models.py @@ -0,0 +1,16 @@ +# -*- coding: UTF-8 -*- + +""" +An app with a model that is not managed for testing that South does +not try to manage it in any way +""" +from django.db import models + +class Legacy(models.Model): + + name = models.CharField(max_length=10) + size = models.IntegerField() + + class Meta: + db_table = "legacy_table" + managed = False diff --git a/awx/lib/site-packages/south/tests/otherfakeapp/__init__.py b/awx/lib/site-packages/south/tests/otherfakeapp/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/south/tests/otherfakeapp/migrations/0001_first.py b/awx/lib/site-packages/south/tests/otherfakeapp/migrations/0001_first.py new file mode 100644 index 0000000000..ad9c09599c --- /dev/null +++ b/awx/lib/site-packages/south/tests/otherfakeapp/migrations/0001_first.py @@ -0,0 +1,15 @@ +from south.db import db +from django.db import models + +class Migration: + + depends_on = ( + ("fakeapp", "0001_spam"), + ) + + def forwards(self): + pass + + def backwards(self): + pass + diff --git a/awx/lib/site-packages/south/tests/otherfakeapp/migrations/0002_second.py 
b/awx/lib/site-packages/south/tests/otherfakeapp/migrations/0002_second.py new file mode 100644 index 0000000000..7c0fb0cf2f --- /dev/null +++ b/awx/lib/site-packages/south/tests/otherfakeapp/migrations/0002_second.py @@ -0,0 +1,11 @@ +from south.db import db +from django.db import models + +class Migration: + + def forwards(self): + pass + + def backwards(self): + pass + diff --git a/awx/lib/site-packages/south/tests/otherfakeapp/migrations/0003_third.py b/awx/lib/site-packages/south/tests/otherfakeapp/migrations/0003_third.py new file mode 100644 index 0000000000..fa8ed97639 --- /dev/null +++ b/awx/lib/site-packages/south/tests/otherfakeapp/migrations/0003_third.py @@ -0,0 +1,14 @@ +from south.db import db +from django.db import models + +class Migration: + + depends_on = ( + ("fakeapp", "0003_alter_spam"), + ) + + def forwards(self): + pass + + def backwards(self): + pass diff --git a/awx/lib/site-packages/south/tests/otherfakeapp/migrations/__init__.py b/awx/lib/site-packages/south/tests/otherfakeapp/migrations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/south/tests/otherfakeapp/models.py b/awx/lib/site-packages/south/tests/otherfakeapp/models.py new file mode 100644 index 0000000000..93a4b8edf2 --- /dev/null +++ b/awx/lib/site-packages/south/tests/otherfakeapp/models.py @@ -0,0 +1 @@ +# This file left intentionally blank. \ No newline at end of file diff --git a/awx/lib/site-packages/south/utils/__init__.py b/awx/lib/site-packages/south/utils/__init__.py new file mode 100644 index 0000000000..c3c5191633 --- /dev/null +++ b/awx/lib/site-packages/south/utils/__init__.py @@ -0,0 +1,73 @@ +""" +Generally helpful utility functions. +""" + + +def _ask_for_it_by_name(name): + "Returns an object referenced by absolute path." + bits = name.split(".") + + ## what if there is no absolute reference? + if len(bits)>1: + modulename = ".".join(bits[:-1]) + else: + modulename=bits[0] + + module = __import__(modulename, {}, {}, bits[-1]) + + if len(bits) == 1: + return module + else: + return getattr(module, bits[-1]) + + +def ask_for_it_by_name(name): + "Returns an object referenced by absolute path. (Memoised outer wrapper)" + if name not in ask_for_it_by_name.cache: + ask_for_it_by_name.cache[name] = _ask_for_it_by_name(name) + return ask_for_it_by_name.cache[name] +ask_for_it_by_name.cache = {} + + +def get_attribute(item, attribute): + """ + Like getattr, but recursive (i.e. you can ask for 'foo.bar.yay'.) + """ + value = item + for part in attribute.split("."): + value = getattr(value, part) + return value + +def auto_through(field): + "Returns if the M2M class passed in has an autogenerated through table or not." + return ( + # Django 1.0/1.1 + (not field.rel.through) + or + # Django 1.2+ + getattr(getattr(field.rel.through, "_meta", None), "auto_created", False) + ) + +def auto_model(model): + "Returns if the given model was automatically generated." + return getattr(model._meta, "auto_created", False) + +def memoize(function): + "Standard memoization decorator." 
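+    # The wrapper caches the method's result on the instance under a
+    # leading-underscore attribute, so repeat calls are plain attribute
+    # reads. A hedged usage sketch (the class below is illustrative only):
+    #
+    #     class Example(object):
+    #         @memoize
+    #         def value(self):
+    #             return expensive_computation()
+    #
+    #     e = Example()
+    #     e.value(); e.value()  # computed once, then cached as e._value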
+ name = function.__name__ + _name = '_' + name + + def method(self): + if not hasattr(self, _name): + value = function(self) + setattr(self, _name, value) + return getattr(self, _name) + + def invalidate(): + if hasattr(method, _name): + delattr(method, _name) + + method.__name__ = function.__name__ + method.__doc__ = function.__doc__ + method._invalidate = invalidate + return method diff --git a/awx/lib/site-packages/south/utils/datetime_utils.py b/awx/lib/site-packages/south/utils/datetime_utils.py new file mode 100644 index 0000000000..a416935941 --- /dev/null +++ b/awx/lib/site-packages/south/utils/datetime_utils.py @@ -0,0 +1,28 @@ +from datetime import * + +import django +from django.conf import settings + +if django.VERSION[:2] >= (1, 4) and getattr(settings, 'USE_TZ', False): + from django.utils import timezone + from datetime import datetime as _datetime + + class datetime(_datetime): + """ + A custom datetime.datetime class which acts as a compatibility + layer between South and Django 1.4's timezone aware datetime + instances. + + It basically adds the default timezone (as configured in Django's + settings) automatically if no tzinfo is given. + """ + def __new__(cls, year, month, day, + hour=0, minute=0, second=0, microsecond=0, tzinfo=None): + + dt = _datetime(year, month, day, + hour, minute, second, microsecond, + tzinfo=tzinfo) + if tzinfo is None: + default_timezone = timezone.get_default_timezone() + dt = timezone.make_aware(dt, default_timezone) + return dt diff --git a/awx/lib/site-packages/south/utils/py3.py b/awx/lib/site-packages/south/utils/py3.py new file mode 100644 index 0000000000..9c5baaded0 --- /dev/null +++ b/awx/lib/site-packages/south/utils/py3.py @@ -0,0 +1,21 @@ +""" +Python 2 + 3 compatibility functions. This is a very small subset of six. +""" + +import sys + +PY3 = sys.version_info[0] == 3 + +if PY3: + string_types = str, + text_type = str + raw_input = input + +else: + string_types = basestring, + text_type = unicode + raw_input = raw_input + +def with_metaclass(meta, base=object): + """Create a base class with a metaclass.""" + return meta("NewBase", (base,), {}) diff --git a/awx/lib/site-packages/south/v2.py b/awx/lib/site-packages/south/v2.py new file mode 100644 index 0000000000..464605284e --- /dev/null +++ b/awx/lib/site-packages/south/v2.py @@ -0,0 +1,19 @@ +""" +API versioning file; we can tell what kind of migrations things are +by what class they inherit from (if none, it's a v1). +""" + +from south.utils import ask_for_it_by_name + +class BaseMigration(object): + + def gf(self, field_name): + "Gets a field by absolute reference." 
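+        # Resolves a dotted path to the field class, e.g.
+        # self.gf('django.db.models.fields.AutoField'), as the taggit
+        # migrations later in this diff do.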
+ return ask_for_it_by_name(field_name) + +class SchemaMigration(BaseMigration): + pass + +class DataMigration(BaseMigration): + # Data migrations shouldn't be dry-run + no_dry_run = True diff --git a/awx/lib/site-packages/taggit/__init__.py b/awx/lib/site-packages/taggit/__init__.py new file mode 100644 index 0000000000..4c4993466d --- /dev/null +++ b/awx/lib/site-packages/taggit/__init__.py @@ -0,0 +1 @@ +VERSION = (0, 10, 0, 'alpha', 1) diff --git a/awx/lib/site-packages/taggit/admin.py b/awx/lib/site-packages/taggit/admin.py new file mode 100644 index 0000000000..6c012d6fe3 --- /dev/null +++ b/awx/lib/site-packages/taggit/admin.py @@ -0,0 +1,19 @@ +from django.contrib import admin + +from taggit.models import Tag, TaggedItem + + +class TaggedItemInline(admin.StackedInline): + model = TaggedItem + +class TagAdmin(admin.ModelAdmin): + inlines = [ + TaggedItemInline + ] + list_display = ["name", "slug"] + ordering = ["name", "slug"] + search_fields = ["name"] + prepopulated_fields = {"slug": ["name"]} + + +admin.site.register(Tag, TagAdmin) diff --git a/awx/lib/site-packages/taggit/forms.py b/awx/lib/site-packages/taggit/forms.py new file mode 100644 index 0000000000..e0198bd933 --- /dev/null +++ b/awx/lib/site-packages/taggit/forms.py @@ -0,0 +1,21 @@ +from django import forms +from django.utils.translation import ugettext as _ + +from taggit.utils import parse_tags, edit_string_for_tags + + +class TagWidget(forms.TextInput): + def render(self, name, value, attrs=None): + if value is not None and not isinstance(value, basestring): + value = edit_string_for_tags([o.tag for o in value.select_related("tag")]) + return super(TagWidget, self).render(name, value, attrs) + +class TagField(forms.CharField): + widget = TagWidget + + def clean(self, value): + value = super(TagField, self).clean(value) + try: + return parse_tags(value) + except ValueError: + raise forms.ValidationError(_("Please provide a comma-separated list of tags.")) diff --git a/awx/lib/site-packages/taggit/locale/de/LC_MESSAGES/django.mo b/awx/lib/site-packages/taggit/locale/de/LC_MESSAGES/django.mo new file mode 100644 index 0000000000..3d9eaaf534 Binary files /dev/null and b/awx/lib/site-packages/taggit/locale/de/LC_MESSAGES/django.mo differ diff --git a/awx/lib/site-packages/taggit/locale/de/LC_MESSAGES/django.po b/awx/lib/site-packages/taggit/locale/de/LC_MESSAGES/django.po new file mode 100644 index 0000000000..98ecdac5a8 --- /dev/null +++ b/awx/lib/site-packages/taggit/locale/de/LC_MESSAGES/django.po @@ -0,0 +1,67 @@ +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: django-taggit\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2010-09-07 09:26-0700\n" +"PO-Revision-Date: 2010-09-07 09:26-0700\n" +"Last-Translator: Jannis Leidel <jannis@leidel.info>\n" +"Language-Team: German <de@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1)\n" + +#: forms.py:20 +msgid "Please provide a comma-separated list of tags." +msgstr "Bitte eine durch Komma getrennte Schlagwortliste eingeben." + +#: managers.py:39 managers.py:83 models.py:50 +msgid "Tags" +msgstr "Schlagwörter" + +#: managers.py:84 +msgid "A comma-separated list of tags." +msgstr "Eine durch Komma getrennte Schlagwortliste." 
+ +#: models.py:10 +msgid "Name" +msgstr "Name" + +#: models.py:11 +msgid "Slug" +msgstr "Kürzel" + +#: models.py:49 +msgid "Tag" +msgstr "Schlagwort" + +#: models.py:56 +#, python-format +msgid "%(object)s tagged with %(tag)s" +msgstr "%(object)s verschlagwortet mit %(tag)s" + +#: models.py:100 +msgid "Object id" +msgstr "Objekt-ID" + +#: models.py:104 models.py:110 +msgid "Content type" +msgstr "Inhaltstyp" + +#: models.py:138 +msgid "Tagged Item" +msgstr "Verschlagwortetes Objekt" + +#: models.py:139 +msgid "Tagged Items" +msgstr "Verschlagwortete Objekte" + +#: contrib/suggest/models.py:57 +msgid "" +"Enter a valid Regular Expression. To make it case-insensitive include \"(?i)" +"\" in your expression." +msgstr "" +"Bitte einen regulären Ausdruck eingeben. Fügen Sie \"(?i) \" dem " +"Ausdruck hinzu, um nicht zwischen Groß- und Kleinschreibung zu " +"unterscheiden." diff --git a/awx/lib/site-packages/taggit/locale/en/LC_MESSAGES/django.po b/awx/lib/site-packages/taggit/locale/en/LC_MESSAGES/django.po new file mode 100644 index 0000000000..c5642c7d05 --- /dev/null +++ b/awx/lib/site-packages/taggit/locale/en/LC_MESSAGES/django.po @@ -0,0 +1,68 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2010-09-07 09:45-0700\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +#: forms.py:20 +msgid "Please provide a comma-separated list of tags." +msgstr "" + +#: managers.py:39 managers.py:83 models.py:50 +msgid "Tags" +msgstr "" + +#: managers.py:84 +msgid "A comma-separated list of tags." +msgstr "" + +#: models.py:10 +msgid "Name" +msgstr "" + +#: models.py:11 +msgid "Slug" +msgstr "" + +#: models.py:49 +msgid "Tag" +msgstr "" + +#: models.py:56 +#, python-format +msgid "%(object)s tagged with %(tag)s" +msgstr "" + +#: models.py:100 +msgid "Object id" +msgstr "" + +#: models.py:104 models.py:110 +msgid "Content type" +msgstr "" + +#: models.py:138 +msgid "Tagged Item" +msgstr "" + +#: models.py:139 +msgid "Tagged Items" +msgstr "" + +#: contrib/suggest/models.py:57 +msgid "" +"Enter a valid Regular Expression. To make it case-insensitive include \"(?i)" +"\" in your expression." +msgstr "" diff --git a/awx/lib/site-packages/taggit/locale/he/LC_MESSAGES/django.mo b/awx/lib/site-packages/taggit/locale/he/LC_MESSAGES/django.mo new file mode 100644 index 0000000000..562db710ea Binary files /dev/null and b/awx/lib/site-packages/taggit/locale/he/LC_MESSAGES/django.mo differ diff --git a/awx/lib/site-packages/taggit/locale/he/LC_MESSAGES/django.po b/awx/lib/site-packages/taggit/locale/he/LC_MESSAGES/django.po new file mode 100644 index 0000000000..e27a878f4a --- /dev/null +++ b/awx/lib/site-packages/taggit/locale/he/LC_MESSAGES/django.po @@ -0,0 +1,69 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR. 
+# +msgid "" +msgstr "" +"Project-Id-Version: Django Taggit\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2010-06-26 12:47-0500\n" +"PO-Revision-Date: 2010-06-26 12:54-0600\n" +"Last-Translator: Alex <alex.gaynor@gmail.com>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#: forms.py:20 +msgid "Please provide a comma-separated list of tags." +msgstr "נא לספק רשימה של תגים מופרדת עם פסיקים." + +#: managers.py:41 +#: managers.py:113 +#: models.py:18 +msgid "Tags" +msgstr "תגיות" + +#: managers.py:114 +msgid "A comma-separated list of tags." +msgstr "רשימה של תגים מופרדת עם פסיקים." + +#: models.py:10 +msgid "Name" +msgstr "שם" + +#: models.py:11 +msgid "Slug" +msgstr "" + +#: models.py:17 +msgid "Tag" +msgstr "תג" + +#: models.py:56 +#, python-format +msgid "%(object)s tagged with %(tag)s" +msgstr "%(object)s מתויג עם %(tag)s" + +#: models.py:86 +msgid "Object id" +msgstr "" + +#: models.py:87 +msgid "Content type" +msgstr "" + +#: models.py:92 +msgid "Tagged Item" +msgstr "" + +#: models.py:93 +msgid "Tagged Items" +msgstr "" + +#: contrib/suggest/models.py:57 +msgid "Enter a valid Regular Expression. To make it case-insensitive include \"(?i)\" in your expression." +msgstr "" + diff --git a/awx/lib/site-packages/taggit/locale/nb/LC_MESSAGES/django.mo b/awx/lib/site-packages/taggit/locale/nb/LC_MESSAGES/django.mo new file mode 100644 index 0000000000..237612d5e9 Binary files /dev/null and b/awx/lib/site-packages/taggit/locale/nb/LC_MESSAGES/django.mo differ diff --git a/awx/lib/site-packages/taggit/locale/nb/LC_MESSAGES/django.po b/awx/lib/site-packages/taggit/locale/nb/LC_MESSAGES/django.po new file mode 100644 index 0000000000..a1ba12858e --- /dev/null +++ b/awx/lib/site-packages/taggit/locale/nb/LC_MESSAGES/django.po @@ -0,0 +1,72 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR. +# +msgid "" +msgstr "" +"Project-Id-Version: 0.9.3\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2010-09-07 09:45-0700\n" +"PO-Revision-Date: 2012-12-08 14:42+0100\n" +"Last-Translator: Bjørn Pettersen <bp@datakortet.no>\n" +"Language-Team: Norwegian <bp@datakortet.no>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"X-Generator: Poedit 1.5.4\n" +"Language: Norwegian\n" + +#: forms.py:20 +msgid "Please provide a comma-separated list of tags." +msgstr "Vennligst oppgi en kommaseparert tagg-liste." + +#: managers.py:39 managers.py:83 models.py:50 +msgid "Tags" +msgstr "Tagger" + +#: managers.py:84 +msgid "A comma-separated list of tags." +msgstr "En kommaseparert tagg-liste." + +#: models.py:10 +msgid "Name" +msgstr "Navn" + +#: models.py:11 +msgid "Slug" +msgstr "Slug" + +#: models.py:49 +msgid "Tag" +msgstr "Tagg" + +#: models.py:56 +#, python-format +msgid "%(object)s tagged with %(tag)s" +msgstr "%(object)s tagget med %(tag)s" + +#: models.py:100 +msgid "Object id" +msgstr "Objekt-id" + +#: models.py:104 models.py:110 +msgid "Content type" +msgstr "Innholdstype" + +#: models.py:138 +msgid "Tagged Item" +msgstr "Tagget Element" + +#: models.py:139 +msgid "Tagged Items" +msgstr "Taggede Elementer" + +#: contrib/suggest/models.py:57 +msgid "" +"Enter a valid Regular Expression. To make it case-insensitive include \"(?" +"i)\" in your expression." 
+msgstr "" +"Skriv et gyldig regulært utrykk (regex). For å gjøre det uavhengig av " +"forskjellen mellom store og små bokstaver må du inkludere \"(?i)\" i din " +"regex." diff --git a/awx/lib/site-packages/taggit/locale/nl/LC_MESSAGES/django.mo b/awx/lib/site-packages/taggit/locale/nl/LC_MESSAGES/django.mo new file mode 100644 index 0000000000..28e7b7e434 Binary files /dev/null and b/awx/lib/site-packages/taggit/locale/nl/LC_MESSAGES/django.mo differ diff --git a/awx/lib/site-packages/taggit/locale/nl/LC_MESSAGES/django.po b/awx/lib/site-packages/taggit/locale/nl/LC_MESSAGES/django.po new file mode 100644 index 0000000000..7871b0bd7d --- /dev/null +++ b/awx/lib/site-packages/taggit/locale/nl/LC_MESSAGES/django.po @@ -0,0 +1,64 @@ +msgid "" +msgstr "" +"Project-Id-Version: django-taggit\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2010-09-07 09:45-0700\n" +"PO-Revision-Date: 2010-09-07 23:04+0100\n" +"Last-Translator: Jeffrey Gelens <jeffrey@gelens.org>\n" +"Language-Team: Dutch\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +#: forms.py:20 +msgid "Please provide a comma-separated list of tags." +msgstr "Geef een door komma gescheiden lijst van tags." + +#: managers.py:39 +#: managers.py:83 +#: models.py:50 +msgid "Tags" +msgstr "Tags" + +#: managers.py:84 +msgid "A comma-separated list of tags." +msgstr "Een door komma gescheiden lijst van tags." + +#: models.py:10 +msgid "Name" +msgstr "Naam" + +#: models.py:11 +msgid "Slug" +msgstr "Slug" + +#: models.py:49 +msgid "Tag" +msgstr "Tag" + +#: models.py:56 +#, python-format +msgid "%(object)s tagged with %(tag)s" +msgstr "%(object)s getagged met %(tag)s" + +#: models.py:100 +msgid "Object id" +msgstr "Object-id" + +#: models.py:104 +#: models.py:110 +msgid "Content type" +msgstr "Inhoudstype" + +#: models.py:138 +msgid "Tagged Item" +msgstr "Object getagged" + +#: models.py:139 +msgid "Tagged Items" +msgstr "Objecten getagged" + +#: contrib/suggest/models.py:57 +msgid "Enter a valid Regular Expression. To make it case-insensitive include \"(?i)\" in your expression." +msgstr "Voer een valide reguliere expressie in. Voeg \"(?i)\" aan de expressie toe om deze hoofdletter ongevoelig te maken." + diff --git a/awx/lib/site-packages/taggit/locale/pt_BR/LC_MESSAGES/django.mo b/awx/lib/site-packages/taggit/locale/pt_BR/LC_MESSAGES/django.mo new file mode 100644 index 0000000000..c78bcb81d9 Binary files /dev/null and b/awx/lib/site-packages/taggit/locale/pt_BR/LC_MESSAGES/django.mo differ diff --git a/awx/lib/site-packages/taggit/locale/pt_BR/LC_MESSAGES/django.po b/awx/lib/site-packages/taggit/locale/pt_BR/LC_MESSAGES/django.po new file mode 100644 index 0000000000..8804bbf5eb --- /dev/null +++ b/awx/lib/site-packages/taggit/locale/pt_BR/LC_MESSAGES/django.po @@ -0,0 +1,62 @@ +# This file is distributed under WTFPL license. +# +# Translators: +# RPB <r@ifgy.co>, 2013. +msgid "" +msgstr "" +"Project-Id-Version: django-taggit\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2013-01-15 22:25-0200\n" +"PO-Revision-Date: 2013-01-12 18:11-0200\n" +"Last-Translator: RPB <r@ifgy.co>\n" +"Language-Team: Portuguese (Brazil) <r@ifgy.co>\n" +"Language: pt_BR\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n > 1)\n" + +#: forms.py:21 +msgid "Please provide a comma-separated list of tags." +msgstr "Favor fornecer uma lista de marcadores separados por vírgula." 
+ +#: managers.py:39 models.py:57 +msgid "Tags" +msgstr "Marcadores" + +#: managers.py:40 +msgid "A comma-separated list of tags." +msgstr "Uma lista de marcadores separados por vírgula." + +#: models.py:10 +msgid "Name" +msgstr "Nome" + +#: models.py:11 +msgid "Slug" +msgstr "Slug" + +#: models.py:56 +msgid "Tag" +msgstr "Marcador" + +#: models.py:63 +#, python-format +msgid "%(object)s tagged with %(tag)s" +msgstr "%(object)s marcados com %(tag)s" + +#: models.py:113 +msgid "Object id" +msgstr "Id do objeto" + +#: models.py:117 models.py:123 +msgid "Content type" +msgstr "Tipo de conteúdo" + +#: models.py:159 +msgid "Tagged Item" +msgstr "Item marcado" + +#: models.py:160 +msgid "Tagged Items" +msgstr "Itens marcados" diff --git a/awx/lib/site-packages/taggit/locale/ru/LC_MESSAGES/django.mo b/awx/lib/site-packages/taggit/locale/ru/LC_MESSAGES/django.mo new file mode 100644 index 0000000000..61a7e39e43 Binary files /dev/null and b/awx/lib/site-packages/taggit/locale/ru/LC_MESSAGES/django.mo differ diff --git a/awx/lib/site-packages/taggit/locale/ru/LC_MESSAGES/django.po b/awx/lib/site-packages/taggit/locale/ru/LC_MESSAGES/django.po new file mode 100644 index 0000000000..42e3ebe7e0 --- /dev/null +++ b/awx/lib/site-packages/taggit/locale/ru/LC_MESSAGES/django.po @@ -0,0 +1,70 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR. +# +msgid "" +msgstr "" +"Project-Id-Version: Django Taggit\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2010-06-11 11:28+0700\n" +"PO-Revision-Date: 2010-06-11 11:30+0700\n" +"Last-Translator: Igor 'idle sign' Starikov <idlesign@yandex.ru>\n" +"Language-Team: \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=3; plural=n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2;\n" +"X-Poedit-Language: Russian\n" + +#: forms.py:20 +msgid "Please provide a comma-separated list of tags." +msgstr "Укажите метки через запятую." + +#: managers.py:41 +#: managers.py:101 +#: models.py:17 +msgid "Tags" +msgstr "Метки" + +#: managers.py:102 +msgid "A comma-separated list of tags." +msgstr "Список меток через запятую." + +#: models.py:9 +msgid "Name" +msgstr "Название" + +#: models.py:10 +msgid "Slug" +msgstr "Слаг" + +#: models.py:16 +msgid "Tag" +msgstr "Метка" + +#: models.py:55 +#, python-format +msgid "%(object)s tagged with %(tag)s" +msgstr "элемент «%(object)s» с меткой «%(tag)s»" + +#: models.py:82 +msgid "Object id" +msgstr "ID объекта" + +#: models.py:83 +msgid "Content type" +msgstr "Тип содержимого" + +#: models.py:87 +msgid "Tagged Item" +msgstr "Элемент с меткой" + +#: models.py:88 +msgid "Tagged Items" +msgstr "Элементы с меткой" + +#: contrib/suggest/models.py:57 +msgid "Enter a valid Regular Expression. To make it case-insensitive include \"(?i)\" in your expression." +msgstr "Введите регулярное выражение. Чтобы сделать его чувствительным к регистру укажите \"(?i)\"." 
+ diff --git a/awx/lib/site-packages/taggit/managers.py b/awx/lib/site-packages/taggit/managers.py new file mode 100644 index 0000000000..ca1700fd96 --- /dev/null +++ b/awx/lib/site-packages/taggit/managers.py @@ -0,0 +1,245 @@ +from django.contrib.contenttypes.generic import GenericRelation +from django.contrib.contenttypes.models import ContentType +from django.db import models +from django.db.models.fields.related import ManyToManyRel, RelatedField, add_lazy_relation +from django.db.models.related import RelatedObject +from django.utils.text import capfirst +from django.utils.translation import ugettext_lazy as _ + +from taggit.forms import TagField +from taggit.models import TaggedItem, GenericTaggedItemBase +from taggit.utils import require_instance_manager + + +try: + all +except NameError: + # 2.4 compat + try: + from django.utils.itercompat import all + except ImportError: + # 1.1.X compat + def all(iterable): + for item in iterable: + if not item: + return False + return True + + +class TaggableRel(ManyToManyRel): + def __init__(self): + self.related_name = None + self.limit_choices_to = {} + self.symmetrical = True + self.multiple = True + self.through = None + + +class TaggableManager(RelatedField): + def __init__(self, verbose_name=_("Tags"), + help_text=_("A comma-separated list of tags."), through=None, blank=False): + self.through = through or TaggedItem + self.rel = TaggableRel() + self.verbose_name = verbose_name + self.help_text = help_text + self.blank = blank + self.editable = True + self.unique = False + self.creates_table = False + self.db_column = None + self.choices = None + self.serialize = False + self.null = True + self.creation_counter = models.Field.creation_counter + models.Field.creation_counter += 1 + + def __get__(self, instance, model): + if instance is not None and instance.pk is None: + raise ValueError("%s objects need to have a primary key value " + "before you can access their tags." 
% model.__name__) + manager = _TaggableManager( + through=self.through, model=model, instance=instance + ) + return manager + + def contribute_to_class(self, cls, name): + self.name = self.column = name + self.model = cls + cls._meta.add_field(self) + setattr(cls, name, self) + if not cls._meta.abstract: + if isinstance(self.through, basestring): + def resolve_related_class(field, model, cls): + self.through = model + self.post_through_setup(cls) + add_lazy_relation( + cls, self, self.through, resolve_related_class + ) + else: + self.post_through_setup(cls) + + def post_through_setup(self, cls): + self.use_gfk = ( + self.through is None or issubclass(self.through, GenericTaggedItemBase) + ) + self.rel.to = self.through._meta.get_field("tag").rel.to + self.related = RelatedObject(self.through, cls, self) + if self.use_gfk: + tagged_items = GenericRelation(self.through) + tagged_items.contribute_to_class(cls, "tagged_items") + + def save_form_data(self, instance, value): + getattr(instance, self.name).set(*value) + + def formfield(self, form_class=TagField, **kwargs): + defaults = { + "label": capfirst(self.verbose_name), + "help_text": self.help_text, + "required": not self.blank + } + defaults.update(kwargs) + return form_class(**defaults) + + def value_from_object(self, instance): + if instance.pk: + return self.through.objects.filter(**self.through.lookup_kwargs(instance)) + return self.through.objects.none() + + def related_query_name(self): + return self.model._meta.module_name + + def m2m_reverse_name(self): + return self.through._meta.get_field_by_name("tag")[0].column + + def m2m_target_field_name(self): + return self.model._meta.pk.name + + def m2m_reverse_target_field_name(self): + return self.rel.to._meta.pk.name + + def m2m_column_name(self): + if self.use_gfk: + return self.through._meta.virtual_fields[0].fk_field + return self.through._meta.get_field('content_object').column + + def db_type(self, connection=None): + return None + + def m2m_db_table(self): + return self.through._meta.db_table + + def extra_filters(self, pieces, pos, negate): + if negate or not self.use_gfk: + return [] + prefix = "__".join(["tagged_items"] + pieces[:pos-2]) + cts = map(ContentType.objects.get_for_model, _get_subclasses(self.model)) + if len(cts) == 1: + return [("%s__content_type" % prefix, cts[0])] + return [("%s__content_type__in" % prefix, cts)] + + def bulk_related_objects(self, new_objs, using): + return [] + + +class _TaggableManager(models.Manager): + def __init__(self, through, model, instance): + self.through = through + self.model = model + self.instance = instance + + def get_query_set(self): + return self.through.tags_for(self.model, self.instance) + + def _lookup_kwargs(self): + return self.through.lookup_kwargs(self.instance) + + @require_instance_manager + def add(self, *tags): + str_tags = set([ + t + for t in tags + if not isinstance(t, self.through.tag_model()) + ]) + tag_objs = set(tags) - str_tags + # If str_tags has 0 elements Django actually optimizes that to not do a + # query. Malcolm is very smart. 
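+        # add() therefore: fetches the existing Tag rows matching the string
+        # arguments, creates Tag rows for any names not yet present, then
+        # get_or_creates the through rows linking each tag to this instance.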
+ existing = self.through.tag_model().objects.filter( + name__in=str_tags + ) + tag_objs.update(existing) + + for new_tag in str_tags - set(t.name for t in existing): + tag_objs.add(self.through.tag_model().objects.create(name=new_tag)) + + for tag in tag_objs: + self.through.objects.get_or_create(tag=tag, **self._lookup_kwargs()) + + @require_instance_manager + def set(self, *tags): + self.clear() + self.add(*tags) + + @require_instance_manager + def remove(self, *tags): + self.through.objects.filter(**self._lookup_kwargs()).filter( + tag__name__in=tags).delete() + + @require_instance_manager + def clear(self): + self.through.objects.filter(**self._lookup_kwargs()).delete() + + def most_common(self): + return self.get_query_set().annotate( + num_times=models.Count(self.through.tag_relname()) + ).order_by('-num_times') + + @require_instance_manager + def similar_objects(self): + lookup_kwargs = self._lookup_kwargs() + lookup_keys = sorted(lookup_kwargs) + qs = self.through.objects.values(*lookup_kwargs.keys()) + qs = qs.annotate(n=models.Count('pk')) + qs = qs.exclude(**lookup_kwargs) + qs = qs.filter(tag__in=self.all()) + qs = qs.order_by('-n') + + # TODO: This all feels like a bit of a hack. + items = {} + if len(lookup_keys) == 1: + # Can we do this without a second query by using a select_related() + # somehow? + f = self.through._meta.get_field_by_name(lookup_keys[0])[0] + objs = f.rel.to._default_manager.filter(**{ + "%s__in" % f.rel.field_name: [r["content_object"] for r in qs] + }) + for obj in objs: + items[(getattr(obj, f.rel.field_name),)] = obj + else: + preload = {} + for result in qs: + preload.setdefault(result['content_type'], set()) + preload[result["content_type"]].add(result["object_id"]) + + for ct, obj_ids in preload.iteritems(): + ct = ContentType.objects.get_for_id(ct) + for obj in ct.model_class()._default_manager.filter(pk__in=obj_ids): + items[(ct.pk, obj.pk)] = obj + + results = [] + for result in qs: + obj = items[ + tuple(result[k] for k in lookup_keys) + ] + obj.similar_tags = result["n"] + results.append(obj) + return results + + +def _get_subclasses(model): + subclasses = [model] + for f in model._meta.get_all_field_names(): + field = model._meta.get_field_by_name(f)[0] + if (isinstance(field, RelatedObject) and + getattr(field.field.rel, "parent_link", None)): + subclasses.extend(_get_subclasses(field.model)) + return subclasses diff --git a/awx/lib/site-packages/taggit/migrations/0001_initial.py b/awx/lib/site-packages/taggit/migrations/0001_initial.py new file mode 100644 index 0000000000..666ca6199e --- /dev/null +++ b/awx/lib/site-packages/taggit/migrations/0001_initial.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + + +class Migration(SchemaMigration): + + def forwards(self, orm): + # Adding model 'Tag' + db.create_table(u'taggit_tag', ( + (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('name', self.gf('django.db.models.fields.CharField')(max_length=100)), + ('slug', self.gf('django.db.models.fields.SlugField')(unique=True, max_length=100)), + )) + db.send_create_signal(u'taggit', ['Tag']) + + # Adding model 'TaggedItem' + db.create_table(u'taggit_taggeditem', ( + (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('tag', self.gf('django.db.models.fields.related.ForeignKey')(related_name=u'taggit_taggeditem_items', to=orm['taggit.Tag'])), + ('object_id', 
self.gf('django.db.models.fields.IntegerField')(db_index=True)), + ('content_type', self.gf('django.db.models.fields.related.ForeignKey')(related_name=u'taggit_taggeditem_tagged_items', to=orm['contenttypes.ContentType'])), + )) + db.send_create_signal(u'taggit', ['TaggedItem']) + + + def backwards(self, orm): + # Deleting model 'Tag' + db.delete_table(u'taggit_tag') + + # Deleting model 'TaggedItem' + db.delete_table(u'taggit_taggeditem') + + + models = { + u'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + u'taggit.tag': { + 'Meta': {'object_name': 'Tag'}, + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'}) + }, + u'taggit.taggeditem': { + 'Meta': {'object_name': 'TaggedItem'}, + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_tagged_items'", 'to': u"orm['contenttypes.ContentType']"}), + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}), + 'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_items'", 'to': u"orm['taggit.Tag']"}) + } + } + + complete_apps = ['taggit'] \ No newline at end of file diff --git a/awx/lib/site-packages/taggit/migrations/0002_unique_tagnames.py b/awx/lib/site-packages/taggit/migrations/0002_unique_tagnames.py new file mode 100644 index 0000000000..e5eb033b0a --- /dev/null +++ b/awx/lib/site-packages/taggit/migrations/0002_unique_tagnames.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + + +class Migration(SchemaMigration): + + def forwards(self, orm): + # Adding unique constraint on 'Tag', fields ['name'] + db.create_unique(u'taggit_tag', ['name']) + + + def backwards(self, orm): + # Removing unique constraint on 'Tag', fields ['name'] + db.delete_unique(u'taggit_tag', ['name']) + + + models = { + u'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + u'taggit.tag': { + 'Meta': {'object_name': 'Tag'}, + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}), + 'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'}) + }, + u'taggit.taggeditem': { + 'Meta': {'object_name': 'TaggedItem'}, + 'content_type': 
('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_tagged_items'", 'to': u"orm['contenttypes.ContentType']"}), + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}), + 'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_items'", 'to': u"orm['taggit.Tag']"}) + } + } + + complete_apps = ['taggit'] \ No newline at end of file diff --git a/awx/lib/site-packages/taggit/migrations/__init__.py b/awx/lib/site-packages/taggit/migrations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/taggit/models.py b/awx/lib/site-packages/taggit/models.py new file mode 100644 index 0000000000..581a5b1920 --- /dev/null +++ b/awx/lib/site-packages/taggit/models.py @@ -0,0 +1,160 @@ +import django +from django.contrib.contenttypes.models import ContentType +from django.contrib.contenttypes.generic import GenericForeignKey +from django.db import models, IntegrityError, transaction +from django.template.defaultfilters import slugify as default_slugify +from django.utils.translation import ugettext_lazy as _, ugettext + + +class TagBase(models.Model): + name = models.CharField(verbose_name=_('Name'), unique=True, max_length=100) + slug = models.SlugField(verbose_name=_('Slug'), unique=True, max_length=100) + + def __unicode__(self): + return self.name + + class Meta: + abstract = True + + def save(self, *args, **kwargs): + if not self.pk and not self.slug: + self.slug = self.slugify(self.name) + if django.VERSION >= (1, 2): + from django.db import router + using = kwargs.get("using") or router.db_for_write( + type(self), instance=self) + # Make sure we write to the same db for all attempted writes, + # with a multi-master setup, theoretically we could try to + # write and rollback on different DBs + kwargs["using"] = using + trans_kwargs = {"using": using} + else: + trans_kwargs = {} + i = 0 + while True: + i += 1 + try: + sid = transaction.savepoint(**trans_kwargs) + res = super(TagBase, self).save(*args, **kwargs) + transaction.savepoint_commit(sid, **trans_kwargs) + return res + except IntegrityError: + transaction.savepoint_rollback(sid, **trans_kwargs) + self.slug = self.slugify(self.name, i) + else: + return super(TagBase, self).save(*args, **kwargs) + + def slugify(self, tag, i=None): + slug = default_slugify(tag) + if i is not None: + slug += "_%d" % i + return slug + + +class Tag(TagBase): + class Meta: + verbose_name = _("Tag") + verbose_name_plural = _("Tags") + + + +class ItemBase(models.Model): + def __unicode__(self): + return ugettext("%(object)s tagged with %(tag)s") % { + "object": self.content_object, + "tag": self.tag + } + + class Meta: + abstract = True + + @classmethod + def tag_model(cls): + return cls._meta.get_field_by_name("tag")[0].rel.to + + @classmethod + def tag_relname(cls): + return cls._meta.get_field_by_name('tag')[0].rel.related_name + + @classmethod + def lookup_kwargs(cls, instance): + return { + 'content_object': instance + } + + @classmethod + def bulk_lookup_kwargs(cls, instances): + return { + "content_object__in": instances, + } + + +class TaggedItemBase(ItemBase): + if django.VERSION < (1, 2): + tag = models.ForeignKey(Tag, related_name="%(class)s_items") + else: + tag = models.ForeignKey(Tag, related_name="%(app_label)s_%(class)s_items") + + class Meta: + abstract = True + + @classmethod + def tags_for(cls, model, instance=None): + if instance 
is not None: + return cls.tag_model().objects.filter(**{ + '%s__content_object' % cls.tag_relname(): instance + }) + return cls.tag_model().objects.filter(**{ + '%s__content_object__isnull' % cls.tag_relname(): False + }).distinct() + + +class GenericTaggedItemBase(ItemBase): + object_id = models.IntegerField(verbose_name=_('Object id'), db_index=True) + if django.VERSION < (1, 2): + content_type = models.ForeignKey( + ContentType, + verbose_name=_('Content type'), + related_name="%(class)s_tagged_items" + ) + else: + content_type = models.ForeignKey( + ContentType, + verbose_name=_('Content type'), + related_name="%(app_label)s_%(class)s_tagged_items" + ) + content_object = GenericForeignKey() + + class Meta: + abstract=True + + @classmethod + def lookup_kwargs(cls, instance): + return { + 'object_id': instance.pk, + 'content_type': ContentType.objects.get_for_model(instance) + } + + @classmethod + def bulk_lookup_kwargs(cls, instances): + # TODO: instances[0], can we assume there are instances. + return { + "object_id__in": [instance.pk for instance in instances], + "content_type": ContentType.objects.get_for_model(instances[0]), + } + + @classmethod + def tags_for(cls, model, instance=None): + ct = ContentType.objects.get_for_model(model) + kwargs = { + "%s__content_type" % cls.tag_relname(): ct + } + if instance is not None: + kwargs["%s__object_id" % cls.tag_relname()] = instance.pk + return cls.tag_model().objects.filter(**kwargs).distinct() + + +class TaggedItem(GenericTaggedItemBase, TaggedItemBase): + class Meta: + verbose_name = _("Tagged Item") + verbose_name_plural = _("Tagged Items") diff --git a/awx/lib/site-packages/taggit/tests/__init__.py b/awx/lib/site-packages/taggit/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/taggit/tests/forms.py b/awx/lib/site-packages/taggit/tests/forms.py new file mode 100644 index 0000000000..2cdc6a8dca --- /dev/null +++ b/awx/lib/site-packages/taggit/tests/forms.py @@ -0,0 +1,20 @@ +from django import forms + +from taggit.tests.models import Food, DirectFood, CustomPKFood, OfficialFood + + +class FoodForm(forms.ModelForm): + class Meta: + model = Food + +class DirectFoodForm(forms.ModelForm): + class Meta: + model = DirectFood + +class CustomPKFoodForm(forms.ModelForm): + class Meta: + model = CustomPKFood + +class OfficialFoodForm(forms.ModelForm): + class Meta: + model = OfficialFood diff --git a/awx/lib/site-packages/taggit/tests/models.py b/awx/lib/site-packages/taggit/tests/models.py new file mode 100644 index 0000000000..a0e21e046f --- /dev/null +++ b/awx/lib/site-packages/taggit/tests/models.py @@ -0,0 +1,143 @@ +from django.db import models + +from taggit.managers import TaggableManager +from taggit.models import (TaggedItemBase, GenericTaggedItemBase, TaggedItem, + TagBase, Tag) + + +class Food(models.Model): + name = models.CharField(max_length=50) + + tags = TaggableManager() + + def __unicode__(self): + return self.name + +class Pet(models.Model): + name = models.CharField(max_length=50) + + tags = TaggableManager() + + def __unicode__(self): + return self.name + +class HousePet(Pet): + trained = models.BooleanField() + + +# Test direct-tagging with custom through model + +class TaggedFood(TaggedItemBase): + content_object = models.ForeignKey('DirectFood') + +class TaggedPet(TaggedItemBase): + content_object = models.ForeignKey('DirectPet') + +class DirectFood(models.Model): + name = models.CharField(max_length=50) + + tags = TaggableManager(through="TaggedFood") + +class 
DirectPet(models.Model): + name = models.CharField(max_length=50) + + tags = TaggableManager(through=TaggedPet) + + def __unicode__(self): + return self.name + +class DirectHousePet(DirectPet): + trained = models.BooleanField() + + +# Test custom through model to model with custom PK + +class TaggedCustomPKFood(TaggedItemBase): + content_object = models.ForeignKey('CustomPKFood') + +class TaggedCustomPKPet(TaggedItemBase): + content_object = models.ForeignKey('CustomPKPet') + +class CustomPKFood(models.Model): + name = models.CharField(max_length=50, primary_key=True) + + tags = TaggableManager(through=TaggedCustomPKFood) + + def __unicode__(self): + return self.name + +class CustomPKPet(models.Model): + name = models.CharField(max_length=50, primary_key=True) + + tags = TaggableManager(through=TaggedCustomPKPet) + + def __unicode__(self): + return self.name + +class CustomPKHousePet(CustomPKPet): + trained = models.BooleanField() + +# Test custom through model to a custom tag model + +class OfficialTag(TagBase): + official = models.BooleanField() + +class OfficialThroughModel(GenericTaggedItemBase): + tag = models.ForeignKey(OfficialTag, related_name="tagged_items") + +class OfficialFood(models.Model): + name = models.CharField(max_length=50) + + tags = TaggableManager(through=OfficialThroughModel) + + def __unicode__(self): + return self.name + +class OfficialPet(models.Model): + name = models.CharField(max_length=50) + + tags = TaggableManager(through=OfficialThroughModel) + + def __unicode__(self): + return self.name + +class OfficialHousePet(OfficialPet): + trained = models.BooleanField() + + +class Media(models.Model): + tags = TaggableManager() + + class Meta: + abstract = True + +class Photo(Media): + pass + +class Movie(Media): + pass + + +class ArticleTag(Tag): + class Meta: + proxy = True + + def slugify(self, tag, i=None): + slug = "category-%s" % tag.lower() + + if i is not None: + slug += "-%d" % i + return slug + +class ArticleTaggedItem(TaggedItem): + class Meta: + proxy = True + + @classmethod + def tag_model(cls): + return ArticleTag + +class Article(models.Model): + title = models.CharField(max_length=100) + + tags = TaggableManager(through=ArticleTaggedItem) diff --git a/awx/lib/site-packages/taggit/tests/runtests.py b/awx/lib/site-packages/taggit/tests/runtests.py new file mode 100644 index 0000000000..3e52cf18fe --- /dev/null +++ b/awx/lib/site-packages/taggit/tests/runtests.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python +import os +import sys + +from django.conf import settings + +if not settings.configured: + settings.configure( + DATABASES={ + 'default': { + 'ENGINE': 'django.db.backends.sqlite3', + } + }, + INSTALLED_APPS=[ + 'django.contrib.contenttypes', + 'taggit', + 'taggit.tests', + ] + ) + + +from django.test.simple import DjangoTestSuiteRunner + +def runtests(): + runner = DjangoTestSuiteRunner() + failures = runner.run_tests(['tests'], verbosity=1, interactive=True) + sys.exit(failures) + +if __name__ == '__main__': + runtests() + diff --git a/awx/lib/site-packages/taggit/tests/tests.py b/awx/lib/site-packages/taggit/tests/tests.py new file mode 100644 index 0000000000..6282db84ab --- /dev/null +++ b/awx/lib/site-packages/taggit/tests/tests.py @@ -0,0 +1,486 @@ +from unittest import TestCase as UnitTestCase + +import django +from django.conf import settings +from django.core.exceptions import ValidationError +from django.db import connection +from django.test import TestCase, TransactionTestCase + +from taggit.managers import TaggableManager
+from taggit.models import Tag, TaggedItem +from taggit.tests.forms import (FoodForm, DirectFoodForm, CustomPKFoodForm, + OfficialFoodForm) +from taggit.tests.models import (Food, Pet, HousePet, DirectFood, DirectPet, + DirectHousePet, TaggedPet, CustomPKFood, CustomPKPet, CustomPKHousePet, + TaggedCustomPKPet, OfficialFood, OfficialPet, OfficialHousePet, + OfficialThroughModel, OfficialTag, Photo, Movie, Article) +from taggit.utils import parse_tags, edit_string_for_tags + + +class BaseTaggingTest(object): + def assert_tags_equal(self, qs, tags, sort=True, attr="name"): + got = map(lambda tag: getattr(tag, attr), qs) + if sort: + got.sort() + tags.sort() + self.assertEqual(got, tags) + + def assert_num_queries(self, n, f, *args, **kwargs): + original_DEBUG = settings.DEBUG + settings.DEBUG = True + current = len(connection.queries) + try: + f(*args, **kwargs) + self.assertEqual( + len(connection.queries) - current, + n, + ) + finally: + settings.DEBUG = original_DEBUG + + def _get_form_str(self, form_str): + if django.VERSION >= (1, 3): + form_str %= { + "help_start": '<span class="helptext">', + "help_stop": "</span>" + } + else: + form_str %= { + "help_start": "", + "help_stop": "" + } + return form_str + + def assert_form_renders(self, form, html): + try: + self.assertHTMLEqual(str(form), self._get_form_str(html)) + except AttributeError: + self.assertEqual(str(form), self._get_form_str(html)) + + +class BaseTaggingTestCase(TestCase, BaseTaggingTest): + pass + +class BaseTaggingTransactionTestCase(TransactionTestCase, BaseTaggingTest): + pass + + +class TagModelTestCase(BaseTaggingTransactionTestCase): + food_model = Food + tag_model = Tag + + def test_unique_slug(self): + apple = self.food_model.objects.create(name="apple") + apple.tags.add("Red", "red") + + def test_update(self): + special = self.tag_model.objects.create(name="special") + special.save() + + def test_add(self): + apple = self.food_model.objects.create(name="apple") + yummy = self.tag_model.objects.create(name="yummy") + apple.tags.add(yummy) + + def test_slugify(self): + a = Article.objects.create(title="django-taggit 1.0 Released") + a.tags.add("awesome", "release", "AWESOME") + self.assert_tags_equal(a.tags.all(), [ + "category-awesome", + "category-release", + "category-awesome-1" + ], attr="slug") + +class TagModelDirectTestCase(TagModelTestCase): + food_model = DirectFood + tag_model = Tag + +class TagModelCustomPKTestCase(TagModelTestCase): + food_model = CustomPKFood + tag_model = Tag + +class TagModelOfficialTestCase(TagModelTestCase): + food_model = OfficialFood + tag_model = OfficialTag + +class TaggableManagerTestCase(BaseTaggingTestCase): + food_model = Food + pet_model = Pet + housepet_model = HousePet + taggeditem_model = TaggedItem + tag_model = Tag + + def test_add_tag(self): + apple = self.food_model.objects.create(name="apple") + self.assertEqual(list(apple.tags.all()), []) + self.assertEqual(list(self.food_model.tags.all()), []) + + apple.tags.add('green') + self.assert_tags_equal(apple.tags.all(), ['green']) + self.assert_tags_equal(self.food_model.tags.all(), ['green']) + + pear = self.food_model.objects.create(name="pear") + pear.tags.add('green') + self.assert_tags_equal(pear.tags.all(), ['green']) + self.assert_tags_equal(self.food_model.tags.all(), ['green']) + + apple.tags.add('red') + self.assert_tags_equal(apple.tags.all(), ['green', 'red']) + self.assert_tags_equal(self.food_model.tags.all(), ['green', 'red']) + + self.assert_tags_equal( + self.food_model.tags.most_common(), + ['green', 
'red'], + sort=False + ) + + apple.tags.remove('green') + self.assert_tags_equal(apple.tags.all(), ['red']) + self.assert_tags_equal(self.food_model.tags.all(), ['green', 'red']) + tag = self.tag_model.objects.create(name="delicious") + apple.tags.add(tag) + self.assert_tags_equal(apple.tags.all(), ["red", "delicious"]) + + apple.delete() + self.assert_tags_equal(self.food_model.tags.all(), ["green"]) + + def test_add_queries(self): + apple = self.food_model.objects.create(name="apple") + # 1 query to see which tags exist + # + 3 queries to create the tags. + # + 6 queries to create the intermediary things (including SELECTs, to + # make sure we don't double create). + self.assert_num_queries(10, apple.tags.add, "red", "delicious", "green") + + pear = self.food_model.objects.create(name="pear") + # 1 query to see which tags exist + # + 4 queries to create the intermediary things (including SELECTs, to + # make sure we don't double create). + self.assert_num_queries(5, pear.tags.add, "green", "delicious") + + self.assert_num_queries(0, pear.tags.add) + + def test_require_pk(self): + food_instance = self.food_model() + self.assertRaises(ValueError, lambda: food_instance.tags.all()) + + def test_delete_obj(self): + apple = self.food_model.objects.create(name="apple") + apple.tags.add("red") + self.assert_tags_equal(apple.tags.all(), ["red"]) + strawberry = self.food_model.objects.create(name="strawberry") + strawberry.tags.add("red") + apple.delete() + self.assert_tags_equal(strawberry.tags.all(), ["red"]) + + def test_delete_bulk(self): + apple = self.food_model.objects.create(name="apple") + kitty = self.pet_model.objects.create(pk=apple.pk, name="kitty") + + apple.tags.add("red", "delicious", "fruit") + kitty.tags.add("feline") + + self.food_model.objects.all().delete() + + self.assert_tags_equal(kitty.tags.all(), ["feline"]) + + def test_lookup_by_tag(self): + apple = self.food_model.objects.create(name="apple") + apple.tags.add("red", "green") + pear = self.food_model.objects.create(name="pear") + pear.tags.add("green") + + self.assertEqual( + list(self.food_model.objects.filter(tags__name__in=["red"])), + [apple] + ) + self.assertEqual( + list(self.food_model.objects.filter(tags__name__in=["green"])), + [apple, pear] + ) + + kitty = self.pet_model.objects.create(name="kitty") + kitty.tags.add("fuzzy", "red") + dog = self.pet_model.objects.create(name="dog") + dog.tags.add("woof", "red") + self.assertEqual( + list(self.food_model.objects.filter(tags__name__in=["red"]).distinct()), + [apple] + ) + + tag = self.tag_model.objects.get(name="woof") + self.assertEqual(list(self.pet_model.objects.filter(tags__in=[tag])), [dog]) + + cat = self.housepet_model.objects.create(name="cat", trained=True) + cat.tags.add("fuzzy") + + self.assertEqual( + map(lambda o: o.pk, self.pet_model.objects.filter(tags__name__in=["fuzzy"])), + [kitty.pk, cat.pk] + ) + + def test_exclude(self): + apple = self.food_model.objects.create(name="apple") + apple.tags.add("red", "green", "delicious") + + pear = self.food_model.objects.create(name="pear") + pear.tags.add("green", "delicious") + + guava = self.food_model.objects.create(name="guava") + + self.assertEqual( + sorted(map(lambda o: o.pk, self.food_model.objects.exclude(tags__name__in=["red"]))), + sorted([pear.pk, guava.pk]), + ) + + def test_similarity_by_tag(self): + """Test that pears are more similar to apples than watermelons""" + apple = self.food_model.objects.create(name="apple") + apple.tags.add("green", "juicy", "small", "sour") + + pear =
self.food_model.objects.create(name="pear") + pear.tags.add("green", "juicy", "small", "sweet") + + watermelon = self.food_model.objects.create(name="watermelon") + watermelon.tags.add("green", "juicy", "large", "sweet") + + similar_objs = apple.tags.similar_objects() + self.assertEqual(similar_objs, [pear, watermelon]) + self.assertEqual(map(lambda x: x.similar_tags, similar_objs), [3, 2]) + + def test_tag_reuse(self): + apple = self.food_model.objects.create(name="apple") + apple.tags.add("juicy", "juicy") + self.assert_tags_equal(apple.tags.all(), ['juicy']) + + def test_query_traverse(self): + spot = self.pet_model.objects.create(name='Spot') + spike = self.pet_model.objects.create(name='Spike') + spot.tags.add('scary') + spike.tags.add('fluffy') + lookup_kwargs = { + '%s__name' % self.pet_model._meta.module_name: 'Spot' + } + self.assert_tags_equal( + self.tag_model.objects.filter(**lookup_kwargs), + ['scary'] + ) + + def test_taggeditem_unicode(self): + ross = self.pet_model.objects.create(name="ross") + # I keep Ross Perot for a pet, what's it to you? + ross.tags.add("president") + + self.assertEqual( + unicode(self.taggeditem_model.objects.all()[0]), + "ross tagged with president" + ) + + def test_abstract_subclasses(self): + p = Photo.objects.create() + p.tags.add("outdoors", "pretty") + self.assert_tags_equal( + p.tags.all(), + ["outdoors", "pretty"] + ) + + m = Movie.objects.create() + m.tags.add("hd") + self.assert_tags_equal( + m.tags.all(), + ["hd"], + ) + + def test_field_api(self): + # Check if tag field, which simulates m2m, has django-like api. + field = self.food_model._meta.get_field('tags') + self.assertTrue(hasattr(field, 'rel')) + self.assertTrue(hasattr(field, 'related')) + self.assertEqual(self.food_model, field.related.model) + + +class TaggableManagerDirectTestCase(TaggableManagerTestCase): + food_model = DirectFood + pet_model = DirectPet + housepet_model = DirectHousePet + taggeditem_model = TaggedPet + +class TaggableManagerCustomPKTestCase(TaggableManagerTestCase): + food_model = CustomPKFood + pet_model = CustomPKPet + housepet_model = CustomPKHousePet + taggeditem_model = TaggedCustomPKPet + + def test_require_pk(self): + # TODO with a charfield pk, pk is never None, so taggit has no way to + # tell if the instance is saved or not + pass + +class TaggableManagerOfficialTestCase(TaggableManagerTestCase): + food_model = OfficialFood + pet_model = OfficialPet + housepet_model = OfficialHousePet + taggeditem_model = OfficialThroughModel + tag_model = OfficialTag + + def test_extra_fields(self): + self.tag_model.objects.create(name="red") + self.tag_model.objects.create(name="delicious", official=True) + apple = self.food_model.objects.create(name="apple") + apple.tags.add("delicious", "red") + + pear = self.food_model.objects.create(name="Pear") + pear.tags.add("delicious") + + self.assertEqual( + map(lambda o: o.pk, self.food_model.objects.filter(tags__official=False)), + [apple.pk], + ) + + +class TaggableFormTestCase(BaseTaggingTestCase): + form_class = FoodForm + food_model = Food + + def test_form(self): + self.assertEqual(self.form_class.base_fields.keys(), ['name', 'tags']) + + f = self.form_class({'name': 'apple', 'tags': 'green, red, yummy'}) + self.assert_form_renders(f, """<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" value="apple" maxlength="50" /></td></tr> +<tr><th><label for="id_tags">Tags:</label></th><td><input type="text" name="tags" value="green, red, yummy" id="id_tags" /><br />%(help_start)sA 
comma-separated list of tags.%(help_stop)s</td></tr>""") + f.save() + apple = self.food_model.objects.get(name='apple') + self.assert_tags_equal(apple.tags.all(), ['green', 'red', 'yummy']) + + f = self.form_class({'name': 'apple', 'tags': 'green, red, yummy, delicious'}, instance=apple) + f.save() + apple = self.food_model.objects.get(name='apple') + self.assert_tags_equal(apple.tags.all(), ['green', 'red', 'yummy', 'delicious']) + self.assertEqual(self.food_model.objects.count(), 1) + + f = self.form_class({"name": "raspberry"}) + self.assertFalse(f.is_valid()) + + f = self.form_class(instance=apple) + self.assert_form_renders(f, """<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" value="apple" maxlength="50" /></td></tr> +<tr><th><label for="id_tags">Tags:</label></th><td><input type="text" name="tags" value="delicious, green, red, yummy" id="id_tags" /><br />%(help_start)sA comma-separated list of tags.%(help_stop)s</td></tr>""") + + apple.tags.add('has,comma') + f = self.form_class(instance=apple) + self.assert_form_renders(f, """<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" value="apple" maxlength="50" /></td></tr> +<tr><th><label for="id_tags">Tags:</label></th><td><input type="text" name="tags" value="&quot;has,comma&quot;, delicious, green, red, yummy" id="id_tags" /><br />%(help_start)sA comma-separated list of tags.%(help_stop)s</td></tr>""") + + apple.tags.add('has space') + f = self.form_class(instance=apple) + self.assert_form_renders(f, """<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" value="apple" maxlength="50" /></td></tr> +<tr><th><label for="id_tags">Tags:</label></th><td><input type="text" name="tags" value="&quot;has space&quot;, &quot;has,comma&quot;, delicious, green, red, yummy" id="id_tags" /><br />%(help_start)sA comma-separated list of tags.%(help_stop)s</td></tr>""") + + def test_formfield(self): + tm = TaggableManager(verbose_name='categories', help_text='Add some categories', blank=True) + ff = tm.formfield() + self.assertEqual(ff.label, 'Categories') + self.assertEqual(ff.help_text, u'Add some categories') + self.assertEqual(ff.required, False) + + self.assertEqual(ff.clean(""), []) + + tm = TaggableManager() + ff = tm.formfield() + self.assertRaises(ValidationError, ff.clean, "") + +class TaggableFormDirectTestCase(TaggableFormTestCase): + form_class = DirectFoodForm + food_model = DirectFood + +class TaggableFormCustomPKTestCase(TaggableFormTestCase): + form_class = CustomPKFoodForm + food_model = CustomPKFood + +class TaggableFormOfficialTestCase(TaggableFormTestCase): + form_class = OfficialFoodForm + food_model = OfficialFood + + +class TagStringParseTestCase(UnitTestCase): + """ + Ported from Jonathan Buchanan's `django-tagging + <http://django-tagging.googlecode.com/>`_ + """ + + def test_with_simple_space_delimited_tags(self): + """ + Test with simple space-delimited tags. + """ + self.assertEqual(parse_tags('one'), [u'one']) + self.assertEqual(parse_tags('one two'), [u'one', u'two']) + self.assertEqual(parse_tags('one two three'), [u'one', u'three', u'two']) + self.assertEqual(parse_tags('one one two two'), [u'one', u'two']) + + def test_with_comma_delimited_multiple_words(self): + """ + Test with comma-delimited multiple words. + An unquoted comma in the input will trigger this.
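+        For example (illustrative), ',one two' is parsed as the single tag + u'one two' rather than as two space-delimited tags.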
+ """ + self.assertEqual(parse_tags(',one'), [u'one']) + self.assertEqual(parse_tags(',one two'), [u'one two']) + self.assertEqual(parse_tags(',one two three'), [u'one two three']) + self.assertEqual(parse_tags('a-one, a-two and a-three'), + [u'a-one', u'a-two and a-three']) + + def test_with_double_quoted_multiple_words(self): + """ + Test with double-quoted multiple words. + A completed quote will trigger this. Unclosed quotes are ignored. + """ + self.assertEqual(parse_tags('"one'), [u'one']) + self.assertEqual(parse_tags('"one two'), [u'one', u'two']) + self.assertEqual(parse_tags('"one two three'), [u'one', u'three', u'two']) + self.assertEqual(parse_tags('"one two"'), [u'one two']) + self.assertEqual(parse_tags('a-one "a-two and a-three"'), + [u'a-one', u'a-two and a-three']) + + def test_with_no_loose_commas(self): + """ + Test with no loose commas -- split on spaces. + """ + self.assertEqual(parse_tags('one two "thr,ee"'), [u'one', u'thr,ee', u'two']) + + def test_with_loose_commas(self): + """ + Loose commas - split on commas + """ + self.assertEqual(parse_tags('"one", two three'), [u'one', u'two three']) + + def test_tags_with_double_quotes_can_contain_commas(self): + """ + Double quotes can contain commas + """ + self.assertEqual(parse_tags('a-one "a-two, and a-three"'), + [u'a-one', u'a-two, and a-three']) + self.assertEqual(parse_tags('"two", one, one, two, "one"'), + [u'one', u'two']) + + def test_with_naughty_input(self): + """ + Test with naughty input. + """ + # Bad users! Naughty users! + self.assertEqual(parse_tags(None), []) + self.assertEqual(parse_tags(''), []) + self.assertEqual(parse_tags('"'), []) + self.assertEqual(parse_tags('""'), []) + self.assertEqual(parse_tags('"' * 7), []) + self.assertEqual(parse_tags(',,,,,,'), []) + self.assertEqual(parse_tags('",",",",",",","'), [u',']) + self.assertEqual(parse_tags('a-one "a-two" and "a-three'), + [u'a-one', u'a-three', u'a-two', u'and']) + + def test_recreation_of_tag_list_string_representations(self): + plain = Tag.objects.create(name='plain') + spaces = Tag.objects.create(name='spa ces') + comma = Tag.objects.create(name='com,ma') + self.assertEqual(edit_string_for_tags([plain]), u'plain') + self.assertEqual(edit_string_for_tags([plain, spaces]), u'"spa ces", plain') + self.assertEqual(edit_string_for_tags([plain, spaces, comma]), u'"com,ma", "spa ces", plain') + self.assertEqual(edit_string_for_tags([plain, comma]), u'"com,ma", plain') + self.assertEqual(edit_string_for_tags([comma, spaces]), u'"com,ma", "spa ces"') diff --git a/awx/lib/site-packages/taggit/utils.py b/awx/lib/site-packages/taggit/utils.py new file mode 100644 index 0000000000..1b5e5a7f11 --- /dev/null +++ b/awx/lib/site-packages/taggit/utils.py @@ -0,0 +1,126 @@ +from django.utils.encoding import force_unicode +from django.utils.functional import wraps + + +def parse_tags(tagstring): + """ + Parses tag input, with multiple word input being activated and + delineated by commas and double quotes. Quotes take precedence, so + they may contain commas. + + Returns a sorted list of unique tag names. + + Ported from Jonathan Buchanan's `django-tagging + <http://django-tagging.googlecode.com/>`_ + """ + if not tagstring: + return [] + + tagstring = force_unicode(tagstring) + + # Special case - if there are no commas or double quotes in the + # input, we don't *do* a recall... I mean, we know we only need to + # split on spaces. 
+ if u',' not in tagstring and u'"' not in tagstring: + words = list(set(split_strip(tagstring, u' '))) + words.sort() + return words + + words = [] + buffer = [] + # Defer splitting of non-quoted sections until we know if there are + # any unquoted commas. + to_be_split = [] + saw_loose_comma = False + open_quote = False + i = iter(tagstring) + try: + while True: + c = i.next() + if c == u'"': + if buffer: + to_be_split.append(u''.join(buffer)) + buffer = [] + # Find the matching quote + open_quote = True + c = i.next() + while c != u'"': + buffer.append(c) + c = i.next() + if buffer: + word = u''.join(buffer).strip() + if word: + words.append(word) + buffer = [] + open_quote = False + else: + if not saw_loose_comma and c == u',': + saw_loose_comma = True + buffer.append(c) + except StopIteration: + # If we were parsing an open quote which was never closed treat + # the buffer as unquoted. + if buffer: + if open_quote and u',' in buffer: + saw_loose_comma = True + to_be_split.append(u''.join(buffer)) + if to_be_split: + if saw_loose_comma: + delimiter = u',' + else: + delimiter = u' ' + for chunk in to_be_split: + words.extend(split_strip(chunk, delimiter)) + words = list(set(words)) + words.sort() + return words + + +def split_strip(string, delimiter=u','): + """ + Splits ``string`` on ``delimiter``, stripping each resulting string + and returning a list of non-empty strings. + + Ported from Jonathan Buchanan's `django-tagging + <http://django-tagging.googlecode.com/>`_ + """ + if not string: + return [] + + words = [w.strip() for w in string.split(delimiter)] + return [w for w in words if w] + + +def edit_string_for_tags(tags): + """ + Given list of ``Tag`` instances, creates a string representation of + the list suitable for editing by the user, such that submitting the + given string representation back without changing it will give the + same list of tags. + + Tag names which contain commas will be double quoted. + + If any tag name which isn't being quoted contains whitespace, the + resulting string of tag names will be comma-delimited, otherwise + it will be space-delimited. 
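+    For example (illustrative), tags named 'plain', 'spa ces' and 'com,ma' + come back as u'"com,ma", "spa ces", plain'.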
+ + Ported from Jonathan Buchanan's `django-tagging + <http://django-tagging.googlecode.com/>`_ + """ + names = [] + for tag in tags: + name = tag.name + if u',' in name or u' ' in name: + names.append('"%s"' % name) + else: + names.append(name) + return u', '.join(sorted(names)) + + +def require_instance_manager(func): + @wraps(func) + def inner(self, *args, **kwargs): + if self.instance is None: + raise TypeError("Can't call %s with a non-instance manager" % func.__name__) + return func(self, *args, **kwargs) + return inner diff --git a/awx/lib/site-packages/taggit/views.py b/awx/lib/site-packages/taggit/views.py new file mode 100644 index 0000000000..1e407f41c9 --- /dev/null +++ b/awx/lib/site-packages/taggit/views.py @@ -0,0 +1,27 @@ +from django.contrib.contenttypes.models import ContentType +from django.shortcuts import get_object_or_404 +from django.views.generic.list import ListView + +from taggit.models import TaggedItem, Tag + + +def tagged_object_list(request, slug, queryset, **kwargs): + if callable(queryset): + queryset = queryset() + tag = get_object_or_404(Tag, slug=slug) + qs = queryset.filter(pk__in=TaggedItem.objects.filter( + tag=tag, content_type=ContentType.objects.get_for_model(queryset.model) + ).values_list("object_id", flat=True)) + extra_context = kwargs.pop("extra_context", {}) + extra_context["tag"] = tag + + class TaggedObjectList(ListView): + # ListView.as_view() only accepts existing class attributes, so the + # extra context (including the tag) is injected via get_context_data(). + def get_context_data(self, **context_kwargs): + context = super(TaggedObjectList, self).get_context_data(**context_kwargs) + context.update(extra_context) + return context + + return TaggedObjectList.as_view(queryset=qs, **kwargs)(request) + diff --git a/awx/plugins/callback/job_event_callback.py b/awx/plugins/callback/job_event_callback.py index 40983a2e41..e2696770e4 100644 --- a/awx/plugins/callback/job_event_callback.py +++ b/awx/plugins/callback/job_event_callback.py @@ -32,11 +32,20 @@ # Python import json import os +import sys import urllib import urlparse # Requests -import requests +try: + import requests +except ImportError: + # If running from an AWX installation, use the local version of requests + # if it cannot be found globally. + local_site_packages = os.path.join(os.path.dirname(__file__), '..', '..', + 'lib', 'site-packages') + sys.path.insert(0, local_site_packages) + import requests class TokenAuth(requests.auth.AuthBase): diff --git a/awx/scripts/inventory.py b/awx/scripts/inventory.py index 3d2e9d3ce1..dc7e117351 100755 --- a/awx/scripts/inventory.py +++ b/awx/scripts/inventory.py @@ -40,7 +40,15 @@ import urllib import urlparse # Requests -import requests +try: + import requests +except ImportError: + # If running from an AWX installation, use the local version of requests + # if it cannot be found globally. + local_site_packages = os.path.join(os.path.dirname(__file__), '..', 'lib', + 'site-packages') + sys.path.insert(0, local_site_packages) + import requests class TokenAuth(requests.auth.AuthBase): def __init__(self, token): diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 593a1c9c0e..6d3b846713 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -226,9 +226,11 @@ os.environ.setdefault('DJANGO_LIVE_TEST_SERVER_ADDRESS', 'localhost:9013-9199') # Skip migrations when running tests. SOUTH_TESTS_MIGRATE = False -if 'djcelery' in INSTALLED_APPS: - import djcelery - djcelery.setup_loader() +# Initialize Django-Celery.
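+# setup_loader() installs the Django loader, so Celery reads its +# configuration (BROKER_URL and the CELERY_* settings below) from this module.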
+import djcelery +djcelery.setup_loader() BROKER_URL = 'django://' CELERY_TASK_SERIALIZER = 'json' diff --git a/awx/wsgi.py b/awx/wsgi.py index 0cac644448..8dc82d8910 100644 --- a/awx/wsgi.py +++ b/awx/wsgi.py @@ -11,8 +11,17 @@ https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/ """ import os -os.environ.setdefault('DJANGO_SETTINGS_MODULE', - 'awx.settings.production') +import sys +from awx import MODE +# Update the default settings environment variable based on current mode. +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'awx.settings.%s' % MODE) + +# Add local site-packages directory to path. +local_site_packages = os.path.join(os.path.dirname(__file__), 'lib', + 'site-packages') +sys.path.insert(0, local_site_packages) + +# Return the default Django WSGI application. from django.core.wsgi import get_wsgi_application application = get_wsgi_application() diff --git a/requirements/dev.txt b/requirements/dev.txt index a8d5cabc72..5d750fcfa3 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -3,18 +3,20 @@ -U distribute -# Packages used for development and production: -Django>=1.5 -django-celery -django-extensions -django-jsonfield -django-taggit -djangorestframework>=2.3.0,<2.4.0 -Markdown -pexpect -python-dateutil -requests -South>=0.8,<2.0 +# Packages used for both development and production: +Django>=1.4 + +# The following packages are now bundled with AWX (awx/lib/site-packages): + #django-celery + #django-extensions + #django-jsonfield + #django-taggit + #djangorestframework>=2.3.0,<2.4.0 + #Markdown + #pexpect + #python-dateutil + #requests + #South>=0.8,<2.0 # Development-only packages: django-debug-toolbar diff --git a/requirements/dev_local.txt b/requirements/dev_local.txt index 3d95706073..bed4830e9c 100644 --- a/requirements/dev_local.txt +++ b/requirements/dev_local.txt @@ -5,32 +5,29 @@ distribute-0.6.45.tar.gz Django-1.5.1.tar.gz -# Needed by python-dateutil, django-extensions: -six-1.3.0.tar.gz - -# Needed by kombu: -amqp-1.0.11.tar.gz -anyjson-0.3.3.tar.gz - -# Needed by celery: -billiard-2.7.3.28.tar.gz -kombu-2.5.10.tar.gz -python-dateutil-2.1.tar.gz - -# Needed by django-celery: -celery-3.0.19.tar.gz -pytz-2013b.tar.gz - -# Remaining dev/prod packages: -django-celery-3.0.17.tar.gz -django-extensions-1.1.1.tar.gz -django-jsonfield-0.9.10.tar.gz -django-taggit-0.10a1.tar.gz -djangorestframework-2.3.5.tar.gz -Markdown-2.3.1.tar.gz -pexpect-2.4.tar.gz -requests-1.2.3.tar.gz -South-0.8.1.tar.gz +# The following packages are now bundled with AWX (awx/lib/site-packages): + # Needed by python-dateutil, django-extensions: + #six-1.3.0.tar.gz + # Needed by kombu: + #amqp-1.0.11.tar.gz + #anyjson-0.3.3.tar.gz + # Needed by celery: + #billiard-2.7.3.28.tar.gz + #kombu-2.5.10.tar.gz + #python-dateutil-2.1.tar.gz + # Needed by django-celery: + #celery-3.0.19.tar.gz + #pytz-2013b.tar.gz + # Remaining dev/prod packages: + #django-celery-3.0.17.tar.gz + #django-extensions-1.1.1.tar.gz + #django-jsonfield-0.9.10.tar.gz + #django-taggit-0.10a1.tar.gz + #djangorestframework-2.3.5.tar.gz + #Markdown-2.3.1.tar.gz + #pexpect-2.4.tar.gz + #requests-1.2.3.tar.gz + #South-0.8.1.tar.gz # Remaining dev-only packages: django-debug-toolbar-0.9.4.tar.gz diff --git a/requirements/prod.txt b/requirements/prod.txt index 2c2a346f78..73b7f95b2e 100644 --- a/requirements/prod.txt +++ b/requirements/prod.txt @@ -1,17 +1,19 @@ # PIP requirements for AWX production environment (downloaded from PyPI). # Install using "pip -r prod.txt". 
-Django>=1.5 -django-celery -django-extensions -django-jsonfield -django-taggit -djangorestframework>=2.3.0,<2.4.0 -Markdown -pexpect -python-dateutil -requests -South>=0.8,<2.0 +Django>=1.4 + +# The following packages are now bundled with AWX (awx/lib/site-packages): + #django-celery + #django-extensions + #django-jsonfield + #django-taggit + #djangorestframework>=2.3.0,<2.4.0 + #Markdown + #pexpect + #python-dateutil + #requests + #South>=0.8,<2.0 # You may also need to install the following extra packages using the OS # package manager, or pip if you're running inside a virtualenv. diff --git a/requirements/prod_local.txt b/requirements/prod_local.txt index 807b53a290..5ed3de0989 100644 --- a/requirements/prod_local.txt +++ b/requirements/prod_local.txt @@ -3,32 +3,29 @@ Django-1.5.1.tar.gz -# Needed by python-dateutil, django-extensions: -six-1.3.0.tar.gz - -# Needed by kombu: -amqp-1.0.11.tar.gz -anyjson-0.3.3.tar.gz - -# Needed by celery: -billiard-2.7.3.28.tar.gz -kombu-2.5.10.tar.gz -python-dateutil-2.1.tar.gz - -# Needed by django-celery: -celery-3.0.19.tar.gz -pytz-2013b.tar.gz - -# Remaining packages: -django-celery-3.0.17.tar.gz -django-extensions-1.1.1.tar.gz -django-jsonfield-0.9.10.tar.gz -django-taggit-0.10a1.tar.gz -djangorestframework-2.3.5.tar.gz -Markdown-2.3.1.tar.gz -pexpect-2.4.tar.gz -requests-1.2.3.tar.gz -South-0.8.1.tar.gz +# The following packages are now bundled with AWX (awx/lib/site-packages): + # Needed by python-dateutil, django-extensions: + #six-1.3.0.tar.gz + # Needed by kombu: + #amqp-1.0.11.tar.gz + #anyjson-0.3.3.tar.gz + # Needed by celery: + #billiard-2.7.3.28.tar.gz + #kombu-2.5.10.tar.gz + #python-dateutil-2.1.tar.gz + # Needed by django-celery: + #celery-3.0.19.tar.gz + #pytz-2013b.tar.gz + # Remaining dev/prod packages: + #django-celery-3.0.17.tar.gz + #django-extensions-1.1.1.tar.gz + #django-jsonfield-0.9.10.tar.gz + #django-taggit-0.10a1.tar.gz + #djangorestframework-2.3.5.tar.gz + #Markdown-2.3.1.tar.gz + #pexpect-2.4.tar.gz + #requests-1.2.3.tar.gz + #South-0.8.1.tar.gz # You may also need to install the following extra packages using the OS # package manager, or pip if you're running inside a virtualenv. diff --git a/requirements/test.txt b/requirements/test.txt deleted file mode 100644 index 9afc4355e3..0000000000 --- a/requirements/test.txt +++ /dev/null @@ -1,15 +0,0 @@ -# PIP requirements for AWX testing using Tox (downloaded from PyPI). Install -# using "pip -r test.txt". - -ansible==1.2 -django-celery -django-extensions -django-jsonfield -django-taggit -djangorestframework>=2.3.0,<2.4.0 -Markdown -pexpect -PyYAML -python-dateutil -requests -South>=0.8,<2.0 diff --git a/setup.py b/setup.py index 428cf1af44..4dd5f6402d 100755 --- a/setup.py +++ b/setup.py @@ -74,17 +74,8 @@ setup( include_package_data=True, zip_safe=False, #install_requires=[ - # 'Django>=1.5', yes - # 'django-celery', yes - # 'django-extensions', yes - # 'django-filter', - # 'django-jsonfield', - # 'django-taggit', - # 'djangorestframework>=2.3.0,<2.4.0', - # 'pexpect', - # 'python-dateutil', yes - # 'PyYAML', yes - # 'South>=0.8,<2.0', + # 'Django>=1.4', + # 'PyYAML', #], setup_requires=[], classifiers=[ diff --git a/tox.ini b/tox.ini index 833b41a832..daf27aeb7c 100644 --- a/tox.ini +++ b/tox.ini @@ -5,7 +5,8 @@ envlist = [testenv] commands = python manage.py test main deps = - -r{toxinidir}/requirements/test.txt + ansible==1.2 + PyYAML setenv = DJANGO_SETTINGS_MODULE = awx.settings.development # For OS X to be able to install pycrypto.
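Note: the fallback-import pattern added to job_event_callback.py and inventory.py above can be exercised from a standalone script. A minimal sketch (illustrative only; it assumes, as those files do, that the bundled packages live at lib/site-packages relative to the script):

    import os
    import sys

    try:
        import requests
    except ImportError:
        # Fall back to the copy bundled with AWX when requests is not
        # installed system-wide (path assumed to mirror the layout above).
        local_site_packages = os.path.join(os.path.dirname(__file__), 'lib',
                                           'site-packages')
        sys.path.insert(0, local_site_packages)
        import requests

    # Show which copy of requests was actually imported.
    print requests.__file__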