File awx-24.6.1.obscpio of Package python-awx-cli

07070100000000000081A400000000000000000000000166846B9200000469000000000000000000000000000000000000001600000000awx-24.6.1/.gitignore# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg

# PyInstaller
#  Usually these files are written by a python script from a template
#  before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
report.xml
report.pylama
*,cover
.hypothesis/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# IPython Notebook
.ipynb_checkpoints

# pyenv
.python-version

# celery beat schedule file
celerybeat-schedule

# dotenv
.env

# virtualenv
venv/
ENV/

# Spyder project settings
.spyderproject

# Rope project settings
.ropeproject

# vim
*.swp

# mac OS
*.DS_Store

# pytest
*.pytest_cache
07070100000001000081A400000000000000000000000166846B920000008A000000000000000000000000000000000000001700000000awx-24.6.1/MANIFEST.ininclude requirements.txt
include setup.py
include VERSION
recursive-include awxkit *.py *.yml *.md
recursive-include test *.py *.yml *.md
07070100000002000081A400000000000000000000000166846B9200000149000000000000000000000000000000000000001500000000awx-24.6.1/README.mdawxkit
======

A Python library that backs the provided `awx` command line client.

It can be installed by running `pip install awxkit`.

The PyPI repository can be found [here](https://pypi.org/project/awxkit/).

For more information on installing the CLI and building the docs on how to use it, look [here](./awxkit/cli/docs).07070100000003000041ED00000000000000000000000266846B9200000000000000000000000000000000000000000000001200000000awx-24.6.1/awxkit07070100000004000081A400000000000000000000000166846B92000000A7000000000000000000000000000000000000001E00000000awx-24.6.1/awxkit/__init__.pyfrom awxkit.api import pages, client, resources  # NOQA
from awxkit.config import config  # NOQA
from awxkit import awx  # NOQA
from awxkit.ws import WSClient  # NOQA
07070100000005000041ED00000000000000000000000266846B9200000000000000000000000000000000000000000000001600000000awx-24.6.1/awxkit/api07070100000006000081A400000000000000000000000166846B920000003B000000000000000000000000000000000000002200000000awx-24.6.1/awxkit/api/__init__.pyfrom .pages import *  # NOQA
from .client import *  # NOQA
07070100000007000081A400000000000000000000000166846B920000141B000000000000000000000000000000000000002000000000awx-24.6.1/awxkit/api/client.pyimport logging

import requests

from awxkit import exceptions as exc
from awxkit.config import config


log = logging.getLogger(__name__)


class ConnectionException(exc.Common):
    """Raised by Connection.request() when an unknown HTTP method name is supplied."""

    pass


class Token_Auth(requests.auth.AuthBase):
    """requests auth handler that sends an OAuth2/personal access token as a Bearer header."""

    def __init__(self, token):
        self.token = token

    def __call__(self, request):
        # Stamp the Authorization header onto every prepared request.
        request.headers['Authorization'] = 'Bearer {}'.format(self.token)
        return request


def log_elapsed(r, *args, **kwargs):  # requests hook to display API elapsed time
    """requests 'response' hook: log the method, URL and round-trip time at DEBUG level."""
    log.debug('"{0.request.method} {0.url}" elapsed: {0.elapsed}'.format(r))


class Connection(object):
    """A requests.Session wrapper for establishing connection w/ AWX instance"""

    def __init__(self, server, verify=False):
        # server: base URL of the AWX instance (a trailing slash is stripped on first request)
        # verify: TLS certificate verification flag passed through to requests
        self.server = server
        self.verify = verify
        # Note: We use the old sessionid here in case someone is trying to connect to an older AWX version
        # There is a check below so that if AWX returns an X-API-Session-Cookie-Name we will grab it and
        # connect with the new session cookie name.
        self.session_cookie_name = 'sessionid'

        if not self.verify:
            # Silence urllib3 InsecureRequestWarning noise when verification is off.
            requests.packages.urllib3.disable_warnings()

        self.session = requests.Session()
        self.uses_session_cookie = False

    def get_session_requirements(self, next=config.api_base_path):
        """Prime the session for cookie-based login and return the extra login POST data."""
        self.get(config.api_base_path)  # this causes a cookie w/ the CSRF token to be set
        return dict(next=next)

    def login(self, username=None, password=None, token=None, **kwargs):
        """Configure authentication for subsequent requests.

        With username/password and a 'next' kwarg: performs a cookie-based session
        login (POST to the login endpoint). With username/password alone: HTTP
        basic auth. With token: Bearer token auth. With nothing: clears auth.
        """
        if username and password:
            _next = kwargs.get('next')
            if _next:
                headers = self.session.headers.copy()
                response = self.post(f"{config.api_base_path}login/", headers=headers, data=dict(username=username, password=password, next=_next))
                # The login causes a redirect so we need to search the history of the request to find the header
                for historical_response in response.history:
                    if 'X-API-Session-Cookie-Name' in historical_response.headers:
                        self.session_cookie_name = historical_response.headers.get('X-API-Session-Cookie-Name')

                self.session_id = self.session.cookies.get(self.session_cookie_name, None)
                self.uses_session_cookie = True
            else:
                self.session.auth = (username, password)
        elif token:
            self.session.auth = Token_Auth(token)
        else:
            self.session.auth = None

    def logout(self):
        """Drop the session cookie (cookie-based login) or clear the auth handler."""
        if self.uses_session_cookie:
            self.session.cookies.pop(self.session_cookie_name, None)
        else:
            self.session.auth = None

    def request(self, relative_endpoint, method='get', json=None, data=None, query_parameters=None, headers=None):
        """Core requests.Session wrapper that returns requests.Response objects"""
        session_request_method = getattr(self.session, method, None)
        if not session_request_method:
            raise ConnectionException(message="Unknown request method: {0}".format(method))

        use_endpoint = relative_endpoint
        if self.server.endswith('/'):
            # NOTE: permanently normalizes self.server (mutated on first use).
            self.server = self.server[:-1]
        if use_endpoint.startswith('/'):
            use_endpoint = use_endpoint[1:]
        url = '/'.join([self.server, use_endpoint])

        kwargs = dict(verify=self.verify, params=query_parameters, json=json, data=data, hooks=dict(response=log_elapsed))

        if headers is not None:
            kwargs['headers'] = headers

        if method in ('post', 'put', 'patch', 'delete'):
            # Mutating requests must echo Django's CSRF cookie and a same-origin Referer.
            kwargs.setdefault('headers', {})['X-CSRFToken'] = self.session.cookies.get('csrftoken')
            kwargs['headers']['Referer'] = url

        for attempt in range(1, config.client_connection_attempts + 1):
            try:
                response = session_request_method(url, **kwargs)
                break
            except requests.exceptions.ConnectionError as err:
                # Retry transient connection failures; re-raise on the last attempt.
                if attempt == config.client_connection_attempts:
                    raise err
                log.exception('Failed to reach url: {0}.  Retrying.'.format(url))

        return response

    def delete(self, relative_endpoint):
        """HTTP DELETE against a server-relative endpoint."""
        return self.request(relative_endpoint, method='delete')

    def get(self, relative_endpoint, query_parameters=None, headers=None):
        """HTTP GET against a server-relative endpoint."""
        return self.request(relative_endpoint, method='get', query_parameters=query_parameters, headers=headers)

    def head(self, relative_endpoint):
        """HTTP HEAD against a server-relative endpoint."""
        return self.request(relative_endpoint, method='head')

    def options(self, relative_endpoint):
        """HTTP OPTIONS against a server-relative endpoint."""
        return self.request(relative_endpoint, method='options')

    def patch(self, relative_endpoint, json):
        """HTTP PATCH with a JSON body."""
        return self.request(relative_endpoint, method='patch', json=json)

    def post(self, relative_endpoint, json=None, data=None, headers=None):
        """HTTP POST with an optional JSON or form-encoded body."""
        return self.request(relative_endpoint, method='post', json=json, data=data, headers=headers)

    def put(self, relative_endpoint, json):
        """HTTP PUT with a JSON body."""
        return self.request(relative_endpoint, method='put', json=json)
07070100000008000041ED00000000000000000000000266846B9200000000000000000000000000000000000000000000001D00000000awx-24.6.1/awxkit/api/mixins07070100000009000081A400000000000000000000000166846B920000013F000000000000000000000000000000000000002900000000awx-24.6.1/awxkit/api/mixins/__init__.pyfrom .has_create import *  # NOQA
from .has_instance_groups import HasInstanceGroups  # NOQA
from .has_notifications import HasNotifications  # NOQA
from .has_status import HasStatus  # NOQA
from .has_survey import HasSurvey  # NOQA
from .has_variables import HasVariables  # NOQA
from .has_copy import HasCopy  # NOQA
0707010000000A000081A400000000000000000000000166846B92000001BA000000000000000000000000000000000000002900000000awx-24.6.1/awxkit/api/mixins/has_copy.pyfrom awxkit.api.pages import Page
from awxkit.utils import random_title


class HasCopy(object):
    """Mixin for pages that expose a related 'copy' endpoint."""

    def can_copy(self):
        """Return whether the API reports this resource as copyable."""
        return self.get_related('copy').can_copy

    def copy(self, name=''):
        """Return a copy of current page"""
        copy_name = name if name else "Copy - " + random_title()
        copy_endpoint = self.json.related['copy']
        copy_page = Page(self.connection, endpoint=copy_endpoint)
        return copy_page.post({"name": copy_name})
0707010000000B000081A400000000000000000000000166846B92000041F5000000000000000000000000000000000000002B00000000awx-24.6.1/awxkit/api/mixins/has_create.pyfrom collections import defaultdict
import inspect

from awxkit.utils import get_class_if_instance, class_name_to_kw_arg, is_proper_subclass, super_dir_set
from awxkit.utils.toposort import toposort


# HasCreate dependency resolution and creation utilities
def dependency_graph(page, *provided_dependencies):
    """Build a mapping of page class -> set of its declared dependency classes.

    The graph is expanded recursively, so every transitive dependency appears
    as its own key.  Any optional provided_dependencies are traversed as if
    they were dependencies, without affecting the value of each keyed page.
    """
    # Some HasCreate's can claim generic Base's w/o dependencies.
    direct = set(getattr(page, 'dependencies', []))
    graph = {page: direct}
    for dep in direct.union(provided_dependencies):
        graph.update(dependency_graph(dep))
    return graph


def optional_dependency_graph(page, *provided_dependencies):
    """Like dependency_graph(), but also follows each page's optional_dependencies.

    Any optional provided_dependencies are traversed as if they were
    dependencies, without affecting the value of each keyed page.
    """
    combined = set(getattr(page, 'dependencies', []) + getattr(page, 'optional_dependencies', []))
    graph = {page: combined}
    for dep in combined.union(provided_dependencies):
        graph.update(optional_dependency_graph(dep))
    return graph


def creation_order(graph):
    """returns a list of sets of HasCreate subclasses representing the order of page creation that will
    resolve the dependencies of subsequent pages for any non-cyclic dependency_graph
    ex:
    [set(Organization), set(Inventory), set(Group)]

    **The result is based entirely on the passed dependency graph and should be blind
    to node attributes.**
    """
    # Thin wrapper over toposort(); members of each yielded set have no ordering
    # constraints among themselves.
    return list(toposort(graph))


def separate_async_optionals(creation_order):
    """Re-order members within each creation group so that pages which create a
    shared resource as a hard dependency come before pages that would merely
    source it later as an optional (fixes asymmetric-optional creation races).
    """
    reordered = []
    for group in creation_order:
        # Singleton (or empty) groups have nothing to disambiguate.
        if len(group) <= 1:
            reordered.append(group)
            continue
        candidates = [member for member in group if hasattr(member, 'dependencies')]
        # Score each candidate by how many of its hard dependencies appear as
        # optionals of its peers in the same group.
        scores = dict.fromkeys(candidates, 0)
        for candidate in candidates:
            peers = [other for other in candidates if other is not candidate]
            for dependency in candidate.dependencies:
                scores[candidate] += sum(1 for peer in peers if dependency in peer.optional_dependencies)
        buckets = defaultdict(set)
        for member in group:
            buckets[scores.get(member, 0)].add(member)
        # Highest-scoring members (actual creators of shared resources) go first.
        for score in sorted(buckets, reverse=True):
            reordered.append(buckets[score])
    return reordered


def page_creation_order(page=None, *provided_dependencies):
    """Return creation_order() groups restricted to pages that may actually need
    creating, with groups split so no HasCreate subclass shares a creation set
    with a member of its optional_dependencies.  All provided_dependencies and
    their dependencies are also included.
    """
    if not page:
        return []
    # dependency graphs only care about class type, so unwrap (cls, kwargs)
    # tuples and map instances back to their classes
    normalized = [get_class_if_instance(x[0] if isinstance(x, tuple) else x) for x in provided_dependencies]
    # every page we might have to create
    needed = set(dependency_graph(page, *normalized))
    # order using the most accurate (optional-aware) dependency graph
    ordered_groups = creation_order(optional_dependency_graph(page, *normalized))
    pruned = [group & needed for group in ordered_groups if group & needed]
    return separate_async_optionals(pruned)


def all_instantiated_dependencies(*potential_parents):
    """returns a list of all instantiated dependencies including parents themselves.
    Will be in page_creation_order
    """
    scope_provided_dependencies = []

    # only real page instances count; classes and (cls, kwargs) tuples are skipped
    instantiated = set([x for x in potential_parents if not isinstance(x, type) and not isinstance(x, tuple)])

    for potential_parent in [x for x in instantiated if hasattr(x, '_dependency_store')]:
        for dependency in potential_parent._dependency_store.values():
            if dependency and dependency not in scope_provided_dependencies:
                # recurse so transitive dependencies are collected as well
                scope_provided_dependencies.extend(all_instantiated_dependencies(dependency))

    scope_provided_dependencies.extend(instantiated)
    scope_provided_dependencies = list(set(scope_provided_dependencies))
    # group the instances by class so they can be emitted in creation order below
    class_to_provided = {}
    for provided in scope_provided_dependencies:
        if provided.__class__ in class_to_provided:
            class_to_provided[provided.__class__].append(provided)
        else:
            class_to_provided[provided.__class__] = [provided]

    all_instantiated = []
    for group in page_creation_order(*scope_provided_dependencies):
        for item in group:
            if item in class_to_provided:
                all_instantiated.extend(class_to_provided[item])
                del class_to_provided[item]
            elif item.__class__ in class_to_provided:
                all_instantiated.extend(class_to_provided[item.__class__])
                del class_to_provided[item.__class__]

    return all_instantiated


class DSAdapter(object):
    """Access HasCreate._dependency_store dependencies by attribute instead of class.

    ex:
    ```
    base_sc = HasCreate().create(inventory=awxkit.api.Inventory)
    base_sc._dependency_store[Inventory] == base.ds.inventory
    ```
    """

    def __init__(self, owner, dependency_store):
        self.owner = owner
        self.dependency_store = dependency_store
        # map kwarg-style names (e.g. 'job_template') back to the page classes
        # that key the dependency store
        self._lookup = dict()
        for cls in dependency_store:
            self._lookup[class_name_to_kw_arg(cls.__name__)] = cls

    def __repr__(self):
        return self.__str__()

    def __str__(self):
        return str(list(self._lookup.keys()))

    def __getattr__(self, attr):
        dep_class = self._lookup.get(attr)
        if dep_class is not None:
            dep = self.dependency_store[dep_class]
            if dep:
                return dep
        raise AttributeError('{0.owner} has no dependency "{1}"'.format(self, attr))

    def __getitem__(self, item):
        return getattr(self, item)

    def __iter__(self):
        return iter(self._lookup)

    def __dir__(self):
        attrs = super_dir_set(self.__class__)
        if '_lookup' in self.__dict__ and hasattr(self._lookup, 'keys'):
            attrs.update(self._lookup.keys())
        return sorted(attrs)


# Hijack json.dumps and simplejson.dumps (used by requests)
# to allow HasCreate.create_payload() serialization without impacting payload.ds access
def filter_ds_from_payload(dumps):
    """Wrap a json ``dumps`` callable so a top-level 'ds' DSAdapter entry is
    stripped from mapping payloads before serialization.  The caller's object
    is never mutated; a shallow copy is serialized instead.
    """

    def _filter_ds_from_payload(obj, *a, **kw):
        payload = obj
        if hasattr(obj, 'get') and isinstance(obj.get('ds'), DSAdapter):
            payload = obj.copy()
            del payload['ds']
        return dumps(payload, *a, **kw)

    return _filter_ds_from_payload


import json  # noqa

# Monkeypatch json.dumps (and simplejson.dumps, which requests prefers when
# installed) so create_payload() bodies holding a DSAdapter under 'ds'
# serialize cleanly without stripping 'ds' from the live payload object.
json.dumps = filter_ds_from_payload(json.dumps)

try:
    import simplejson  # noqa

    simplejson.dumps = filter_ds_from_payload(simplejson.dumps)
except ImportError:
    # simplejson is optional; requests falls back to the stdlib json module.
    pass


class HasCreate(object):
    """Mixin for pages whose creation requires other pages to exist first.

    Subclasses declare class-level `dependencies` / `optional_dependencies`
    lists of page classes; create_and_update_dependencies() instantiates them
    in topological order and records the instances in self._dependency_store
    (attribute access via self.ds).
    """

    # For reference only.  Use self.ds, or self._dependency_store if mutating.
    dependencies = []
    optional_dependencies = []

    # Provides introspection capability in recursive create_and_update_dependencies calls
    _scoped_dependencies_by_frame = dict()

    def __init__(self, *a, **kw):
        # A 'ds' kwarg lets a page share an existing DSAdapter (and its store);
        # otherwise build a fresh store keyed by each declared dependency class.
        dependency_store = kw.get('ds')
        if dependency_store is None:
            deps = self.dependencies + self.optional_dependencies
            self._dependency_store = {base_subclass: None for base_subclass in deps}
            self.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        else:
            self._dependency_store = dependency_store.dependency_store
            self.ds = dependency_store
        super(HasCreate, self).__init__(*a, **kw)

    def _update_dependencies(self, dependency_candidates):
        """updates self._dependency_store to reflect instantiated dependencies, if any.

        dependency_candidates is an iterable of (instance, claimed) pairs:
        claimed=True entries always take their store slot, unclaimed entries
        only fill slots that are still empty.
        """
        if self._dependency_store:
            potentials = []

            # in case the candidate is an instance of a desired base class
            # (e.g. Project for self._dependency_store = {'UnifiedJobTemplate': None})
            # we try each of its base classes until a match is found
            base_lookup = {}
            for candidate in dependency_candidates:
                for cls_type in inspect.getmro(candidate[0].__class__):
                    if cls_type in self._dependency_store:
                        base_lookup[candidate[0]] = cls_type
                        potentials.append(candidate)
                        break
            second_pass = []
            for candidate, claimed in potentials:
                if claimed:
                    self._dependency_store[base_lookup[candidate]] = candidate
                else:
                    second_pass.append(candidate)
            # Technical Debt: We need to iron out the expected behavior of multiple instances
            # of unclaimed types. Right now the last one in potentials is marked as a dependency.
            second_pass.reverse()  # for the last one in the list to be marked we need to reverse.
            for candidate in second_pass:
                if not self._dependency_store[base_lookup[candidate]]:
                    self._dependency_store[base_lookup[candidate]] = candidate

    def create_and_update_dependencies(self, *provided_and_desired_dependencies):
        """in order creation of dependencies and updating of self._dependency_store
        to include instances, indexed by page class.  If a (HasCreate, dict()) tuple is
        provided as a desired dependency, the dict() will be unpacked as kwargs for the
        `HasCreate.create(**dict())` call.

        ***
        Providing (HasCreate, dict()) tuples for dependency args to this method
        removes the assurance that all shared dependencies types will be the same instance
        and only one instance of each type is created
        (Tech Debt: can create orphans if default dependency isn't claimed).
        The provided args are only in scope of the desired page, override any previously created
        instance of the same class, and replace said instances in the continuing chain.
        ***

        ```
        ex:
        self.dependencies = [awxkit.api.pages.Inventory]
        self.create_and_update_dependencies()
        inventory = self._dependency_store[awxkit.api.pages.Inventory]

        ex:
        self.dependencies = [awxkit.api.pages.Inventory]
        self.create_and_update_dependencies((awxkit.api.pages.Inventory, dict(attr_one=1, attr_two=2)))
        inventory = self._dependency_store[awxkit.api.pages.Inventory]
        # assume kwargs are set as attributes by Inventory.create()
        inventory.attr_one == 1
        > True
        inventory.attr_two == 2
        > True

        ex:
        self.dependencies = []
        self.optional_dependencies = [awxkit.api.pages.Organization]
        self.create_and_update_dependencies(awxkit.api.pages.Organization)
        organization = self._dependency_store[awxkit.api.pages.Organization]

        ex:
        self.dependencies = [awxkit.api.pages.Inventory]
        inventory = v2.inventories.create()
        self.create_and_update_dependencies(inventory)
        inventory == self._dependency_store[awxkit.api.pages.Inventory]
        > True
        ```
        """
        if not any((self.dependencies, self.optional_dependencies)):
            return

        # remove falsy values
        provided_and_desired_dependencies = [x for x in provided_and_desired_dependencies if x]
        # (HasCreate(), True) tells HasCreate._update_dependencies to link
        provided_dependencies = [(x, True) for x in provided_and_desired_dependencies if not isinstance(x, type) and not isinstance(x, tuple)]

        # Since dependencies are often declared at runtime, we need to use some introspection
        # to determine previously created ones for proper dependency store linking.
        # This is done by keeping an updated dependency record by the root caller's frame
        caller_frame = inspect.currentframe()
        # NOTE(review): parent_frame is stored on self and keeps a frame object
        # alive until the next call overwrites it — confirm this is intended.
        self.parent_frame = None
        for frame in inspect.stack()[1:]:
            if frame[3] == 'create_and_update_dependencies':
                self.parent_frame = frame[0]

        if not self.parent_frame:
            # a maintained dict of instantiated resources keyed by lowercase class name to be
            # expanded as keyword args during `create()` calls
            all_instantiated = all_instantiated_dependencies(*[d[0] for d in provided_dependencies])
            scoped_dependencies = {class_name_to_kw_arg(d.__class__.__name__): d for d in all_instantiated}
            self._scoped_dependencies_by_frame[caller_frame] = [self, scoped_dependencies]
        else:
            scoped_dependencies = self._scoped_dependencies_by_frame[self.parent_frame][1]

        # split desired (to-be-created) classes from already-provided instances
        desired_dependencies = []
        desired_dependency_classes = []
        for item in provided_and_desired_dependencies:
            if isinstance(item, tuple):
                item_cls = item[0]
            elif inspect.isclass(item):
                item_cls = item
            else:
                item_cls = item.__class__
            if item_cls not in [x[0].__class__ for x in provided_dependencies]:
                desired_dependency_classes.append(item_cls)
                desired_dependencies.append(item)

        if desired_dependencies:
            ordered_desired_dependencies = []
            creation_order = [item for s in page_creation_order(*desired_dependency_classes) for item in s]
            for item in creation_order:
                for desired in desired_dependency_classes:
                    if desired == item or is_proper_subclass(desired, item):
                        ordered_desired_dependencies.append(desired)
                        desired_dependency_classes.remove(desired)
                        break

            # keep track of (HasCreate, kwarg_dict)
            provided_with_kwargs = dict()
            for page_cls, provided_kwargs in [x for x in desired_dependencies if isinstance(x, tuple)]:
                provided_with_kwargs[page_cls] = provided_kwargs

            for to_create in ordered_desired_dependencies:
                scoped_args = dict(scoped_dependencies)

                if to_create in provided_with_kwargs:
                    scoped_args.pop(to_create, None)  # remove any conflicts in favor of explicit kwargs
                    scoped_args.update(provided_with_kwargs.pop(to_create))

                scoped_args.pop(class_name_to_kw_arg(to_create.__name__), None)

                created = to_create(self.connection).create(**scoped_args)
                provided_dependencies.append((created, True))

                # make the freshly created page visible to subsequent create() calls
                for dependency, _ in provided_dependencies:
                    if dependency not in scoped_dependencies:
                        scoped_dependencies[class_name_to_kw_arg(dependency.__class__.__name__)] = dependency

        self._update_dependencies(provided_dependencies)

        if not self.parent_frame:
            del self._scoped_dependencies_by_frame[caller_frame]

    def teardown(self):
        """Calls `silent_cleanup()` on all dependencies and self in reverse page creation order."""
        to_teardown = all_instantiated_dependencies(self)
        to_teardown_types = set(map(get_class_if_instance, to_teardown))
        order = [
            set([potential for potential in (get_class_if_instance(x) for x in group) if potential in to_teardown_types])
            for group in page_creation_order(self, *to_teardown)
        ]
        order.reverse()
        for teardown_group in order:
            for teardown_class in teardown_group:
                instance = [x for x in to_teardown if isinstance(x, teardown_class)].pop()
                instance.silent_cleanup()

        # drop references to torn-down pages so they are not reused later
        for item in to_teardown:
            for dep_type, dep in item._dependency_store.items():
                if dep and dep_type in to_teardown_types:
                    item._dependency_store[dep_type] = None  # Note that we don't call del
0707010000000C000081A400000000000000000000000166846B9200000266000000000000000000000000000000000000003400000000awx-24.6.1/awxkit/api/mixins/has_instance_groups.pyfrom contextlib import suppress

import awxkit.exceptions as exc


class HasInstanceGroups(object):
    """Mixin for resources that expose a related 'instance_groups' endpoint."""

    def add_instance_group(self, instance_group):
        """Associate instance_group with this resource."""
        with suppress(exc.NoContent):
            self.related['instance_groups'].post({'id': instance_group.id})

    def remove_instance_group(self, instance_group):
        """Disassociate instance_group from this resource."""
        with suppress(exc.NoContent):
            self.related['instance_groups'].post({'id': instance_group.id, 'disassociate': instance_group.id})

    def remove_all_instance_groups(self):
        """Disassociate every instance group currently attached to this resource."""
        for group in self.related.instance_groups.get().results:
            self.remove_instance_group(group)
0707010000000D000081A400000000000000000000000166846B920000064F000000000000000000000000000000000000003200000000awx-24.6.1/awxkit/api/mixins/has_notifications.pyfrom contextlib import suppress

import awxkit.exceptions as exc


# Endpoints through which notification templates can be (dis)associated.
notification_endpoints = ("notification_templates", "notification_templates_started", "notification_templates_error", "notification_templates_success")
# Workflow job templates additionally support approval notifications.
wfjt_notification_endpoints = notification_endpoints + ('notification_templates_approvals',)


class HasNotifications(object):
    """Mixin for resources that can (dis)associate notification templates."""

    def _supported_notification_endpoints(self):
        """Return the tuple of notification endpoints valid for this resource type."""
        # Imported lazily to avoid a circular import with the pages package.
        from awxkit.api.pages.workflow_job_templates import WorkflowJobTemplate

        return wfjt_notification_endpoints if isinstance(self, WorkflowJobTemplate) else notification_endpoints

    def add_notification_template(self, notification_template, endpoint="notification_templates_success"):
        """Associate notification_template with this resource via the given endpoint.

        Raises ValueError if the endpoint is not supported by this resource type.
        """
        supported_endpoints = self._supported_notification_endpoints()
        if endpoint not in supported_endpoints:
            # Report the endpoints actually supported by this resource (the previous
            # message always listed the non-WFJT endpoints, even for WFJTs).
            raise ValueError('Unsupported notification endpoint "{0}". Please use one of {1}.'.format(endpoint, supported_endpoints))
        with suppress(exc.NoContent):
            self.related[endpoint].post(dict(id=notification_template.id))

    def remove_notification_template(self, notification_template, endpoint="notification_templates_success"):
        """Disassociate notification_template from this resource via the given endpoint.

        Raises ValueError if the endpoint is not supported by this resource type.
        """
        supported_endpoints = self._supported_notification_endpoints()
        if endpoint not in supported_endpoints:
            raise ValueError('Unsupported notification endpoint "{0}". Please use one of {1}.'.format(endpoint, supported_endpoints))
        with suppress(exc.NoContent):
            self.related[endpoint].post(dict(id=notification_template.id, disassociate=notification_template.id))
0707010000000E000081A400000000000000000000000166846B92000010D7000000000000000000000000000000000000002B00000000awx-24.6.1/awxkit/api/mixins/has_status.pyfrom datetime import datetime
import json

from awxkit.utils import poll_until
from awxkit.exceptions import WaitUntilTimeout
from awxkit.config import config


def bytes_to_str(obj):
    """Return obj decoded to text when it is bytes-like, else its str() form."""
    try:
        return obj.decode()
    except AttributeError:
        # not bytes-like (already a str, an int, None, ...)
        return str(obj)


class HasStatus(object):
    """Mixin for unified-job-like pages that expose a 'status' field."""

    # Terminal states; started_statuses additionally covers in-flight states.
    completed_statuses = ['successful', 'failed', 'error', 'canceled']
    started_statuses = ['pending', 'running'] + completed_statuses

    @property
    def is_completed(self):
        # case-insensitive terminal-state check
        return self.status.lower() in self.completed_statuses

    @property
    def is_successful(self):
        return self.status == 'successful'

    def wait_until_status(self, status, interval=1, timeout=60, **kwargs):
        """Poll self.get() until status matches (one of) `status`; on timeout,
        re-raise as a detailed AssertionError via assert_status()."""
        status = [status] if not isinstance(status, (list, tuple)) else status
        try:
            poll_until(lambda: getattr(self.get(), 'status') in status, interval=interval, timeout=timeout, **kwargs)
        except WaitUntilTimeout:
            # This will raise a more informative error than just "WaitUntilTimeout" error
            self.assert_status(status)
        return self

    def wait_until_completed(self, interval=5, timeout=60, **kwargs):
        """Wait for a terminal status, then (when the page exposes it) wait for
        event processing to finish within the remainder of the original timeout."""
        start_time = datetime.utcnow()
        HasStatus.wait_until_status(self, self.completed_statuses, interval=interval, timeout=timeout, **kwargs)
        if not getattr(self, 'event_processing_finished', True):
            elapsed = datetime.utcnow() - start_time
            time_left = timeout - elapsed.total_seconds()
            poll_until(lambda: getattr(self.get(), 'event_processing_finished', True), interval=interval, timeout=time_left, **kwargs)
        return self

    def wait_until_started(self, interval=1, timeout=60):
        """Wait until the job has at least started (or already completed)."""
        return self.wait_until_status(self.started_statuses, interval=interval, timeout=timeout)

    def failure_output_details(self):
        """Collect stdout / job_explanation / result_traceback into one display string."""
        msg = ''
        if getattr(self, 'result_stdout', ''):
            output = bytes_to_str(self.result_stdout)
            if output:
                msg = '\nstdout:\n{}'.format(output)
        if getattr(self, 'job_explanation', ''):
            msg += '\njob_explanation: {}'.format(bytes_to_str(self.job_explanation))
        if getattr(self, 'result_traceback', ''):
            msg += '\nresult_traceback:\n{}'.format(bytes_to_str(self.result_traceback))
        return msg

    def assert_status(self, status_list, msg=None):
        """Raise AssertionError with rich diagnostics unless self.status is in
        status_list ('successful' additionally requires is_successful)."""
        if isinstance(status_list, str):
            status_list = [status_list]
        if self.status in status_list:
            # include corner cases in is_successful logic
            if 'successful' not in status_list or self.is_successful:
                return
        if msg is None:
            msg = ''
        else:
            msg += '\n'
        msg += '{0}-{1} has status of {2}, which is not in {3}.'.format(self.type.title(), self.id, self.status, status_list)
        if getattr(self, 'execution_environment', ''):
            msg += '\nexecution_environment: {}'.format(bytes_to_str(self.execution_environment))
            if getattr(self, 'related', False):
                ee = self.related.execution_environment.get()
                msg += f'\nee_image: {ee.image}'
                msg += f'\nee_credential: {ee.credential}'
                msg += f'\nee_pull_option: {ee.pull}'
                msg += f'\nee_summary_fields: {ee.summary_fields}'

        msg += self.failure_output_details()

        # NOTE(review): assumes job_explanation is a string, never None — verify.
        if getattr(self, 'job_explanation', '').startswith('Previous Task Failed'):
            try:
                data = json.loads(self.job_explanation.replace('Previous Task Failed: ', ''))
                # NOTE(review): '/{0}v2/...' with api_base_path (typically '/api/')
                # yields a double leading slash — confirm walk() normalizes this.
                dependency = self.walk('/{0}v2/{1}s/{2}/'.format(config.api_base_path, data['job_type'], data['job_id']))
                if hasattr(dependency, 'failure_output_details'):
                    msg += '\nDependency output:\n{}'.format(dependency.failure_output_details())
                else:
                    msg += '\nDependency info:\n{}'.format(dependency)
            except Exception as e:
                msg += '\nFailed to obtain dependency stdout: {}'.format(e)

        msg += '\nTIME WHEN STATUS WAS FOUND: {} (UTC)\n'.format(datetime.utcnow())

        raise AssertionError(msg)

    def assert_successful(self, msg=None):
        """Assert the resource reached (and genuinely has) 'successful' status."""
        return self.assert_status('successful', msg=msg)
0707010000000F000081A400000000000000000000000166846B920000025E000000000000000000000000000000000000002B00000000awx-24.6.1/awxkit/api/mixins/has_survey.pyfrom awxkit.utils import random_title


class HasSurvey(object):
    """Mixin for pages whose underlying resource supports a survey spec."""

    def add_survey(self, spec=None, name=None, description=None, required=False, enabled=True):
        """POST a survey spec to the resource, toggling survey_enabled if needed.

        Defaults name/description to random titles and the spec to a single
        password question when none are supplied.
        """
        default_spec = [dict(required=required, question_name="What's the password?", variable="secret", type="password", default="foo")]
        payload = dict(
            name=name or 'Survey - {}'.format(random_title()),
            description=description or random_title(10),
            spec=spec or default_spec,
        )
        # Only PATCH the parent resource when the enabled flag actually changes.
        if enabled != self.survey_enabled:
            self.patch(survey_enabled=enabled)
        return self.related.survey_spec.post(payload).get()
07070100000010000081A400000000000000000000000166846B92000000BF000000000000000000000000000000000000002E00000000awx-24.6.1/awxkit/api/mixins/has_variables.pyimport yaml

from awxkit.utils import PseudoNamespace


class HasVariables(object):
    """Mixin exposing a page's YAML `variables` field as a namespace."""

    @property
    def variables(self):
        """Parse self.json.variables (YAML) into a PseudoNamespace."""
        parsed = yaml.safe_load(self.json.variables)
        return PseudoNamespace(parsed)
07070100000011000041ED00000000000000000000000266846B9200000000000000000000000000000000000000000000001C00000000awx-24.6.1/awxkit/api/pages07070100000012000081A400000000000000000000000166846B9200000641000000000000000000000000000000000000002800000000awx-24.6.1/awxkit/api/pages/__init__.py# Order matters
from .page import *  # NOQA
from .base import *  # NOQA
from .bulk import *  # NOQA
from .access_list import *  # NOQA
from .api import *  # NOQA
from .authtoken import *  # NOQA
from .roles import *  # NOQA
from .organizations import *  # NOQA
from .notifications import *  # NOQA
from .notification_templates import *  # NOQA
from .users import *  # NOQA
from .applications import *  # NOQA
from .teams import *  # NOQA
from .credentials import *  # NOQA
from .unified_jobs import *  # NOQA
from .unified_job_templates import *  # NOQA
from .execution_environments import *  # NOQA
from .projects import *  # NOQA
from .inventory import *  # NOQA
from .system_job_templates import *  # NOQA
from .job_templates import *  # NOQA
from .jobs import *  # NOQA
from .survey_spec import *  # NOQA
from .system_jobs import *  # NOQA
from .config import *  # NOQA
from .ping import *  # NOQA
from .dashboard import *  # NOQA
from .activity_stream import *  # NOQA
from .schedules import *  # NOQA
from .ad_hoc_commands import *  # NOQA
from .labels import *  # NOQA
from .workflow_job_templates import *  # NOQA
from .workflow_job_template_nodes import *  # NOQA
from .workflow_jobs import *  # NOQA
from .workflow_job_nodes import *  # NOQA
from .workflow_approvals import *  # NOQA
from .settings import *  # NOQA
from .instances import *  # NOQA
from .instance_groups import *  # NOQA
from .credential_input_sources import *  # NOQA
from .metrics import *  # NOQA
from .subscriptions import *  # NOQA
from .workflow_approval_templates import *  # NOQA
from .host_metrics import *  # NOQA
07070100000013000081A400000000000000000000000166846B92000001FE000000000000000000000000000000000000002B00000000awx-24.6.1/awxkit/api/pages/access_list.pyfrom awxkit.api.resources import resources
from . import users
from . import page


class AccessList(page.PageList, users.User):
    """Paginated list of users with access to a resource."""


page.register_page(
    [
        resources.organization_access_list,
        resources.user_access_list,
        resources.inventory_access_list,
        resources.group_access_list,
        resources.credential_access_list,
        resources.project_access_list,
        resources.job_template_access_list,
        resources.team_access_list,
    ],
    AccessList,
)
07070100000014000081A400000000000000000000000166846B920000015C000000000000000000000000000000000000002F00000000awx-24.6.1/awxkit/api/pages/activity_stream.pyfrom awxkit.api.resources import resources
from . import base
from . import page


class ActivityStream(base.Base):
    """Page for a single activity stream entry."""


page.register_page(resources.activity, ActivityStream)


class ActivityStreams(page.PageList, ActivityStream):
    """Paginated list of activity stream entries."""


page.register_page([resources.activity_stream, resources.object_activity_stream], ActivityStreams)
07070100000015000081A400000000000000000000000166846B9200000A25000000000000000000000000000000000000002F00000000awx-24.6.1/awxkit/api/pages/ad_hoc_commands.pyfrom awxkit.utils import update_payload, PseudoNamespace
from awxkit.api.pages import Inventory, Credential
from awxkit.api.mixins import HasCreate, DSAdapter
from awxkit.utils import not_provided as np
from awxkit.api.resources import resources

from .jobs import UnifiedJob
from . import page


class AdHocCommand(HasCreate, UnifiedJob):
    """Page for an ad hoc command, with helpers to create and relaunch one."""

    dependencies = [Inventory, Credential]

    def relaunch(self, payload=None):
        """Relaunch the command using the related->relaunch endpoint.

        Args:
        ----
            payload (dict): optional POST body for the relaunch request.

        Returns the page for the newly launched command.
        """
        # Default to None instead of {} so one mutable dict is not shared
        # across calls (mutable-default-argument pitfall).
        if payload is None:
            payload = {}
        # navigate to relaunch_pg
        relaunch_pg = self.get_related('relaunch')

        # relaunch the command
        result = relaunch_pg.post(payload)

        # return the corresponding command_pg
        return self.walk(result.url)

    def payload(self, inventory, credential, module_name='ping', **kwargs):
        """Build the POST payload from the required resources plus optional fields."""
        payload = PseudoNamespace(inventory=inventory.id, credential=credential.id, module_name=module_name)

        optional_fields = ('diff_mode', 'extra_vars', 'module_args', 'job_type', 'limit', 'forks', 'verbosity')
        return update_payload(payload, optional_fields, kwargs)

    def create_payload(self, module_name='ping', module_args=np, job_type=np, limit=np, verbosity=np, inventory=Inventory, credential=Credential, **kwargs):
        """Create/refresh dependencies, then build a payload with a DS adapter attached."""
        self.create_and_update_dependencies(inventory, credential)

        payload = self.payload(
            module_name=module_name,
            module_args=module_args,
            job_type=job_type,
            limit=limit,
            verbosity=verbosity,
            inventory=self.ds.inventory,
            credential=self.ds.credential,
            **kwargs
        )
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload

    def create(self, module_name='ping', module_args=np, job_type=np, limit=np, verbosity=np, inventory=Inventory, credential=Credential, **kwargs):
        """POST a new ad hoc command and update this page's identity with the result."""
        payload = self.create_payload(
            module_name=module_name,
            module_args=module_args,
            job_type=job_type,
            limit=limit,
            verbosity=verbosity,
            inventory=inventory,
            credential=credential,
            **kwargs
        )
        return self.update_identity(AdHocCommands(self.connection).post(payload))


page.register_page([resources.ad_hoc_command], AdHocCommand)


class AdHocCommands(page.PageList, AdHocCommand):
    """Paginated list of AdHocCommand pages."""


page.register_page(
    [resources.ad_hoc_commands, resources.inventory_related_ad_hoc_commands, resources.group_related_ad_hoc_commands, resources.host_related_ad_hoc_commands],
    AdHocCommands,
)
07070100000016000081A400000000000000000000000166846B920000478A000000000000000000000000000000000000002300000000awx-24.6.1/awxkit/api/pages/api.pyfrom collections import defaultdict
import itertools
import logging

from awxkit.api.resources import resources
import awxkit.exceptions as exc
from . import base
from . import page
from .. import utils
from ..mixins import has_create

log = logging.getLogger(__name__)


# Top-level list endpoints that the export machinery serializes.
EXPORTABLE_RESOURCES = [
    'users',
    'organizations',
    'teams',
    'credential_types',
    'credentials',
    'notification_templates',
    'projects',
    'inventory',
    'inventory_sources',
    'job_templates',
    'workflow_job_templates',
    'execution_environments',
    'applications',
    'schedules',
]


# Related page-list class names whose members are exported as natural-key references.
EXPORTABLE_RELATIONS = ['Roles', 'NotificationTemplates', 'WorkflowJobTemplateNodes', 'Credentials', 'Hosts', 'Groups', 'ExecutionEnvironments', 'Schedules']


# These are special-case related objects, where we want only in this
# case to export a full object instead of a natural key reference.
# Tuples are (parent item class name, related item class name).
DEPENDENT_EXPORT = [
    ('JobTemplate', 'Label'),
    ('JobTemplate', 'SurveySpec'),
    ('JobTemplate', 'Schedule'),
    ('WorkflowJobTemplate', 'Label'),
    ('WorkflowJobTemplate', 'SurveySpec'),
    ('WorkflowJobTemplate', 'Schedule'),
    ('WorkflowJobTemplate', 'WorkflowJobTemplateNode'),
    ('InventorySource', 'Schedule'),
    ('Inventory', 'Group'),
    ('Inventory', 'Host'),
    ('Inventory', 'Label'),
    ('WorkflowJobTemplateNode', 'WorkflowApprovalTemplate'),
]


# This is for related views where it is unneeded to export anything,
# such as because it is a calculated subset of objects covered by a
# different view.  Tuples are (parent item class name, related key).
DEPENDENT_NONEXPORT = [
    ('InventorySource', 'groups'),
    ('InventorySource', 'hosts'),
    ('Inventory', 'root_groups'),
    ('Group', 'all_hosts'),
    ('Group', 'potential_children'),
    ('Host', 'all_groups'),
    ('WorkflowJobTemplateNode', 'create_approval_template'),
]


class Api(base.Base):
    """Page for the /api/ root endpoint."""


page.register_page(resources.api, Api)


class ApiV2(base.Base):
    """Page for the /api/v2/ root; implements the asset export/import used by the CLI."""

    # Export methods

    def _export(self, _page, post_fields):
        """Serialize one object page into an importable dict of POST-able fields.

        Returns None (and may set self._has_error) when the object is managed,
        unreadable, or a required foreign key cannot be resolved.
        """
        # Drop any (credential_type) assets that are being managed by the instance.
        if _page.json.get('managed'):
            log.debug("%s is managed, skipping.", _page.endpoint)
            return None
        if post_fields is None:  # Deprecated endpoint or insufficient permissions
            log.error("Object export failed: %s", _page.endpoint)
            self._has_error = True
            return None

        # Note: doing _page[key] automatically parses json blob strings, which can be a problem.
        fields = {key: _page.json[key] for key in post_fields if key in _page.json and key not in _page.related and key != 'id'}

        # iterate over direct fields in the object
        for key in post_fields:
            if key in _page.related:
                related = _page.related[key]
            else:
                if post_fields[key]['type'] == 'id' and _page.json.get(key) is not None:
                    log.warning("Related link %r missing from %s, attempting to reconstruct endpoint.", key, _page.endpoint)
                    res_pattern, resource = getattr(resources, key, None), None
                    if res_pattern:
                        try:
                            # e.g. '/api/v2/<top_level>/...' -> take the top-level collection name
                            top_level = res_pattern.split('/')[3]
                            resource = getattr(self, top_level, None)
                        except IndexError:
                            pass
                    if resource is None:
                        log.error("Unable to infer endpoint for %r on %s.", key, _page.endpoint)
                        self._has_error = True
                        continue
                    related = self._filtered_list(resource, _page.json[key]).results[0]
                else:
                    continue

            rel_endpoint = self._cache.get_page(related)
            if rel_endpoint is None:  # This foreign key is unreadable
                if post_fields[key].get('required'):
                    log.error("Foreign key %r export failed for object %s.", key, _page.endpoint)
                    self._has_error = True
                    return None
                log.warning("Foreign key %r export failed for object %s, setting to null", key, _page.endpoint)
                continue

            # Workflow approval templates have a special creation endpoint,
            # therefore we are skipping the export via natural key.
            if rel_endpoint.__item_class__.__name__ == 'WorkflowApprovalTemplate':
                continue

            rel_natural_key = rel_endpoint.get_natural_key(self._cache)
            if rel_natural_key is None:
                log.error("Unable to construct a natural key for foreign key %r of object %s.", key, _page.endpoint)
                self._has_error = True
                return None  # This foreign key has unresolvable dependencies
            fields[key] = rel_natural_key

        # iterate over related fields in the object
        related = {}
        for key, rel_endpoint in _page.related.items():
            # skip if no endpoint for this related object
            if not rel_endpoint:
                continue

            rel = rel_endpoint._create()

            if rel.__item_class__.__name__ != 'WorkflowApprovalTemplate':
                if key in post_fields:
                    continue

            is_relation = rel.__class__.__name__ in EXPORTABLE_RELATIONS

            # determine if the parent object and the related object that we are processing through are related
            # if this tuple is in the DEPENDENT_EXPORT then we output the full object
            # else we output the natural key
            is_dependent = (_page.__item_class__.__name__, rel.__item_class__.__name__) in DEPENDENT_EXPORT

            is_blocked = (_page.__item_class__.__name__, key) in DEPENDENT_NONEXPORT
            if is_blocked or not (is_relation or is_dependent):
                continue

            # if the rel is of WorkflowApprovalTemplate type, get rel_post_fields from create_approval_template endpoint
            rel_option_endpoint = rel_endpoint
            export_key = key
            if rel.__item_class__.__name__ == 'WorkflowApprovalTemplate':
                export_key = 'create_approval_template'
                rel_option_endpoint = _page.related.get('create_approval_template')

            rel_post_fields = utils.get_post_fields(rel_option_endpoint, self._cache)
            if rel_post_fields is None:
                log.debug("%s is a read-only endpoint.", rel_endpoint)
                continue
            is_attach = 'id' in rel_post_fields  # This is not a create-only endpoint.

            if is_dependent:
                by_natural_key = False
            elif is_relation and is_attach and not is_blocked:
                by_natural_key = True
            else:
                continue

            rel_page = self._cache.get_page(rel_endpoint)

            if rel_page is None:
                continue

            if 'results' in rel_page:
                results = (x.get_natural_key(self._cache) if by_natural_key else self._export(x, rel_post_fields) for x in rel_page.results)
                related[export_key] = [x for x in results if x is not None]
            elif rel.__item_class__.__name__ == 'WorkflowApprovalTemplate':
                related[export_key] = self._export(rel_page, rel_post_fields)
            else:
                related[export_key] = rel_page.json

        if related:
            fields['related'] = related

        if _page.__item_class__.__name__ != 'WorkflowApprovalTemplate':
            natural_key = _page.get_natural_key(self._cache)
            if natural_key is None:
                log.error("Unable to construct a natural key for object %s.", _page.endpoint)
                self._has_error = True
                return None
            fields['natural_key'] = natural_key

        return fields

    def _export_list(self, endpoint):
        """Export every result of a list endpoint, dropping objects that failed to export."""
        post_fields = utils.get_post_fields(endpoint, self._cache)
        if post_fields is None:
            return None

        if isinstance(endpoint, page.TentativePage):
            endpoint = self._cache.get_page(endpoint)
            if endpoint is None:
                return None

        assets = (self._export(asset, post_fields) for asset in endpoint.results)
        return [asset for asset in assets if asset is not None]

    def _check_for_int(self, value):
        """Return True when value is an int or a string of decimal digits."""
        return isinstance(value, int) or (isinstance(value, str) and value.isdecimal())

    def _filtered_list(self, endpoint, value):
        """GET the endpoint filtered by id (for integer-ish values) or by its name-like search field."""
        if isinstance(value, list) and len(value) == 1:
            value = value[0]
        if self._check_for_int(value):
            return endpoint.get(id=int(value))

        options = self._cache.get_options(endpoint)
        identifier = next(field for field in options['search_fields'] if field in ('name', 'username', 'hostname'))
        if isinstance(value, list):
            # 'or__' prefix lets a list of values match any of them
            if all(self._check_for_int(item) for item in value):
                identifier = 'or__id'
            else:
                identifier = 'or__' + identifier

        return endpoint.get(**{identifier: value}, all_pages=True)

    def export_assets(self, **kwargs):
        """Export the requested resources; exports all EXPORTABLE_RESOURCES when none are named."""
        self._cache = page.PageCache(self.connection)

        # If no resource kwargs are explicitly used, export everything.
        all_resources = all(kwargs.get(resource) is None for resource in EXPORTABLE_RESOURCES)

        data = {}
        for resource in EXPORTABLE_RESOURCES:
            value = kwargs.get(resource)
            if all_resources or value is not None:
                endpoint = getattr(self, resource)
                if value:
                    endpoint = self._filtered_list(endpoint, value)
                data[resource] = self._export_list(endpoint)

        return data

    # Import methods

    def _dependent_resources(self):
        """Yield importable resource names in dependency (creation) order."""
        page_resource = {}
        for resource in self.json:
            # The /api/v2/constructed_inventories endpoint is for the UI but will register as an Inventory endpoint
            # We want to map the type to /api/v2/inventories/ which works for constructed too
            if resource == 'constructed_inventory':
                continue
            page_resource[getattr(self, resource)._create().__item_class__] = resource
        data_pages = [getattr(self, resource)._create().__item_class__ for resource in EXPORTABLE_RESOURCES]

        for page_cls in itertools.chain(*has_create.page_creation_order(*data_pages)):
            yield page_resource[page_cls]

    def _import_list(self, endpoint, assets):
        """Create or update each asset on the endpoint; return True if anything changed.

        Related objects and roles are queued on self._related / self._roles
        for later assignment rather than handled inline.
        """
        log.debug("_import_list -- endpoint: %s, assets: %s", endpoint.endpoint, repr(assets))
        post_fields = utils.get_post_fields(endpoint, self._cache)

        changed = False

        for asset in assets:
            post_data = {}
            for field, value in asset.items():
                if field not in post_fields:
                    continue
                if post_fields[field]['type'] in ('id', 'integer') and isinstance(value, dict):
                    # Foreign keys were exported as natural-key dicts; resolve them back to ids.
                    _page = self._cache.get_by_natural_key(value)
                    post_data[field] = _page['id'] if _page is not None else None
                else:
                    post_data[field] = utils.remove_encrypted(value)

            _page = self._cache.get_by_natural_key(asset['natural_key'])
            try:
                if _page is None:
                    if asset['natural_key']['type'] == 'user':
                        # We should only impose a default password if the resource doesn't exist.
                        post_data.setdefault('password', 'abc123')
                    try:
                        _page = endpoint.post(post_data)
                    except exc.NoContent:
                        # desired exception under some circumstances, e.g. labels that already exist
                        if _page is None and 'name' in post_data:
                            results = endpoint.get(all_pages=True).results
                            for item in results:
                                if item['name'] == post_data['name']:
                                    _page = item.get()
                                    break
                            else:
                                raise
                    changed = True
                    if asset['natural_key']['type'] == 'project':
                        # When creating a project, we need to wait for its
                        # first project update to finish so that associated
                        # JTs have valid options for playbook names
                        try:
                            _page.wait_until_completed(timeout=300)
                        except AssertionError:
                            # If the project update times out, try to
                            # carry on in the hopes that it will
                            # finish before it is needed.
                            pass
                else:
                    # If we are an existing project and our scm_type is not changing don't try and import the local_path setting
                    if asset['natural_key']['type'] == 'project' and 'local_path' in post_data and _page['scm_type'] == post_data['scm_type']:
                        del post_data['local_path']

                    if asset['natural_key']['type'] == 'user':
                        _page = _page.patch(**post_data)
                    else:
                        _page = _page.put(post_data)
                    changed = True
            except (exc.Common, AssertionError) as e:
                identifier = asset.get("name", None) or asset.get("username", None) or asset.get("hostname", None)
                log.error(f'{endpoint} "{identifier}": {e}.')
                self._has_error = True
                log.debug("post_data: %r", post_data)
                continue

            self._cache.set_page(_page)

            # Queue up everything related to be either created or assigned.
            for name, S in asset.get('related', {}).items():
                if not S:
                    continue
                if name == 'roles':
                    indexed_roles = defaultdict(list)
                    for role in S:
                        if role.get('content_object') is None:
                            continue
                        indexed_roles[role['content_object']['type']].append(role)
                    self._roles.append((_page, indexed_roles))
                else:
                    self._related.append((_page, name, S))

        return changed

    def _assign_role(self, endpoint, role):
        """Associate a single role (resolved by its content object's natural key) via the endpoint."""
        if 'content_object' not in role:
            return
        obj_page = self._cache.get_by_natural_key(role['content_object'])
        if obj_page is None:
            return
        role_page = obj_page.get_object_role(role['name'], by_name=True)
        try:
            endpoint.post({'id': role_page['id']})
        except exc.NoContent:  # desired exception on successful (dis)association
            pass
        except exc.Common as e:
            log.error("Role assignment failed: %s.", e)
            self._has_error = True
            log.debug("post_data: %r", {'id': role_page['id']})

    def _assign_membership(self):
        """Assign queued organization and team membership roles."""
        for _page, indexed_roles in self._roles:
            role_endpoint = _page.json['related']['roles']
            for content_type in ('organization', 'team'):
                for role in indexed_roles.get(content_type, []):
                    self._assign_role(role_endpoint, role)

    def _assign_roles(self):
        """Assign all queued roles other than organization/team membership."""
        for _page, indexed_roles in self._roles:
            role_endpoint = _page.json['related']['roles']
            for content_type in set(indexed_roles) - {'organization', 'team'}:
                for role in indexed_roles.get(content_type, []):
                    self._assign_role(role_endpoint, role)

    def _assign_related(self):
        """Create or attach all related objects queued by _import_list."""
        for _page, name, related_set in self._related:
            endpoint = _page.related[name]
            if isinstance(related_set, dict):  # Related that are just json blobs, e.g. survey_spec
                endpoint.post(related_set)
                continue

            if 'natural_key' not in related_set[0]:  # It is an attach set
                # Try to impedance match
                related = endpoint.get(all_pages=True)
                existing = {rel['id'] for rel in related.results}
                for item in related_set:
                    rel_page = self._cache.get_by_natural_key(item)
                    if rel_page is None:
                        log.error("Could not find matching object in Tower for imported relation, item: %r", item)
                        self._has_error = True
                        continue
                    if rel_page['id'] in existing:
                        continue
                    try:
                        post_data = {'id': rel_page['id']}
                        endpoint.post(post_data)
                        # NOTE(review): a plain 2xx response here (rather than the
                        # expected NoContent) is logged as an error and flags the
                        # import — confirm this is intentional, not leftover debugging.
                        log.error("endpoint: %s, id: %s", endpoint.endpoint, rel_page['id'])
                        self._has_error = True
                    except exc.NoContent:  # desired exception on successful (dis)association
                        pass
                    except exc.Common as e:
                        log.error("Object association failed: %s.", e)
                        self._has_error = True
                        log.debug("post_data: %r", post_data)
            else:  # It is a create set
                self._cache.get_page(endpoint)
                self._import_list(endpoint, related_set)

            # FIXME: deal with pruning existing relations that do not match the import set

    def import_assets(self, data):
        """Import exported asset data in dependency order; return True if anything changed."""
        self._cache = page.PageCache(self.connection)
        self._related = []
        self._roles = []

        changed = False

        for resource in self._dependent_resources():
            endpoint = getattr(self, resource)

            imported = self._import_list(endpoint, data.get(resource) or [])
            changed = changed or imported

        # Related objects first, then memberships, then the remaining roles.
        self._assign_related()
        self._assign_membership()
        self._assign_roles()

        return changed


page.register_page(resources.v2, ApiV2)
07070100000017000081A400000000000000000000000166846B9200000CCB000000000000000000000000000000000000002C00000000awx-24.6.1/awxkit/api/pages/applications.pyfrom awxkit.utils import random_title, update_payload, filter_by_class, PseudoNamespace
from awxkit.api.resources import resources
from awxkit.api.pages import Organization
from awxkit.api.mixins import HasCreate, DSAdapter

from . import page
from . import base


class OAuth2Application(HasCreate, base.Base):
    """Page for an OAuth2 application resource."""

    dependencies = [Organization]
    NATURAL_KEY = ('organization', 'name')

    def payload(self, **kwargs):
        """Build a POST payload, defaulting name/description and grant settings."""
        fields = PseudoNamespace(
            name=kwargs.get('name') or 'OAuth2Application - {}'.format(random_title()),
            description=kwargs.get('description') or random_title(10),
            client_type=kwargs.get('client_type', 'public'),
            authorization_grant_type=kwargs.get('authorization_grant_type', 'password'),
        )
        org = kwargs.get('organization')
        if org:
            fields.organization = org.id

        update_payload(fields, ('redirect_uris', 'skip_authorization'), kwargs)
        return fields

    def create_payload(self, organization=Organization, **kwargs):
        """Resolve the organization dependency and build a payload with a DS adapter."""
        self.create_and_update_dependencies(*filter_by_class((organization, Organization)))
        org = self.ds.organization if organization else None
        fields = self.payload(organization=org, **kwargs)
        fields.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return fields

    def create(self, organization=Organization, **kwargs):
        """POST a new application and update this page's identity with the result."""
        fields = self.create_payload(organization=organization, **kwargs)
        return self.update_identity(OAuth2Applications(self.connection).post(fields))


page.register_page((resources.application, (resources.applications, 'post')), OAuth2Application)


class OAuth2Applications(page.PageList, OAuth2Application):
    """Paginated list of OAuth2 applications."""


page.register_page(resources.applications, OAuth2Applications)


class OAuth2AccessToken(HasCreate, base.Base):
    """Page for an OAuth2 access token resource."""

    optional_dependencies = [OAuth2Application]

    def payload(self, **kwargs):
        """Build a POST payload with a default description and 'write' scope."""
        fields = PseudoNamespace(description=kwargs.get('description') or random_title(10), scope=kwargs.get('scope', 'write'))

        app = kwargs.get('oauth_2_application')
        if app:
            fields.application = app.id

        update_payload(fields, ('expires',), kwargs)
        return fields

    def create_payload(self, oauth_2_application=None, **kwargs):
        """Resolve the optional application dependency and build a payload with a DS adapter."""
        self.create_and_update_dependencies(*filter_by_class((oauth_2_application, OAuth2Application)))
        app = self.ds.oauth_2_application if oauth_2_application else None
        fields = self.payload(oauth_2_application=app, **kwargs)
        fields.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return fields

    def create(self, oauth_2_application=None, **kwargs):
        """POST a new access token and update this page's identity with the result."""
        fields = self.create_payload(oauth_2_application=oauth_2_application, **kwargs)
        return self.update_identity(OAuth2AccessTokens(self.connection).post(fields))


page.register_page((resources.token, (resources.tokens, 'post')), OAuth2AccessToken)


class OAuth2AccessTokens(page.PageList, OAuth2AccessToken):
    """Paginated list of OAuth2 access tokens."""


page.register_page(resources.tokens, OAuth2AccessTokens)
07070100000018000081A400000000000000000000000166846B92000000AD000000000000000000000000000000000000002900000000awx-24.6.1/awxkit/api/pages/authtoken.pyfrom awxkit.api.resources import resources
from . import base
from . import page


class AuthToken(base.Base):
    """Page for the auth token endpoint."""


page.register_page(resources.authtoken, AuthToken)
07070100000019000081A400000000000000000000000166846B92000023F8000000000000000000000000000000000000002400000000awx-24.6.1/awxkit/api/pages/base.pyimport collections
import logging

from requests.auth import HTTPBasicAuth

from awxkit.api.pages import Page, get_registered_page, exception_from_status_code
from awxkit.config import config
from awxkit.api.resources import resources
import awxkit.exceptions as exc


log = logging.getLogger(__name__)


class Base(Page):
    """Common base class for awxkit API page objects.

    Extends the generic ``Page`` with deletion/cleanup helpers, object-role
    (RBAC) management, and authentication helpers (legacy auth tokens,
    OAuth2 tokens, and session login).
    """

    def silent_delete(self):
        """Delete the object. If it's already deleted (or deletion is blocked
        for a known, benign reason), ignore the error."""
        try:
            if not config.prevent_teardown:
                return self.delete()
        except (exc.NoContent, exc.NotFound, exc.Forbidden):
            pass
        except (exc.BadRequest, exc.Conflict) as e:
            # NOTE(fix): these used to be two independent `if` statements, so a
            # message containing only 'Job has not finished processing events'
            # still fell through to the second check's `else` and was re-raised.
            # `elif` makes both messages non-fatal, as intended.
            if 'Job has not finished processing events' in e.msg:
                pass
            elif 'Resource is being used' in e.msg:
                pass
            else:
                raise e

    def get_object_role(self, role, by_name=False):
        """Lookup and return a related object role by its role field or name.

        Args:
        ----
            role (str): The role's `role_field` or name
            by_name (bool): Whether to retrieve the role by its name field (default: False)

        Examples:
        --------
            >>> # get the description of the Use role for an inventory
            >>> inventory = v2.inventory.create()
            >>> use_role_1 = inventory.get_object_role('use_role')
            >>> use_role_2 = inventory.get_object_role('use', True)
            >>> use_role_1.description
            u'Can use the inventory in a job template'
            >>> use_role_1.json == use_role_2.json
            True

        """
        if by_name:
            # Case-insensitive match against the role's display name.
            for obj_role in self.related.object_roles.get().results:
                if obj_role.name.lower() == role.lower():
                    return obj_role

            raise Exception("Role '{0}' not found for {1.endpoint}".format(role, self))

        object_roles = self.get_related('object_roles', role_field=role)
        if object_roles.count != 1:
            raise Exception("No role with role_field '{0}' found.".format(role))
        return object_roles.results[0]

    def set_object_roles(self, agent, *role_names, **kw):
        """Associate related object roles to a User or Team by role names

        Args:
        ----
            agent (User or Team): The agent the role is to be (dis)associated with.
            *role_names (str): an arbitrary number of role names ('Admin', 'Execute', 'Read', etc.)
            **kw:
                endpoint (str): The endpoint to use when making the object role association
                - 'related_users': use the related users endpoint of the role (default)
                - 'related_roles': use the related roles endpoint of the user
                disassociate (bool): Indicates whether to disassociate the role with the user (default: False)

        Examples:
        --------
            # create a user that is an organization admin with use and
            # update roles on an inventory
            >>> organization = v2.organization.create()
            >>> inventory = v2.inventory.create()
            >>> user = v2.user.create()
            >>> organization.set_object_roles(user, 'admin')
            >>> inventory.set_object_roles(user, 'use', 'update')

        """
        from awxkit.api.pages import User, Team

        endpoint = kw.get('endpoint', 'related_users')
        disassociate = kw.get('disassociate', False)

        if not any([isinstance(agent, agent_type) for agent_type in (User, Team)]):
            raise ValueError('Invalid agent type {0.__class__.__name__}'.format(agent))

        if endpoint not in ('related_users', 'related_roles'):
            raise ValueError('Invalid role association endpoint: {0}'.format(endpoint))

        object_roles = [self.get_object_role(name, by_name=True) for name in role_names]
        payload = {}
        for role in object_roles:
            if endpoint == 'related_users':
                # POST the agent's id to the role's users/teams sub-endpoint.
                payload['id'] = agent.id
                if isinstance(agent, User):
                    endpoint_model = role.related.users
                elif isinstance(agent, Team):
                    endpoint_model = role.related.teams
                else:
                    raise RuntimeError("Unhandled type for agent: {0.__class__.__name__}.".format(agent))
            elif endpoint == 'related_roles':
                # POST the role's id to the agent's roles sub-endpoint.
                payload['id'] = role.id
                endpoint_model = agent.related.roles
            else:
                raise RuntimeError('Invalid role association endpoint')

            if disassociate:
                payload['disassociate'] = True

            try:
                endpoint_model.post(payload)
            except exc.NoContent:  # desired exception on successful (dis)association
                pass
        return True

    @property
    def object_roles(self):
        """Yield each of this object's related object roles as a Role page."""
        from awxkit.api.pages import Roles, Role

        url = self.get().json.related.object_roles
        for obj_role in Roles(self.connection, endpoint=url).get().json.results:
            yield Role(self.connection, endpoint=obj_role.url).get()

    def get_authtoken(self, username='', password=''):
        """POST credentials (falling back to the configured defaults) to the
        authtoken endpoint and return the token string."""
        default_cred = config.credentials.default
        payload = dict(username=username or default_cred.username, password=password or default_cred.password)
        auth_url = resources.authtoken
        return get_registered_page(auth_url)(self.connection, endpoint=auth_url).post(payload).token

    def load_authtoken(self, username='', password=''):
        """Log the connection in with a freshly obtained auth token."""
        self.connection.login(token=self.get_authtoken(username, password))
        return self

    load_default_authtoken = load_authtoken

    def get_oauth2_token(self, username='', password='', client_id=None, description='AWX CLI', client_secret=None, scope='write'):
        """Obtain an OAuth2 token.

        With client_id (+ optional client_secret) a password-grant token is
        requested from the OAuth2 token endpoint; otherwise a personal access
        token is created via the user's personal_tokens endpoint.
        """
        default_cred = config.credentials.default
        username = username or default_cred.username
        password = password or default_cred.password
        # Minimal stand-in for a requests.Request so HTTPBasicAuth can attach
        # its Authorization header.
        req = collections.namedtuple('req', 'headers')({})
        if client_id and client_secret:
            HTTPBasicAuth(client_id, client_secret)(req)
            req.headers['Content-Type'] = 'application/x-www-form-urlencoded'
            resp = self.connection.post(
                f"{config.api_base_path}o/token/",
                data={"grant_type": "password", "username": username, "password": password, "scope": scope},
                headers=req.headers,
            )
        elif client_id:
            req.headers['Content-Type'] = 'application/x-www-form-urlencoded'
            resp = self.connection.post(
                f"{config.api_base_path}o/token/",
                data={"grant_type": "password", "username": username, "password": password, "client_id": client_id, "scope": scope},
                headers=req.headers,
            )
        else:
            HTTPBasicAuth(username, password)(req)
            resp = self.connection.post(
                '{0}v2/users/{1}/personal_tokens/'.format(config.api_base_path, username),
                json={"description": description, "application": None, "scope": scope},
                headers=req.headers,
            )
        if resp.ok:
            result = resp.json()
            # OAuth2 grant responses use 'access_token'; personal tokens use 'token'.
            if client_id:
                return result.pop('access_token', None)
            else:
                return result.pop('token', None)
        else:
            raise exception_from_status_code(resp.status_code)

    def load_session(self, username='', password=''):
        """Log the connection in with a session, using configured defaults when
        credentials are not supplied."""
        default_cred = config.credentials.default
        self.connection.login(
            username=username or default_cred.username, password=password or default_cred.password, **self.connection.get_session_requirements()
        )
        return self

    def cleanup(self):
        """Delete this object, cancelling and waiting out blocking jobs first."""
        log.debug('{0.endpoint} cleaning up.'.format(self))
        return self._cleanup(self.delete)

    def silent_cleanup(self):
        """Like cleanup(), but swallow benign deletion errors."""
        log.debug('{0.endpoint} silently cleaning up.'.format(self))
        return self._cleanup(self.silent_delete)

    def _cleanup(self, delete_method):
        """Run delete_method; on 'running job' conflicts, cancel the blocking
        job(s), wait for completion, and retry once."""
        try:
            delete_method()
        except exc.Forbidden as e:
            if e.msg == {'detail': 'Cannot delete running job resource.'}:
                self.cancel()
                self.wait_until_completed(interval=1, timeout=30, since_job_created=False)
                delete_method()
            else:
                raise
        except exc.Conflict as e:
            conflict = e.msg.get('conflict', e.msg.get('error', ''))
            if "running jobs" in conflict:
                active_jobs = e.msg.get('active_jobs', [])  # [{type: id},], not page containing
                jobs = []
                for active_job in active_jobs:
                    job_type = active_job['type']
                    endpoint = '{}v2/{}s/{}/'.format(config.api_base_path, job_type, active_job['id'])
                    job = self.walk(endpoint)
                    jobs.append(job)
                    job.cancel()
                # Cancel all first, then wait, to let cancellations overlap.
                for job in jobs:
                    job.wait_until_completed(interval=1, timeout=30, since_job_created=False)
                delete_method()
            else:
                raise
0707010000001A000081A400000000000000000000000166846B92000002D3000000000000000000000000000000000000002400000000awx-24.6.1/awxkit/api/pages/bulk.pyfrom awxkit.api.resources import resources
from . import base
from . import page


class Bulk(base.Base):
    """Page for the top-level bulk actions endpoint."""

    def get(self, **query_parameters):
        """GET the bulk endpoint, explicitly requesting a JSON response."""
        response = self.connection.get(self.endpoint, query_parameters, headers={'Accept': 'application/json'})
        return self.page_identity(response)


page.register_page([resources.bulk, (resources.bulk, 'get')], Bulk)


class BulkJobLaunch(base.Base):
    """Page for the bulk job launch endpoint."""

    def post(self, payload=None):
        """POST the payload to the bulk job launch endpoint.

        Returns the launched job's page when the response contains a 'url',
        otherwise this page's identity.

        Fixed: `payload={}` was a mutable default argument; default to None
        and substitute a fresh dict per call. The response body is also
        parsed only once instead of twice.
        """
        result = self.connection.post(self.endpoint, payload if payload is not None else {})
        body = result.json()
        if 'url' in body:
            return self.walk(body['url'])
        else:
            return self.page_identity(result, request_json={})


page.register_page(resources.bulk_job_launch, BulkJobLaunch)
0707010000001B000081A400000000000000000000000166846B92000004B7000000000000000000000000000000000000002600000000awx-24.6.1/awxkit/api/pages/config.pyfrom awxkit.api.resources import resources
from . import base
from . import page


class Config(base.Base):
    """Page for the API config endpoint, with license-introspection helpers."""

    @property
    def is_aws_license(self):
        """Whether the license info marks this as an AWS-based install."""
        info = self.license_info
        if info.get('is_aws', False):
            return True
        return 'ami-id' in info or 'instance-id' in info

    @property
    def is_valid_license(self):
        """Whether a valid license key with an instance count is present."""
        valid_key = self.license_info.get('valid_key', False)
        return valid_key and 'instance_count' in self.license_info

    @property
    def is_trial_license(self):
        """Whether the license is both valid and a trial."""
        trial = self.license_info.get('trial', False)
        return self.is_valid_license and trial

    @property
    def is_awx_license(self):
        """Whether the license type is the open (AWX) license."""
        return 'open' == self.license_info.get('license_type', None)

    @property
    def is_enterprise_license(self):
        """Whether the license is a valid enterprise license."""
        if not self.is_valid_license:
            return self.is_valid_license
        return self.license_info.get('license_type', None) == 'enterprise'

    @property
    def features(self):
        """returns a list of enabled license features"""
        enabled = []
        for feature, flag in self.license_info.get('features', {}).items():
            if flag:
                enabled.append(feature)
        return enabled


class ConfigAttach(page.Page):
    def attach(self, **kwargs):
        """POST the given kwargs as JSON to the attach endpoint and return the response JSON."""
        return self.post(json=kwargs).json


# Register the config pages with their endpoints.
page.register_page(resources.config, Config)
page.register_page(resources.config_attach, ConfigAttach)
0707010000001C000081A400000000000000000000000166846B9200000196000000000000000000000000000000000000003800000000awx-24.6.1/awxkit/api/pages/credential_input_sources.pyfrom awxkit.api.resources import resources
from . import base
from . import page


class CredentialInputSource(base.Base):
    """Page for a single credential input source; no extra behavior beyond Base."""

    pass


page.register_page(resources.credential_input_source, CredentialInputSource)


class CredentialInputSources(page.PageList, CredentialInputSource):
    """List page for credential input sources."""

    pass


page.register_page([resources.credential_input_sources, resources.related_input_sources], CredentialInputSources)
0707010000001D000081A400000000000000000000000166846B9200002D05000000000000000000000000000000000000002B00000000awx-24.6.1/awxkit/api/pages/credentials.pyimport logging

import http.client as http

import awxkit.exceptions as exc
from awxkit.api.mixins import DSAdapter, HasCopy, HasCreate
from awxkit.api.pages import Organization, Team, User
from awxkit.api.resources import resources
from awxkit.config import config
from awxkit.utils import (
    PseudoNamespace,
    cloud_types,
    filter_by_class,
    not_provided,
    random_title,
    update_payload,
)

from . import base, page
from .page import exception_from_status_code

from urllib.parse import urljoin


log = logging.getLogger(__name__)


# Input field names that may appear in a credential's `inputs` payload.
# Credential.payload() fills these from kwargs/inputs or from the configured
# credential fixture (see get_payload_field_and_value_from_kwargs_or_config_cred).
credential_input_fields = (
    'authorize_password',
    'become_method',
    'become_password',
    'become_username',
    'client',
    'cloud_environment',
    'domain',
    'host',
    'password',
    'project_id',
    'project_name',
    'secret',
    'ssh_key_data',
    'ssh_key_unlock',
    'subscription',
    'tenant',
    'username',
    'vault_password',
    'vault_id',
    'gpg_public_key',
)


def generate_private_key():
    """Generate a fresh 4096-bit RSA private key and return it PEM-encoded
    (traditional OpenSSL format, unencrypted) as a str."""
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import serialization
    from cryptography.hazmat.primitives.asymmetric import rsa

    private_key = rsa.generate_private_key(public_exponent=65537, key_size=4096, backend=default_backend())
    pem_bytes = private_key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.TraditionalOpenSSL,
        encryption_algorithm=serialization.NoEncryption(),
    )
    return pem_bytes.decode('utf-8')


def config_cred_from_kind(kind):
    """Return the configured credential fixture for a credential kind.

    Falls back to an empty PseudoNamespace when no matching fixture is
    configured.
    """
    try:
        if kind == 'net':
            return config.credentials.network
        if kind in cloud_types:
            # azure_rm credentials live under the 'azure' config key.
            cloud_kind = 'azure' if kind == 'azure_rm' else kind
            return config.credentials.cloud[cloud_kind]
        return config.credentials[kind]
    except (KeyError, AttributeError):
        return PseudoNamespace()


# Maps a managed credential type's lowercased display name to the key used to
# look up its fixture in config.credentials.
credential_type_name_to_config_kind_map = {
    'amazon web services': 'aws',
    'container registry': 'registry',
    'ansible galaxy/automation hub api token': 'galaxy',
    'red hat ansible automation platform': 'controller',
    'google compute engine': 'gce',
    'insights': 'insights',
    'openshift or kubernetes api bearer token': 'kubernetes',
    'microsoft azure classic (deprecated)': 'azure_classic',
    'microsoft azure resource manager': 'azure_rm',
    'network': 'net',
    'openstack': 'OpenStack',
    'red hat virtualization': 'rhv',
    'red hat cloudforms': 'cloudforms',
    'red hat satellite 6': 'satellite6',
    'source control': 'scm',
    'machine': 'ssh',
    'vault': 'vault',
    'vmware vcenter': 'vmware',
    'gpg public key': 'gpg_public_key',
    'terraform backend configuration': 'terraform',
}

# Reverse mapping: config kind -> credential type display name.
config_kind_to_credential_type_name_map = {kind: name for name, kind in credential_type_name_to_config_kind_map.items()}


def kind_and_config_cred_from_credential_type(credential_type):
    """Map a CredentialType page to (kind, configured credential fixture).

    Returns ('', PseudoNamespace()) for unmanaged types, and whatever kind
    was resolved plus an empty fixture when the config lookup fails.
    """
    kind = ''

    if not credential_type.managed:
        return kind, PseudoNamespace()
    try:
        if credential_type.kind == 'net':
            kind = 'net'
            config_cred = config.credentials.network
        elif credential_type.kind == 'cloud':
            kind = credential_type_name_to_config_kind_map[credential_type.name.lower()]
            # azure_rm fixtures live under the 'azure' config key.
            lookup = 'azure' if kind == 'azure_rm' else kind
            config_cred = config.credentials.cloud[lookup]
        else:
            kind = credential_type.kind.lower()
            config_cred = config.credentials[kind]
    except (KeyError, AttributeError):
        return kind, PseudoNamespace()
    return kind, config_cred


def get_payload_field_and_value_from_kwargs_or_config_cred(field, kind, kwargs, config_cred):
    """Resolve a credential input field's payload name and value.

    kwargs take precedence over the configured credential fixture; the
    returned value is `not_provided` when neither supplies one. The returned
    field name is 'project' for project_id/project_name inputs.
    """
    # Translate the payload field onto the (sometimes different) config key.
    if field in ('project_id', 'project_name'):  # Needed to prevent Project kwarg collision
        config_field = 'project'
    elif field == 'subscription' and 'azure' in kind:
        config_field = 'subscription_id'
    elif field == 'username' and kind == 'azure_ad':
        config_field = 'ad_user'
    else:
        renamed = {'client': 'client_id', 'authorize_password': 'authorize'}
        config_field = renamed.get(field, field)

    value = kwargs.get(field, config_cred.get(config_field, not_provided))

    if field in ('project_id', 'project_name'):
        field = 'project'
    return field, value


class CredentialType(HasCreate, base.Base):
    """Page object for credential types."""

    NATURAL_KEY = ('name', 'kind')

    def silent_delete(self):
        """Delete only user-created credential types; managed ones are skipped."""
        if not self.managed:
            return super(CredentialType, self).silent_delete()

    def payload(self, kind='cloud', **kwargs):
        """Return a POST-able payload, generating name/description when absent."""
        name = kwargs.get('name') or 'CredentialType - {}'.format(random_title())
        description = kwargs.get('description') or random_title(10)
        payload = PseudoNamespace(name=name, description=description, kind=kind)
        update_payload(payload, ('inputs', 'injectors'), kwargs)
        return payload

    def create_payload(self, kind='cloud', **kwargs):
        """Like payload(), plus a dependency-store adapter."""
        payload = self.payload(kind=kind, **kwargs)
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload

    def create(self, kind='cloud', **kwargs):
        """POST a new credential type and adopt the created page's identity."""
        created = CredentialTypes(self.connection).post(self.create_payload(kind=kind, **kwargs))
        return self.update_identity(created)

    def test(self, data):
        """POST to the test/ sub-endpoint; raise the mapped exception on error."""
        response = self.connection.post(urljoin(str(self.url), 'test/'), data)
        exception = exception_from_status_code(response.status_code)
        exc_str = "%s (%s) received" % (http.responses[response.status_code], response.status_code)
        if exception:
            raise exception(exc_str, response.json())
        elif response.status_code == http.FORBIDDEN:
            raise exc.Forbidden(exc_str, response.json())
        return response


page.register_page([resources.credential_type, (resources.credential_types, 'post')], CredentialType)


class CredentialTypes(page.PageList, CredentialType):
    """List page for credential types."""

    pass


page.register_page(resources.credential_types, CredentialTypes)


class Credential(HasCopy, HasCreate, base.Base):
    """Page object for credentials (machine, cloud, vault, ...)."""

    dependencies = [CredentialType]
    optional_dependencies = [Organization, User, Team]
    NATURAL_KEY = ('organization', 'name', 'credential_type')

    def payload(self, credential_type, user=None, team=None, organization=None, inputs=None, **kwargs):
        """Return a POST-able payload for a credential owned by at least one
        of user/team/organization.

        Input values are filled from `inputs`/kwargs or from the configured
        credential fixture matching the credential type's kind.

        Raises:
            TypeError: when no owner (user/team/organization) is given.
        """
        if not any((user, team, organization)):
            raise TypeError('{0.__class__.__name__} requires user, team, and/or organization instances.'.format(self))

        if inputs is None:
            inputs = {}

        payload = PseudoNamespace(
            name=kwargs.get('name') or 'Credential - {}'.format(random_title()),
            description=kwargs.get('description') or random_title(10),
            credential_type=credential_type.id,
            inputs=inputs,
        )
        if user:
            payload.user = user.id
        if team:
            payload.team = team.id
        if organization:
            payload.organization = organization.id

        kind, config_cred = kind_and_config_cred_from_credential_type(credential_type)

        for field in credential_input_fields:
            # Prefer explicitly-passed inputs (falling back to kwargs) over
            # values from the configured credential fixture.
            field, value = get_payload_field_and_value_from_kwargs_or_config_cred(field, kind, inputs or kwargs, config_cred)
            if value != not_provided:
                payload.inputs[field] = value

        if kind == 'net':
            # Network credentials are implicitly authorized when an
            # authorize_password was supplied.
            payload.inputs.authorize = inputs.get('authorize', bool(inputs.get('authorize_password')))

        if kind in ('ssh', 'net') and 'ssh_key_data' not in payload.inputs:
            payload.inputs.ssh_key_data = inputs.get('ssh_key_data', generate_private_key())

        return payload

    def create_payload(self, credential_type=CredentialType, user=None, team=None, organization=Organization, inputs=None, **kwargs):
        """Resolve the credential type (pk id, class, or instance), create
        dependencies, and build the creation payload."""
        if isinstance(credential_type, int):
            # if an int was passed, it is assumed to be the pk id of a
            # credential type
            credential_type = CredentialTypes(self.connection).get(id=credential_type).results.pop()

        if credential_type == CredentialType:
            kind = kwargs.pop('kind', 'ssh')
            if kind in ('openstack', 'openstack_v3'):
                credential_type_name = 'OpenStack'
                if inputs is None:
                    if kind == 'openstack_v3':
                        inputs = config.credentials.cloud['openstack_v3']
                    else:
                        inputs = config.credentials.cloud['openstack']
            else:
                credential_type_name = config_kind_to_credential_type_name_map[kind]
            credential_type = CredentialTypes(self.connection).get(managed=True, name__icontains=credential_type_name).results.pop()

        credential_type, organization, user, team = filter_by_class((credential_type, CredentialType), (organization, Organization), (user, User), (team, Team))
        if not any((user, team, organization)):
            # Default to organization ownership when no owner was requested.
            organization = Organization
        self.create_and_update_dependencies(credential_type, organization, user, team)
        user = self.ds.user if user else None
        team = self.ds.team if team else None
        organization = self.ds.organization if organization else None

        payload = self.payload(self.ds.credential_type, user=user, team=team, organization=organization, inputs=inputs, **kwargs)
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload

    def create(self, credential_type=CredentialType, user=None, team=None, organization=None, inputs=None, **kwargs):
        """POST a new credential and adopt the created page's identity.

        Fixed: a misplaced parenthesis previously applied update_identity()
        to the un-posted Credentials list page
        (`update_identity(Credentials(...)).post(payload)`), unlike every
        sibling `create` implementation; the POST result is now passed to
        update_identity().
        """
        payload = self.create_payload(credential_type=credential_type, user=user, team=team, organization=organization, inputs=inputs, **kwargs)
        return self.update_identity(Credentials(self.connection).post(payload))

    def test(self, data):
        """POST to the test/ sub-endpoint; raise the mapped exception on error."""
        response = self.connection.post(urljoin(str(self.url), 'test/'), data)
        exception = exception_from_status_code(response.status_code)
        exc_str = "%s (%s) received" % (http.responses[response.status_code], response.status_code)
        if exception:
            raise exception(exc_str, response.json())
        elif response.status_code == http.FORBIDDEN:
            raise exc.Forbidden(exc_str, response.json())
        return response

    @property
    def expected_passwords_needed_to_start(self):
        """Return a list of expected passwords needed to start a job using this credential."""
        passwords = []
        for field in ('password', 'become_password', 'ssh_key_unlock', 'vault_password'):
            if getattr(self.inputs, field, None) == 'ASK':
                # The launch-time prompt for 'password' is named 'ssh_password'.
                if field == 'password':
                    passwords.append('ssh_password')
                else:
                    passwords.append(field)
        return passwords


# Register the detail endpoint and the POST responses of the endpoints that
# create credentials.
page.register_page(
    [resources.credential, (resources.credentials, 'post'), (resources.credential_copy, 'post'), (resources.organization_galaxy_credentials, 'post')],
    Credential,
)


class Credentials(page.PageList, Credential):
    """List page for credentials."""

    pass


page.register_page([resources.credentials, resources.related_credentials, resources.organization_galaxy_credentials], Credentials)


class CredentialCopy(base.Base):
    """Page for the credential copy endpoint; no extra behavior beyond Base."""

    pass


page.register_page(resources.credential_copy, CredentialCopy)
0707010000001E000081A400000000000000000000000166846B92000000AD000000000000000000000000000000000000002900000000awx-24.6.1/awxkit/api/pages/dashboard.pyfrom awxkit.api.resources import resources
from . import base
from . import page


class Dashboard(base.Base):
    """Page for the dashboard endpoint; no extra behavior beyond Base."""

    pass


page.register_page(resources.dashboard, Dashboard)
0707010000001F000081A400000000000000000000000166846B9200000944000000000000000000000000000000000000003600000000awx-24.6.1/awxkit/api/pages/execution_environments.pyimport logging

from awxkit.api.mixins import DSAdapter, HasCreate, HasCopy
from awxkit.api.pages import (
    Credential,
    Organization,
)
from awxkit.api.resources import resources
from awxkit.utils import random_title, PseudoNamespace, filter_by_class

from . import base
from . import page


log = logging.getLogger(__name__)


class ExecutionEnvironment(HasCreate, HasCopy, base.Base):
    """Page object for execution environments.

    Fields: name, image, organization, managed, credential, pull.
    """

    dependencies = [Organization, Credential]
    NATURAL_KEY = ('name',)

    def create(self, name='', image='quay.io/ansible/awx-ee:latest', organization=Organization, credential=None, pull='', **kwargs):
        """POST a new execution environment and adopt its identity.

        No credential is created by default.
        """
        payload = self.create_payload(name=name, image=image, organization=organization, credential=credential, pull=pull, **kwargs)
        return self.update_identity(ExecutionEnvironments(self.connection).post(payload))

    def create_payload(self, name='', organization=Organization, credential=None, **kwargs):
        """Resolve dependencies and build the creation payload."""
        self.create_and_update_dependencies(*filter_by_class((credential, Credential), (organization, Organization)))

        resolved_credential = self.ds.credential if credential else None
        resolved_organization = self.ds.organization if organization else None

        payload = self.payload(name=name, organization=resolved_organization, credential=resolved_credential, **kwargs)
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload

    def payload(self, name='', image=None, organization=None, credential=None, pull='', **kwargs):
        """Return a POST-able payload; generates a name and a placeholder image
        when they were not supplied."""
        return PseudoNamespace(
            name=name or "EE - {}".format(random_title()),
            image=image or "example.invalid/component:tagname",
            organization=organization.id if organization else None,
            credential=credential.id if credential else None,
            pull=pull,
            **kwargs
        )


# Register the detail endpoint and the POST responses of the creating endpoints.
page.register_page(
    [resources.execution_environment, (resources.execution_environments, 'post'), (resources.organization_execution_environments, 'post')], ExecutionEnvironment
)


class ExecutionEnvironments(page.PageList, ExecutionEnvironment):
    """List page for execution environments."""

    pass


page.register_page([resources.execution_environments, resources.organization_execution_environments], ExecutionEnvironments)
07070100000020000081A400000000000000000000000166846B92000001DF000000000000000000000000000000000000002C00000000awx-24.6.1/awxkit/api/pages/host_metrics.pyfrom awxkit.api.resources import resources
from . import base
from . import page


class HostMetric(base.Base):
    """Page for a single host metric record."""

    def get(self, **query_parameters):
        """GET this host metric, explicitly requesting a JSON response."""
        response = self.connection.get(self.endpoint, query_parameters, headers={'Accept': 'application/json'})
        return self.page_identity(response)


class HostMetrics(page.PageList, HostMetric):
    """List page for host metrics."""

    pass


# Register the detail and list endpoints.
page.register_page([resources.host_metric], HostMetric)

page.register_page([resources.host_metrics], HostMetrics)
07070100000021000081A400000000000000000000000166846B920000066F000000000000000000000000000000000000002F00000000awx-24.6.1/awxkit/api/pages/instance_groups.pyfrom contextlib import suppress

from awxkit.utils import PseudoNamespace, random_title, update_payload, set_payload_foreign_key_args
from awxkit.api.resources import resources
from awxkit.api.mixins import HasCreate
import awxkit.exceptions as exc
from . import base
from . import page


class InstanceGroup(HasCreate, base.Base):
    """Page object for instance groups, with instance (dis)association helpers."""

    def add_instance(self, instance):
        """Associate an instance with this group (a 204 No Content is expected)."""
        with suppress(exc.NoContent):
            self.related.instances.post(dict(id=instance.id))

    def remove_instance(self, instance):
        """Disassociate an instance from this group (a 204 No Content is expected)."""
        with suppress(exc.NoContent):
            self.related.instances.post(dict(id=instance.id, disassociate=True))

    def payload(self, **kwargs):
        """Return a POST-able payload, generating a name when absent."""
        payload = PseudoNamespace(name=kwargs.get('name') or 'Instance Group - {}'.format(random_title()))
        optional_fields = (
            'policy_instance_percentage',
            'policy_instance_minimum',
            'policy_instance_list',
            'is_container_group',
            'max_forks',
            'max_concurrent_jobs',
        )
        update_payload(payload, optional_fields, kwargs)

        set_payload_foreign_key_args(payload, ('credential',), kwargs)

        return payload

    def create_payload(self, name='', **kwargs):
        """Build the creation payload."""
        return self.payload(name=name, **kwargs)

    def create(self, name='', **kwargs):
        """POST a new instance group and adopt the created page's identity."""
        created = InstanceGroups(self.connection).post(self.create_payload(name=name, **kwargs))
        return self.update_identity(created)


page.register_page([resources.instance_group, (resources.instance_groups, 'post')], InstanceGroup)


class InstanceGroups(page.PageList, InstanceGroup):
    """List page for instance groups."""

    pass


page.register_page([resources.instance_groups, resources.related_instance_groups], InstanceGroups)
07070100000022000081A400000000000000000000000166846B92000002AC000000000000000000000000000000000000002900000000awx-24.6.1/awxkit/api/pages/instances.pyfrom awxkit.api.resources import resources
from . import base
from . import page


class Instance(base.Base):
    """Page for a single instance; no extra behavior beyond Base."""

    pass


page.register_page(resources.instance, Instance)


class Instances(page.PageList, Instance):
    """List page for instances."""

    pass


page.register_page([resources.instances, resources.related_instances, resources.instance_peers], Instances)


class InstanceInstallBundle(page.Page):
    """Page for an instance's install bundle download (binary content)."""

    def extract_data(self, response):
        # The actual content of this response will be in the full set
        # of bytes from response.content, which will be exposed via
        # the Page.bytes interface.
        return {}


page.register_page(resources.instance_install_bundle, InstanceInstallBundle)
07070100000023000081A400000000000000000000000166846B9200003E12000000000000000000000000000000000000002900000000awx-24.6.1/awxkit/api/pages/inventory.pyfrom contextlib import suppress
import logging
import json

from awxkit.api.pages import Credential, Organization, Project, UnifiedJob, UnifiedJobTemplate
from awxkit.utils import filter_by_class, random_title, update_payload, not_provided, PseudoNamespace, poll_until
from awxkit.api.mixins import DSAdapter, HasCreate, HasInstanceGroups, HasNotifications, HasVariables, HasCopy
from awxkit.config import config
from awxkit.api.resources import resources
import awxkit.exceptions as exc
from . import base
from . import page


log = logging.getLogger(__name__)


class Inventory(HasCopy, HasCreate, HasInstanceGroups, HasVariables, base.Base):
    """Page object for a single AWX inventory, with helpers for hosts,
    teardown, and triggering inventory-source updates."""

    dependencies = [Organization]
    NATURAL_KEY = ('organization', 'name')

    def print_ini(self):
        """Print an ini version of the inventory"""
        output = []
        inv_dict = self.related.script.get(hostvars=1).json

        # iterate the dict directly instead of .keys() (same order, idiomatic)
        for group in inv_dict:
            if group == '_meta':
                continue

            # output host groups
            output.append('[%s]' % group)
            for host in inv_dict[group].get('hosts', []):
                # FIXME ... include hostvars
                output.append(host)
            output.append('')  # newline

            # output child groups
            if inv_dict[group].get('children', []):
                output.append('[%s:children]' % group)
                for child in inv_dict[group].get('children', []):
                    output.append(child)
                output.append('')  # newline

            # output group vars (truthiness of the dict is enough; .items() was redundant)
            if inv_dict[group].get('vars', {}):
                output.append('[%s:vars]' % group)
                for k, v in inv_dict[group].get('vars', {}).items():
                    output.append('%s=%s' % (k, v))
                output.append('')  # newline

        print('\n'.join(output))

    def payload(self, organization, **kwargs):
        """Build a POST body for an inventory owned by *organization*.

        Name/description default to random values; dict ``variables`` are
        serialized to a JSON string, as the API expects.
        """
        payload = PseudoNamespace(
            name=kwargs.get('name') or 'Inventory - {}'.format(random_title()),
            description=kwargs.get('description') or random_title(10),
            organization=organization.id,
        )

        optional_fields = ('host_filter', 'kind', 'variables', 'prevent_instance_group_fallback')

        update_payload(payload, optional_fields, kwargs)

        if 'variables' in payload and isinstance(payload.variables, dict):
            payload.variables = json.dumps(payload.variables)

        return payload

    def create_payload(self, name='', description='', organization=Organization, **kwargs):
        """Resolve the Organization dependency and return a payload carrying a DS adapter."""
        self.create_and_update_dependencies(organization)
        payload = self.payload(name=name, description=description, organization=self.ds.organization, **kwargs)
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload

    def create(self, name='', description='', organization=Organization, **kwargs):
        """POST a new inventory and refresh this page's identity from the response."""
        payload = self.create_payload(name=name, description=description, organization=organization, **kwargs)
        return self.update_identity(Inventories(self.connection).post(payload))

    def add_host(self, host=None):
        """Add *host* to this inventory; create a fresh host when none is given."""
        if host is None:
            return self.related.hosts.create(inventory=self)

        if isinstance(host, base.Base):
            host = host.json
        with suppress(exc.NoContent):
            self.related.hosts.post(host)
        return host

    def wait_until_deleted(self):
        """Poll until a GET of this inventory 404s (inventory deletion is asynchronous)."""

        def _wait():
            try:
                self.get()
            except exc.NotFound:
                return True

        poll_until(_wait, interval=1, timeout=60)

    def silent_delete(self):
        """Delete the inventory, tolerating errors that are expected during teardown."""
        try:
            if not config.prevent_teardown:
                r = self.delete()
                self.wait_until_deleted()
                return r
        except (exc.NoContent, exc.NotFound, exc.Forbidden):
            pass
        except (exc.BadRequest, exc.Conflict) as e:
            if 'Resource is being used' not in e.msg:
                # bare raise re-raises with the original traceback intact
                raise

    def update_inventory_sources(self, wait=False):
        """Kick off updates of all inventory sources; optionally wait for them.

        Returns the list of inventory-update job pages that were started.
        """
        response = self.related.update_inventory_sources.post()
        source_ids = [entry['inventory_source'] for entry in response if entry['status'] == 'started']

        inv_updates = []
        for source_id in source_ids:
            inv_source = self.related.inventory_sources.get(id=source_id).results.pop()
            inv_updates.append(inv_source.related.current_job.get())

        if wait:
            for update in inv_updates:
                update.wait_until_completed()
        return inv_updates


# Detail endpoints (normal + constructed) and all create/copy POST endpoints
# deserialize to the Inventory detail page.
page.register_page(
    [
        resources.inventory,
        resources.constructed_inventory,
        (resources.inventories, 'post'),
        (resources.inventory_copy, 'post'),
        (resources.constructed_inventories, 'post'),
    ],
    Inventory,
)


class Inventories(page.PageList, Inventory):
    # Paginated collection page for Inventory records.
    pass


page.register_page([resources.inventories, resources.related_inventories, resources.constructed_inventories], Inventories)


class Group(HasCreate, HasVariables, base.Base):
    # A Group always requires an Inventory; a Credential may optionally be attached.
    dependencies = [Inventory]
    optional_dependencies = [Credential]
    NATURAL_KEY = ('name', 'inventory')

    @property
    def is_root_group(self):
        """Returns whether the current group is a top-level root group in the inventory"""
        return self.related.inventory.get().related.root_groups.get(id=self.id).count == 1

    def get_parents(self):
        """Inspects the API and returns all groups that include the current group as a child."""
        return Groups(self.connection).get(children=self.id).results

    def payload(self, inventory, credential=None, **kwargs):
        """Build a POST body for a group in *inventory*; dict variables are JSON-serialized."""
        payload = PseudoNamespace(
            name=kwargs.get('name') or 'Group{}'.format(random_title(non_ascii=False)),
            description=kwargs.get('description') or random_title(10),
            inventory=inventory.id,
        )

        if credential:
            payload.credential = credential.id

        update_payload(payload, ('variables',), kwargs)

        if 'variables' in payload and isinstance(payload.variables, dict):
            payload.variables = json.dumps(payload.variables)

        return payload

    def create_payload(self, name='', description='', inventory=Inventory, credential=None, **kwargs):
        """Resolve dependencies (creating them as needed) and attach a DS adapter to the payload."""
        self.create_and_update_dependencies(inventory, credential)
        credential = self.ds.credential if credential else None
        payload = self.payload(inventory=self.ds.inventory, credential=credential, name=name, description=description, **kwargs)
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload

    def create(self, name='', description='', inventory=Inventory, **kwargs):
        """POST a new group; when kwargs['parent'] is a Group page, create it as that group's child."""
        payload = self.create_payload(name=name, description=description, inventory=inventory, **kwargs)

        parent = kwargs.get('parent', None)  # parent must be a Group instance
        resource = parent.related.children if parent else Groups(self.connection)
        return self.update_identity(resource.post(payload))

    def add_host(self, host=None):
        """Add *host* to this group; with no argument, create a host in the group's inventory first."""
        if host is None:
            host = self.related.hosts.create(inventory=self.ds.inventory)
            with suppress(exc.NoContent):
                host.related.groups.post(dict(id=self.id))
            return host

        if isinstance(host, base.Base):
            host = host.json
        with suppress(exc.NoContent):
            self.related.hosts.post(host)
        return host

    def add_group(self, group):
        """Associate *group* as a child of this group."""
        if isinstance(group, page.Page):
            group = group.json
        with suppress(exc.NoContent):
            self.related.children.post(group)

    def remove_group(self, group):
        """Disassociate *group* from this group's children."""
        if isinstance(group, page.Page):
            group = group.json
        with suppress(exc.NoContent):
            self.related.children.post(dict(id=group.id, disassociate=True))


page.register_page([resources.group, (resources.groups, 'post')], Group)


class Groups(page.PageList, Group):
    # Paginated collection page for Group records.
    pass


# All the list-style group endpoints (top-level, per-host, per-inventory,
# root groups, children, potential children) deserialize to Groups.
page.register_page(
    [
        resources.groups,
        resources.host_groups,
        resources.inventory_related_groups,
        resources.inventory_related_root_groups,
        resources.group_children,
        resources.group_potential_children,
    ],
    Groups,
)


class Host(HasCreate, HasVariables, base.Base):
    """Page object for a single inventory host."""

    dependencies = [Inventory]
    NATURAL_KEY = ('name', 'inventory')

    def payload(self, inventory, **kwargs):
        """Build a POST body for a host in *inventory*, defaulting name and description."""
        fields = PseudoNamespace(
            name=kwargs.get('name') or 'Host{}'.format(random_title(non_ascii=False)),
            description=kwargs.get('description') or random_title(10),
            inventory=inventory.id,
        )

        update_payload(fields, ('enabled', 'instance_id'), kwargs)

        host_vars = kwargs.get('variables', not_provided)

        # An explicit None asks for a usable local-connection default.
        if host_vars is None:
            host_vars = dict(ansible_host='localhost', ansible_connection='local', ansible_python_interpreter='{{ ansible_playbook_python }}')

        if host_vars != not_provided:
            fields.variables = json.dumps(host_vars) if isinstance(host_vars, dict) else host_vars

        return fields

    def create_payload(self, name='', description='', variables=None, inventory=Inventory, **kwargs):
        """Resolve the Inventory dependency and return a payload carrying a DS adapter."""
        self.create_and_update_dependencies(*filter_by_class((inventory, Inventory)))
        body = self.payload(inventory=self.ds.inventory, name=name, description=description, variables=variables, **kwargs)
        body.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return body

    def create(self, name='', description='', variables=None, inventory=Inventory, **kwargs):
        """POST a new host and refresh this page's identity from the response."""
        body = self.create_payload(name=name, description=description, variables=variables, inventory=inventory, **kwargs)
        return self.update_identity(Hosts(self.connection).post(body))


page.register_page([resources.host, (resources.hosts, 'post')], Host)


class Hosts(page.PageList, Host):
    # Paginated collection page for Host records.
    pass


page.register_page([resources.hosts, resources.group_related_hosts, resources.inventory_related_hosts, resources.inventory_sources_related_hosts], Hosts)


class FactVersion(base.Base):
    # Detail page for a single stored fact-scan version of a host.
    pass


page.register_page(resources.host_related_fact_version, FactVersion)


class FactVersions(page.PageList, FactVersion):
    @property
    def count(self):
        # This endpoint has no paginated 'count' field, so derive it from the results list.
        return len(self.results)


page.register_page(resources.host_related_fact_versions, FactVersions)


class FactView(base.Base):
    # Read-only page for viewing a host's facts.
    pass


page.register_page(resources.fact_view, FactView)


class InventorySource(HasCreate, HasNotifications, UnifiedJobTemplate):
    # No schedule fields beyond the UnifiedJobTemplate defaults.
    optional_schedule_fields = tuple()
    dependencies = [Inventory]
    optional_dependencies = [Credential, Project]
    NATURAL_KEY = ('organization', 'name', 'inventory')

    def payload(self, inventory, source='scm', credential=None, project=None, **kwargs):
        """Build a POST body for an inventory source attached to *inventory*."""
        payload = PseudoNamespace(
            name=kwargs.get('name') or 'InventorySource - {}'.format(random_title()),
            description=kwargs.get('description') or random_title(10),
            inventory=inventory.id,
            source=source,
        )

        if credential:
            payload.credential = credential.id
        if project:
            payload.source_project = project.id

        optional_fields = (
            'source_path',
            'source_vars',
            'scm_branch',
            'timeout',
            'overwrite',
            'overwrite_vars',
            'update_cache_timeout',
            'update_on_launch',
            'verbosity',
        )

        update_payload(payload, optional_fields, kwargs)

        return payload

    def create_payload(self, name='', description='', source='scm', inventory=Inventory, credential=None, project=None, **kwargs):
        """Resolve dependencies and build the creation payload.

        For SCM sources a default source project/path is supplied so the
        source is runnable out of the box.
        """
        if source == 'scm':
            kwargs.setdefault('overwrite_vars', True)
            kwargs.setdefault('source_path', 'inventories/script_migrations/script_source.py')
            if project is None:
                project = Project

        inventory, credential, project = filter_by_class((inventory, Inventory), (credential, Credential), (project, Project))
        self.create_and_update_dependencies(inventory, credential, project)

        if credential:
            credential = self.ds.credential
        if project:
            project = self.ds.project

        payload = self.payload(inventory=self.ds.inventory, source=source, credential=credential, project=project, name=name, description=description, **kwargs)
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload

    def create(self, name='', description='', source='scm', inventory=Inventory, credential=None, project=None, **kwargs):
        """POST a new inventory source and refresh this page's identity from the response."""
        payload = self.create_payload(name=name, description=description, source=source, inventory=inventory, credential=credential, project=project, **kwargs)
        return self.update_identity(InventorySources(self.connection).post(payload))

    def update(self):
        """Update the inventory_source using related->update endpoint"""
        # get related->launch
        update_pg = self.get_related('update')

        # assert can_update == True
        assert update_pg.can_update, "The specified inventory_source (id:%s) is not able to update (can_update:%s)" % (self.id, update_pg.can_update)

        # start the inventory_update
        result = update_pg.post()

        # assert JSON response
        assert 'inventory_update' in result.json, "Unexpected JSON response when starting an inventory_update.\n%s" % json.dumps(result.json, indent=2)

        # locate and return the inventory_update
        jobs_pg = self.related.inventory_updates.get(id=result.json['inventory_update'])
        assert jobs_pg.count == 1, "An inventory_update started (id:%s) but job not found in response at %s/inventory_updates/" % (
            result.json['inventory_update'],
            self.url,
        )
        return jobs_pg.results[0]

    @property
    def is_successful(self):
        """An inventory_source is considered successful when source != "" and super().is_successful ."""
        return self.source != "" and super(InventorySource, self).is_successful

    def add_credential(self, credential):
        """Associate *credential* with this inventory source."""
        with suppress(exc.NoContent):
            self.related.credentials.post(dict(id=credential.id, associate=True))

    def remove_credential(self, credential):
        """Disassociate *credential* from this inventory source."""
        with suppress(exc.NoContent):
            self.related.credentials.post(dict(id=credential.id, disassociate=True))


page.register_page([resources.inventory_source, (resources.inventory_sources, 'post')], InventorySource)


class InventorySources(page.PageList, InventorySource):
    # Paginated collection page for InventorySource records.
    pass


page.register_page([resources.inventory_sources, resources.related_inventory_sources], InventorySources)


class InventorySourceGroups(page.PageList, Group):
    # Groups created/managed by an inventory source; item behavior from Group.
    pass


page.register_page(resources.inventory_sources_related_groups, InventorySourceGroups)


class InventorySourceUpdate(base.Base):
    # POST-able endpoint page used to trigger an inventory source update.
    pass


page.register_page([resources.inventory_sources_related_update, resources.inventory_related_update_inventory_sources], InventorySourceUpdate)


class InventoryUpdate(UnifiedJob):
    # Detail page for a single inventory-update job run.
    pass


page.register_page(resources.inventory_update, InventoryUpdate)


class InventoryUpdates(page.PageList, InventoryUpdate):
    # Paginated collection page for InventoryUpdate records.
    pass


page.register_page([resources.inventory_updates, resources.inventory_source_updates, resources.project_update_scm_inventory_updates], InventoryUpdates)


class InventoryUpdateCancel(base.Base):
    # POST-able endpoint page used to cancel a running inventory update.
    pass


page.register_page(resources.inventory_update_cancel, InventoryUpdateCancel)


class InventoryCopy(base.Base):
    # Endpoint page for the inventory copy capability check / copy action.
    pass


page.register_page(resources.inventory_copy, InventoryCopy)
07070100000024000081A400000000000000000000000166846B9200001C3F000000000000000000000000000000000000002D00000000awx-24.6.1/awxkit/api/pages/job_templates.pyfrom contextlib import suppress
import json

from awxkit.utils import filter_by_class, not_provided, random_title, update_payload, set_payload_foreign_key_args, PseudoNamespace
from awxkit.api.pages import Credential, Inventory, Project, UnifiedJobTemplate
from awxkit.api.mixins import HasCreate, HasInstanceGroups, HasNotifications, HasSurvey, HasCopy, DSAdapter
from awxkit.api.resources import resources
import awxkit.exceptions as exc
from . import base
from . import page


class JobTemplate(HasCopy, HasCreate, HasInstanceGroups, HasNotifications, HasSurvey, UnifiedJobTemplate):
    """Page object for a job template, with launch and credential helpers."""

    optional_dependencies = [Inventory, Credential, Project]
    NATURAL_KEY = ('organization', 'name')

    def launch(self, payload=None):
        """Launch the job_template using related->launch endpoint.

        *payload* is an optional dict of launch-time data (prompted fields,
        extra_vars, ...). Returns the spawned job page, or the slice
        workflow job page for sliced job templates.
        """
        # Default changed from a shared mutable `{}` to None (mutable default
        # argument pitfall); behavior is unchanged for all callers.
        if payload is None:
            payload = {}

        # get related->launch
        launch_pg = self.get_related('launch')

        # launch the job_template
        result = launch_pg.post(payload)

        # return job
        if result.json['type'] == 'job':
            jobs_pg = self.get_related('jobs', id=result.json['job'])
            assert jobs_pg.count == 1, "job_template launched (id:%s) but job not found in response at %s/jobs/" % (result.json['job'], self.url)
            return jobs_pg.results[0]
        elif result.json['type'] == 'workflow_job':
            slice_workflow_jobs = self.get_related('slice_workflow_jobs', id=result.json['id'])
            assert slice_workflow_jobs.count == 1, "job_template launched sliced job (id:%s) but not found in related %s/slice_workflow_jobs/" % (
                result.json['id'],
                self.url,
            )
            return slice_workflow_jobs.results[0]
        else:
            raise RuntimeError('Unexpected type of job template spawned job.')

    def payload(self, job_type='run', playbook='ping.yml', **kwargs):
        """Build a POST body for a job template; dict extra_vars are JSON-serialized."""
        name = kwargs.get('name') or 'JobTemplate - {}'.format(random_title())
        description = kwargs.get('description') or random_title(10)
        payload = PseudoNamespace(name=name, description=description, job_type=job_type)

        optional_fields = (
            'ask_scm_branch_on_launch',
            'ask_credential_on_launch',
            'ask_diff_mode_on_launch',
            'ask_inventory_on_launch',
            'ask_job_type_on_launch',
            'ask_limit_on_launch',
            'ask_skip_tags_on_launch',
            'ask_tags_on_launch',
            'ask_variables_on_launch',
            'ask_verbosity_on_launch',
            'ask_execution_environment_on_launch',
            'ask_labels_on_launch',
            'ask_forks_on_launch',
            'ask_job_slice_count_on_launch',
            'ask_timeout_on_launch',
            'ask_instance_groups_on_launch',
            'allow_simultaneous',
            'become_enabled',
            'diff_mode',
            'force_handlers',
            'forks',
            'host_config_key',
            'job_tags',
            'limit',
            'skip_tags',
            'start_at_task',
            'survey_enabled',
            'timeout',
            'use_fact_cache',
            'vault_credential',
            'verbosity',
            'job_slice_count',
            'webhook_service',
            'webhook_credential',
            'scm_branch',
            'prevent_instance_group_fallback',
        )

        update_payload(payload, optional_fields, kwargs)

        extra_vars = kwargs.get('extra_vars', not_provided)
        if extra_vars != not_provided:
            if isinstance(extra_vars, dict):
                extra_vars = json.dumps(extra_vars)
            payload.update(extra_vars=extra_vars)

        # a project implies a playbook to run
        if kwargs.get('project'):
            payload.update(project=kwargs.get('project').id, playbook=playbook)

        payload = set_payload_foreign_key_args(payload, ('inventory', 'credential', 'webhook_credential', 'execution_environment'), kwargs)

        return payload

    def add_label(self, label):
        """Associate *label* with this job template."""
        if isinstance(label, page.Page):
            label = label.json
        with suppress(exc.NoContent):
            self.related.labels.post(label)

    def create_payload(self, name='', description='', job_type='run', playbook='ping.yml', credential=Credential, inventory=Inventory, project=None, **kwargs):
        """Resolve dependencies and build the creation payload.

        Returns a (payload, credential) pair: the credential must be
        associated after creation, not in the POST body (see create()).
        """
        if not project:
            project = Project
        if not inventory and not kwargs.get('ask_inventory_on_launch', False):
            inventory = Inventory

        self.create_and_update_dependencies(*filter_by_class((credential, Credential), (inventory, Inventory), (project, Project)))
        project = self.ds.project if project else None
        inventory = self.ds.inventory if inventory else None
        credential = self.ds.credential if credential else None

        payload = self.payload(
            name=name, description=description, job_type=job_type, playbook=playbook, credential=credential, inventory=inventory, project=project, **kwargs
        )
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload, credential

    def create(self, name='', description='', job_type='run', playbook='ping.yml', credential=Credential, inventory=Inventory, project=None, **kwargs):
        """POST a new job template, then associate its credential(s) via the related endpoint."""
        payload, credential = self.create_payload(
            name=name, description=description, job_type=job_type, playbook=playbook, credential=credential, inventory=inventory, project=project, **kwargs
        )
        ret = self.update_identity(JobTemplates(self.connection).post(payload))
        if credential:
            with suppress(exc.NoContent):
                self.related.credentials.post(dict(id=credential.id))
        if 'vault_credential' in kwargs:
            with suppress(exc.NoContent):
                if not isinstance(kwargs['vault_credential'], int):
                    raise ValueError("Expected 'vault_credential' value to be an integer, the id of the desired vault credential")
                self.related.credentials.post(dict(id=kwargs['vault_credential']))
        return ret

    def add_credential(self, credential):
        """Associate *credential* with this job template."""
        with suppress(exc.NoContent):
            self.related.credentials.post(dict(id=credential.id, associate=True))

    def remove_credential(self, credential):
        """Disassociate *credential* from this job template."""
        with suppress(exc.NoContent):
            self.related.credentials.post(dict(id=credential.id, disassociate=True))

    def remove_all_credentials(self):
        """Disassociate every credential currently attached to this job template."""
        for cred in self.related.credentials.get().results:
            with suppress(exc.NoContent):
                self.related.credentials.post(dict(id=cred.id, disassociate=True))


page.register_page([resources.job_template, (resources.job_templates, 'post'), (resources.job_template_copy, 'post')], JobTemplate)


class JobTemplates(page.PageList, JobTemplate):
    # Paginated collection page for JobTemplate records.
    pass


page.register_page([resources.job_templates, resources.related_job_templates], JobTemplates)


class JobTemplateCallback(base.Base):
    # Endpoint page for provisioning-callback configuration of a job template.
    pass


page.register_page(resources.job_template_callback, JobTemplateCallback)


class JobTemplateLaunch(base.Base):
    # GET exposes launch-time prompts; POST launches the template.
    pass


page.register_page(resources.job_template_launch, JobTemplateLaunch)


class JobTemplateCopy(base.Base):
    # Endpoint page for the job-template copy capability check / copy action.
    pass


page.register_page([resources.job_template_copy], JobTemplateCopy)
07070100000025000081A400000000000000000000000166846B92000006D5000000000000000000000000000000000000002400000000awx-24.6.1/awxkit/api/pages/jobs.pyfrom awxkit.api.pages import UnifiedJob
from awxkit.api.resources import resources
from . import base
from . import page


class Job(UnifiedJob):
    """Page object for a single job run."""

    def relaunch(self, payload=None):
        """Relaunch this job via related->relaunch and return the new job's page.

        The default was changed from a shared mutable ``{}`` to ``None``
        (mutable default argument pitfall); behavior is unchanged.
        """
        if payload is None:
            payload = {}
        result = self.related.relaunch.post(payload)
        return self.walk(result.endpoint)


page.register_page(resources.job, Job)


class Jobs(page.PageList, Job):
    # Paginated collection page for Job records.
    pass


page.register_page([resources.jobs, resources.job_template_jobs, resources.system_job_template_jobs], Jobs)


class JobCancel(UnifiedJob):
    # POST-able endpoint page used to cancel a running job.
    pass


page.register_page(resources.job_cancel, JobCancel)


class JobEvent(base.Base):
    # Detail page for a single job event record.
    pass


page.register_page([resources.job_event, resources.job_job_event], JobEvent)


class JobEvents(page.PageList, JobEvent):
    # Paginated collection page for JobEvent records.
    pass


page.register_page([resources.job_events, resources.job_job_events, resources.job_event_children, resources.group_related_job_events], JobEvents)


class JobPlay(base.Base):
    # Detail page for a single play within a job run.
    pass


page.register_page(resources.job_play, JobPlay)


class JobPlays(page.PageList, JobPlay):
    # Paginated collection page for JobPlay records.
    pass


page.register_page(resources.job_plays, JobPlays)


class JobTask(base.Base):
    # Detail page for a single task within a job run.
    pass


page.register_page(resources.job_task, JobTask)


class JobTasks(page.PageList, JobTask):
    # Paginated collection page for JobTask records.
    pass


page.register_page(resources.job_tasks, JobTasks)


class JobHostSummary(base.Base):
    # Detail page for a job's per-host result summary.
    pass


page.register_page(resources.job_host_summary, JobHostSummary)


class JobHostSummaries(page.PageList, JobHostSummary):
    # Paginated collection page for JobHostSummary records.
    pass


page.register_page([resources.job_host_summaries, resources.group_related_job_host_summaries], JobHostSummaries)


class JobRelaunch(base.Base):
    # POST-able endpoint page used to relaunch a job.
    pass


page.register_page(resources.job_relaunch, JobRelaunch)


class JobStdout(base.Base):
    # Page exposing a job's captured stdout.
    pass


page.register_page(resources.related_stdout, JobStdout)
07070100000026000081A400000000000000000000000166846B92000006F6000000000000000000000000000000000000002600000000awx-24.6.1/awxkit/api/pages/labels.pyfrom awxkit.utils import random_title, PseudoNamespace
from awxkit.api.mixins import HasCreate, DSAdapter
from awxkit.api.resources import resources
from awxkit.api.pages import Organization
from . import base
from . import page


class Label(HasCreate, base.Base):
    """Page object for an organization-scoped label."""

    dependencies = [Organization]
    NATURAL_KEY = ('organization', 'name')

    def silent_delete(self):
        """Label pages do not support DELETE requests. Here, we override the base page object
        silent_delete method to account for this.
        """
        pass

    def payload(self, organization, **kwargs):
        """Build a POST body for a label owned by *organization*."""
        label_name = kwargs.get('name') or 'Label - {}'.format(random_title())
        label_description = kwargs.get('description') or random_title(10)
        return PseudoNamespace(name=label_name, description=label_description, organization=organization.id)

    def create_payload(self, name='', description='', organization=Organization, **kwargs):
        """Resolve the Organization dependency and return a payload carrying a DS adapter."""
        self.create_and_update_dependencies(organization)
        body = self.payload(organization=self.ds.organization, name=name, description=description, **kwargs)
        body.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return body

    def create(self, name='', description='', organization=Organization, **kwargs):
        """POST a new label and refresh this page's identity from the response."""
        body = self.create_payload(name=name, description=description, organization=organization, **kwargs)
        return self.update_identity(Labels(self.connection).post(body))


page.register_page([resources.label, (resources.labels, 'post')], Label)


class Labels(page.PageList, Label):
    # Paginated collection page for Label records.
    pass


page.register_page(
    [resources.labels, resources.inventory_labels, resources.job_labels, resources.job_template_labels, resources.workflow_job_template_labels], Labels
)
07070100000027000081A400000000000000000000000166846B92000000BD000000000000000000000000000000000000002F00000000awx-24.6.1/awxkit/api/pages/mesh_visualizer.pyfrom awxkit.api.resources import resources
from . import base
from . import page


class MeshVisualizer(base.Base):
    # Read-only page describing the receptor mesh topology.
    pass


page.register_page(resources.mesh_visualizer, MeshVisualizer)
07070100000028000081A400000000000000000000000166846B920000017D000000000000000000000000000000000000002700000000awx-24.6.1/awxkit/api/pages/metrics.pyfrom awxkit.api.resources import resources
from . import base
from . import page


class Metrics(base.Base):
    """Page object for the metrics endpoint."""

    def get(self, **query_parameters):
        """GET the metrics endpoint, explicitly requesting a JSON representation."""
        headers = {'Accept': 'application/json'}
        response = self.connection.get(self.endpoint, query_parameters, headers=headers)
        return self.page_identity(response)


page.register_page([resources.metrics, (resources.metrics, 'get')], Metrics)
07070100000029000081A400000000000000000000000166846B9200001F3C000000000000000000000000000000000000003600000000awx-24.6.1/awxkit/api/pages/notification_templates.pyfrom contextlib import suppress

from awxkit.api.mixins import HasCreate, HasCopy, DSAdapter
from awxkit.api.pages import Organization
from awxkit.api.resources import resources
from awxkit.config import config
import awxkit.exceptions as exc
from awxkit.utils import not_provided, random_title, PseudoNamespace
from . import base
from . import page


# Valid values for the job_result argument of NotificationTemplate._associate().
job_results = ('any', 'error', 'success')
# Notification backends accepted by the API's notification_type field.
notification_types = ('awssns', 'email', 'irc', 'pagerduty', 'slack', 'twilio', 'webhook', 'mattermost', 'grafana', 'rocketchat')


class NotificationTemplate(HasCopy, HasCreate, base.Base):
    """Page object for a notification template, with helpers to test it and to
    (dis)associate it with other resources' notification hooks."""

    dependencies = [Organization]
    NATURAL_KEY = ('organization', 'name')

    def test(self):
        """Create test notification"""
        assert 'test' in self.related, "No such related attribute 'test'"

        # trigger test notification
        notification_id = self.related.test.post().notification

        # return notification page
        notifications_pg = self.get_related('notifications', id=notification_id).wait_until_count(1)
        assert notifications_pg.count == 1, "test notification triggered (id:%s) but notification not found in response at %s/notifications/" % (
            notification_id,
            self.url,
        )
        return notifications_pg.results[0]

    def silent_delete(self):
        """Delete the Notification Template, ignoring the exception that is raised
        if there are notifications pending.
        """
        try:
            super(NotificationTemplate, self).silent_delete()
        except exc.MethodNotAllowed:
            pass

    def payload(self, organization, notification_type='slack', messages=not_provided, **kwargs):
        """Build a POST body for a notification template of *notification_type*.

        When no notification_configuration is supplied, the per-service
        credentials from the awxkit config supply the required fields.
        """
        payload = PseudoNamespace(
            name=kwargs.get('name') or 'NotificationTemplate ({0}) - {1}'.format(notification_type, random_title()),
            description=kwargs.get('description') or random_title(10),
            organization=organization.id,
            notification_type=notification_type,
        )
        if messages != not_provided:
            payload['messages'] = messages

        notification_configuration = kwargs.get('notification_configuration', {})
        payload.notification_configuration = notification_configuration

        if payload.notification_configuration == {}:
            services = config.credentials.notification_services

            if notification_type == 'awssns':
                fields = ('aws_region', 'aws_access_key_id', 'aws_secret_access_key', 'aws_session_token', 'sns_topic_arn')
                cred = services.awssns
            elif notification_type == 'email':
                fields = ('host', 'username', 'password', 'port', 'use_ssl', 'use_tls', 'sender', 'recipients')
                cred = services.email
            elif notification_type == 'irc':
                fields = ('server', 'port', 'use_ssl', 'password', 'nickname', 'targets')
                cred = services.irc
            elif notification_type == 'pagerduty':
                fields = ('client_name', 'service_key', 'subdomain', 'token')
                cred = services.pagerduty
            elif notification_type == 'slack':
                fields = ('channels', 'token')
                cred = services.slack
            elif notification_type == 'twilio':
                fields = ('account_sid', 'account_token', 'from_number', 'to_numbers')
                cred = services.twilio
            elif notification_type == 'webhook':
                fields = ('url', 'headers')
                cred = services.webhook
            elif notification_type == 'mattermost':
                fields = ('mattermost_url', 'mattermost_username', 'mattermost_channel', 'mattermost_icon_url', 'mattermost_no_verify_ssl')
                cred = services.mattermost
            elif notification_type == 'grafana':
                fields = ('grafana_url', 'grafana_key')
                cred = services.grafana
            elif notification_type == 'rocketchat':
                fields = ('rocketchat_url', 'rocketchat_no_verify_ssl')
                cred = services.rocketchat
            else:
                raise ValueError('Unknown notification_type {0}'.format(notification_type))

            # Removed an unreachable 'bot_token' -> 'token' remapping: no
            # fields tuple above ever contains 'bot_token', so the branch
            # could never fire.
            for field in fields:
                value = kwargs.get(field, cred.get(field, not_provided))
                if value != not_provided:
                    payload.notification_configuration[field] = value

        return payload

    def create_payload(self, name='', description='', notification_type='slack', organization=Organization, messages=not_provided, **kwargs):
        """Validate the notification type, resolve dependencies, and build the creation payload."""
        if notification_type not in notification_types:
            raise ValueError('Unsupported notification type "{0}".  Please use one of {1}.'.format(notification_type, notification_types))
        self.create_and_update_dependencies(organization)
        payload = self.payload(
            organization=self.ds.organization, notification_type=notification_type, name=name, description=description, messages=messages, **kwargs
        )
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload

    def create(self, name='', description='', notification_type='slack', organization=Organization, messages=not_provided, **kwargs):
        """POST a new notification template and refresh this page's identity from the response."""
        payload = self.create_payload(
            name=name, description=description, notification_type=notification_type, organization=organization, messages=messages, **kwargs
        )
        return self.update_identity(NotificationTemplates(self.connection).post(payload))

    def associate(self, resource, job_result='any'):
        """Associates a NotificationTemplate with the provided resource"""
        return self._associate(resource, job_result)

    def disassociate(self, resource, job_result='any'):
        """Disassociates a NotificationTemplate with the provided resource"""
        return self._associate(resource, job_result, disassociate=True)

    def _associate(self, resource, job_result='any', disassociate=False):
        """Shared implementation for associate()/disassociate() against
        resource.related.notification_templates_<job_result>."""
        if job_result not in job_results:
            raise ValueError('Unsupported job_result type "{0}".  Please use one of {1}.'.format(job_result, job_results))

        result_attr = 'notification_templates_{0}'.format(job_result)
        if result_attr not in resource.related:
            raise ValueError('Unsupported resource "{0}".  Does not have a related {1} field.'.format(resource, result_attr))

        payload = dict(id=self.id)
        if disassociate:
            payload['disassociate'] = True

        with suppress(exc.NoContent):
            getattr(resource.related, result_attr).post(payload)


page.register_page(
    [
        resources.notification_template,
        (resources.notification_templates, 'post'),
        (resources.notification_template_copy, 'post'),
        resources.notification_template_any,
        resources.notification_template_started,
        resources.notification_template_error,
        resources.notification_template_success,
        resources.notification_template_approval,
    ],
    NotificationTemplate,
)


class NotificationTemplates(page.PageList, NotificationTemplate):
    # List page: each result materializes as a NotificationTemplate.
    pass


# Register the top-level list plus every per-trigger list endpoint.
page.register_page(
    [
        resources.notification_templates,
        resources.related_notification_templates,
        resources.notification_templates_any,
        resources.notification_templates_started,
        resources.notification_templates_error,
        resources.notification_templates_success,
        resources.notification_templates_approvals,
    ],
    NotificationTemplates,
)


class NotificationTemplateCopy(base.Base):
    # Response page for the copy action; no extra behavior needed.
    pass


page.register_page(resources.notification_template_copy, NotificationTemplateCopy)


class NotificationTemplateTest(base.Base):
    # Response page for the test action; no extra behavior needed.
    pass


page.register_page(resources.notification_template_test, NotificationTemplateTest)
0707010000002A000081A400000000000000000000000166846B9200000812000000000000000000000000000000000000002D00000000awx-24.6.1/awxkit/api/pages/notifications.pyfrom awxkit.api.mixins import HasStatus
from awxkit.api.resources import resources
from awxkit.utils import poll_until, seconds_since_date_string
from . import base
from . import page


class Notification(HasStatus, base.Base):
    """Page object for a single notification record."""

    def __str__(self):
        # Summarize only the fields present on this instance.
        items = ['id', 'notification_type', 'status', 'error', 'notifications_sent', 'subject', 'recipients']
        info = []
        for item in [x for x in items if hasattr(self, x)]:
            info.append('{0}:{1}'.format(item, getattr(self, item)))
        output = '<{0.__class__.__name__} {1}>'.format(self, ', '.join(info))
        # Escape '%' so the result is safe to pass through %-style formatting
        # (presumably logging) — field values may contain literal '%'.
        return output.replace('%', '%%')

    @property
    def is_successful(self):
        """Return whether the notification was created successfully. This means that:
        * self.status == 'successful'
        * self.error == False
        """
        return super(Notification, self).is_successful and not self.error

    def wait_until_status(self, status, interval=5, timeout=30, **kwargs):
        # Count the time already elapsed since creation against the timeout.
        adjusted_timeout = timeout - seconds_since_date_string(self.created)
        return super(Notification, self).wait_until_status(status, interval, adjusted_timeout, **kwargs)

    def wait_until_completed(self, interval=5, timeout=240):
        """Notifications need a longer timeout, since the backend often has
        to wait for the request (sending the notification) to timeout itself
        """
        adjusted_timeout = timeout - seconds_since_date_string(self.created)
        return super(Notification, self).wait_until_completed(interval, adjusted_timeout)


page.register_page(resources.notification, Notification)


class Notifications(page.PageList, Notification):
    def wait_until_count(self, count, interval=10, timeout=60, **kw):
        """Poll notifications page until it is populated with `count` number of notifications."""

        def _count_reached():
            return self.get().count == count

        poll_until(_count_reached, interval=interval, timeout=timeout, **kw)
        return self


page.register_page([resources.notifications, resources.related_notifications], Notifications)
0707010000002B000081A400000000000000000000000166846B92000009FB000000000000000000000000000000000000002D00000000awx-24.6.1/awxkit/api/pages/organizations.pyfrom contextlib import suppress

from awxkit.api.mixins import HasCreate, HasInstanceGroups, HasNotifications, DSAdapter
from awxkit.utils import random_title, set_payload_foreign_key_args, PseudoNamespace
from awxkit.api.resources import resources
import awxkit.exceptions as exc
from . import base
from . import page


class Organization(HasCreate, HasInstanceGroups, HasNotifications, base.Base):
    """Page object for a single organization."""

    NATURAL_KEY = ('name',)

    def add_admin(self, user):
        """Grant the organization admin role to *user*."""
        if isinstance(user, page.Page):
            user = user.json
        with suppress(exc.NoContent):
            self.related.admins.post(user)

    def add_user(self, user):
        """Add *user* as a member of this organization."""
        if isinstance(user, page.Page):
            user = user.json
        with suppress(exc.NoContent):
            self.related.users.post(user)

    def _post_galaxy_credential(self, credential, disassociate):
        # Shared (dis)association helper for the galaxy_credentials endpoint;
        # a successful request answers 204 No Content.
        if isinstance(credential, page.Page):
            credential = credential.json
        body = {"id": credential.id}
        if disassociate:
            body["disassociate"] = True
        with suppress(exc.NoContent):
            self.related.galaxy_credentials.post(body)

    def add_galaxy_credential(self, credential):
        """Associate a Galaxy credential with this organization."""
        self._post_galaxy_credential(credential, disassociate=False)

    def remove_galaxy_credential(self, credential):
        """Disassociate a Galaxy credential from this organization."""
        self._post_galaxy_credential(credential, disassociate=True)

    def payload(self, **kwargs):
        """Build a POST payload, generating a random name/description when absent."""
        fields = PseudoNamespace(
            name=kwargs.get('name') or 'Organization - {}'.format(random_title()), description=kwargs.get('description') or random_title(10)
        )
        return set_payload_foreign_key_args(fields, ('default_environment',), kwargs)

    def create_payload(self, name='', description='', **kwargs):
        """Like payload(), but attach the dependency-store adapter."""
        payload = self.payload(name=name, description=description, **kwargs)
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload

    def create(self, name='', description='', **kwargs):
        """POST a new organization and update this page to represent it."""
        payload = self.create_payload(name=name, description=description, **kwargs)
        return self.update_identity(Organizations(self.connection).post(payload))


page.register_page([resources.organization, (resources.organizations, 'post')], Organization)


class Organizations(page.PageList, Organization):
    # List page: each result materializes as an Organization.
    pass


page.register_page([resources.organizations, resources.user_organizations, resources.project_organizations], Organizations)
0707010000002C000081A400000000000000000000000166846B92000053CA000000000000000000000000000000000000002400000000awx-24.6.1/awxkit/api/pages/page.pyfrom contextlib import suppress
import inspect
import logging
import json
import re

from requests import Response
import http.client as http

from awxkit.utils import PseudoNamespace, is_relative_endpoint, are_same_endpoint, super_dir_set, is_list_or_tuple, to_str
from awxkit.api import utils
from awxkit.api.client import Connection
from awxkit.api.registry import URLRegistry
from awxkit.api.resources import resources
from awxkit.config import config
import awxkit.exceptions as exc


log = logging.getLogger(__name__)


_page_registry = URLRegistry()
get_registered_page = _page_registry.get


def is_license_invalid(response):
    """Return True when *response*'s body reports an invalid/missing license payload.

    Returns None (falsy) when no known error phrase matches.
    """
    error_patterns = (
        r".*Invalid license.*",
        r".*Missing 'eula_accepted' property.*",
        r".*'eula_accepted' must be True.*",
        r".*Invalid license data.*",
    )
    for pattern in error_patterns:
        if re.match(pattern, response.text):
            return True


def is_license_exceeded(response):
    """Return True when *response*'s body reports an exceeded, expired, or missing license.

    Returns None (falsy) when no known error phrase matches.
    """
    error_patterns = (
        r".*license range of.*instances has been exceeded.*",
        r".*License count of.*instances has been reached.*",
        r".*License count of.*instances has been exceeded.*",
        r".*License has expired.*",
        r".*License is missing.*",
    )
    for pattern in error_patterns:
        if re.match(pattern, response.text):
            return True


def is_duplicate_error(response):
    """Return True when *response*'s body reports that an object already exists.

    Returns None (falsy) otherwise.
    """
    matched = re.match(r".*already exists.*", response.text)
    return True if matched else None


def register_page(urls, page_cls):
    """Register *page_cls* as the page type for one or more URL/method pairs.

    *urls* may be a single URL string, a (url, method) pair, or a list mixing
    both; the class's `endpoint` attribute is set to the first URL given.
    """
    if not _page_registry.default:
        # Lazily install Base as the registry fallback on first registration.
        from awxkit.api.pages import Base

        _page_registry.setdefault(Base)

    if not is_list_or_tuple(urls):
        urls = [urls]

    # Register every methodless page with wildcard method
    # until more granular page objects exist (options, head, etc.)
    normalized = []
    for entry in urls:
        if isinstance(entry, str):
            normalized.append((entry, '.*'))
        else:
            url, method = entry
            normalized.append((url, method))

    page_cls.endpoint = normalized[0][0]
    return _page_registry.register(normalized, page_cls)


def objectify_response_json(response):
    """return a PseudoNamespace() from requests.Response.json()."""
    try:
        body = response.json()
    except ValueError:
        # No parseable JSON in the response body.
        body = {}

    # PseudoNamespace arg must be a dict, and json can be an array.
    # TODO: Assess if list elements should be PseudoNamespace
    return PseudoNamespace(body) if isinstance(body, dict) else body


class Page(object):
    """Base page object: wraps one API resource (an endpoint plus its parsed
    JSON body) and proxies HTTP verbs through an awxkit Connection.
    """

    endpoint = ''

    def __init__(self, connection=None, *a, **kw):
        if 'endpoint' in kw:
            self.endpoint = kw['endpoint']

        # Fall back to a fresh connection built from the global config.
        self.connection = connection or Connection(config.base_url, kw.get('verify', not config.assume_untrusted))

        self.r = kw.get('r', None)
        self.json = kw.get('json', objectify_response_json(self.r) if self.r else {})
        self.last_elapsed = kw.get('last_elapsed', None)

    def __getattr__(self, name):
        # Resolve unknown attributes from the JSON body; relative endpoint
        # strings (and dict values of them, e.g. `related`) are wrapped in
        # TentativePage so they can be fetched lazily.
        if 'json' in self.__dict__ and name in self.json:
            value = self.json[name]
            if not isinstance(value, TentativePage) and is_relative_endpoint(value):
                value = TentativePage(value, self.connection)
            elif isinstance(value, dict):
                for key, item in value.items():
                    if not isinstance(item, TentativePage) and is_relative_endpoint(item):
                        value[key] = TentativePage(item, self.connection)
            return value
        raise AttributeError("{!r} object has no attribute {!r}".format(self.__class__.__name__, name))

    def __setattr__(self, name, value):
        # Assigning to a field already present in the JSON body issues a
        # PATCH to the API; anything else is a plain attribute set.
        if 'json' in self.__dict__ and name in self.json:
            # Update field only.  For new field use explicit patch
            self.patch(**{name: value})
        else:
            self.__dict__[name] = value

    def __str__(self):
        if hasattr(self, 'json'):
            return json.dumps(self.json, indent=4)
        return str(super(Page, self).__repr__())

    __repr__ = __str__

    def __dir__(self):
        # Expose JSON field names alongside normal attributes for tab completion.
        attrs = super_dir_set(self.__class__)
        if 'json' in self.__dict__ and hasattr(self.json, 'keys'):
            attrs.update(self.json.keys())
        return sorted(attrs)

    def __getitem__(self, key):
        return getattr(self, key)

    def __iter__(self):
        return iter(self.json)

    @property
    def __item_class__(self):
        """Returns the class representing a single 'Page' item"""
        return self.__class__

    @classmethod
    def from_json(cls, raw, connection=None):
        # Build a page from raw JSON by synthesizing a 200 Response around it.
        resp = Response()
        data = json.dumps(raw)
        resp._content = bytes(data, 'utf-8')
        resp.encoding = 'utf-8'
        resp.status_code = 200
        return cls(r=resp, connection=connection)

    @property
    def bytes(self):
        # Raw response body; empty when this page was never fetched.
        if self.r is None:
            return b''
        return self.r.content

    def extract_data(self, response):
        """Takes a `requests.Response` and returns a data dict."""
        try:
            data = response.json()
        except ValueError as e:  # If there was no json to parse
            data = {}
            if response.text or response.status_code not in (200, 202, 204):
                text = response.text
                if len(text) > 1024:
                    text = text[:1024] + '... <<< Truncated >>> ...'
                log.debug("Unable to parse JSON response ({0.status_code}): {1} - '{2}'".format(response, e, text))

        return data

    def page_identity(self, response, request_json=None):
        """Takes a `requests.Response` and
        returns a new __item_class__ instance if the request method is not a get, or returns
           a __class__ instance if the request path is different than the caller's `endpoint`.
        """
        request_path = response.request.path_url
        if request_path == '/migrations_notran/':
            raise exc.IsMigrating('You have been redirected to the migration-in-progress page.')
        request_method = response.request.method.lower()

        self.last_elapsed = response.elapsed

        # Carry the dependency-store namespace from the request payload, if any.
        if isinstance(request_json, dict) and 'ds' in request_json:
            ds = request_json.ds
        else:
            ds = None

        data = self.extract_data(response)
        exc_str = "%s (%s) received" % (http.responses[response.status_code], response.status_code)

        # Status codes with an unconditional exception mapping raise first.
        exception = exception_from_status_code(response.status_code)
        if exception:
            raise exception(exc_str, data)

        if response.status_code in (http.OK, http.CREATED, http.ACCEPTED):
            # Not all JSON responses include a URL.  Grab it from the request
            # object, if needed.
            if 'url' in data:
                endpoint = data['url']
            else:
                endpoint = request_path

            data = objectify_response_json(response)

            if request_method in ('get', 'patch', 'put'):
                #  Update existing resource and return it
                if are_same_endpoint(self.endpoint, request_path):
                    self.json = data
                    self.r = response
                    return self

            # Otherwise build a fresh page of the type registered for this
            # path/method combination.
            registered_type = get_registered_page(request_path, request_method)
            return registered_type(self.connection, endpoint=endpoint, json=data, last_elapsed=response.elapsed, r=response, ds=ds)

        elif response.status_code == http.FORBIDDEN:
            # Distinguish license problems from ordinary permission errors.
            if is_license_invalid(response):
                raise exc.LicenseInvalid(exc_str, data)
            elif is_license_exceeded(response):
                raise exc.LicenseExceeded(exc_str, data)
            else:
                raise exc.Forbidden(exc_str, data)

        elif response.status_code == http.BAD_REQUEST:
            if is_license_invalid(response):
                raise exc.LicenseInvalid(exc_str, data)
            if is_duplicate_error(response):
                raise exc.Duplicate(exc_str, data)
            else:
                raise exc.BadRequest(exc_str, data)
        else:
            raise exc.Unknown(exc_str, data)

    def update_identity(self, obj):
        """Takes a `Page` and updates attributes to reflect its content"""
        self.endpoint = obj.endpoint
        self.json = obj.json
        self.last_elapsed = obj.last_elapsed
        self.r = obj.r
        return self

    def delete(self):
        # DELETE usually answers 204 No Content, which is mapped to an
        # exception; suppress it so a successful delete returns None.
        r = self.connection.delete(self.endpoint)
        with suppress(exc.NoContent):
            return self.page_identity(r)

    def get(self, all_pages=False, **query_parameters):
        r = self.connection.get(self.endpoint, query_parameters)
        page = self.page_identity(r)
        # With all_pages, follow `next` links and splice every page's results
        # into one synthetic response built from the last page's JSON.
        if all_pages and getattr(page, 'next', None):
            paged_results = [r.json()['results']]
            while page.next:
                r = self.connection.get(self.next)
                page = self.page_identity(r)
                paged_results.append(r.json()['results'])
            json = r.json()
            json['results'] = []
            for page in paged_results:
                json['results'].extend(page)
            page = self.__class__.from_json(json, connection=self.connection)
        return page

    def head(self):
        r = self.connection.head(self.endpoint)
        return self.page_identity(r)

    def options(self):
        r = self.connection.options(self.endpoint)
        return self.page_identity(r)

    def patch(self, **json):
        r = self.connection.patch(self.endpoint, json)
        return self.page_identity(r, request_json=json)

    def post(self, json={}):
        r = self.connection.post(self.endpoint, json)
        return self.page_identity(r, request_json=json)

    def put(self, json=None):
        """If a payload is supplied, PUT the payload. If not, submit our existing page JSON as our payload."""
        json = self.json if json is None else json
        r = self.connection.put(self.endpoint, json=json)
        return self.page_identity(r, request_json=json)

    def get_related(self, related_name, **kwargs):
        # Fetch a page from this resource's `related` links.
        assert related_name in self.json.get('related', [])
        endpoint = self.json['related'][related_name]
        return self.walk(endpoint, **kwargs)

    def walk(self, endpoint, **kw):
        # GET an arbitrary endpoint as its registered page type.
        page_cls = get_registered_page(endpoint)
        return page_cls(self.connection, endpoint=endpoint).get(**kw)

    def get_natural_key(self, cache=None):
        """Build this resource's natural key dict from its NATURAL_KEY fields,
        resolving fields that are related endpoints via *cache*.
        Returns None when the page type declares no NATURAL_KEY.
        """
        if cache is None:
            cache = PageCache()

        if not getattr(self, 'NATURAL_KEY', None):
            log.warning("This object does not have a natural key: %s", getattr(self, 'endpoint', ''))
            return None

        natural_key = {}
        for key in self.NATURAL_KEY:
            if key in self.related:
                # Key is a related object: recurse into its natural key.
                related_endpoint = cache.get_page(self.related[key])
                if related_endpoint is not None:
                    natural_key[key] = related_endpoint.get_natural_key(cache=cache)
                else:
                    natural_key[key] = None
            elif key in self:
                natural_key[key] = self[key]

        natural_key['type'] = self['type']
        return natural_key


# HTTP status codes that always map to a specific awxkit exception,
# independent of endpoint or response body.
_exception_map = {
    http.NO_CONTENT: exc.NoContent,
    http.NOT_FOUND: exc.NotFound,
    http.INTERNAL_SERVER_ERROR: exc.InternalServerError,
    http.BAD_GATEWAY: exc.BadGateway,
    http.METHOD_NOT_ALLOWED: exc.MethodNotAllowed,
    http.UNAUTHORIZED: exc.Unauthorized,
    http.PAYMENT_REQUIRED: exc.PaymentRequired,
    http.CONFLICT: exc.Conflict,
}


def exception_from_status_code(status_code):
    """Map an HTTP status code to its awxkit exception class, or None."""
    return _exception_map.get(status_code)


class PageList(object):
    NATURAL_KEY = None

    @property
    def __item_class__(self):
        """Return the page class for a single item of this list.

        With an MRO like OrgListSubClass -> OrgList -> PageList -> Org -> Base -> Page,
        this yields the class immediately after PageList (e.g. 'Org').

        Obtaining a page type by registered endpoint is highly recommended over using this method.
        """
        hierarchy = inspect.getmro(self.__class__)
        return hierarchy[hierarchy.index(PageList) + 1]

    @property
    def results(self):
        """Materialize each raw result dict as its registered page type."""
        pages = []
        for entry in self.json['results']:
            url = entry.get('url')
            # Fall back to the item class when a result carries no URL.
            page_type = self.__item_class__ if url is None else get_registered_page(url)
            pages.append(page_type(self.connection, endpoint=url, json=entry, r=self.r))
        return pages

    def go_to_next(self):
        """Fetch the next page of results, if one exists."""
        if not self.next:
            return None
        return self.__class__(self.connection, endpoint=self.next).get()

    def go_to_previous(self):
        """Fetch the previous page of results, if one exists."""
        if not self.previous:
            return None
        return self.__class__(self.connection, endpoint=self.previous).get()

    def create(self, *a, **kw):
        """Create a new object of this list's item type."""
        return self.__item_class__(self.connection).create(*a, **kw)

    def get_natural_key(self, cache=None):
        # List pages carry no identity of their own.
        log.warning("This object does not have a natural key: %s", getattr(self, 'endpoint', ''))
        return None


class TentativePage(str):
    """A lazy reference to an API endpoint.

    Instances behave as the endpoint string itself; the HTTP helpers
    instantiate the registered page type for the endpoint on demand.
    """

    def __new__(cls, endpoint, connection):
        return super(TentativePage, cls).__new__(cls, to_str(endpoint))

    def __init__(self, endpoint, connection):
        self.endpoint = to_str(endpoint)
        self.connection = connection

    def _create(self):
        # Instantiate the page class registered for this endpoint.
        return get_registered_page(self.endpoint)(self.connection, endpoint=self.endpoint)

    def get(self, **params):
        return self._create().get(**params)

    def create_or_replace(self, **query_parameters):
        """Create an object, and if any other item shares the name, delete that one first.

        Generally, requires 'name' of object.

        Exceptions:
          - Users are looked up by username
          - Teams need to be looked up by name + organization
        """
        page = None
        # look up users by username not name
        if 'users' in self:
            assert query_parameters.get('username'), 'For this resource, you must call this method with a "username" to look up the object by'
            page = self.get(username=query_parameters['username'])
        else:
            assert query_parameters.get('name'), 'For this resource, you must call this method with a "name" to look up the object by'
            if query_parameters.get('organization'):
                if isinstance(query_parameters.get('organization'), int):
                    page = self.get(name=query_parameters['name'], organization=query_parameters.get('organization'))
                else:
                    page = self.get(name=query_parameters['name'], organization=query_parameters.get('organization').id)
            else:
                page = self.get(name=query_parameters['name'])
        if page and page.results:
            for item in page.results:
                # We found a duplicate item, we will delete it
                # Some things, like inventory scripts, allow multiple scripts
                # by same name as long as they have different organization
                item.delete()
        # Now that we know that there is no duplicate, we create a new object
        return self.create(**query_parameters)

    def get_or_create(self, **query_parameters):
        """Get an object by this name or id if it exists, otherwise create it.

        Exceptions:
          - Users are looked up by username
          - Teams need to be looked up by name + organization
        """
        page = None
        # look up users by username not name
        if query_parameters.get('username') and 'users' in self:
            page = self.get(username=query_parameters['username'])
        if query_parameters.get('name'):
            if query_parameters.get('organization'):
                if isinstance(query_parameters.get('organization'), int):
                    page = self.get(name=query_parameters['name'], organization=query_parameters.get('organization'))
                else:
                    page = self.get(name=query_parameters['name'], organization=query_parameters.get('organization').id)
            else:
                page = self.get(name=query_parameters['name'])

        elif query_parameters.get('id'):
            page = self.get(id=query_parameters['id'])
        if page and page.results:
            item = page.results.pop()
            return item.url.get()
        else:
            # We did not find it given these params, we will create it instead
            return self.create(**query_parameters)

    def post(self, payload={}):
        return self._create().post(payload)

    def put(self):
        return self._create().put()

    def patch(self, **payload):
        return self._create().patch(**payload)

    def delete(self):
        return self._create().delete()

    def options(self):
        return self._create().options()

    def create(self, *a, **kw):
        return self._create().create(*a, **kw)

    def payload(self, *a, **kw):
        return self._create().payload(*a, **kw)

    def create_payload(self, *a, **kw):
        return self._create().create_payload(*a, **kw)

    def __str__(self):
        if hasattr(self, 'endpoint'):
            return self.endpoint
        return super(TentativePage, self).__str__()

    __repr__ = __str__

    def __eq__(self, other):
        return self.endpoint == other

    def __ne__(self, other):
        return self.endpoint != other

    # Defining __eq__ implicitly sets __hash__ to None, which would make this
    # str subclass unhashable (unusable as a dict key or set member).  Restore
    # the inherited string hash; it is consistent with __eq__ because the
    # instance's string value is the endpoint itself.
    __hash__ = str.__hash__


class PageCache(object):
    """Cache of fetched pages, keyed both by URL and by frozen natural key,
    to avoid refetching endpoints while resolving object identities.
    """

    def __init__(self, connection=None):
        self.options = {}
        self.pages_by_url = {}
        self.pages_by_natural_key = {}
        self.connection = connection or Connection(config.base_url, not config.assume_untrusted)

    def get_options(self, page):
        # Return cached OPTIONS metadata for the page's URL.  Endpoints that
        # error or are marked deprecated are cached as None so they are not
        # retried.
        url = page.endpoint if isinstance(page, Page) else str(page)
        if url in self.options:
            return self.options[url]

        try:
            options = page.options()
        except exc.Common:
            log.error("This endpoint raised an error: %s", url)
            return self.options.setdefault(url, None)

        warning = options.r.headers.get('Warning', '')
        if '299' in warning and 'deprecated' in warning:
            log.warning("This endpoint is deprecated: %s", url)
            return self.options.setdefault(url, None)

        return self.options.setdefault(url, options)

    def set_page(self, page):
        # Index the page by URL and, when it declares one, by natural key;
        # list pages also index each of their results recursively.
        log.debug("set_page: %s %s", type(page), page.endpoint)
        self.pages_by_url[page.endpoint] = page
        if getattr(page, 'NATURAL_KEY', None):
            log.debug("set_page has natural key fields.")
            natural_key = page.get_natural_key(cache=self)
            if natural_key is not None:
                log.debug("set_page natural_key: %s", repr(natural_key))
                self.pages_by_natural_key[utils.freeze(natural_key)] = page.endpoint
        if 'results' in page:
            for p in page.results:
                self.set_page(p)
        return page

    def get_page(self, page):
        # Return the cached page for this URL, fetching (all pages) and
        # caching it on a miss.  Errors and deprecated endpoints cache None.
        url = page.endpoint if isinstance(page, Page) else str(page)
        if url in self.pages_by_url:
            return self.pages_by_url[url]

        try:
            page = page.get(all_pages=True)
        except exc.Common:
            log.error("This endpoint raised an error: %s", url)
            return self.pages_by_url.setdefault(url, None)

        warning = page.r.headers.get('Warning', '')
        if '299' in warning and 'deprecated' in warning:
            log.warning("This endpoint is deprecated: %s", url)
            return self.pages_by_url.setdefault(url, None)

        log.debug("get_page: %s", page.endpoint)
        return self.set_page(page)

    def get_by_natural_key(self, natural_key):
        page = self.pages_by_natural_key.get(utils.freeze(natural_key))
        if page is None:
            # We need some way to get ahold of the top-level resource
            # list endpoint from the natural_key type.  The resources
            # object more or less has that for each of the detail
            # views.  Just chop off the /<id>/ bit.
            endpoint = getattr(resources, natural_key['type'], None)
            if endpoint is None:
                return
            endpoint = ''.join([endpoint.rsplit('/', 2)[0], '/'])
            page_type = get_registered_page(endpoint)

            kwargs = {}
            for k, v in natural_key.items():
                if isinstance(v, str) and k != 'type':
                    kwargs[k] = v

            # Do a filtered query against the list endpoint, usually
            # with the name of the object but sometimes more.
            list_page = page_type(self.connection, endpoint=endpoint).get(all_pages=True, **kwargs)
            if 'results' in list_page:
                for p in list_page.results:
                    self.set_page(p)
            page = self.pages_by_natural_key.get(utils.freeze(natural_key))

        log.debug("get_by_natural_key: %s, endpoint: %s", repr(natural_key), page)
        if page:
            return self.get_page(page)
0707010000002D000081A400000000000000000000000166846B920000009E000000000000000000000000000000000000002400000000awx-24.6.1/awxkit/api/pages/ping.pyfrom awxkit.api.resources import resources
from . import base
from . import page


class Ping(base.Base):
    # Plain page wrapper for the ping resource; no extra behavior needed.
    pass


page.register_page(resources.ping, Ping)
0707010000002E000081A400000000000000000000000166846B920000174D000000000000000000000000000000000000002800000000awx-24.6.1/awxkit/api/pages/projects.pyimport json

from awxkit.api.pages import Credential, Organization, UnifiedJob, UnifiedJobTemplate
from awxkit.utils import filter_by_class, random_title, update_payload, set_payload_foreign_key_args, PseudoNamespace
from awxkit.api.mixins import HasCreate, HasNotifications, HasCopy, DSAdapter
from awxkit.api.resources import resources
from awxkit.config import config

from . import base
from . import page


class Project(HasCopy, HasCreate, HasNotifications, UnifiedJobTemplate):
    """Page object for a single SCM project."""

    optional_dependencies = [Credential, Organization]
    optional_schedule_fields = tuple()
    NATURAL_KEY = ('organization', 'name')

    def payload(self, organization, scm_type='git', **kwargs):
        """Build a POST payload; scm_url defaults to the configured URL for
        *scm_type* when not supplied.
        """
        payload = PseudoNamespace(
            name=kwargs.get('name') or 'Project - {}'.format(random_title()),
            description=kwargs.get('description') or random_title(10),
            scm_type=scm_type,
            scm_url=kwargs.get('scm_url') or config.project_urls.get(scm_type, ''),
        )

        if organization is not None:
            payload.organization = organization.id

        if kwargs.get('credential'):
            payload.credential = kwargs.get('credential').id

        # Optional fields copied through only when present in kwargs.
        fields = (
            'scm_branch',
            'local_path',
            'scm_clean',
            'scm_delete_on_update',
            'scm_track_submodules',
            'scm_update_cache_timeout',
            'scm_update_on_launch',
            'scm_refspec',
            'allow_override',
            'signature_validation_credential',
        )
        update_payload(payload, fields, kwargs)

        payload = set_payload_foreign_key_args(payload, ('execution_environment', 'default_environment'), kwargs)

        return payload

    def create_payload(self, name='', description='', scm_type='git', scm_url='', scm_branch='', organization=Organization, credential=None, **kwargs):
        """Resolve dependencies (organization, optional scm credential) and build the payload."""
        if credential:
            if isinstance(credential, Credential):
                if credential.ds.credential_type.namespace not in ('scm', 'insights'):
                    credential = None  # ignore incompatible credential from HasCreate dependency injection
            # NOTE(review): the next two branches build the identical
            # dependency spec — they look collapsible; confirm before merging.
            elif credential in (Credential,):
                credential = (Credential, dict(credential_type=(True, dict(kind='scm'))))
            elif credential is True:
                credential = (Credential, dict(credential_type=(True, dict(kind='scm'))))

        self.create_and_update_dependencies(*filter_by_class((credential, Credential), (organization, Organization)))

        credential = self.ds.credential if credential else None
        organization = self.ds.organization if organization else None

        payload = self.payload(
            organization=organization,
            scm_type=scm_type,
            name=name,
            description=description,
            scm_url=scm_url,
            scm_branch=scm_branch,
            credential=credential,
            **kwargs
        )
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload

    def create(self, name='', description='', scm_type='git', scm_url='', scm_branch='', organization=Organization, credential=None, **kwargs):
        """POST a new project; unless wait=False is passed, also wait for the
        initial SCM update to complete and assert it succeeded.
        """
        payload = self.create_payload(
            name=name,
            description=description,
            scm_type=scm_type,
            scm_url=scm_url,
            scm_branch=scm_branch,
            organization=organization,
            credential=credential,
            **kwargs
        )
        self.update_identity(Projects(self.connection).post(payload))

        if kwargs.get('wait', True):
            update = self.related.current_update.get()
            update.wait_until_completed().assert_successful()
            return self.get()

        return self

    def update(self):
        """Update the project using related->update endpoint."""
        # get related->launch
        update_pg = self.get_related('update')

        # assert can_update == True
        assert update_pg.can_update, "The specified project (id:%s) is not able to update (can_update:%s)" % (self.id, update_pg.can_update)

        # start the update
        result = update_pg.post()

        # assert JSON response
        assert 'project_update' in result.json, "Unexpected JSON response when starting an project_update.\n%s" % json.dumps(result.json, indent=2)

        # locate and return the specific update
        jobs_pg = self.get_related('project_updates', id=result.json['project_update'])
        assert jobs_pg.count == 1, "An project_update started (id:%s) but job not found in response at %s/inventory_updates/" % (
            result.json['project_update'],
            self.url,
        )
        return jobs_pg.results[0]

    @property
    def is_successful(self):
        """An project is considered successful when:
        0) scm_type != ""
        1) unified_job_template.is_successful
        """
        return self.scm_type != "" and super(Project, self).is_successful


page.register_page([resources.project, (resources.projects, 'post'), (resources.project_copy, 'post')], Project)


class Projects(page.PageList, Project):
    pass


page.register_page([resources.projects, resources.related_projects], Projects)


class ProjectUpdate(UnifiedJob):
    """Page for a single project_update job."""

    pass


page.register_page(resources.project_update, ProjectUpdate)


class ProjectUpdates(page.PageList, ProjectUpdate):
    """List page for project updates."""

    pass


page.register_page([resources.project_updates, resources.project_project_updates], ProjectUpdates)


class ProjectUpdateLaunch(base.Base):
    """Page for a project's related ->update (launch) endpoint."""

    pass


page.register_page(resources.project_related_update, ProjectUpdateLaunch)


class ProjectUpdateCancel(base.Base):
    """Page for a project update's ->cancel endpoint."""

    pass


page.register_page(resources.project_update_cancel, ProjectUpdateCancel)


class ProjectCopy(base.Base):
    """Page for a project's ->copy endpoint."""

    pass


page.register_page(resources.project_copy, ProjectCopy)


class Playbooks(base.Base):
    """Page for a project's ->playbooks listing endpoint."""

    pass


page.register_page(resources.project_playbooks, Playbooks)
0707010000002F000081A400000000000000000000000166846B9200000418000000000000000000000000000000000000002500000000awx-24.6.1/awxkit/api/pages/roles.pyimport logging

from awxkit.api.resources import resources

from . import base
from . import page


log = logging.getLogger(__name__)


class Role(base.Base):
    """Page for a single RBAC role."""

    NATURAL_KEY = ('name',)

    def get_natural_key(self, cache=None):
        """Return this role's natural key, augmented with the natural key of
        its content object (the resource the role applies to) when one can be
        resolved; return None if that resolution fails.
        """
        if cache is None:
            cache = page.PageCache()

        natural_key = super(Role, self).get_natural_key(cache=cache)

        # Any related endpoint other than 'users'/'teams' points at the
        # resource this role belongs to.
        content_objs = [rel for name, rel in self.related.items() if name not in ('users', 'teams')]
        if not content_objs:
            return natural_key

        content_page = cache.get_page(content_objs[0])
        if content_page is None:
            log.error("Unable to obtain content_object %s for role %s", content_objs[0], self.endpoint)
            return None

        natural_key['content_object'] = content_page.get_natural_key(cache=cache)
        return natural_key


page.register_page(resources.role, Role)


class Roles(page.PageList, Role):
    """List page for roles."""

    pass


page.register_page([resources.roles, resources.related_roles, resources.related_object_roles], Roles)
07070100000030000081A400000000000000000000000166846B9200000925000000000000000000000000000000000000002900000000awx-24.6.1/awxkit/api/pages/schedules.pyfrom contextlib import suppress

from awxkit.api.pages import JobTemplate, SystemJobTemplate, Project, InventorySource
from awxkit.api.pages.workflow_job_templates import WorkflowJobTemplate
from awxkit.api.mixins import HasCreate
from awxkit.api.resources import resources
from awxkit.config import config
import awxkit.exceptions as exc

from . import page
from . import base


class Schedule(HasCreate, base.Base):
    # Unified job templates a schedule may be created against.
    dependencies = [JobTemplate, SystemJobTemplate, Project, InventorySource, WorkflowJobTemplate]
    NATURAL_KEY = ('unified_job_template', 'name')

    def silent_delete(self):
        """
        In every case, we start by disabling the schedule
        to avoid cascading errors from a cleanup failure.
        Then, if we are told to prevent_teardown of schedules, we keep them
        (only disabled); otherwise delete.  NoContent/NotFound/Forbidden are
        swallowed deliberately — this is best-effort cleanup.
        """
        try:
            self.patch(enabled=False)
            if not config.prevent_teardown:
                return self.delete()
        except (exc.NoContent, exc.NotFound, exc.Forbidden):
            pass


page.register_page([resources.schedule, resources.related_schedule], Schedule)


class Schedules(page.PageList, Schedule):
    """List page for schedules, with helpers for the sibling preview and
    zoneinfo endpoints and for prompt-related associations.
    """

    def get_zoneinfo(self):
        """Fetch the available timezone names from the zoneinfo endpoint."""
        return SchedulesZoneInfo(self.connection).get()

    def preview(self, rrule=''):
        """POST an rrule string and return the previewed occurrences."""
        return SchedulesPreview(self.connection).post({'rrule': rrule})

    def add_credential(self, cred):
        """Associate a prompted credential with the schedule."""
        with suppress(exc.NoContent):
            self.related.credentials.post({'id': cred.id})

    def remove_credential(self, cred):
        """Disassociate a prompted credential from the schedule."""
        with suppress(exc.NoContent):
            self.related.credentials.post({'id': cred.id, 'disassociate': True})

    def add_label(self, label):
        """Associate a label with the schedule."""
        with suppress(exc.NoContent):
            self.related.labels.post({'id': label.id})

    def add_instance_group(self, instance_group):
        """Associate an instance group with the schedule."""
        with suppress(exc.NoContent):
            self.related.instance_groups.post({'id': instance_group.id})


page.register_page([resources.schedules, resources.related_schedules], Schedules)


class SchedulesPreview(base.Base):
    """Page for the schedules preview endpoint (POST an rrule)."""

    pass


page.register_page(((resources.schedules_preview, 'post'),), SchedulesPreview)


class SchedulesZoneInfo(base.Base):
    """Page for the schedules zoneinfo endpoint."""

    def __getitem__(self, idx):
        # The endpoint returns a JSON array, so expose list-style indexing.
        return self.json[idx]


page.register_page(((resources.schedules_zoneinfo, 'get'),), SchedulesZoneInfo)
07070100000031000081A400000000000000000000000166846B9200000490000000000000000000000000000000000000002800000000awx-24.6.1/awxkit/api/pages/settings.pyfrom awxkit.api.resources import resources
from . import base
from . import page


class Setting(base.Base):
    """Page for a single settings category (ldap, saml, jobs, ...)."""

    pass


# One Setting page class serves every settings sub-endpoint.
page.register_page(
    [
        resources.setting,
        resources.settings_all,
        resources.settings_authentication,
        resources.settings_changed,
        resources.settings_github,
        resources.settings_github_org,
        resources.settings_github_team,
        resources.settings_google_oauth2,
        resources.settings_jobs,
        resources.settings_ldap,
        resources.settings_radius,
        resources.settings_saml,
        resources.settings_system,
        resources.settings_tacacsplus,
        resources.settings_ui,
        resources.settings_user,
        resources.settings_user_defaults,
    ],
    Setting,
)


class Settings(page.PageList, Setting):
    """List page for the settings index."""

    def get_endpoint(self, endpoint):
        """Helper method used to navigate to a specific settings endpoint.
        (Pdb) settings_pg.get_endpoint('all')
        """
        base_url = '{0}{1}/'.format(self.endpoint, endpoint)
        return self.walk(base_url)

    # Backwards-compatible alias for get_endpoint.
    get_setting = get_endpoint


page.register_page(resources.settings, Settings)
07070100000032000081A400000000000000000000000166846B92000000F7000000000000000000000000000000000000002D00000000awx-24.6.1/awxkit/api/pages/subscriptions.pyfrom awxkit.api.resources import resources
from . import page


class Subscriptions(page.Page):
    """Page for the subscriptions endpoint."""

    def get_possible_licenses(self, **kwargs):
        # POST the supplied fields and return the raw JSON body.
        return self.post(json=kwargs).json


page.register_page(resources.subscriptions, Subscriptions)
07070100000033000081A400000000000000000000000166846B920000033B000000000000000000000000000000000000002B00000000awx-24.6.1/awxkit/api/pages/survey_spec.pyfrom . import base
from . import page

from awxkit.api.resources import resources


class SurveySpec(base.Base):
    """Page for a job/workflow template survey spec.

    ``self.spec`` is the list of survey question entries.
    """

    def get_variable_default(self, var):
        """Return the default for survey variable *var*, or None if the
        variable (or its default) is absent.
        """
        entry = next((item for item in self.spec if item.get('variable') == var), None)
        if entry is not None:
            return entry.get('default')

    def get_default_vars(self):
        """Map each variable with a truthy default to that default."""
        return {item.variable: item.default for item in self.spec if item.get("default", None)}

    def get_required_vars(self):
        """List the variables flagged as required."""
        return [item.variable for item in self.spec if item.get("required", None)]


page.register_page([resources.job_template_survey_spec, resources.workflow_job_template_survey_spec], SurveySpec)
07070100000034000081A400000000000000000000000166846B920000039B000000000000000000000000000000000000003400000000awx-24.6.1/awxkit/api/pages/system_job_templates.pyfrom awxkit.api.mixins import HasNotifications
from awxkit.api.pages import UnifiedJobTemplate
from awxkit.api.resources import resources
from . import page


class SystemJobTemplate(UnifiedJobTemplate, HasNotifications):
    """Page for a system job template (launchable management job)."""

    NATURAL_KEY = ('name', 'organization')

    def launch(self, payload=None):
        """Launch the system_job_template using related->launch endpoint.

        ``payload`` defaults to an empty dict (None sentinel avoids the
        shared-mutable-default pitfall).  Returns the spawned system job page.
        """
        result = self.related.launch.post(payload if payload is not None else {})

        # return job
        # BUG FIX: the failure message previously read result.json['job'],
        # which would raise KeyError instead of the intended assertion
        # message — the launch response key is 'system_job'.
        jobs_pg = self.get_related('jobs', id=result.json['system_job'])
        assert jobs_pg.count == 1, "system_job_template launched (id:%s) but unable to find matching job at %s/jobs/" % (result.json['system_job'], self.url)
        return jobs_pg.results[0]


page.register_page(resources.system_job_template, SystemJobTemplate)


class SystemJobTemplates(page.PageList, SystemJobTemplate):
    """List page for system job templates."""

    pass


page.register_page(resources.system_job_templates, SystemJobTemplates)
07070100000035000081A400000000000000000000000166846B92000001A4000000000000000000000000000000000000002B00000000awx-24.6.1/awxkit/api/pages/system_jobs.pyfrom awxkit.api.pages import UnifiedJob
from awxkit.api.resources import resources
from . import page


class SystemJob(UnifiedJob):
    """Page for a single system job."""

    pass


page.register_page(resources.system_job, SystemJob)


class SystemJobs(page.PageList, SystemJob):
    """List page for system jobs."""

    pass


page.register_page(resources.system_jobs, SystemJobs)


class SystemJobCancel(UnifiedJob):
    """Page for a system job's ->cancel endpoint."""

    pass


page.register_page(resources.system_job_cancel, SystemJobCancel)
07070100000036000081A400000000000000000000000166846B92000006DB000000000000000000000000000000000000002500000000awx-24.6.1/awxkit/api/pages/teams.pyfrom contextlib import suppress

from awxkit.api.mixins import HasCreate, DSAdapter
from awxkit.utils import random_title, PseudoNamespace
from awxkit.api.resources import resources
from awxkit.api.pages import Organization
from awxkit.exceptions import NoContent

from . import base
from . import page


class Team(HasCreate, base.Base):
    """Page for a single team; can self-create via POST to the teams list."""

    dependencies = [Organization]
    NATURAL_KEY = ('organization', 'name')

    def add_user(self, user):
        """Associate *user* (a page or raw JSON) with this team, tolerating
        the empty 204 reply.
        """
        if isinstance(user, page.Page):
            user = user.json
        with suppress(NoContent):
            self.related.users.post(user)

    def payload(self, organization, **kwargs):
        """Build a team POST payload, generating a random name/description
        when none are supplied.
        """
        team_name = kwargs.get('name') or 'Team - {}'.format(random_title())
        team_description = kwargs.get('description') or random_title(10)
        return PseudoNamespace(name=team_name, description=team_description, organization=organization.id)

    def create_payload(self, name='', description='', organization=Organization, **kwargs):
        """Resolve the organization dependency, then build the payload with
        the dependency-store adapter attached.
        """
        self.create_and_update_dependencies(organization)
        fields = self.payload(organization=self.ds.organization, name=name, description=description, **kwargs)
        fields.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return fields

    def create(self, name='', description='', organization=Organization, **kwargs):
        """POST the team and adopt the created resource."""
        fields = self.create_payload(name=name, description=description, organization=organization, **kwargs)
        return self.update_identity(Teams(self.connection).post(fields))


page.register_page([resources.team, (resources.teams, 'post')], Team)


class Teams(page.PageList, Team):
    """List page for teams."""

    pass


page.register_page([resources.teams, resources.credential_owner_teams, resources.related_teams], Teams)
07070100000037000081A400000000000000000000000166846B9200000B8C000000000000000000000000000000000000003500000000awx-24.6.1/awxkit/api/pages/unified_job_templates.pyfrom awxkit.api.resources import resources
from awxkit.utils import random_title, update_payload
from awxkit.api.mixins import HasStatus
from . import base
from . import page


class UnifiedJobTemplate(HasStatus, base.Base):
    """Base class for unified job template pages (e.g. project, inventory_source,
    and job_template).
    """

    # Prompt-style fields that may optionally be supplied when creating a
    # schedule against this template.
    optional_schedule_fields = (
        'extra_data',
        'diff_mode',
        'limit',
        'job_tags',
        'skip_tags',
        'job_type',
        'verbosity',
        'inventory',
        'forks',
        'timeout',
        'job_slice_count',
        'execution_environment',
    )

    def __str__(self):
        """Render a debug-friendly summary of whichever of the well-known
        fields this template actually has.
        """
        # NOTE: I use .replace('%', '%%') to workaround an odd string
        # formatting issue where result_stdout contained '%s'.  This later caused
        # a python traceback when attempting to display output from this
        # method.
        items = ['id', 'name', 'status', 'source', 'last_update_failed', 'last_updated', 'result_traceback', 'job_explanation', 'job_args']
        info = []
        for item in [x for x in items if hasattr(self, x)]:
            info.append('{0}:{1}'.format(item, getattr(self, item)))
        output = '<{0.__class__.__name__} {1}>'.format(self, ', '.join(info))
        return output.replace('%', '%%')

    def add_schedule(self, name='', description='', enabled=True, rrule=None, **kwargs):
        """POST a schedule against this template's ->schedules endpoint and
        remember it so silent_delete() can tear it down later.

        The default rrule starts in the year 3018, so such a schedule never
        actually fires.
        """
        if rrule is None:
            rrule = "DTSTART:30180101T000000Z RRULE:FREQ=YEARLY;INTERVAL=1"
        payload = dict(
            name=name or "{0} Schedule {1}".format(self.name, random_title()), description=description or random_title(10), enabled=enabled, rrule=str(rrule)
        )

        update_payload(payload, self.optional_schedule_fields, kwargs)

        schedule = self.related.schedules.post(payload)
        # register schedule in temporary dependency store as means of
        # getting its teardown method to run on cleanup
        if not hasattr(self, '_schedules_store'):
            self._schedules_store = set()
        if schedule not in self._schedules_store:
            self._schedules_store.add(schedule)
        return schedule

    def silent_delete(self):
        """Best-effort delete of any schedules created via add_schedule(),
        then of the template itself.
        """
        if hasattr(self, '_schedules_store'):
            for schedule in self._schedules_store:
                schedule.silent_delete()
        return super(UnifiedJobTemplate, self).silent_delete()

    @property
    def is_successful(self):
        """A unified_job_template is considered successful when:
        1) status == 'successful'
        2) not last_update_failed
        3) last_updated
        """
        return super(UnifiedJobTemplate, self).is_successful and not self.last_update_failed and self.last_updated is not None


page.register_page(resources.unified_job_template, UnifiedJobTemplate)


class UnifiedJobTemplates(page.PageList, UnifiedJobTemplate):
    """List page for unified job templates."""

    pass


page.register_page(resources.unified_job_templates, UnifiedJobTemplates)
07070100000038000081A400000000000000000000000166846B9200001A6F000000000000000000000000000000000000002C00000000awx-24.6.1/awxkit/api/pages/unified_jobs.pyfrom pprint import pformat

import yaml.parser
import yaml.scanner
import yaml

from awxkit.utils import args_string_to_list, seconds_since_date_string
from awxkit.api.resources import resources
from awxkit.api.mixins import HasStatus
import awxkit.exceptions as exc
from . import base
from . import page


class UnifiedJob(HasStatus, base.Base):
    """Base class for unified job pages (e.g. project_updates, inventory_updates
    and jobs).
    """

    def __str__(self):
        """Render a debug-friendly summary of whichever of the well-known
        fields this job actually has.
        """
        # NOTE: I use .replace('%', '%%') to workaround an odd string
        # formatting issue where result_stdout contained '%s'.  This later caused
        # a python traceback when attempting to display output from this method.
        items = ['id', 'name', 'status', 'failed', 'result_stdout', 'result_traceback', 'job_explanation', 'job_args']
        info = []
        for item in [x for x in items if hasattr(self, x)]:
            info.append('{0}:{1}'.format(item, getattr(self, item)))
        output = '<{0.__class__.__name__} {1}>'.format(self, ', '.join(info))
        return output.replace('%', '%%')

    @property
    def result_stdout(self):
        """Return the job's stdout as text, downloading it from the related
        ->stdout endpoint when the serialized page does not embed it.
        """
        if 'result_stdout' not in self.json and 'stdout' in self.related:
            return self.connection.get(self.related.stdout, query_parameters=dict(format='txt_download')).content.decode()
        # NOTE(review): assumes the embedded result_stdout is bytes —
        # a str here would raise AttributeError on .decode(); confirm.
        return self.json.result_stdout.decode()

    def assert_text_in_stdout(self, expected_text, replace_spaces=None, replace_newlines=' '):
        """Assert text is found in stdout, and if not raise exception with entire stdout.

        Default behavior is to replace newline characters with a space, but this can be modified, including replacement
        with ''. Pass replace_newlines=None to disable.

        Additionally, you may replace any space with another character (including ''). This is applied after the newline
        replacement. Default behavior is to not replace spaces.
        """
        self.wait_until_completed()
        stdout = self.result_stdout
        if replace_newlines is not None:
            # make text into string with no line breaks, but watch out for trailing whitespace
            stdout = replace_newlines.join([line.strip() for line in stdout.split('\n')])
        if replace_spaces is not None:
            stdout = stdout.replace(' ', replace_spaces)
        if expected_text not in stdout:
            pretty_stdout = pformat(stdout)
            raise AssertionError('Expected "{}", but it was not found in stdout. Full stdout:\n {}'.format(expected_text, pretty_stdout))

    @property
    def is_successful(self):
        """Return whether the current has completed successfully.

        This means that:
         * self.status == 'successful'
         * self.has_traceback == False
         * self.failed == False
        """
        return super(UnifiedJob, self).is_successful and not (self.has_traceback or self.failed)

    def wait_until_status(self, status, interval=1, timeout=60, since_job_created=True, **kwargs):
        # Budget the timeout from job creation time rather than from "now"
        # so that time already elapsed counts against the wait.
        if since_job_created:
            timeout = timeout - seconds_since_date_string(self.created)
        return super(UnifiedJob, self).wait_until_status(status, interval, timeout, **kwargs)

    def wait_until_completed(self, interval=5, timeout=60 * 8, since_job_created=True, **kwargs):
        # Same creation-relative timeout budgeting as wait_until_status.
        if since_job_created:
            timeout = timeout - seconds_since_date_string(self.created)
        return super(UnifiedJob, self).wait_until_completed(interval, timeout, **kwargs)

    @property
    def has_traceback(self):
        """Return whether a traceback has been detected in result_traceback"""
        try:
            tb = str(self.result_traceback)
        except AttributeError:
            # If record obtained from list view, then traceback isn't given
            # and result_stdout is only given for some types
            # we must suppress AttributeError or else it will be mis-interpreted
            # by __getattr__
            tb = ''
        return 'Traceback' in tb

    def cancel(self):
        """Best-effort cancel of the job via its ->cancel endpoint; returns a
        refreshed page (or None if the job was never cancelable).
        """
        cancel = self.get_related('cancel')
        if not cancel.can_cancel:
            return
        try:
            cancel.post()
        except exc.MethodNotAllowed as e:
            # Race condition where job finishes between can_cancel
            # check and post.
            if not any("not allowed" in field for field in e.msg.values()):
                raise (e)
        return self.get()

    @property
    def job_args(self):
        """Helper property to return flattened cmdline arg tokens in a list.
        Flattens arg strings for rough inclusion checks:
        ```assert "thing" in unified_job.job_args```
        ```assert dict(extra_var=extra_var_val) in unified_job.job_args```
        If you need to ensure the job_args are of awx-provided format use raw unified_job.json.job_args.

        Note: returns '' (not a list) when the raw job_args field is empty.
        """

        def attempt_yaml_load(arg):
            # Fall back to the literal string when an arg is not valid YAML.
            try:
                return yaml.safe_load(arg)
            except (yaml.parser.ParserError, yaml.scanner.ScannerError):
                return str(arg)

        args = []
        if not self.json.job_args:
            return ""
        for arg in yaml.safe_load(self.json.job_args):
            try:
                args.append(yaml.safe_load(arg))
            except (yaml.parser.ParserError, yaml.scanner.ScannerError):
                if arg[0] == '@':  # extra var file reference
                    args.append(attempt_yaml_load(arg))
                elif args[-1] == '-c':  # this arg is likely sh arg string
                    args.extend([attempt_yaml_load(item) for item in args_string_to_list(arg)])
                else:
                    raise
        return args

    @property
    def controller_dir(self):
        """Returns the path to the private_data_dir on the controller node for the job
        This can be used if trying to shell in and inspect the files used by the job
        Cannot use job_cwd, because that is path inside EE container
        """
        self.get()
        job_args = self.job_args
        expected_prefix = '/tmp/awx_{}'.format(self.id)
        # Scan '-v host:container' volume-mount pairs for the expected
        # host-side private data dir prefix.
        for arg1, arg2 in zip(job_args[:-1], job_args[1:]):
            if arg1 == '-v':
                if ':' in arg2:
                    host_loc = arg2.split(':')[0]
                    if host_loc.startswith(expected_prefix):
                        return host_loc
        raise RuntimeError(
            'Could not find a controller private_data_dir for this job. Searched for volume mount to {} inside of args {}'.format(expected_prefix, job_args)
        )


class UnifiedJobs(page.PageList, UnifiedJob):
    """List page for unified jobs."""

    pass


page.register_page([resources.unified_jobs, resources.instance_related_jobs, resources.instance_group_related_jobs, resources.schedules_jobs], UnifiedJobs)
07070100000039000081A400000000000000000000000166846B920000076B000000000000000000000000000000000000002500000000awx-24.6.1/awxkit/api/pages/users.pyfrom awxkit.api.mixins import HasCreate, DSAdapter
from awxkit.utils import random_title, PseudoNamespace
from awxkit.api.resources import resources
from awxkit.config import config

from . import base
from . import page


class User(HasCreate, base.Base):
    """Page for a single user; can self-create via POST to the users list."""

    NATURAL_KEY = ('username',)

    def payload(self, **kwargs):
        """Build a user POST payload, generating random values for any field
        not supplied in kwargs (password falls back to the configured
        default credential password).
        """
        fields = dict(
            username=kwargs.get('username') or 'User-{}'.format(random_title(non_ascii=False)),
            password=kwargs.get('password') or config.credentials.default.password,
            is_superuser=kwargs.get('is_superuser', False),
            is_system_auditor=kwargs.get('is_system_auditor', False),
            first_name=kwargs.get('first_name', random_title()),
            last_name=kwargs.get('last_name', random_title()),
            email=kwargs.get('email', '{}@example.com'.format(random_title(5, non_ascii=False))),
        )
        return PseudoNamespace(**fields)

    def create_payload(self, username='', password='', **kwargs):
        """Build the payload with the dependency-store adapter attached."""
        user_payload = self.payload(username=username, password=password, **kwargs)
        user_payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return user_payload

    def create(self, username='', password='', organization=None, **kwargs):
        """POST the user, remember the (possibly generated) password on this
        page, and optionally attach the user to an organization.
        """
        user_payload = self.create_payload(username=username, password=password, **kwargs)
        self.password = user_payload.password

        self.update_identity(Users(self.connection).post(user_payload))

        if organization:
            organization.add_user(self)

        return self


page.register_page([resources.user, (resources.users, 'post')], User)


class Users(page.PageList, User):
    """List page for users."""

    pass


page.register_page(
    [resources.users, resources.organization_admins, resources.related_users, resources.credential_owner_users, resources.user_admin_organizations], Users
)


class Me(Users):
    """Page for the ->me endpoint (list view of the authenticated user)."""

    pass


page.register_page(resources.me, Me)
0707010000003A000081A400000000000000000000000166846B9200000227000000000000000000000000000000000000003B00000000awx-24.6.1/awxkit/api/pages/workflow_approval_templates.pyfrom awxkit.api.pages.unified_job_templates import UnifiedJobTemplate
from awxkit.api.resources import resources
from . import page


class WorkflowApprovalTemplate(UnifiedJobTemplate):
    """Page for a workflow approval template."""

    pass


page.register_page(
    [
        resources.workflow_approval_template,
        resources.workflow_job_template_node_create_approval_template,
    ],
    WorkflowApprovalTemplate,
)


class WorkflowApprovalTemplates(page.PageList, WorkflowApprovalTemplate):
    """List page for workflow approval templates."""

    pass


page.register_page(resources.workflow_approval_templates, WorkflowApprovalTemplates)
0707010000003B000081A400000000000000000000000166846B9200000278000000000000000000000000000000000000003200000000awx-24.6.1/awxkit/api/pages/workflow_approvals.pyfrom awxkit.api.pages import UnifiedJob
from awxkit.api.resources import resources
from . import page
from awxkit import exceptions


class WorkflowApproval(UnifiedJob):
    """Page for a workflow approval job; can be approved or denied."""

    def _decide(self, endpoint_name):
        # POST to the named related endpoint, tolerating the empty 204 reply.
        try:
            getattr(self.related, endpoint_name).post()
        except exceptions.NoContent:
            pass

    def approve(self):
        """Approve this pending workflow approval."""
        self._decide('approve')

    def deny(self):
        """Deny this pending workflow approval."""
        self._decide('deny')


page.register_page(resources.workflow_approval, WorkflowApproval)


class WorkflowApprovals(page.PageList, WorkflowApproval):
    """List page for workflow approvals."""

    pass


page.register_page(resources.workflow_approvals, WorkflowApprovals)
0707010000003C000081A400000000000000000000000166846B9200000420000000000000000000000000000000000000003200000000awx-24.6.1/awxkit/api/pages/workflow_job_nodes.pyfrom awxkit.api.pages import base
from awxkit.api.resources import resources
from awxkit.utils import poll_until, seconds_since_date_string
from . import page


class WorkflowJobNode(base.Base):
    """Page for a node within a running workflow job."""

    def wait_for_job(self, interval=5, timeout=60, **kw):
        """Waits until node's job exists"""
        # Budget the timeout from node creation time rather than from now.
        adjusted_timeout = timeout - seconds_since_date_string(self.created)

        poll_until(self.job_exists, interval=interval, timeout=adjusted_timeout, **kw)

        return self

    def job_exists(self):
        """Refresh the node and report whether a job has been spawned for it."""
        self.get()
        try:
            return self.job
        except AttributeError:
            # No 'job' attribute yet — the node's job has not been spawned.
            return False


page.register_page(resources.workflow_job_node, WorkflowJobNode)


class WorkflowJobNodes(page.PageList, WorkflowJobNode):
    """List page for workflow job nodes."""

    pass


page.register_page(
    [
        resources.workflow_job_nodes,
        resources.workflow_job_workflow_nodes,
        resources.workflow_job_node_always_nodes,
        resources.workflow_job_node_failure_nodes,
        resources.workflow_job_node_success_nodes,
    ],
    WorkflowJobNodes,
)
0707010000003D000081A400000000000000000000000166846B9200001489000000000000000000000000000000000000003B00000000awx-24.6.1/awxkit/api/pages/workflow_job_template_nodes.pyfrom contextlib import suppress

import awxkit.exceptions as exc
from awxkit.api.pages import base, WorkflowJobTemplate, UnifiedJobTemplate, JobTemplate
from awxkit.api.mixins import HasCreate, DSAdapter
from awxkit.api.resources import resources
from awxkit.utils import update_payload, PseudoNamespace, random_title
from . import page


class WorkflowJobTemplateNode(HasCreate, base.Base):
    """Page for a workflow job template node; supports creating nodes,
    wiring success/failure/always edges, and managing prompted resources.
    """

    dependencies = [WorkflowJobTemplate, UnifiedJobTemplate]
    NATURAL_KEY = ('workflow_job_template', 'identifier')

    def payload(self, workflow_job_template, unified_job_template, **kwargs):
        """Build a POST payload for a node in *workflow_job_template*.

        ``unified_job_template`` may be None to create a bare node (e.g. one
        destined to become an approval node).
        """
        if not unified_job_template:
            # May pass "None" to explicitly create an approval node
            payload = PseudoNamespace(workflow_job_template=workflow_job_template.id)
        else:
            payload = PseudoNamespace(workflow_job_template=workflow_job_template.id, unified_job_template=unified_job_template.id)

        # FIX: 'extra_data' was listed twice in this tuple; deduplicated
        # (update_payload behavior is unchanged — the duplicate was inert).
        optional_fields = (
            'diff_mode',
            'extra_data',
            'limit',
            'scm_branch',
            'job_tags',
            'job_type',
            'skip_tags',
            'verbosity',
            'identifier',
            'all_parents_must_converge',
            # prompt fields for JTs
            'job_slice_count',
            'forks',
            'timeout',
            'execution_environment',
        )

        update_payload(payload, optional_fields, kwargs)

        if 'inventory' in kwargs:
            payload['inventory'] = kwargs['inventory'].id

        return payload

    def create_payload(self, workflow_job_template=WorkflowJobTemplate, unified_job_template=JobTemplate, **kwargs):
        """Resolve dependencies then build the payload with the
        dependency-store adapter attached.  A falsy unified_job_template
        creates a bare (approval-style) node.
        """
        if not unified_job_template:
            self.create_and_update_dependencies(workflow_job_template)
            payload = self.payload(workflow_job_template=self.ds.workflow_job_template, unified_job_template=None, **kwargs)
        else:
            self.create_and_update_dependencies(workflow_job_template, unified_job_template)
            payload = self.payload(workflow_job_template=self.ds.workflow_job_template, unified_job_template=self.ds.unified_job_template, **kwargs)
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload

    def create(self, workflow_job_template=WorkflowJobTemplate, unified_job_template=JobTemplate, **kwargs):
        """POST the node and adopt the created resource."""
        payload = self.create_payload(workflow_job_template=workflow_job_template, unified_job_template=unified_job_template, **kwargs)
        return self.update_identity(WorkflowJobTemplateNodes(self.connection).post(payload))

    def _add_node(self, endpoint, unified_job_template, **kwargs):
        # POST a downstream node to the given edge endpoint and register its
        # dependencies so teardown runs on cleanup.
        node = endpoint.post(dict(unified_job_template=unified_job_template.id, **kwargs))
        node.create_and_update_dependencies(self.ds.workflow_job_template, unified_job_template)
        return node

    def add_always_node(self, unified_job_template, **kwargs):
        """Attach a node that runs regardless of this node's outcome."""
        return self._add_node(self.related.always_nodes, unified_job_template, **kwargs)

    def add_failure_node(self, unified_job_template, **kwargs):
        """Attach a node that runs when this node fails."""
        return self._add_node(self.related.failure_nodes, unified_job_template, **kwargs)

    def add_success_node(self, unified_job_template, **kwargs):
        """Attach a node that runs when this node succeeds."""
        return self._add_node(self.related.success_nodes, unified_job_template, **kwargs)

    def add_credential(self, credential):
        """Associate a prompted credential with this node."""
        with suppress(exc.NoContent):
            self.related.credentials.post(dict(id=credential.id, associate=True))

    def remove_credential(self, credential):
        """Disassociate a prompted credential from this node."""
        with suppress(exc.NoContent):
            self.related.credentials.post(dict(id=credential.id, disassociate=True))

    def remove_all_credentials(self):
        """Disassociate every credential currently attached to this node."""
        for cred in self.related.credentials.get().results:
            with suppress(exc.NoContent):
                self.related.credentials.post(dict(id=cred.id, disassociate=True))

    def make_approval_node(self, **kwargs):
        """Turn this (bare) node into an approval node and return the
        refreshed page; a random name is generated when none is given.
        """
        if 'name' not in kwargs:
            kwargs['name'] = 'approval node {}'.format(random_title())
        self.related.create_approval_template.post(kwargs)
        return self.get()

    def get_job_node(self, workflow_job):
        """Return the workflow-job node that corresponds to this template
        node (matched by identifier) within *workflow_job*.
        """
        candidates = workflow_job.get_related('workflow_nodes', identifier=self.identifier)
        return candidates.results.pop()

    def add_label(self, label):
        """Associate a prompted label with this node."""
        with suppress(exc.NoContent):
            self.related.labels.post(dict(id=label.id))

    def add_instance_group(self, instance_group):
        """Associate a prompted instance group with this node."""
        with suppress(exc.NoContent):
            self.related.instance_groups.post(dict(id=instance_group.id))


page.register_page(
    [resources.workflow_job_template_node, (resources.workflow_job_template_nodes, 'post'), (resources.workflow_job_template_workflow_nodes, 'post')],
    WorkflowJobTemplateNode,
)


class WorkflowJobTemplateNodes(page.PageList, WorkflowJobTemplateNode):
    """List page for workflow job template nodes."""

    pass


page.register_page(
    [
        resources.workflow_job_template_nodes,
        resources.workflow_job_template_workflow_nodes,
        resources.workflow_job_template_node_always_nodes,
        resources.workflow_job_template_node_failure_nodes,
        resources.workflow_job_template_node_success_nodes,
    ],
    WorkflowJobTemplateNodes,
)
0707010000003E000081A400000000000000000000000166846B920000117B000000000000000000000000000000000000003600000000awx-24.6.1/awxkit/api/pages/workflow_job_templates.pyfrom contextlib import suppress
import json

from awxkit.api.mixins import HasCreate, HasNotifications, HasSurvey, HasCopy, DSAdapter
from awxkit.api.pages import Organization, UnifiedJobTemplate
from awxkit.utils import filter_by_class, not_provided, update_payload, random_title, PseudoNamespace
from awxkit.api.resources import resources
import awxkit.exceptions as exc

from . import base
from . import page


class WorkflowJobTemplate(HasCopy, HasCreate, HasNotifications, HasSurvey, UnifiedJobTemplate):
    """Page object for a single AWX workflow job template, with helpers for
    launching workflows and building creation payloads.
    """

    optional_dependencies = [Organization]
    NATURAL_KEY = ('organization', 'name')

    def launch(self, payload=None):
        """Launch using related->launch endpoint.

        payload: optional dict of launch-time parameters to POST.
        Returns the spawned workflow job page.
        Raises exc.UnexpectedAWXState if the spawned job cannot be found.
        """
        # FIX: default changed from a shared mutable dict ({}) to None to avoid
        # the mutable-default-argument pitfall; callers see identical behavior.
        if payload is None:
            payload = {}

        # get related->launch
        launch_pg = self.get_related('launch')

        # launch the workflow_job_template
        result = launch_pg.post(payload)

        # return job
        jobs_pg = self.related.workflow_jobs.get(id=result.workflow_job)
        if jobs_pg.count != 1:
            msg = "workflow_job_template launched (id:{}) but job not found in response at {}/workflow_jobs/".format(result.json['workflow_job'], self.url)
            raise exc.UnexpectedAWXState(msg)
        return jobs_pg.results[0]

    def payload(self, **kwargs):
        """Build a POST payload for a workflow job template from **kwargs.

        Generates a random name/description when none are supplied; copies the
        recognized optional fields; serializes a dict ``extra_vars`` to JSON;
        and resolves organization/inventory/webhook_credential objects to ids.
        """
        payload = PseudoNamespace(
            name=kwargs.get('name') or 'WorkflowJobTemplate - {}'.format(random_title()), description=kwargs.get('description') or random_title(10)
        )

        optional_fields = (
            "allow_simultaneous",
            "ask_variables_on_launch",
            "ask_inventory_on_launch",
            "ask_scm_branch_on_launch",
            "ask_limit_on_launch",
            "ask_labels_on_launch",
            "ask_skip_tags_on_launch",
            "ask_tags_on_launch",
            "limit",
            "scm_branch",
            "survey_enabled",
            "webhook_service",
            "webhook_credential",
        )
        update_payload(payload, optional_fields, kwargs)

        # The API expects extra_vars as a JSON string, not a dict.
        extra_vars = kwargs.get('extra_vars', not_provided)
        if extra_vars != not_provided:
            if isinstance(extra_vars, dict):
                extra_vars = json.dumps(extra_vars)
            payload.update(extra_vars=extra_vars)

        if kwargs.get('organization'):
            payload.organization = kwargs.get('organization').id

        if kwargs.get('inventory'):
            payload.inventory = kwargs.get('inventory').id

        # Accept either a pk integer or a Credential-like object with an .id.
        if kwargs.get('webhook_credential'):
            webhook_cred = kwargs.get('webhook_credential')
            if isinstance(webhook_cred, int):
                payload.update(webhook_credential=int(webhook_cred))
            elif hasattr(webhook_cred, 'id'):
                payload.update(webhook_credential=webhook_cred.id)
            else:
                raise AttributeError("Webhook credential must either be integer of pkid or Credential object")

        return payload

    def create_payload(self, name='', description='', organization=None, **kwargs):
        """Build a creation payload, registering *organization* (when given)
        in the dependency store and attaching a DSAdapter to the payload.
        """
        self.create_and_update_dependencies(*filter_by_class((organization, Organization)))
        organization = self.ds.organization if organization else None
        payload = self.payload(name=name, description=description, organization=organization, **kwargs)
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload

    def create(self, name='', description='', organization=None, **kwargs):
        """POST a new workflow job template and update this page's identity to it."""
        payload = self.create_payload(name=name, description=description, organization=organization, **kwargs)
        return self.update_identity(WorkflowJobTemplates(self.connection).post(payload))

    def add_label(self, label):
        """Associate *label* (a page or raw dict) with this workflow job template."""
        if isinstance(label, page.Page):
            label = label.json
        with suppress(exc.NoContent):
            self.related.labels.post(label)


# Register the template detail endpoint plus the two POST-creating endpoints
# (collection create and copy) to the single-template page class.
page.register_page(
    [resources.workflow_job_template, (resources.workflow_job_templates, 'post'), (resources.workflow_job_template_copy, 'post')], WorkflowJobTemplate
)


class WorkflowJobTemplates(page.PageList, WorkflowJobTemplate):
    """List page for collections of workflow job templates."""


page.register_page([resources.workflow_job_templates, resources.related_workflow_job_templates], WorkflowJobTemplates)


class WorkflowJobTemplateLaunch(base.Base):
    """Page for the workflow job template launch endpoint."""


page.register_page(resources.workflow_job_template_launch, WorkflowJobTemplateLaunch)


class WorkflowJobTemplateCopy(base.Base):
    """Page for the workflow job template copy endpoint."""


page.register_page([resources.workflow_job_template_copy], WorkflowJobTemplateCopy)
0707010000003F000081A400000000000000000000000166846B92000007AF000000000000000000000000000000000000002D00000000awx-24.6.1/awxkit/api/pages/workflow_jobs.pyfrom awxkit.api.pages import UnifiedJob
from awxkit.api.resources import resources
from . import page


class WorkflowJob(UnifiedJob):
    """Page object for a workflow job run."""

    def __str__(self):
        # TODO: Update after endpoint's fields are finished filling out
        # NOTE: super() is passed UnifiedJob (not WorkflowJob) on purpose, so
        # UnifiedJob.__str__ is skipped until the endpoint fields settle.
        return super(UnifiedJob, self).__str__()

    def relaunch(self, payload=None):
        """POST to related->relaunch and return the page for the new run.

        payload: optional dict of relaunch parameters.
        """
        # FIX: default changed from a shared mutable dict ({}) to None to avoid
        # the mutable-default-argument pitfall; callers see identical behavior.
        result = self.related.relaunch.post({} if payload is None else payload)
        return self.walk(result.url)

    def failure_output_details(self):
        """Special implementation of this part of assert_status so that
        workflow_job.assert_successful() will give a breakdown of failure
        """
        node_list = self.related.workflow_nodes.get().results

        # First section: one line per node with its job summary and edges.
        msg = '\nNode summary:'
        for node in node_list:
            msg += '\n{}: {}'.format(node.id, node.summary_fields.get('job'))
            for rel in ('failure_nodes', 'always_nodes', 'success_nodes'):
                val = getattr(node, rel, [])
                if val:
                    msg += ' {} {}'.format(rel, val)

        # Second section: collect assertion text from each unhandled failure.
        msg += '\n\nUnhandled individual job failures:\n'
        for node in node_list:
            # nodes without always or failure paths consider failures unhandled
            if node.job and not (node.failure_nodes or node.always_nodes):
                job = node.related.job.get()
                try:
                    job.assert_successful()
                except Exception as e:
                    msg += str(e)

        return msg

    @property
    def result_stdout(self):
        # workflow jobs do not have result_stdout
        if 'result_stdout' not in self.json:
            return 'Unprovided AWX field.'
        else:
            return super(WorkflowJob, self).result_stdout


page.register_page(resources.workflow_job, WorkflowJob)


class WorkflowJobs(page.PageList, WorkflowJob):
    """List page for collections of workflow jobs."""


page.register_page([resources.workflow_jobs, resources.workflow_job_template_jobs, resources.job_template_slice_workflow_jobs], WorkflowJobs)
07070100000040000081A400000000000000000000000166846B9200001A1F000000000000000000000000000000000000002200000000awx-24.6.1/awxkit/api/registry.pyfrom collections import defaultdict
import logging
import re

from awxkit.utils import is_list_or_tuple, not_provided

log = logging.getLogger(__name__)


class URLRegistry(object):
    """Maps url patterns (and optional HTTP-method patterns) to arbitrary
    resources so they can later be looked up by concrete endpoint via get().
    """

    def __init__(self):
        # store: {compiled url pattern: {compiled method pattern: resource}}
        self.store = defaultdict(dict)
        # default: {compiled method pattern: fallback resource}; populated by
        # setdefault() and consulted by get() before scanning the store.
        self.default = {}

    def url_pattern(self, pattern_str):
        """Converts some regex-friendly url pattern (Resources().resource string)
        to a compiled pattern.
        """
        # should account for any relative endpoint w/ query parameters
        pattern = r'^' + pattern_str + r'(\?.*)*$'
        return re.compile(pattern)

    def _generate_url_iterable(self, url_iterable):
        # Normalize mixed input (url | (url, method) | (url, (methods...)))
        # into a flat list of (compiled url pattern, compiled method pattern);
        # a bare url gets the not_provided sentinel as its method pattern.
        parsed_urls = []
        for url in url_iterable:
            method = not_provided
            if is_list_or_tuple(url):
                url, method = url
            if not is_list_or_tuple(method):
                methods = (method,)
            else:
                methods = method
            for method in methods:
                method_pattern = re.compile(method)
                url_pattern = self.url_pattern(url)
                parsed_urls.append((url_pattern, method_pattern))
        return parsed_urls

    def register(self, *args):
        """Registers a single resource (generic python type or object) to either
        1. a single url string (internally coverted via URLRegistry.url_pattern) and optional method or method iterable
        2. a list or tuple of url string and optional method or method iterables
        for retrieval via get().

        reg.register('/some/path/', ResourceOne)
        reg.get('/some/path/')
        -> ResourceOne
        reg.register('/some/other/path/', 'method', ResourceTwo)
        reg.get('/some/other/path/', 'method')
        -> ResourceTwo
        reg.register('/some/additional/path/', ('method_one', 'method_two'), ResourceThree)
        reg.get('/some/additional/path/', 'method_one')
        -> ResourceThree
        reg.get('/some/additional/path/', 'method_two')
        -> ResourceThree
        reg.register(('/some/new/path/one/', '/some/new/path/two/',
                      ('/some/other/new/path', 'method'),
                      ('/some/other/additional/path/, ('method_one', 'method_two')), ResourceFour))
        reg.get('/some/other/new/path/', 'method')
        -> ResourceFour
        """
        if not args or len(args) == 1:
            raise TypeError('register needs at least a url and Resource.')
        elif len(args) not in (2, 3):
            raise TypeError('register takes at most 3 arguments ({} given).'.format(len(args)))

        if len(args) == 3:  # url, method (iterable), and Resource
            url_iterable = (args[:2],)
            resource = args[2]
        else:
            urls, resource = args
            if not is_list_or_tuple(urls):
                url_iterable = [(urls, not_provided)]
            else:
                url_iterable = urls

        url_iterable = self._generate_url_iterable(url_iterable)
        for url_pattern, method_pattern in url_iterable:
            # NOTE(review): this duplicate check keys the store dicts by
            # compiled pattern objects; it appears to rely on re.compile
            # returning the cached, identical object for a repeated pattern
            # string — confirm this holds for the supported Python versions.
            if url_pattern in self.store and method_pattern in self.store[url_pattern]:
                if method_pattern.pattern == not_provided:
                    exc_msg = '"{0.pattern}" already has methodless registration.'.format(url_pattern)
                else:
                    exc_msg = '"{0.pattern}" already has registered method "{1.pattern}"'.format(url_pattern, method_pattern)
                raise TypeError(exc_msg)
            self.store[url_pattern][method_pattern] = resource

    def setdefault(self, *args):
        """Establishes a default return value for get() by optional method (iterable).

        reg.setdefault(ResourceOne)
        reg.get('/some/unregistered/path')
        -> ResourceOne
        reg.setdefault('method', ResourceTwo)
        reg.get('/some/registered/methodless/path/', 'method')
        -> ResourceTwo
        reg.setdefault(('method_one', 'method_two'), ResourceThree)
        reg.get('/some/unregistered/path', 'method_two')
        -> ResourceThree
        reg.setdefault('supports.*regex', ResourceFour)
        reg.get('supports123regex')
        -> ResourceFour
        """
        if not args:
            raise TypeError('setdefault needs at least a Resource.')
        if len(args) == 1:  # all methods
            self.default[re.compile('.*')] = args[0]
        elif len(args) == 2:
            if is_list_or_tuple(args[0]):
                methods = args[0]
            else:
                methods = (args[0],)
            for method in methods:
                method_pattern = re.compile(method)
                self.default[method_pattern] = args[1]
        else:
            raise TypeError('setdefault takes at most 2 arguments ({} given).'.format(len(args)))

    def get(self, url, method=not_provided):
        """Returns a single resource by previously registered path and optional method where
        1.  If a registration was methodless and a method is provided to get() the return value will be
            None or, if applicable, a registry default (see setdefault()).
        2.  If a registration included a method (excluding the method wildcard '.*') and no method is provided to get()
            the return value will be None or, if applicable, a registry default.

        reg.register('/some/path/', ResourceOne)
        reg.get('/some/path/')
        -> ResourceOne
        reg.get('/some/path/', 'method')
        -> None
        reg.register('/some/other/path/', 'method', ResourceTwo)
        reg.get('/some/other/path/', 'method')
        -> ResourceTwo
        reg.get('/some/other/path')
        -> None
        reg.register('/some/additional/path/', '.*', ResourceThree)
        reg.get('/some/additional/path/', 'method')
        -> ResourceThree
        reg.get('/some/additional/path/')
        -> ResourceThree
        """
        registered_type = None
        default_methods = list(self.default)
        # Make sure dot character evaluated last
        # (sort key is False for specific patterns, True for '.*', so the
        # wildcard default is only used when no specific default matches).
        default_methods.sort(key=lambda x: x.pattern == '.*')
        for method_key in default_methods:
            if method_key.match(method):
                registered_type = self.default[method_key]
                break

        # A store match overrides any default. Note the outer loop keeps
        # scanning after an inner-loop break, so when several registered url
        # patterns match, the last one visited wins.
        for re_key in self.store:
            if re_key.match(url):
                keys = list(self.store[re_key])
                keys.sort(key=lambda x: x.pattern == '.*')
                for method_key in keys:
                    if method_key.match(method):
                        registered_type = self.store[re_key][method_key]
                        break
        log.debug('Retrieved {} by url: {}'.format(registered_type, url))
        return registered_type
07070100000041000081A400000000000000000000000166846B9200003D4C000000000000000000000000000000000000002300000000awx-24.6.1/awxkit/api/resources.pyfrom awxkit.config import config


class Resources(object):
    """Catalogue of AWX API endpoint paths as regex-friendly relative strings.

    Each ``_name`` attribute is a path fragment (``\\d+`` marks numeric ids).
    Accessing the name without the leading underscore (e.g. ``resources.jobs``)
    returns the fragment prefixed with the versioned API base — see
    ``__getattr__`` below.
    """

    _activity = r'activity_stream/\d+/'
    _activity_stream = 'activity_stream/'
    _ad_hoc_command = r'ad_hoc_commands/\d+/'
    _ad_hoc_command_relaunch = r'ad_hoc_commands/\d+/relaunch/'
    _ad_hoc_commands = 'ad_hoc_commands/'
    _ad_hoc_event = r'ad_hoc_command_events/\d+/'
    _ad_hoc_events = r'ad_hoc_commands/\d+/events/'
    _ad_hoc_related_cancel = r'ad_hoc_commands/\d+/cancel/'
    _ad_hoc_relaunch = r'ad_hoc_commands/\d+/relaunch/'
    _ansible_facts = r'hosts/\d+/ansible_facts/'
    _application = r'applications/\d+/'
    _applications = 'applications/'
    _auth = 'auth/'
    _authtoken = 'authtoken/'
    _bulk = 'bulk/'
    _bulk_job_launch = 'bulk/job_launch/'
    _config = 'config/'
    _config_attach = 'config/attach/'
    _credential = r'credentials/\d+/'
    _credential_access_list = r'credentials/\d+/access_list/'
    _credential_copy = r'credentials/\d+/copy/'
    _credential_input_source = r'credential_input_sources/\d+/'
    _credential_input_sources = 'credential_input_sources/'
    _credential_owner_teams = r'credentials/\d+/owner_teams/'
    _credential_owner_users = r'credentials/\d+/owner_users/'
    _credential_type = r'credential_types/\d+/'
    _credential_types = 'credential_types/'
    _credentials = 'credentials/'
    _dashboard = 'dashboard/'
    _execution_environment = r'execution_environments/\d+/'
    _execution_environments = 'execution_environments/'
    _fact_view = r'hosts/\d+/fact_view/'
    _group = r'groups/\d+/'
    _group_access_list = r'groups/\d+/access_list/'
    _group_children = r'groups/\d+/children/'
    _group_potential_children = r'groups/\d+/potential_children/'
    _group_related_ad_hoc_commands = r'groups/\d+/ad_hoc_commands/'
    _group_related_all_hosts = r'groups/\d+/all_hosts/'
    _group_related_hosts = r'groups/\d+/hosts/'
    _group_related_job_events = r'groups/\d+/job_events/'
    _group_related_job_host_summaries = r'groups/\d+/job_host_summaries/'
    _group_variable_data = r'groups/\d+/variable_data/'
    _groups = 'groups/'
    _host = r'hosts/\d+/'
    _host_groups = r'hosts/\d+/groups/'
    _host_metrics = 'host_metrics/'
    _host_metric = r'host_metrics/\d+/'
    _host_insights = r'hosts/\d+/insights/'
    _host_related_ad_hoc_commands = r'hosts/\d+/ad_hoc_commands/'
    _host_related_fact_version = r'hosts/\d+/fact_versions/\d+/'
    _host_related_fact_versions = r'hosts/\d+/fact_versions/'
    _host_variable_data = r'hosts/\d+/variable_data/'
    _hosts = 'hosts/'
    _instance = r'instances/\d+/'
    _instance_group = r'instance_groups/\d+/'
    _instance_group_related_jobs = r'instance_groups/\d+/jobs/'
    _instance_groups = 'instance_groups/'
    _instance_install_bundle = r'instances/\d+/install_bundle/'
    _instance_peers = r'instances/\d+/peers/'
    _instance_related_jobs = r'instances/\d+/jobs/'
    _instances = 'instances/'
    _inventories = 'inventories/'
    _constructed_inventories = 'constructed_inventories/'
    _inventory = r'inventories/\d+/'
    _constructed_inventory = r'constructed_inventories/\d+/'
    _inventory_access_list = r'inventories/\d+/access_list/'
    _inventory_copy = r'inventories/\d+/copy/'
    _inventory_labels = r'inventories/\d+/labels/'
    _inventory_related_ad_hoc_commands = r'inventories/\d+/ad_hoc_commands/'
    _inventory_related_groups = r'inventories/\d+/groups/'
    _inventory_related_hosts = r'inventories/\d+/hosts/'
    _inventory_related_root_groups = r'inventories/\d+/root_groups/'
    _inventory_related_script = r'inventories/\d+/script/'
    _inventory_related_update_inventory_sources = r'inventories/\d+/update_inventory_sources/'
    _inventory_source = r'inventory_sources/\d+/'
    _inventory_source_schedule = r'inventory_sources/\d+/schedules/\d+/'
    _inventory_source_schedules = r'inventory_sources/\d+/schedules/'
    _inventory_source_updates = r'inventory_sources/\d+/inventory_updates/'
    _inventory_sources = 'inventory_sources/'
    _inventory_sources_related_groups = r'inventory_sources/\d+/groups/'
    _inventory_sources_related_hosts = r'inventory_sources/\d+/hosts/'
    _inventory_sources_related_update = r'inventory_sources/\d+/update/'
    _inventory_tree = r'inventories/\d+/tree/'
    _inventory_update = r'inventory_updates/\d+/'
    _inventory_update_cancel = r'inventory_updates/\d+/cancel/'
    _inventory_update_events = r'inventory_updates/\d+/events/'
    _inventory_updates = 'inventory_updates/'
    _inventory_variable_data = r'inventories/\d+/variable_data/'
    _workflow_approval = r'workflow_approvals/\d+/'
    _workflow_approvals = 'workflow_approvals/'
    _workflow_approval_template = r'workflow_approval_templates/\d+/'
    _workflow_approval_templates = 'workflow_approval_templates/'
    _workflow_job_template_node_create_approval_template = r'workflow_job_template_nodes/\d+/create_approval_template/'
    _job = r'jobs/\d+/'
    _job_cancel = r'jobs/\d+/cancel/'
    _job_create_schedule = r'jobs/\d+/create_schedule/'
    _job_event = r'job_events/\d+/'
    _job_event_children = r'job_events/\d+/children/'
    _job_events = 'job_events/'
    _job_host_summaries = r'jobs/\d+/job_host_summaries/'
    _job_host_summary = r'job_host_summaries/\d+/'
    _job_job_event = r'jobs/\d+/job_events/\d+/'
    _job_job_events = r'jobs/\d+/job_events/'
    _job_labels = r'jobs/\d+/labels/'
    _job_notifications = r'jobs/\d+/notifications/'
    _job_play = r'jobs/\d+/job_plays/\d+/'
    _job_plays = r'jobs/\d+/job_plays/'
    _job_relaunch = r'jobs/\d+/relaunch/'
    _job_start = r'jobs/\d+/start/'
    _job_task = r'jobs/\d+/job_tasks/\d+/'
    _job_tasks = r'jobs/\d+/job_tasks/'
    _job_template = r'job_templates/\d+/'
    _job_template_access_list = r'job_templates/\d+/access_list/'
    _job_template_callback = r'job_templates/\d+/callback/'
    _job_template_copy = r'job_templates/\d+/copy/'
    _job_template_jobs = r'job_templates/\d+/jobs/'
    _job_template_labels = r'job_templates/\d+/labels/'
    _job_template_launch = r'job_templates/\d+/launch/'
    _job_template_schedule = r'job_templates/\d+/schedules/\d+/'
    _job_template_schedules = r'job_templates/\d+/schedules/'
    _job_template_slice_workflow_jobs = r'job_templates/\d+/slice_workflow_jobs/'
    _job_template_survey_spec = r'job_templates/\d+/survey_spec/'
    _job_templates = 'job_templates/'
    _jobs = 'jobs/'
    _label = r'labels/\d+/'
    _labels = 'labels/'
    _me = 'me/'
    _metrics = 'metrics/'
    _mesh_visualizer = 'mesh_visualizer/'
    _notification = r'notifications/\d+/'
    _notification_template = r'notification_templates/\d+/'
    _notification_template_any = r'\w+/\d+/notification_templates_any/\d+/'
    _notification_template_started = r'\w+/\d+/notification_templates_started/\d+/'
    _notification_template_copy = r'notification_templates/\d+/copy/'
    _notification_template_error = r'\w+/\d+/notification_templates_error/\d+/'
    _notification_template_success = r'\w+/\d+/notification_templates_success/\d+/'
    _notification_template_approval = r'\w+/\d+/notification_templates_approvals/\d+/'
    _notification_template_test = r'notification_templates/\d+/test/'
    _notification_templates = 'notification_templates/'
    _notification_templates_any = r'\w+/\d+/notification_templates_any/'
    _notification_templates_started = r'\w+/\d+/notification_templates_started/'
    _notification_templates_error = r'\w+/\d+/notification_templates_error/'
    _notification_templates_success = r'\w+/\d+/notification_templates_success/'
    _notification_templates_approvals = r'\w+/\d+/notification_templates_approvals/'
    _notifications = 'notifications/'
    _object_activity_stream = r'[^/]+/\d+/activity_stream/'
    _org_projects = r'organizations/\d+/projects/'
    _org_teams = r'organizations/\d+/teams/'
    _organization = r'organizations/\d+/'
    _organization_access_list = r'organizations/\d+/access_list/'
    _organization_admins = r'organizations/\d+/admins/'
    _organization_applications = r'organizations/\d+/applications/'
    _organization_execution_environments = r'organizations/\d+/execution_environments/'
    _organization_galaxy_credentials = r'organizations/\d+/galaxy_credentials/'
    _organization_inventories = r'organizations/\d+/inventories/'
    _organization_users = r'organizations/\d+/users/'
    _organizations = 'organizations/'
    _ping = 'ping/'
    _project = r'projects/\d+/'
    _project_access_list = r'projects/\d+/access_list/'
    _project_copy = r'projects/\d+/copy/'
    _project_inventories = r'projects/\d+/inventories/'
    _project_organizations = r'projects/\d+/organizations/'
    _project_playbooks = r'projects/\d+/playbooks/'
    _project_project_updates = r'projects/\d+/project_updates/'
    _project_related_update = r'projects/\d+/update/'
    _project_schedule = r'projects/\d+/schedules/\d+/'
    _project_schedules = r'projects/\d+/schedules/'
    _project_scm_inventory_sources = r'projects/\d+/scm_inventory_sources/'
    _project_teams = r'projects/\d+/teams/'
    _project_update = r'project_updates/\d+/'
    _project_update_cancel = r'project_updates/\d+/cancel/'
    _project_update_events = r'project_updates/\d+/events/'
    _project_update_scm_inventory_updates = r'project_updates/\d+/scm_inventory_updates/'
    _project_updates = 'project_updates/'
    _projects = 'projects/'
    _related_credentials = r'\w+/\d+/credentials/'
    _related_input_sources = r'\w+/\d+/input_sources/'
    _related_instance_groups = r'\w+/\d+/instance_groups/'
    _related_instances = r'\w+/\d+/instances/'
    _related_inventories = r'(?!projects)\w+/\d+/inventories/'  # project related inventories are inventory files (.ini)
    _related_inventory_sources = r'\w+/\d+/inventory_sources/'
    _related_job_templates = r'\w+/\d+/job_templates/'
    _related_notification_templates = r'\w+/\d+/notification_templates/'
    _related_notifications = r'\w+/\d+/notifications/'
    _related_object_roles = r'\w+/\d+/object_roles/'
    _related_projects = r'\w+/\d+/projects/'
    _related_roles = r'\w+/\d+/roles/'
    _related_schedule = r'\w+/\d+/schedules/\d+/'
    _related_schedules = r'\w+/\d+/schedules/'
    _related_stdout = r'\w+/\d+/stdout/'
    _related_teams = r'\w+/\d+/teams/'
    _related_users = r'\w+/\d+/users/'
    _related_workflow_job_templates = r'\w+/\d+/workflow_job_templates/'
    _role = r'roles/\d+/'
    _roles = 'roles/'
    _roles_related_teams = r'roles/\d+/teams/'
    _schedule = r'schedules/\d+/'
    _schedules = 'schedules/'
    _schedules_jobs = r'schedules/\d+/jobs/'
    _schedules_preview = 'schedules/preview/'
    _schedules_zoneinfo = 'schedules/zoneinfo/'
    _setting = r'settings/\w+/'
    _settings = 'settings/'
    _settings_all = 'settings/all/'
    _settings_authentication = 'settings/authentication/'
    _settings_azuread_oauth2 = 'settings/azuread-oauth2/'
    _settings_changed = 'settings/changed/'
    _settings_github = 'settings/github/'
    _settings_github_org = 'settings/github-org/'
    _settings_github_team = 'settings/github-team/'
    _settings_google_oauth2 = 'settings/google-oauth2/'
    _settings_jobs = 'settings/jobs/'
    _settings_ldap = 'settings/ldap/'
    _settings_logging = 'settings/logging/'
    _settings_named_url = 'settings/named-url/'
    _settings_radius = 'settings/radius/'
    _settings_saml = 'settings/saml/'
    _settings_system = 'settings/system/'
    _settings_tacacsplus = 'settings/tacacsplus/'
    _settings_ui = 'settings/ui/'
    _settings_user = 'settings/user/'
    _settings_user_defaults = 'settings/user-defaults/'
    _system_job = r'system_jobs/\d+/'
    _system_job_cancel = r'system_jobs/\d+/cancel/'
    _system_job_events = r'system_jobs/\d+/events/'
    _system_job_template = r'system_job_templates/\d+/'
    _system_job_template_jobs = r'system_job_templates/\d+/jobs/'
    _system_job_template_launch = r'system_job_templates/\d+/launch/'
    _system_job_template_schedule = r'system_job_templates/\d+/schedules/\d+/'
    _system_job_template_schedules = r'system_job_templates/\d+/schedules/'
    _system_job_templates = 'system_job_templates/'
    _system_jobs = 'system_jobs/'
    _team = r'teams/\d+/'
    _team_access_list = r'teams/\d+/access_list/'
    _team_credentials = r'teams/\d+/credentials/'
    _team_permission = r'teams/\d+/permissions/\d+/'
    _team_permissions = r'teams/\d+/permissions/'
    _team_users = r'teams/\d+/users/'
    _teams = 'teams/'
    _token = r'tokens/\d+/'
    _tokens = 'tokens/'
    _unified_job_template = r'unified_job_templates/\d+/'
    _unified_job_templates = 'unified_job_templates/'
    _unified_jobs = 'unified_jobs/'
    _user = r'users/\d+/'
    _user_access_list = r'users/\d+/access_list/'
    _user_admin_organizations = r'users/\d+/admin_of_organizations/'
    _user_credentials = r'users/\d+/credentials/'
    _user_organizations = r'users/\d+/organizations/'
    _user_permission = r'users/\d+/permissions/\d+/'
    _user_permissions = r'users/\d+/permissions/'
    _user_teams = r'users/\d+/teams/'
    _users = 'users/'
    _variable_data = r'.*\/variable_data/'
    _workflow_job = r'workflow_jobs/\d+/'
    _workflow_job_cancel = r'workflow_jobs/\d+/cancel/'
    _workflow_job_labels = r'workflow_jobs/\d+/labels/'
    _workflow_job_node = r'workflow_job_nodes/\d+/'
    _workflow_job_node_always_nodes = r'workflow_job_nodes/\d+/always_nodes/'
    _workflow_job_node_failure_nodes = r'workflow_job_nodes/\d+/failure_nodes/'
    _workflow_job_node_success_nodes = r'workflow_job_nodes/\d+/success_nodes/'
    _workflow_job_nodes = 'workflow_job_nodes/'
    _workflow_job_relaunch = r'workflow_jobs/\d+/relaunch/'
    _workflow_job_template = r'workflow_job_templates/\d+/'
    _workflow_job_template_copy = r'workflow_job_templates/\d+/copy/'
    _workflow_job_template_jobs = r'workflow_job_templates/\d+/workflow_jobs/'
    _workflow_job_template_labels = r'workflow_job_templates/\d+/labels/'
    _workflow_job_template_launch = r'workflow_job_templates/\d+/launch/'
    _workflow_job_template_node = r'workflow_job_template_nodes/\d+/'
    _workflow_job_template_node_always_nodes = r'workflow_job_template_nodes/\d+/always_nodes/'
    _workflow_job_template_node_failure_nodes = r'workflow_job_template_nodes/\d+/failure_nodes/'
    _workflow_job_template_node_success_nodes = r'workflow_job_template_nodes/\d+/success_nodes/'
    _workflow_job_template_nodes = 'workflow_job_template_nodes/'
    _workflow_job_template_schedule = r'workflow_job_templates/\d+/schedules/\d+/'
    _workflow_job_template_schedules = r'workflow_job_templates/\d+/schedules/'
    _workflow_job_template_survey_spec = r'workflow_job_templates/\d+/survey_spec/'
    _workflow_job_template_workflow_nodes = r'workflow_job_templates/\d+/workflow_nodes/'
    _workflow_job_templates = 'workflow_job_templates/'
    _workflow_job_workflow_nodes = r'workflow_jobs/\d+/workflow_nodes/'
    _subscriptions = 'config/subscriptions/'
    _workflow_jobs = 'workflow_jobs/'
    # Versioned API prefixes derived from the configured base path.
    api = str(config.api_base_path)
    common = api + r'v\d+/'
    v2 = api + 'v2/'

    def __getattr__(self, resource):
        """Return the v2-prefixed path for *resource* (looked up as '_resource').

        The triple-underscore check bounds the recursive lookup below: when the
        underscored attribute is missing, getattr re-enters __getattr__ with an
        extra leading underscore each time, and is rejected here so an unknown
        resource raises AttributeError instead of recursing indefinitely.
        """
        if resource[:3] == '___':
            raise AttributeError('No existing resource: {}'.format(resource))
        # Currently we don't handle anything under:
        # /api/o/
        # /api/login/
        # /api/logout/
        # If/when we do we will probably need to modify this __getattr__ method
        # Also, if we add another API version, this would be handled here
        prefix = 'v2'
        resource = '_' + resource
        return '{0}{1}'.format(getattr(self, prefix), getattr(self, resource))


resources = Resources()
07070100000042000081A400000000000000000000000166846B92000005D3000000000000000000000000000000000000001F00000000awx-24.6.1/awxkit/api/utils.pyimport logging
import re


log = logging.getLogger(__name__)

# Matches OPTIONS description bullet lines of the form
#   * `name`: Some text. (string, required)
# capturing the field name, its type, and the required/optional marker.
descRE = re.compile(r'^[*] `(\w+)`: [^(]*\((\w+), ([^)]+)\)')


def freeze(key):
    """Return a hashable frozenset of *key*'s items, recursively freezing any
    nested dict values; None passes through unchanged.
    """
    if key is None:
        return None
    frozen_items = set()
    for field, value in key.items():
        if isinstance(value, dict):
            value = freeze(value)
        frozen_items.add((field, value))
    return frozenset(frozen_items)


def parse_description(desc):
    """Parse an endpoint OPTIONS description blob into a field-options dict.

    Only the portion starting at the first 'POST' marker (when present) is
    scanned; each bullet matching descRE yields
    {name: {'type': ..., 'required': bool}}.
    """
    if 'POST' in desc:
        relevant = desc[desc.index('POST') :]
    else:
        relevant = desc

    options = {}
    for line in relevant.splitlines():
        match = descRE.match(line)
        if match:
            name, field_type, requirement = match.groups()
            options[name] = {'type': field_type, 'required': requirement == 'required'}
    return options


def remove_encrypted(value):
    """Recursively replace AWX '$encrypted$' placeholders with empty strings,
    descending into lists and dicts; all other values pass through untouched.
    """
    if isinstance(value, dict):
        return {key: remove_encrypted(val) for key, val in value.items()}
    if isinstance(value, list):
        return [remove_encrypted(entry) for entry in value]
    return '' if value == '$encrypted$' else value


def get_post_fields(page, cache):
    """Return the POST field description for *page*, or None when the endpoint
    cannot be inspected or does not allow POST.

    Falls back to parsing the human-readable description when the OPTIONS
    'actions' payload lacks a POST section (insufficient privileges).
    """
    options_page = cache.get_options(page)
    if options_page is None:
        return None

    if 'POST' not in options_page.r.headers.get('Allow', ''):
        return None

    actions = options_page.json['actions']
    if 'POST' in actions:
        return actions['POST']
    log.warning("Insufficient privileges on %s, inferring POST fields from description.", options_page.endpoint)
    return parse_description(options_page.json['description'])
07070100000043000041ED00000000000000000000000266846B9200000000000000000000000000000000000000000000001600000000awx-24.6.1/awxkit/awx07070100000044000081A400000000000000000000000166846B9200000067000000000000000000000000000000000000002200000000awx-24.6.1/awxkit/awx/__init__.pyfrom distutils.version import LooseVersion


def version_cmp(x, y):
    """Three-way compare two version strings: -1 if x < y, 0 if equal, 1 if x > y.

    Uses LooseVersion's public comparison operators instead of the private
    ``_cmp`` method the previous implementation relied on.
    """
    vx, vy = LooseVersion(x), LooseVersion(y)
    return (vx > vy) - (vx < vy)
07070100000045000081A400000000000000000000000166846B9200001376000000000000000000000000000000000000002300000000awx-24.6.1/awxkit/awx/inventory.pyimport optparse
import json

from awxkit.utils import random_title


def upload_inventory(ansible_runner, nhosts=10, ini=False):
    """Copy a generated inventory to the target host and return its remote path.

    When ini is True an INI file is written; otherwise an executable shell
    script that emits the JSON inventory is installed.
    """
    if ini:
        mode = '0644'
        dest = '/tmp/inventory{}.ini'.format(random_title(non_ascii=False))
        content = ini_inventory(nhosts)
    else:
        mode = '0755'
        dest = '/tmp/inventory{}.sh'.format(random_title(non_ascii=False))
        content = '''#!/bin/bash
cat <<EOF
%s
EOF''' % json_inventory(nhosts)

    # Push the file to the test system and verify every targeted host took it.
    contacted = ansible_runner.copy(dest=dest, force=True, mode=mode, content=content)
    for result in contacted.values():
        assert not result.get('failed', False), "Failed to create inventory file: %s" % result
    return dest


def generate_inventory(nhosts=100):
    """Generate a somewhat complex inventory with a configurable number of hosts.

    Hosts are spread across modulo-based groups (evens/odds, threes, fours,
    ...) plus a hierarchy of decade groups: each group of 1000 hosts is the
    parent of its groups of 100, which in turn parent the groups of 10.
    Per-host variables are stored under ``_meta.hostvars`` in the dynamic
    inventory convention.

    :param nhosts: number of hosts to generate
    :returns: dict in Ansible dynamic-inventory format
    """
    inv_list = {
        '_meta': {
            'hostvars': {},
        },
    }

    for n in range(nhosts):
        hostname = 'host-%08d.example.com' % n
        group_evens_odds = 'evens.example.com' if n % 2 == 0 else 'odds.example.com'
        group_threes = 'threes.example.com' if n % 3 == 0 else ''
        group_fours = 'fours.example.com' if n % 4 == 0 else ''
        group_fives = 'fives.example.com' if n % 5 == 0 else ''
        group_sixes = 'sixes.example.com' if n % 6 == 0 else ''
        group_sevens = 'sevens.example.com' if n % 7 == 0 else ''
        group_eights = 'eights.example.com' if n % 8 == 0 else ''
        group_nines = 'nines.example.com' if n % 9 == 0 else ''
        group_tens = 'tens.example.com' if n % 10 == 0 else ''
        # Use integer (floor) division here; the previous float division only
        # produced correct names because %d truncates floats.
        group_by_10s = 'group-%07dX.example.com' % (n // 10)
        group_by_100s = 'group-%06dXX.example.com' % (n // 100)
        group_by_1000s = 'group-%05dXXX.example.com' % (n // 1000)
        for group in [group_evens_odds, group_threes, group_fours, group_fives, group_sixes, group_sevens, group_eights, group_nines, group_tens, group_by_10s]:
            if not group:
                continue
            if group in inv_list:
                inv_list[group]['hosts'].append(hostname)
            else:
                inv_list[group] = {'hosts': [hostname], 'children': [], 'vars': {'group_prefix': group.split('.')[0]}}
        # The 100s/1000s groups hold no hosts directly; they only parent the
        # smaller decade groups below them.
        if group_by_1000s not in inv_list:
            inv_list[group_by_1000s] = {'hosts': [], 'children': [], 'vars': {'group_prefix': group_by_1000s.split('.')[0]}}
        if group_by_100s not in inv_list:
            inv_list[group_by_100s] = {'hosts': [], 'children': [], 'vars': {'group_prefix': group_by_100s.split('.')[0]}}
        if group_by_100s not in inv_list[group_by_1000s]['children']:
            inv_list[group_by_1000s]['children'].append(group_by_100s)
        if group_by_10s not in inv_list[group_by_100s]['children']:
            inv_list[group_by_100s]['children'].append(group_by_10s)
        inv_list['_meta']['hostvars'][hostname] = {
            'ansible_user': 'example',
            'ansible_connection': 'local',
            'host_prefix': hostname.split('.')[0],
            'host_id': n,
        }

    return inv_list


def json_inventory(nhosts=10):
    """Serialize the generated inventory to pretty-printed JSON."""
    inventory = generate_inventory(nhosts)
    return json.dumps(inventory, indent=4)


def ini_inventory(nhosts=10):
    """Return a .INI representation of inventory."""
    lines = []
    for group, spec in generate_inventory(nhosts).items():
        # _meta holds hostvars, which have no .ini representation.
        if group == '_meta':
            continue

        # [group] section listing member hosts
        lines.append('[%s]' % group)
        lines.extend(spec.get('hosts', []))
        lines.append('')  # newline

        # [group:children] section listing child groups
        lines.append('[%s:children]' % group)
        lines.extend(spec.get('children', []))
        lines.append('')  # newline

        # [group:vars] section listing group variables
        lines.append('[%s:vars]' % group)
        lines.extend('%s=%s' % (k, v) for k, v in spec.get('vars', {}).items())
        lines.append('')  # newline

    return '\n'.join(lines)


if __name__ == '__main__':
    # Ansible dynamic-inventory CLI: --list-style JSON, .ini output, or
    # per-host variables via --host.
    parser = optparse.OptionParser()
    parser.add_option('--json', action='store_true', dest='json')
    parser.add_option('--ini', action='store_true', dest='ini')
    parser.add_option('--host', dest='hostname', default='')
    parser.add_option('--nhosts', dest='nhosts', action='store', type='int', default=10)
    options, args = parser.parse_args()
    if options.json:
        print(json_inventory(nhosts=options.nhosts))
    elif options.ini:
        print(ini_inventory(nhosts=options.nhosts))
    elif options.hostname:
        # BUG FIX: json_inventory() returns a JSON *string*, which cannot be
        # indexed by key (the old code raised TypeError). Look the host up in
        # the generated dict and emit its hostvars as JSON; an unknown host
        # yields an empty dict, per the dynamic inventory convention.
        hostvars = generate_inventory(nhosts=options.nhosts)['_meta']['hostvars']
        print(json.dumps(hostvars.get(options.hostname, {}), indent=4))
    else:
        print(json.dumps({}, indent=4))
07070100000046000081A400000000000000000000000166846B9200000F72000000000000000000000000000000000000001F00000000awx-24.6.1/awxkit/awx/utils.pyfrom contextlib import contextmanager, suppress

from awxkit import api, exceptions
from awxkit.config import config


__all__ = ('as_user', 'check_related', 'delete_all', 'uses_sessions')


def get_all(endpoint):
    """Follow pagination on *endpoint* and return every result."""
    collected = []
    while True:
        # Request large pages unless the endpoint already carries a page_size.
        kwargs = {} if 'page_size' in endpoint else {'page_size': 200}
        page = endpoint.get(**kwargs)
        collected.extend(page.results)
        if not page.next:
            return collected
        endpoint = page.next


def _delete_all(endpoint):
    """Best-effort deletion of every item listed by *endpoint*."""
    while True:
        page = endpoint.get()
        for item in page.results:
            try:
                item.delete()
            except Exception as exc:
                # Best-effort: report the failure and keep deleting the rest.
                print(exc)
        if not page.next:
            return


def delete_all(v):
    """Delete every object of each major resource type reachable from *v*.

    Ordered so that dependent objects (jobs, templates) go before the
    organizations/users that own them.
    """
    endpoints = (
        v.unified_jobs,
        v.job_templates,
        v.workflow_job_templates,
        v.notification_templates,
        v.projects,
        v.inventory,
        v.hosts,
        v.labels,
        v.credentials,
        v.teams,
        v.users,
        v.organizations,
        v.schedules,
    )
    for endpoint in endpoints:
        _delete_all(endpoint)


def check_related(resource):
    """GET every related link of *resource*, and one level deeper, printing
    each endpoint visited and silently skipping any that 404.
    """
    seen = []
    for rel in resource.related.values():
        if rel in seen:
            continue
        print(rel)
        with suppress(exceptions.NotFound):
            child = rel.get()
            seen.append(rel)
            # For list views, drill into a single representative result.
            if 'results' in child and child.results:
                child = child.results.pop()
            if 'related' not in child:
                continue
            for grandchild in child.related.values():
                if not isinstance(grandchild, api.page.TentativePage) or grandchild in seen:
                    continue
                print(grandchild)
                with suppress(exceptions.NotFound):
                    grandchild.get()
                    seen.append(grandchild)


@contextmanager
def as_user(v, username, password=None):
    """Context manager to allow running tests as an alternative login user.

    ``v`` may be a ``Connection`` or any object exposing ``.connection``.
    ``username`` may be a plain username string, an ``api.User`` page (its
    stored password is used), or an ``api.OAuth2AccessToken`` page (token
    auth is used and username/password are ignored).

    On exit, the previous authentication state (session cookie or requests
    auth) is restored.
    """
    access_token = False
    if not isinstance(v, api.client.Connection):
        connection = v.connection
    else:
        connection = v

    if isinstance(username, api.User):
        # A User page was supplied; pull its credentials off the page.
        password = username.password
        username = username.username

    if isinstance(username, api.OAuth2AccessToken):
        # Token-based auth: no username/password login required.
        access_token = username.token
        username = None
        password = None

    try:
        if config.use_sessions:
            session_id = None
            domain = None
            # requests doesn't provide interface for retrieving
            # domain segregated cookies other than iterating.
            for cookie in connection.session.cookies:
                if cookie.name == connection.session_cookie_name:
                    session_id = cookie.value
                    domain = cookie.domain
                    break
            if session_id:
                # Drop the current session cookie so login() starts fresh.
                del connection.session.cookies[connection.session_cookie_name]
            if access_token:
                kwargs = dict(token=access_token)
            else:
                kwargs = connection.get_session_requirements()
        else:
            # Basic-auth style: remember the existing auth so it can be restored.
            previous_auth = connection.session.auth
            kwargs = dict()
        connection.login(username, password, **kwargs)
        yield
    finally:
        if config.use_sessions:
            if access_token:
                connection.session.auth = None
            # Remove the temporary user's session cookie and restore the
            # original user's cookie (if one existed).
            del connection.session.cookies[connection.session_cookie_name]
            if session_id:
                connection.session.cookies.set(connection.session_cookie_name, session_id, domain=domain)
        else:
            connection.session.auth = previous_auth


def uses_sessions(connection):
    """Return True when the server exposes session-based login at /login/."""
    response = connection.get(f"{config.api_base_path}login/")
    return response.status_code == 200
07070100000047000041ED00000000000000000000000266846B9200000000000000000000000000000000000000000000001600000000awx-24.6.1/awxkit/cli07070100000048000081A400000000000000000000000166846B9200000947000000000000000000000000000000000000002200000000awx-24.6.1/awxkit/cli/__init__.pyimport json
import sys
import traceback
import yaml
import urllib3

from requests.exceptions import ConnectionError, SSLError

from .client import CLI
from awxkit.utils import to_str
from awxkit.exceptions import Unauthorized, Common
from awxkit.cli.utils import cprint


# you'll only see these warnings if you've explicitly *disabled* SSL
# verification, so they're a little annoying, redundant
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)


def run(stdout=sys.stdout, stderr=sys.stderr, argv=None):
    """Entry point for the awx CLI: parse arguments, connect, and dispatch.

    :param stdout: stream for normal output
    :param stderr: stream for error output
    :param argv: argument vector; a falsy value falls back to ``sys.argv``.
                 (Previously the default was a mutable list literal, a
                 classic Python anti-pattern; ``None`` is equivalent here
                 because the value is only used via ``argv or sys.argv``.)

    Exits with status 1 on any error; a ``Common`` API error is rendered in
    the user's requested output format first.
    """
    cli = CLI(stdout=stdout, stderr=stderr)
    try:
        cli.parse_args(argv or sys.argv)
        cli.connect()
        cli.parse_resource()
    except KeyboardInterrupt:
        sys.exit(1)
    except ConnectionError as e:
        cli.parser.print_help()
        msg = (
            '\nThere was a network error of some kind trying to reach '
            '{}.\nYou might need to specify (or double-check) '
            '--conf.host'.format(cli.get_config('host'))
        )
        if isinstance(e, SSLError):
            msg = (
                '\nCould not establish a secure connection.  '
                '\nPlease add your server to your certificate authority.'
                '\nYou can also run this command by specifying '
                '-k or --conf.insecure'
            )
        cprint(msg + '\n', 'red', file=stderr)
        cprint(e, 'red', file=stderr)
        sys.exit(1)
    except Unauthorized as e:
        cli.parser.print_help()
        msg = '\nValid credentials were not provided.\n$ awx login --help'
        cprint(msg + '\n', 'red', file=stderr)
        if cli.verbose:
            cprint(e.__class__, 'red', file=stderr)
        sys.exit(1)
    except Common as e:
        if cli.verbose:
            # BUG FIX: the stream must be passed as the ``file=`` keyword;
            # previously sys.stderr was printed *as a value* to stdout.
            print(traceback.format_exc(), file=stderr)
        # BUG FIX: honor the ``stdout`` parameter instead of hard-coding
        # sys.stdout (identical behavior for the default arguments).
        if cli.get_config('format') == 'json':
            json.dump(e.msg, stdout)
            print('', file=stdout)
        elif cli.get_config('format') == 'yaml':
            stdout.write(to_str(yaml.safe_dump(e.msg, default_flow_style=False, encoding='utf-8', allow_unicode=True)))
        elif cli.get_config('format') == 'human':
            stdout.write(e.__class__.__name__)
            print('', file=stdout)
        sys.exit(1)
    except Exception as e:
        if cli.verbose:
            e = traceback.format_exc()
        cprint(e, 'red', file=stderr)
        sys.exit(1)
07070100000049000081ED00000000000000000000000166846B92000031DF000000000000000000000000000000000000002000000000awx-24.6.1/awxkit/cli/client.pyfrom __future__ import print_function

import logging
import os
import pkg_resources
import sys

from requests.exceptions import RequestException

from .custom import handle_custom_actions
from .format import add_authentication_arguments, add_output_formatting_arguments, FORMATTERS, format_response
from .options import ResourceOptionsParser, UNIQUENESS_RULES
from .resource import parse_resource, is_control_resource
from awxkit import api, config, utils, exceptions, WSClient  # noqa
from awxkit.cli.utils import HelpfulArgumentParser, cprint, disable_color, colored
from awxkit.awx.utils import uses_sessions  # noqa


__version__ = pkg_resources.get_distribution('awxkit').version


class CLI(object):
    """A programmatic HTTP OPTIONS-based CLI for AWX/Ansible Tower.

    This CLI works by:

    - Configuring CLI options via Python's argparse (authentication, formatting
      options, etc...)
    - Discovering AWX API endpoints at /api/v2/ and mapping them to _resources_
    - Discovering HTTP OPTIONS _actions_ on resources to determine how
      resources can be interacted with (e.g., list, modify, delete, etc...)
    - Parsing sys.argv to map CLI arguments and flags to
      awxkit SDK calls

    ~ awx <resource> <action> --parameters

    e.g.,

    ~ awx users list -v
    GET /api/ HTTP/1.1" 200
    GET /api/v2/ HTTP/1.1" 200
    POST /api/login/ HTTP/1.1" 302
    OPTIONS /api/v2/users/ HTTP/1.1" 200
    GET /api/v2/users/
    {
     "count": 2,
     "results": [
     ...

    Interacting with this class generally involves a few critical methods:

    1.  parse_args() - this method is used to configure and parse global CLI
        flags, such as formatting flags, and arguments which represent client
        configuration (including authentication details)
    2.  connect() - once configuration is parsed, this method fetches /api/v2/
        and itemizes the list of supported resources
    3.  parse_resource() - attempts to parse the <resource> specified on the
        command line (e.g., users, organizations), including logic
        for discovering available actions for endpoints using HTTP OPTIONS
        requests

    At multiple stages of this process, an internal argparse.ArgumentParser()
    is progressively built and parsed based on sys.argv, (meaning, that if you
    supply invalid or incomplete arguments, argparse will print the usage
    message and an explanation of what you got wrong).
    """

    # NOTE: these are *class-level* attributes (shared across instances);
    # subparsers maps resource name -> its argparse subparser, and
    # original_action records the action verb before any custom-action
    # rewriting.  The CLI is normally instantiated once per process.
    subparsers = {}
    original_action = None

    def __init__(self, stdout=sys.stdout, stderr=sys.stderr, stdin=sys.stdin):
        # Streams are injectable for testing / embedding.
        self.stdout = stdout
        self.stderr = stderr
        self.stdin = stdin

    def get_config(self, key):
        """Helper method for looking up the value of a --conf.xyz flag"""
        return getattr(self.args, 'conf.{}'.format(key))

    @property
    def help(self):
        """True when --help/-h appears anywhere in the raw argument vector."""
        return '--help' in self.argv or '-h' in self.argv

    def authenticate(self):
        """Configure the current session (or OAuth2.0 token)"""
        token = self.get_config('token')
        if token:
            # Explicit OAuth2 token supplied via --conf.token
            self.root.connection.login(
                None,
                None,
                token=token,
            )
        else:
            # Otherwise fall back to session-based authentication.
            config.use_sessions = True
            self.root.load_session().get()

    def connect(self):
        """Fetch top-level resources from /api/v2"""
        config.base_url = self.get_config('host')
        config.client_connection_attempts = 1
        config.assume_untrusted = False
        if self.get_config('insecure'):
            config.assume_untrusted = True

        config.credentials = utils.PseudoNamespace(
            {
                'default': {
                    'username': self.get_config('username'),
                    'password': self.get_config('password'),
                }
            }
        )

        _, remainder = self.parser.parse_known_args()
        if remainder and remainder[0] == 'config':
            # the config command is special; it doesn't require
            # API connectivity
            return
        # ...otherwise, set up a awxkit connection because we're
        # likely about to do some requests to /api/v2/
        self.root = api.Api()
        try:
            self.fetch_version_root()
        except RequestException:
            # If we can't reach the API root (this usually means that the
            # hostname is wrong, or the credentials are wrong)
            if self.help:
                # ...but the user specified -h...
                known, unknown = self.parser.parse_known_args(self.argv)
                if len(unknown) == 1 and os.path.basename(unknown[0]) == 'awx':
                    return
            raise

    def fetch_version_root(self):
        """Resolve and store the /api/v2/ root page on ``self.v2``."""
        try:
            self.v2 = self.root.get().available_versions.v2.get()
        except AttributeError:
            # available_versions missing -> the host did not answer like an
            # AWX/Tower API root.
            raise RuntimeError('An error occurred while fetching {}/api/'.format(self.get_config('host')))

    def parse_resource(self, skip_deprecated=False):
        """Attempt to parse the <resource> (e.g., jobs) specified on the CLI

        If a valid resource is discovered, the user will be authenticated
        (either via an OAuth2.0 token or session-based auth) and the remaining
        CLI arguments will be processed (to determine the requested action
        e.g., list, create, delete)

        :param skip_deprecated: when False (the default), deprecated resource
                                names from the open source tower-cli project
                                will be allowed
        """
        self.resource = parse_resource(self, skip_deprecated=skip_deprecated)
        if self.resource:
            self.authenticate()
            resource = getattr(self.v2, self.resource)
            if is_control_resource(self.resource):
                # control resources are special endpoints that you can only
                # do an HTTP GET to, and which return plain JSON metadata
                # examples are `/api/v2/ping/`, `/api/v2/config/`, etc...
                if self.help:
                    self.subparsers[self.resource].print_help()
                    raise SystemExit()
                self.method = 'get'
                response = getattr(resource, self.method)()
            else:
                response = self.parse_action(resource)

            _filter = self.get_config('filter')

            # human format for metrics, settings is special
            if self.resource in ('metrics', 'settings') and self.get_config('format') == 'human':
                response.json = {'count': len(response.json), 'results': [{'key': k, 'value': v} for k, v in response.json.items()]}
                _filter = 'key, value'

            if self.get_config('format') == 'human' and _filter == '.' and self.resource in UNIQUENESS_RULES:
                _filter = ', '.join(UNIQUENESS_RULES[self.resource])

            formatted = format_response(
                response, fmt=self.get_config('format'), filter=_filter, changed=self.original_action in ('modify', 'create', 'associate', 'disassociate')
            )
            if formatted:
                print(utils.to_str(formatted), file=self.stdout)
            if hasattr(response, 'rc'):
                # a custom action recorded an exit code (e.g., failed job)
                raise SystemExit(response.rc)
        else:
            self.parser.print_help()

    def parse_action(self, page, from_sphinx=False):
        """Perform an HTTP OPTIONS request

        This method performs an HTTP OPTIONS request to build a list of valid
        actions, and (if provided) runs the code for the action specified on
        the CLI

        :param page: a awxkit.api.pages.TentativePage object representing the
                     top-level resource in question (e.g., /api/v2/jobs)
        :param from_sphinx: a flag specified by our sphinx plugin, which allows
                            us to walk API OPTIONS using this function
                            _without_ triggering a SystemExit (argparse's
                            behavior if required arguments are missing)
        """
        subparsers = self.subparsers[self.resource].add_subparsers(dest='action', metavar='action')
        subparsers.required = True

        # parse the action from OPTIONS
        parser = ResourceOptionsParser(self.v2, page, self.resource, subparsers)
        if parser.deprecated:
            description = 'This resource has been deprecated and will be removed in a future release.'
            if not from_sphinx:
                description = colored(description, 'yellow')
            self.subparsers[self.resource].description = description

        if from_sphinx:
            # Our Sphinx plugin runs `parse_action` for *every* available
            # resource + action in the API so that it can generate usage
            # strings for automatic doc generation.
            #
            # Because of this behavior, we want to silently ignore the
            # `SystemExit` argparse will raise when you're missing required
            # positional arguments (which some actions have).
            try:
                self.parser.parse_known_args(self.argv)[0]
            except SystemExit:
                pass
        else:
            self.parser.parse_known_args()[0]

        # parse any action arguments
        if self.resource != 'settings':
            for method in ('list', 'modify', 'create'):
                if method in parser.parser.choices:
                    parser.build_query_arguments(method, 'GET' if method == 'list' else 'POST')
        if from_sphinx:
            parsed, extra = self.parser.parse_known_args(self.argv)
        else:
            parsed, extra = self.parser.parse_known_args()

        if extra and self.verbose:
            # If extraneous arguments were provided, warn the user
            cprint('{}: unrecognized arguments: {}'.format(self.parser.prog, ' '.join(extra)), 'yellow', file=self.stdout)

        # build a dictionary of all of the _valid_ flags specified on the
        # command line so we can pass them on to the underlying awxkit call
        # we ignore special global flags like `--help` and `--conf.xyz`, and
        # the positional resource argument (i.e., "jobs")
        # everything else is a flag used as a query argument for the HTTP
        # request we'll make (e.g., --username="Joe", --verbosity=3)
        parsed = parsed.__dict__
        parsed = dict((k, v) for k, v in parsed.items() if (v is not None and k not in ('help', 'resource') and not k.startswith('conf.')))

        # if `id` is one of the arguments, it's a detail view
        if 'id' in parsed:
            page.endpoint += '{}/'.format(str(parsed.pop('id')))

        # determine the awxkit method to call
        action = self.original_action = parsed.pop('action')
        page, action = handle_custom_actions(self.resource, action, page)
        self.method = {
            'list': 'get',
            'modify': 'patch',
        }.get(action, action)

        if self.method == 'patch' and not parsed:
            # If we're doing an HTTP PATCH with an empty payload,
            # just print the help message (it's a no-op anyways)
            parser.parser.choices['modify'].print_help()
            return

        if self.help:
            # If --help is specified on a subarg parser, bail out
            # and print its help text
            parser.parser.choices[self.original_action].print_help()
            return

        if self.original_action == 'create':
            return page.post(parsed)

        return getattr(page, self.method)(**parsed)

    def parse_args(self, argv, env=None):
        """Configure the global parser.ArgumentParser object and apply
        global flags (such as --help, authentication, and formatting arguments)

        :param argv: the raw argument vector (including the program name)
        :param env: environment mapping used for flag defaults; defaults to
                    os.environ
        """
        env = env or os.environ
        self.argv = argv
        self.parser = HelpfulArgumentParser(add_help=False)
        self.parser.add_argument(
            '--help',
            action='store_true',
            help='prints usage information for the awx tool',
        )
        self.parser.add_argument('--version', dest='conf.version', action='version', help='display awx CLI version', version=__version__)
        add_authentication_arguments(self.parser, env)
        add_output_formatting_arguments(self.parser, env)

        self.args = self.parser.parse_known_args(self.argv)[0]
        self.verbose = self.get_config('verbose')
        if self.verbose:
            logging.basicConfig(level='DEBUG')
        self.color = self.get_config('color')
        if not self.color:
            disable_color()
        fmt = self.get_config('format')
        if fmt not in FORMATTERS.keys():
            self.parser.error('No formatter %s available.' % (fmt))
0707010000004A000081A400000000000000000000000166846B9200004F57000000000000000000000000000000000000002000000000awx-24.6.1/awxkit/cli/custom.pyimport functools
import json

from .stdout import monitor, monitor_workflow
from .utils import CustomRegistryMeta, color_enabled
from awxkit import api
from awxkit.config import config
from awxkit.exceptions import NoContent


def handle_custom_actions(resource, action, page):
    """Swap in a registered CustomAction for (resource, action), if any.

    Returns the (possibly wrapped) page plus the action name to invoke on
    it; custom actions are always invoked via their ``perform`` method.
    """
    registry_key = '{} {}'.format(resource, action)
    if registry_key in CustomAction.registry:
        page = CustomAction.registry[registry_key](page)
        action = 'perform'
    return page, action


class CustomActionRegistryMeta(CustomRegistryMeta):
    """Metaclass registering custom actions under the key '<resource> <action>'."""

    @property
    def name(self):
        return '{} {}'.format(self.resource, self.action)


class CustomAction(metaclass=CustomActionRegistryMeta):
    """Base class for defining a custom action for a resource.

    Subclasses must define ``resource``, ``action``, and ``perform``;
    ``add_arguments`` may be overridden to register extra CLI flags.
    """

    def __init__(self, page):
        self.page = page

    @property
    def resource(self):
        raise NotImplementedError()

    @property
    def action(self):
        raise NotImplementedError()

    @property
    def perform(self):
        raise NotImplementedError()

    def add_arguments(self, parser, resource_options_parser):
        # No extra arguments by default.
        pass


class Launchable(object):
    """Mixin adding launch + monitor/wait behavior to launchable resources."""

    @property
    def options_endpoint(self):
        # OPTIONS against a (placeholder pk=1) detail view of the action.
        return self.page.endpoint + '1/{}/'.format(self.action)

    def add_arguments(self, parser, resource_options_parser, with_pk=True):
        from .options import pk_or_name

        action_parser = parser.choices[self.action]
        if with_pk:
            action_parser.add_argument('id', type=functools.partial(pk_or_name, None, self.resource, page=self.page), help='')
        action_parser.add_argument('--monitor', action='store_true', help='If set, prints stdout of the launched job until it finishes.')
        action_parser.add_argument('--action-timeout', type=int, help='If set with --monitor or --wait, time out waiting on job completion.')
        action_parser.add_argument('--wait', action='store_true', help='If set, waits until the launched job finishes.')
        action_parser.add_argument(
            '--interval',
            type=float,
            help='If set with --monitor or --wait, amount of time to wait in seconds between api calls. Minimum value is 2.5 seconds to avoid overwhelming the api',
        )

        # Launch-time fields are discovered via OPTIONS on the launch endpoint.
        options_response = self.page.connection.options(self.options_endpoint)
        if options_response.ok:
            resource_options_parser.options['LAUNCH'] = options_response.json()['actions']['POST']
            resource_options_parser.build_query_arguments(self.action, 'LAUNCH')

    def monitor(self, response, **kwargs):
        """If --monitor/--wait was given, follow the job and record its status."""
        follow = monitor_workflow if response.type == 'workflow_job' else monitor
        if kwargs.get('monitor') or kwargs.get('wait'):
            status = follow(
                response,
                self.page.connection.session,
                print_stdout=not kwargs.get('wait'),
                action_timeout=kwargs.get('action_timeout'),
                interval=kwargs.get('interval'),
            )
            if status:
                response.json['status'] = status
                if status in ('failed', 'error'):
                    # Propagate a nonzero exit code back to the CLI.
                    setattr(response, 'rc', 1)
        return response

    def perform(self, **kwargs):
        # Split monitoring flags out of the POST payload before launching.
        monitor_kwargs = {
            'monitor': kwargs.pop('monitor', False),
            'wait': kwargs.pop('wait', False),
            'action_timeout': kwargs.pop('action_timeout', False),
            'interval': kwargs.pop('interval', 5),
        }
        launched = self.page.get().related.get(self.action).post(kwargs)
        self.monitor(launched, **monitor_kwargs)
        return launched


class JobTemplateLaunch(Launchable, CustomAction):
    # `awx job_templates launch <id>` -- POST to the template's launch endpoint.
    action = 'launch'
    resource = 'job_templates'


class BulkJobLaunch(Launchable, CustomAction):
    """`awx bulk job_launch`: submit a bulk job launch request."""

    action = 'job_launch'
    resource = 'bulk'

    @property
    def options_endpoint(self):
        # Bulk endpoints have no detail view; OPTIONS the action directly.
        return '{}{}/'.format(self.page.endpoint, self.action)

    def add_arguments(self, parser, resource_options_parser):
        # Bulk launches take no primary key.
        Launchable.add_arguments(self, parser, resource_options_parser, with_pk=False)

    def perform(self, **kwargs):
        monitor_kwargs = {
            'monitor': kwargs.pop('monitor', False),
            'wait': kwargs.pop('wait', False),
            'action_timeout': kwargs.pop('action_timeout', False),
        }
        launched = self.page.get().job_launch.post(kwargs)
        self.monitor(launched, **monitor_kwargs)
        return launched


class BulkHostCreate(CustomAction):
    """`awx bulk host_create`: create many hosts in a single request."""

    action = 'host_create'
    resource = 'bulk'

    @property
    def options_endpoint(self):
        return '{}{}/'.format(self.page.endpoint, self.action)

    def add_arguments(self, parser, resource_options_parser):
        # Discover the POST fields via OPTIONS on the bulk endpoint.
        response = self.page.connection.options(self.options_endpoint)
        if response.ok:
            resource_options_parser.options['HOSTCREATEPOST'] = response.json()['actions']['POST']
            resource_options_parser.build_query_arguments(self.action, 'HOSTCREATEPOST')

    def perform(self, **kwargs):
        return self.page.get().host_create.post(kwargs)


class BulkHostDelete(CustomAction):
    """`awx bulk host_delete`: delete many hosts in a single request."""

    action = 'host_delete'
    resource = 'bulk'

    @property
    def options_endpoint(self):
        return '{}{}/'.format(self.page.endpoint, self.action)

    def add_arguments(self, parser, resource_options_parser):
        # Discover the POST fields via OPTIONS on the bulk endpoint.
        response = self.page.connection.options(self.options_endpoint)
        if response.ok:
            resource_options_parser.options['HOSTDELETEPOST'] = response.json()['actions']['POST']
            resource_options_parser.build_query_arguments(self.action, 'HOSTDELETEPOST')

    def perform(self, **kwargs):
        return self.page.get().host_delete.post(kwargs)


class ProjectUpdate(Launchable, CustomAction):
    # `awx projects update <id>` -- trigger an SCM update for a project.
    action = 'update'
    resource = 'projects'


class ProjectCreate(CustomAction):
    """`awx projects create`: create a project, optionally following the
    initial SCM update it triggers."""

    action = 'create'
    resource = 'projects'

    def add_arguments(self, parser, resource_options_parser):
        create_parser = parser.choices[self.action]
        create_parser.add_argument('--monitor', action='store_true', help=('If set, prints stdout of the project update until ' 'it finishes.'))
        create_parser.add_argument('--wait', action='store_true', help='If set, waits until the new project has updated.')

    def post(self, kwargs):
        should_monitor = kwargs.pop('monitor', False)
        should_wait = kwargs.pop('wait', False)
        response = self.page.post(kwargs)
        if should_monitor or should_wait:
            # Creating a project kicks off an initial update; follow the
            # most recently created one.
            update = response.related.project_updates.get(order_by='-created').results[0]
            monitor(
                update,
                self.page.connection.session,
                print_stdout=not should_wait,
            )
        return response


class InventoryUpdate(Launchable, CustomAction):
    # `awx inventory_sources update <id>` -- sync an inventory source.
    action = 'update'
    resource = 'inventory_sources'


class AdhocCommandLaunch(Launchable, CustomAction):
    """`awx ad_hoc_commands create`: creating an ad hoc command launches it."""

    action = 'create'
    resource = 'ad_hoc_commands'

    def add_arguments(self, parser, resource_options_parser):
        # Ad hoc commands take no primary key at launch time.
        Launchable.add_arguments(self, parser, resource_options_parser, with_pk=False)

    def perform(self, **kwargs):
        monitor_kwargs = {
            'monitor': kwargs.pop('monitor', False),
            'wait': kwargs.pop('wait', False),
        }
        launched = self.page.post(kwargs)
        self.monitor(launched, **monitor_kwargs)
        return launched

    def post(self, kwargs):
        return self.perform(**kwargs)


class WorkflowLaunch(Launchable, CustomAction):
    # `awx workflow_job_templates launch <id>` -- launch a workflow job.
    action = 'launch'
    resource = 'workflow_job_templates'


class HasStdout(object):
    """Mixin adding a `stdout` action that downloads a job's output text."""

    action = 'stdout'

    def add_arguments(self, parser, resource_options_parser):
        from .options import pk_or_name

        parser.choices['stdout'].add_argument('id', type=functools.partial(pk_or_name, None, self.resource, page=self.page), help='')

    def perform(self):
        # Preserve ANSI escape codes only when color output is enabled.
        download_format = 'ansi_download' if color_enabled() else 'txt_download'
        stdout_link = self.page.get().related.stdout
        return self.page.connection.get(stdout_link, query_parameters=dict(format=download_format)).content.decode('utf-8')


class JobStdout(HasStdout, CustomAction):
    """`jobs stdout`: print the captured output of a job."""

    resource = 'jobs'


class ProjectUpdateStdout(HasStdout, CustomAction):
    """`project_updates stdout`: print the captured output of a project update."""

    resource = 'project_updates'


class InventoryUpdateStdout(HasStdout, CustomAction):
    """`inventory_updates stdout`: print the captured output of an inventory update."""

    resource = 'inventory_updates'


class AdhocCommandStdout(HasStdout, CustomAction):
    """`ad_hoc_commands stdout`: print the captured output of an ad hoc command."""

    resource = 'ad_hoc_commands'


class AssociationMixin(object):
    """Mixin providing (dis)associate actions that link a related object
    (e.g., a notification template or credential) to a resource.

    Subclasses supply ``targets``: a mapping of CLI flag name ->
    ``[related endpoint name, display/model name]``.
    """

    action = 'associate'

    def add_arguments(self, parser, resource_options_parser):
        from .options import pk_or_name

        # Positional id of the resource being (dis)associated from.
        parser.choices[self.action].add_argument('id', type=functools.partial(pk_or_name, None, self.resource, page=self.page), help='')
        # Exactly one of the --<target> flags below must be given.
        group = parser.choices[self.action].add_mutually_exclusive_group(required=True)
        for param, endpoint in self.targets.items():
            field, model_name = endpoint
            if not model_name:
                model_name = param
            help_text = 'The ID (or name) of the {} to {}'.format(model_name, self.action)

            # Minimal page-like adapter so pk_or_name can resolve a name to a
            # pk by querying the top-level endpoint for the target type
            # (every *_notification flag maps onto notification_templates).
            class related_page(object):
                def __init__(self, connection, resource):
                    self.conn = connection
                    self.resource = {
                        'approval_notification': 'notification_templates',
                        'start_notification': 'notification_templates',
                        'success_notification': 'notification_templates',
                        'failure_notification': 'notification_templates',
                        'credential': 'credentials',
                        'galaxy_credential': 'credentials',
                    }[resource]

                def get(self, **kwargs):
                    v2 = api.Api(connection=self.conn).get().current_version.get()
                    return getattr(v2, self.resource).get(**kwargs)

            group.add_argument(
                '--{}'.format(param),
                metavar='',
                type=functools.partial(pk_or_name, None, param, page=related_page(self.page.connection, param)),
                help=help_text,
            )

    def perform(self, **kwargs):
        # NOTE(review): the return statement sits *inside* the loop, so only
        # the first kwarg is processed -- harmless in practice because the
        # argparse group above is mutually exclusive (at most one target
        # flag is supplied), but confirm before relying on multiple kwargs.
        for k, v in kwargs.items():
            endpoint, _ = self.targets[k]
            try:
                self.page.get().related[endpoint].post({'id': v, self.action: True})
            except NoContent:
                # we expect to enter this block because these endpoints return
                # HTTP 204 on success
                pass
            return self.page.get().related[endpoint].get()


class NotificationAssociateMixin(AssociationMixin):
    """Base ``targets`` shared by every notification (dis)association action."""

    targets = {
        '{}_notification'.format(outcome): [endpoint, 'notification_template']
        for outcome, endpoint in (
            ('start', 'notification_templates_started'),
            ('success', 'notification_templates_success'),
            ('failure', 'notification_templates_error'),
        )
    }


class JobTemplateNotificationAssociation(NotificationAssociateMixin, CustomAction):
    """`job_templates associate`: attach a notification template or credential."""

    resource = 'job_templates'
    action = 'associate'
    # own copy: extended at module level with job-template-only targets
    targets = NotificationAssociateMixin.targets.copy()


class JobTemplateNotificationDisAssociation(NotificationAssociateMixin, CustomAction):
    """`job_templates disassociate`: detach a notification template or credential."""

    resource = 'job_templates'
    action = 'disassociate'
    # own copy: extended at module level with job-template-only targets
    targets = NotificationAssociateMixin.targets.copy()


# Job templates can additionally (dis)associate plain credentials.
JobTemplateNotificationAssociation.targets.update({'credential': ['credentials', None]})
JobTemplateNotificationDisAssociation.targets.update({'credential': ['credentials', None]})


class WorkflowJobTemplateNotificationAssociation(NotificationAssociateMixin, CustomAction):
    """`workflow_job_templates associate`: attach a notification template."""

    resource = 'workflow_job_templates'
    action = 'associate'
    # own copy: extended at module level with the approval-notification target
    targets = NotificationAssociateMixin.targets.copy()


class WorkflowJobTemplateNotificationDisAssociation(NotificationAssociateMixin, CustomAction):
    """`workflow_job_templates disassociate`: detach a notification template."""

    resource = 'workflow_job_templates'
    action = 'disassociate'
    # own copy: extended at module level with the approval-notification target
    targets = NotificationAssociateMixin.targets.copy()


# Workflow job templates additionally support approval notifications.
WorkflowJobTemplateNotificationAssociation.targets.update({'approval_notification': ['notification_templates_approvals', 'notification_template']})
WorkflowJobTemplateNotificationDisAssociation.targets.update({'approval_notification': ['notification_templates_approvals', 'notification_template']})


class ProjectNotificationAssociation(NotificationAssociateMixin, CustomAction):
    """`projects associate`: attach a notification template to a project."""

    action = 'associate'
    resource = 'projects'


class ProjectNotificationDisAssociation(NotificationAssociateMixin, CustomAction):
    """`projects disassociate`: detach a notification template from a project."""

    action = 'disassociate'
    resource = 'projects'


class InventorySourceNotificationAssociation(NotificationAssociateMixin, CustomAction):
    """`inventory_sources associate`: attach a notification template."""

    action = 'associate'
    resource = 'inventory_sources'


class InventorySourceNotificationDisAssociation(NotificationAssociateMixin, CustomAction):
    """`inventory_sources disassociate`: detach a notification template."""

    action = 'disassociate'
    resource = 'inventory_sources'


class OrganizationNotificationAssociation(NotificationAssociateMixin, CustomAction):
    """`organizations associate`: attach a notification template or Galaxy credential."""

    resource = 'organizations'
    action = 'associate'
    # own copy: extended at module level with organization-only targets
    targets = NotificationAssociateMixin.targets.copy()


class OrganizationNotificationDisAssociation(NotificationAssociateMixin, CustomAction):
    """`organizations disassociate`: detach a notification template or Galaxy credential."""

    resource = 'organizations'
    action = 'disassociate'
    # own copy: extended at module level with organization-only targets
    targets = NotificationAssociateMixin.targets.copy()


# Organizations additionally support approval notifications and Galaxy credentials.
OrganizationNotificationAssociation.targets.update(
    {'approval_notification': ['notification_templates_approvals', 'notification_template'], 'galaxy_credential': ['galaxy_credentials', 'credential']}
)
OrganizationNotificationDisAssociation.targets.update(
    {'approval_notification': ['notification_templates_approvals', 'notification_template'], 'galaxy_credential': ['galaxy_credentials', 'credential']}
)


class SettingsList(CustomAction):
    """`settings list`: show settings, optionally limited to one category/slug."""

    action = 'list'
    resource = 'settings'

    def add_arguments(self, parser, resource_options_parser):
        parser.choices['list'].add_argument('--slug', help='optional setting category/slug', default='all')

    def perform(self, slug):
        # e.g. .../settings/ -> .../settings/all/
        self.page.endpoint = '{}{}/'.format(self.page.endpoint, slug)
        return self.page.get()


class RoleMixin(object):
    """Mixin implementing `grant`/`revoke` of RBAC roles for users and teams.

    Subclasses set ``resource`` ('users' or 'teams') and ``action``
    ('grant' or 'revoke').
    """

    # [endpoint name, CLI flag name] pairs for every type that exposes roles
    has_roles = [
        ['organizations', 'organization'],
        ['projects', 'project'],
        ['inventories', 'inventory'],
        ['teams', 'team'],
        ['credentials', 'credential'],
        ['job_templates', 'job_template'],
        ['workflow_job_templates', 'workflow_job_template'],
        ['instance_groups', 'instance_group'],
    ]
    roles = {}  # this is calculated once

    def add_arguments(self, parser, resource_options_parser):
        from .options import pk_or_name

        # Discover the available object_roles per resource type via OPTIONS
        # requests; cached on the *class* so the round-trips happen only once
        # per process, shared by all RoleMixin subclasses.
        if not RoleMixin.roles:
            for resource, flag in self.has_roles:
                options = self.page.__class__(self.page.endpoint.replace(self.resource, resource), self.page.connection).options()
                RoleMixin.roles[flag] = [role.replace('_role', '') for role in options.json.get('object_roles', [])]

        possible_roles = set()
        for v in RoleMixin.roles.values():
            possible_roles.update(v)

        resource_group = parser.choices[self.action].add_mutually_exclusive_group(required=True)
        parser.choices[self.action].add_argument(
            'id',
            type=functools.partial(pk_or_name, None, self.resource, page=self.page),
            help='The ID (or name) of the {} to {} access to/from'.format(self.resource, self.action),
        )
        for _type in RoleMixin.roles.keys():
            # NOTE(review): self.resource is 'teams' (plural) for TeamGrant /
            # TeamRevoke, so this guard never fires -- looks like it was meant
            # to compare against 'teams'; confirm against upstream intent.
            if _type == 'team' and self.resource == 'team':
                # don't add a team to a team
                continue

            # Page-like adapter letting pk_or_name resolve a name against the
            # top-level endpoint for the target resource type.
            class related_page(object):
                def __init__(self, connection, resource):
                    self.conn = connection
                    if resource == 'inventories':
                        resource = 'inventory'  # d'oh, this is special
                    self.resource = resource

                def get(self, **kwargs):
                    v2 = api.Api(connection=self.conn).get().current_version.get()
                    return getattr(v2, self.resource).get(**kwargs)

            # dict((v, k) ...) inverts has_roles: CLI flag -> endpoint name
            resource_group.add_argument(
                '--{}'.format(_type),
                type=functools.partial(pk_or_name, None, _type, page=related_page(self.page.connection, dict((v, k) for k, v in self.has_roles)[_type])),
                metavar='ID',
                help='The ID (or name) of the target {}'.format(_type),
            )
        parser.choices[self.action].add_argument(
            '--role', type=str, choices=possible_roles, required=True, help='The name of the role to {}'.format(self.action)
        )

    def perform(self, **kwargs):
        for resource, flag in self.has_roles:
            if flag in kwargs:
                role = kwargs['role']
                # Re-validate here: argparse only checked --role against the
                # union of all roles, not the set valid for this resource type.
                if role not in RoleMixin.roles[flag]:
                    options = ', '.join(RoleMixin.roles[flag])
                    raise ValueError("invalid choice: '{}' must be one of {}".format(role, options))
                value = kwargs[flag]
                # Fetch the target object to look up the id of its named role.
                target = '{}v2/{}/{}'.format(config.api_base_path, resource, value)
                detail = self.page.__class__(target, self.page.connection).get()
                object_roles = detail['summary_fields']['object_roles']
                actual_role = object_roles[role + '_role']
                params = {'id': actual_role['id']}
                if self.action == 'grant':
                    params['associate'] = True
                if self.action == 'revoke':
                    params['disassociate'] = True

                try:
                    self.page.get().related.roles.post(params)
                except NoContent:
                    # we expect to enter this block because these endpoints return
                    # HTTP 204 on success
                    pass


class UserGrant(RoleMixin, CustomAction):
    """`users grant`: give a user a role on some resource."""

    action = 'grant'
    resource = 'users'


class UserRevoke(RoleMixin, CustomAction):
    """`users revoke`: remove a user's role on some resource."""

    action = 'revoke'
    resource = 'users'


class TeamGrant(RoleMixin, CustomAction):
    """`teams grant`: give a team a role on some resource."""

    action = 'grant'
    resource = 'teams'


class TeamRevoke(RoleMixin, CustomAction):
    """`teams revoke`: remove a team's role on some resource."""

    action = 'revoke'
    resource = 'teams'


class SettingsModify(CustomAction):
    """`settings modify`: change a single setting by key."""

    action = 'modify'
    resource = 'settings'

    def add_arguments(self, parser, resource_options_parser):
        # Valid keys are discovered from the OPTIONS metadata of /settings/all/.
        options = self.page.__class__(self.page.endpoint + 'all/', self.page.connection).options()
        valid_keys = sorted(options['actions']['PUT'].keys())
        parser.choices['modify'].add_argument('key', choices=valid_keys, metavar='key', help='')
        parser.choices['modify'].add_argument('value', help='')

    def perform(self, key, value):
        self.page.endpoint = self.page.endpoint + 'all/'
        # JSON-looking values are sent as structured data, anything else as a string.
        patch_value = json.loads(value) if self.is_json(value) else value
        resp = self.page.patch(**{key: patch_value})
        return resp.from_json({'key': key, 'value': resp[key]})

    def is_json(self, data):
        """Return True when *data* parses as JSON."""
        try:
            json.loads(data)
        except json.decoder.JSONDecodeError:
            return False
        return True


class HasMonitor(object):
    """Mixin adding a `monitor` action that follows a running job to completion."""

    action = 'monitor'

    def add_arguments(self, parser, resource_options_parser):
        from .options import pk_or_name

        parser.choices[self.action].add_argument('id', type=functools.partial(pk_or_name, None, self.resource, page=self.page), help='')

    def perform(self, **kwargs):
        response = self.page.get()
        # Workflow jobs need the node-aware monitor loop.
        mon = monitor_workflow if response.type == 'workflow_job' else monitor
        if not response.failed and response.status != 'successful':
            status = mon(
                response,
                self.page.connection.session,
            )
            if status:
                # Record the final state on the response object in place...
                response.json['status'] = status
                # ...and propagate failure as a non-zero CLI exit code.
                if status in ('failed', 'error'):
                    setattr(response, 'rc', 1)
        else:
            # Job already reached a terminal state before we could attach.
            return 'Unable to monitor finished job'


class JobMonitor(HasMonitor, CustomAction):
    """`jobs monitor`: follow a running job until it finishes."""

    resource = 'jobs'


class WorkflowJobMonitor(HasMonitor, CustomAction):
    """`workflow_jobs monitor`: follow a running workflow job until it finishes."""

    resource = 'workflow_jobs'
0707010000004B000041ED00000000000000000000000266846B9200000000000000000000000000000000000000000000001B00000000awx-24.6.1/awxkit/cli/docs0707010000004C000081A400000000000000000000000166846B920000027E000000000000000000000000000000000000002400000000awx-24.6.1/awxkit/cli/docs/Makefile# Minimal makefile for Sphinx documentation
#

# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS    ?=
SPHINXBUILD   ?= sphinx-build
SOURCEDIR     = source
BUILDDIR      = build

# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

.PHONY: help Makefile

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option.  $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
0707010000004D000081A400000000000000000000000166846B9200000181000000000000000000000000000000000000002500000000awx-24.6.1/awxkit/cli/docs/README.mdBuilding the Documentation
--------------------------
To build the docs, spin up a real AWX server, `pip install sphinx sphinxcontrib-autoprogram`, and run:

    ~ CONTROLLER_HOST=https://awx.example.org CONTROLLER_USERNAME=example CONTROLLER_PASSWORD=secret make clean html
    ~ cd build/html/ && python -m http.server
    Serving HTTP on 0.0.0.0 port 8000 (http://0.0.0.0:8000/) ...
0707010000004E000081A400000000000000000000000166846B920000031F000000000000000000000000000000000000002400000000awx-24.6.1/awxkit/cli/docs/make.bat@ECHO OFF

pushd %~dp0

REM Command file for Sphinx documentation

if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=source
set BUILDDIR=build

if "%1" == "" goto help

%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
	echo.
	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
	echo.installed, then set the SPHINXBUILD environment variable to point
	echo.to the full path of the 'sphinx-build' executable. Alternatively you
	echo.may add the Sphinx directory to PATH.
	echo.
	echo.If you don't have Sphinx installed, grab it from
	echo.http://sphinx-doc.org/
	exit /b 1
)

%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end

:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%

:end
popd
0707010000004F000041ED00000000000000000000000266846B9200000000000000000000000000000000000000000000002200000000awx-24.6.1/awxkit/cli/docs/source07070100000050000081A400000000000000000000000166846B92000008A9000000000000000000000000000000000000003500000000awx-24.6.1/awxkit/cli/docs/source/authentication.rst.. _authentication:

Authentication
==============

Generating a Personal Access Token
----------------------------------

The preferred mechanism for authenticating with AWX and |RHAT| is by generating and storing an OAuth2.0 token.  Tokens can be scoped for read/write permissions, are easily revoked, and are more suited to third party tooling integration than session-based authentication.

|prog| provides a simple login command for generating a personal access token from your username and password.

.. code:: bash

    CONTROLLER_HOST=https://awx.example.org \
        CONTROLLER_USERNAME=alice \
        CONTROLLER_PASSWORD=secret \
        awx login

As a convenience, the ``awx login -f human`` command prints a shell-formatted token
value:

.. code:: bash

    export CONTROLLER_OAUTH_TOKEN=6E5SXhld7AMOhpRveZsLJQsfs9VS8U

By ingesting this token, you can run subsequent CLI commands without having to
specify your username and password each time:

.. code:: bash

    export CONTROLLER_HOST=https://awx.example.org
    $(CONTROLLER_USERNAME=alice CONTROLLER_PASSWORD=secret awx login -f human)
    awx config

Working with OAuth2.0 Applications
----------------------------------

AWX and |RHAT| allow you to configure OAuth2.0 applications scoped to specific
organizations.  To generate an application token (instead of a personal access
token), specify the **Client ID** and **Client Secret** generated when the
application was created.

.. code:: bash

    CONTROLLER_USERNAME=alice CONTROLLER_PASSWORD=secret awx login \
        --conf.client_id <value> --conf.client_secret <value>


OAuth2.0 Token Scoping
----------------------

By default, tokens created with ``awx login`` are write-scoped.  To generate
a read-only token, specify ``--scope read``:

.. code:: bash

    CONTROLLER_USERNAME=alice CONTROLLER_PASSWORD=secret \
        awx login --conf.scope read

Session Authentication
----------------------
If you do not want or need to generate a long-lived token, |prog| allows you to
specify your username and password on every invocation:

.. code:: bash

    CONTROLLER_USERNAME=alice CONTROLLER_PASSWORD=secret awx jobs list
    awx --conf.username alice --conf.password secret jobs list
07070100000051000081A400000000000000000000000166846B92000007E9000000000000000000000000000000000000002A00000000awx-24.6.1/awxkit/cli/docs/source/conf.py# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# http://www.sphinx-doc.org/en/master/config

# -- Path setup --------------------------------------------------------------

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))


# -- Project information -----------------------------------------------------

project = 'AWX CLI'
copyright = '2024, Ansible by Red Hat'
author = 'Ansible by Red Hat'


# -- General configuration ---------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['awxkit.cli.sphinx']

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []


# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
html_theme = 'classic'

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

rst_epilog = '''
.. |prog| replace:: awx
.. |at| replace:: automation controller
.. |At| replace:: Automation controller
.. |RHAT| replace:: Red Hat Ansible Automation Platform controller
'''
07070100000052000081A400000000000000000000000166846B920000093E000000000000000000000000000000000000002F00000000awx-24.6.1/awxkit/cli/docs/source/examples.rstUsage Examples
==============

Verifying CLI Configuration
---------------------------

To confirm that you've properly configured ``awx`` to point at the correct
AWX/|RHAT| host, and that your authentication credentials are correct, run:

.. code:: bash

    awx config

.. note:: For help configuring authentication settings with the awx CLI, see :ref:`authentication`.

Printing the History of a Particular Job
----------------------------------------

To print a table containing the recent history of any jobs named ``Example Job Template``:

.. code:: bash

    awx jobs list --all --name 'Example Job Template' \
        -f human --filter 'name,created,status'

Creating and Launching a Job Template
-------------------------------------

Assuming you have an existing Inventory named ``Demo Inventory``, here's how
you might set up a new project from a GitHub repository, and run (and monitor
the output of) a playbook from that repository:

.. code:: bash

    awx projects create --wait \
        --organization 1 --name='Example Project' \
        --scm_type git --scm_url 'https://github.com/ansible/ansible-tower-samples' \
        -f human
    awx job_templates create \
        --name='Example Job Template' --project 'Example Project' \
        --playbook hello_world.yml --inventory 'Demo Inventory' \
        -f human
    awx job_templates launch 'Example Job Template' --monitor -f human

Updating a Job Template with Extra Vars
---------------------------------------

.. code:: bash

    awx job_templates modify 1 --extra_vars "@vars.yml"
    awx job_templates modify 1 --extra_vars "@vars.json"

Importing an SSH Key
--------------------

.. code:: bash

    awx credentials create --credential_type 'Machine' \
        --name 'My SSH Key' --user 'alice' \
        --inputs '{"username": "server-login", "ssh_key_data": "@~/.ssh/id_rsa"}'

Import/Export
-------------

Intended to be similar to `tower-cli send` and `tower-cli receive`.

Exporting everything:

.. code:: bash

    awx export

Exporting everything of some particular type or types:

.. code:: bash

    awx export --users

Exporting a particular named resource:

.. code:: bash

    awx export --users admin

Exporting a resource by id:

.. code:: bash

    awx export --users 42

Importing a set of resources stored as a file:

.. code:: bash

    awx import < resources.json
07070100000053000081A400000000000000000000000166846B92000003D5000000000000000000000000000000000000002C00000000awx-24.6.1/awxkit/cli/docs/source/index.rst.. AWX CLI documentation master file, created by
   sphinx-quickstart on Mon Jul 22 11:39:10 2019.
   You can adapt this file completely to your liking, but it should at least
   contain the root `toctree` directive.

AWX Command Line Interface
==========================

|prog| is the official command-line client for AWX and |RHAT|.  It:

* Uses naming and structure consistent with the AWX HTTP API
* Provides consistent output formats with optional machine-parsable formats
* To the extent possible, auto-detects API versions, available endpoints, and
  feature support across multiple versions of AWX and |RHAT|.

Potential uses include:

* Configuring and launching jobs/playbooks
* Checking on the status and output of job runs
* Managing objects like organizations, users, teams, etc...

.. toctree::
   :maxdepth: 3

   usage
   authentication
   output
   examples
   reference

Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
07070100000054000081A400000000000000000000000166846B9200000141000000000000000000000000000000000000002E00000000awx-24.6.1/awxkit/cli/docs/source/install.rstThe preferred way to install the AWX CLI is through pip:

.. code:: bash

    pip install "git+https://github.com/ansible/awx.git@$VERSION#egg=awxkit&subdirectory=awxkit"

...where ``$VERSION`` is the version of AWX you're running.  To see a list of all available releases, visit: https://github.com/ansible/awx/releases
07070100000055000081A400000000000000000000000166846B92000005E2000000000000000000000000000000000000002D00000000awx-24.6.1/awxkit/cli/docs/source/output.rst.. _formatting:


Output Formatting
=================

By default, awx prints valid JSON for successful commands.  The ``-f`` (or
``--conf.format``) global flag can be used to specify alternative output
formats.

YAML Formatting
---------------

To print results in YAML, specify ``-f yaml``:

.. code:: bash

    awx jobs list -f yaml

Human-Readable (Tabular) Formatting
-----------------------------------

|prog| also provides support for printing results in a human-readable
ASCII table format:

.. code:: bash

    awx jobs list -f human
    awx jobs list -f human --filter name,created,status
    awx jobs list -f human --filter *


Custom Formatting with jq
-------------------------

|prog| provides *optional* support for filtering results using the ``jq`` JSON
processor, but it requires an additional Python software dependency,
``jq``.

To use ``-f jq``, you must install the optional dependency via ``pip
install jq``.  Note that some platforms may require additional programs to
build ``jq`` from source (like ``libtool``).  See https://pypi.org/project/jq/ for instructions.

.. code:: bash

    awx jobs list \
        -f jq --filter '.results[] | .name + " is " + .status'

For details on ``jq`` filtering usage, see the ``jq`` manual at https://stedolan.github.io/jq/


Colorized Output
----------------

By default, |prog| prints colorized output using ANSI color codes.  To disable
this functionality, specify ``--conf.color f`` or set the environment variable
``CONTROLLER_COLOR=f``.
07070100000056000081A400000000000000000000000166846B920000004A000000000000000000000000000000000000003000000000awx-24.6.1/awxkit/cli/docs/source/reference.rst.. autoprogram:: awxkit.cli.sphinx:parser
    :prog: awx
    :maxdepth: 3
07070100000057000081A400000000000000000000000166846B9200000BF8000000000000000000000000000000000000002C00000000awx-24.6.1/awxkit/cli/docs/source/usage.rstBasic Usage
===========

Installation
------------

.. include:: install.rst


Synopsis
--------

|prog| commands follow a simple format:

.. code:: bash

    awx [<global-options>] <resource> <action> [<arguments>]
    awx --help

The ``resource`` is a type of object within AWX (a noun), such as ``users`` or ``organizations``.

The ``action`` is the thing you want to do (a verb). Resources generally have a base set of actions (``get``, ``list``, ``create``, ``modify``, and ``delete``), and have options corresponding to fields on the object in AWX.  Some resources have special actions, like ``job_templates launch``.


Getting Started
---------------

Using |prog| requires some initial configuration.  Here is a simple example for interacting with an AWX or |RHAT| server:

.. code:: bash

    awx --conf.host https://awx.example.org \
        --conf.username joe --conf.password secret \
        --conf.insecure \
        users list

There are multiple ways to configure and authenticate with an AWX or |RHAT| server.  For more details, see :ref:`authentication`.

By default, |prog| prints valid JSON for successful commands.  Certain commands (such as those for printing job stdout) print raw text and do not allow for custom formatting.  For details on customizing |prog|'s output format, see :ref:`formatting`.


Resources and Actions
---------------------

To get a list of available resources:

.. code:: bash

    awx --conf.host https://awx.example.org --help

To get a description of a specific resource, and list its available actions (and their arguments):

.. code:: bash

    awx --conf.host https://awx.example.org users --help
    awx --conf.host https://awx.example.org users create --help


.. note:: The list of resources and actions may vary based on context.  For
    example, certain resources may not be available based on role-based access
    control (e.g., if you do not have permission to launch certain Job Templates,
    `launch` may not show up as an action for certain `job_templates` objects).


Global Options
--------------
|prog| accepts global options that control overall behavior.  In addition to CLI flags, most global options have a corresponding environment variable that may be used to set the value.  If both are provided, the command line option takes priority.

A few of the most important ones are:

``-h, --help``
    Prints usage information for the |prog| tool

``-v, --verbose``
    prints debug-level logs, including HTTP(s) requests made

``-f, --conf.format``
    used to specify a custom output format (the default is json)

``--conf.host, CONTROLLER_HOST``
    the full URL of the AWX/|RHAT| host (i.e., https://my.awx.example.org)

``-k, --conf.insecure, CONTROLLER_VERIFY_SSL``
    allows insecure server connections when using SSL

``--conf.username, CONTROLLER_USERNAME``
    the AWX username to use for authentication

``--conf.password, CONTROLLER_PASSWORD``
    the AWX password to use for authentication

``--conf.token, CONTROLLER_OAUTH_TOKEN``
    an OAuth2.0 token to use for authentication
07070100000058000081A400000000000000000000000166846B92000019F9000000000000000000000000000000000000002000000000awx-24.6.1/awxkit/cli/format.pyimport locale
import json
from distutils.util import strtobool

import yaml

from awxkit.cli.utils import colored
from awxkit import config


def get_config_credentials():
    """Load username and password from config.credentials.default.

    In order to respect configurations from AWXKIT_CREDENTIAL_FILE.
    Falls back to the stock 'admin'/'password' pair when no credential
    config is loaded.
    """
    fallback_username, fallback_password = 'admin', 'password'

    if not hasattr(config, 'credentials'):
        return fallback_username, fallback_password

    default = config.credentials.get('default', {})
    username = default.get('username', fallback_username)
    password = default.get('password', fallback_password)
    return (username, password)


def add_authentication_arguments(parser, env):
    """Attach the shared authentication options (host, token, username,
    password, --conf.insecure) to *parser*, defaulting each from *env*.

    Every option honors both the CONTROLLER_* and legacy TOWER_* variables.
    """
    auth = parser.add_argument_group('authentication')
    auth.add_argument(
        '--conf.host',
        default=env.get('CONTROLLER_HOST', env.get('TOWER_HOST', 'https://127.0.0.1:443')),
        metavar='https://example.awx.org',
    )
    token_default = env.get('CONTROLLER_OAUTH_TOKEN', env.get('CONTROLLER_TOKEN', env.get('TOWER_OAUTH_TOKEN', env.get('TOWER_TOKEN', ''))))
    auth.add_argument(
        '--conf.token',
        default=token_default,
        help='an OAuth2.0 token (get one by using `awx login`)',
        metavar='TEXT',
    )

    config_username, config_password = get_config_credentials()
    # options configured via cli args take higher precedence than those from the config
    auth.add_argument('--conf.username', default=env.get('CONTROLLER_USERNAME', env.get('TOWER_USERNAME', config_username)), metavar='TEXT')
    auth.add_argument('--conf.password', default=env.get('CONTROLLER_PASSWORD', env.get('TOWER_PASSWORD', config_password)), metavar='TEXT')

    # Default is the *negation* of the verify-ssl setting: -k means "insecure".
    auth.add_argument(
        '-k',
        '--conf.insecure',
        help='Allow insecure server connections when using SSL',
        default=not strtobool(env.get('CONTROLLER_VERIFY_SSL', env.get('TOWER_VERIFY_SSL', 'True'))),
        action='store_true',
    )


def add_verbose(formatting, env):
    """Add the -v/--verbose flag to the *formatting* argument group."""
    verbose_default = strtobool(env.get('CONTROLLER_VERBOSE', env.get('TOWER_VERBOSE', 'f')))
    formatting.add_argument(
        '-v',
        '--verbose',
        dest='conf.verbose',
        help='print debug-level logs, including requests made',
        default=verbose_default,
        action="store_true",
    )


def add_formatting_import_export(parser, env):
    """Add the reduced format options used by `awx import`/`awx export`
    (only json and yaml make sense for bulk resource data)."""
    formatting = parser.add_argument_group('input/output formatting')
    fmt_kwargs = dict(
        dest='conf.format',
        choices=['json', 'yaml'],
        default=env.get('CONTROLLER_FORMAT', env.get('TOWER_FORMAT', 'json')),
        help='specify a format for the input and output',
    )
    formatting.add_argument('-f', '--conf.format', **fmt_kwargs)
    add_verbose(formatting, env)


def add_output_formatting_arguments(parser, env):
    """Add the full set of output options (-f, --filter, --conf.color, -v)
    used by ordinary `awx` subcommands."""
    formatting = parser.add_argument_group('input/output formatting')

    formatting.add_argument(
        '-f',
        '--conf.format',
        dest='conf.format',
        choices=FORMATTERS.keys(),
        default=env.get('CONTROLLER_FORMAT', env.get('TOWER_FORMAT', 'json')),
        help='specify a format for the input and output',
    )
    formatting.add_argument(
        '--filter',
        dest='conf.filter',
        default='.',
        metavar='TEXT',
        help='specify an output filter (only valid with jq or human format)',
    )
    formatting.add_argument(
        '--conf.color',
        metavar='BOOLEAN',
        help='Display colorized output.  Defaults to True',
        default=env.get('CONTROLLER_COLOR', env.get('TOWER_COLOR', 't')),
        type=strtobool,
    )
    add_verbose(formatting, env)


def format_response(response, fmt='json', filter='.', changed=False):
    """Render an API response page with the configured formatter.

    Returns None for empty (HTTP 204) responses; strings pass through
    untouched.  The noisy ``related`` link block is stripped from each
    record before formatting.  When *changed* is true, the rendered text
    is colorized green.
    """
    if response is None:
        return  # HTTP 204 - nothing to render
    if isinstance(response, str):
        return response

    # list pages carry their records under ``results``; a detail page
    # *is* the single record
    if 'results' in response.__dict__:
        records = response.results
    else:
        records = [response]
    for record in records:
        if 'related' in record.json:
            record.json.pop('related')

    rendered = FORMATTERS[fmt](response.json, filter)
    if changed:
        rendered = colored(rendered, 'green')
    return rendered


def format_jq(output, fmt):
    try:
        import jq
    except ImportError:
        if fmt == '.':
            return output
        raise ImportError(
            'To use `-f jq`, you must install the optional jq dependency.\n`pip install jq`\n',
            'Note that some platforms may require additional programs to '
            'build jq from source (like `libtool`).\n'
            'See https://pypi.org/project/jq/ for instructions.',
        )
    results = []
    for x in jq.jq(fmt).transform(output, multiple_output=True):
        if x not in (None, ''):
            if isinstance(x, str):
                results.append(x)
            else:
                results.append(json.dumps(x))
    return '\n'.join(results)


def format_json(output, fmt):
    """Serialize *output* as indented JSON; *fmt* is accepted but unused."""
    return json.dumps(output, indent=5)


def format_yaml(output, fmt):
    """Serialize *output* as block-style YAML; *fmt* is accepted but unused.

    The JSON round-trip coerces page-like objects into plain builtin types
    that ``yaml.safe_dump`` knows how to emit.
    """
    plain = json.loads(json.dumps(output))
    return yaml.safe_dump(plain, default_flow_style=False, allow_unicode=True)


def format_human(output, fmt):
    """Render *output* as a plain-text, space-padded table.

    *fmt* is a comma-separated list of column names; '.' means the default
    ``id,name`` pair, and '*' expands to every key of the first record
    (minus the noisy ``summary_fields``/``related`` blocks).
    """
    if fmt == '.':
        fmt = 'id,name'
    columns = [name.strip() for name in fmt.split(',')]

    # list responses carry a ``count`` key and nest records under
    # ``results``; detail responses are a single bare record
    records = output['results'] if 'count' in output else [output]

    if fmt == '*' and len(records):
        columns = list(records[0].keys())
        for noisy in ('summary_fields', 'related'):
            if noisy in columns:
                columns.remove(noisy)

    def render_cell(value):
        # integers get locale-aware grouping; containers become JSON;
        # None becomes empty; everything else passes through
        try:
            return locale.format_string("%.*f", (0, int(value)), True)
        except (ValueError, TypeError):
            if isinstance(value, (list, dict)):
                return json.dumps(value)
            return '' if value is None else value

    rows = [columns]
    rows.extend([[record.get(name, '') for name in columns] for record in records])

    # width of the widest rendered cell in each column
    widths = [max(len(render_cell(row[i])) for row in rows) for i in range(len(columns))]

    # separator row of === under the header
    rows.insert(1, ['=' * width for width in widths])

    # left-justify each cell to its column width plus one space of padding
    lines = []
    for row in rows:
        lines.append(''.join(render_cell(value).ljust(widths[i] + 1) for i, value in enumerate(row)))
    return '\n'.join(lines)


# Registry mapping each --conf.format choice to its renderer function.
FORMATTERS = {'json': format_json, 'yaml': format_yaml, 'jq': format_jq, 'human': format_human}
07070100000059000081A400000000000000000000000166846B9200003128000000000000000000000000000000000000002100000000awx-24.6.1/awxkit/cli/options.pyimport argparse
import functools
import json
import os
import re
import sys
import yaml

from distutils.util import strtobool

from .custom import CustomAction
from .format import add_output_formatting_arguments
from .resource import DEPRECATED_RESOURCES_REVERSE


# Resources whose records are uniquely identified by a field other than
# ``name``; pk_or_name uses the *last* entry of each tuple as the lookup
# field for that resource.
UNIQUENESS_RULES = {
    'me': ('id', 'username'),
    'users': ('id', 'username'),
    'instances': ('id', 'hostname'),
}


def pk_or_name_list(v2, model_name, value, page=None):
    """Resolve a comma-separated string of PKs/unique names to a list of PKs."""
    parts = (item.strip() for item in value.split(','))
    return [pk_or_name(v2, model_name, part, page=page) for part in parts]


def pk_or_name(v2, model_name, value, page=None):
    """Resolve *value* (a primary key or a unique name) to a primary key.

    Integers and all-digit strings are treated as PKs directly.  Otherwise
    the resource's list endpoint is queried by its identity field (usually
    ``name``); exactly one match resolves to that record's id, while zero
    or multiple matches raise ``argparse.ArgumentTypeError``.
    """
    if isinstance(value, int):
        return value
    if re.match(r'^[\d]+$', value):
        return int(value)

    identity = 'name'

    if not page:
        # fall back to the canonical resource name for deprecated aliases
        if not hasattr(v2, model_name) and model_name in DEPRECATED_RESOURCES_REVERSE:
            model_name = DEPRECATED_RESOURCES_REVERSE[model_name]
        if hasattr(v2, model_name):
            page = getattr(v2, model_name)

    if model_name in UNIQUENESS_RULES:
        identity = UNIQUENESS_RULES[model_name][-1]

    # certain related fields follow a pattern of <foo>_<model> e.g.,
    # target_credential etc... retry with the trailing model name
    if not page and '_' in model_name:
        return pk_or_name(v2, model_name.split('_')[-1], value, page)

    if page:
        matches = page.get(**{identity: value})
        if matches.count == 1:
            return int(matches.results[0].id)
        if matches.count > 1:
            raise argparse.ArgumentTypeError(
                'Multiple {0} exist with that {1}. To look up an ID, run:\nawx {0} list --{1} "{2}" -f human'.format(model_name, identity, value)
            )
        raise argparse.ArgumentTypeError('Could not find any {0} with that {1}.'.format(model_name, identity))

    return value


class JsonDumpsAction(argparse.Action):
    """Argparse action that merges repeated JSON values into one JSON string.

    argparse offers no end-of-parsing hook, so each occurrence of the flag
    re-reads the accumulated JSON string from the namespace, folds the new
    values in, and stores the merged dict back as a JSON string.
    """

    def __call__(self, parser, namespace, values, option_string=None):
        accumulated = getattr(namespace, self.dest, None) or '{}'
        merged = json.loads(accumulated)
        merged.update(values)
        setattr(namespace, self.dest, json.dumps(merged))


class ResourceOptionsParser(object):
    # set to True when the endpoint's OPTIONS probe answers with a
    # 299 "deprecated" Warning header (see get_allowed_options)
    deprecated = False

    def __init__(self, v2, page, resource, parser):
        """Used to submit an OPTIONS request to the appropriate endpoint
        and apply the appropriate argparse arguments

        :param v2: a awxkit.api.pages.page.TentativePage instance
        :param page: a awxkit.api.pages.page.TentativePage instance
        :param resource: a string containing the resource (e.g., jobs)
        :param parser: an argparse.ArgumentParser object to append new args to
        """
        self.v2 = v2
        self.page = page
        self.resource = resource
        self.parser = parser
        # OPTIONS "actions" metadata: field descriptions keyed by HTTP method
        self.options = getattr(self.page.options().json, 'actions', {'GET': {}})
        self.get_allowed_options()
        if self.resource != 'settings':
            # /api/v2/settings is a special resource that doesn't have
            # traditional list/detail endpoints
            self.build_list_actions()
            self.build_detail_actions()

        self.handle_custom_actions()

    def get_allowed_options(self):
        """Probe a detail endpoint (pk=1) to learn the allowed HTTP methods.

        Also flags the resource as deprecated when the server answers with
        a 299 Warning header mentioning deprecation.
        """
        options = self.page.connection.options(self.page.endpoint + '1/')
        warning = options.headers.get('Warning', '')
        if '299' in warning and 'deprecated' in warning:
            self.deprecated = True
        self.allowed_options = options.headers.get('Allow', '').split(', ')

    def build_list_actions(self):
        """Add the ``list``/``create`` subcommands from list-endpoint OPTIONS."""
        # NOTE(review): assumes only GET/POST appear in self.options; any
        # other verb would raise KeyError here -- confirm against the API
        action_map = {
            'GET': 'list',
            'POST': 'create',
        }
        for method, action in self.options.items():
            method = action_map[method]
            parser = self.parser.add_parser(method, help='')
            if method == 'list':
                parser.add_argument(
                    '--all',
                    dest='all_pages',
                    action='store_true',
                    help=('fetch all pages of content from the API when returning results (instead of just the first page)'),
                )
                parser.add_argument(
                    '--order_by',
                    dest='order_by',
                    help=(
                        'order results by given field name, '
                        'prefix the field name with a dash (-) to sort in reverse eg --order_by=\'-name\','
                        'multiple sorting fields may be specified by separating the field names with a comma (,)'
                    ),
                )
                add_output_formatting_arguments(parser, {})

    def build_detail_actions(self):
        """Add ``get`` plus ``modify``/``delete`` when the endpoint allows them.

        Each takes a positional id that may also be a unique name (resolved
        via pk_or_name).
        """
        allowed = ['get']
        if 'PUT' in self.allowed_options:
            allowed.append('modify')
        if 'DELETE' in self.allowed_options:
            allowed.append('delete')
        for method in allowed:
            parser = self.parser.add_parser(method, help='')
            self.parser.choices[method].add_argument(
                'id', type=functools.partial(pk_or_name, self.v2, self.resource), help='the ID (or unique name) of the resource'
            )
            if method == 'get':
                add_output_formatting_arguments(parser, {})

    def build_query_arguments(self, method, http_method):
        """Translate OPTIONS field metadata for *http_method* into argparse
        arguments on the subparser registered for *method* (list/create/...).
        """
        required_group = None
        for k, param in self.options.get(http_method, {}).items():
            required = method == 'create' and param.get('required', False) is True
            help_text = param.get('help_text', '')
            args = ['--{}'.format(k)]

            if method == 'list':
                if k == 'id':
                    # don't allow `awx <resource> list` to filter on `--id`
                    # it's weird, and that's what awx <resource> get is for
                    continue
                help_text = 'only list {} with the specified {}'.format(self.resource, k)

            if method == 'list' and param.get('filterable') is False:
                continue

            def list_of_json_or_yaml(v):
                # same parsing as json_or_yaml, but the top level must be a list
                return json_or_yaml(v, expected_type=list)

            def json_or_yaml(v, expected_type=dict):
                # values starting with '@' are read from the named file
                if v.startswith('@'):
                    v = open(os.path.expanduser(v[1:])).read()
                try:
                    parsed = json.loads(v)
                except Exception:
                    try:
                        parsed = yaml.safe_load(v)
                    except Exception:
                        raise argparse.ArgumentTypeError("{} is not valid JSON or YAML".format(v))

                if not isinstance(parsed, expected_type):
                    raise argparse.ArgumentTypeError("{} is not valid JSON or YAML".format(v))

                if expected_type is dict:
                    # NOTE(review): k/v here shadow the enclosing loop's k
                    for k, v in parsed.items():
                        # add support for file reading at top-level JSON keys
                        # (to make things like SSH key data easier to work with)
                        if isinstance(v, str) and v.startswith('@'):
                            path = os.path.expanduser(v[1:])
                            parsed[k] = open(path).read()

                return parsed

            # map the OPTIONS-declared field type onto an argparse converter
            kwargs = {
                'help': help_text,
                'required': required,
                'type': {
                    'string': str,
                    'field': int,
                    'integer': int,
                    'boolean': strtobool,
                    'id': functools.partial(pk_or_name, self.v2, k),
                    'json': json_or_yaml,
                    'list_of_ids': functools.partial(pk_or_name_list, self.v2, k),
                }.get(param['type'], str),
            }
            meta_map = {
                'string': 'TEXT',
                'integer': 'INTEGER',
                'boolean': 'BOOLEAN',
                'id': 'ID',  # foreign key
                'list_of_ids': '[ID, ID, ...]',
                'json': 'JSON/YAML',
            }
            if param.get('choices', []):
                kwargs['choices'] = [c[0] for c in param['choices']]
                # if there are choices, try to guess at the type (we can't
                # just assume it's a list of str, but the API doesn't actually
                # explicitly tell us in OPTIONS all the time)
                sphinx = 'sphinx-build' in ' '.join(sys.argv)
                if isinstance(kwargs['choices'][0], int) and not sphinx:
                    kwargs['type'] = int
                else:
                    kwargs['choices'] = [str(choice) for choice in kwargs['choices']]
            elif param['type'] in meta_map:
                kwargs['metavar'] = meta_map[param['type']]

                if param['type'] == 'id' and not kwargs.get('help'):
                    kwargs['help'] = 'the ID of the associated  {}'.format(k)

                if param['type'] == 'list_of_ids':
                    kwargs['help'] = 'a list of comma-delimited {} to associate (IDs or unique names)'.format(k)

                if param['type'] == 'json' and method != 'list':
                    help_parts = []
                    if kwargs.get('help'):
                        help_parts.append(kwargs['help'])
                    else:
                        help_parts.append('a JSON or YAML string.')
                    help_parts.append('You can optionally specify a file path e.g., @path/to/file.yml')
                    kwargs['help'] = ' '.join(help_parts)

            # SPECIAL CUSTOM LOGIC GOES HERE :'(
            # There are certain requirements that aren't captured well by our
            # HTTP OPTIONS due to $reasons
            # This is where custom handling for those goes.
            if self.resource == 'users' and method == 'create' and k == 'password':
                kwargs['required'] = required = True
            if self.resource == 'ad_hoc_commands' and method == 'create' and k in ('inventory', 'credential'):
                kwargs['required'] = required = True
            if self.resource == 'job_templates' and method == 'create' and k in ('project', 'playbook'):
                kwargs['required'] = required = True

            # unlike *other* actual JSON fields in the API, inventory and JT
            # variables *actually* want json.dumps() strings (ugh)
            # see: https://github.com/ansible/awx/issues/2371
            if (self.resource in ('job_templates', 'workflow_job_templates') and k == 'extra_vars') or (
                self.resource in ('inventory', 'groups', 'hosts') and k == 'variables'
            ):
                kwargs['type'] = json_or_yaml
                kwargs['action'] = JsonDumpsAction

                if k == 'extra_vars':
                    args.append('-e')

            # special handling for bulk endpoints
            if self.resource == 'bulk':
                if method == "host_create":
                    if k == "inventory":
                        kwargs['required'] = required = True
                    if k == 'hosts':
                        kwargs['type'] = list_of_json_or_yaml
                        kwargs['required'] = required = True
                if method == "host_delete":
                    if k == 'hosts':
                        kwargs['type'] = list_of_json_or_yaml
                        kwargs['required'] = required = True
                if method == "job_launch":
                    if k == 'jobs':
                        kwargs['type'] = list_of_json_or_yaml
                        kwargs['required'] = required = True

            if required:
                if required_group is None:
                    required_group = self.parser.choices[method].add_argument_group('required arguments')
                    # put the required group first (before the optional args group)
                    self.parser.choices[method]._action_groups.reverse()
                required_group.add_argument(*args, **kwargs)
            else:
                self.parser.choices[method].add_argument(*args, **kwargs)

    def handle_custom_actions(self):
        """Attach any registered CustomAction subcommands for this resource."""
        for _, action in CustomAction.registry.items():
            if action.resource != self.resource:
                continue
            if action.action not in self.parser.choices:
                self.parser.add_parser(action.action, help='')
            action(self.page).add_arguments(self.parser, self)
0707010000005A000081A400000000000000000000000166846B9200002259000000000000000000000000000000000000002200000000awx-24.6.1/awxkit/cli/resource.pyimport yaml
import json
import os

from awxkit import api, config, yaml_file
from awxkit.exceptions import ImportExportError
from awxkit.utils import to_str
from awxkit.api.pages import Page
from awxkit.api.pages.api import EXPORTABLE_RESOURCES
from awxkit.cli.format import FORMATTERS, format_response, add_authentication_arguments, add_formatting_import_export
from awxkit.cli.utils import CustomRegistryMeta, cprint


# Root-level endpoints that don't represent database entities and don't
# follow list/detail semantics; they get special handling in the CLI.
CONTROL_RESOURCES = ['ping', 'config', 'me', 'metrics', 'mesh_visualizer']

# Canonical (plural) resource names mapped to their deprecated aliases,
# kept so old command spellings keep working.
DEPRECATED_RESOURCES = {
    'ad_hoc_commands': 'ad_hoc',
    'applications': 'application',
    'credentials': 'credential',
    'credential_types': 'credential_type',
    'groups': 'group',
    'hosts': 'host',
    'instances': 'instance',
    'instance_groups': 'instance_group',
    'inventory': 'inventories',
    'inventory_sources': 'inventory_source',
    'inventory_updates': 'inventory_update',
    'jobs': 'job',
    'job_templates': 'job_template',
    'execution_environments': 'execution_environment',
    'labels': 'label',
    'workflow_job_template_nodes': 'node',
    'notification_templates': 'notification_template',
    'organizations': 'organization',
    'projects': 'project',
    'project_updates': 'project_update',
    'roles': 'role',
    'schedules': 'schedule',
    'settings': 'setting',
    'teams': 'team',
    'workflow_job_templates': 'workflow',
    'workflow_jobs': 'workflow_job',
    'users': 'user',
}
# deprecated alias -> canonical name, for translating old spellings back
DEPRECATED_RESOURCES_REVERSE = dict((v, k) for k, v in DEPRECATED_RESOURCES.items())


class CustomCommand(metaclass=CustomRegistryMeta):
    """Base class for implementing custom commands.

    Custom commands represent static code which should run - they are
    responsible for returning and formatting their own output (which may or may
    not be JSON/YAML).  Subclasses are auto-registered (keyed by ``name``)
    via CustomRegistryMeta.
    """

    # one-line description shown in the CLI help output
    help_text = ''

    @property
    def name(self):
        """The command name as typed on the command line; must be overridden."""
        raise NotImplementedError()

    def handle(self, client, parser):
        """To be implemented by subclasses.
        Should return a dictionary that is JSON serializable
        """
        raise NotImplementedError()


class Login(CustomCommand):
    """Obtain an OAuth2 token for the configured user and print it."""

    name = 'login'
    help_text = 'authenticate and retrieve an OAuth2 token'

    def print_help(self, parser):
        # make the generic auth flags show up in `awx login --help`
        add_authentication_arguments(parser, os.environ)
        parser.print_help()

    def handle(self, client, parser):
        """Request an OAuth2 token and print it in the configured format.

        With ``-f human`` the token is printed as a shell ``export``
        statement suitable for eval'ing; otherwise it is rendered as
        ``{'token': ...}`` by the selected formatter.
        """
        auth = parser.add_argument_group('OAuth2.0 Options')
        auth.add_argument('--description', help='description of the generated OAuth2.0 token', metavar='TEXT')
        auth.add_argument('--conf.client_id', metavar='TEXT')
        auth.add_argument('--conf.client_secret', metavar='TEXT')
        auth.add_argument('--conf.scope', choices=['read', 'write'], default='write')
        if client.help:
            self.print_help(parser)
            raise SystemExit()
        parsed = parser.parse_known_args()[0]
        kwargs = {
            'client_id': getattr(parsed, 'conf.client_id', None),
            'client_secret': getattr(parsed, 'conf.client_secret', None),
            'scope': getattr(parsed, 'conf.scope', None),
        }
        if getattr(parsed, 'description', None):
            kwargs['description'] = parsed.description
        try:
            token = api.Api().get_oauth2_token(**kwargs)
        except Exception as e:
            # best-effort: show usage plus the error class, not a traceback
            self.print_help(parser)
            cprint('Error retrieving an OAuth2.0 token ({}).'.format(e.__class__), 'red')
        else:
            fmt = client.get_config('format')
            if fmt == 'human':
                print('export CONTROLLER_OAUTH_TOKEN={}'.format(token))
            else:
                print(to_str(FORMATTERS[fmt]({'token': token}, '.')).strip())


class Config(CustomCommand):
    """Dump the effective CLI configuration as a dict."""

    name = 'config'
    help_text = 'print current configuration values'

    def handle(self, client, parser):
        """Return the current configuration; honors ``--help`` by exiting."""
        if client.help:
            parser.print_help()
            raise SystemExit()
        return {
            'base_url': config.base_url,
            'token': client.get_config('token'),
            'use_sessions': config.use_sessions,
            'credentials': config.credentials,
        }


class Import(CustomCommand):
    """Read an export document from stdin and import it into the server."""

    name = 'import'
    help_text = 'import resources into Tower'

    def handle(self, client, parser):
        """Parse stdin as JSON or YAML and submit it via import_assets.

        Sets ``_has_error`` when the underlying import reported a failure
        so the caller can exit non-zero.  Returns an empty dict (nothing
        to print).
        """
        if parser:
            parser.usage = 'awx import < exportfile'
            parser.description = 'import resources from stdin'
            add_formatting_import_export(parser, {})
        if client.help:
            parser.print_help()
            raise SystemExit()

        fmt = client.get_config('format')
        if fmt == 'json':
            data = json.load(client.stdin)
        elif fmt == 'yaml':
            # awxkit's yaml_file.Loader adds custom tags; input is assumed
            # to come from a trusted `awx export` run
            data = yaml.load(client.stdin, Loader=yaml_file.Loader)
        else:
            raise ImportExportError("Unsupported format for Import: " + fmt)

        client.authenticate()
        client.v2.import_assets(data)

        self._has_error = getattr(client.v2, '_has_error', False)

        return {}


class Export(CustomCommand):
    """Serialize selected (or all) exportable resources to stdout."""

    name = 'export'
    help_text = 'export resources from Tower'

    def extend_parser(self, parser):
        """Add one ``--<resource>`` flag per exportable resource."""
        resources = parser.add_argument_group('resources')

        for resource in EXPORTABLE_RESOURCES:
            # This parsing pattern will result in 3 different possible outcomes:
            # 1) the resource flag is not used at all, which will result in the attr being None
            # 2) the resource flag is used with no argument, which will result in the attr being ''
            # 3) the resource flag is used with an argument, and the attr will be that argument's value
            resources.add_argument('--{}'.format(resource), nargs='*')

    def handle(self, client, parser):
        """Collect per-resource selections and return the export payload.

        Sets ``_has_error`` when the export reported a failure so the
        caller can exit non-zero.
        """
        self.extend_parser(parser)
        parser.usage = 'awx export > exportfile'
        parser.description = 'export resources to stdout'
        add_formatting_import_export(parser, {})
        if client.help:
            parser.print_help()
            raise SystemExit()

        parsed = parser.parse_known_args()[0]
        kwargs = {resource: getattr(parsed, resource, None) for resource in EXPORTABLE_RESOURCES}

        client.authenticate()
        data = client.v2.export_assets(**kwargs)

        self._has_error = getattr(client.v2, '_has_error', False)

        return data


def parse_resource(client, skip_deprecated=False):
    """Register all resource subparsers and dispatch custom commands.

    Custom commands (login, config, import, export, ...) are executed
    immediately and terminate the process via SystemExit (code 1 when the
    command flagged an error).  For ordinary resources the canonicalized
    resource name is returned so the caller can continue parsing the action.

    :param client: the CLI client (provides .parser, .subparsers, .argv, .v2)
    :param skip_deprecated: when True, don't register deprecated aliases
    """
    subparsers = client.parser.add_subparsers(
        dest='resource',
        metavar='resource',
    )

    # exit code used when a custom command reports a failure
    _system_exit = 0

    # check if the user is running a custom command
    for command in CustomCommand.__subclasses__():
        client.subparsers[command.name] = subparsers.add_parser(command.name, help=command.help_text)

    if hasattr(client, 'v2'):
        for k in client.v2.json.keys():
            if k in ('dashboard', 'config'):
                # - the Dashboard API is deprecated and not supported
                # - the Config command is already dealt with by the
                #    CustomCommand section above
                continue

            # argparse aliases are *only* supported in Python3 (not 2.7)
            kwargs = {}
            if not skip_deprecated:
                if k in DEPRECATED_RESOURCES:
                    kwargs['aliases'] = [DEPRECATED_RESOURCES[k]]

            client.subparsers[k] = subparsers.add_parser(k, help='', **kwargs)

    resource = client.parser.parse_known_args()[0].resource
    if resource in DEPRECATED_RESOURCES.values():
        # rewrite the deprecated alias in argv to its canonical name
        client.argv[client.argv.index(resource)] = DEPRECATED_RESOURCES_REVERSE[resource]
        resource = DEPRECATED_RESOURCES_REVERSE[resource]

    if resource in CustomCommand.registry:
        parser = client.subparsers[resource]
        command = CustomCommand.registry[resource]()
        response = command.handle(client, parser)

        if getattr(command, '_has_error', False):
            _system_exit = 1

        if response:
            _filter = client.get_config('filter')
            if resource == 'config' and client.get_config('format') == 'human':
                # render config as a key/value table for human output
                response = {'count': len(response), 'results': [{'key': k, 'value': v} for k, v in response.items()]}
                _filter = 'key, value'
            try:
                connection = client.root.connection
            except AttributeError:
                connection = None
            formatted = format_response(Page.from_json(response, connection=connection), fmt=client.get_config('format'), filter=_filter)
            print(formatted)
        raise SystemExit(_system_exit)
    else:
        return resource


def is_control_resource(resource):
    """Return True if *resource* is a special root-level control endpoint."""
    # special root level resources that don't represent database
    # entities that follow the list/detail semantic
    return resource in CONTROL_RESOURCES
0707010000005B000081A400000000000000000000000166846B9200000B98000000000000000000000000000000000000002000000000awx-24.6.1/awxkit/cli/sphinx.pyimport os

from docutils.nodes import Text, paragraph
from sphinxcontrib.autoprogram import AutoprogramDirective

from .client import CLI
from .resource import is_control_resource, CustomCommand


class CustomAutoprogramDirective(AutoprogramDirective):
    """Autoprogram directive that rewrites the generated page's heading.

    Replaces the default page title (the program name, "awx") with a
    human-friendly title and a short preamble.
    """

    def run(self):
        nodes = super(CustomAutoprogramDirective, self).run()

        # By default, the document generated by sphinxcontrib.autoprogram
        # just has a page title which is the program name ("awx")
        # The code here changes this slightly so the reference guide starts
        # with a human-friendly title and preamble

        # configure a custom page heading (not `awx`)
        heading = Text('Reference Guide')
        heading.parent = nodes[0][0]
        nodes[0][0].children = [heading]

        # add a descriptive top synopsis of the reference guide
        nodes[0].children.insert(1, paragraph(text=('This is an exhaustive guide of every available command in the awx CLI tool.')))
        disclaimer = (
            'The commands and parameters documented here can (and will) '
            'vary based on a variety of factors, such as the AWX API '
            'version, AWX settings, and access level of the authenticated '
            'user.  For the most accurate view of available commands, '
            'invoke the awx CLI using the --help flag.'
        )
        nodes[0].children.insert(2, paragraph(text=disclaimer))
        return nodes


def render():
    # This function is called by Sphinx when making the docs.
    #
    # It loops over every resource at `/api/v2/` and performs an HTTP OPTIONS
    # request to determine all of the supported actions and their arguments.
    #
    # The return value of this function is an argparse.ArgumentParser, which
    # the sphinxcontrib.autoprogram plugin crawls and generates an indexed
    # Sphinx document from.

    # a live AWX installation is required; fail fast if it isn't configured
    for e in (
        ('CONTROLLER_HOST', 'TOWER_HOST'),
        ('CONTROLLER_USERNAME', 'TOWER_USERNAME'),
        ('CONTROLLER_PASSWORD', 'TOWER_PASSWORD'),
    ):
        if not os.environ.get(e[0]) and not os.environ.get(e[1]):
            raise SystemExit('Please specify a valid {} for a real (running) installation.'.format(e[0]))  # noqa
    cli = CLI()
    cli.parse_args(['awx', '--help'])
    cli.connect()
    cli.authenticate()
    # parse_resource/parse_action raise SystemExit after printing help;
    # swallow it so every subparser still gets built
    try:
        cli.parse_resource(skip_deprecated=True)
    except SystemExit:
        pass
    for resource in cli.subparsers.keys():
        cli.argv = [resource, '--help']
        cli.resource = resource
        if resource in CustomCommand.registry or is_control_resource(resource):
            pass
        else:
            page = getattr(cli.v2, resource, None)
            if page:
                try:
                    cli.parse_action(page, from_sphinx=True)
                except SystemExit:
                    pass
    return cli.parser


def setup(app):
    """Sphinx extension entry point: register the custom autoprogram directive."""
    app.add_directive('autoprogram', CustomAutoprogramDirective)


# Built at import time so sphinxcontrib.autoprogram can reference this
# module's ``parser``; importing this module therefore requires a
# reachable, authenticated AWX (see render()).
parser = render()
0707010000005C000081A400000000000000000000000166846B9200000F1E000000000000000000000000000000000000002000000000awx-24.6.1/awxkit/cli/stdout.py# -*- coding: utf-8 -*-
from __future__ import print_function

import sys

import time

from .utils import cprint, color_enabled, STATUS_COLORS
from awxkit.config import config
from awxkit.utils import to_str


def monitor_workflow(response, session, print_stdout=True, action_timeout=None, interval=5):
    """Poll a launched workflow job until it finishes, echoing node statuses.

    :param response: the workflow job page returned from the launch
    :param session: unused here; kept for signature parity with monitor()
    :param print_stdout: when False, poll without printing anything
    :param action_timeout: optional wall-clock limit (seconds) before giving up
    :param interval: requested poll interval; effective minimum is 2.5 seconds
    :returns: the workflow job's final status string
    """
    get = response.url.get
    payload = {
        'order_by': 'finished',
        'unified_job_node__workflow_job': response.id,
    }

    def fetch(seen):
        # one status line per node job; ``seen`` tracks which lines were drawn
        results = response.connection.get(f"{config.api_base_path}v2/unified_jobs", payload).json()['results']

        # erase lines we've previously printed
        if print_stdout and sys.stdout.isatty():
            for _ in seen:
                sys.stdout.write('\x1b[1A')
                sys.stdout.write('\x1b[2K')

        for result in results:
            result['name'] = to_str(result['name'])
            if print_stdout:
                print(' ↳ {id} - {name} '.format(**result), end='')
                status = result['status']
                if color_enabled():
                    color = STATUS_COLORS.get(status, 'white')
                    cprint(status, color)
                else:
                    print(status)
                seen.add(result['id'])

    if print_stdout:
        cprint('------Starting Standard Out Stream------', 'red')

    if print_stdout:
        print('Launching {}...'.format(to_str(get().json.name)))

    started = time.time()
    seen = set()
    while True:
        if action_timeout and time.time() - started > action_timeout:
            if print_stdout:
                cprint('Monitoring aborted due to action-timeout.', 'red')
            break

        if sys.stdout.isatty():
            # if this is a tty-like device, we can send ANSI codes
            # to draw an auto-updating view
            # otherwise, just wait for the job to finish and print it *once*
            # all at the end
            fetch(seen)

        time.sleep(max(2.5, interval))
        json = get().json
        if json.finished:
            # one final fetch so the last statuses are displayed
            fetch(seen)
            break
    if print_stdout:
        cprint('------End of Standard Out Stream--------\n', 'red')
    return get().json.status


def monitor(response, session, print_stdout=True, action_timeout=None, interval=5):
    """Poll a unified job until its events are processed, streaming its stdout.

    Events are printed strictly in order: an event from later in the stream
    than expected is skipped until the preceding lines have arrived.

    :param response: the job page returned from the launch
    :param session: unused here; kept for signature parity with monitor_workflow()
    :param print_stdout: when False, poll without printing anything
    :param action_timeout: optional wall-clock limit (seconds) before giving up
    :param interval: requested poll interval; effective minimum is 2.5 seconds
    :returns: the job's final status string
    """
    get = response.url.get
    payload = {'order_by': 'start_line', 'no_truncate': True}
    if response.type == 'job':
        events = response.related.job_events.get
    else:
        events = response.related.events.get

    # next stdout line number we expect to print
    next_line = 0

    def fetch(next_line):
        for result in events(**payload).json.results:
            if result['start_line'] != next_line:
                # If this event is a line from _later_ in the stdout,
                # it means that the events didn't arrive in order;
                # skip it for now and wait until the prior lines arrive and are
                # printed
                continue
            stdout = to_str(result.get('stdout'))
            if stdout and print_stdout:
                print(stdout)
            next_line = result['end_line']
        return next_line

    if print_stdout:
        cprint('------Starting Standard Out Stream------', 'red')

    started = time.time()
    while True:
        if action_timeout and time.time() - started > action_timeout:
            if print_stdout:
                cprint('Monitoring aborted due to action-timeout.', 'red')
            break
        next_line = fetch(next_line)
        if next_line:
            # only request events we haven't printed yet
            payload['start_line__gte'] = next_line

        time.sleep(max(2.5, interval))
        json = get().json
        if json.event_processing_finished is True or json.status in ('error', 'canceled'):
            # final fetch to flush any remaining buffered lines
            fetch(next_line)
            break
    if print_stdout:
        cprint('------End of Standard Out Stream--------\n', 'red')
    return get().json.status
0707010000005D000081A400000000000000000000000166846B92000008E2000000000000000000000000000000000000001F00000000awx-24.6.1/awxkit/cli/utils.pyfrom __future__ import print_function

from argparse import ArgumentParser
import os
import sys
import threading

class _ColorState(threading.local):
    """Thread-local color toggle.

    Declaring ``enabled`` as a class attribute on a ``threading.local``
    subclass makes the default visible in *every* thread; the previous
    ``threading.local()`` instance only had ``enabled`` set in the thread
    that imported this module, so ``_color.enabled`` raised AttributeError
    from any other thread.
    """

    enabled = True


_color = _ColorState()


__all__ = ['CustomRegistryMeta', 'HelpfulArgumentParser', 'disable_color', 'color_enabled', 'colored', 'cprint', 'STATUS_COLORS']


# Job status value -> termcolor-style color name used when rendering status
# text in the CLI (consumed via `colored`/`cprint` below).
STATUS_COLORS = {
    'new': 'grey',
    'pending': 'grey',
    'running': 'yellow',
    'successful': 'green',
    'failed': 'red',
    'error': 'red',
    'canceled': 'grey',
}


class CustomRegistryMeta(type):
    """Metaclass exposing every direct subclass of its instances through a
    ``registry`` property mapping ``subclass.name`` -> subclass."""

    @property
    def registry(cls):
        return {command.name: command for command in cls.__subclasses__()}


class HelpfulArgumentParser(ArgumentParser):
    """ArgumentParser that prints full usage text on errors and tolerates
    -h/--help anywhere on the command line (help flags are stripped before
    parsing so awx-cli can print its own usage info instead)."""

    def error(self, message):  # pragma: nocover
        """Prints a usage message incorporating the message to stderr and
        exits.
        If you override this in a subclass, it should not return -- it
        should either exit or raise an exception.
        """
        self.print_help(sys.stderr)
        self._print_message('\n')
        self.exit(2, '%s: %s\n' % (self.prog, message))

    def _parse_known_args(self, args, ns, *extra, **kwargs):
        # Accept and forward any extra parameters (argparse started passing
        # an additional `intermixed` argument in Python 3.12.5/3.13, which
        # made the old two-argument override raise TypeError).
        for arg in ('-h', '--help'):
            # the -h argument is extraneous; if you leave it off,
            # awx-cli will just print usage info
            if arg in args:
                args.remove(arg)
        return super(HelpfulArgumentParser, self)._parse_known_args(args, ns, *extra, **kwargs)


def color_enabled():
    """Report whether colorized output is currently switched on for this thread."""
    return _color.enabled


def disable_color():
    """Switch colorized output off for the current thread."""
    _color.enabled = False


# termcolor-style color name -> ANSI SGR foreground code (30-37).
COLORS = dict(
    list(
        zip(
            [
                'grey',
                'red',
                'green',
                'yellow',
                'blue',
                'magenta',
                'cyan',
                'white',
            ],
            list(range(30, 38)),
        )
    )
)


def colored(text, color=None):
    """Wrap *text* in ANSI color escape sequences.

    Honors the thread-local color toggle and the ANSI_COLORS_DISABLED
    environment variable; returns *text* unchanged when color is off.
    """
    if _color.enabled and os.getenv('ANSI_COLORS_DISABLED') is None:
        if color is not None:
            text = '\033[%dm%s' % (COLORS[color], text)
        text += '\033[0m'
    return text


def cprint(text, color, **kwargs):
    """print() wrapper that colorizes *text* when color output is enabled;
    extra keyword args are forwarded to print()."""
    rendered = colored(text, color) if _color.enabled else text
    print(rendered, **kwargs)
0707010000005E000081A400000000000000000000000166846B92000003D9000000000000000000000000000000000000001C00000000awx-24.6.1/awxkit/config.pyimport types
import os

from .utils import (
    PseudoNamespace,
    load_credentials,
    load_projects,
    to_bool,
)

config = PseudoNamespace()


def getvalue(self, name):
    """Dictionary-style lookup helper; bound onto ``config`` below so it can
    mimic pytest's ``config.getvalue`` interface."""
    return self[name]


# Seed the config namespace from the environment.
if os.getenv('AWXKIT_BASE_URL'):
    config.base_url = os.getenv('AWXKIT_BASE_URL')

if os.getenv('AWXKIT_CREDENTIAL_FILE'):
    config.credentials = load_credentials(os.getenv('AWXKIT_CREDENTIAL_FILE'))

if os.getenv('AWXKIT_PROJECT_FILE'):
    # Read the path from the environment.  The previous code looked the env
    # var *name* up in `config` itself (`config.get('AWXKIT_PROJECT_FILE')`),
    # which is never a config key, so the configured project file was
    # silently ignored.
    config.project_urls = load_projects(os.getenv('AWXKIT_PROJECT_FILE'))

# kludge to mimic pytest.config
config.getvalue = types.MethodType(getvalue, config)

config.assume_untrusted = config.get('assume_untrusted', True)

config.client_connection_attempts = int(os.getenv('AWXKIT_CLIENT_CONNECTION_ATTEMPTS', 5))
config.prevent_teardown = to_bool(os.getenv('AWXKIT_PREVENT_TEARDOWN', False))
config.use_sessions = to_bool(os.getenv('AWXKIT_SESSIONS', False))
config.api_base_path = os.getenv('AWXKIT_API_BASE_PATH', '/api/')
0707010000005F000081A400000000000000000000000166846B92000004DB000000000000000000000000000000000000002000000000awx-24.6.1/awxkit/exceptions.pyclass Common(Exception):
    def __init__(self, status_string='', message=''):
        """Accept either a status string plus message, or a wrapped exception.

        When *status_string* is an exception instance, its args are forwarded
        to Exception.__init__ and both attributes default to ''.
        """
        if isinstance(status_string, Exception):
            self.status_string = ''
            # Keep __str__/__getitem__ usable on the wrapped-exception path;
            # previously `msg` was never set here and __str__ would raise.
            self.msg = ''
            # Unpack the wrapped exception's args tuple; exception instances
            # are not iterable, so the old `*status_string` raised TypeError.
            return super(Common, self).__init__(*status_string.args)
        self.status_string = status_string
        self.msg = message

    def __getitem__(self, val):
        # Tuple-index trick: exc[0] -> status_string, exc[1] -> msg.  Because
        # this indexes a real tuple, negative indices and slices also work.
        return (self.status_string, self.msg)[val]

    def __repr__(self):
        """Mirror __str__ so logs and the REPL show the same text."""
        return str(self)

    def __str__(self):
        """Render as '<status_string> - <msg>'."""
        return f'{self.status_string} - {self.msg}'


# Exception hierarchy raised by the awxkit HTTP client layer.  Each subclass
# presumably corresponds to the HTTP response status matching its name (e.g.
# BadRequest -> 400, NotFound -> 404) — the actual mapping lives elsewhere in
# the package; confirm there before relying on it.


class BadRequest(Common):
    pass


class Conflict(Common):
    pass


class Duplicate(Common):
    pass


class Forbidden(Common):
    pass


class InternalServerError(Common):
    pass


class BadGateway(Common):
    pass


class LicenseExceeded(Common):
    pass


class LicenseInvalid(Common):
    pass


class MethodNotAllowed(Common):
    pass


class NoContent(Common):
    # Class-level default; instances normally carry `msg` set by
    # Common.__init__ instead.
    message = ''


class NotFound(Common):
    pass


class PaymentRequired(Common):
    pass


class Unauthorized(Common):
    pass


class Unknown(Common):
    pass


# Raised by awxkit.utils.poll_until when a timeout elapses.
class WaitUntilTimeout(Common):
    pass


class UnexpectedAWXState(Common):
    pass


class IsMigrating(Common):
    pass


# NOTE: subclasses Exception directly, not Common.
class ImportExportError(Exception):
    pass
07070100000060000041ED00000000000000000000000266846B9200000000000000000000000000000000000000000000001A00000000awx-24.6.1/awxkit/scripts07070100000061000081A400000000000000000000000166846B9200000000000000000000000000000000000000000000002600000000awx-24.6.1/awxkit/scripts/__init__.py07070100000062000081ED00000000000000000000000166846B9200000E6D000000000000000000000000000000000000002B00000000awx-24.6.1/awxkit/scripts/basic_session.pyfrom argparse import ArgumentParser
import logging
import pdb  # noqa
import sys
import os

from awxkit import api, config, utils, exceptions, WSClient  # noqa
from awxkit.awx.utils import check_related, delete_all, get_all, uses_sessions  # noqa
from awxkit.awx.utils import as_user as _as_user

if str(os.getenv('AWXKIT_DEBUG', 'false')).lower() in ['true', '1']:
    logging.basicConfig(level='DEBUG')


def parse_args():
    """Build the interactive-session argument parser and return only the
    recognized arguments (unrecognized ones are ignored)."""
    default_url = os.getenv('AWXKIT_BASE_URL', 'http://127.0.0.1:8013')
    default_credential_file = os.getenv('AWXKIT_CREDENTIAL_FILE', utils.not_provided)

    parser = ArgumentParser()
    parser.add_argument(
        '--base-url',
        dest='base_url',
        default=default_url,
        help='URL for AWX.  Defaults to env var AWXKIT_BASE_URL or http://127.0.0.1:8013',
    )
    parser.add_argument(
        '-c',
        '--credential-file',
        dest='credential_file',
        default=default_credential_file,
        help='Path for yml credential file.  If not provided or set by AWXKIT_CREDENTIAL_FILE, set '
        'AWXKIT_USER and AWXKIT_USER_PASSWORD env vars for awx user credentials.',
    )
    parser.add_argument(
        '-p',
        '--project-file',
        dest='project_file',
        default=os.getenv('AWXKIT_PROJECT_FILE'),
        help='Path for yml project config file.If not provided or set by AWXKIT_PROJECT_FILE, projects will not have default SCM_URL',
    )
    parser.add_argument('-f', '--file', dest='akit_script', default=False, help='akit script file to run in interactive session.')
    parser.add_argument('-x', '--non-interactive', action='store_true', dest='non_interactive', help='Do not run in interactive mode.')
    known, _unknown = parser.parse_known_args()
    return known


def main():
    """Entry point: parse args, connect to AWX, optionally exec a script.

    Populates the module globals ``akit_args``, ``root`` and (when the server
    offers it) ``v2`` so an interactive session started via
    load_interactive() can use them.  ``exc`` and ``rc`` are assigned for
    post-mortem inspection in the interactive shell (hence the noqa markers).
    """
    exc = None
    try:
        global akit_args
        akit_args = parse_args()
        config.base_url = akit_args.base_url

        if akit_args.credential_file != utils.not_provided:
            config.credentials = utils.load_credentials(akit_args.credential_file)
        else:
            # Fall back to env-var (or default admin/password) credentials.
            config.credentials = utils.PseudoNamespace(
                {'default': {'username': os.getenv('AWXKIT_USER', 'admin'), 'password': os.getenv('AWXKIT_USER_PASSWORD', 'password')}}
            )

        if akit_args.project_file != utils.not_provided:
            config.project_urls = utils.load_projects(akit_args.project_file)

        global root
        root = api.Api()
        # Prefer session auth when the server supports it; otherwise authtoken.
        if uses_sessions(root.connection):
            config.use_sessions = True
            root.load_session().get()
        else:
            root.load_authtoken().get()

        if 'v2' in root.available_versions:
            global v2
            v2 = root.available_versions.v2.get()

        rc = 0
        if akit_args.akit_script:
            try:
                # SECURITY NOTE: executes arbitrary user-supplied Python from
                # the -f/--file argument in this module's global namespace.
                exec(open(akit_args.akit_script).read(), globals())
            except Exception as e:
                exc = e
                raise
    except Exception as e:
        exc = e  # noqa
        rc = 1  # noqa
        raise


def as_user(username, password=None):
    """Impersonation helper for the interactive session, bound to the
    module-global ``root`` connection set up by main().  Delegates to
    awxkit.awx.utils.as_user."""
    return _as_user(root, username, password)


def load_interactive():
    """Launch the interactive shell: IPython when importable, otherwise the
    stdlib code.interact REPL.

    When -h/--help appears on the command line, just parse args (which
    prints argparse help) and return.
    """
    if '--help' in sys.argv or '-h' in sys.argv:
        return parse_args()

    try:
        from IPython import start_ipython

        basic_session_path = os.path.abspath(__file__)
        if basic_session_path[-1] == 'c':  # start_ipython doesn't work w/ .pyc
            basic_session_path = basic_session_path[:-1]
        # Re-run this module inside IPython; -i keeps the session interactive
        # and '--' forwards the remaining CLI args to the script.
        sargs = ['-i', basic_session_path]
        if sys.argv[1:]:
            sargs.extend(['--'] + sys.argv[1:])
        return start_ipython(argv=sargs)
    except ImportError:
        from code import interact

        main()
        interact('', local=dict(globals(), **locals()))


if __name__ == '__main__':
    main()
07070100000063000041ED00000000000000000000000266846B9200000000000000000000000000000000000000000000001800000000awx-24.6.1/awxkit/utils07070100000064000081A400000000000000000000000166846B92000032A3000000000000000000000000000000000000002400000000awx-24.6.1/awxkit/utils/__init__.pyfrom datetime import datetime, timedelta, tzinfo
import inspect
import logging
import random
import shlex
import types
import time
import sys
import re
import os

import yaml

from awxkit.words import words
from awxkit.exceptions import WaitUntilTimeout

log = logging.getLogger(__name__)

# Cloud kinds recognized by awxkit utilities (credential/inventory sources).
cloud_types = (
    'aws',
    'azure',
    'azure_ad',
    'azure_classic',
    'azure_rm',
    'cloudforms',
    'ec2',
    'gce',
    'openstack',
    'openstack_v2',
    'openstack_v3',
    'rhv',
    'satellite6',
    'tower',
    'vmware',
)
# Credential type kinds handled alongside the cloud types above.
credential_type_kinds = ('cloud', 'net')

# Sentinel meaning "argument was not supplied"; lets callers distinguish an
# explicit None from an omitted value (see update_payload below).
not_provided = 'xx__NOT_PROVIDED__xx'


def super_dir_set(cls):
    """Collect every attribute name visible anywhere in *cls*'s MRO."""
    return {name for klass in inspect.getmro(cls) for name in dir(klass)}


class NoReloadError(Exception):
    """Raised when an object cannot be re-fetched/reloaded.

    NOTE(review): not raised anywhere in this module — confirm semantics at
    the call sites elsewhere in the package.
    """

    pass


class PseudoNamespace(dict):
    """A dict whose keys are also attributes (``ns.key`` == ``ns['key']``).

    Nested dicts — including dicts found inside lists and tuples — are
    converted to PseudoNamespace recursively, both at construction time and
    on item assignment / update().
    """

    def __init__(self, _d=None, **loaded):
        """Build from an optional mapping plus keyword overrides.

        The input mapping is shallow-copied so merging *loaded* into it no
        longer mutates the caller's dict (the previous implementation called
        ``_d.update(loaded)`` on the caller's object).  NOTE(review): dict
        items inside nested *lists* are still converted in place and thus
        shared with the caller, matching prior behavior.
        """
        if not isinstance(_d, dict):
            _d = {}
        else:
            _d = dict(_d)  # don't mutate the caller's mapping
        _d.update(loaded)
        super(PseudoNamespace, self).__init__(_d)

        # Convert nested structures into PseudoNamespaces
        for k, v in _d.items():
            tuple_converted = False
            if isinstance(v, tuple):
                # Temporarily convert tuples to lists so elements can be
                # replaced, then convert back below.
                self[k] = v = list(v)
                tuple_converted = True

            if isinstance(v, list):
                for i, item in enumerate(v):
                    if isinstance(item, dict):
                        self[k][i] = PseudoNamespace(item)
                if tuple_converted:
                    self[k] = tuple(self[k])
            elif isinstance(v, dict):
                self[k] = PseudoNamespace(v)

    def __getattr__(self, attr):
        try:
            return self.__getitem__(attr)
        except KeyError:
            raise AttributeError("{!r} has no attribute {!r}".format(self.__class__.__name__, attr))

    def __setattr__(self, attr, value):
        self.__setitem__(attr, value)

    def __setitem__(self, key, value):
        # Convert incoming dicts (and dicts nested in lists/tuples) so every
        # stored mapping supports attribute access.
        if not isinstance(value, PseudoNamespace):
            tuple_converted = False
            if isinstance(value, dict):
                value = PseudoNamespace(value)
            elif isinstance(value, tuple):
                value = list(value)
                tuple_converted = True

            if isinstance(value, list):
                for i, item in enumerate(value):
                    if isinstance(item, dict) and not isinstance(item, PseudoNamespace):
                        value[i] = PseudoNamespace(item)
                if tuple_converted:
                    value = tuple(value)

        super(PseudoNamespace, self).__setitem__(key, value)

    def __delattr__(self, attr):
        self.__delitem__(attr)

    def __dir__(self):
        attrs = super_dir_set(self.__class__)
        attrs.update(self.keys())
        return sorted(attrs)

    # override builtin in order to have updated content become
    # PseudoNamespaces if applicable
    def update(self, iterable=None, **kw):
        if iterable:
            if hasattr(iterable, 'keys') and isinstance(iterable.keys, (types.FunctionType, types.BuiltinFunctionType, types.MethodType)):
                for key in iterable:
                    self[key] = iterable[key]
            else:
                for k, v in iterable:
                    self[k] = v
        for k in kw:
            self[k] = kw[k]


def is_relative_endpoint(candidate):
    """True for strings that look like relative API endpoints ('/api/...')."""
    return isinstance(candidate, str) and candidate.startswith('/api/')


def is_class_or_instance(obj, cls):
    """Return True when *obj* is *cls* itself or an instance of it."""
    if obj is cls:
        return True
    return isinstance(obj, cls)


def filter_by_class(*item_class_tuples):
    """For each (item, class) pair, yield (in order): the item when it is an
    instance/subclass of the class or a (class, dict) tuple whose head is;
    the class itself (or (class, dict)) when item is truthy but unrelated;
    or None when item is falsy.

    ```
    _cred = Credential()
    inv, org, cred = filter_base_subclasses((True, Inventory), (None, Organization), (_cred, Credential))
    inv == Inventory
    org == None
    cred == _cred
    ```
    """
    filtered = []
    for candidate, cls in item_class_tuples:
        if not candidate:
            filtered.append(None)
            continue
        packed = isinstance(candidate, tuple)
        target = candidate[0] if packed else candidate
        if is_class_or_instance(target, cls) or is_proper_subclass(target, cls):
            filtered.append(candidate)
        else:
            filtered.append((cls, candidate[1]) if packed else cls)
    return filtered


def load_credentials(filename=None):
    """Load a YAML credentials mapping.

    Defaults to ./credentials.yaml when *filename* is None; raises a plain
    Exception when the resolved path is not a file.
    """
    if filename is None:
        path = os.path.join(os.getcwd(), 'credentials.yaml')
    else:
        path = os.path.abspath(filename)

    if not os.path.isfile(path):
        raise Exception('Unable to load credentials file at %s' % path)
    with open(path) as credentials_fh:
        return yaml.safe_load(credentials_fh)


def load_projects(filename=None):
    """Load a YAML project-URL mapping; returns {} when no filename is given.

    Raises a plain Exception when the resolved path is not a file.
    """
    if filename is None:
        return {}
    path = os.path.abspath(filename)

    if not os.path.isfile(path):
        raise Exception('Unable to load projects file at %s' % path)
    with open(path) as projects_fh:
        return yaml.safe_load(projects_fh)


def logged_sleep(duration, level='DEBUG', stack_depth=1):
    """time.sleep wrapper that logs the sleep against the *caller's* module logger.

    :param duration: seconds to sleep.
    :param level: logging level name, e.g. 'DEBUG'.
    :param stack_depth: frames to walk up to find the "calling" module;
        wrappers pass a larger depth (see poll_until's stack_depth=3).
    """
    level = getattr(logging, level.upper())
    # based on
    # http://stackoverflow.com/questions/1095543/get-name-of-calling-functions-module-in-python
    try:
        frm = inspect.stack()[stack_depth]
        logger = logging.getLogger(inspect.getmodule(frm[0]).__name__)
    except AttributeError:  # module is None (interactive shell)
        logger = log  # fall back to utils logger
    logger.log(level, 'Sleeping for {0} seconds.'.format(duration))
    time.sleep(duration)


def poll_until(function, interval=5, timeout=0):
    """Polls `function` every `interval` seconds until it returns a non-falsey
    value. If this does not occur within the provided `timeout`,
    a WaitUntilTimeout is raised.

    Each attempt will log the time that has elapsed since the original
    request.
    """
    start_time = time.time()

    while True:
        elapsed = time.time() - start_time
        log.debug('elapsed: {0:4.1f}'.format(elapsed))

        value = function()
        if value:
            return value

        # The timeout check happens *after* an attempt, so `function` is
        # always called at least once, even with timeout=0.
        if elapsed > timeout:
            break

        # stack_depth=3 makes logged_sleep attribute the sleep to our caller.
        logged_sleep(interval, stack_depth=3)

    msg = 'Timeout after {0} seconds.'.format(elapsed)
    raise WaitUntilTimeout(None, msg)


def gen_utf_char():
    """Return one random printable Unicode character (code points 32..0x10FFFF)."""
    while True:
        candidate = chr(random.randint(32, 0x10FFFF))
        if candidate.isprintable():
            return candidate


def random_int(maxint=sys.maxsize):
    """Return a random integer in [0, maxint], inclusive."""
    upper = int(maxint)  # renamed local: don't shadow the `max` builtin
    return random.randint(0, upper)


def random_ipv4():
    """Generate a random dotted-quad IPv4 address (octets 1-255); useful for testing."""
    octets = (str(random.randint(1, 255)) for _ in range(4))
    return ".".join(octets)


def random_ipv6():
    """Generate a random IPv6 address of eight 16-bit hex groups; useful for testing."""
    groups = ('{0:x}'.format(random.randint(0, 2**16 - 1)) for _ in range(8))
    return ':'.join(groups)


def random_loopback_ip():
    """Generate a random 127.x.y.z loopback address; useful for testing."""
    suffix = [str(random_int(255)) for _ in range(3)]
    return "127.{}.{}.{}".format(*suffix)


def random_utf8(*args, **kwargs):
    """Return a random printable string restricted to the Basic Multilingual Plane.

    Characters outside the BMP are replaced with U+FFFD because of a
    ChromeDriver bug where `send_keys` raises on non-BMP characters.
    Code pulled from http://stackoverflow.com/a/3220210.  Length comes from
    the first positional argument or the `length` kwarg (default 10).
    """
    if args:
        length = args[0]
    else:
        length = kwargs.get('length', 10)
    raw = ''.join(gen_utf_char() for _ in range(length))
    non_bmp = re.compile('[^\u0000-\uD7FF\uE000-\uFFFF]', re.UNICODE)
    return non_bmp.sub('\uFFFD', raw)


def random_title(num_words=2, non_ascii=True):
    """Build a random resource title from dictionary words plus a suffix.

    :param num_words: number of random words (from awxkit.words) to join.
    :param non_ascii: when True append one random (possibly non-ASCII)
        character, otherwise three random digits.  Ignored when the
        AWXKIT_FORCE_ONLY_ASCII env var is set, which forces a short
        numeric suffix instead.
    """
    base = ''.join([random.choice(words) for word in range(num_words)])
    if os.getenv('AWXKIT_FORCE_ONLY_ASCII', False):
        title = ''.join([base, ''.join(str(random_int(99)))])
    else:
        if non_ascii:
            title = ''.join([base, random_utf8(1)])
        else:
            title = ''.join([base, ''.join([str(random_int()) for _ in range(3)])])
    return title


def update_payload(payload, fields, kwargs):
    """Copy each listed field's kwargs value into payload when supplied.

    A kwarg value equal to the module-level `not_provided` sentinel removes
    an existing key from the payload instead.  Returns the (mutated) payload.
    """
    absent = 'xx_UPDATE_PAYLOAD_FIELD_NOT_PROVIDED_AS_KWARG_xx'
    for field in fields:
        value = kwargs.get(field, absent)
        if value not in (not_provided, absent):
            payload[field] = value
        elif value == not_provided and field in payload:
            payload.pop(field)
    return payload


def set_payload_foreign_key_args(payload, fk_fields, kwargs):
    """Populate foreign-key fields on payload from kwargs.

    Each related value may be an integer primary key or an object with an
    ``id`` attribute; missing/None values are skipped.  Anything else raises
    AttributeError.  Returns the (mutated) payload.
    """
    fields = (fk_fields,) if isinstance(fk_fields, str) else fk_fields

    for name in fields:
        related = kwargs.get(name)
        if related is None:
            continue
        if isinstance(related, int):
            payload.update(**{name: int(related)})
        elif hasattr(related, 'id'):
            payload.update(**{name: related.id})
        else:
            raise AttributeError(f'Related field {name} must be either integer of pkid or object')
    return payload


def to_str(obj):
    """Decode bytes to a UTF-8 str; pass anything else through unchanged."""
    return obj.decode('utf-8') if isinstance(obj, bytes) else obj


def to_bool(obj):
    """Loosely interpret *obj* as a boolean.

    Strings are False only for the usual "off" spellings, case-insensitively:
    'false', 'off', 'no', 'n', '0' and ''.  Everything else goes through bool().
    """
    if isinstance(obj, str):
        return obj.lower() not in ('false', 'off', 'no', 'n', '0', '')
    return bool(obj)


def load_json_or_yaml(obj):
    """Parse a JSON or YAML document (YAML is a superset of JSON).

    :raises TypeError: when *obj* is not something yaml can read —
        safe_load raises AttributeError for such inputs, which is
        translated here into a friendlier TypeError.
    """
    try:
        return yaml.safe_load(obj)
    except AttributeError:
        raise TypeError("Provide valid YAML/JSON.")


def get_class_if_instance(obj):
    """Return obj's class when obj is an instance; return obj unchanged when
    it is already a class."""
    if inspect.isclass(obj):
        return obj
    return obj.__class__


def class_name_to_kw_arg(class_name):
    """'ClassName' -> 'class_name', also splitting digit boundaries and
    dropping any resulting 'v2_' segment."""
    snake = re.sub(r'([a-z])([A-Z0-9])', r'\1_\2', class_name)
    snake = re.sub(r'([0-9])([a-zA-Z])', r'\1_\2', snake).lower()
    return snake.replace('v2_', '')


def is_proper_subclass(obj, cls):
    """True when obj is a class that strictly subclasses cls (and is not cls)."""
    if not inspect.isclass(obj):
        return False
    return obj is not cls and issubclass(obj, cls)


def are_same_endpoint(first, second):
    """Equivalence check of two urls, ignoring any 'www.' text and the query string."""

    def normalize(url):
        return url.replace('www.', '').split('?')[0]

    return normalize(first) == normalize(second)


def utcnow():
    """Provide a wrapped copy of the built-in utcnow that can be easily mocked.

    NOTE(review): returns a *naive* datetime, and datetime.utcnow() is
    deprecated as of Python 3.12; switching to datetime.now(timezone.utc)
    would return an aware datetime, so audit callers before changing.
    """
    return datetime.utcnow()


class UTC(tzinfo):
    """Concrete tzinfo implementation for UTC: zero offset, no DST.

    For more information, see:
    https://docs.python.org/2/library/datetime.html
    """

    def utcoffset(self, dt):
        return timedelta(0)

    def dst(self, dt):
        return timedelta(0)

    def tzname(self, dt):
        return 'UTC'


def seconds_since_date_string(date_str, fmt='%Y-%m-%dT%H:%M:%S.%fZ', default_tz=UTC()):
    """Return the number of seconds since the date and time indicated by a date
    string and its corresponding format string.

    :param date_str: string representing a date and time.
    :param fmt: Formatting string - by default, this value is set to parse
        date strings originating from awx API response data.
    :param default_tz: Assumed tzinfo if the parsed date_str does not include tzinfo

    For more information on python date string formatting directives, see
        https://docs.python.org/2/library/datetime.html#strftime-strptime-behavior
    """
    parsed_datetime = datetime.strptime(date_str, fmt)

    # Assume default_tz for naive timestamps so the subtraction below
    # compares two aware datetimes.
    if not parsed_datetime.tzinfo:
        parsed_datetime = parsed_datetime.replace(tzinfo=default_tz)

    elapsed = utcnow().replace(tzinfo=UTC()) - parsed_datetime

    return elapsed.total_seconds()


def to_ical(dt):
    """Format a datetime as an iCal UTC timestamp (YYYYMMDDTHHMMSSZ)."""
    stamp = dt.strftime("%Y%m%dT%H%M%SZ")
    return re.sub('[:-]', '', stamp)


def version_from_endpoint(endpoint):
    """Extract the API version segment from an endpoint path.

    '/api/v2/jobs/' -> 'v2'; paths directly under /api/ map to 'common'.
    """
    segment = endpoint.split('/api/')[1].split('/')[0]
    return segment or 'common'


def args_string_to_list(args):
    """Converts cmdline arg string to list of args.  The reverse of
    subprocess.list2cmdline(); heavily inspired by
    robot.utils.argumentparser.cmdline2list().

    :param args: a shell-style command line string.
    :returns: list of str tokens.
    """
    lexer = shlex.shlex(args, posix=True)
    lexer.escapedquotes = '"\''
    lexer.commenters = ''
    lexer.whitespace_split = True
    # In posix mode shlex already yields str tokens; the previous
    # `token.decode('utf-8')` was a Python 2 leftover that raised
    # AttributeError on every call under Python 3.
    return list(lexer)


def is_list_or_tuple(item):
    """True when item is exactly a list or a tuple (not any iterable)."""
    return isinstance(item, (list, tuple))
07070100000065000081A400000000000000000000000166846B9200000C32000000000000000000000000000000000000002400000000awx-24.6.1/awxkit/utils/toposort.py#######################################################################
# Implements a topological sort algorithm.
#
# Copyright 2014 True Blade Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Notes:
#  Based on http://code.activestate.com/recipes/578272-topological-sort
#   with these major changes:
#    Added unittests.
#    Deleted doctests (maybe not the best idea in the world, but it cleans
#     up the docstring).
#    Moved functools import to the top of the file.
#    Changed assert to a ValueError.
#    Changed iter[items|keys] to [items|keys], for python 3
#     compatibility. I don't think it matters for python 2 these are
#     now lists instead of iterables.
#    Copy the input so as to leave it unmodified.
#    Renamed function from toposort2 to toposort.
#    Handle empty input.
#    Switch tests to use set literals.
#
########################################################################

from functools import reduce as _reduce

__all__ = ['toposort', 'CircularDependencyError']


class CircularDependencyError(ValueError):
    """Raised by toposort when the dependency graph contains a cycle."""

    def __init__(self, data):
        # Sorting keeps the rendered mapping deterministic, which keeps
        # error messages (and doctests) stable.
        rendered = ', '.join('{!r}:{!r}'.format(key, value) for key, value in sorted(data.items()))
        message = 'Circular dependencies exist among these items: {{{}}}'.format(rendered)  # noqa
        super(CircularDependencyError, self).__init__(message)
        self.data = data


def toposort(data):
    """Dependencies are expressed as a dictionary whose keys are items
    and whose values are a set of dependent items. Output is a generator of
    sets in topological order. The first set consists of items with no
    dependences, each subsequent set consists of items that depend upon
    items in the preceding sets.

    :raises CircularDependencyError: when a cycle prevents full ordering.
    """

    # Special case empty input.
    if len(data) == 0:
        return

    # Copy the input *including the value sets*: the previous shallow
    # `data.copy()` shared the sets with the caller, so the self-dependency
    # discard below mutated the caller's input despite the documented
    # "leave it unmodified" contract.
    data = {item: set(deps) for item, deps in data.items()}

    # Ignore self dependencies.
    for k, v in data.items():
        v.discard(k)
    # Find all items that don't depend on anything.
    extra_items_in_deps = _reduce(set.union, data.values()) - set(data.keys())
    # Add empty dependences where needed.
    data.update({item: set() for item in extra_items_in_deps})
    while True:
        # Everything with no remaining dependencies can be emitted together.
        ordered = set(item for item, dep in data.items() if len(dep) == 0)
        if not ordered:
            break
        yield ordered
        data = {item: (dep - ordered) for item, dep in data.items() if item not in ordered}
    if len(data) != 0:
        raise CircularDependencyError(data)
07070100000066000081A400000000000000000000000166846B92000053EE000000000000000000000000000000000000001B00000000awx-24.6.1/awxkit/words.py# list of random English nouns used for resource name utilities

words = [
    'People',
    'History',
    'Way',
    'Art',
    'World',
    'Information',
    'Map',
    'Two',
    'Family',
    'Government',
    'Health',
    'System',
    'Computer',
    'Meat',
    'Year',
    'Thanks',
    'Music',
    'Person',
    'Reading',
    'Method',
    'Data',
    'Food',
    'Understanding',
    'Theory',
    'Law',
    'Bird',
    'Literature',
    'Problem',
    'Software',
    'Control',
    'Knowledge',
    'Power',
    'Ability',
    'Economics',
    'Love',
    'Internet',
    'Television',
    'Science',
    'Library',
    'Nature',
    'Fact',
    'Product',
    'Idea',
    'Temperature',
    'Investment',
    'Area',
    'Society',
    'Activity',
    'Story',
    'Industry',
    'Media',
    'Thing',
    'Oven',
    'Community',
    'Definition',
    'Safety',
    'Quality',
    'Development',
    'Language',
    'Management',
    'Player',
    'Variety',
    'Video',
    'Week',
    'Security',
    'Country',
    'Exam',
    'Movie',
    'Organization',
    'Equipment',
    'Physics',
    'Analysis',
    'Policy',
    'Series',
    'Thought',
    'Basis',
    'Boyfriend',
    'Direction',
    'Strategy',
    'Technology',
    'Army',
    'Camera',
    'Freedom',
    'Paper',
    'Environment',
    'Child',
    'Instance',
    'Month',
    'Truth',
    'Marketing',
    'University',
    'Writing',
    'Article',
    'Department',
    'Difference',
    'Goal',
    'News',
    'Audience',
    'Fishing',
    'Growth',
    'Income',
    'Marriage',
    'User',
    'Combination',
    'Failure',
    'Meaning',
    'Medicine',
    'Philosophy',
    'Teacher',
    'Communication',
    'Night',
    'Chemistry',
    'Disease',
    'Disk',
    'Energy',
    'Nation',
    'Road',
    'Role',
    'Soup',
    'Advertising',
    'Location',
    'Success',
    'Addition',
    'Apartment',
    'Education',
    'Math',
    'Moment',
    'Painting',
    'Politics',
    'Attention',
    'Decision',
    'Event',
    'Property',
    'Shopping',
    'Student',
    'Wood',
    'Competition',
    'Distribution',
    'Entertainment',
    'Office',
    'Population',
    'President',
    'Unit',
    'Category',
    'Cigarette',
    'Context',
    'Introduction',
    'Opportunity',
    'Performance',
    'Driver',
    'Flight',
    'Length',
    'Magazine',
    'Newspaper',
    'Relationship',
    'Teaching',
    'Cell',
    'Dealer',
    'Debate',
    'Finding',
    'Lake',
    'Member',
    'Message',
    'Phone',
    'Scene',
    'Appearance',
    'Association',
    'Concept',
    'Customer',
    'Death',
    'Discussion',
    'Housing',
    'Inflation',
    'Insurance',
    'Mood',
    'Woman',
    'Advice',
    'Blood',
    'Effort',
    'Expression',
    'Importance',
    'Opinion',
    'Payment',
    'Reality',
    'Responsibility',
    'Situation',
    'Skill',
    'Statement',
    'Wealth',
    'Application',
    'City',
    'County',
    'Depth',
    'Estate',
    'Foundation',
    'Grandmother',
    'Heart',
    'Perspective',
    'Photo',
    'Recipe',
    'Studio',
    'Topic',
    'Collection',
    'Depression',
    'Imagination',
    'Passion',
    'Percentage',
    'Resource',
    'Setting',
    'Ad',
    'Agency',
    'College',
    'Connection',
    'Criticism',
    'Debt',
    'Description',
    'Memory',
    'Patience',
    'Secretary',
    'Solution',
    'Administration',
    'Aspect',
    'Attitude',
    'Director',
    'Personality',
    'Psychology',
    'Recommendation',
    'Response',
    'Selection',
    'Storage',
    'Version',
    'Alcohol',
    'Argument',
    'Complaint',
    'Contract',
    'Emphasis',
    'Highway',
    'Loss',
    'Membership',
    'Possession',
    'Preparation',
    'Steak',
    'Union',
    'Agreement',
    'Cancer',
    'Currency',
    'Employment',
    'Engineering',
    'Entry',
    'Interaction',
    'Limit',
    'Mixture',
    'Preference',
    'Region',
    'Republic',
    'Seat',
    'Tradition',
    'Virus',
    'Actor',
    'Classroom',
    'Delivery',
    'Device',
    'Difficulty',
    'Drama',
    'Election',
    'Engine',
    'Football',
    'Guidance',
    'Hotel',
    'Match',
    'Owner',
    'Priority',
    'Protection',
    'Suggestion',
    'Tension',
    'Variation',
    'Anxiety',
    'Atmosphere',
    'Awareness',
    'Bread',
    'Climate',
    'Comparison',
    'Confusion',
    'Construction',
    'Elevator',
    'Emotion',
    'Employee',
    'Employer',
    'Guest',
    'Height',
    'Leadership',
    'Mall',
    'Manager',
    'Operation',
    'Recording',
    'Respect',
    'Sample',
    'Transportation',
    'Boring',
    'Charity',
    'Cousin',
    'Disaster',
    'Editor',
    'Efficiency',
    'Excitement',
    'Extent',
    'Feedback',
    'Guitar',
    'Homework',
    'Leader',
    'Mom',
    'Outcome',
    'Permission',
    'Presentation',
    'Promotion',
    'Reflection',
    'Refrigerator',
    'Resolution',
    'Revenue',
    'Session',
    'Singer',
    'Tennis',
    'Basket',
    'Bonus',
    'Cabinet',
    'Childhood',
    'Church',
    'Clothes',
    'Coffee',
    'Dinner',
    'Drawing',
    'Hair',
    'Hearing',
    'Initiative',
    'Judgment',
    'Lab',
    'Measurement',
    'Mode',
    'Mud',
    'Orange',
    'Poetry',
    'Police',
    'Possibility',
    'Procedure',
    'Queen',
    'Ratio',
    'Relation',
    'Restaurant',
    'Satisfaction',
    'Sector',
    'Signature',
    'Significance',
    'Song',
    'Tooth',
    'Town',
    'Vehicle',
    'Volume',
    'Wife',
    'Accident',
    'Airport',
    'Appointment',
    'Arrival',
    'Assumption',
    'Baseball',
    'Chapter',
    'Committee',
    'Conversation',
    'Database',
    'Enthusiasm',
    'Error',
    'Explanation',
    'Farmer',
    'Gate',
    'Girl',
    'Hall',
    'Historian',
    'Hospital',
    'Injury',
    'Instruction',
    'Maintenance',
    'Manufacturer',
    'Meal',
    'Perception',
    'Pie',
    'Poem',
    'Presence',
    'Proposal',
    'Reception',
    'Replacement',
    'Revolution',
    'River',
    'Son',
    'Speech',
    'Tea',
    'Village',
    'Warning',
    'Winner',
    'Worker',
    'Writer',
    'Assistance',
    'Breath',
    'Buyer',
    'Chest',
    'Chocolate',
    'Conclusion',
    'Contribution',
    'Cookie',
    'Courage',
    'Dad',
    'Desk',
    'Drawer',
    'Establishment',
    'Examination',
    'Garbage',
    'Grocery',
    'Honey',
    'Impression',
    'Improvement',
    'Independence',
    'Insect',
    'Inspection',
    'Inspector',
    'King',
    'Ladder',
    'Menu',
    'Penalty',
    'Piano',
    'Potato',
    'Profession',
    'Professor',
    'Quantity',
    'Reaction',
    'Requirement',
    'Salad',
    'Sister',
    'Supermarket',
    'Tongue',
    'Weakness',
    'Wedding',
    'Affair',
    'Ambition',
    'Analyst',
    'Apple',
    'Assignment',
    'Assistant',
    'Bathroom',
    'Bedroom',
    'Beer',
    'Birthday',
    'Celebration',
    'Championship',
    'Cheek',
    'Client',
    'Consequence',
    'Departure',
    'Diamond',
    'Dirt',
    'Ear',
    'Fortune',
    'Friendship',
    'Snapewife',
    'Funeral',
    'Gene',
    'Girlfriend',
    'Hat',
    'Indication',
    'Intention',
    'Lady',
    'Midnight',
    'Negotiation',
    'Obligation',
    'Passenger',
    'Pizza',
    'Platform',
    'Poet',
    'Pollution',
    'Recognition',
    'Reputation',
    'Shirt',
    'Sir',
    'Speaker',
    'Stranger',
    'Surgery',
    'Sympathy',
    'Tale',
    'Throat',
    'Trainer',
    'Uncle',
    'Youth',
    'Time',
    'Work',
    'Film',
    'Water',
    'Money',
    'Example',
    'While',
    'Business',
    'Study',
    'Game',
    'Life',
    'Form',
    'Air',
    'Day',
    'Place',
    'Number',
    'Part',
    'Field',
    'Fish',
    'Back',
    'Process',
    'Heat',
    'Hand',
    'Experience',
    'Job',
    'Book',
    'End',
    'Point',
    'Type',
    'Home',
    'Economy',
    'Value',
    'Body',
    'Market',
    'Guide',
    'Interest',
    'State',
    'Radio',
    'Course',
    'Company',
    'Price',
    'Size',
    'Card',
    'List',
    'Mind',
    'Trade',
    'Line',
    'Care',
    'Group',
    'Risk',
    'Word',
    'Fat',
    'Force',
    'Key',
    'Light',
    'Training',
    'Name',
    'School',
    'Top',
    'Amount',
    'Level',
    'Order',
    'Practice',
    'Research',
    'Sense',
    'Service',
    'Piece',
    'Web',
    'Boss',
    'Sport',
    'Fun',
    'House',
    'Page',
    'Term',
    'Test',
    'Answer',
    'Sound',
    'Focus',
    'Matter',
    'Kind',
    'Soil',
    'Board',
    'Oil',
    'Picture',
    'Access',
    'Garden',
    'Range',
    'Rate',
    'Reason',
    'Future',
    'Site',
    'Demand',
    'Exercise',
    'Image',
    'Case',
    'Cause',
    'Coast',
    'Action',
    'Age',
    'Bad',
    'Boat',
    'Record',
    'Result',
    'Section',
    'Building',
    'Mouse',
    'Cash',
    'Class',
    'Nothing',
    'Period',
    'Plan',
    'Store',
    'Tax',
    'Side',
    'Subject',
    'Space',
    'Rule',
    'Stock',
    'Weather',
    'Chance',
    'Figure',
    'Man',
    'Model',
    'Source',
    'Beginning',
    'Earth',
    'Program',
    'Chicken',
    'Design',
    'Feature',
    'Head',
    'Material',
    'Purpose',
    'Question',
    'Rock',
    'Salt',
    'Act',
    'Birth',
    'Car',
    'Dog',
    'Object',
    'Scale',
    'Sun',
    'Note',
    'Profit',
    'Rent',
    'Speed',
    'Style',
    'War',
    'Bank',
    'Craft',
    'Half',
    'Inside',
    'Outside',
    'Standard',
    'Bus',
    'Exchange',
    'Eye',
    'Fire',
    'Position',
    'Pressure',
    'Stress',
    'Advantage',
    'Benefit',
    'Box',
    'Frame',
    'Issue',
    'Step',
    'Cycle',
    'Face',
    'Item',
    'Metal',
    'Paint',
    'Review',
    'Room',
    'Screen',
    'Structure',
    'View',
    'Account',
    'Ball',
    'Discipline',
    'Medium',
    'Share',
    'Balance',
    'Bit',
    'Black',
    'Bottom',
    'Choice',
    'Gift',
    'Impact',
    'Machine',
    'Shape',
    'Tool',
    'Wind',
    'Address',
    'Average',
    'Career',
    'Culture',
    'Morning',
    'Pot',
    'Sign',
    'Table',
    'Task',
    'Condition',
    'Contact',
    'Credit',
    'Egg',
    'Hope',
    'Ice',
    'Network',
    'North',
    'Square',
    'Attempt',
    'Date',
    'Effect',
    'Link',
    'Post',
    'Star',
    'Voice',
    'Capital',
    'Challenge',
    'Friend',
    'Self',
    'Shot',
    'Brush',
    'Couple',
    'Exit',
    'Front',
    'Function',
    'Lack',
    'Living',
    'Plant',
    'Plastic',
    'Spot',
    'Summer',
    'Taste',
    'Theme',
    'Track',
    'Wing',
    'Brain',
    'Button',
    'Click',
    'Desire',
    'Foot',
    'Gas',
    'Influence',
    'Notice',
    'Rain',
    'Wall',
    'Base',
    'Damage',
    'Distance',
    'Feeling',
    'Pair',
    'Savings',
    'Staff',
    'Sugar',
    'Target',
    'Text',
    'Animal',
    'Author',
    'Budget',
    'Discount',
    'File',
    'Ground',
    'Lesson',
    'Minute',
    'Officer',
    'Phase',
    'Reference',
    'Register',
    'Sky',
    'Stage',
    'Stick',
    'Title',
    'Trouble',
    'Bowl',
    'Bridge',
    'Campaign',
    'Character',
    'Club',
    'Edge',
    'Evidence',
    'Fan',
    'Letter',
    'Lock',
    'Maximum',
    'Novel',
    'Option',
    'Pack',
    'Park',
    'Plenty',
    'Quarter',
    'Skin',
    'Sort',
    'Weight',
    'Baby',
    'Background',
    'Carry',
    'Dish',
    'Factor',
    'Fruit',
    'Glass',
    'Joint',
    'Master',
    'Muscle',
    'Red',
    'Strength',
    'Traffic',
    'Trip',
    'Vegetable',
    'Appeal',
    'Chart',
    'Gear',
    'Ideal',
    'Kitchen',
    'Land',
    'Log',
    'Mother',
    'Net',
    'Party',
    'Principle',
    'Relative',
    'Sale',
    'Season',
    'Signal',
    'Spirit',
    'Street',
    'Tree',
    'Wave',
    'Belt',
    'Bench',
    'Commission',
    'Copy',
    'Drop',
    'Minimum',
    'Path',
    'Progress',
    'Project',
    'Sea',
    'South',
    'Status',
    'Stuff',
    'Ticket',
    'Tour',
    'Angle',
    'Blue',
    'Breakfast',
    'Confidence',
    'Daughter',
    'Degree',
    'Doctor',
    'Dot',
    'Dream',
    'Duty',
    'Essay',
    'Father',
    'Fee',
    'Finance',
    'Hour',
    'Juice',
    'Luck',
    'Milk',
    'Mouth',
    'Peace',
    'Pipe',
    'Stable',
    'Storm',
    'Substance',
    'Team',
    'Trick',
    'Afternoon',
    'Bat',
    'Beach',
    'Blank',
    'Catch',
    'Chain',
    'Consideration',
    'Cream',
    'Crew',
    'Detail',
    'Gold',
    'Interview',
    'Kid',
    'Mark',
    'Mission',
    'Pain',
    'Pleasure',
    'Score',
    'Screw',
    'Gratitude',
    'Shop',
    'Shower',
    'Suit',
    'Tone',
    'Window',
    'Agent',
    'Band',
    'Bath',
    'Block',
    'Bone',
    'Calendar',
    'Candidate',
    'Cap',
    'Coat',
    'Contest',
    'Corner',
    'Court',
    'Cup',
    'District',
    'Door',
    'East',
    'Finger',
    'Garage',
    'Guarantee',
    'Hole',
    'Hook',
    'Implement',
    'Layer',
    'Lecture',
    'Lie',
    'Manner',
    'Meeting',
    'Nose',
    'Parking',
    'Partner',
    'Profile',
    'Rice',
    'Routine',
    'Schedule',
    'Swimming',
    'Telephone',
    'Tip',
    'Winter',
    'Airline',
    'Bag',
    'Battle',
    'Bed',
    'Bill',
    'Bother',
    'Cake',
    'Code',
    'Curve',
    'Designer',
    'Dimension',
    'Dress',
    'Ease',
    'Emergency',
    'Evening',
    'Extension',
    'Farm',
    'Fight',
    'Gap',
    'Grade',
    'Holiday',
    'Horror',
    'Horse',
    'Host',
    'Husband',
    'Loan',
    'Mistake',
    'Mountain',
    'Nail',
    'Noise',
    'Occasion',
    'Package',
    'Patient',
    'Pause',
    'Phrase',
    'Proof',
    'Race',
    'Relief',
    'Sand',
    'Sentence',
    'Shoulder',
    'Smoke',
    'Stomach',
    'String',
    'Tourist',
    'Towel',
    'Vacation',
    'West',
    'Wheel',
    'Wine',
    'Arm',
    'Aside',
    'Associate',
    'Bet',
    'Blow',
    'Border',
    'Branch',
    'Breast',
    'Brother',
    'Buddy',
    'Bunch',
    'Chip',
    'Coach',
    'Cross',
    'Document',
    'Draft',
    'Dust',
    'Expert',
    'Floor',
    'God',
    'Golf',
    'Habit',
    'Iron',
    'Judge',
    'Knife',
    'Landscape',
    'League',
    'Mail',
    'Mess',
    'Native',
    'Opening',
    'Parent',
    'Pattern',
    'Pin',
    'Pool',
    'Pound',
    'Request',
    'Salary',
    'Shame',
    'Shelter',
    'Shoe',
    'Silver',
    'Tackle',
    'Tank',
    'Trust',
    'Assist',
    'Bake',
    'Bar',
    'Bell',
    'Bike',
    'Blame',
    'Boy',
    'Brick',
    'Chair',
    'Closet',
    'Clue',
    'Collar',
    'Comment',
    'Conference',
    'Devil',
    'Diet',
    'Fear',
    'Fuel',
    'Glove',
    'Jacket',
    'Lunch',
    'Monitor',
    'Mortgage',
    'Nurse',
    'Pace',
    'Panic',
    'Peak',
    'Plane',
    'Reward',
    'Row',
    'Sandwich',
    'Shock',
    'Spite',
    'Spray',
    'Surprise',
    'Till',
    'Transition',
    'Weekend',
    'Welcome',
    'Yard',
    'Alarm',
    'Bend',
    'Bicycle',
    'Bite',
    'Blind',
    'Bottle',
    'Cable',
    'Candle',
    'Clerk',
    'Cloud',
    'Concert',
    'Counter',
    'Flower',
    'Grandfather',
    'Harm',
    'Knee',
    'Lawyer',
    'Leather',
    'Load',
    'Mirror',
    'Neck',
    'Pension',
    'Plate',
    'Purple',
    'Ruin',
    'Ship',
    'Skirt',
    'Slice',
    'Snow',
    'Specialist',
    'Stroke',
    'Switch',
    'Trash',
    'Tune',
    'Zone',
    'Anger',
    'Award',
    'Bid',
    'Bitter',
    'Boot',
    'Bug',
    'Camp',
    'Candy',
    'Carpet',
    'Cat',
    'Champion',
    'Channel',
    'Clock',
    'Comfort',
    'Cow',
    'Crack',
    'Engineer',
    'Entrance',
    'Fault',
    'Grass',
    'Guy',
    'Hell',
    'Highlight',
    'Incident',
    'Island',
    'Joke',
    'Jury',
    'Leg',
    'Lip',
    'Mate',
    'Motor',
    'Nerve',
    'Passage',
    'Pen',
    'Pride',
    'Priest',
    'Prize',
    'Promise',
    'Resident',
    'Resort',
    'Ring',
    'Roof',
    'Rope',
    'Sail',
    'Scheme',
    'Script',
    'Sock',
    'Station',
    'Toe',
    'Tower',
    'Truck',
    'Witness',
    'Asparagus',
    'You',
    'It',
    'Can',
    'Will',
    'If',
    'One',
    'Many',
    'Most',
    'Other',
    'Use',
    'Make',
    'Good',
    'Look',
    'Help',
    'Go',
    'Great',
    'Being',
    'Few',
    'Might',
    'Still',
    'Public',
    'Read',
    'Keep',
    'Start',
    'Give',
    'Human',
    'Local',
    'General',
    'She',
    'Specific',
    'Long',
    'Play',
    'Feel',
    'High',
    'Tonight',
    'Put',
    'Common',
    'Set',
    'Change',
    'Simple',
    'Past',
    'Big',
    'Possible',
    'Particular',
    'Today',
    'Major',
    'Personal',
    'Current',
    'National',
    'Cut',
    'Natural',
    'Physical',
    'Show',
    'Try',
    'Check',
    'Second',
    'Call',
    'Move',
    'Pay',
    'Let',
    'Increase',
    'Single',
    'Individual',
    'Turn',
    'Ask',
    'Buy',
    'Guard',
    'Hold',
    'Main',
    'Offer',
    'Potential',
    'Professional',
    'International',
    'Travel',
    'Cook',
    'Alternative',
    'Following',
    'Special',
    'Working',
    'Whole',
    'Dance',
    'Excuse',
    'Cold',
    'Commercial',
    'Low',
    'Purchase',
    'Deal',
    'Primary',
    'Worth',
    'Fall',
    'Necessary',
    'Positive',
    'Produce',
    'Search',
    'Present',
    'Spend',
    'Talk',
    'Creative',
    'Tell',
    'Cost',
    'Drive',
    'Green',
    'Support',
    'Glad',
    'Remove',
    'Return',
    'Run',
    'Complex',
    'Due',
    'Effective',
    'Middle',
    'Regular',
    'Reserve',
    'Independent',
    'Leave',
    'Original',
    'Reach',
    'Rest',
    'Serve',
    'Watch',
    'Beautiful',
    'Charge',
    'Active',
    'Break',
    'Negative',
    'Safe',
    'Stay',
    'Visit',
    'Visual',
    'Affect',
    'Cover',
    'Report',
    'Rise',
    'Walk',
    'White',
    'Beyond',
    'Junior',
    'Pick',
    'Unique',
    'Anything',
    'Classic',
    'Final',
    'Lift',
    'Mix',
    'Private',
    'Stop',
    'Teach',
    'Western',
    'Concern',
    'Familiar',
    'Fly',
    'Official',
    'Broad',
    'Comfortable',
    'Gain',
    'Maybe',
    'Rich',
    'Save',
    'Stand',
    'Young',
    'Heavy',
    'Hello',
    'Lead',
    'Listen',
    'Valuable',
    'Worry',
    'Handle',
    'Leading',
    'Meet',
    'Release',
    'Sell',
    'Finish',
    'Normal',
    'Press',
    'Ride',
    'Secret',
    'Spread',
    'Spring',
    'Tough',
    'Wait',
    'Brown',
    'Deep',
    'Display',
    'Flow',
    'Hit',
    'Objective',
    'Shoot',
    'Touch',
    'Cancel',
    'Chemical',
    'Cry',
    'Dump',
    'Extreme',
    'Push',
    'Conflict',
    'Eat',
    'Fill',
    'Formal',
    'Jump',
    'Kick',
    'Opposite',
    'Pass',
    'Pitch',
    'Remote',
    'Total',
    'Treat',
    'Vast',
    'Abuse',
    'Beat',
    'Burn',
    'Deposit',
    'Print',
    'Raise',
    'Sleep',
    'Somewhere',
    'Advance',
    'Anywhere',
    'Consist',
    'Dark',
    'Double',
    'Draw',
    'Equal',
    'Fix',
    'Hire',
    'Internal',
    'Join',
    'Kill',
    'Sensitive',
    'Tap',
    'Win',
    'Attack',
    'Claim',
    'Constant',
    'Drag',
    'Drink',
    'Guess',
    'Minor',
    'Pull',
    'Raw',
    'Soft',
    'Solid',
    'Wear',
    'Weird',
    'Wonder',
    'Annual',
    'Count',
    'Dead',
    'Doubt',
    'Feed',
    'Forever',
    'Impress',
    'Nobody',
    'Repeat',
    'Round',
    'Sing',
    'Slide',
    'Strip',
    'Whereas',
    'Wish',
    'Combine',
    'Command',
    'Dig',
    'Divide',
    'Equivalent',
    'Hang',
    'Hunt',
    'Initial',
    'March',
    'Mention',
    'Spiritual',
    'Survey',
    'Tie',
    'Adult',
    'Brief',
    'Crazy',
    'Escape',
    'Gather',
    'Hate',
    'Prior',
    'Repair',
    'Rough',
    'Sad',
    'Scratch',
    'Sick',
    'Strike',
    'Employ',
    'External',
    'Hurt',
    'Illegal',
    'Laugh',
    'Lay',
    'Mobile',
    'Nasty',
    'Ordinary',
    'Respond',
    'Royal',
    'Senior',
    'Split',
    'Strain',
    'Struggle',
    'Swim',
    'Train',
    'Upper',
    'Wash',
    'Yellow',
    'Convert',
    'Crash',
    'Dependent',
    'Fold',
    'Funny',
    'Grab',
    'Hide',
    'Miss',
    'Permit',
    'Quote',
    'Recover',
    'Resolve',
    'Roll',
    'Sink',
    'Slip',
    'Spare',
    'Suspect',
    'Sweet',
    'Swing',
    'Twist',
    'Upstairs',
    'Usual',
    'Abroad',
    'Brave',
    'Calm',
    'Concentrate',
    'Estimate',
    'Grand',
    'Male',
    'Mine',
    'Prompt',
    'Quiet',
    'Refuse',
    'Regret',
    'Reveal',
    'Rush',
    'Shake',
    'Shift',
    'Shine',
    'Steal',
    'Suck',
    'Surround',
    'Anybody',
    'Bear',
    'Brilliant',
    'Dare',
    'Dear',
    'Delay',
    'Drunk',
    'Female',
    'Hurry',
    'Inevitable',
    'Invite',
    'Kiss',
    'Neat',
    'Pop',
    'Punch',
    'Quit',
    'Reply',
    'Representative',
    'Resist',
    'Rip',
    'Rub',
    'Silly',
    'Smile',
    'Spell',
    'Stretch',
    'Stupid',
    'Tear',
    'Temporary',
    'Tomorrow',
    'Wake',
    'Wrap',
    'Yesterday',
]
07070100000067000081A400000000000000000000000166846B9200002393000000000000000000000000000000000000001800000000awx-24.6.1/awxkit/ws.pyimport threading
import logging
import atexit
import json
import ssl
import datetime

from queue import Queue, Empty
from urllib.parse import urlparse

from awxkit.config import config


log = logging.getLogger(__name__)


class WSClientException(Exception):
    """Raised when the websocket channel connection to AWX cannot be established."""


# Event-type names used when building websocket subscription payloads
# (see WSClient.subscribe and the 'groups' payload documented on WSClient).
changed = 'changed'
limit_reached = 'limit_reached'
status_changed = 'status_changed'
summary = 'summary'


class WSClient(object):
    """Provides a basic means of testing pub/sub notifications with payloads similar to
    'groups': {'jobs': ['status_changed', 'summary'],
               'schedules': ['changed'],
               'ad_hoc_command_events': [ids...],
               'job_events': [ids...],
               'workflow_events': [ids...],
               'project_update_events': [ids...],
               'inventory_update_events': [ids...],
               'system_job_events': [ids...],
               'control': ['limit_reached']}
    e.x:
    ```
    ws = WSClient(token, port=8013, secure=False).connect()
    ws.job_details()
    ... # launch job
    job_messages = [msg for msg in ws]
    ws.ad_hoc_stdout()
    ... # launch ad hoc command
    ad_hoc_messages = [msg for msg in ws]
    ws.close()
    ```
    """

    # Subscription group types

    def __init__(
        self,
        token=None,
        hostname='',
        port=443,
        secure=True,
        ws_suffix='websocket/',
        session_id=None,
        csrftoken=None,
        add_received_time=False,
        session_cookie_name='awx_sessionid',
    ):
        """Build (but do not open) a websocket client.

        Authentication is cookie-based: either ``token`` or ``session_id``
        (plus optional ``csrftoken``) is encoded into the Cookie header.
        When ``hostname`` is empty, scheme/host/port are derived from
        ``config.base_url``.
        """
        # delay this import, because this is an optional dependency
        import websocket

        if not hostname:
            result = urlparse(config.base_url)
            secure = result.scheme == 'https'
            port = result.port
            if port is None:
                # fall back to the default port for the derived scheme
                port = 443 if secure else 80
            # should we be adding result.path here?
            hostname = result.hostname

        self.port = port
        self.suffix = ws_suffix
        self._use_ssl = secure
        self.hostname = hostname
        self.token = token
        self.session_id = session_id
        self.csrftoken = csrftoken
        self._recv_queue = Queue()
        self._ws_closed = False
        self._ws_connected_flag = threading.Event()
        if self.token is not None:
            auth_cookie = 'token="{0.token}";'.format(self)
        elif self.session_id is not None:
            auth_cookie = '{1}="{0.session_id}"'.format(self, session_cookie_name)
            if self.csrftoken:
                auth_cookie += ';csrftoken={0.csrftoken}'.format(self)
        else:
            auth_cookie = ''
        pref = 'wss://' if self._use_ssl else 'ws://'
        url = '{0}{1.hostname}:{1.port}/{1.suffix}'.format(pref, self)
        self.ws = websocket.WebSocketApp(
            url, on_open=self._on_open, on_message=self._on_message, on_error=self._on_error, on_close=self._on_close, cookie=auth_cookie
        )
        self._message_cache = []
        # When truthy, holds dict(jobs=..., events=...) describing a pending
        # subscription that _on_message completes once AWX reports a pending job id.
        self._should_subscribe_to_pending_job = False
        self._pending_unsubscribe = threading.Event()
        self._add_received_time = add_received_time

    def connect(self):
        """Open the websocket on a daemon thread; returns self so calls can be chained.

        Raises:
            WSClientException: if the connection is not confirmed within 20 seconds.
        """
        wst = threading.Thread(target=self._ws_run_forever, args=(self.ws, {"cert_reqs": ssl.CERT_NONE}))
        wst.daemon = True
        wst.start()
        atexit.register(self.close)
        if not self._ws_connected_flag.wait(20):
            raise WSClientException('Failed to establish channel connection w/ AWX.')
        return self

    def close(self):
        """Close the websocket connection; a no-op if it is already closed.

        Registered with atexit by connect(), so it may be invoked more than once.
        """
        if not self._ws_closed:
            log.info('Closing websocket connection.')
            self.ws.close()

    def job_details(self, *job_ids):
        """subscribes to job status, summary, and, for the specified ids, job events"""
        self.subscribe(jobs=[status_changed, summary], job_events=list(job_ids))

    def pending_job_details(self):
        """subscribes to job status and summary, with responsive
        job event subscription for an id provided by AWX
        """
        self.subscribe_to_pending_events('job_events', [status_changed, summary])

    def status_changes(self):
        """subscribes to job status changes only"""
        self.subscribe(jobs=[status_changed])

    def job_stdout(self, *job_ids):
        """subscribes to job status changes and job events for the given ids"""
        self.subscribe(jobs=[status_changed], job_events=list(job_ids))

    def pending_job_stdout(self):
        """subscribes to job events for a pending job id supplied later by AWX"""
        self.subscribe_to_pending_events('job_events')

    # mirror page behavior
    def ad_hoc_stdout(self, *ahc_ids):
        self.subscribe(jobs=[status_changed], ad_hoc_command_events=list(ahc_ids))

    def pending_ad_hoc_stdout(self):
        self.subscribe_to_pending_events('ad_hoc_command_events')

    def project_update_stdout(self, *project_update_ids):
        self.subscribe(jobs=[status_changed], project_update_events=list(project_update_ids))

    def pending_project_update_stdout(self):
        self.subscribe_to_pending_events('project_update_events')

    def inventory_update_stdout(self, *inventory_update_ids):
        self.subscribe(jobs=[status_changed], inventory_update_events=list(inventory_update_ids))

    def pending_inventory_update_stdout(self):
        self.subscribe_to_pending_events('inventory_update_events')

    def workflow_events(self, *wfjt_ids):
        self.subscribe(jobs=[status_changed], workflow_events=list(wfjt_ids))

    def pending_workflow_events(self):
        self.subscribe_to_pending_events('workflow_events')

    def system_job_events(self, *system_job_ids):
        self.subscribe(jobs=[status_changed], system_job_events=list(system_job_ids))

    def pending_system_job_events(self):
        self.subscribe_to_pending_events('system_job_events')

    def subscribe_to_pending_events(self, events, jobs=None):
        """Record a deferred event subscription of type *events*, then subscribe to *jobs*.

        The deferred subscription is completed by _on_message once a pending
        unified job id arrives.  ``jobs`` defaults to ``[status_changed]``;
        using None instead of a mutable list default avoids the shared-default
        pitfall.
        """
        if jobs is None:
            jobs = [status_changed]
        self._should_subscribe_to_pending_job = dict(jobs=jobs, events=events)
        self.subscribe(jobs=jobs)

    # mirror page behavior
    def jobs_list(self):
        self.subscribe(jobs=[status_changed, summary], schedules=[changed])

    # mirror page behavior
    def dashboard(self):
        self.subscribe(jobs=[status_changed])

    def subscribe(self, **groups):
        """Sends a subscription request for the specified channel groups.
        ```
        ws.subscribe(jobs=[ws.status_changed, ws.summary],
                     job_events=[1,2,3])
        ```
        """
        self._subscribe(groups=groups)

    def _subscribe(self, **payload):
        # 'xrftoken' (sic) is the key the AWX channel protocol expects
        payload['xrftoken'] = self.csrftoken
        self._send(json.dumps(payload))

    def unsubscribe(self, wait=True, timeout=10):
        """Unsubscribe from all groups, optionally waiting for the server's acknowledgement.

        Raises:
            RuntimeError: when ``wait`` is True and no acknowledgement arrives
                within ``timeout`` seconds.
        """
        if wait:
            # Other unnsubscribe events could have caused the edge to trigger.
            # This way the _next_ event will trigger our waiting.
            self._pending_unsubscribe.clear()
            self._send(json.dumps(dict(groups={}, xrftoken=self.csrftoken)))
            if not self._pending_unsubscribe.wait(timeout):
                raise RuntimeError("Failed while waiting on unsubscribe reply because timeout of {} seconds was reached.".format(timeout))
        else:
            self._send(json.dumps(dict(groups={}, xrftoken=self.csrftoken)))

    def _on_message(self, message):
        """websocket-client callback: decode, complete pending subscriptions, enqueue."""
        message = json.loads(message)
        log.debug('received message: {}'.format(message))
        if self._add_received_time:
            message['received_time'] = datetime.datetime.utcnow()

        # A pending job announcement completes any deferred event subscription;
        # the project_id check distinguishes project updates from other job types.
        if all([message.get('group_name') == 'jobs', message.get('status') == 'pending', message.get('unified_job_id'), self._should_subscribe_to_pending_job]):
            if bool(message.get('project_id')) == (self._should_subscribe_to_pending_job['events'] == 'project_update_events'):
                self._update_subscription(message['unified_job_id'])

        ret = self._recv_queue.put(message)

        # unsubscribe acknowledgement
        if 'groups_current' in message:
            self._pending_unsubscribe.set()

        return ret

    def _update_subscription(self, job_id):
        # Complete the deferred subscription recorded by subscribe_to_pending_events.
        subscription = dict(jobs=self._should_subscribe_to_pending_job['jobs'])
        events = self._should_subscribe_to_pending_job['events']
        subscription[events] = [job_id]
        self.subscribe(**subscription)
        self._should_subscribe_to_pending_job = False

    def _on_open(self):
        self._ws_connected_flag.set()

    def _on_error(self, error):
        log.info('Error received: {}'.format(error))

    def _on_close(self):
        log.info('Successfully closed ws.')
        self._ws_closed = True

    def _ws_run_forever(self, ws=None, sslopt=None):
        # connect() passes self.ws as the first positional argument; the old
        # signature bound it to an ignored 'sockopt' parameter.  Use it (falling
        # back to self.ws), which is behaviorally identical for the existing call.
        (ws if ws is not None else self.ws).run_forever(sslopt=sslopt)
        log.debug('ws.run_forever finished')

    def _recv(self, wait=False, timeout=10):
        """Pop one message off the receive queue; None when empty (or on timeout)."""
        try:
            msg = self._recv_queue.get(block=wait, timeout=timeout)
        except Empty:
            return None
        return msg

    def _send(self, data):
        self.ws.send(data)
        log.debug('successfully sent {}'.format(data))

    def __iter__(self):
        """Yield queued messages until the queue is drained (or a falsy message arrives)."""
        while True:
            val = self._recv()
            if not val:
                return
            yield val
07070100000068000081A400000000000000000000000166846B9200000C38000000000000000000000000000000000000001F00000000awx-24.6.1/awxkit/yaml_file.pyimport os
import yaml
import glob
import logging


log = logging.getLogger(__name__)


# Module-level caches so a repeated !include of the same glob pattern or file
# is parsed only once per process (keyed by glob pattern and absolute path).
file_pattern_cache = {}
file_path_cache = {}


class Loader(yaml.SafeLoader):
    """SafeLoader subclass adding '!include' / '!import' tags that splice in other YAML files."""

    def __init__(self, stream):
        # Included paths are resolved relative to the directory of the including file.
        self._root = os.path.split(stream.name)[0]
        super(Loader, self).__init__(stream)
        # (Re-)register the tag constructors class-wide on every instantiation.
        Loader.add_constructor('!include', Loader.include)
        Loader.add_constructor('!import', Loader.include)

    def include(self, node):
        """Constructor for !include/!import: supports scalar, sequence, and mapping nodes."""
        if isinstance(node, yaml.ScalarNode):
            # single filename/glob
            return self.extractFile(self.construct_scalar(node))

        if isinstance(node, yaml.SequenceNode):
            # list of filenames: concatenate the extracted contents
            combined = []
            for name in self.construct_sequence(node):
                combined += self.extractFile(name)
            return combined

        if isinstance(node, yaml.MappingNode):
            # mapping of key -> filename: pull that same key out of each file
            merged = {}
            for key, name in self.construct_mapping(node).items():
                merged[key] = self.extractFile(name)[key]
            return merged

        log.error("unrecognised node type in !include statement")
        raise yaml.constructor.ConstructorError

    def extractFile(self, filename):
        """Load and merge every file matching *filename* (a glob relative to self._root)."""
        file_pattern = os.path.join(self._root, filename)
        log.debug('Will attempt to extract schema from: {0}'.format(file_pattern))
        if file_pattern in file_pattern_cache:
            log.debug('File pattern cache hit: {0}'.format(file_pattern))
            return file_pattern_cache[file_pattern]

        data = dict()
        for match in glob.glob(file_pattern):
            match = os.path.abspath(match)
            if match in file_path_cache:
                log.debug('Schema cache hit: {0}'.format(match))
                loaded = file_path_cache[match]
            else:
                log.debug('Loading schema from {0}'.format(match))
                with open(match, 'r') as handle:
                    loaded = yaml.load(handle, Loader)
                file_path_cache[match] = loaded
            data.update(loaded)

        file_pattern_cache[file_pattern] = data
        return data


def load_file(filename):
    """Loads a YAML file from the given filename.

    If the filename is omitted or None, attempts will be made to load it from
    its normal location in the parent of the utils directory.

    The awx_data dict loaded with this method supports value randomization,
    thanks to the RandomizeValues class. See that class for possible options

    Example usage in data.yaml (quotes are important!):

    top_level:
      list:
      - "{random_str}"
      - "{random_int}"
      - "{random_uuid}"
      random_thing: "{random_string:24}"
    """
    from py.path import local

    if filename is None:
        # default: ../data.yaml relative to this module
        path = local(os.path.abspath(__file__)).new(basename='../data.yaml')
    else:
        path = local(filename)

    if not path.check():
        raise Exception('Unable to load data file at %s' % path)

    with open(path, 'r') as fp:
        # FIXME - support load_all()
        return yaml.load(fp, Loader=Loader)
07070100000069000081A400000000000000000000000166846B9200000002000000000000000000000000000000000000001C00000000awx-24.6.1/requirements.txt.
0707010000006A000081A400000000000000000000000166846B9200000D2A000000000000000000000000000000000000001400000000awx-24.6.1/setup.pyimport os
import glob
import shutil
from setuptools import setup, find_packages, Command


def use_scm_version():
    """Return True when no VERSION file exists, i.e. setuptools_scm must derive the version."""
    # `return False if x else True` is just `return not x`
    return not version_file()


def get_version_from_file():
    """Return the stripped contents of the VERSION file, or None when it is absent."""
    path = version_file()
    if not path:
        return None
    with open(path, 'r') as handle:
        return handle.read().strip()


def version_file():
    """Return the absolute path of the VERSION file beside this script, or None if missing."""
    candidate = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'VERSION')
    if os.path.exists(candidate):
        return candidate


def setup_requires():
    """setuptools_scm is only required when no static VERSION file is present."""
    return [] if version_file() else ['setuptools_scm']


# When building from a git checkout (no VERSION file), have setuptools_scm
# compute the version from repository metadata rooted one directory up.
extra_setup_args = {}
if not version_file():
    extra_setup_args.update(dict(use_scm_version=dict(root="..", relative_to=__file__), setup_requires=setup_requires()))


class CleanCommand(Command):
    """setup.py 'clean' command that removes .pyc files, __pycache__ dirs, and egg artifacts."""

    description = "Custom clean command that forcefully removes dist/build directories"
    user_options = []

    def initialize_options(self):
        # No user options; only the working directory is tracked for the
        # sanity assertion in run().
        self.cwd = None

    def finalize_options(self):
        self.cwd = os.getcwd()

    def run(self):
        assert os.getcwd() == self.cwd, 'Must be in package root: %s' % self.cwd

        # Everything scheduled for removal.
        targets = []

        # Compiled bytecode files and __pycache__ directories (bottom-up walk
        # so directories are listed after their contents).
        for root, dirs, files in os.walk(self.cwd, topdown=False):
            targets.extend(os.path.join(root, name) for name in files if name.endswith('.pyc') and os.path.isfile(os.path.join(root, name)))
            if root.endswith('__pycache__'):
                targets.append(root)

        # Egg artifacts in the package root.
        targets.extend(glob.glob('*.egg') + glob.glob('*egg-info'))

        for target in targets:
            if self.verbose:
                print("Removing '%s'" % target)
            if self.dry_run:
                continue
            if os.path.isdir(target):
                shutil.rmtree(target)
            else:
                os.remove(target)


# Package metadata and entry points for awxkit (the `awx` and `akit` CLIs).
setup(
    name='awxkit',
    version=get_version_from_file(),  # None when built from git; setuptools_scm supplies it via extra_setup_args
    description='The official command line interface for Ansible AWX',
    author='Red Hat, Inc.',
    author_email='info@ansible.com',
    url='https://github.com/ansible/awx',
    packages=find_packages(exclude=['test']),
    cmdclass={
        'clean': CleanCommand,  # custom clean defined above
    },
    include_package_data=True,
    install_requires=[
        'PyYAML',
        'requests',
        'setuptools',
    ],
    python_requires=">=3.8",
    extras_require={'formatting': ['jq'], 'websockets': ['websocket-client==0.57.0'], 'crypto': ['cryptography']},
    license='Apache 2.0',
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: MacOS :: MacOS X',
        'Operating System :: POSIX :: Linux',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3.8',
        'Topic :: System :: Software Distribution',
        'Topic :: System :: Systems Administration',
    ],
    entry_points={'console_scripts': ['akit=awxkit.scripts.basic_session:load_interactive', 'awx=awxkit.cli:run']},
    **extra_setup_args,
)
0707010000006B000041ED00000000000000000000000266846B9200000000000000000000000000000000000000000000001000000000awx-24.6.1/test0707010000006C000081A400000000000000000000000166846B9200000000000000000000000000000000000000000000001C00000000awx-24.6.1/test/__init__.py0707010000006D000041ED00000000000000000000000266846B9200000000000000000000000000000000000000000000001400000000awx-24.6.1/test/cli0707010000006E000081A400000000000000000000000166846B9200000571000000000000000000000000000000000000002300000000awx-24.6.1/test/cli/test_client.pyimport pytest
from requests.exceptions import ConnectionError

from awxkit.cli import run, CLI


class MockedCLI(CLI):
    """CLI stand-in for tests: skips the real version-root fetch and serves canned data."""

    def fetch_version_root(self):
        # Intentionally a no-op; the real implementation would hit the API.
        pass

    @property
    def v2(self):
        # Each access hands back a fresh mocked client.
        return MockedCLI()

    @property
    def json(self):
        # Canned API payload exposing a single 'users' resource.
        return {'users': None}


@pytest.mark.parametrize('help_param', ['-h', '--help'])
def test_help(capfd, help_param):
    """Both help flags should exit and print the usage text with key option snippets."""
    with pytest.raises(SystemExit):
        run(['awx {}'.format(help_param)])
    stdout, _ = capfd.readouterr()

    assert "usage:" in stdout
    expected = ('--conf.host https://example.awx.org]', '-v, --verbose')
    for snippet in expected:
        assert snippet in stdout


def test_connection_error(capfd):
    """connect() against the default (unreachable) host raises ConnectionError."""
    cli = CLI()
    cli.parse_args(['awx'])
    with pytest.raises(ConnectionError):
        cli.connect()


@pytest.mark.parametrize('resource', ['', 'invalid'])
def test_list_resources(capfd, resource):
    """Missing or invalid resource names should fall back to printing help."""
    # if a valid resource isn't specified, print --help
    cli = MockedCLI()
    cli.parse_args(['awx {}'.format(resource)])
    cli.connect()

    try:
        cli.parse_resource()
        out, err = capfd.readouterr()
    except SystemExit:
        # python2 argparse raises SystemExit for invalid/missing required args,
        # py3 doesn't
        _, out = capfd.readouterr()

    assert "usage:" in out
    for snippet in ('--conf.host https://example.awx.org]', '-v, --verbose'):
        assert snippet in out
0707010000006F000081A400000000000000000000000166846B9200000DE2000000000000000000000000000000000000002300000000awx-24.6.1/test/cli/test_config.pyimport os
import json
import pytest
from requests.exceptions import ConnectionError

from awxkit.cli import CLI
from awxkit import config


def test_host_from_environment():
    """CONTROLLER_HOST in the environment should populate config.base_url."""
    cli = CLI()
    cli.parse_args(['awx'], env={'CONTROLLER_HOST': 'https://xyz.local'})
    # the host is unreachable, so connect() fails; config must still have
    # been populated by parse_args
    with pytest.raises(ConnectionError):
        cli.connect()
    assert config.base_url == 'https://xyz.local'


def test_host_from_argv():
    """--conf.host on the command line should populate config.base_url."""
    cli = CLI()
    cli.parse_args(['awx', '--conf.host', 'https://xyz.local'])
    with pytest.raises(ConnectionError):
        cli.connect()
    assert config.base_url == 'https://xyz.local'


def test_username_and_password_from_environment():
    """CONTROLLER_USERNAME/PASSWORD env vars populate the default credential."""
    cli = CLI()
    cli.parse_args(['awx'], env={'CONTROLLER_USERNAME': 'mary', 'CONTROLLER_PASSWORD': 'secret'})
    with pytest.raises(ConnectionError):
        cli.connect()

    assert config.credentials.default.username == 'mary'
    assert config.credentials.default.password == 'secret'


def test_username_and_password_argv():
    """--conf.username/--conf.password populate the default credential."""
    cli = CLI()
    cli.parse_args(['awx', '--conf.username', 'mary', '--conf.password', 'secret'])
    with pytest.raises(ConnectionError):
        cli.connect()

    assert config.credentials.default.username == 'mary'
    assert config.credentials.default.password == 'secret'


def test_config_precedence():
    """Command-line credentials take precedence over environment variables."""
    cli = CLI()
    cli.parse_args(['awx', '--conf.username', 'mary', '--conf.password', 'secret'], env={'CONTROLLER_USERNAME': 'IGNORE', 'CONTROLLER_PASSWORD': 'IGNORE'})
    with pytest.raises(ConnectionError):
        cli.connect()

    assert config.credentials.default.username == 'mary'
    assert config.credentials.default.password == 'secret'


def test_config_file_precedence():
    """Ignores AWXKIT_CREDENTIAL_FILE if cli args are set"""
    import tempfile

    # Write the credential file into a throwaway directory instead of the
    # previous hard-coded /tmp/awx-test/ path: no cross-run collisions and
    # the file is cleaned up automatically.
    with tempfile.TemporaryDirectory() as tmpdir:
        cred_file = os.path.join(tmpdir, 'config.json')
        with open(cred_file, 'w') as f:
            json.dump({'default': {'username': 'IGNORE', 'password': 'IGNORE'}}, f)

        cli = CLI()
        cli.parse_args(
            ['awx', '--conf.username', 'mary', '--conf.password', 'secret'],
            env={
                'AWXKIT_CREDENTIAL_FILE': cred_file,
            },
        )
        with pytest.raises(ConnectionError):
            cli.connect()

    # cli flags win over the credential file's contents
    assert config.credentials.default.username == 'mary'
    assert config.credentials.default.password == 'secret'


def test_config_file_precedence_2():
    """Ignores AWXKIT_CREDENTIAL_FILE if TOWER_* vars are set."""
    import tempfile

    # temp directory instead of a shared hard-coded /tmp path (auto-cleanup)
    with tempfile.TemporaryDirectory() as tmpdir:
        cred_file = os.path.join(tmpdir, 'config.json')
        with open(cred_file, 'w') as f:
            json.dump({'default': {'username': 'IGNORE', 'password': 'IGNORE'}}, f)

        cli = CLI()
        cli.parse_args(['awx'], env={'AWXKIT_CREDENTIAL_FILE': cred_file, 'TOWER_USERNAME': 'mary', 'TOWER_PASSWORD': 'secret'})
        with pytest.raises(ConnectionError):
            cli.connect()

    # TOWER_* environment variables win over the credential file's contents
    assert config.credentials.default.username == 'mary'
    assert config.credentials.default.password == 'secret'


def test_config_file():
    """Reads username and password from AWXKIT_CREDENTIAL_FILE."""
    import tempfile

    # temp directory instead of a shared hard-coded /tmp path (auto-cleanup)
    with tempfile.TemporaryDirectory() as tmpdir:
        cred_file = os.path.join(tmpdir, 'config.json')
        with open(cred_file, 'w') as f:
            json.dump({'default': {'username': 'mary', 'password': 'secret'}}, f)

        cli = CLI()
        cli.parse_args(
            ['awx'],
            env={
                'AWXKIT_CREDENTIAL_FILE': cred_file,
            },
        )
        with pytest.raises(ConnectionError):
            cli.connect()

    assert config.credentials.default.username == 'mary'
    assert config.credentials.default.password == 'secret'
07070100000070000081A400000000000000000000000166846B92000006B9000000000000000000000000000000000000002300000000awx-24.6.1/test/cli/test_format.pyimport io
import json

import yaml

from awxkit.api.pages import Page
from awxkit.api.pages.users import Users
from awxkit.cli import CLI
from awxkit.cli.format import format_response
from awxkit.cli.resource import Import


def test_json_empty_list():
    """An empty result set round-trips through the default (JSON) formatter."""
    page = Page.from_json({'results': []})
    formatted = format_response(page)
    assert json.loads(formatted) == {'results': []}


def test_yaml_empty_list():
    """An empty result set round-trips through the YAML formatter."""
    page = Page.from_json({'results': []})
    formatted = format_response(page, fmt='yaml')
    assert yaml.safe_load(formatted) == {'results': []}


def test_json_list():
    """A populated Users page round-trips losslessly through JSON formatting."""
    users = {
        'results': [
            {'username': 'betty'},
            {'username': 'tom'},
            {'username': 'anne'},
        ]
    }
    page = Users.from_json(users)
    formatted = format_response(page)
    assert json.loads(formatted) == users


def test_yaml_list():
    """A populated Users page round-trips losslessly through YAML formatting."""
    users = {
        'results': [
            {'username': 'betty'},
            {'username': 'tom'},
            {'username': 'anne'},
        ]
    }
    page = Users.from_json(users)
    formatted = format_response(page, fmt='yaml')
    assert yaml.safe_load(formatted) == users


def test_yaml_import():
    """Import.handle() parses YAML from stdin and passes the parsed assets
    to v2.import_assets()."""

    class MockedV2:
        def import_assets(self, data):
            # capture what the CLI parsed so the test can inspect it
            self._parsed_data = data

    def _dummy_authenticate():
        pass

    yaml_fd = io.StringIO(
        """
        workflow_job_templates:
          - name: Workflow1
        """
    )
    yaml_fd.name = 'file.yaml'
    cli = CLI(stdin=yaml_fd)
    cli.parse_args(['--conf.format', 'yaml'])
    cli.v2 = MockedV2()
    cli.authenticate = _dummy_authenticate

    Import().handle(cli, None)
    # BUGFIX: previously only asserted truthiness of the name; pin the
    # exact value that the YAML document declares.
    assert cli.v2._parsed_data['workflow_job_templates'][0]['name'] == 'Workflow1'
07070100000071000081A400000000000000000000000166846B9200001AD8000000000000000000000000000000000000002400000000awx-24.6.1/test/cli/test_options.pyimport argparse
import unittest
from io import StringIO

from awxkit.api.pages import Page
from awxkit.cli.options import ResourceOptionsParser


class ResourceOptionsParser(ResourceOptionsParser):
    # NOTE(review): deliberately shadows the imported class so the tests
    # below exercise this stubbed version without an HTTP OPTIONS call.
    def get_allowed_options(self):
        # pretend every HTTP verb is allowed instead of querying the API
        self.allowed_options = ['GET', 'POST', 'PUT', 'PATCH', 'DELETE']


class OptionsPage(Page):
    # Page stub: OPTIONS returns itself, and every key lookup reports the
    # same fixed set of actions.

    def options(self):
        return self

    def endswith(self, v):
        return self.endpoint.endswith(v)

    def __getitem__(self, k):
        # `k` is deliberately ignored; all keys map to the same action dict
        return {
            'GET': {},
            'POST': {},
            'PUT': {},
        }


class TestOptions(unittest.TestCase):
    """Verify the argparse subcommands ResourceOptionsParser builds from a
    resource's OPTIONS metadata."""

    def setUp(self):
        # fresh subparsers object per test; ResourceOptionsParser registers
        # its action subcommands onto it
        _parser = argparse.ArgumentParser()
        self.parser = _parser.add_subparsers(help='action')

    def test_list(self):
        """A GET-capable resource gets a `list` subcommand."""
        page = OptionsPage.from_json(
            {
                'actions': {
                    'GET': {},
                    'POST': {},
                }
            }
        )
        ResourceOptionsParser(None, page, 'users', self.parser)
        assert 'list' in self.parser.choices

    def test_list_filtering(self):
        """Filterable POST fields become --<field> query options on `list`."""
        page = OptionsPage.from_json(
            {
                'actions': {
                    'GET': {},
                    'POST': {'first_name': {'type': 'string'}},
                }
            }
        )
        options = ResourceOptionsParser(None, page, 'users', self.parser)
        options.build_query_arguments('list', 'POST')
        assert 'list' in self.parser.choices

        out = StringIO()
        self.parser.choices['list'].print_help(out)
        assert '--first_name TEXT' in out.getvalue()

    def test_list_not_filterable(self):
        """Fields marked filterable=False must not become query options."""
        page = OptionsPage.from_json(
            {
                'actions': {
                    'GET': {},
                    'POST': {'middle_name': {'type': 'string', 'filterable': False}},
                }
            }
        )
        options = ResourceOptionsParser(None, page, 'users', self.parser)
        options.build_query_arguments('list', 'POST')
        assert 'list' in self.parser.choices

        out = StringIO()
        self.parser.choices['list'].print_help(out)
        assert '--middle_name' not in out.getvalue()

    def test_creation_optional_argument(self):
        """help_text from the OPTIONS payload shows up in `create --help`."""
        page = OptionsPage.from_json(
            {
                'actions': {
                    'POST': {
                        'first_name': {
                            'type': 'string',
                            'help_text': 'Please specify your first name',
                        }
                    },
                }
            }
        )
        options = ResourceOptionsParser(None, page, 'users', self.parser)
        options.build_query_arguments('create', 'POST')
        assert 'create' in self.parser.choices

        out = StringIO()
        self.parser.choices['create'].print_help(out)
        assert '--first_name TEXT  Please specify your first name' in out.getvalue()

    def test_creation_required_argument(self):
        """Required fields are rendered with their help text as well."""
        page = OptionsPage.from_json(
            {
                'actions': {
                    'POST': {'username': {'type': 'string', 'help_text': 'Please specify a username', 'required': True}},
                }
            }
        )
        options = ResourceOptionsParser(None, page, 'users', self.parser)
        options.build_query_arguments('create', 'POST')
        assert 'create' in self.parser.choices

        out = StringIO()
        self.parser.choices['create'].print_help(out)
        # BUGFIX: the assertion previously evaluated a bare non-empty string
        # literal (always truthy, so the test could never fail); actually
        # inspect the rendered help output.
        assert '--username TEXT' in out.getvalue()
        assert 'Please specify a username' in out.getvalue()

    def test_integer_argument(self):
        """integer-typed fields render an INTEGER metavar."""
        page = OptionsPage.from_json(
            {
                'actions': {
                    'POST': {'max_hosts': {'type': 'integer'}},
                }
            }
        )
        options = ResourceOptionsParser(None, page, 'organizations', self.parser)
        options.build_query_arguments('create', 'POST')
        assert 'create' in self.parser.choices

        out = StringIO()
        self.parser.choices['create'].print_help(out)
        assert '--max_hosts INTEGER' in out.getvalue()

    def test_boolean_argument(self):
        """boolean-typed fields render a BOOLEAN metavar."""
        page = OptionsPage.from_json(
            {
                'actions': {
                    'POST': {'diff_mode': {'type': 'boolean'}},
                }
            }
        )
        options = ResourceOptionsParser(None, page, 'users', self.parser)
        options.build_query_arguments('create', 'POST')
        assert 'create' in self.parser.choices

        out = StringIO()
        self.parser.choices['create'].print_help(out)
        assert '--diff_mode BOOLEAN' in out.getvalue()

    def test_choices(self):
        """fields with choices render an argparse choices set."""
        page = OptionsPage.from_json(
            {
                'actions': {
                    'POST': {
                        'verbosity': {
                            'type': 'integer',
                            'choices': [
                                (0, '0 (Normal)'),
                                (1, '1 (Verbose)'),
                                (2, '2 (More Verbose)'),
                                (3, '3 (Debug)'),
                                (4, '4 (Connection Debug)'),
                                (5, '5 (WinRM Debug)'),
                            ],
                        }
                    },
                }
            }
        )
        options = ResourceOptionsParser(None, page, 'users', self.parser)
        options.build_query_arguments('create', 'POST')
        assert 'create' in self.parser.choices

        out = StringIO()
        self.parser.choices['create'].print_help(out)
        assert '--verbosity {0,1,2,3,4,5}' in out.getvalue()

    def test_actions_with_primary_key(self):
        """get/modify/delete subcommands take a positional `id` argument."""
        page = OptionsPage.from_json({'actions': {'GET': {}, 'POST': {}}})
        ResourceOptionsParser(None, page, 'jobs', self.parser)

        for method in ('get', 'modify', 'delete'):
            assert method in self.parser.choices

            out = StringIO()
            self.parser.choices[method].print_help(out)
            assert 'positional arguments:\n  id' in out.getvalue()


class TestSettingsOptions(unittest.TestCase):
    """The /settings/all/ endpoint gets special key/value subcommands."""

    def setUp(self):
        # fresh subparsers object per test
        _parser = argparse.ArgumentParser()
        self.parser = _parser.add_subparsers(help='action')

    def test_list(self):
        """settings expose `list` plus a key/value style `modify` command."""
        page = OptionsPage.from_json(
            {
                'actions': {
                    'GET': {},
                    'POST': {},
                    'PUT': {},
                }
            }
        )
        page.endpoint = '/settings/all/'
        ResourceOptionsParser(None, page, 'settings', self.parser)
        assert 'list' in self.parser.choices
        assert 'modify' in self.parser.choices

        out = StringIO()
        self.parser.choices['modify'].print_help(out)
        assert 'modify [-h] key value' in out.getvalue()
07070100000072000081A400000000000000000000000166846B9200000000000000000000000000000000000000000000001B00000000awx-24.6.1/test/pytest.ini07070100000073000081A400000000000000000000000166846B92000007E6000000000000000000000000000000000000002400000000awx-24.6.1/test/test_credentials.pyimport pytest


from awxkit.api.pages import credentials
from awxkit.utils import PseudoNamespace


def set_config_cred_to_desired(config, location):
    """Walk the dotted *location* path under ``config.credentials``,
    creating intermediate PseudoNamespace nodes as needed, and set the
    leaf attribute to the sentinel string ``'desired'``."""
    *parents, leaf = location.split('.')
    node = config.credentials
    for name in parents:
        setattr(node, name, PseudoNamespace())
        node = node[name]
    setattr(node, leaf, 'desired')


class MockCredentialType(object):
    """Minimal stand-in for a credential-type API object: carries only the
    three attributes the code under test inspects."""

    def __init__(self, name, kind, managed=True):
        self.name, self.kind, self.managed = name, kind, managed


@pytest.mark.parametrize(
    'field, kind, config_cred, desired_field, desired_value',
    [
        ('field', 'ssh', PseudoNamespace(field=123), 'field', 123),
        ('subscription', 'azure', PseudoNamespace(subscription_id=123), 'subscription', 123),
        ('project_id', 'gce', PseudoNamespace(project=123), 'project', 123),
        ('authorize_password', 'net', PseudoNamespace(authorize=123), 'authorize_password', 123),
    ],
)
def test_get_payload_field_and_value_from_config_cred(field, kind, config_cred, desired_field, desired_value):
    """Field values stored on a config credential are resolved, including
    kind-specific aliases (e.g. azure subscription_id -> subscription)."""
    ret_field, ret_val = credentials.get_payload_field_and_value_from_kwargs_or_config_cred(field, kind, {}, config_cred)
    assert ret_field == desired_field
    assert ret_val == desired_value


@pytest.mark.parametrize(
    'field, kind, kwargs, desired_field, desired_value',
    [
        ('field', 'ssh', dict(field=123), 'field', 123),
        ('subscription', 'azure', dict(subscription=123), 'subscription', 123),
        ('project_id', 'gce', dict(project_id=123), 'project', 123),
        ('authorize_password', 'net', dict(authorize_password=123), 'authorize_password', 123),
    ],
)
def test_get_payload_field_and_value_from_kwarg(field, kind, kwargs, desired_field, desired_value):
    """Field values supplied as kwargs win over an (empty) config credential
    and are likewise subject to kind-specific field aliasing."""
    ret_field, ret_val = credentials.get_payload_field_and_value_from_kwargs_or_config_cred(field, kind, kwargs, PseudoNamespace())
    assert ret_field == desired_field
    assert ret_val == desired_value
07070100000074000081A400000000000000000000000166846B92000055B7000000000000000000000000000000000000002C00000000awx-24.6.1/test/test_dependency_resolver.pyimport pytest

from awxkit.utils import filter_by_class
from awxkit.utils.toposort import CircularDependencyError
from awxkit.api.mixins import has_create


class MockHasCreate(has_create.HasCreate):
    """HasCreate stand-in that records teardown via `cleaned` instead of
    talking to an API."""

    connection = None  # the real HasCreate carries an API connection

    def __str__(self):
        return "instance of {0.__class__.__name__} ({1})".format(self, hex(id(self)))

    def __init__(self, *a, **kw):
        self.cleaned = False
        super(MockHasCreate, self).__init__()

    def silent_cleanup(self):
        # invoked during teardown; flag it so tests can assert cleanup ran
        self.cleaned = True


class A(MockHasCreate):
    # leaf resource: no dependencies
    def create(self, **kw):
        return self


class B(MockHasCreate):
    # may optionally be backed by an A
    optional_dependencies = [A]

    def create(self, a=None, **kw):
        # filter_by_class pairs the provided value with its expected class
        self.create_and_update_dependencies(*filter_by_class((a, A)))
        return self


class C(MockHasCreate):
    # hard dependencies on both A and B
    dependencies = [A, B]

    def create(self, a=A, b=B, **kw):
        # passes b then a; argument order differs from the dependencies list
        self.create_and_update_dependencies(b, a)
        return self


class D(MockHasCreate):
    # requires A; B is only optional
    dependencies = [A]
    optional_dependencies = [B]

    def create(self, a=A, b=None, **kw):
        self.create_and_update_dependencies(*filter_by_class((a, A), (b, B)))
        return self


class E(MockHasCreate):
    # requires the D and C subtrees
    dependencies = [D, C]

    def create(self, c=C, d=D, **kw):
        self.create_and_update_dependencies(d, c)
        return self


class F(MockHasCreate):
    # requires B; E is only optional
    dependencies = [B]
    optional_dependencies = [E]

    def create(self, b=B, e=None, **kw):
        self.create_and_update_dependencies(*filter_by_class((b, B), (e, E)))
        return self


class G(MockHasCreate):
    # requires D; F and E are optional
    dependencies = [D]
    optional_dependencies = [F, E]

    def create(self, d=D, f=None, e=None, **kw):
        self.create_and_update_dependencies(*filter_by_class((d, D), (f, F), (e, E)))
        return self


class H(MockHasCreate):
    # everything is optional here
    optional_dependencies = [E, A]

    def create(self, a=None, e=None, **kw):
        self.create_and_update_dependencies(*filter_by_class((a, A), (e, E)))
        return self


class MultipleWordClassName(MockHasCreate):
    # exercises snake_case attribute naming for multi-word class names
    def create(self, **kw):
        return self


class AnotherMultipleWordClassName(MockHasCreate):
    optional_dependencies = [MultipleWordClassName]

    def create(self, multiple_word_class_name=None, **kw):
        # the optional dep is addressed by the snake_cased class name
        self.create_and_update_dependencies(*filter_by_class((multiple_word_class_name, MultipleWordClassName)))
        return self


def test_dependency_graph_single_page():
    """confirms that `dependency_graph(Base)` will return a dependency graph
    consisting of only dependencies and dependencies of dependencies (if any)
    """
    desired = {}
    desired[G] = set([D])
    desired[D] = set([A])
    desired[A] = set()
    assert has_create.dependency_graph(G) == desired


def test_dependency_graph_page_with_optional():
    """confirms that `dependency_graph(Base, OptionalBase)` will return a dependency
    graph consisting of only dependencies and dependencies of dependencies (if any)
    with the exception that the OptionalBase and its dependencies are included as well.
    """
    desired = {}
    desired[G] = set([D])
    desired[E] = set([D, C])
    desired[C] = set([A, B])
    desired[D] = set([A])
    desired[B] = set()
    desired[A] = set()
    assert has_create.dependency_graph(G, E) == desired


def test_dependency_graph_page_with_additionals():
    """confirms that `dependency_graph(Base, AdditionalBaseOne, AdditionalBaseTwo)`
    will return a dependency graph consisting of only dependencies and dependencies
    of dependencies (if any) with the exception that the AdditionalBases
    are treated as a dependencies of Base (when they aren't) and their dependencies
    are included as well.
    """
    desired = {}
    desired[E] = set([D, C])
    desired[D] = set([A])
    desired[C] = set([A, B])
    desired[F] = set([B])
    desired[G] = set([D])
    desired[A] = set()
    desired[B] = set()
    assert has_create.dependency_graph(E, F, G) == desired


def test_optional_dependency_graph_single_page():
    """confirms that has_create._optional_dependency_graph(Base) returns a complete dependency tree
    including all optional_dependencies
    """
    desired = {}
    desired[H] = set([E, A])
    desired[E] = set([D, C])
    desired[D] = set([A, B])
    desired[C] = set([A, B])
    desired[B] = set([A])
    desired[A] = set()
    assert has_create.optional_dependency_graph(H) == desired


def test_optional_dependency_graph_with_additional():
    """confirms that has_create._optional_dependency_graph(Base) returns a complete dependency tree
    including all optional_dependencies with the AdditionalBases treated as a dependencies
    of Base (when they aren't) and their dependencies and optional_dependencies included as well.
    """
    desired = {}
    desired[F] = set([B, E])
    desired[H] = set([E, A])
    desired[E] = set([D, C])
    desired[D] = set([A, B])
    desired[C] = set([A, B])
    desired[B] = set([A])
    desired[A] = set()
    assert has_create.optional_dependency_graph(F, H, A) == desired


def test_creation_order():
    """confirms that `has_create.creation_order()` returns a valid creation order in the desired list of sets format"""
    dependency_graph = dict(
        eight=set(['seven', 'six']),
        seven=set(['five']),
        six=set(),
        five=set(['two', 'one']),
        four=set(['one']),
        three=set(['two']),
        two=set(['one']),
        one=set(),
    )
    desired = [set(['one', 'six']), set(['two', 'four']), set(['three', 'five']), set(['seven']), set(['eight'])]
    assert has_create.creation_order(dependency_graph) == desired


def test_creation_order_with_loop():
    """confirms that `has_create.creation_order()` raises toposort.CircularDependencyError when evaluating
    a cyclic dependency graph
    """
    dependency_graph = dict(
        eight=set(['seven', 'six']),
        seven=set(['five']),
        six=set(),
        five=set(['two', 'one']),
        four=set(['one']),
        three=set(['two']),
        two=set(['one']),
        one=set(['eight']),
    )
    with pytest.raises(CircularDependencyError):
        assert has_create.creation_order(dependency_graph)


class One(MockHasCreate):
    # leaf node of the One <- Two <- ... fixture chain
    pass


class Two(MockHasCreate):
    # depends on One
    dependencies = [One]


class Three(MockHasCreate):
    # depends on both Two and (directly) One
    dependencies = [Two, One]


class Four(MockHasCreate):
    # Two is only an optional dependency here
    optional_dependencies = [Two]


class Five(MockHasCreate):
    # requires Two; One only optionally
    dependencies = [Two]
    optional_dependencies = [One]


class IsntAHasCreate(object):
    # deliberately NOT a HasCreate; used to test non-HasCreate dependencies
    pass


class Six(MockHasCreate, IsntAHasCreate):
    # a HasCreate that also inherits from a non-HasCreate class
    dependencies = [Two]


class Seven(MockHasCreate):
    # depends on a class that is not itself a HasCreate
    dependencies = [IsntAHasCreate]


def test_separate_async_optionals_none_exist():
    """confirms that when creation group classes have no async optional dependencies the order is unchanged"""
    order = has_create.creation_order(has_create.optional_dependency_graph(Three, Two, One))
    assert has_create.separate_async_optionals(order) == order


def test_separate_async_optionals_two_exist():
    """confirms that when two creation group classes have async dependencies
    the class that has shared item as a dependency occurs first in a separate creation group
    """
    order = has_create.creation_order(has_create.optional_dependency_graph(Four, Three, Two))
    assert has_create.separate_async_optionals(order) == [set([One]), set([Two]), set([Three]), set([Four])]


def test_separate_async_optionals_three_exist():
    """confirms that when three creation group classes have async dependencies
    the class that has shared item as a dependency occurs first in a separate creation group
    """
    order = has_create.creation_order(has_create.optional_dependency_graph(Five, Four, Three))
    assert has_create.separate_async_optionals(order) == [set([One]), set([Two]), set([Three]), set([Five]), set([Four])]


def test_separate_async_optionals_not_has_create():
    """confirms that when a dependency isn't a HasCreate has_create.separate_aysnc_optionals doesn't
    unnecessarily move it from the initial creation group
    """
    order = has_create.creation_order(has_create.optional_dependency_graph(Seven, Six))
    assert has_create.separate_async_optionals(order) == [set([One, IsntAHasCreate]), set([Two, Seven]), set([Six])]


def test_page_creation_order_single_page():
    """confirms that `has_create.page_creation_order()` returns a valid creation order"""
    desired = [set([A]), set([D]), set([G])]
    assert has_create.page_creation_order(G) == desired


def test_page_creation_order_optionals_provided():
    """confirms that `has_create.page_creation_order()` returns a valid creation order
    when optional_dependencies are included
    """
    desired = [set([A]), set([B]), set([C]), set([D]), set([E]), set([H])]
    assert has_create.page_creation_order(H, A, E) == desired


def test_page_creation_order_additionals_provided():
    """confirms that `has_create.page_creation_order()` returns a valid creation order
    when additional pages are included
    """
    desired = [set([A]), set([B]), set([D]), set([F, H]), set([G])]
    assert has_create.page_creation_order(F, H, G) == desired


def test_all_instantiated_dependencies_single_page():
    f = F().create()
    b = f._dependency_store[B]
    desired = set([b, f])
    assert set(has_create.all_instantiated_dependencies(f, A, B, C, D, E, F, G, H)) == desired


def test_all_instantiated_dependencies_single_page_are_ordered():
    f = F().create()
    b = f._dependency_store[B]
    desired = [b, f]
    assert has_create.all_instantiated_dependencies(f, A, B, C, D, E, F, G, H) == desired


def test_all_instantiated_dependencies_optionals():
    a = A().create()
    b = B().create(a=a)
    c = C().create(a=a, b=b)
    d = D().create(a=a, b=b)
    e = E().create(c=c, d=d)
    h = H().create(a=a, e=e)
    desired = set([a, b, c, d, e, h])
    assert set(has_create.all_instantiated_dependencies(h, A, B, C, D, E, F, G, H)) == desired


def test_all_instantiated_dependencies_optionals_are_ordered():
    a = A().create()
    b = B().create(a=a)
    c = C().create(a=a, b=b)
    d = D().create(a=a, b=b)
    e = E().create(c=c, d=d)
    h = H().create(a=a, e=e)
    desired = [a, b, c, d, e, h]
    assert has_create.all_instantiated_dependencies(h, A, B, C, D, E, F, G, H) == desired


def test_dependency_resolution_complete():
    """Creating H with both optionals should build the whole graph: every
    node's dependency store is populated, and the shared A and B instances
    are reused across the graph rather than duplicated."""
    h = H().create(a=True, e=True)
    a = h._dependency_store[A]
    e = h._dependency_store[E]
    c = e._dependency_store[C]
    d = e._dependency_store[D]
    b = c._dependency_store[B]

    for item in (h, a, e, d, c, b):
        if item._dependency_store:
            assert all(item._dependency_store.values()), "{0} missing dependency: {0._dependency_store}".format(item)

    assert a == b._dependency_store[A], "Duplicate dependency detected"
    assert a == c._dependency_store[A], "Duplicate dependency detected"
    assert a == d._dependency_store[A], "Duplicate dependency detected"
    assert b == c._dependency_store[B], "Duplicate dependency detected"
    assert b == d._dependency_store[B], "Duplicate dependency detected"


def test_ds_mapping():
    h = H().create(a=True, e=True)
    a = h._dependency_store[A]
    e = h._dependency_store[E]
    c = e._dependency_store[C]
    d = e._dependency_store[D]
    b = c._dependency_store[B]

    assert a == h.ds.a
    assert e == h.ds.e
    assert c == e.ds.c
    assert d == e.ds.d
    assert b == c.ds.b


def test_ds_multiple_word_class_and_attribute_name():
    amwcn = AnotherMultipleWordClassName().create(multiple_word_class_name=True)
    mwcn = amwcn._dependency_store[MultipleWordClassName]
    assert amwcn.ds.multiple_word_class_name == mwcn


def test_ds_missing_dependency():
    a = A().create()

    with pytest.raises(AttributeError):
        a.ds.b


def test_teardown_calls_silent_cleanup():
    """teardown() on the root should invoke silent_cleanup() (tracked via
    the `cleaned` flag) on every instance in the dependency graph."""
    g = G().create(f=True, e=True)
    f = g._dependency_store[F]
    e = g._dependency_store[E]
    b = f._dependency_store[B]
    d = e._dependency_store[D]
    c = e._dependency_store[C]
    a = c._dependency_store[A]
    instances = [g, f, e, b, d, c, a]

    for instance in instances:
        assert not instance.cleaned

    g.teardown()
    for instance in instances:
        assert instance.cleaned


def test_teardown_dependency_store_cleared():
    g = G().create(f=True, e=True)
    f = g._dependency_store[F]
    e = g._dependency_store[E]
    c = e._dependency_store[C]

    g.teardown()

    assert not g._dependency_store[F]
    assert not g._dependency_store[E]
    assert not f._dependency_store[B]
    assert not e._dependency_store[D]
    assert not e._dependency_store[C]
    assert not c._dependency_store[A]


def test_idempotent_teardown_dependency_store_cleared():
    g = G().create(f=True, e=True)
    f = g._dependency_store[F]
    e = g._dependency_store[E]
    b = f._dependency_store[B]
    d = e._dependency_store[D]
    c = e._dependency_store[C]
    a = c._dependency_store[A]

    for item in (g, f, e, b, d, c, a):
        item.teardown()
        item.teardown()

    assert not g._dependency_store[F]
    assert not g._dependency_store[E]
    assert not f._dependency_store[B]
    assert not e._dependency_store[D]
    assert not e._dependency_store[C]
    assert not c._dependency_store[A]


def test_teardown_ds_cleared():
    g = G().create(f=True, e=True)
    f = g._dependency_store[F]
    e = g._dependency_store[E]
    c = e._dependency_store[C]

    g.teardown()

    for former_dep in ('f', 'e'):
        with pytest.raises(AttributeError):
            getattr(g.ds, former_dep)

    with pytest.raises(AttributeError):
        getattr(f.ds, 'b')

    for former_dep in ('d', 'c'):
        with pytest.raises(AttributeError):
            getattr(e.ds, former_dep)

    with pytest.raises(AttributeError):
        getattr(c.ds, 'a')


class OneWithArgs(MockHasCreate):
    # records the kwargs passed to create() for later inspection
    def create(self, **kw):
        self.kw = kw
        return self


class TwoWithArgs(MockHasCreate):
    dependencies = [OneWithArgs]

    def create(self, one_with_args=OneWithArgs, **kw):
        # when the dependency is explicitly disabled (falsy), the caller may
        # still opt back in with make_one_with_args, which supplies a canned
        # (class, kwargs) tuple for the dependency
        if not one_with_args and kw.pop('make_one_with_args', False):
            one_with_args = (OneWithArgs, dict(a='a', b='b', c='c'))
        self.create_and_update_dependencies(one_with_args)
        self.kw = kw
        return self


class ThreeWithArgs(MockHasCreate):
    dependencies = [OneWithArgs]
    optional_dependencies = [TwoWithArgs]

    def create(self, one_with_args=OneWithArgs, two_with_args=None, **kw):
        # pair each provided value with its expected class for filtering
        self.create_and_update_dependencies(*filter_by_class((one_with_args, OneWithArgs), (two_with_args, TwoWithArgs)))
        self.kw = kw
        return self


class FourWithArgs(MockHasCreate):
    # both dependency subtrees share OneWithArgs transitively
    dependencies = [TwoWithArgs, ThreeWithArgs]

    def create(self, two_with_args=TwoWithArgs, three_with_args=ThreeWithArgs, **kw):
        self.create_and_update_dependencies(*filter_by_class((two_with_args, TwoWithArgs), (three_with_args, ThreeWithArgs)))
        self.kw = kw
        return self


def test_single_kwargs_class_in_create_and_update_dependencies():
    two_wa = TwoWithArgs().create(one_with_args=False, make_one_with_args=True, two_with_args_kw_arg=123)
    assert isinstance(two_wa.ds.one_with_args, OneWithArgs)
    assert two_wa.ds.one_with_args.kw == dict(a='a', b='b', c='c')
    assert two_wa.kw == dict(two_with_args_kw_arg=123)


def test_no_tuple_for_class_arg_causes_shared_dependencies_staggered():
    three_wo = ThreeWithArgs().create(two_with_args=True)
    assert isinstance(three_wo.ds.one_with_args, OneWithArgs)
    assert isinstance(three_wo.ds.two_with_args, TwoWithArgs)
    assert isinstance(three_wo.ds.two_with_args.ds.one_with_args, OneWithArgs)
    assert three_wo.ds.one_with_args == three_wo.ds.two_with_args.ds.one_with_args


def test_no_tuple_for_class_arg_causes_shared_dependencies_nested_staggering():
    """Default creation shares dependency instances across nested dependency chains."""
    ds = FourWithArgs().create().ds
    assert isinstance(ds.two_with_args, TwoWithArgs)
    assert isinstance(ds.three_with_args, ThreeWithArgs)
    assert isinstance(ds.two_with_args.ds.one_with_args, OneWithArgs)
    assert isinstance(ds.three_with_args.ds.one_with_args, OneWithArgs)
    assert isinstance(ds.three_with_args.ds.two_with_args, TwoWithArgs)
    assert ds.two_with_args.ds.one_with_args == ds.three_with_args.ds.one_with_args
    assert ds.two_with_args == ds.three_with_args.ds.two_with_args


def test_tuple_for_class_arg_causes_unshared_dependencies_when_downstream():
    """Confirms that a provided arg-tuple for a dependency type is applied instead of the chained dependency."""
    holder = ThreeWithArgs().create(
        two_with_args=(TwoWithArgs, {'one_with_args': False, 'make_one_with_args': True, 'two_with_args_kw_arg': 234}),
        three_with_args_kw_arg=345,
    )
    ds = holder.ds
    assert isinstance(ds.one_with_args, OneWithArgs)
    assert isinstance(ds.two_with_args, TwoWithArgs)
    assert isinstance(ds.two_with_args.ds.one_with_args, OneWithArgs)
    assert ds.one_with_args != ds.two_with_args.ds.one_with_args
    assert ds.one_with_args.kw == {}
    assert ds.two_with_args.kw == {'two_with_args_kw_arg': 234}
    assert ds.two_with_args.ds.one_with_args.kw == {'a': 'a', 'b': 'b', 'c': 'c'}
    assert holder.kw == {'three_with_args_kw_arg': 345}


def test_tuples_for_class_arg_cause_unshared_dependencies_when_downstream():
    """Confirms that provided arg-tuples for dependency types are applied instead of chained dependencies."""
    holder = FourWithArgs().create(
        two_with_args=(TwoWithArgs, {'one_with_args': False, 'make_one_with_args': True, 'two_with_args_kw_arg': 456}),
        # deliberately shares nothing with holder.ds.two_with_args
        three_with_args=(ThreeWithArgs, {'one_with_args': (OneWithArgs, {}), 'two_with_args': False}),
        four_with_args_kw=567,
    )
    ds = holder.ds
    assert isinstance(ds.two_with_args, TwoWithArgs)
    assert isinstance(ds.three_with_args, ThreeWithArgs)
    assert isinstance(ds.two_with_args.ds.one_with_args, OneWithArgs)
    assert isinstance(ds.three_with_args.ds.one_with_args, OneWithArgs)
    assert ds.three_with_args.ds.one_with_args != ds.two_with_args.ds.one_with_args
    with pytest.raises(AttributeError):
        ds.three_with_args.ds.two_with_args
    assert holder.kw == {'four_with_args_kw': 567}


class NotHasCreate(object):
    """Marker type that deliberately lacks the HasCreate machinery."""


class MixinUserA(MockHasCreate, NotHasCreate):
    """HasCreate/NotHasCreate mixin whose create() is a no-op returning self."""

    def create(self, **_kwargs):
        return self


class MixinUserB(MockHasCreate, NotHasCreate):
    """Second HasCreate/NotHasCreate mixin with a no-op create()."""

    def create(self, **_kwargs):
        return self


class MixinUserC(MixinUserB):
    """Subclass of MixinUserB used to exercise parent-type dependency storage."""

    def create(self, **_kwargs):
        return self


class MixinUserD(MixinUserC):
    """Grandchild of MixinUserB used to exercise deep subclass dependency storage."""

    def create(self, **_kwargs):
        return self


class NotHasCreateDependencyHolder(MockHasCreate):
    """Declares a non-HasCreate dependency that defaults to MixinUserA."""

    dependencies = [NotHasCreate]

    def create(self, not_has_create=MixinUserA):
        """Register the chosen dependency and return self."""
        self.create_and_update_dependencies(not_has_create)
        return self


def test_not_has_create_default_dependency():
    """HasCreates that claim non-HasCreate dependencies store them in
    _dependency_store under the kwarg-style name of the declared class.
    """
    holder = NotHasCreateDependencyHolder().create()
    assert isinstance(holder.ds.not_has_create, MixinUserA)


def test_not_has_create_passed_dependency():
    """A pre-built non-HasCreate subclass instance is sourced as the dependency."""
    prebuilt = MixinUserB().create()
    assert isinstance(prebuilt, MixinUserB)
    holder = NotHasCreateDependencyHolder().create(not_has_create=prebuilt)
    assert holder.ds.not_has_create == prebuilt


class HasCreateParentDependencyHolder(MockHasCreate):
    """Declares a MixinUserB dependency but defaults to the MixinUserC subclass."""

    dependencies = [MixinUserB]

    def create(self, mixin_user_b=MixinUserC):
        """Register the (possibly subclassed) dependency and return self."""
        self.create_and_update_dependencies(mixin_user_b)
        return self


def test_has_create_stored_as_parent_dependency():
    """A HasCreate subclass instance is stored under its declared parent type."""
    child = MixinUserC().create()
    assert isinstance(child, MixinUserC)
    assert isinstance(child, MixinUserB)
    holder = HasCreateParentDependencyHolder().create(mixin_user_b=child)
    assert holder.ds.mixin_user_b == child


class DynamicallyDeclaresNotHasCreateDependency(MockHasCreate):
    """Resolves its non-HasCreate dependency class from a string key at create() time."""

    dependencies = [NotHasCreate]

    def create(self, not_has_create=MixinUserA):
        lookup = {'mixinusera': MixinUserA, 'mixinuserb': MixinUserB, 'mixinuserc': MixinUserC}
        self.create_and_update_dependencies(lookup[not_has_create])
        return self


@pytest.mark.parametrize('dependency,dependency_class', (('mixinusera', MixinUserA), ('mixinuserb', MixinUserB), ('mixinuserc', MixinUserC)))
def test_subclass_or_parent_dynamic_not_has_create_dependency_declaration(dependency, dependency_class):
    """Dynamically declared dependencies subclassed from NotHasCreate are linked
    under the declared parent's kwarg name.
    """
    holder = DynamicallyDeclaresNotHasCreateDependency().create(dependency)
    assert holder.ds.not_has_create.__class__ == dependency_class


class DynamicallyDeclaresHasCreateDependency(MockHasCreate):
    """Resolves its HasCreate dependency class from a string key at create() time."""

    dependencies = [MixinUserB]

    def create(self, mixin_user_b=MixinUserB):
        lookup = {'mixinuserb': MixinUserB, 'mixinuserc': MixinUserC, 'mixinuserd': MixinUserD}
        self.create_and_update_dependencies(lookup[mixin_user_b])
        return self


@pytest.mark.parametrize('dependency,dependency_class', [('mixinuserb', MixinUserB), ('mixinuserc', MixinUserC), ('mixinuserd', MixinUserD)])
def test_subclass_or_parent_dynamic_has_create_dependency_declaration(dependency, dependency_class):
    """Confirms that dependencies that dynamically declare dependencies subclassed from HasCreate
    are properly linked.

    (Docstring previously said "not HasCreate" — a copy/paste from the
    non-HasCreate variant; MixinUserB/C/D are MockHasCreate subclasses.)
    """
    dep_holder = DynamicallyDeclaresHasCreateDependency().create(dependency)
    assert dep_holder.ds.mixin_user_b.__class__ == dependency_class
07070100000075000081A400000000000000000000000166846B92000023D1000000000000000000000000000000000000002100000000awx-24.6.1/test/test_registry.pyimport pytest

from awxkit.api.registry import URLRegistry


class One(object):
    """First sentinel resource type registered by the registry tests."""


class Two(object):
    """Second sentinel resource type registered by the registry tests."""


@pytest.fixture
def reg():
    """Provide a fresh, empty URLRegistry for each test."""
    return URLRegistry()


def test_url_pattern(reg):
    """url_pattern() anchors the regex and tolerates a trailing query string."""
    expected = r'^/some/resources/\d+/(\?.*)*$'
    assert reg.url_pattern(r'/some/resources/\d+/').pattern == expected


def test_methodless_get_from_empty_registry(reg):
    """Looking up any path in an empty registry yields None."""
    assert reg.get('nonexistent') is None


def test_method_get_from_empty_registry(reg):
    """Method-qualified lookups in an empty registry also yield None."""
    assert reg.get('nonexistent', 'method') is None


def test_methodless_setdefault_methodless_get(reg):
    """A methodless default answers methodless lookups of any path."""
    reg.setdefault(One)
    assert reg.get('some_path') is One


def test_methodless_setdefault_method_get(reg):
    """A methodless default also answers method-qualified lookups."""
    reg.setdefault(One)
    assert reg.get('some_path', 'method') is One


def test_method_setdefault_methodless_get(reg):
    """A method-scoped default does not apply to methodless lookups."""
    reg.setdefault('method', One)
    assert reg.get('some_path') is None


def test_method_setdefault_matching_method_get(reg):
    """A method-scoped default applies when the lookup method matches."""
    reg.setdefault('method', One)
    assert reg.get('some_path', 'method') is One


def test_method_setdefault_nonmatching_method_get(reg):
    """A method-scoped default does not apply to other methods."""
    reg.setdefault('method', One)
    assert reg.get('some_path', 'nonexistent') is None


def test_multimethod_setdefault_matching_method_get(reg):
    """A default registered for several methods answers each of them."""
    reg.setdefault(('method_one', 'method_two'), One)
    assert reg.get('some_path', 'method_one') is One
    assert reg.get('some_path', 'method_two') is One


def test_multimethod_setdefault_nonmatching_method_get(reg):
    """Multi-method defaults answer neither methodless nor unrelated-method lookups."""
    reg.setdefault(('method_one', 'method_two'), One)
    assert reg.get('some_path') is None
    assert reg.get('some_path', 'nonexistent') is None


def test_wildcard_setdefault_methodless_get(reg):
    """A '.*' method default matches methodless lookups."""
    reg.setdefault('.*', One)
    assert reg.get('some_path') is One


def test_wildcard_setdefault_method_get(reg):
    """A '.*' method default matches any lookup method."""
    reg.setdefault('.*', One)
    assert reg.get('some_path', 'method') is One


def test_regex_method_setdefaults_over_wildcard_method_get(reg):
    """A more specific regex default beats the '.*' default, deterministically."""
    reg.setdefault('.*', One)
    reg.setdefault('reg.*ex', Two)
    # repeat to rule out ordering luck in the lookup
    for _ in range(1000):
        assert reg.get('some_path', 'regex') is Two


def test_methodless_registration_with_matching_path_methodless_get(reg):
    """register(path, resource) satisfies a methodless lookup of that path."""
    reg.register('some_path', One)
    assert reg.get('some_path') is One


def test_methodless_registraion_with_nonmatching_path_methodless_get(reg):
    """A registered path does not answer for other paths.

    NOTE(review): the "registraion" typo is preserved in the function name to
    keep the test id stable.
    """
    reg.register('some_path', One)
    assert reg.get('nonexistent') is None


def test_methodless_registration_with_matching_path_nonmatching_method_get(reg):
    """A methodless registration does not answer method-qualified lookups."""
    reg.register('some_path', One)
    assert reg.get('some_path', 'method') is None


def test_method_registration_with_matching_path_matching_method_get(reg):
    """A (path, method) registration answers the matching lookup."""
    reg.register('some_path', 'method', One)
    assert reg.get('some_path', 'method') is One


def test_method_registration_with_matching_path_nonmatching_method_get(reg):
    """A (path, method) registration does not answer other methods."""
    reg.register('some_path', 'method_one', One)
    assert reg.get('some_path', 'method_two') is None


def test_multimethod_registration_with_matching_path_matching_method_get(reg):
    """Registering several methods at once answers each of them."""
    reg.register('some_path', ('method_one', 'method_two'), One)
    assert reg.get('some_path', 'method_one') is One
    assert reg.get('some_path', 'method_two') is One


def test_multimethod_registration_with_path_matching_method_get(reg):
    """A multi-method registration does not answer methods outside its set."""
    reg.register('some_path', ('method_one', 'method_two'), One)
    assert reg.get('some_path', 'method_three') is None


def test_multipath_methodless_registration_with_matching_path_methodless_get(reg):
    """One methodless registration can cover several paths."""
    reg.register(('some_path_one', 'some_path_two'), One)
    assert reg.get('some_path_one') is One
    assert reg.get('some_path_two') is One


def test_multipath_methodless_registration_with_matching_path_nonmatching_method_get(reg):
    """Multi-path methodless registrations still reject method-qualified lookups."""
    reg.register(('some_path_one', 'some_path_two'), One)
    assert reg.get('some_path_one', 'method') is None
    assert reg.get('some_path_two', 'method') is None


def test_multipath_method_registration_with_matching_path_matching_method_get(reg):
    """Each (path, method) pair in a combined registration is honored."""
    reg.register((('some_path_one', 'method_one'), ('some_path_two', 'method_two')), One)
    assert reg.get('some_path_one', 'method_one') is One
    assert reg.get('some_path_two', 'method_two') is One


def test_multipath_partial_method_registration_with_matching_path_matching_method_get(reg):
    """Plain paths and (path, method) pairs can be mixed in one registration."""
    reg.register(('some_path_one', ('some_path_two', 'method')), One)
    assert reg.get('some_path_one') is One
    assert reg.get('some_path_two', 'method') is One


def test_wildcard_method_registration_with_methodless_get(reg):
    """A '.*' method registration answers methodless lookups of its path."""
    reg.register('some_path', '.*', One)
    assert reg.get('some_path') is One


def test_wildcard_method_registration_with_method_get(reg):
    """A '.*' method registration answers any method for its path."""
    reg.register('some_path', '.*', One)
    assert reg.get('some_path', 'method') is One


def test_wildcard_and_specific_method_registration_acts_as_default(reg):
    """'.*' acts as the fallback while specific methods keep their own resource."""
    reg.register('some_path', 'method_one', Two)
    reg.register('some_path', '.*', One)
    reg.register('some_path', 'method_two', Two)
    # repeat to eliminate overt randomness in lookup ordering
    for _ in range(1000):
        assert reg.get('some_path', 'nonexistent') is One
        assert reg.get('some_path', 'method_one') is Two
        assert reg.get('some_path', 'method_two') is Two


@pytest.mark.parametrize('method', ('method', '.*'))
def test_multiple_method_registrations_disallowed_for_single_path_single_registration(reg, method):
    """Duplicate (path, method) pairs inside one register() call raise TypeError."""
    with pytest.raises(TypeError) as excinfo:
        reg.register((('some_path', method), ('some_path', method)), One)
    expected = '"{0.pattern}" already has registered method "{1}"'.format(reg.url_pattern('some_path'), method)
    assert str(excinfo.value) == expected


@pytest.mark.parametrize('method', ('method', '.*'))
def test_multiple_method_registrations_disallowed_for_single_path_multiple_registrations(reg, method):
    """Re-registering an existing (path, method) pair raises TypeError."""
    reg.register('some_path', method, One)
    with pytest.raises(TypeError) as excinfo:
        reg.register('some_path', method, One)
    expected = '"{0.pattern}" already has registered method "{1}"'.format(reg.url_pattern('some_path'), method)
    assert str(excinfo.value) == expected


def test_paths_can_be_patterns(reg):
    """Registered paths are treated as regular expressions, not literals."""
    reg.register('.*pattern.*', One)
    assert reg.get('XYZpattern123') is One


def test_mixed_form_single_registration(reg):
    """One register() call may mix bare paths, (path, method) pairs, and (path, methods) tuples."""
    spec = [
        ('some_path_one', 'method_one'),
        'some_path_two',
        ('some_path_three', ('method_two', 'method_three')),
        'some_path_four',
        'some_path_five',
    ]
    reg.register(spec, One)
    assert reg.get('some_path_one', 'method_one') is One
    assert reg.get('some_path_one') is None
    assert reg.get('some_path_one', 'nonexistent') is None
    assert reg.get('some_path_two') is One
    assert reg.get('some_path_two', 'nonexistent') is None
    assert reg.get('some_path_three', 'method_two') is One
    assert reg.get('some_path_three', 'method_three') is One
    assert reg.get('some_path_three') is None
    assert reg.get('some_path_three', 'nonexistent') is None
    assert reg.get('some_path_four') is One
    assert reg.get('some_path_four', 'nonexistent') is None
    assert reg.get('some_path_five') is One
    assert reg.get('some_path_five', 'nonexistent') is None


def test_mixed_form_single_registration_with_methodless_default(reg):
    """Mixed-form registrations override a methodless default only for their own (path, method) pairs."""
    reg.setdefault(One)
    spec = [
        ('some_path_one', 'method_one'),
        'some_path_two',
        ('some_path_three', ('method_two', 'method_three')),
        'some_path_four',
        'some_path_five',
    ]
    reg.register(spec, Two)
    assert reg.get('some_path_one', 'method_one') is Two
    assert reg.get('some_path_one') is One
    assert reg.get('some_path_one', 'nonexistent') is One
    assert reg.get('some_path_two') is Two
    assert reg.get('some_path_two', 'nonexistent') is One
    assert reg.get('some_path_three', 'method_two') is Two
    assert reg.get('some_path_three', 'method_three') is Two
    assert reg.get('some_path_three') is One
    assert reg.get('some_path_three', 'nonexistent') is One
    assert reg.get('some_path_four') is Two
    assert reg.get('some_path_four', 'nonexistent') is One
    assert reg.get('some_path_five') is Two
    assert reg.get('some_path_five', 'nonexistent') is One


def test_mixed_form_single_registration_with_method_default(reg):
    """A method-scoped default only backfills lookups using that exact method."""
    reg.setdefault('existent', One)
    spec = [
        ('some_path_one', 'method_one'),
        'some_path_two',
        ('some_path_three', ('method_two', 'method_three')),
        'some_path_four',
        'some_path_five',
    ]
    reg.register(spec, Two)
    assert reg.get('some_path_one', 'method_one') is Two
    assert reg.get('some_path_one') is None
    assert reg.get('some_path_one', 'existent') is One
    assert reg.get('some_path_one', 'nonexistent') is None
    assert reg.get('some_path_two') is Two
    assert reg.get('some_path_two', 'existent') is One
    assert reg.get('some_path_two', 'nonexistent') is None
    assert reg.get('some_path_three', 'method_two') is Two
    assert reg.get('some_path_three', 'method_three') is Two
    assert reg.get('some_path_three') is None
    assert reg.get('some_path_three', 'existent') is One
    assert reg.get('some_path_three', 'nonexistent') is None
    assert reg.get('some_path_four') is Two
    assert reg.get('some_path_four', 'existent') is One
    assert reg.get('some_path_four', 'nonexistent') is None
    assert reg.get('some_path_five') is Two
    assert reg.get('some_path_five', 'existent') is One
    assert reg.get('some_path_five', 'nonexistent') is None
07070100000076000081A400000000000000000000000166846B9200003461000000000000000000000000000000000000001E00000000awx-24.6.1/test/test_utils.py# -*- coding: utf-8 -*-
from datetime import datetime
import sys

from unittest import mock
import pytest

from awxkit import utils
from awxkit import exceptions as exc


@pytest.mark.parametrize(
    'inp, out',
    (
        (True, True),
        (False, False),
        (1, True),
        (0, False),
        (1.0, True),
        (0.0, False),
        ('TrUe', True),
        ('FalSe', False),
        ('yEs', True),
        ('No', False),
        ('oN', True),
        ('oFf', False),
        ('asdf', True),
        ('0', False),
        ('', False),
        ({1: 1}, True),
        ({}, False),
        ((0,), True),
        ((), False),
        ([1], True),
        ([], False),
    ),
)
def test_to_bool(inp, out):
    """to_bool() honors boolean-ish strings (case-insensitive) and falls back to truthiness."""
    assert utils.to_bool(inp) == out


@pytest.mark.parametrize(
    'inp, out',
    (
        ("{}", {}),
        ("{'null': null}", {"null": None}),
        ("{'bool': true}", {"bool": True}),
        ("{'bool': false}", {"bool": False}),
        ("{'int': 0}", {"int": 0}),
        ("{'float': 1.0}", {"float": 1.0}),
        ("{'str': 'abc'}", {"str": "abc"}),
        ("{'obj': {}}", {"obj": {}}),
        ("{'list': []}", {"list": []}),
        ("---", None),
        ("---\n'null': null", {'null': None}),
        ("---\n'bool': true", {'bool': True}),
        ("---\n'bool': false", {'bool': False}),
        ("---\n'int': 0", {'int': 0}),
        ("---\n'float': 1.0", {'float': 1.0}),
        ("---\n'string': 'abc'", {'string': 'abc'}),
        ("---\n'obj': {}", {'obj': {}}),
        ("---\n'list': []", {'list': []}),
        ("", None),
        ("'null': null", {'null': None}),
        ("'bool': true", {'bool': True}),
        ("'bool': false", {'bool': False}),
        ("'int': 0", {'int': 0}),
        ("'float': 1.0", {'float': 1.0}),
        ("'string': 'abc'", {'string': 'abc'}),
        ("'obj': {}", {'obj': {}}),
        ("'list': []", {'list': []}),
    ),
)
def test_load_valid_json_or_yaml(inp, out):
    """JSON-ish, document-marked YAML, and bare YAML inputs all parse to the same structures."""
    assert utils.load_json_or_yaml(inp) == out


@pytest.mark.parametrize('inp', (True, False, 0, 1.0, {}, [], None))
def test_load_invalid_json_or_yaml(inp):
    """Non-string input is rejected with TypeError."""
    with pytest.raises(TypeError):
        utils.load_json_or_yaml(inp)


@pytest.mark.parametrize('non_ascii', (True, False))
@pytest.mark.skipif(sys.version_info < (3, 6), reason='this is only intended to be used in py3, not the CLI')
def test_random_titles_are_unicode(non_ascii):
    """random_title() always returns a str, regardless of the non_ascii flag."""
    title = utils.random_title(non_ascii=non_ascii)
    assert isinstance(title, str)


@pytest.mark.parametrize('non_ascii', (True, False))
@pytest.mark.skipif(sys.version_info < (3, 6), reason='this is only intended to be used in py3, not the CLI')
def test_random_titles_generates_correct_characters(non_ascii):
    """non_ascii titles must contain non-ASCII characters; ASCII titles must not."""
    title = utils.random_title(non_ascii=non_ascii)
    if non_ascii:
        with pytest.raises(UnicodeEncodeError):
            title.encode('ascii')
    else:
        title.encode('ascii')
    # every title must be valid UTF-8 either way
    title.encode('utf-8')


@pytest.mark.parametrize(
    'inp, out',
    (
        ('ClassNameShouldChange', 'class_name_should_change'),
        ('classnameshouldntchange', 'classnameshouldntchange'),
        ('Classspacingshouldntchange', 'classspacingshouldntchange'),
        ('Class1Name2Should3Change', 'class_1_name_2_should_3_change'),
        ('Class123name234should345change456', 'class_123_name_234_should_345_change_456'),
    ),
)
def test_class_name_to_kw_arg(inp, out):
    """CamelCase class names (and digit runs) are converted to snake_case kwarg names."""
    assert utils.class_name_to_kw_arg(inp) == out


@pytest.mark.parametrize(
    'first, second, expected',
    (
        ('/api/v2/resources/', '/api/v2/resources/', True),
        ('/api/v2/resources/', '/api/v2/resources/?test=ignored', True),
        ('/api/v2/resources/?one=ignored', '/api/v2/resources/?two=ignored', True),
        ('http://one.com', 'http://one.com', True),
        ('http://one.com', 'http://www.one.com', True),
        ('http://one.com', 'http://one.com?test=ignored', True),
        ('http://one.com', 'http://www.one.com?test=ignored', True),
        ('http://one.com', 'https://one.com', False),
        ('http://one.com', 'https://one.com?test=ignored', False),
    ),
)
def test_are_same_endpoint(first, second, expected):
    """Query strings and a www. prefix are ignored; the scheme is significant."""
    assert utils.are_same_endpoint(first, second) == expected


@pytest.mark.parametrize('endpoint, expected', (('/api/v2/resources/', 'v2'), ('/api/v2000/resources/', 'v2000'), ('/api/', 'common')))
def test_version_from_endpoint(endpoint, expected):
    """The API version segment is extracted from the endpoint path ('common' when absent)."""
    assert utils.version_from_endpoint(endpoint) == expected


class OneClass:
    """Plain sentinel type for filter_by_class tests."""


class TwoClass:
    """Second plain sentinel type for filter_by_class tests."""


class ThreeClass:
    """Base sentinel type; FourClass derives from it to test subclass handling."""


class FourClass(ThreeClass):
    """Subclass of ThreeClass used to exercise isinstance/issubclass filtering."""


def test_filter_by_class_with_subclass_class():
    """A subclass passed where its parent is expected is returned as-is."""
    result = utils.filter_by_class((OneClass, OneClass), (FourClass, ThreeClass))
    assert result == [OneClass, FourClass]


def test_filter_by_class_with_subclass_instance():
    """Instances of a subclass satisfy the parent-class slot unchanged."""
    one_obj = OneClass()
    four_obj = FourClass()
    result = utils.filter_by_class((one_obj, OneClass), (four_obj, ThreeClass))
    assert result == [one_obj, four_obj]


def test_filter_by_class_no_arg_tuples():
    """True selects the expected class itself, False yields None, instances pass through."""
    three_obj = ThreeClass()
    result = utils.filter_by_class((True, OneClass), (False, TwoClass), (three_obj, ThreeClass))
    assert result == [OneClass, None, three_obj]


def test_filter_by_class_with_arg_tuples_containing_class():
    """A (class, kwargs) tuple is passed through untouched."""
    one_obj = OneClass()
    spec = (ThreeClass, {'one': 1, 'two': 2})
    result = utils.filter_by_class((one_obj, OneClass), (False, TwoClass), (spec, ThreeClass))
    assert result == [one_obj, None, spec]


def test_filter_by_class_with_arg_tuples_containing_subclass():
    """A (subclass, kwargs) tuple also satisfies the parent-class slot untouched."""
    one_obj = OneClass()
    spec = (FourClass, {'one': 1, 'two': 2})
    result = utils.filter_by_class((one_obj, OneClass), (False, TwoClass), (spec, ThreeClass))
    assert result == [one_obj, None, spec]


@pytest.mark.parametrize('truthy', (True, 123, 'yes'))
def test_filter_by_class_with_arg_tuples_containing_truthy(truthy):
    """A non-class truthy first element in an arg tuple is replaced by the expected class."""
    one_obj = OneClass()
    spec = (truthy, {'one': 1, 'two': 2})
    result = utils.filter_by_class((one_obj, OneClass), (False, TwoClass), (spec, ThreeClass))
    assert result == [one_obj, None, (ThreeClass, {'one': 1, 'two': 2})]


@pytest.mark.parametrize(
    'date_string,now,expected',
    (
        ('2017-12-20T00:00:01.5Z', datetime(2017, 12, 20, 0, 0, 2, 750000), 1.25),
        ('2017-12-20T00:00:01.5Z', datetime(2017, 12, 20, 0, 0, 1, 500000), 0.00),
        ('2017-12-20T00:00:01.5Z', datetime(2017, 12, 20, 0, 0, 0, 500000), -1.00),
    ),
)
def test_seconds_since_date_string(date_string, now, expected):
    """Elapsed seconds are measured against a patched utcnow(), sub-second precision included."""
    with mock.patch('awxkit.utils.utcnow', return_value=now):
        assert utils.seconds_since_date_string(date_string) == expected


class RecordingCallback(object):
    """Callable that counts its invocations and returns a fixed value."""

    def __init__(self, value=True):
        # number of times __call__ has run
        self.call_count = 0
        self.value = value

    def __call__(self):
        self.call_count += 1
        return self.value


class TestPollUntil(object):
    """Behavioral checks for utils.poll_until with sleeping patched out."""

    @pytest.mark.parametrize('timeout', (0, 0.0, -0.5, -1, -9999999))
    def test_callback_called_once_for_non_positive_timeout(self, timeout):
        with mock.patch('awxkit.utils.logged_sleep') as mocked_sleep:
            recorder = RecordingCallback()
            utils.poll_until(recorder, timeout=timeout)
            assert not mocked_sleep.called
            assert recorder.call_count == 1

    def test_exc_raised_on_timeout(self):
        with mock.patch('awxkit.utils.logged_sleep'):
            with pytest.raises(exc.WaitUntilTimeout):
                utils.poll_until(lambda: False, timeout=0)

    @pytest.mark.parametrize('callback_value', ({'hello': 1}, 'foo', True))
    def test_non_falsey_callback_value_is_returned(self, callback_value):
        with mock.patch('awxkit.utils.logged_sleep'):
            assert utils.poll_until(lambda: callback_value) == callback_value


class TestPseudoNamespace(object):
    """PseudoNamespace should act as a dict with attribute access and deep casting."""

    def test_set_item_check_item(self):
        ns = utils.PseudoNamespace()
        ns['key'] = 'value'
        assert ns['key'] == 'value'

    def test_set_item_check_attr(self):
        ns = utils.PseudoNamespace()
        ns['key'] = 'value'
        assert ns.key == 'value'

    def test_set_attr_check_item(self):
        ns = utils.PseudoNamespace()
        ns.key = 'value'
        assert ns['key'] == 'value'

    def test_set_attr_check_attr(self):
        ns = utils.PseudoNamespace()
        ns.key = 'value'
        assert ns.key == 'value'

    def test_auto_dicts_cast(self):
        ns = utils.PseudoNamespace()
        ns.one = {}
        ns.one.two = {'three': 3}
        assert ns.one.two.three == 3
        assert ns == {'one': {'two': {'three': 3}}}

    def test_auto_list_of_dicts_cast(self):
        ns = utils.PseudoNamespace()
        ns.one = [{'two': 2}, {'three': 3}]
        assert ns.one[0].two == 2
        assert ns == {'one': [{'two': 2}, {'three': 3}]}

    def test_auto_tuple_of_dicts_cast(self):
        ns = utils.PseudoNamespace()
        ns.one = ({'two': 2}, {'three': 3})
        assert ns.one[0].two == 2
        assert ns == {'one': ({'two': 2}, {'three': 3})}

    def test_instantiation_via_dict(self):
        ns = utils.PseudoNamespace({'one': 1, 'two': 2, 'three': 3})
        assert ns.one == 1
        assert ns == {'one': 1, 'two': 2, 'three': 3}
        assert len(ns.keys()) == 3

    def test_instantiation_via_kwargs(self):
        ns = utils.PseudoNamespace(one=1, two=2, three=3)
        assert ns.one == 1
        assert ns == {'one': 1, 'two': 2, 'three': 3}
        assert len(ns.keys()) == 3

    def test_instantiation_via_dict_and_kwargs(self):
        ns = utils.PseudoNamespace({'one': 1, 'two': 2, 'three': 3}, four=4, five=5)
        assert ns.one == 1
        assert ns.four == 4
        assert ns == {'one': 1, 'two': 2, 'three': 3, 'four': 4, 'five': 5}
        assert len(ns.keys()) == 5

    def test_instantiation_via_nested_dict(self):
        ns = utils.PseudoNamespace({'one': 1, 'two': 2}, three={'four': 4, 'five': {'six': 6}})
        assert ns.one == 1
        assert ns.three.four == 4
        assert ns.three.five.six == 6
        assert ns == {'one': 1, 'two': 2, 'three': {'four': 4, 'five': {'six': 6}}}

    def test_instantiation_via_nested_dict_with_list(self):
        ns = utils.PseudoNamespace({'one': [{'two': 2}, {'three': 3}]})
        assert ns.one[0].two == 2
        assert ns.one[1].three == 3
        assert ns == {'one': [{'two': 2}, {'three': 3}]}

    def test_instantiation_via_nested_dict_with_lists(self):
        ns = utils.PseudoNamespace({'one': [{'two': 2}, {'three': {'four': 4, 'five': [{'six': 6}, {'seven': 7}]}}]})
        assert ns.one[1].three.five[1].seven == 7

    def test_instantiation_via_nested_dict_with_tuple(self):
        ns = utils.PseudoNamespace({'one': ({'two': 2}, {'three': 3})})
        assert ns.one[0].two == 2
        assert ns.one[1].three == 3
        assert ns == {'one': ({'two': 2}, {'three': 3})}

    def test_instantiation_via_nested_dict_with_tuples(self):
        ns = utils.PseudoNamespace({'one': ({'two': 2}, {'three': {'four': 4, 'five': ({'six': 6}, {'seven': 7})}})})
        assert ns.one[1].three.five[1].seven == 7

    def test_update_with_nested_dict(self):
        ns = utils.PseudoNamespace()
        ns.update({'one': 1, 'two': 2, 'three': 3}, four=4, five=5)
        assert ns.one == 1
        assert ns.four == 4
        assert ns == {'one': 1, 'two': 2, 'three': 3, 'four': 4, 'five': 5}
        assert len(ns.keys()) == 5

    def test_update_with_nested_dict_with_lists(self):
        ns = utils.PseudoNamespace()
        ns.update({'one': [{'two': 2}, {'three': {'four': 4, 'five': [{'six': 6}, {'seven': 7}]}}]})
        assert ns.one[1].three.five[1].seven == 7

    def test_update_with_nested_dict_with_tuples(self):
        ns = utils.PseudoNamespace()
        ns.update({'one': ({'two': 2}, {'three': {'four': 4, 'five': ({'six': 6}, {'seven': 7})}})})
        assert ns.one[1].three.five[1].seven == 7


class TestUpdatePayload(object):
    """update_payload() copies only whitelisted fields from kwargs into payload."""

    def test_empty_payload(self):
        fields = ('one', 'two', 'three', 'four')
        kwargs = {'two': 2, 'four': 4}
        payload = {}
        utils.update_payload(payload, fields, kwargs)
        assert payload == kwargs

    def test_untouched_payload(self):
        fields = ('not', 'in', 'kwargs')
        kwargs = {'one': 1, 'two': 2}
        payload = {'three': 3, 'four': 4}
        utils.update_payload(payload, fields, kwargs)
        assert payload == {'three': 3, 'four': 4}

    def test_overwritten_payload(self):
        fields = ('one', 'two')
        kwargs = {'one': 1, 'two': 2}
        payload = {'one': 'one', 'two': 'two'}
        utils.update_payload(payload, fields, kwargs)
        assert payload == kwargs

    def test_falsy_kwargs(self):
        # falsy values must still be copied, not skipped
        fields = ('one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight')
        kwargs = {'one': False, 'two': (), 'three': '', 'four': None, 'five': 0, 'six': {}, 'seven': set(), 'eight': []}
        payload = {}
        utils.update_payload(payload, fields, kwargs)
        assert payload == kwargs

    def test_not_provided_strips_payload(self):
        # the not_provided sentinel removes a pre-existing key from payload
        fields = ('one', 'two')
        kwargs = {'one': utils.not_provided}
        payload = {'one': 1, 'two': 2}
        utils.update_payload(payload, fields, kwargs)
        assert payload == {'two': 2}


def test_to_ical():
    """to_ical() renders a datetime as a compact iCal UTC stamp (YYYYMMDDTHHMMSSZ)."""
    # NOTE(review): utcnow() is deprecated in 3.12; kept as-is because to_ical
    # presumably expects a naive UTC datetime — confirm before migrating.
    moment = datetime.utcnow()
    date_part = str(moment.date()).replace('-', '')
    time_part = str(moment.time()).split('.')[0].replace(':', '')
    assert utils.to_ical(moment) == '{}T{}Z'.format(date_part, time_part)
07070100000077000081A400000000000000000000000166846B92000004D3000000000000000000000000000000000000001B00000000awx-24.6.1/test/test_ws.py# -*- coding: utf-8 -*-
from collections import namedtuple

from unittest.mock import patch
import pytest

from awxkit.ws import WSClient

# Expected (port, hostname, secure) triple that WSClient derives from a base URL.
ParseResult = namedtuple("ParseResult", ["port", "hostname", "secure"])


def test_explicit_hostname():
    """Explicit host/port/ssl arguments are stored verbatim on the client."""
    client = WSClient("token", "some-hostname", 556, False)
    assert client.port == 556
    assert client.hostname == "some-hostname"
    # identity check instead of `== False` (PEP 8 / flake8 E712)
    assert client._use_ssl is False
    assert client.token == "token"


def test_websocket_suffix():
    """A custom ws_suffix is preserved on the client."""
    client = WSClient("token", "hostname", 566, ws_suffix='my-websocket/')
    assert client.suffix == 'my-websocket/'


@pytest.mark.parametrize(
    'url, result',
    (
        ('https://somename:123', ParseResult(123, "somename", True)),
        ('http://othername:456', ParseResult(456, "othername", False)),
        ('http://othername', ParseResult(80, "othername", False)),
        ('https://othername', ParseResult(443, "othername", True)),
    ),
)
def test_urlparsing(url, result):
    """With no explicit host, the client derives host/port/ssl from config.base_url."""
    with patch("awxkit.ws.config") as mock_config:
        mock_config.base_url = url
        client = WSClient("token")
        assert client.port == result.port
        assert client.hostname == result.hostname
        assert client._use_ssl == result.secure
07070100000078000081A400000000000000000000000166846B920000032F000000000000000000000000000000000000001300000000awx-24.6.1/tox.ini[tox]
distshare = {homedir}/.tox/distshare
envlist =
    lint,
    test
skip_missing_interpreters = true
# recreate = true
# skipsdist = true

[testenv]
basepython = python3.11
setenv =
    PYTHONPATH = {toxinidir}:{env:PYTHONPATH:}:.
deps =
    websocket-client
    coverage
    mock
    pytest
    pytest-mock

commands = coverage run --parallel --source awxkit -m pytest --doctest-glob='*.md' --junit-xml=report.xml {posargs}

[testenv:lint]
deps =
    {[testenv]deps}
    flake8
commands =
    flake8 awxkit
# pylama --report report.pylama awxkit
# py.test awxkit --pylama --junitxml=report.pylama {posargs}
    - coverage erase

[testenv:coveralls]
commands=
    - coverage combine
    - coverage report -m
    - coveralls

[flake8]
max-line-length = 120

[pytest]
addopts = -v --tb=native
junit_family=xunit2
07070100000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000B00000000TRAILER!!!876 blocks
openSUSE Build Service is sponsored by