File test-repo-1-0.1.obscpio of Package salt-extensions

File: test-repo-1-0.1/.obs/workflows.yml

main_workflow:
  steps:
    - branch_package:
        source_project: home:PSuarezHernandez:tests:github
        source_package: salt-extensions
        target_project: home:PSuarezHernandez:tests:github:CI
  filters:
    event: pull_request

rebuild_master:
  steps:
    - trigger_services:
        project: home:PSuarezHernandez:tests:github
        package: salt-extensions
  filters:
    event: push
    branches:
      only:
        - main

File: test-repo-1-0.1/README.md

# test-repo-1

File: test-repo-1-0.1/salt-extensions.changes

-------------------------------------------------------------------
Mon Nov 25 15:24:45 UTC 2024 - Pablo Suárez Hernández <psuarezhernandez@suse.com>

- Initial packages:
  * saltext-mysql
  * saltext-prometheus

File: test-repo-1-0.1/salt-extensions.spec

#
# spec file for package salt-extensions
#
# Copyright (c) 2024 SUSE LLC
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
# upon. The license for this file, and modifications and additions to the
# file, is the same license as for the pristine package itself (unless the
# license for the pristine package is not an Open Source License, in which
# case the license is the MIT License). An "Open Source License" is a
# license that conforms to the Open Source Definition (Version 1.9)
# published by the Open Source Initiative.
# Please submit bugfixes or comments via https://bugs.opensuse.org/
#
%global saltext_mysql_version 1.0.0
%global saltext_prometheus_version 2.1.0
%{?sle15_python_module_pythons}
Name: salt-extensions
Version: 0.1
Release: 0
%global saltext_version %{version}
Summary: Salt Extensions provided by openSUSE
License: Apache-2.0
URL: https://github.com/meaksh/test-repo-1
Source: test-repo-1-%{version}.tar.gz
BuildRequires: %{python_module pip}
BuildRequires: %{python_module setuptools_scm}
BuildRequires: %{python_module wheel}
BuildRequires: fdupes
BuildRequires: python-rpm-macros
Requires: python-salt >= 3006.0
Requires: python-salt-extensions-mysql
Requires: python-salt-extensions-prometheus
%python_subpackages
%description
A collection of different Salt Extensions packages by openSUSE
%prep
%autosetup -p1 -n test-repo-1-%{saltext_version}
%build
pushd saltext_mysql-%{saltext_mysql_version}
%pyproject_wheel
popd
pushd saltext.prometheus-%{saltext_prometheus_version}
%pyproject_wheel
popd
%install
pushd saltext_mysql-%{saltext_mysql_version}
%pyproject_install
popd
pushd saltext.prometheus-%{saltext_prometheus_version}
%pyproject_install
popd
%python_expand %fdupes %{buildroot}/%{$python_sitelib}
%package mysql
Version: %{saltext_mysql_version}
Summary: Salt Extension for interacting with MySQL
Requires: (python-PyMySQL or python-mysqlclient)
Requires: python-sqlparse
Requires: python-salt >= 3006.0
%description mysql
Salt Extension for interacting with MySQL
%package prometheus
Version: %{saltext_prometheus_version}
Summary: Salt Extension for interacting with Prometheus
Requires: python-prometheus-client
Requires: python-salt >= 3006.0
%description prometheus
Salt Extension for interacting with Prometheus
%files %{python_files}
%dir %{python_sitelib}/saltext/
%{python_sitelib}/saltext/__init__.py
%{python_sitelib}/saltext/__pycache__
%files %{python_files mysql}
%{python_sitelib}/saltext/mysql
%{python_sitelib}/saltext.mysql-%{saltext_mysql_version}*-info
%files %{python_files prometheus}
%{python_sitelib}/saltext/prometheus
%{python_sitelib}/saltext.prometheus-%{saltext_prometheus_version}*-info
%changelog

File: test-repo-1-0.1/saltext.prometheus-2.1.0/.copier-answers.yml

# Autogenerated. Do not edit this by hand, use `copier update`.
---
_commit: 0.2.6
_src_path: https://github.com/lkubb/salt-extension-copier
author: EITR Technologies, LLC
author_email: devops@eitr.tech
docs_url: ''
license: apache
loaders:
- engine
- returner
max_salt_version: 3006
no_saltext_namespace: false
package_name: prometheus
project_name: prometheus
python_requires: '3.8'
salt_version: '3005'
source_url: https://github.com/salt-extensions/saltext-prometheus
ssh_fixtures: false
summary: Salt Extension for interacting with Prometheus
tracker_url: https://github.com/salt-extensions/saltext-prometheus/issues
url: https://github.com/salt-extensions/saltext-prometheus

File: test-repo-1-0.1/saltext.prometheus-2.1.0/.coveragerc

[run]
branch = True
cover_pylib = False
relative_files = True
parallel = True
concurrency = multiprocessing
omit =
    .nox/*
    setup.py

[report]
# Regexes for lines to exclude from consideration
exclude_lines =
    # Have to re-enable the standard pragma
    pragma: no cover
    # Don't complain about missing debug-only code:
    def __repr__
    # Don't complain if tests don't hit defensive assertion code:
    raise AssertionError
    raise NotImplementedError
    # Don't complain if non-runnable code isn't run:
    if 0:
    if False:
    if __name__ == .__main__.:
omit =
    .nox/*
    setup.py
ignore_errors = True

[paths]
source =
    saltext/prometheus
    src/saltext/prometheus
testsuite =
    tests/

File: test-repo-1-0.1/saltext.prometheus-2.1.0/.github/ISSUE_TEMPLATE.md

### Description of Issue
<!-- Note: Please direct questions to the salt-users google group, Slack, IRC, etc. Only post issues and feature requests here -->
### Setup
(Please provide relevant configs and/or SLS files (Be sure to remove sensitive info).)
### Steps to Reproduce Issue
(Include debug logs if possible and relevant.)
### Versions Report
(Provided by running `salt --versions-report`. Please also mention any differences in master/minion versions.)

File: test-repo-1-0.1/saltext.prometheus-2.1.0/.github/ISSUE_TEMPLATE/bug_report.md

---
name: Bug report
about: Create a report to help us improve
title: "[BUG]"
labels: bug, needs-triage
assignees: ''
---
**Description**
A clear and concise description of what the bug is.
**Setup**
(Please provide relevant configs and/or SLS files (be sure to remove sensitive info). There is no general set-up of Salt.)
Please be as specific as possible and give set-up details.
- [ ] on-prem machine
- [ ] VM (Virtualbox, KVM, etc. please specify)
- [ ] VM running on a cloud service, please be explicit and add details
- [ ] container (Kubernetes, Docker, containerd, etc. please specify)
- [ ] or a combination, please be explicit
- [ ] jails if it is FreeBSD
- [ ] classic packaging
- [ ] onedir packaging
- [ ] used bootstrap to install
**Steps to Reproduce the behavior**
(Include debug logs if possible and relevant)
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Versions Report**
<details><summary>salt --versions-report</summary>
(Provided by running salt --versions-report. Please also mention any differences in master/minion versions.)
```yaml
PASTE HERE
```
</details>
**Additional context**
Add any other context about the problem here.

File: test-repo-1-0.1/saltext.prometheus-2.1.0/.github/ISSUE_TEMPLATE/config.yml

blank_issues_enabled: true
contact_links:
  - name: Salt Community Slack
    url: https://saltstackcommunity.slack.com/
    about: Please ask and answer questions here.
  - name: Salt-Users Forum
    url: https://groups.google.com/forum/#!forum/salt-users
    about: Please ask and answer questions here.
  - name: Salt on LiberaChat
    url: https://web.libera.chat/#salt
    about: Please ask and answer questions here.
  - name: Security vulnerabilities
    email: saltproject-security.pdl@broadcom.com
    about: Please report security vulnerabilities here.

File: test-repo-1-0.1/saltext.prometheus-2.1.0/.github/ISSUE_TEMPLATE/docs.md

---
name: Docs
about: Issue related to Salt Documentation
title: "[DOCS]"
labels: documentation, needs-triage
assignees: ''
---
**Description**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Suggested Fix**
What did you expect to see in the documentation that is missing or needs updating?
**Type of documentation**
This could be module documentation or a guide.
**Location or format of documentation**
Insert page URL if applicable.
**Additional context**
Add any other context or screenshots here.

File: test-repo-1-0.1/saltext.prometheus-2.1.0/.github/ISSUE_TEMPLATE/feature_request.md

---
name: Feature request
about: Suggest an idea for this project
title: "[FEATURE REQUEST]"
labels: feature, needs-triage
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.

File: test-repo-1-0.1/saltext.prometheus-2.1.0/.github/ISSUE_TEMPLATE/tech-debt.md

---
name: Tech Debt
about: Issue is related to tech debt. This includes compatibility changes for newer versions of software and OSes that salt interacts with.
title: "[TECH DEBT]"
labels: tech-debt
assignees: ''
---
### Description of the tech debt to be addressed, include links and screenshots
### Versions Report
(Provided by running `salt --versions-report`. Please also mention any differences in master/minion versions.)

File: test-repo-1-0.1/saltext.prometheus-2.1.0/.github/PULL_REQUEST_TEMPLATE.md

### What does this PR do?
### What issues does this PR fix or reference?
Fixes:
### Previous Behavior
Remove this section if not relevant
### New Behavior
Remove this section if not relevant
### Merge requirements satisfied?
**[NOTICE] Bug fixes or features added to Salt require tests.**
<!-- Please review the [test documentation](https://docs.saltproject.io/en/master/topics/tutorials/writing_tests.html) for details on how to implement tests into Salt's test suite. -->
- [ ] Docs
- [ ] Changelog - https://docs.saltproject.io/en/master/topics/development/changelog.html
- [ ] Tests written/updated
### Commits signed with GPG?
Yes/No
Please review [Salt's Contributing Guide](https://docs.saltproject.io/en/master/topics/development/contributing.html) for best practices.
See GitHub's [page on GPG signing](https://help.github.com/articles/signing-commits-using-gpg/) for more information about signing commits with GPG.

File: test-repo-1-0.1/saltext.prometheus-2.1.0/.github/workflows/pr.yml

name: Pull Request or Push

on:
  push:
    branches:
      - 'main'  # Run on pushes to main
    tags-ignore:
      - '*'  # Ignore pushes to tags
  pull_request:

jobs:
  call_central_workflow:
    name: CI
    uses: salt-extensions/central-artifacts/.github/workflows/ci.yml@main
    with:
      setup-vault: true
    permissions:
      contents: write
      pull-requests: read

File: test-repo-1-0.1/saltext.prometheus-2.1.0/.github/workflows/tag.yml

name: Tagged Releases

on:
  push:
    tags:
      - "v*"  # Only tags starting with "v" for "v1.0.0", etc.

jobs:
  get_tag_version:
    runs-on: ubuntu-latest
    outputs:
      version: ${{ steps.get_version.outputs.version }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Extract tag name
        id: get_version
        run: echo "::set-output name=version::$(echo ${GITHUB_REF#refs/tags/v})"

  call_central_workflow:
    needs: get_tag_version
    uses: salt-extensions/central-artifacts/.github/workflows/ci.yml@main
    with:
      setup-vault: true
      release: true
      version: ${{ needs.get_tag_version.outputs.version }}
    permissions:
      contents: write
      id-token: write
      pull-requests: read
    secrets: inherit

File: test-repo-1-0.1/saltext.prometheus-2.1.0/.gitignore

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# Ignore the setuptools_scm auto-generated version module
src/saltext/prometheus/version.py
# Ignore CI generated artifacts
artifacts/
# IDE
.vscode/
.DS_Store
.idea/
*.swp
#salt env
Saltfile

File: test-repo-1-0.1/saltext.prometheus-2.1.0/.pre-commit-config.yaml

---
minimum_pre_commit_version: 2.4.0
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.4.0
    hooks:
      - id: check-merge-conflict  # Check for files that contain merge conflict strings.
      - id: trailing-whitespace   # Trims trailing whitespace.
        args: [--markdown-linebreak-ext=md]
      - id: mixed-line-ending     # Replaces or checks mixed line ending.
        args: [--fix=lf]
      - id: end-of-file-fixer     # Makes sure files end in a newline and only a newline.
      - id: check-merge-conflict  # Check for files that contain merge conflict strings.
      - id: check-ast             # Simply check whether files parse as valid python.

  # ----- Formatting ---------------------------------------------------------------------------->
  - repo: https://github.com/saltstack/pre-commit-remove-import-headers
    rev: 1.1.0
    hooks:
      - id: remove-import-headers

  - repo: local
    hooks:
      - id: check-cli-examples
        name: Check CLI examples on execution modules
        entry: python .pre-commit-hooks/check-cli-examples.py
        language: system
        files: ^src/saltext/prometheus/modules/.*\.py$

  - repo: local
    hooks:
      - id: check-docs
        name: Check rST doc files exist for modules/states
        entry: python .pre-commit-hooks/make-autodocs.py
        language: system
        pass_filenames: false

  - repo: https://github.com/s0undt3ch/salt-rewrite
    # Automatically rewrite code with known rules
    rev: 2.5.2
    hooks:
      - id: salt-rewrite
        alias: rewrite-docstrings
        name: Salt extensions docstrings auto-fixes
        files: ^src/saltext/prometheus/.*\.py$
        args: [--silent]

  - repo: https://github.com/s0undt3ch/salt-rewrite
    # Automatically rewrite code with known rules
    rev: 2.5.2
    hooks:
      - id: salt-rewrite
        alias: rewrite-tests
        name: Rewrite the test suite
        files: ^tests/.*\.py$
        args: [--silent, -E, fix_docstrings]

  - repo: https://github.com/asottile/pyupgrade
    rev: v2.37.2
    hooks:
      - id: pyupgrade
        name: Rewrite Code to be Py3.8+
        args: [
          --py38-plus
        ]
        exclude: src/saltext/prometheus/version.py

  - repo: https://github.com/asottile/reorder_python_imports
    rev: v3.10.0
    hooks:
      - id: reorder-python-imports
        args: [
          --py38-plus,
        ]
        exclude: src/saltext/prometheus/version.py

  - repo: https://github.com/psf/black
    rev: 22.6.0
    hooks:
      - id: black
        args: [-l 100]
        exclude: src/saltext/prometheus/version.py

  - repo: https://github.com/adamchainz/blacken-docs
    rev: v1.12.1
    hooks:
      - id: blacken-docs
        args: [--skip-errors]
        files: ^(docs/.*\.rst|src/saltext/prometheus/.*\.py)$
        additional_dependencies:
          - black==22.6.0
  # <---- Formatting -----------------------------------------------------------------------------

  # ----- Security ------------------------------------------------------------------------------>
  - repo: https://github.com/PyCQA/bandit
    rev: "1.7.4"
    hooks:
      - id: bandit
        alias: bandit-salt
        name: Run bandit against the code base
        args: [--silent, -lll, --skip, B701]
        exclude: src/saltext/prometheus/version.py

  - repo: https://github.com/PyCQA/bandit
    rev: "1.7.4"
    hooks:
      - id: bandit
        alias: bandit-tests
        name: Run bandit against the test suite
        args: [--silent, -lll, --skip, B701]
        files: ^tests/.*
  # <---- Security -------------------------------------------------------------------------------

  # ----- Code Analysis ------------------------------------------------------------------------->
  - repo: https://github.com/saltstack/mirrors-nox
    rev: v2021.6.12
    hooks:
      - id: nox
        alias: lint-src
        name: Lint Source Code
        files: ^((setup|noxfile)|src/.*)\.py$
        require_serial: true
        args:
          - -e
          - lint-code-pre-commit
          - --

  - repo: https://github.com/saltstack/mirrors-nox
    rev: v2021.6.12
    hooks:
      - id: nox
        alias: lint-tests
        name: Lint Tests
        files: ^tests/.*\.py$
        require_serial: true
        args:
          - -e
          - lint-tests-pre-commit
          - --
  # <---- Code Analysis --------------------------------------------------------------------------

File: test-repo-1-0.1/saltext.prometheus-2.1.0/.pre-commit-hooks/check-cli-examples.py

import ast
import pathlib
import re
import sys

CODE_ROOT = pathlib.Path(__file__).resolve().parent.parent
EXECUTION_MODULES_PATH = CODE_ROOT / "src" / "saltext" / "prometheus" / "modules"


def check_cli_examples(files):
    """
    Check that every function on every execution module provides a CLI example
    """
    errors = 0
    for file in files:
        path = pathlib.Path(file).resolve()
        try:
            relpath = path.relative_to(EXECUTION_MODULES_PATH)
            if str(relpath.parent) != ".":
                # We don't want to check nested packages
                continue
        except ValueError:
            # We're only interested in execution modules
            continue
        module = ast.parse(path.read_text(), filename=str(path))
        for funcdef in [node for node in module.body if isinstance(node, ast.FunctionDef)]:
            if funcdef.name.startswith("_"):
                # We're not interested in internal functions
                continue
            docstring = ast.get_docstring(funcdef, clean=False)
            if not docstring:
                errors += 1
                print(
                    "The function {!r} on '{}' does not have a docstring".format(
                        funcdef.name,
                        path.relative_to(CODE_ROOT),
                    ),
                    file=sys.stderr,
                )
                continue
            if _check_cli_example_present(docstring) is False:
                errors += 1
                print(
                    "The function {!r} on '{}' does not have a 'CLI Example:' in it's docstring".format(
                        funcdef.name,
                        path.relative_to(CODE_ROOT),
                    ),
                    file=sys.stderr,
                )
                continue
    sys.exit(errors)


CLI_EXAMPLE_PRESENT_RE = re.compile(r"CLI Example(?:s)?:")


def _check_cli_example_present(docstring):
    return CLI_EXAMPLE_PRESENT_RE.search(docstring) is not None


if __name__ == "__main__":
    check_cli_examples(sys.argv[1:])
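
For reference, the hook above only requires that every public function's docstring contain a `CLI Example:` (or `CLI Examples:`) marker, as matched by `CLI_EXAMPLE_PRESENT_RE`. A minimal sketch of a docstring that would pass the check follows; the function and module names are illustrative and not taken from this package:

```python
def ping(target="localhost"):
    """
    Illustrative execution module function (hypothetical name) whose
    docstring satisfies check-cli-examples.py, because it contains a
    "CLI Example:" section.

    CLI Example:

    .. code-block:: bash

        salt '*' prometheus.ping target=localhost
    """
    return True
```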

File: test-repo-1-0.1/saltext.prometheus-2.1.0/.pre-commit-hooks/make-autodocs.py

import ast
import os.path
import subprocess
from pathlib import Path

repo_path = Path(subprocess.check_output(["git", "rev-parse", "--show-toplevel"]).decode().strip())
src_dir = repo_path / "src" / "saltext" / "prometheus"
doc_dir = repo_path / "docs"

docs_by_kind = {}
changed_something = False


def _find_virtualname(path):
    tree = ast.parse(path.read_text())
    for node in ast.walk(tree):
        if isinstance(node, ast.Assign):
            for target in node.targets:
                if isinstance(target, ast.Name) and target.id == "__virtualname__":
                    if isinstance(node.value, ast.Str):
                        virtualname = node.value.s
                        break
            else:
                continue
            break
    else:
        virtualname = path.with_suffix("").name
    return virtualname


def write_module(rst_path, path, use_virtualname=True):
    if use_virtualname:
        virtualname = "``" + _find_virtualname(path) + "``"
    else:
        virtualname = make_import_path(path)
    module_contents = f"""\
{virtualname}
{'='*len(virtualname)}

.. automodule:: {make_import_path(path)}
    :members:
"""
    if not rst_path.exists() or rst_path.read_text() != module_contents:
        print(rst_path)
        rst_path.write_text(module_contents)
        return True
    return False


def write_index(index_rst, import_paths, kind):
    if kind == "utils":
        header_text = "Utilities"
        common_path = os.path.commonpath(tuple(x.replace(".", "/") for x in import_paths)).replace(
            "/", "."
        )
        if any(x == common_path for x in import_paths):
            common_path = common_path[: common_path.rfind(".")]
    else:
        header_text = (
            "execution modules" if kind.lower() == "modules" else kind.rstrip("s") + " modules"
        )
        common_path = import_paths[0][: import_paths[0].rfind(".")]
    header = f"{'_'*len(header_text)}\n{header_text.title()}\n{'_'*len(header_text)}"
    index_contents = f"""\
.. all-saltext.prometheus.{kind}:

{header}

.. currentmodule:: {common_path}

.. autosummary::
    :toctree:

{chr(10).join(sorted('    '+p[len(common_path)+1:] for p in import_paths))}
"""
    if not index_rst.exists() or index_rst.read_text() != index_contents:
        print(index_rst)
        index_rst.write_text(index_contents)
        return True
    return False


def make_import_path(path):
    if path.name == "__init__.py":
        path = path.parent
    return ".".join(path.relative_to(repo_path / "src").with_suffix("").parts)


for path in src_dir.glob("*/*.py"):
    if path.name != "__init__.py":
        kind = path.parent.name
        if kind != "utils":
            docs_by_kind.setdefault(kind, set()).add(path)

# Utils can have subdirectories, treat them separately
for path in (src_dir / "utils").rglob("*.py"):
    if path.name == "__init__.py" and not path.read_text():
        continue
    docs_by_kind.setdefault("utils", set()).add(path)

for kind in docs_by_kind:
    kind_path = doc_dir / "ref" / kind
    index_rst = kind_path / "index.rst"
    import_paths = []
    for path in sorted(docs_by_kind[kind]):
        import_path = make_import_path(path)
        import_paths.append(import_path)
        rst_path = kind_path / (import_path + ".rst")
        rst_path.parent.mkdir(parents=True, exist_ok=True)
        change = write_module(rst_path, path, use_virtualname=kind != "utils")
        changed_something = changed_something or change
    write_index(index_rst, import_paths, kind)

# Ensure pre-commit realizes we did something
if changed_something:
    exit(2)
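
To make the path mapping in the script above concrete, here is a minimal, self-contained sketch of what `make_import_path()` computes for a single source file; the checkout location and module name are hypothetical, not part of this package:

```python
from pathlib import Path

# Hypothetical checkout and module, for illustration only.
repo_path = Path("/work/saltext-prometheus")
path = repo_path / "src" / "saltext" / "prometheus" / "modules" / "prometheus_mod.py"

# make_import_path(): strip the leading "src/" and the ".py" suffix, join with dots.
import_path = ".".join(path.relative_to(repo_path / "src").with_suffix("").parts)
assert import_path == "saltext.prometheus.modules.prometheus_mod"

# write_module() would then generate docs/ref/modules/<import_path>.rst containing
# an ``.. automodule::`` directive for that dotted path.
print(import_path)
```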

File: test-repo-1-0.1/saltext.prometheus-2.1.0/.pylintrc

[MAIN]
# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no
# Clear in-memory caches upon conclusion of linting. Useful if running pylint
# in a server-like mode.
clear-cache-post-run=no
# Load and enable all available extensions. Use --list-extensions to see a list
# all available extensions.
#enable-all-extensions=
# In error mode, messages with a category besides ERROR or FATAL are
# suppressed, and no reports are done by default. Error mode is compatible with
# disabling specific errors.
#errors-only=
# Always return a 0 (non-error) status code, even if lint errors are found.
# This is primarily useful in continuous integration scripts.
#exit-zero=
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code.
extension-pkg-allow-list=
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code. (This is an alternative name to extension-pkg-allow-list
# for backward compatibility.)
extension-pkg-whitelist=
# Return non-zero exit code if any of these messages/categories are detected,
# even if score is above --fail-under value. Syntax same as enable. Messages
# specified are enabled, while categories only check already-enabled messages.
fail-on=
# Specify a score threshold under which the program will exit with error.
fail-under=10
# Interpret the stdin as a python script, whose filename needs to be passed as
# the module_or_package argument.
#from-stdin=
# Files or directories to be skipped. They should be base names, not paths.
ignore=CVS
# Add files or directories matching the regular expressions patterns to the
# ignore-list. The regex matches against paths and can be in Posix or Windows
# format. Because '\\' represents the directory delimiter on Windows systems,
# it can't be used as an escape character.
ignore-paths=
# Files or directories matching the regular expression patterns are skipped.
# The regex matches against base names, not paths. The default value ignores
# Emacs file locks
ignore-patterns=^\.#
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis). It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
# number of processors available to use, and will cap the count on Windows to
# avoid hangs.
jobs=0
# Control the amount of potential inferred values when inferring a single
# object. This can help the performance when dealing with large functions or
# complex, nested conditions.
limit-inference-results=100
# List of plugins (as comma separated values of python module names) to load,
# usually to register additional checkers.
load-plugins=
# Pickle collected data for later comparisons.
persistent=yes
# Minimum Python version to use for version dependent checks. Will default to
# the version used to run pylint.
py-version=3.10
# Discover python modules and packages in the file system subtree.
recursive=no
# Add paths to the list of the source roots. Supports globbing patterns. The
# source root is an absolute path or a path relative to the current working
# directory used to determine a package namespace for modules located under the
# source root.
source-roots=
# When enabled, pylint would attempt to guess common misconfiguration and emit
# user-friendly hints instead of false-positive error messages.
suggestion-mode=yes
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
# In verbose mode, extra non-checker-related info will be displayed.
#verbose=
[BASIC]
# Naming style matching correct argument names.
argument-naming-style=snake_case
# Regular expression matching correct argument names. Overrides argument-
# naming-style. If left empty, argument names will be checked with the set
# naming style.
#argument-rgx=
# Naming style matching correct attribute names.
attr-naming-style=snake_case
# Regular expression matching correct attribute names. Overrides attr-naming-
# style. If left empty, attribute names will be checked with the set naming
# style.
#attr-rgx=
# Bad variable names which should always be refused, separated by a comma.
bad-names=foo,
          bar,
          baz,
          toto,
          tutu,
          tata
# Bad variable names regexes, separated by a comma. If names match any regex,
# they will always be refused
bad-names-rgxs=
# Naming style matching correct class attribute names.
class-attribute-naming-style=any
# Regular expression matching correct class attribute names. Overrides class-
# attribute-naming-style. If left empty, class attribute names will be checked
# with the set naming style.
#class-attribute-rgx=
# Naming style matching correct class constant names.
class-const-naming-style=UPPER_CASE
# Regular expression matching correct class constant names. Overrides class-
# const-naming-style. If left empty, class constant names will be checked with
# the set naming style.
#class-const-rgx=
# Naming style matching correct class names.
class-naming-style=PascalCase
# Regular expression matching correct class names. Overrides class-naming-
# style. If left empty, class names will be checked with the set naming style.
#class-rgx=
# Naming style matching correct constant names.
const-naming-style=UPPER_CASE
# Regular expression matching correct constant names. Overrides const-naming-
# style. If left empty, constant names will be checked with the set naming
# style.
#const-rgx=
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
# Naming style matching correct function names.
function-naming-style=snake_case
# Regular expression matching correct function names. Overrides function-
# naming-style. If left empty, function names will be checked with the set
# naming style.
#function-rgx=
# Good variable names which should always be accepted, separated by a comma.
good-names=i,
           j,
           k,
           ex,
           Run,
           _
# Good variable names regexes, separated by a comma. If names match any regex,
# they will always be accepted
good-names-rgxs=
# Include a hint for the correct naming format with invalid-name.
include-naming-hint=no
# Naming style matching correct inline iteration names.
inlinevar-naming-style=any
# Regular expression matching correct inline iteration names. Overrides
# inlinevar-naming-style. If left empty, inline iteration names will be checked
# with the set naming style.
#inlinevar-rgx=
# Naming style matching correct method names.
method-naming-style=snake_case
# Regular expression matching correct method names. Overrides method-naming-
# style. If left empty, method names will be checked with the set naming style.
#method-rgx=
# Naming style matching correct module names.
module-naming-style=snake_case
# Regular expression matching correct module names. Overrides module-naming-
# style. If left empty, module names will be checked with the set naming style.
#module-rgx=
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
# These decorators are taken in consideration only for invalid-name.
property-classes=abc.abstractproperty
# Regular expression matching correct type alias names. If left empty, type
# alias names will be checked with the set naming style.
#typealias-rgx=
# Regular expression matching correct type variable names. If left empty, type
# variable names will be checked with the set naming style.
#typevar-rgx=
# Naming style matching correct variable names.
variable-naming-style=snake_case
# Regular expression matching correct variable names. Overrides variable-
# naming-style. If left empty, variable names will be checked with the set
# naming style.
#variable-rgx=
[CLASSES]
# Warn about protected attribute access inside special methods
check-protected-access-in-special-methods=no
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,
                      __new__,
                      setUp,
                      asyncSetUp,
                      __post_init__
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs
[DESIGN]
# List of regular expressions of class ancestor names to ignore when counting
# public methods (see R0903)
exclude-too-few-public-methods=
# List of qualified class names to ignore when counting class parents (see
# R0901)
ignored-parents=
# Maximum number of arguments for function / method.
max-args=15
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Maximum number of boolean expressions in an if statement (see R0916).
max-bool-expr=5
# Maximum number of branch for function / method body.
max-branches=12
# Maximum number of locals for function / method body.
max-locals=15
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of public methods for a class (see R0904).
max-public-methods=25
# Maximum number of return / yield for function / method body.
max-returns=6
# Maximum number of statements in function / method body.
max-statements=50
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
[EXCEPTIONS]
# Exceptions that will emit a warning when caught.
overgeneral-exceptions=builtins.BaseException,builtins.Exception
[FORMAT]
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
# Maximum number of characters on a single line.
max-line-length=100
# Maximum number of lines in a module.
max-module-lines=2000
# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
single-line-class-stmt=no
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
[IMPORTS]
# List of modules that can be imported at any level, not just the top level
# one.
allow-any-import-level=
# Allow explicit reexports by alias from a package __init__.
allow-reexport-from-package=no
# Allow wildcard imports from modules that define __all__.
allow-wildcard-with-all=no
# Deprecated modules which should not be used, separated by a comma.
deprecated-modules=
# Output a graph (.gv or any supported image format) of external dependencies
# to the given file (report RP0402 must not be disabled).
ext-import-graph=
# Output a graph (.gv or any supported image format) of all (i.e. internal and
# external) dependencies to the given file (report RP0402 must not be
# disabled).
import-graph=
# Output a graph (.gv or any supported image format) of internal dependencies
# to the given file (report RP0402 must not be disabled).
int-import-graph=
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
# Couples of modules and preferred modules, separated by a comma.
preferred-modules=
[LOGGING]
# The type of string formatting that logging methods do. `old` means using %
# formatting, `new` is for `{}` formatting.
logging-format-style=old
# Logging modules to check that the string format arguments are in logging
# function parameter format.
logging-modules=logging
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE,
# UNDEFINED.
confidence=HIGH,
           CONTROL_FLOW,
           INFERENCE,
           INFERENCE_FAILURE,
           UNDEFINED
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once). You can also use "--disable=all" to
# disable everything first and then re-enable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W".
disable=R,
        locally-disabled,
        file-ignored,
        unexpected-special-method-signature,
        import-error,
        no-member,
        unsubscriptable-object,
        blacklisted-name,
        invalid-name,
        missing-docstring,
        empty-docstring,
        unidiomatic-typecheck,
        wrong-import-order,
        ungrouped-imports,
        wrong-import-position,
        bad-mcs-method-argument,
        bad-mcs-classmethod-argument,
        line-too-long,
        too-many-lines,
        bad-continuation,
        exec-used,
        attribute-defined-outside-init,
        protected-access,
        reimported,
        fixme,
        global-statement,
        unused-variable,
        unused-argument,
        redefined-outer-name,
        redefined-builtin,
        undefined-loop-variable,
        logging-format-interpolation,
        invalid-format-index,
        line-too-long,
        import-outside-toplevel,
        deprecated-method,
        keyword-arg-before-vararg,
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=c-extension-no-member
[METHOD_ARGS]
# List of qualified names (i.e., library.method) which require a timeout
# parameter e.g. 'requests.api.get,requests.api.post'
timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,
      XXX,
      TODO
# Regular expression of note tags to take in consideration.
notes-rgx=
[REFACTORING]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
# Complete name of functions that never returns. When checking for
# inconsistent-return-statements if a never returning function is called then
# it will be considered as an explicit return statement and no message will be
# printed.
never-returning-functions=sys.exit,argparse.parse_error
[REPORTS]
# Python expression which should return a score less than or equal to 10. You
# have access to the variables 'fatal', 'error', 'warning', 'refactor',
# 'convention', and 'info' which contain the number of messages in each
# category, as well as 'statement' which is the total number of statements
# analyzed. This score is used by the global evaluation report (RP0004).
evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details.
msg-template=
# Set the output format. Available formats are text, parseable, colorized, json
# and msvs (visual studio). You can also give a reporter class, e.g.
# mypackage.mymodule.MyReporterClass.
#output-format=
# Tells whether to display a full report or only the messages.
reports=no
# Activate the evaluation score.
score=yes
[SIMILARITIES]
# Comments are removed from the similarity computation
ignore-comments=yes
# Docstrings are removed from the similarity computation
ignore-docstrings=yes
# Imports are removed from the similarity computation
ignore-imports=yes
# Signatures are removed from the similarity computation
ignore-signatures=yes
# Minimum lines number of a similarity.
min-similarity-lines=4
[SPELLING]
# Limits count of emitted suggestions for spelling mistakes.
max-spelling-suggestions=4
# Spelling dictionary name. No available dictionaries : You need to install the
# system dependency for enchant to work..
spelling-dict=
# List of comma separated words that should be considered directives if they
# appear at the beginning of a comment and should not be checked.
spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:
# List of comma separated words that should not be checked.
spelling-ignore-words=
# A path to a file that contains the private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to the private dictionary (see the
# --spelling-private-dict-file option) instead of raising a message.
spelling-store-unknown-words=no
[STRING]
# This flag controls whether inconsistent-quotes generates a warning when the
# character used as a quote delimiter is used inconsistently within a module.
check-quote-consistency=no
# This flag controls whether the implicit-str-concat should generate a warning
# on implicit string concatenation in sequences defined over several lines.
check-str-concat-over-line-jumps=no
[TYPECHECK]
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=
# Tells whether to warn about missing members when the owner of the attribute
# is inferred to be None.
ignore-none=yes
# This flag controls whether pylint should warn about no-member and similar
# checks whenever an opaque object is returned when inferring. The inference
# can return multiple potential results while evaluating a Python object, but
# some branches might not be evaluated, which results in partial inference. In
# that case, it might be useful to still emit no-member and other checks for
# the rest of the inferred objects.
ignore-on-opaque-inference=yes
# List of symbolic message names to ignore for Mixin members.
ignored-checks-for-mixins=no-member,
                          not-async-context-manager,
                          not-context-manager,
                          attribute-defined-outside-init
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace
# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
missing-member-hint=yes
# The minimum edit distance a name should have in order to be considered a
# similar match for a missing member name.
missing-member-hint-distance=1
# The total number of similar names that should be taken in consideration when
# showing a hint for a missing member.
missing-member-max-choices=1
# Regex pattern to define which classes are considered mixins.
mixin-class-rgx=.*[Mm]ixin
# List of decorators that change the signature of a decorated function.
signature-mutators=
[VARIABLES]
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid defining new builtins when possible.
additional-builtins=__opts__,
                    __salt__,
                    __pillar__,
                    __grains__,
                    __context__,
                    __runner__,
                    __ret__,
                    __env__,
                    __low__,
                    __states__,
                    __lowstate__,
                    __running__,
                    __active_provider_name__,
                    __master_opts__,
                    __jid_event__,
                    __instance_id__,
                    __salt_system_encoding__,
                    __proxy__,
                    __serializers__,
                    __reg__,
                    __executors__,
                    __events__
# Tells whether unused global variables should be treated as a violation.
allow-global-unused-variables=yes
# List of names allowed to shadow builtins
allowed-redefined-builtins=
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,
          _cb
# A regular expression matching the name of dummy variables (i.e. expected to
# not be used).
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
# Argument names that match this expression will be ignored.
ignored-argument-names=_.*|^ignored_|^unused_
# Tells whether we should check for unused import in __init__ files.
init-import=no
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
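
The long `additional-builtins` list above exists because the Salt loader injects these dunder names into extension modules at runtime rather than importing them, so pylint would otherwise report them as undefined. A hypothetical execution-module function (not part of this package) showing the pattern:

```python
def disk_usage(path="/"):
    """
    Report disk usage via Salt's cmd module.

    CLI Example:

    .. code-block:: bash

        salt '*' demo.disk_usage /
    """
    # __opts__ and __salt__ are not defined in this file; the Salt loader
    # injects them when it loads the module, which is why .pylintrc declares
    # them under additional-builtins instead of letting pylint flag them.
    if __opts__.get("test"):
        return {}
    return __salt__["cmd.run"](f"df -h {path}")
```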

File: test-repo-1-0.1/saltext.prometheus-2.1.0/.readthedocs.yaml

# .readthedocs.yaml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details

# Required
version: 2

python:
  install:
    - method: pip
      path: .
      extra_requirements:
        - docs

# Set the version of Python and other tools you might need
build:
  os: ubuntu-20.04
  tools:
    python: "3.9"

# Build documentation in the docs/ directory with Sphinx
sphinx:
  configuration: docs/conf.py

File: test-repo-1-0.1/saltext.prometheus-2.1.0/CHANGELOG.md

The changelog format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
This project uses [Semantic Versioning](https://semver.org/) - MAJOR.MINOR.PATCH
# Changelog
## 2.1.0 (2024-02-22)
### Fixed
- Fix `salt_aborted` metric lacking a state label [#26](https://github.com/salt-extensions/saltext-prometheus/issues/26)
## 2.0.3 (2023-08-14)
### Fixed
- Fix salt deps by removing them (#22)
## 2.0.2 (2023-03-13)
### Fixed
- Fix mode set to int octal instead of octal notation (#21)
## 2.0.1 (2023-03-12)
### Fixed
- Fix KeyError thrown when requiring state is not run (#20)
## 2.0.0 (2022-10-13)
### Added
- Add ability to use prometheus_client library (#2)
## 1.1.1 (2022-05-04)
### Added
- Hide raw version numbers to normalize data on release version (#6)
## 1.0.1 (2022-03-18)
### Fixed
- Fix textfile output to view None result as success (#3)
## 1.0.0 (2022-01-29)
### Added
- Initial version of Prometheus Text Exposition Format Returner

File: test-repo-1-0.1/saltext.prometheus-2.1.0/CODE-OF-CONDUCT.md

# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in Salt
Extension Modules for Prometheus project and our community a harassment-free
experience for everyone, regardless of age, body size, visible or invisible
disability, ethnicity, sex characteristics, gender identity and expression,
level of experience, education, socio-economic status, nationality, personal
appearance, race, religion, or sexual identity and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at devops@eitr.tech.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series
of actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.

File: test-repo-1-0.1/saltext.prometheus-2.1.0/CONTRIBUTING.md

Thanks for your interest in contributing to the Salt Extension Modules for
Prometheus! We welcome any contribution, large or small - from adding a new
feature to fixing a single letter typo.
This is a companion to the Salt Project and the [Salt Contributing
Guide][salt-contributing] should be considered the default for this project.
Where this project disagrees with the Salt Project, the guidelines here take
precedence. Where this project is silent, the Salt guidelines should be used.
See the **Contributing** section in the [README][README.md] for a quickstart.
[README.md]: README.md
[salt-contributing]: https://docs.saltproject.io/en/master/topics/development/contributing.html

File: test-repo-1-0.1/saltext.prometheus-2.1.0/LICENSE

Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2022 EITR Technologies, LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
0707010000001F000081A400000000000000000000000167471E9C000001C7000000000000000000000000000000000000003400000000test-repo-1-0.1/saltext.prometheus-2.1.0/NOTICE.txtSalt Extension Modules for VMware Copyright 2021 VMware, Inc.
This product is licensed to you under the Apache 2.0 license (the "License").
You may not use this product except in compliance with the Apache 2.0 License.
This product may include a number of subcomponents with separate copyright
notices and license terms. Your use of these subcomponents is subject to the
terms and conditions of the subcomponent's license, as noted in the LICENSE
file.
07070100000020000081A400000000000000000000000167471E9C000014AC000000000000000000000000000000000000003200000000test-repo-1-0.1/saltext.prometheus-2.1.0/PKG-INFOMetadata-Version: 2.1
Name: saltext.prometheus
Version: 2.1.0
Summary: Salt Extension for interacting with Prometheus
Author-email: "EITR Technologies, LLC" <devops@eitr.tech>
License: Apache Software License
Project-URL: Homepage, https://github.com/salt-extensions/saltext-prometheus
Project-URL: Source, https://github.com/salt-extensions/saltext-prometheus
Project-URL: Tracker, https://github.com/salt-extensions/saltext-prometheus/issues
Keywords: salt-extension
Platform: any
Classifier: Programming Language :: Python
Classifier: Programming Language :: Cython
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Development Status :: 4 - Beta
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: Apache Software License
Requires-Python: >=3.8
Description-Content-Type: text/markdown
License-File: LICENSE
License-File: NOTICE.txt
Requires-Dist: prometheus_client
Requires-Dist: salt>=3005
Provides-Extra: changelog
Requires-Dist: towncrier==22.12.0; extra == "changelog"
Provides-Extra: dev
Requires-Dist: nox; extra == "dev"
Requires-Dist: pre-commit>=2.4.0; extra == "dev"
Requires-Dist: pylint; extra == "dev"
Requires-Dist: saltpylint; extra == "dev"
Provides-Extra: docs
Requires-Dist: sphinx; extra == "docs"
Requires-Dist: sphinx-prompt; extra == "docs"
Requires-Dist: sphinxcontrib-spelling; extra == "docs"
Requires-Dist: sphinx-copybutton; extra == "docs"
Requires-Dist: towncrier==22.12.0; extra == "docs"
Requires-Dist: sphinxcontrib-towncrier; extra == "docs"
Requires-Dist: myst_parser; extra == "docs"
Requires-Dist: furo; extra == "docs"
Requires-Dist: sphinx-inline-tabs; extra == "docs"
Provides-Extra: docsauto
Requires-Dist: sphinx-autobuild; extra == "docsauto"
Provides-Extra: lint
Requires-Dist: pylint; extra == "lint"
Requires-Dist: saltpylint; extra == "lint"
Provides-Extra: tests
Requires-Dist: pytest>=6.1.0; extra == "tests"
Requires-Dist: pytest-salt-factories>=1.0.0rc19; extra == "tests"
# Salt Extension Modules for Prometheus
[Documentation Status](https://saltext-prometheus.readthedocs.io/en/latest/?badge=latest)
This is a collection of Salt extension modules for use with Prometheus.
## Security
If you think you've found a security vulnerability, see
[Salt's security guide][security].
## User Documentation
This README is more for contributing to the project. If you just want to get
started, check out the [User Documentation][docs].
## Contributing
The saltext-prometheus project team welcomes contributions from the community.
The [Salt Contributing guide][salt-contributing] has a lot of relevant
information, but if you'd like to jump right in here's how to get started:
# Clone the repo
git clone --origin salt git@github.com:salt-extensions/saltext-prometheus.git
# Change to the repo dir
cd saltext-prometheus
# Create a new venv
python3 -m venv env --prompt salt-ext-prom
source env/bin/activate
# On mac, you may need to upgrade pip
python -m pip install --upgrade pip
# On WSL or some flavors of linux you may need to install the `enchant`
# library in order to build the docs
sudo apt-get install -y enchant
# Install extension + test/dev/doc dependencies into your environment
python -m pip install -e .\[tests,dev,docs\]
# Run tests!
python -m nox -e tests-3
# skip requirements install for next time
export SKIP_REQUIREMENTS_INSTALL=1
# Build the docs, serve, and view in your web browser:
python -m nox -e docs && (cd docs/_build/html; python -m webbrowser localhost:8000; python -m http.server; cd -)
Writing code isn't the only way to contribute! We value contributions in any of
these areas:
* Documentation - especially examples of how to use this module to solve
specific problems.
* Triaging [issues][issues] and participating in [discussions][discussions]
* Reviewing [Pull Requests][PRs] (we really like
[Conventional Comments][comments]!)
You could also contribute in other ways:
* Writing blog posts
* Posting on social media about how you used Salt+Prometheus to solve your
problems, including videos
* Giving talks at conferences
* Publishing videos
* Asking/answering questions in IRC, Slack, or email groups
Any of these things are super valuable to our community, and we sincerely
appreciate every contribution!
For more information, build the docs and head over to http://localhost:8000/ —
that's where you'll find the rest of the documentation.
[security]: https://github.com/saltstack/salt/blob/master/SECURITY.md
[salt-contributing]: https://docs.saltproject.io/en/master/topics/development/contributing.html
[issues]: https://github.com/salt-extensions/saltext-prometheus/issues
[PRs]: https://github.com/salt-extensions/saltext-prometheus/pulls
[discussions]: https://github.com/salt-extensions/saltext-prometheus/discussions
[comments]: https://conventionalcomments.org/
[docs]: https://docs.saltproject.io/salt/extensions/saltext-prometheus/en/latest/index.html
07070100000021000081A400000000000000000000000167471E9C00000C2C000000000000000000000000000000000000003300000000test-repo-1-0.1/saltext.prometheus-2.1.0/README.md# Salt Extension Modules for Prometheus
[Documentation Status](https://saltext-prometheus.readthedocs.io/en/latest/?badge=latest)
This is a collection of Salt extension modules for use with Prometheus.
## Security
If you think you've found a security vulnerability, see
[Salt's security guide][security].
## User Documentation
This README is more for contributing to the project. If you just want to get
started, check out the [User Documentation][docs].
## Contributing
The saltext-prometheus project team welcomes contributions from the community.
The [Salt Contributing guide][salt-contributing] has a lot of relevant
information, but if you'd like to jump right in here's how to get started:
# Clone the repo
git clone --origin salt git@github.com:salt-extensions/saltext-prometheus.git
# Change to the repo dir
cd saltext-prometheus
# Create a new venv
python3 -m venv env --prompt salt-ext-prom
source env/bin/activate
# On mac, you may need to upgrade pip
python -m pip install --upgrade pip
# On WSL or some flavors of linux you may need to install the `enchant`
# library in order to build the docs
sudo apt-get install -y enchant
# Install extension + test/dev/doc dependencies into your environment
python -m pip install -e .\[tests,dev,docs\]
# Run tests!
python -m nox -e tests-3
# skip requirements install for next time
export SKIP_REQUIREMENTS_INSTALL=1
# Build the docs, serve, and view in your web browser:
python -m nox -e docs && (cd docs/_build/html; python -m webbrowser localhost:8000; python -m http.server; cd -)
Writing code isn't the only way to contribute! We value contributions in any of
these areas:
* Documentation - especially examples of how to use this module to solve
specific problems.
* Triaging [issues][issues] and participating in [discussions][discussions]
* Reviewing [Pull Requests][PRs] (we really like
[Conventional Comments][comments]!)
You could also contribute in other ways:
* Writing blog posts
* Posting on social media about how you used Salt+Prometheus to solve your
problems, including videos
* Giving talks at conferences
* Publishing videos
* Asking/answering questions in IRC, Slack, or email groups
Any of these things are super valuable to our community, and we sincerely
appreciate every contribution!
For more information, build the docs and head over to http://localhost:8000/ —
that's where you'll find the rest of the documentation.
[security]: https://github.com/saltstack/salt/blob/master/SECURITY.md
[salt-contributing]: https://docs.saltproject.io/en/master/topics/development/contributing.html
[issues]: https://github.com/salt-extensions/saltext-prometheus/issues
[PRs]: https://github.com/salt-extensions/saltext-prometheus/pulls
[discussions]: https://github.com/salt-extensions/saltext-prometheus/discussions
[comments]: https://conventionalcomments.org/
[docs]: https://docs.saltproject.io/salt/extensions/saltext-prometheus/en/latest/index.html
07070100000022000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003300000000test-repo-1-0.1/saltext.prometheus-2.1.0/changelog07070100000023000081A400000000000000000000000167471E9C0000000C000000000000000000000000000000000000003E00000000test-repo-1-0.1/saltext.prometheus-2.1.0/changelog/.gitignore!.gitignore
07070100000024000081A400000000000000000000000167471E9C00000135000000000000000000000000000000000000004300000000test-repo-1-0.1/saltext.prometheus-2.1.0/changelog/.template.jinja{% if sections[""] %}
{% for category, val in definitions.items() if category in sections[""] %}
### {{ definitions[category]['name'] }}
{% for text, values in sections[""][category].items() %}
- {{ text }} {{ values|join(', ') }}
{% endfor %}
{% endfor %}
{% else %}
No significant changes.
{% endif %}
07070100000025000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000002E00000000test-repo-1-0.1/saltext.prometheus-2.1.0/docs07070100000026000081A400000000000000000000000167471E9C0000027A000000000000000000000000000000000000003700000000test-repo-1-0.1/saltext.prometheus-2.1.0/docs/Makefile# Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR = .
BUILDDIR = _build
# Put it first so that "make" without argument is like "make help".
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
07070100000027000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003600000000test-repo-1-0.1/saltext.prometheus-2.1.0/docs/_static07070100000028000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000003F00000000test-repo-1-0.1/saltext.prometheus-2.1.0/docs/_static/.gitkeep07070100000029000081A400000000000000000000000167471E9C0000011A000000000000000000000000000000000000003B00000000test-repo-1-0.1/saltext.prometheus-2.1.0/docs/changelog.md# Changelog
The changelog format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
This project uses [Semantic Versioning](https://semver.org/) - MAJOR.MINOR.PATCH
```{towncrier-draft-entries}
```
```{include} ../CHANGELOG.md
:start-after: '# Changelog'
```
0707010000002A000081ED00000000000000000000000167471E9C0000184E000000000000000000000000000000000000003600000000test-repo-1-0.1/saltext.prometheus-2.1.0/docs/conf.py# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import datetime
import email.policy
import os
import sys
from pathlib import Path
try:
from importlib_metadata import distribution
except ImportError:
from importlib.metadata import distribution
try:
docs_basepath = os.path.abspath(os.path.dirname(__file__))
except NameError:
# sphinx-intl and six execute some code which will raise this NameError
# assume we're in the doc/ directory
docs_basepath = os.path.abspath(os.path.dirname("."))
PROJECT_ROOT_DIR = Path(docs_basepath).parent
addtl_paths = (
os.path.join(os.pardir, "src"), # saltext.prometheus itself (for autodoc)
"_ext", # custom Sphinx extensions
)
for addtl_path in addtl_paths:
sys.path.insert(0, os.path.abspath(os.path.join(docs_basepath, addtl_path)))
dist = distribution("saltext.prometheus")
# -- Project information -----------------------------------------------------
this_year = datetime.datetime.today().year
if this_year == 2021:
copyright_year = 2021
else:
copyright_year = f"2021 - {this_year}"
project = dist.metadata["Summary"]
author = dist.metadata["Author"]
if author is None:
# Core metadata is serialized differently with pyproject.toml:
# https://packaging.python.org/en/latest/specifications/pyproject-toml/#authors-maintainers
author_email = dist.metadata["Author-email"]
em = email.message_from_string(
f"To: {author_email}",
policy=email.policy.default,
)
if em["To"].addresses and em["To"].addresses[0]:
author = em["To"].addresses[0].display_name
author = author or ""
copyright = f"{copyright_year}, {author}"
# The full version, including alpha/beta/rc tags
release = dist.version
# Variables to pass into the docs from sitevars.rst for rst substitution
with open("sitevars.rst") as site_vars_file:
site_vars = site_vars_file.read().splitlines()
rst_prolog = """
{}
""".format(
"\n".join(site_vars[:])
)
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.autosummary",
"sphinx.ext.napoleon",
"sphinx.ext.intersphinx",
"sphinx.ext.viewcode",
"sphinx.ext.todo",
"sphinx.ext.coverage",
"sphinx_copybutton",
"sphinxcontrib.spelling",
"sphinxcontrib.towncrier.ext",
"myst_parser",
"sphinx_inline_tabs",
]
myst_enable_extensions = [
"colon_fence",
"deflist",
"tasklist",
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = [
"_build",
"Thumbs.db",
".DS_Store",
".vscode",
".venv",
".git",
".gitlab-ci",
".gitignore",
"sitevars.rst",
]
autosummary_generate = False
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "furo"
html_title = project
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = ""
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large. Favicons can be up to at least 228x228. PNG
# format is supported as well, not just .ico'
html_favicon = ""
# Sphinx Napoleon Config
napoleon_google_docstring = True
napoleon_numpy_docstring = False
napoleon_include_init_with_doc = True
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True
# ----- Intersphinx Config ---------------------------------------------------------------------------------------->
intersphinx_mapping = {
"python": ("https://docs.python.org/3", None),
"pytest": ("https://docs.pytest.org/en/stable", None),
"salt": ("https://docs.saltproject.io/en/latest", None),
}
# <---- Intersphinx Config -----------------------------------------------------------------------------------------
# ----- Autodoc Config ---------------------------------------------------------------------------------------------->
autodoc_default_options = {"member-order": "bysource"}
autodoc_mock_imports = ["salt"]
# <---- Autodoc Config -----------------------------------------------------------------------------------------------
# Towncrier draft config
towncrier_draft_autoversion_mode = "sphinx-release"
towncrier_draft_include_empty = True
towncrier_draft_working_directory = str(PROJECT_ROOT_DIR)
def setup(app):
app.add_crossref_type(
directivename="fixture",
rolename="fixture",
indextemplate="pair: %s; fixture",
)
# Allow linking to pytest's confvals.
app.add_object_type(
"confval",
"pytest-confval",
objname="configuration value",
indextemplate="pair: %s; configuration value",
)
0707010000002B000081A400000000000000000000000167471E9C000003A7000000000000000000000000000000000000003800000000test-repo-1-0.1/saltext.prometheus-2.1.0/docs/index.rst``saltext-prometheus``: Integrate Salt with Prometheus
======================================================
Salt Extension for interacting with Prometheus
This guide will walk you through the process of installing and setting up the Prometheus Salt extension on your machine.
What is the Prometheus Salt Extension?
========================================
The Prometheus Salt Extension is a Salt extension module that provides features and functionality related to Prometheus through easily pluggable modules.
Contents
--------
.. toctree::
:maxdepth: 2
:caption: Guides
:hidden:
topics/quick_start
topics/installation
topics/user_documentation
.. toctree::
:maxdepth: 2
:caption: Provided Modules
:hidden:
ref/engines/index
ref/returners/index
.. toctree::
:maxdepth: 2
:caption: Reference
:hidden:
changelog
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
0707010000002C000081A400000000000000000000000167471E9C000002F8000000000000000000000000000000000000003700000000test-repo-1-0.1/saltext.prometheus-2.1.0/docs/make.bat@ECHO OFF
pushd %~dp0
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=_build
if "%1" == "" goto help
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.http://sphinx-doc.org/
exit /b 1
)
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end
:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
:end
popd
0707010000002D000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003200000000test-repo-1-0.1/saltext.prometheus-2.1.0/docs/ref0707010000002E000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000003B00000000test-repo-1-0.1/saltext.prometheus-2.1.0/docs/ref/.gitkeep0707010000002F000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003A00000000test-repo-1-0.1/saltext.prometheus-2.1.0/docs/ref/engines07070100000030000081A400000000000000000000000167471E9C000000B4000000000000000000000000000000000000004400000000test-repo-1-0.1/saltext.prometheus-2.1.0/docs/ref/engines/index.rst.. all-saltext.prometheus.engines:
______________
Engine Modules
______________
.. currentmodule:: saltext.prometheus.engines
.. autosummary::
:toctree:
prometheus_mod
07070100000031000081A400000000000000000000000167471E9C00000067000000000000000000000000000000000000006800000000test-repo-1-0.1/saltext.prometheus-2.1.0/docs/ref/engines/saltext.prometheus.engines.prometheus_mod.rst``prometheus``
==============
.. automodule:: saltext.prometheus.engines.prometheus_mod
:members:
07070100000032000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003C00000000test-repo-1-0.1/saltext.prometheus-2.1.0/docs/ref/returners07070100000033000081A400000000000000000000000167471E9C000000C3000000000000000000000000000000000000004600000000test-repo-1-0.1/saltext.prometheus-2.1.0/docs/ref/returners/index.rst.. all-saltext.prometheus.returners:
________________
Returner Modules
________________
.. currentmodule:: saltext.prometheus.returners
.. autosummary::
:toctree:
prometheus_textfile
07070100000034000081A400000000000000000000000167471E9C00000080000000000000000000000000000000000000007100000000test-repo-1-0.1/saltext.prometheus-2.1.0/docs/ref/returners/saltext.prometheus.returners.prometheus_textfile.rst``prometheus_textfile``
=======================
.. automodule:: saltext.prometheus.returners.prometheus_textfile
:members:
07070100000035000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000003B00000000test-repo-1-0.1/saltext.prometheus-2.1.0/docs/sitevars.rst07070100000036000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003500000000test-repo-1-0.1/saltext.prometheus-2.1.0/docs/topics07070100000037000081A400000000000000000000000167471E9C0000037A000000000000000000000000000000000000004500000000test-repo-1-0.1/saltext.prometheus-2.1.0/docs/topics/installation.md# Installation
Generally, extensions need to be installed into the same Python environment Salt uses.
:::{tab} State
```yaml
Install Salt Prometheus extension:
pip.installed:
- name: saltext-prometheus
```
:::
:::{tab} Onedir installation
```bash
salt-pip install saltext-prometheus
```
:::
:::{tab} Regular installation
```bash
pip install saltext-prometheus
```
:::
:::{important}
Currently, there is [an issue][issue-second-saltext] where the installation of a Saltext fails silently
if the environment already has another one installed. You can work around this by
removing all Saltexts and reinstalling them in a single transaction, as sketched below.
:::
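For example, if another Salt extension is already installed alongside this one, the workaround could look like the following minimal sketch for an onedir installation (`saltext-example` is a placeholder for whichever other Saltext you actually have installed):
```bash
# Remove all installed Salt extensions first ...
salt-pip uninstall -y saltext-prometheus saltext-example
# ... then reinstall them together in a single pip transaction
salt-pip install saltext-prometheus saltext-example
```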
:::{hint}
Unlike custom modules, Saltexts are not distributed automatically via the fileserver; they need to be installed
on each node where you want them to be available.
:::
[issue-second-saltext]: https://github.com/saltstack/salt/issues/65433
07070100000038000081A400000000000000000000000167471E9C000009AD000000000000000000000000000000000000004500000000test-repo-1-0.1/saltext.prometheus-2.1.0/docs/topics/quick_start.rstQuick Start Guide
=================
This guide will show you how to get started running the Prometheus Salt extension.
Before You Start
----------------
Ensure Salt 3005 or above is installed and running on your machine.
If you haven't installed Salt yet, refer to the `Salt Installation Guide <https://docs.saltproject.io/salt/install-guide/en/latest>`_.
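A quick way to confirm the installed version meets this requirement (a minimal sketch; use whichever command matches your setup):
.. code-block:: bash
# Report the Salt version on the master
salt --version
# Report the Salt version of a minion without contacting the master
salt-call --local test.version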
Installing the Extension
------------------------
Several methods are available for installing the Prometheus Salt extension:
- **Method 1: Using pip**
.. code-block::
pip install saltext-prometheus
.. note::
Depending on the Salt version, Salt may not be using the system Python. For those versions, ensure you're using the Python associated with Salt (typically found at **/opt/saltstack/salt/bin/python**).
- **Method 2: Using Salt**
Use an execution module like:
.. code-block::
salt \* pip.install saltext-prometheus
Note: The extension can be installed and used on all minions, or only on the specific minions where reporting data is needed.
Verify Installation - (Optional)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Verify that the extension is installed:
.. code-block::
salt --versions-report
You should see `saltext.prometheus` listed under Salt extensions.
Getting Started
---------------
After successfully installing the extension, you are ready to use the Prometheus Salt extension modules.
.. raw:: html
<br />
**Example:** Apply a test state using prometheus_textfile as the returner
Create a test.sls file in the directory /srv/salt
/srv/salt/test.sls
.. code-block:: yaml
/tmp/dummy.text:
file.managed:
- contents: |
helloworld
Execute the following command:
.. code-block::
salt \* state.apply test --return prometheus_textfile
You should see an output file created on the minion machine (default location: **/var/cache/salt/minion/prometheus_textfile/salt.prom**).
**Example output file:**
.. code-block::
salt_last_completed 1.698364953e+09
# HELP salt_version Version of installed Salt package
# TYPE salt_version gauge
salt_version 3006.3
# HELP salt_version_tagged Version of installed Salt package as a tag
# TYPE salt_version_tagged gauge
salt_version_tagged{salt_version="3006.3"} 1.0
Additional Resources
--------------------
For more detailed information on functionality, use cases, and configuration, please visit our :ref:`user-documentation`.
07070100000039000081A400000000000000000000000167471E9C00001F4A000000000000000000000000000000000000004C00000000test-repo-1-0.1/saltext.prometheus-2.1.0/docs/topics/user_documentation.rst.. _user-documentation:
User Documentation
==================
Welcome to the Prometheus Salt Extension Documentation. This guide provides information on installing and using the Prometheus Salt Extension.
Introduction
------------
The Prometheus Salt Extension is a Salt extension that bundles Prometheus-related modules for easy plugin functionality.
Prometheus and Salt
~~~~~~~~~~~~~~~~~~~
Prometheus is a widely used open-source monitoring solution that provides a robust platform for real-time tracking of system and application metrics.
Salt, in turn, is a powerful automation tool that offers flexible and scalable infrastructure management, streamlining tasks such as configuration management and remote execution.
The Prometheus Salt extension bridges the two: it extends Salt so that Salt-related metrics can be exposed to existing Prometheus infrastructure for effective monitoring and metrics gathering.
Modules
-------
The Prometheus Salt extension provides custom Salt modules that implement its functionality.
Prometheus Textfile
~~~~~~~~~~~~~~~~~~~
The Prometheus textfile module is a custom returner module.
**What's a Salt returner module?** A returner module defines the method and format in which the results of Salt execution commands are transmitted from the minions back to the master.
This module writes a `Text Exposition Format <https://prometheus.io/docs/instrumenting/exposition_formats/#text-format-example>`_ file on the minion containing Salt-specific metrics gathered from that minion, in a form that can be ingested by Prometheus infrastructure for monitoring purposes.
To use the extension, run a Salt state command with the ``--return`` flag pointing to the `prometheus_textfile` module. You can also set the returner module through configuration files.
Example usage command: ``salt \* state.apply test --return prometheus_textfile``
By default, the output file is written to ``/var/cache/salt/minion/prometheus_textfile/salt.prom``, but this location can be changed via a configuration file.
Note: The extension can be installed and used on all minions, or only on the specific minions where reporting data is needed.
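Putting this together, a minimal sketch of the workflow (assuming the default output path above; the exact metrics will vary by environment):
.. code-block:: bash
# Apply a state and send the results through the prometheus_textfile returner
salt \* state.apply test --return prometheus_textfile
# Inspect the generated Text Exposition Format file on the minion
cat /var/cache/salt/minion/prometheus_textfile/salt.prom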
**Example output file:**
.. code-block::
salt_last_completed 1.698364953e+09
# HELP salt_version Version of installed Salt package
# TYPE salt_version gauge
salt_version 3006.3
# HELP salt_version_tagged Version of installed Salt package as a tag
# TYPE salt_version_tagged gauge
salt_version_tagged{salt_version="3006.3"} 1.0
Use Cases
*********
The Prometheus Salt Extension makes it easy to output Salt-specific metric data across systems managed with Salt. This data can be used in a variety of ways, including tracking minion states, Salt versions, and other Salt-specific metrics.
`Consider the following example:`
In a large environment where Salt is used for infrastructure management, a Prometheus/Grafana stack is installed with node-exporter running on the machines. An engineer installs the Prometheus Salt extension, which generates metrics data in a format node-exporter can pick up. Node-exporter transmits this data to Prometheus, where it becomes available for visualization in Grafana. Grafana visualizes these metrics, and its alerting system can identify active minions and signal failures within the system. This approach plugs into both existing and new monitoring and alerting workflows in the Prometheus/Grafana ecosystem, making the information visible through established pipelines.
`Consider another example:`
Suppose a security vulnerability is found in the Salt version used on your minion machines, and a patch is released that your machines must upgrade to in order to comply with security protocols. With the Prometheus Salt Extension integrated into your environment, the metric data it generates can be incorporated into a Grafana dashboard within your existing monitoring infrastructure, allowing you to track Salt versions across all minion machines as they are upgraded.
Video Demo: `Nick the Salt Guy Demos the Prometheus Salt Extension <https://www.youtube.com/watch?v=8yv_AeHOHOE&t>`_
Installation
------------
Dependencies
~~~~~~~~~~~~
Before installing the Prometheus Salt extension, ensure you have the following
dependencies installed:
- Salt: version 3005 or higher. If you don't have Salt on your machine, see the `Salt Installation Guide <https://docs.saltproject.io/salt/install-guide/en/latest>`_.
Several methods are available for installing this extension:
Method 1: Using pip
~~~~~~~~~~~~~~~~~~~
1. Open a terminal or command prompt.
2. Run the following command to install the Prometheus Salt extension:
.. code-block:: bash
pip install saltext-prometheus
**IMPORTANT:** Depending on the version of Salt used, verify that the Python you are using is Salt's bundled Python, most commonly found at **/opt/saltstack/salt/bin/python**.
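For example, on an onedir installation you could invoke pip through Salt's bundled interpreter instead of the system one (a minimal sketch, assuming the path above):
.. code-block:: bash
# Install the extension into Salt's own Python environment
/opt/saltstack/salt/bin/python -m pip install saltext-prometheus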
.. raw:: html
<br />
Method 2: Using salt
~~~~~~~~~~~~~~~~~~~~
1. Verify Salt is installed on the target machine.
2. Run the following command to install the Prometheus Salt extension:
.. code-block:: bash
salt \* pip.install saltext-prometheus
`Once the extension is installed, you can verify the installation, or proceed to use the extension in your environment.`
**Verify Installation** - `(Optional)`
Verify that the extension is installed by running the following command:
.. code-block::
salt --versions-report
You should see `saltext.prometheus` listed under Salt extensions.
**Using the Extension**
After successfully installing the extension, you are ready to execute Prometheus Salt extension modules.
.. raw:: html
<br />
**Example:** Apply a test state using prometheus_textfile as the returner
1. Create a test.sls file in the directory /srv/salt
**/srv/salt/test.sls**
.. code-block:: yaml
/tmp/dummy.text:
file.managed:
- contents: |
helloworld
2. Execute the following command:
.. code-block::
salt \* state.apply test --return prometheus_textfile
3. Check the output file created on the minion machine (default location: **/var/cache/salt/minion/prometheus_textfile/salt.prom**).
**Example output file:**
.. code-block::
salt_last_completed 1.698364953e+09
# HELP salt_version Version of installed Salt package
# TYPE salt_version gauge
salt_version 3006.3
# HELP salt_version_tagged Version of installed Salt package as a tag
# TYPE salt_version_tagged gauge
salt_version_tagged{salt_version="3006.3"} 1.0
Configuration
-------------
The Prometheus Salt extension works out of the box with default settings. However, it offers configurable components that can be customized via a configuration file.
Configuration files for the extension follow the standard Salt conventions. In the following example, we use the default location for Salt minion config files and the `prometheus_textfile` returner module.
In the directory **/etc/salt/minion.d**, create a file called **prometheus.conf**:
.. code-block::
prometheus_textfile.filename: /prometheus/metrics/salt.prom
return:
- prometheus_textfile
`This example configuration sets the default returner to prometheus_textfile and writes the output file to a custom location.`
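Configuration changes take effect once the minion is restarted. A minimal sketch of verifying the new settings (assuming a systemd-managed minion and the custom path configured above):
.. code-block:: bash
# On the minion: restart it so prometheus.conf is picked up
systemctl restart salt-minion
# From the master: apply a state; the configured returner is used automatically
salt \* state.apply test
# On the minion: the output file is now written to the configured custom path
cat /prometheus/metrics/salt.prom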
**Configurable Options**
Prometheus Textfile: See module documentation
Uninstall
---------
You can uninstall the Prometheus Salt Extension using pip:
.. code-block::
pip uninstall saltext-prometheus
0707010000003A000081ED00000000000000000000000167471E9C00004543000000000000000000000000000000000000003400000000test-repo-1-0.1/saltext.prometheus-2.1.0/noxfile.py# pylint: disable=missing-module-docstring,import-error,protected-access,missing-function-docstring
import datetime
import json
import os
import pathlib
import shutil
import sys
import tempfile
from pathlib import Path
import nox
from nox.command import CommandFailed
from nox.virtualenv import VirtualEnv
# Nox options
# Reuse existing virtualenvs
nox.options.reuse_existing_virtualenvs = True
# Don't fail on missing interpreters
nox.options.error_on_missing_interpreters = False
# Python versions to test against
PYTHON_VERSIONS = ("3", "3.8", "3.9", "3.10")
# Be verbose when running under a CI context
CI_RUN = (
os.environ.get("JENKINS_URL") or os.environ.get("CI") or os.environ.get("DRONE") is not None
)
PIP_INSTALL_SILENT = CI_RUN is False
SKIP_REQUIREMENTS_INSTALL = "SKIP_REQUIREMENTS_INSTALL" in os.environ
EXTRA_REQUIREMENTS_INSTALL = os.environ.get("EXTRA_REQUIREMENTS_INSTALL")
COVERAGE_VERSION_REQUIREMENT = "coverage==5.2"
SALT_REQUIREMENT = os.environ.get("SALT_REQUIREMENT") or "salt>=3005"
if SALT_REQUIREMENT == "salt==master":
SALT_REQUIREMENT = "git+https://github.com/saltstack/salt.git@master"
# Prevent Python from writing bytecode
os.environ["PYTHONDONTWRITEBYTECODE"] = "1"
# Global Path Definitions
REPO_ROOT = pathlib.Path(__file__).resolve().parent
# Change current directory to REPO_ROOT
os.chdir(str(REPO_ROOT))
ARTIFACTS_DIR = REPO_ROOT / "artifacts"
# Make sure the artifacts directory exists
ARTIFACTS_DIR.mkdir(parents=True, exist_ok=True)
CUR_TIME = datetime.datetime.now().strftime("%Y%m%d%H%M%S.%f")
RUNTESTS_LOGFILE = ARTIFACTS_DIR / f"runtests-{CUR_TIME}.log"
COVERAGE_REPORT_DB = REPO_ROOT / ".coverage"
COVERAGE_REPORT_PROJECT = ARTIFACTS_DIR.relative_to(REPO_ROOT) / "coverage-project.xml"
COVERAGE_REPORT_TESTS = ARTIFACTS_DIR.relative_to(REPO_ROOT) / "coverage-tests.xml"
JUNIT_REPORT = ARTIFACTS_DIR.relative_to(REPO_ROOT) / "junit-report.xml"
def _get_session_python_version_info(session):
try:
version_info = session._runner._real_python_version_info
except AttributeError:
session_py_version = session.run_always(
"python",
"-c",
'import sys; sys.stdout.write("{}.{}.{}".format(*sys.version_info))',
silent=True,
log=False,
)
version_info = tuple(int(part) for part in session_py_version.split(".") if part.isdigit())
session._runner._real_python_version_info = version_info
return version_info
def _get_pydir(session):
version_info = _get_session_python_version_info(session)
if version_info < (3, 8):
session.error("Only Python >= 3.8 is supported")
return f"py{version_info[0]}.{version_info[1]}"
def _install_requirements(
session,
*passed_requirements, # pylint: disable=unused-argument
install_coverage_requirements=True,
install_test_requirements=True,
install_source=False,
install_salt=True,
install_extras=None,
):
install_extras = install_extras or []
if SKIP_REQUIREMENTS_INSTALL is False:
# Always have the wheel package installed
session.install("--progress-bar=off", "wheel", silent=PIP_INSTALL_SILENT)
if install_coverage_requirements:
session.install(
"--progress-bar=off", COVERAGE_VERSION_REQUIREMENT, silent=PIP_INSTALL_SILENT
)
if install_salt:
session.install("--progress-bar=off", SALT_REQUIREMENT, silent=PIP_INSTALL_SILENT)
if install_test_requirements:
install_extras.append("tests")
if EXTRA_REQUIREMENTS_INSTALL:
session.log(
"Installing the following extra requirements because the "
"EXTRA_REQUIREMENTS_INSTALL environment variable was set: "
"EXTRA_REQUIREMENTS_INSTALL='%s'",
EXTRA_REQUIREMENTS_INSTALL,
)
install_command = ["--progress-bar=off"]
install_command += [req.strip() for req in EXTRA_REQUIREMENTS_INSTALL.split()]
session.install(*install_command, silent=PIP_INSTALL_SILENT)
if install_source:
pkg = "."
if install_extras:
pkg += f"[{','.join(install_extras)}]"
session.install("-e", pkg, silent=PIP_INSTALL_SILENT)
elif install_extras:
pkg = f".[{','.join(install_extras)}]"
session.install(pkg, silent=PIP_INSTALL_SILENT)
@nox.session(python=PYTHON_VERSIONS)
def tests(session):
_install_requirements(session, install_source=True)
sitecustomize_dir = session.run("salt-factories", "--coverage", silent=True, log=False)
python_path_env_var = os.environ.get("PYTHONPATH") or None
if python_path_env_var is None:
python_path_env_var = sitecustomize_dir
else:
python_path_entries = python_path_env_var.split(os.pathsep)
if sitecustomize_dir in python_path_entries:
python_path_entries.remove(sitecustomize_dir)
python_path_entries.insert(0, sitecustomize_dir)
python_path_env_var = os.pathsep.join(python_path_entries)
env = {
# The updated python path so that sitecustomize is importable
"PYTHONPATH": python_path_env_var,
# The full path to the .coverage data file. Makes sure we always write
# them to the same directory
"COVERAGE_FILE": str(COVERAGE_REPORT_DB),
# Instruct sub processes to also run under coverage
"COVERAGE_PROCESS_START": str(REPO_ROOT / ".coveragerc"),
}
session.run("coverage", "erase")
args = [
"--rootdir",
str(REPO_ROOT),
f"--log-file={RUNTESTS_LOGFILE.relative_to(REPO_ROOT)}",
"--log-file-level=debug",
"--show-capture=no",
f"--junitxml={JUNIT_REPORT}",
"--showlocals",
"-ra",
"-s",
]
if session._runner.global_config.forcecolor:
args.append("--color=yes")
if not session.posargs:
args.append("tests/")
else:
for arg in session.posargs:
if arg.startswith("--color") and args[0].startswith("--color"):
args.pop(0)
args.append(arg)
for arg in session.posargs:
if arg.startswith("-"):
continue
if arg.startswith(f"tests{os.sep}"):
break
try:
pathlib.Path(arg).resolve().relative_to(REPO_ROOT / "tests")
break
except ValueError:
continue
else:
args.append("tests/")
try:
session.run("coverage", "run", "-m", "pytest", *args, env=env)
finally:
# Always combine and generate the XML coverage report
try:
session.run("coverage", "combine")
except CommandFailed:
# Sometimes some of the coverage files are corrupt which would
# trigger a CommandFailed exception
pass
# Generate report for salt code coverage
session.run(
"coverage",
"xml",
"-o",
str(COVERAGE_REPORT_PROJECT),
"--omit=tests/*",
"--include=src/saltext/prometheus/*",
)
# Generate report for tests code coverage
session.run(
"coverage",
"xml",
"-o",
str(COVERAGE_REPORT_TESTS),
"--omit=src/saltext/prometheus/*",
"--include=tests/*",
)
try:
session.run(
"coverage", "report", "--show-missing", "--include=src/saltext/prometheus/*"
)
# If you also want to display the code coverage report on the CLI
# for the tests, comment the call above and uncomment the line below
# session.run(
# "coverage", "report", "--show-missing",
# "--include=src/saltext/prometheus/*,tests/*"
# )
finally:
# Move the coverage DB to artifacts/coverage in order for it to be archived by CI
if COVERAGE_REPORT_DB.exists():
shutil.move(str(COVERAGE_REPORT_DB), str(ARTIFACTS_DIR / COVERAGE_REPORT_DB.name))
class Tee:
"""
Python class to mimic linux tee behaviour
"""
def __init__(self, first, second):
self._first = first
self._second = second
def write(self, buf):
wrote = self._first.write(buf)
self._first.flush()
self._second.write(buf)
self._second.flush()
return wrote
def fileno(self):
return self._first.fileno()
def _lint(session, rcfile, flags, paths, tee_output=True):
_install_requirements(
session,
install_salt=False,
install_coverage_requirements=False,
install_test_requirements=False,
install_extras=["dev", "tests"],
)
if tee_output:
session.run("pylint", "--version")
pylint_report_path = os.environ.get("PYLINT_REPORT")
cmd_args = ["pylint", f"--rcfile={rcfile}"] + list(flags) + list(paths)
src_path = str(REPO_ROOT / "src")
python_path_env_var = os.environ.get("PYTHONPATH") or None
if python_path_env_var is None:
python_path_env_var = src_path
else:
python_path_entries = python_path_env_var.split(os.pathsep)
if src_path in python_path_entries:
python_path_entries.remove(src_path)
python_path_entries.insert(0, src_path)
python_path_env_var = os.pathsep.join(python_path_entries)
env = {
# The updated python path so that the project is importable without installing it
"PYTHONPATH": python_path_env_var,
"PYTHONUNBUFFERED": "1",
}
cmd_kwargs = {"env": env}
if tee_output:
stdout = tempfile.TemporaryFile(mode="w+b")
cmd_kwargs["stdout"] = Tee(stdout, sys.__stdout__)
try:
session.run(*cmd_args, **cmd_kwargs)
finally:
if tee_output:
stdout.seek(0)
contents = stdout.read()
if contents:
contents = contents.decode("utf-8")
sys.stdout.write(contents)
sys.stdout.flush()
if pylint_report_path:
# Write report
with open(pylint_report_path, "w", encoding="utf-8") as wfh:
wfh.write(contents)
session.log("Report file written to %r", pylint_report_path)
stdout.close()
def _lint_pre_commit(session, rcfile, flags, paths):
if "VIRTUAL_ENV" not in os.environ:
session.error(
"This should be running from within a virtualenv and "
"'VIRTUAL_ENV' was not found as an environment variable."
)
if "pre-commit" not in os.environ["VIRTUAL_ENV"]:
session.error(
"This should be running from within a pre-commit virtualenv and "
f"'VIRTUAL_ENV'({os.environ['VIRTUAL_ENV']}) does not appear to be a pre-commit virtualenv."
)
# Let's patch nox to make it run inside the pre-commit virtualenv
session._runner.venv = VirtualEnv(
os.environ["VIRTUAL_ENV"],
interpreter=session._runner.func.python,
reuse_existing=True,
venv=True,
)
_lint(session, rcfile, flags, paths, tee_output=False)
@nox.session(python="3")
def lint(session):
"""
Run PyLint against the code and the test suite. Set PYLINT_REPORT to a path to capture output.
"""
session.notify(f"lint-code-{session.python}")
session.notify(f"lint-tests-{session.python}")
@nox.session(python="3", name="lint-code")
def lint_code(session):
"""
Run PyLint against the code. Set PYLINT_REPORT to a path to capture output.
"""
flags = ["--disable=I"]
if session.posargs:
paths = session.posargs
else:
paths = ["setup.py", "noxfile.py", "src/"]
_lint(session, ".pylintrc", flags, paths)
@nox.session(python="3", name="lint-tests")
def lint_tests(session):
"""
Run PyLint against the test suite. Set PYLINT_REPORT to a path to capture output.
"""
flags = [
"--disable=I,redefined-outer-name,missing-function-docstring,no-member,missing-module-docstring"
]
if session.posargs:
paths = session.posargs
else:
paths = ["tests/"]
_lint(session, ".pylintrc", flags, paths)
@nox.session(python=False, name="lint-code-pre-commit")
def lint_code_pre_commit(session):
"""
Run PyLint against the code. Set PYLINT_REPORT to a path to capture output.
"""
flags = ["--disable=I"]
if session.posargs:
paths = session.posargs
else:
paths = ["setup.py", "noxfile.py", "src/"]
_lint_pre_commit(session, ".pylintrc", flags, paths)
@nox.session(python=False, name="lint-tests-pre-commit")
def lint_tests_pre_commit(session):
"""
Run PyLint against the code and the test suite. Set PYLINT_REPORT to a path to capture output.
"""
flags = [
"--disable=I,redefined-outer-name,missing-function-docstring,no-member,missing-module-docstring",
]
if session.posargs:
paths = session.posargs
else:
paths = ["tests/"]
_lint_pre_commit(session, ".pylintrc", flags, paths)
@nox.session(python="3")
def docs(session):
"""
Build Docs
"""
_install_requirements(
session,
install_coverage_requirements=False,
install_test_requirements=False,
install_source=True,
install_extras=["docs"],
)
os.chdir("docs/")
session.run("make", "clean", external=True)
session.run("make", "linkcheck", "SPHINXOPTS=-W", external=True)
session.run("make", "coverage", "SPHINXOPTS=-W", external=True)
docs_coverage_file = os.path.join("_build", "html", "python.txt")
if os.path.exists(docs_coverage_file):
with open(docs_coverage_file) as rfh: # pylint: disable=unspecified-encoding
contents = rfh.readlines()[2:]
if contents:
session.error("\n" + "".join(contents))
session.run("make", "html", "SPHINXOPTS=-W", external=True)
os.chdir(str(REPO_ROOT))
@nox.session(name="docs-html", python="3")
@nox.parametrize("clean", [False, True])
@nox.parametrize("include_api_docs", [False, True])
def docs_html(session, clean, include_api_docs):
"""
Build Sphinx HTML Documentation
TODO: Add option for `make linkcheck` and `make coverage`
calls via Sphinx. Ran into problems with two when
using Furo theme and latest Sphinx.
"""
_install_requirements(
session,
install_coverage_requirements=False,
install_test_requirements=False,
install_source=True,
install_extras=["docs"],
)
if include_api_docs:
gen_api_docs(session)
build_dir = Path("docs", "_build", "html")
sphinxopts = "-Wn"
if clean:
sphinxopts += "E"
args = [sphinxopts, "--keep-going", "docs", str(build_dir)]
session.run("sphinx-build", *args, external=True)
@nox.session(name="docs-dev", python="3")
@nox.parametrize("clean", [False, True])
def docs_dev(session, clean) -> None:
"""
Build and serve the Sphinx HTML documentation, with live reloading on file changes, via sphinx-autobuild.
Note: Only use this in INTERACTIVE DEVELOPMENT MODE. This SHOULD NOT be called
in CI/CD pipelines, as it will hang.
"""
_install_requirements(
session,
install_coverage_requirements=False,
install_test_requirements=False,
install_source=True,
install_extras=["docs", "docsauto"],
)
# Launching LIVE reloading Sphinx session
build_dir = Path("docs", "_build", "html")
args = ["--watch", ".", "--open-browser", "docs", str(build_dir)]
if clean and build_dir.exists():
shutil.rmtree(build_dir)
session.run("sphinx-autobuild", *args)
@nox.session(name="docs-crosslink-info", python="3")
def docs_crosslink_info(session):
"""
Report intersphinx cross links information
"""
_install_requirements(
session,
install_coverage_requirements=False,
install_test_requirements=False,
install_source=True,
install_extras=["docs"],
)
os.chdir("docs/")
intersphinx_mapping = json.loads(
session.run(
"python",
"-c",
"import json; import conf; print(json.dumps(conf.intersphinx_mapping))",
silent=True,
log=False,
)
)
intersphinx_mapping_list = ", ".join(list(intersphinx_mapping))
try:
mapping_entry = intersphinx_mapping[session.posargs[0]]
except IndexError:
session.error(
f"You need to pass at least one argument whose value must be one of: {intersphinx_mapping_list}"
)
except KeyError:
session.error(f"Only acceptable values for first argument are: {intersphinx_mapping_list}")
session.run(
"python", "-m", "sphinx.ext.intersphinx", mapping_entry[0].rstrip("/") + "/objects.inv"
)
os.chdir(str(REPO_ROOT))
@nox.session(name="gen-api-docs", python="3")
def gen_api_docs(session):
"""
Generate API Docs
"""
_install_requirements(
session,
install_coverage_requirements=False,
install_test_requirements=False,
install_source=True,
install_extras=["docs"],
)
try:
shutil.rmtree("docs/ref")
except FileNotFoundError:
pass
session.run(
"sphinx-apidoc",
"--implicit-namespaces",
"--module-first",
"-o",
"docs/ref/",
"src/saltext",
"src/saltext/prometheus/config/schemas",
)
0707010000003B000081A400000000000000000000000167471E9C00000D46000000000000000000000000000000000000003800000000test-repo-1-0.1/saltext.prometheus-2.1.0/pyproject.toml[build-system]
requires = [
"wheel",
"setuptools>=50.3.2",
"setuptools_scm[toml]>=3.4",
]
build-backend = "setuptools.build_meta"
[tool.setuptools_scm]
write_to = "src/saltext/prometheus/version.py"
write_to_template = "__version__ = \"{version}\""
[project]
name = "saltext.prometheus"
description = "Salt Extension for interacting with Prometheus"
authors = [
{name = "EITR Technologies, LLC", email = "devops@eitr.tech"},
]
keywords = [
"salt-extension",
]
license = {text = "Apache Software License"}
classifiers = [
"Programming Language :: Python",
"Programming Language :: Cython",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
]
requires-python = ">= 3.8"
dynamic = ["version"]
dependencies = [
"prometheus_client",
"salt>=3005",
]
[project.readme]
file = "README.md"
content-type = "text/markdown"
[project.urls]
Homepage = "https://github.com/salt-extensions/saltext-prometheus"
Source = "https://github.com/salt-extensions/saltext-prometheus"
Tracker = "https://github.com/salt-extensions/saltext-prometheus/issues"
[project.optional-dependencies]
changelog = ["towncrier==22.12.0"]
dev = [
"nox",
"pre-commit>=2.4.0",
"pylint",
"saltpylint",
]
docs = [
"sphinx",
"sphinx-prompt",
"sphinxcontrib-spelling",
"sphinx-copybutton",
"towncrier==22.12.0",
"sphinxcontrib-towncrier",
"myst_parser",
"furo",
"sphinx-inline-tabs",
]
docsauto = ["sphinx-autobuild"]
lint = [
"pylint",
"saltpylint",
]
tests = [
"pytest>=6.1.0",
"pytest-salt-factories>=1.0.0rc19",
]
[project.entry-points."salt.loader"]
"saltext.prometheus" = "saltext.prometheus"
[tool.setuptools]
zip-safe = false
include-package-data = true
platforms = ["any"]
[tool.setuptools.packages.find]
where = ["src"]
exclude = ["tests"]
[tool.distutils.bdist_wheel]
# Use this option if your package is pure-python
universal = 1
[tool.distutils.sdist]
owner = "root"
group = "root"
[tool.build_sphinx]
source_dir = "docs"
build_dir = "build/sphinx"
[tool.black]
line-length = 100
[tool.towncrier]
package = "saltext.prometheus"
filename = "CHANGELOG.md"
template = "changelog/.template.jinja"
directory = "changelog/"
start_string = "# Changelog\n"
underlines = ["", "", ""]
title_format = "## {version} ({project_date})"
issue_format = "[#{issue}](https://github.com/salt-extensions/saltext-prometheus/issues/{issue})"
[[tool.towncrier.type]]
directory = "removed"
name = "Removed"
showcontent = true
[[tool.towncrier.type]]
directory = "deprecated"
name = "Deprecated"
showcontent = true
[[tool.towncrier.type]]
directory = "changed"
name = "Changed"
showcontent = true
[[tool.towncrier.type]]
directory = "fixed"
name = "Fixed"
showcontent = true
[[tool.towncrier.type]]
directory = "added"
name = "Added"
showcontent = true
[[tool.towncrier.type]]
directory = "security"
name = "Security"
showcontent = true
0707010000003C000081A400000000000000000000000167471E9C00000026000000000000000000000000000000000000003300000000test-repo-1-0.1/saltext.prometheus-2.1.0/setup.cfg[egg_info]
tag_build =
tag_date = 0
0707010000003D000081A400000000000000000000000167471E9C00000084000000000000000000000000000000000000003200000000test-repo-1-0.1/saltext.prometheus-2.1.0/setup.py# pylint: disable=missing-module-docstring
import setuptools
if __name__ == "__main__":
setuptools.setup(use_scm_version=True)
0707010000003E000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000002D00000000test-repo-1-0.1/saltext.prometheus-2.1.0/src0707010000003F000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003500000000test-repo-1-0.1/saltext.prometheus-2.1.0/src/saltext07070100000040000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004100000000test-repo-1-0.1/saltext.prometheus-2.1.0/src/saltext/__init__.py07070100000041000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000004000000000test-repo-1-0.1/saltext.prometheus-2.1.0/src/saltext/prometheus07070100000042000081A400000000000000000000000167471E9C0000035F000000000000000000000000000000000000004C00000000test-repo-1-0.1/saltext.prometheus-2.1.0/src/saltext/prometheus/__init__.py# pylint: disable=missing-module-docstring
import pathlib
PACKAGE_ROOT = pathlib.Path(__file__).resolve().parent
try:
from .version import __version__
except ImportError: # pragma: no cover
__version__ = "0.0.0.not-installed"
try:
from importlib.metadata import version, PackageNotFoundError
try:
__version__ = version(__name__)
except PackageNotFoundError:
# package is not installed
pass
except ImportError:
try:
from pkg_resources import get_distribution, DistributionNotFound
try:
__version__ = get_distribution(__name__).version
except DistributionNotFound:
# package is not installed
pass
except ImportError:
# pkg resources isn't even available?!
pass
07070100000043000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000004800000000test-repo-1-0.1/saltext.prometheus-2.1.0/src/saltext/prometheus/engines07070100000044000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000005400000000test-repo-1-0.1/saltext.prometheus-2.1.0/src/saltext/prometheus/engines/__init__.py07070100000045000081A400000000000000000000000167471E9C000000E9000000000000000000000000000000000000005A00000000test-repo-1-0.1/saltext.prometheus-2.1.0/src/saltext/prometheus/engines/prometheus_mod.py"""
Salt engine module
"""
import logging
log = logging.getLogger(__name__)
__virtualname__ = "prometheus"
def __virtual__():
# return __virtualname__
return (False, "The prometheus engine module is not implemented yet")
07070100000046000081A400000000000000000000000167471E9C00000200000000000000000000000000000000000000004A00000000test-repo-1-0.1/saltext.prometheus-2.1.0/src/saltext/prometheus/loader.py"""
Define the required entry-points functions in order for Salt to know
what and from where it should load this extension's loaders
"""
from . import PACKAGE_ROOT # pylint: disable=unused-import
def get_returner_dirs():
"""
Return a list of paths from where salt should load returner modules
"""
return [str(PACKAGE_ROOT / "returners")]
def get_engines_dirs():
"""
Return a list of paths from where salt should load engine modules
"""
return [str(PACKAGE_ROOT / "engines")]
07070100000047000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000004A00000000test-repo-1-0.1/saltext.prometheus-2.1.0/src/saltext/prometheus/returners07070100000048000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000005600000000test-repo-1-0.1/saltext.prometheus-2.1.0/src/saltext/prometheus/returners/__init__.py07070100000049000081A400000000000000000000000167471E9C00003A26000000000000000000000000000000000000006100000000test-repo-1-0.1/saltext.prometheus-2.1.0/src/saltext/prometheus/returners/prometheus_textfile.py"""
Take data from salt and "return" it into a file formatted for Prometheus using
the `Text Exposition Format <https://prometheus.io/docs/instrumenting/exposition_formats/#text-format-example>`_
which rolls up state success and failure data.
.. versionadded:: 3005
The intended use case for this module is to have distributed success/failure
reporting from minions for unattended state or highstate runs.
Add the following to the minion or master configuration file to configure the
output location which Prometheus will monitor via Node Exporter.
.. code-block:: yaml
prometheus_textfile.filename: <path_to_output_file>
Default is ``/var/cache/salt/minion/prometheus_textfile/salt.prom`` using the
``cachedir`` minion configuration.
The ``salt_procs`` metric will look for ``salt-minion`` processes by name. If
you have a custom installation of Salt, you might want to change the ``psutil``
process name to be matched or switch to matching the "exe" attribute of the
``Process.info`` dictionary.
.. code-block:: yaml
prometheus_textfile.proc_name: custom-minion
.. code-block:: yaml
prometheus_textfile.match_exe: True
prometheus_textfile.exe: /opt/salt/bin/python3
The default operation of sending state metrics to a single file works well for
the use case of running distributed highstate or a single state run on minions.
However, there may be a use case for tracking multiple scheduled states in
separate files. To enable this behavior, set the following option in the
configuration file:
.. code-block:: yaml
prometheus_textfile.add_state_name: True
This option adds the name of the state that was run as a suffix to the filename and
as a label on the metrics inside the file. Highstate runs will receive ``highstate``
as the state name, while running specific states will pass the first argument to
``state.apply`` or ``state.sls`` as the state name.
.. code-block:: bash
# Filename is "salt-highstate.prom" and metrics get '{state="highstate"}'
salt-call state.highstate
# Filename is "salt-highstate.prom" and metrics get '{state="highstate"}'
salt-call state.apply
# Filename is "salt-test.prom" and metrics get '{state="test"}'
salt-call state.apply test
# Filename is "salt-test.prom" and metrics get '{state="test"}'
salt-call state.sls test
Additionally, the inferred state name can be overridden on the command line by
passing the ``prom_textfile_state`` keyword argument to the state function.
.. code-block:: bash
# Filename is "salt-hello.prom" and metrics get '{state="hello"}'
salt-call state.highstate prom_textfile_state=hello
# Filename is "salt-hello.prom" and metrics get '{state="hello"}'
salt-call state.apply test prom_textfile_state=hello
Output file user, group, and mode can optionally be set through configuration
options:
.. code-block:: yaml
prometheus_textfile.uid: 0
prometheus_textfile.gid: 0
prometheus_textfile.mode: "0644"
The metrics can include the names of failed states if necessary. This is
sometimes beneficial for monitoring purposes so engineers can see what in
particular caused a failure condition on a host and whether it is critical.
.. code-block:: yaml
prometheus_textfile.show_failed_states: true
An additional way of viewing failure conditions is through the
``abort_state_ids`` option. If this option is used, a state ID or list of state
IDs can be provided to indicate an "abort" condition. This allows the user to
see that a failure was related to a "circuit breaker" condition which prevented
the state run from completing.
.. code-block:: yaml
prometheus_textfile.abort_state_ids:
- circuit_breaker_state
- my_other_circuit_breaker
The state IDs in this configuration option can also be presented as a string in
the following manner:
.. code-block:: yaml
# comma-separated states
prometheus_textfile.abort_state_ids: circuit_breaker_state,my_other_circuit_breaker
# single state
prometheus_textfile.abort_state_ids: circuit_breaker_state
If you have systems running Salt versions which have a non-standard version scheme
(such as running from patched versions), the entire version string can be shown
by utilizing the ``raw_version`` parameter. Otherwise, only the portion of the
string before a plus sign (``+``) will be shown. So, a version string such as
"3004+12.g557e6cc0fc" will be shown as "3004" by default unless ``raw_version``
is enabled.
.. code-block:: yaml
prometheus_textfile.raw_version: true
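For reference, the returner writes one gauge per metric to the output file in
the Prometheus text exposition format, roughly as follows (the values shown
here are purely illustrative):

.. code-block:: text

    # HELP salt_states_succeeded Number of successful states in the run
    # TYPE salt_states_succeeded gauge
    salt_states_succeeded 2.0
    # HELP salt_states_failed Number of failed states in the run
    # TYPE salt_states_failed gauge
    salt_states_failed 0.0
    # HELP salt_states_total Total states in the run
    # TYPE salt_states_total gauge
    salt_states_total 2.0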
"""
import logging
import os
import time
import salt.modules.file
import salt.returners
import salt.utils.files
import salt.utils.platform
from prometheus_client import CollectorRegistry
from prometheus_client import Gauge
from prometheus_client import write_to_textfile
log = logging.getLogger(__name__)
HAS_PSUTIL = False
try:
import psutil
HAS_PSUTIL = True
except (ImportError, ModuleNotFoundError):
log.warning("The psutil library is required for the salt_procs metric.")
# Define the module's virtual name
__virtualname__ = "prometheus_textfile"
# Loader workaround
try:
__grains__ # pylint: disable=used-before-assignment
except NameError:
import salt.version # pylint: disable=ungrouped-imports
__grains__ = {"saltversion": salt.version.__version__}
def __virtual__():
return __virtualname__
def _get_options(ret):
"""
Returns options used for the prometheus_textfile returner.
"""
defaults = {
"exe": None,
"filename": os.path.join(__opts__["cachedir"], "prometheus_textfile", "salt.prom"),
"uid": -1, # fpopen default
"gid": -1, # fpopen default
"mode": None,
"match_exe": False,
"proc_name": "salt-minion",
"add_state_name": False,
"abort_state_ids": None,
"show_failed_states": False,
"raw_version": False,
"fail_comment_length": None,
}
attrs = {
"exe": "exe",
"filename": "filename",
"uid": "uid",
"gid": "gid",
"mode": "mode",
"match_exe": "match_exe",
"proc_name": "proc_name",
"add_state_name": "add_state_name",
"abort_state_ids": "abort_state_ids",
"show_failed_states": "show_failed_states",
"raw_version": "raw_version",
"fail_comment_length": "fail_comment_length",
}
_options = salt.returners.get_returner_options(
__virtualname__,
ret,
attrs,
__salt__=__salt__,
__opts__=__opts__,
defaults=defaults,
)
return _options
def _count_minion_procs(proc_name="salt-minion", match_exe=False, exe=None):
"""
Return the count of processes with name matching "salt-minion"
"""
proclist = []
if HAS_PSUTIL:
for proc in psutil.process_iter(["name", "exe"]):
if match_exe and proc.info["exe"] == exe:
proclist.append(proc)
elif proc.info["name"] == proc_name:
proclist.append(proc)
return len(proclist)
def returner(ret):
"""
Write Prometheus metrics to a file on the minion.
"""
state_functions = [
"state.apply",
"state.sls",
"state.highstate",
]
if ret["fun"] not in state_functions:
log.info(
"The prometheus_textfile returner is only intended to run on %s functions... not %s",
", ".join(state_functions),
ret["fun"],
)
return
opts = _get_options(ret)
prom_state = ""
if opts["add_state_name"]:
if ret["fun"] == "state.highstate":
prom_state = "highstate"
elif ret["fun"] == "state.apply" and (not ret["fun_args"] or "=" in ret["fun_args"][0]):
prom_state = "highstate"
else:
prom_state = ret["fun_args"][0]
for fun_arg in ret["fun_args"]:
if not isinstance(fun_arg, str):
continue
if fun_arg.lower() == "test=true":
log.info("The prometheus_textfile returner is not enabled in Test mode.")
return
if opts["add_state_name"] and fun_arg.lower().startswith("prom_textfile_state="):
prom_state = "".join(fun_arg.split("=")[1:])
log.debug("Prometheus text file returner state name: %s", prom_state)
out_dir = os.path.dirname(opts["filename"])
if not os.path.isdir(out_dir):
try:
os.makedirs(out_dir)
except OSError:
log.error("Could not create directory for prometheus output: %s", out_dir)
return
success = 0
failure = 0
changed = 0
total = 0
duration = 0
for data in ret.get("return", {}).values():
total += 1
duration += data.get("duration", 0)
if data["result"] is False:
failure += 1
else:
success += 1
if data.get("changes"):
changed += 1
if not total:
log.error("Total states run equals 0. There may be something wrong...")
return
salt_procs = _count_minion_procs(
proc_name=opts["proc_name"],
match_exe=opts["match_exe"],
exe=opts["exe"],
)
now = int(time.time())
output = {
"salt_procs": {
"help": "Number of salt minion processes running",
"value": salt_procs,
},
"salt_states_succeeded": {
"help": "Number of successful states in the run",
"value": success,
},
"salt_states_failed": {
"help": "Number of failed states in the run",
"value": failure,
},
"salt_states_changed": {
"help": "Number of changed states in the run",
"value": changed,
},
"salt_states_total": {
"help": "Total states in the run",
"value": total,
},
"salt_states_success_pct": {
"help": "Percent of successful states in the run",
"value": round((success / total) * 100, 2),
},
"salt_states_failure_pct": {
"help": "Percent of failed states in the run",
"value": round((failure / total) * 100, 2),
},
"salt_states_changed_pct": {
"help": "Percent of changed states in the run",
"value": round((changed / total) * 100, 2),
},
"salt_elapsed_time": {
"help": "Time spent for all operations during the state run",
"value": round(duration, 3),
},
"salt_last_started": {
"help": "Estimated time the state run started",
"value": int(now - duration / 1000),
},
"salt_last_completed": {
"help": "Time of last state run completion",
"value": now,
},
"salt_version": {
"help": "Version of installed Salt package",
"value": __grains__["saltversion"].split("+", maxsplit=1)[0],
},
"salt_version_tagged": {
"help": "Version of installed Salt package as a tag",
"value": 1,
},
}
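    # Collect all of the metrics above into a dedicated registry and expose each
    # one as a gauge; when add_state_name is enabled, a "state" label is attached.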
registry = CollectorRegistry()
if opts["show_failed_states"]:
labels = ["state_id", "state_comment"]
if opts["add_state_name"]:
labels.append("state")
gauge_show_failed_states = Gauge(
"salt_failed",
"Information regarding state with failure condition",
labels,
registry=registry,
)
for state_id, state_return in ret["return"].items():
if state_return["result"] is False:
failed_comment = state_return.get("comment", "").replace('"', "").replace("\n", " ")
label_values = [
state_id.split("_|-")[1],
failed_comment[0 : opts["fail_comment_length"]],
]
if opts["add_state_name"]:
label_values.append(prom_state)
gauge_show_failed_states.labels(*label_values).set(1)
if opts["abort_state_ids"]:
labels = []
label_values = []
if opts["add_state_name"]:
labels.append("state")
label_values.append(prom_state)
if not isinstance(opts["abort_state_ids"], list):
opts["abort_state_ids"] = [item.strip() for item in opts["abort_state_ids"].split(",")]
aborted_value = 0
for state_id, state_return in ret["return"].items():
if not state_return["result"] and state_return.get("__id__") in opts["abort_state_ids"]:
aborted_value = 1
labels.append("state_id")
label_values.append(state_id.split("_|-")[1])
gauge_salt_aborted = Gauge(
"salt_aborted",
"Flag to show that a specific abort state failed",
labels,
registry=registry,
)
if label_values:
gauge_salt_aborted.labels(*label_values).set(aborted_value)
else:
gauge_salt_aborted.set(aborted_value)
if opts["add_state_name"]:
old_name, ext = os.path.splitext(opts["filename"])
opts["filename"] = f"{old_name}-{prom_state}{ext}"
log.debug(
"Modified Prometheus filename from %s to %s",
old_name + ext,
opts["filename"],
)
for key in list(output.keys()):
labels = []
label_values = []
if opts["add_state_name"]:
labels.append("state")
label_values.append(prom_state)
if key == "salt_version_tagged":
labels.append("salt_version")
if opts["raw_version"]:
label_values.append(__grains__["saltversion"])
else:
label_values.append(__grains__["saltversion"].split("+", maxsplit=1)[0])
keys_dict = output.pop(key)
gauge_keys = Gauge(key, keys_dict["help"], labels, registry=registry)
try:
if label_values:
gauge_keys.labels(*label_values).set(keys_dict["value"])
else:
gauge_keys.set(keys_dict["value"])
except ValueError:
keys_dict["value"] = keys_dict["value"].split("rc", maxsplit=1)[0]
if label_values:
gauge_keys.labels(*label_values).set(keys_dict["value"])
else:
gauge_keys.set(keys_dict["value"])
write_to_textfile(opts["filename"], registry)
if not salt.utils.platform.is_windows():
salt.modules.file.chown(opts["filename"], opts["uid"], opts["gid"])
if opts["mode"]:
try:
salt.modules.file.set_mode(opts["filename"], opts["mode"])
except ValueError:
opts["mode"] = None
log.exception("Unable to convert mode to octal. Using system default.")
return True
0707010000004A000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000002F00000000test-repo-1-0.1/saltext.prometheus-2.1.0/tests0707010000004B000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000003B00000000test-repo-1-0.1/saltext.prometheus-2.1.0/tests/__init__.py0707010000004C000081A400000000000000000000000167471E9C00000406000000000000000000000000000000000000003B00000000test-repo-1-0.1/saltext.prometheus-2.1.0/tests/conftest.pyimport logging
import os
import pytest
from saltext.prometheus import PACKAGE_ROOT
from saltfactories.utils import random_string
# Reset the root logger to its default level (because salt changed it)
logging.root.setLevel(logging.WARNING)
# This swallows all logging to stdout.
# To show select logs, set --log-cli-level=<level>
for handler in logging.root.handlers[:]:
logging.root.removeHandler(handler)
handler.close()
@pytest.fixture(scope="session")
def salt_factories_config():
"""
Return a dictionary with the keyword arguments for FactoriesManager
"""
return {
"code_dir": str(PACKAGE_ROOT),
"inject_sitecustomize": "COVERAGE_PROCESS_START" in os.environ,
"start_timeout": 120 if os.environ.get("CI") else 60,
}
@pytest.fixture(scope="package")
def master(salt_factories):
return salt_factories.salt_master_daemon(random_string("master-"))
@pytest.fixture(scope="package")
def minion(master):
return master.salt_minion_daemon(random_string("minion-"))
0707010000004D000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003A00000000test-repo-1-0.1/saltext.prometheus-2.1.0/tests/functional0707010000004E000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004600000000test-repo-1-0.1/saltext.prometheus-2.1.0/tests/functional/__init__.py0707010000004F000081A400000000000000000000000167471E9C00000DAC000000000000000000000000000000000000004600000000test-repo-1-0.1/saltext.prometheus-2.1.0/tests/functional/conftest.pyimport logging
import shutil
import pytest
from saltfactories.utils.functional import Loaders
log = logging.getLogger(__name__)
@pytest.fixture(scope="package")
def minion_id():
return "func-tests-minion-opts"
@pytest.fixture(scope="module")
def state_tree(tmp_path_factory):
state_tree_path = tmp_path_factory.mktemp("state-tree-base")
try:
yield state_tree_path
finally:
shutil.rmtree(str(state_tree_path), ignore_errors=True)
@pytest.fixture(scope="module")
def state_tree_prod(tmp_path_factory):
state_tree_path = tmp_path_factory.mktemp("state-tree-prod")
try:
yield state_tree_path
finally:
shutil.rmtree(str(state_tree_path), ignore_errors=True)
@pytest.fixture(scope="module")
def minion_config_defaults():
"""
Functional test modules can provide this fixture to tweak the default
configuration dictionary passed to the minion factory
"""
return {}
@pytest.fixture(scope="module")
def minion_config_overrides():
"""
Functional test modules can provide this fixture to tweak the configuration
overrides dictionary passed to the minion factory
"""
return {}
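# Example (illustrative): a functional test module can override either fixture
# above to adjust the minion configuration it runs against, e.g.
#
#   @pytest.fixture(scope="module")
#   def minion_config_overrides():
#       return {"prometheus_textfile.show_failed_states": True}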
@pytest.fixture(scope="module")
def minion_opts(
salt_factories,
minion_id,
state_tree,
state_tree_prod,
minion_config_defaults,
minion_config_overrides,
):
minion_config_overrides.update(
{
"file_client": "local",
"file_roots": {
"base": [
str(state_tree),
],
"prod": [
str(state_tree_prod),
],
},
}
)
factory = salt_factories.salt_minion_daemon(
minion_id,
defaults=minion_config_defaults or None,
overrides=minion_config_overrides,
)
return factory.config.copy()
@pytest.fixture(scope="module")
def master_config_defaults():
"""
Functional test modules can provide this fixture to tweak the default
configuration dictionary passed to the master factory
"""
return {}
@pytest.fixture(scope="module")
def master_config_overrides():
"""
Functional test modules can provide this fixture to tweak the configuration
overrides dictionary passed to the master factory
"""
return {}
@pytest.fixture(scope="module")
def master_opts(
salt_factories,
state_tree,
state_tree_prod,
master_config_defaults,
master_config_overrides,
):
master_config_overrides.update(
{
"file_client": "local",
"file_roots": {
"base": [
str(state_tree),
],
"prod": [
str(state_tree_prod),
],
},
}
)
factory = salt_factories.salt_master_daemon(
"func-tests-master-opts",
defaults=master_config_defaults or None,
overrides=master_config_overrides,
)
return factory.config.copy()
@pytest.fixture(scope="module")
def loaders(minion_opts):
return Loaders(minion_opts, loaded_base_name=f"{__name__}.loaded")
@pytest.fixture(autouse=True)
def reset_loaders_state(loaders):
try:
# Run the tests
yield
finally:
# Reset the loaders state
loaders.reset_state()
@pytest.fixture(scope="module")
def modules(loaders):
return loaders.modules
@pytest.fixture(scope="module")
def states(loaders):
return loaders.states
07070100000050000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000004200000000test-repo-1-0.1/saltext.prometheus-2.1.0/tests/functional/engines07070100000051000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004E00000000test-repo-1-0.1/saltext.prometheus-2.1.0/tests/functional/engines/__init__.py07070100000052000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000004400000000test-repo-1-0.1/saltext.prometheus-2.1.0/tests/functional/returners07070100000053000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000005000000000test-repo-1-0.1/saltext.prometheus-2.1.0/tests/functional/returners/__init__.py07070100000054000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003B00000000test-repo-1-0.1/saltext.prometheus-2.1.0/tests/integration07070100000055000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004700000000test-repo-1-0.1/saltext.prometheus-2.1.0/tests/integration/__init__.py07070100000056000081A400000000000000000000000167471E9C000001BD000000000000000000000000000000000000004700000000test-repo-1-0.1/saltext.prometheus-2.1.0/tests/integration/conftest.pyimport pytest
@pytest.fixture(scope="package")
def master(master):
with master.started():
yield master
@pytest.fixture(scope="package")
def minion(minion):
with minion.started():
yield minion
@pytest.fixture
def salt_run_cli(master):
return master.salt_run_cli()
@pytest.fixture
def salt_cli(master):
return master.salt_cli()
@pytest.fixture
def salt_call_cli(minion):
return minion.salt_call_cli()
07070100000057000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000004300000000test-repo-1-0.1/saltext.prometheus-2.1.0/tests/integration/engines07070100000058000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004F00000000test-repo-1-0.1/saltext.prometheus-2.1.0/tests/integration/engines/__init__.py07070100000059000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000004500000000test-repo-1-0.1/saltext.prometheus-2.1.0/tests/integration/returners0707010000005A000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000005100000000test-repo-1-0.1/saltext.prometheus-2.1.0/tests/integration/returners/__init__.py0707010000005B000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003700000000test-repo-1-0.1/saltext.prometheus-2.1.0/tests/support0707010000005C000081A400000000000000000000000167471E9C00003EA8000000000000000000000000000000000000003F00000000test-repo-1-0.1/saltext.prometheus-2.1.0/tests/support/mock.py"""
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
tests.support.mock
~~~~~~~~~~~~~~~~~~
Helper module that wraps `mock` and provides some fake objects in order to
properly set the function/class decorators and yet skip the test case's
execution.
Note: mock >= 2.0.0 required since unittest.mock does not have
MagicMock.assert_called in Python < 3.6.
"""
# pylint: disable=unused-import,function-redefined
import copy
import errno
import fnmatch
import sys
from unittest import mock
from unittest.mock import ANY
from unittest.mock import call
from unittest.mock import create_autospec
from unittest.mock import DEFAULT
from unittest.mock import FILTER_DIR
from unittest.mock import MagicMock
from unittest.mock import Mock
from unittest.mock import NonCallableMagicMock
from unittest.mock import NonCallableMock
from unittest.mock import patch
from unittest.mock import PropertyMock
from unittest.mock import sentinel
import salt.utils.stringutils
# These days we should blow up if mock is not available
# pylint: disable=no-name-in-module,no-member
class MockFH:
"""
MockFH class
"""
def __init__(self, filename, read_data, *args, **kwargs):
self.filename = filename
self.read_data = read_data
try:
self.mode = args[0]
except IndexError:
self.mode = kwargs.get("mode", "r")
self.binary_mode = "b" in self.mode
self.read_mode = any(x in self.mode for x in ("r", "+"))
self.write_mode = any(x in self.mode for x in ("w", "a", "+"))
self.empty_string = b"" if self.binary_mode else ""
self.call = MockCall(filename, *args, **kwargs)
self.read_data_iter = self._iterate_read_data(read_data)
self.read = Mock(side_effect=self._read)
self.readlines = Mock(side_effect=self._readlines)
self.readline = Mock(side_effect=self._readline)
self.write = Mock(side_effect=self._write)
self.writelines = Mock(side_effect=self._writelines)
self.close = Mock()
self.seek = Mock()
self.__loc = 0
self.__read_data_ok = False
def _iterate_read_data(self, read_data):
"""
Helper for mock_open:
Retrieve lines from read_data via a generator so that separate calls to
readline, read, and readlines are properly interleaved
"""
        # Use a newline of the same type (bytes or str) as read_data so the
        # split and rejoin below work for both binary and text data.
newline = b"\n" if isinstance(read_data, bytes) else "\n"
read_data = [line + newline for line in read_data.split(newline)]
if read_data[-1] == newline:
# If the last line ended in a newline, the list comprehension will have an
# extra entry that's just a newline. Remove this.
read_data = read_data[:-1]
else:
# If there wasn't an extra newline by itself, then the file being
# emulated doesn't have a newline to end the last line, so remove the
# newline that we added in the list comprehension.
read_data[-1] = read_data[-1][:-1]
yield from read_data
@property
def write_calls(self):
"""
Return a list of all calls to the .write() mock
"""
return [x[1][0] for x in self.write.mock_calls]
@property
def writelines_calls(self):
"""
Return a list of all calls to the .writelines() mock
"""
return [x[1][0] for x in self.writelines.mock_calls]
def tell(self):
return self.__loc
def __check_read_data(self):
if not self.__read_data_ok:
if self.binary_mode:
if not isinstance(self.read_data, bytes):
raise TypeError(
f"{self.filename} opened in binary mode, expected read_data to be bytes, not {type(self.read_data).__name__}"
)
else:
if not isinstance(self.read_data, str):
raise TypeError(
f"{self.filename} opened in non-binary mode, expected read_data to be str, not {type(self.read_data).__name__}"
)
# No need to repeat this the next time we check
self.__read_data_ok = True
def _read(self, size=0):
self.__check_read_data()
if not self.read_mode:
raise OSError("File not open for reading")
if not isinstance(size, int) or size < 0:
raise TypeError("a positive integer is required")
joined = self.empty_string.join(self.read_data_iter)
if not size:
# read() called with no args, return everything
self.__loc += len(joined)
return joined
else:
# read() called with an explicit size. Return a slice matching the
# requested size, but before doing so, reset read_data to reflect
# what we read.
self.read_data_iter = self._iterate_read_data(joined[size:])
ret = joined[:size]
self.__loc += len(ret)
return ret
def _readlines(self, size=None): # pylint: disable=unused-argument
# TODO: Implement "size" argument
self.__check_read_data()
if not self.read_mode:
raise OSError("File not open for reading")
ret = list(self.read_data_iter)
self.__loc += sum(len(x) for x in ret)
return ret
def _readline(self, size=None): # pylint: disable=unused-argument
# TODO: Implement "size" argument
self.__check_read_data()
if not self.read_mode:
raise OSError("File not open for reading")
try:
ret = next(self.read_data_iter)
self.__loc += len(ret)
return ret
except StopIteration:
return self.empty_string
def __iter__(self):
self.__check_read_data()
if not self.read_mode:
raise OSError("File not open for reading")
while True:
try:
ret = next(self.read_data_iter)
self.__loc += len(ret)
yield ret
except StopIteration:
break
def _write(self, content):
if not self.write_mode:
raise OSError("File not open for writing")
else:
content_type = type(content)
if self.binary_mode and content_type is not bytes:
raise TypeError(f"a bytes-like object is required, not '{content_type.__name__}'")
elif not self.binary_mode and content_type is not str:
raise TypeError(f"write() argument must be str, not {content_type.__name__}")
def _writelines(self, lines):
if not self.write_mode:
raise OSError("File not open for writing")
for line in lines:
self._write(line)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb): # pylint: disable=unused-argument
pass
class MockCall:
"""
MockCall class
"""
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
def __repr__(self):
ret = "MockCall("
for arg in self.args:
ret += repr(arg) + ", "
if not self.kwargs:
if self.args:
# Remove trailing ', '
ret = ret[:-2]
else:
for key, val in self.kwargs.items():
ret += f"{salt.utils.stringutils.to_str(key)}={repr(val)}"
ret += ")"
return ret
def __str__(self):
return self.__repr__()
def __eq__(self, other):
return self.args == other.args and self.kwargs == other.kwargs
class MockOpen:
r'''
This class can be used to mock the use of ``open()``.
``read_data`` is a string representing the contents of the file to be read.
By default, this is an empty string.
Optionally, ``read_data`` can be a dictionary mapping ``fnmatch.fnmatch()``
patterns to strings (or optionally, exceptions). This allows the mocked
filehandle to serve content for more than one file path.
.. code-block:: python
data = {
'/etc/foo.conf': textwrap.dedent("""\
Foo
Bar
Baz
"""),
'/etc/bar.conf': textwrap.dedent("""\
A
B
C
"""),
}
        with patch('salt.utils.files.fopen', mock_open(read_data=data)):
do stuff
If the file path being opened does not match any of the glob expressions,
an IOError will be raised to simulate the file not existing.
Passing ``read_data`` as a string is equivalent to passing it with a glob
expression of "*". That is to say, the below two invocations are
equivalent:
.. code-block:: python
mock_open(read_data='foo\n')
mock_open(read_data={'*': 'foo\n'})
Instead of a string representing file contents, ``read_data`` can map to an
exception, and that exception will be raised if a file matching that
pattern is opened:
.. code-block:: python
data = {
'/etc/*': IOError(errno.EACCES, 'Permission denied'),
'*': 'Hello world!\n',
}
with patch('salt.utils.files.fopen', mock_open(read_data=data)):
do stuff
The above would raise an exception if any files within /etc are opened, but
would produce a mocked filehandle if any other file is opened.
To simulate file contents changing upon subsequent opens, the file contents
can be a list of strings/exceptions. For example:
.. code-block:: python
data = {
'/etc/foo.conf': [
'before\n',
'after\n',
],
'/etc/bar.conf': [
IOError(errno.ENOENT, 'No such file or directory', '/etc/bar.conf'),
'Hey, the file exists now!',
],
}
        with patch('salt.utils.files.fopen', mock_open(read_data=data)):
do stuff
The first open of ``/etc/foo.conf`` would return "before\n" when read,
while the second would return "after\n" when read. For ``/etc/bar.conf``,
the first read would raise an exception, while the second would open
successfully and read the specified string.
Expressions will be attempted in dictionary iteration order (the exception
being ``*`` which is tried last), so if a file path matches more than one
fnmatch expression then the first match "wins". If your use case calls for
overlapping expressions, then an OrderedDict can be used to ensure that the
desired matching behavior occurs:
.. code-block:: python
data = OrderedDict()
data['/etc/foo.conf'] = 'Permission granted!'
data['/etc/*'] = IOError(errno.EACCES, 'Permission denied')
        data['*'] = 'Hello world!\n'
        with patch('salt.utils.files.fopen', mock_open(read_data=data)):
do stuff
The following attributes are tracked for the life of a mock object:
* call_count - Tracks how many fopen calls were attempted
* filehandles - This is a dictionary mapping filenames to lists of MockFH
objects, representing the individual times that a given file was opened.
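    These attributes make it possible to assert on how the mocked filehandles
    were used after the code under test has run. A minimal sketch (paths and
    contents are illustrative):

    .. code-block:: python

        fopen_mock = mock_open(read_data={'*': 'Hello world!\n'})
        with patch('salt.utils.files.fopen', fopen_mock):
            do stuff
        assert fopen_mock.call_count == 1
        assert fopen_mock.write_calls() == []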
'''
def __init__(self, read_data=""):
# If the read_data contains lists, we will be popping it. So, don't
# modify the original value passed.
read_data = copy.copy(read_data)
        # Normalize read_data: a plain string is treated as matching all paths
        # (equivalent to passing it under the "*" glob).
if not isinstance(read_data, dict):
read_data = {"*": read_data}
self.read_data = read_data
self.filehandles = {}
self.calls = []
self.call_count = 0
def __call__(self, name, *args, **kwargs):
"""
Match the file being opened to the patterns in the read_data and spawn
a mocked filehandle with the corresponding file contents.
"""
call = MockCall(name, *args, **kwargs)
self.calls.append(call)
self.call_count += 1
for pat in self.read_data:
if pat == "*":
continue
if fnmatch.fnmatch(name, pat):
matched_pattern = pat
break
else:
# No non-glob match in read_data, fall back to '*'
matched_pattern = "*"
try:
matched_contents = self.read_data[matched_pattern]
try:
# Assuming that the value for the matching expression is a
# list, pop the first element off of it.
file_contents = matched_contents.pop(0)
except AttributeError:
# The value for the matching expression is a string (or exception)
file_contents = matched_contents
except IndexError:
# We've run out of file contents, abort!
raise RuntimeError(
f"File matching expression '{matched_pattern}' opened more times than expected"
) from None
try:
# Raise the exception if the matched file contents are an
# instance of an exception class.
raise file_contents
except TypeError:
# Contents were not an exception, so proceed with creating the
# mocked filehandle.
pass
ret = MockFH(name, file_contents, *args, **kwargs)
self.filehandles.setdefault(name, []).append(ret)
return ret
except KeyError:
# No matching glob in read_data, treat this as a file that does
# not exist and raise the appropriate exception.
raise OSError(errno.ENOENT, "No such file or directory", name) from None
def write_calls(self, path=None):
"""
Returns the contents passed to all .write() calls. Use `path` to narrow
the results to files matching a given pattern.
"""
ret = []
for filename, handles in self.filehandles.items():
if path is None or fnmatch.fnmatch(filename, path):
for fh_ in handles:
ret.extend(fh_.write_calls)
return ret
def writelines_calls(self, path=None):
"""
Returns the contents passed to all .writelines() calls. Use `path` to
narrow the results to files matching a given pattern.
"""
ret = []
for filename, handles in self.filehandles.items():
if path is None or fnmatch.fnmatch(filename, path):
for fh_ in handles:
ret.extend(fh_.writelines_calls)
return ret
class MockTimedProc:
"""
Class used as a stand-in for salt.utils.timed_subprocess.TimedProc
"""
class _Process:
"""
Used to provide a dummy "process" attribute
"""
def __init__(self, returncode=0, pid=12345):
self.returncode = returncode
self.pid = pid
def __init__(self, stdout=None, stderr=None, returncode=0, pid=12345):
if stdout is not None and not isinstance(stdout, bytes):
raise TypeError("Must pass stdout to MockTimedProc as bytes")
if stderr is not None and not isinstance(stderr, bytes):
raise TypeError("Must pass stderr to MockTimedProc as bytes")
self._stdout = stdout
self._stderr = stderr
self.process = self._Process(returncode=returncode, pid=pid)
def run(self):
pass
@property
def stdout(self):
return self._stdout
@property
def stderr(self):
return self._stderr
# reimplement mock_open to support multiple filehandles
mock_open = MockOpen # pylint: disable=invalid-name
0707010000005D000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003400000000test-repo-1-0.1/saltext.prometheus-2.1.0/tests/unit0707010000005E000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004000000000test-repo-1-0.1/saltext.prometheus-2.1.0/tests/unit/__init__.py0707010000005F000081A400000000000000000000000167471E9C00000693000000000000000000000000000000000000004000000000test-repo-1-0.1/saltext.prometheus-2.1.0/tests/unit/conftest.pyimport pytest
import salt.config
@pytest.fixture
def minion_opts(tmp_path):
"""
Default minion configuration with relative temporary paths to not
require root permissions.
"""
root_dir = tmp_path / "minion"
opts = salt.config.DEFAULT_MINION_OPTS.copy()
opts["__role"] = "minion"
opts["root_dir"] = str(root_dir)
for name in ("cachedir", "pki_dir", "sock_dir", "conf_dir"):
dirpath = root_dir / name
dirpath.mkdir(parents=True)
opts[name] = str(dirpath)
opts["log_file"] = "logs/minion.log"
return opts
@pytest.fixture
def master_opts(tmp_path):
"""
Default master configuration with relative temporary paths to not
require root permissions.
"""
root_dir = tmp_path / "master"
opts = salt.config.master_config(None)
opts["__role"] = "master"
opts["root_dir"] = str(root_dir)
for name in ("cachedir", "pki_dir", "sock_dir", "conf_dir"):
dirpath = root_dir / name
dirpath.mkdir(parents=True)
opts[name] = str(dirpath)
opts["log_file"] = "logs/master.log"
return opts
@pytest.fixture
def syndic_opts(tmp_path):
"""
    Default syndic configuration with relative temporary paths to not
require root permissions.
"""
root_dir = tmp_path / "syndic"
opts = salt.config.DEFAULT_MINION_OPTS.copy()
opts["syndic_master"] = "127.0.0.1"
opts["__role"] = "minion"
opts["root_dir"] = str(root_dir)
for name in ("cachedir", "pki_dir", "sock_dir", "conf_dir"):
dirpath = root_dir / name
dirpath.mkdir(parents=True)
opts[name] = str(dirpath)
opts["log_file"] = "logs/syndic.log"
return opts
07070100000060000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003C00000000test-repo-1-0.1/saltext.prometheus-2.1.0/tests/unit/engines07070100000061000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004800000000test-repo-1-0.1/saltext.prometheus-2.1.0/tests/unit/engines/__init__.py07070100000062000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003E00000000test-repo-1-0.1/saltext.prometheus-2.1.0/tests/unit/returners07070100000063000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004A00000000test-repo-1-0.1/saltext.prometheus-2.1.0/tests/unit/returners/__init__.py07070100000064000081A400000000000000000000000167471E9C00007AA0000000000000000000000000000000000000006100000000test-repo-1-0.1/saltext.prometheus-2.1.0/tests/unit/returners/test_prometheus_textfile_return.py# pylint: disable=unused-argument,invalid-name
import os
import re
from pathlib import Path
import pytest
import salt.utils.files
import salt.version
import saltext.prometheus.returners.prometheus_textfile as prometheus_textfile
from tests.support.mock import MagicMock
from tests.support.mock import patch
@pytest.fixture()
def root_dir(tmp_path):
return str(tmp_path / "root_dir")
@pytest.fixture()
def cache_dir(root_dir):
return os.path.join(root_dir, "cachedir")
@pytest.fixture(scope="function")
def job_ret():
ret = {
"jid": "20211109174620871797",
"return": {
"cmd_|-echo includeme_|-echo includeme_|-run": {
"name": "echo includeme",
"changes": {
"pid": 10549,
"retcode": 0,
"stdout": "includeme",
"stderr": "",
},
"result": True,
"comment": 'Command "echo includeme" run',
"__sls__": "includeme",
"__run_num__": 0,
"start_time": "17:46:21.013878",
"duration": 7.688,
"__id__": "echo includeme",
},
"cmd_|-echo applyme_|-echo applyme_|-run": {
"name": "echo applyme",
"changes": {
"pid": 10550,
"retcode": 0,
"stdout": "applyme",
"stderr": "",
},
"result": None,
"comment": 'Command "echo applyme" run',
"__sls__": "applyme",
"__run_num__": 1,
"start_time": "17:46:21.021948",
"duration": 6.007,
"__id__": "echo applyme",
},
},
"retcode": 0,
"out": "highstate",
"id": "d10-master-01.example.local",
"fun": "state.apply",
"fun_args": ["applyme"],
"success": True,
}
return ret
@pytest.fixture(scope="function")
def opts(cache_dir, minion):
opts = minion.config.copy()
opts["cachedir"] = cache_dir
return opts
def test_basic_prometheus_output_with_default_options(job_ret, cache_dir, opts):
expected = "\n".join(
sorted(
[
"# HELP salt_procs Number of salt minion processes running",
"# TYPE salt_procs gauge",
"salt_procs 0.0",
"# HELP salt_states_succeeded Number of successful states in the run",
"# TYPE salt_states_succeeded gauge",
"salt_states_succeeded 2.0",
"# HELP salt_states_failed Number of failed states in the run",
"# TYPE salt_states_failed gauge",
"salt_states_failed 0.0",
"# HELP salt_states_changed Number of changed states in the run",
"# TYPE salt_states_changed gauge",
"salt_states_changed 2.0",
"# HELP salt_states_total Total states in the run",
"# TYPE salt_states_total gauge",
"salt_states_total 2.0",
"# HELP salt_states_success_pct Percent of successful states in the run",
"# TYPE salt_states_success_pct gauge",
"salt_states_success_pct 100.0",
"# HELP salt_states_failure_pct Percent of failed states in the run",
"# TYPE salt_states_failure_pct gauge",
"salt_states_failure_pct 0.0",
"# HELP salt_states_changed_pct Percent of changed states in the run",
"# TYPE salt_states_changed_pct gauge",
"salt_states_changed_pct 100.0",
"# HELP salt_elapsed_time Time spent for all operations during the state run",
"# TYPE salt_elapsed_time gauge",
"salt_elapsed_time 13.695",
"# HELP salt_last_started Estimated time the state run started",
"# TYPE salt_last_started gauge",
"# HELP salt_last_completed Time of last state run completion",
"# TYPE salt_last_completed gauge",
"# HELP salt_version Version of installed Salt package",
"# TYPE salt_version gauge",
"salt_version {}".format( # pylint: disable=consider-using-f-string
salt.version.__version__.split("rc", maxsplit=1)[0]
),
"# HELP salt_version_tagged Version of installed Salt package as a tag",
"# TYPE salt_version_tagged gauge",
f'salt_version_tagged{{salt_version="{salt.version.__version__}"}} 1.0',
]
)
)
with patch(
"saltext.prometheus.returners.prometheus_textfile.__opts__", opts, create=True
), patch("saltext.prometheus.returners.prometheus_textfile.__salt__", {}, create=True):
prometheus_textfile.returner(job_ret)
with salt.utils.files.fopen(
os.path.join(cache_dir, "prometheus_textfile", "salt.prom")
) as prom_file:
# Drop time-based fields for comparison
salt_prom = "\n".join(
sorted(
line[:-1]
for line in prom_file
if not line.startswith("salt_last_started")
and not line.startswith("salt_last_completed")
)
)
assert salt_prom == expected
@pytest.mark.parametrize(
"state_name,filename,expected_filename",
[
("aaa", "one", "one-aaa"),
("bbb", "one.two", "one-bbb.two"),
("ccc", "one.two.three", "one.two-ccc.three"),
("ddd", "one.two.three.four", "one.two.three-ddd.four"),
],
)
def test_when_add_state_name_is_set_then_correct_output_should_be_in_correct_file(
state_name,
filename,
expected_filename,
opts,
cache_dir,
job_ret,
):
job_ret["fun_args"][0] = state_name
opts.update({"add_state_name": True, "filename": os.path.join(cache_dir, filename)})
expected = "\n".join(
sorted(
[
"# HELP salt_procs Number of salt minion processes running",
"# TYPE salt_procs gauge",
f'salt_procs{{state="{state_name}"}} 0.0',
"# HELP salt_states_succeeded Number of successful states in the run",
"# TYPE salt_states_succeeded gauge",
f'salt_states_succeeded{{state="{state_name}"}} 2.0',
"# HELP salt_states_failed Number of failed states in the run",
"# TYPE salt_states_failed gauge",
f'salt_states_failed{{state="{state_name}"}} 0.0',
"# HELP salt_states_changed Number of changed states in the run",
"# TYPE salt_states_changed gauge",
f'salt_states_changed{{state="{state_name}"}} 2.0',
"# HELP salt_states_total Total states in the run",
"# TYPE salt_states_total gauge",
f'salt_states_total{{state="{state_name}"}} 2.0',
"# HELP salt_states_success_pct Percent of successful states in the run",
"# TYPE salt_states_success_pct gauge",
f'salt_states_success_pct{{state="{state_name}"}} 100.0',
"# HELP salt_states_failure_pct Percent of failed states in the run",
"# TYPE salt_states_failure_pct gauge",
f'salt_states_failure_pct{{state="{state_name}"}} 0.0',
"# HELP salt_states_changed_pct Percent of changed states in the run",
"# TYPE salt_states_changed_pct gauge",
f'salt_states_changed_pct{{state="{state_name}"}} 100.0',
"# HELP salt_elapsed_time Time spent for all operations during the state run",
"# TYPE salt_elapsed_time gauge",
f'salt_elapsed_time{{state="{state_name}"}} 13.695',
"# HELP salt_last_started Estimated time the state run started",
"# TYPE salt_last_started gauge",
"# HELP salt_last_completed Time of last state run completion",
"# TYPE salt_last_completed gauge",
"# HELP salt_version Version of installed Salt package",
"# TYPE salt_version gauge",
'salt_version{{state="{}"}} {}'.format( # pylint: disable=consider-using-f-string
state_name, salt.version.__version__.split("rc", maxsplit=1)[0]
),
"# HELP salt_version_tagged Version of installed Salt package as a tag",
"# TYPE salt_version_tagged gauge",
'salt_version_tagged{{salt_version="{}",state="{}"}} 1.0'.format( # pylint: disable=consider-using-f-string
salt.version.__version__, state_name
),
]
)
)
with patch(
"saltext.prometheus.returners.prometheus_textfile.__opts__", opts, create=True
), patch("saltext.prometheus.returners.prometheus_textfile.__salt__", {}, create=True):
prometheus_textfile.returner(job_ret)
with salt.utils.files.fopen(os.path.join(cache_dir, expected_filename)) as prom_file:
# use line[:-1] to strip off the newline, but only one. It may be extra
# paranoid due to how Python file iteration works, but...
salt_prom = "\n".join(
sorted(
line[:-1]
for line in prom_file
if not line.startswith("salt_last_started")
and not line.startswith("salt_last_completed")
)
)
assert salt_prom == expected
def test_prometheus_output_with_show_failed_state_option_and_abort_state_ids(
job_ret, cache_dir, opts
):
job_ret["return"]["cmd_|-echo includeme_|-echo includeme_|-run"]["result"] = False
opts.update({"show_failed_states": True})
promfile_lines = [
"# HELP salt_procs Number of salt minion processes running",
"# TYPE salt_procs gauge",
"salt_procs 0.0",
"# HELP salt_states_succeeded Number of successful states in the run",
"# TYPE salt_states_succeeded gauge",
"salt_states_succeeded 1.0",
"# HELP salt_states_failed Number of failed states in the run",
"# TYPE salt_states_failed gauge",
"salt_states_failed 1.0",
"# HELP salt_states_changed Number of changed states in the run",
"# TYPE salt_states_changed gauge",
"salt_states_changed 2.0",
"# HELP salt_states_total Total states in the run",
"# TYPE salt_states_total gauge",
"salt_states_total 2.0",
"# HELP salt_states_success_pct Percent of successful states in the run",
"# TYPE salt_states_success_pct gauge",
"salt_states_success_pct 50.0",
"# HELP salt_states_failure_pct Percent of failed states in the run",
"# TYPE salt_states_failure_pct gauge",
"salt_states_failure_pct 50.0",
"# HELP salt_states_changed_pct Percent of changed states in the run",
"# TYPE salt_states_changed_pct gauge",
"salt_states_changed_pct 100.0",
"# HELP salt_elapsed_time Time spent for all operations during the state run",
"# TYPE salt_elapsed_time gauge",
"salt_elapsed_time 13.695",
"# HELP salt_last_started Estimated time the state run started",
"# TYPE salt_last_started gauge",
"# HELP salt_last_completed Time of last state run completion",
"# TYPE salt_last_completed gauge",
"# HELP salt_version Version of installed Salt package",
"# TYPE salt_version gauge",
"salt_version {}".format( # pylint: disable=consider-using-f-string
salt.version.__version__.split("rc", maxsplit=1)[0]
),
"# HELP salt_version_tagged Version of installed Salt package as a tag",
"# TYPE salt_version_tagged gauge",
f'salt_version_tagged{{salt_version="{salt.version.__version__}"}} 1.0',
"# HELP salt_failed Information regarding state with failure condition",
"# TYPE salt_failed gauge",
'salt_failed{state_comment="Command echo includeme run",state_id="echo includeme"} 1.0',
]
# Test one failed state
expected = "\n".join(sorted(promfile_lines))
with patch(
"saltext.prometheus.returners.prometheus_textfile.__opts__", opts, create=True
), patch("saltext.prometheus.returners.prometheus_textfile.__salt__", {}, create=True):
prometheus_textfile.returner(job_ret)
with salt.utils.files.fopen(
os.path.join(cache_dir, "prometheus_textfile", "salt.prom")
) as prom_file:
# Drop time-based fields for comparison
salt_prom = "\n".join(
sorted(
line[:-1]
for line in prom_file
if not line.startswith("salt_last_started")
and not line.startswith("salt_last_completed")
)
)
assert salt_prom == expected
# Test two failed states
job_ret["return"]["cmd_|-echo applyme_|-echo applyme_|-run"]["result"] = False
promfile_lines[5] = "salt_states_succeeded 0.0"
promfile_lines[8] = "salt_states_failed 2.0"
promfile_lines[17] = "salt_states_success_pct 0.0"
promfile_lines[20] = "salt_states_failure_pct 100.0"
promfile_lines.append(
'salt_failed{state_comment="Command echo applyme run",state_id="echo applyme"} 1.0'
)
expected = "\n".join(sorted(promfile_lines))
with patch(
"saltext.prometheus.returners.prometheus_textfile.__opts__", opts, create=True
), patch("saltext.prometheus.returners.prometheus_textfile.__salt__", {}, create=True):
prometheus_textfile.returner(job_ret)
with salt.utils.files.fopen(
os.path.join(cache_dir, "prometheus_textfile", "salt.prom")
) as prom_file:
# Drop time-based fields for comparison
salt_prom = "\n".join(
sorted(
line[:-1]
for line in prom_file
if not line.startswith("salt_last_started")
and not line.startswith("salt_last_completed")
)
)
assert salt_prom == expected
# Test abort state ID
opts.update({"abort_state_ids": ["echo includeme"]})
promfile_lines.extend(
[
"# HELP salt_aborted Flag to show that a specific abort state failed",
"# TYPE salt_aborted gauge",
'salt_aborted{state_id="echo includeme"} 1.0',
]
)
expected = "\n".join(sorted(promfile_lines))
with patch(
"saltext.prometheus.returners.prometheus_textfile.__opts__", opts, create=True
), patch("saltext.prometheus.returners.prometheus_textfile.__salt__", {}, create=True):
prometheus_textfile.returner(job_ret)
with salt.utils.files.fopen(
os.path.join(cache_dir, "prometheus_textfile", "salt.prom")
) as prom_file:
# Drop time-based fields for comparison
salt_prom = "\n".join(
sorted(
line[:-1]
for line in prom_file
if not line.startswith("salt_last_started")
and not line.startswith("salt_last_completed")
)
)
assert salt_prom == expected
def test_fail_comments_lengths(job_ret, cache_dir, opts):
opts.update({"show_failed_states": True})
promfile_lines = [
"# HELP salt_procs Number of salt minion processes running",
"# TYPE salt_procs gauge",
"salt_procs 0.0",
"# HELP salt_states_succeeded Number of successful states in the run",
"# TYPE salt_states_succeeded gauge",
"salt_states_succeeded 0.0",
"# HELP salt_states_failed Number of failed states in the run",
"# TYPE salt_states_failed gauge",
"salt_states_failed 2.0",
"# HELP salt_states_changed Number of changed states in the run",
"# TYPE salt_states_changed gauge",
"salt_states_changed 2.0",
"# HELP salt_states_total Total states in the run",
"# TYPE salt_states_total gauge",
"salt_states_total 2.0",
"# HELP salt_states_success_pct Percent of successful states in the run",
"# TYPE salt_states_success_pct gauge",
"salt_states_success_pct 0.0",
"# HELP salt_states_failure_pct Percent of failed states in the run",
"# TYPE salt_states_failure_pct gauge",
"salt_states_failure_pct 100.0",
"# HELP salt_states_changed_pct Percent of changed states in the run",
"# TYPE salt_states_changed_pct gauge",
"salt_states_changed_pct 100.0",
"# HELP salt_elapsed_time Time spent for all operations during the state run",
"# TYPE salt_elapsed_time gauge",
"salt_elapsed_time 13.695",
"# HELP salt_last_started Estimated time the state run started",
"# TYPE salt_last_started gauge",
"# HELP salt_last_completed Time of last state run completion",
"# TYPE salt_last_completed gauge",
"# HELP salt_version Version of installed Salt package",
"# TYPE salt_version gauge",
"salt_version {}".format( # pylint: disable=consider-using-f-string
salt.version.__version__.split("rc", maxsplit=1)[0]
),
"# HELP salt_version_tagged Version of installed Salt package as a tag",
"# TYPE salt_version_tagged gauge",
f'salt_version_tagged{{salt_version="{salt.version.__version__}"}} 1.0',
"# HELP salt_failed Information regarding state with failure condition",
"# TYPE salt_failed gauge",
'salt_failed{state_comment="Command echo includeme run",state_id="echo includeme"} 1.0',
'salt_failed{state_comment="Command echo applyme run",state_id="echo applyme"} 1.0',
]
# Test two failed states with no comment length limit
opts.update({"fail_comment_length": None})
expected = "\n".join(sorted(promfile_lines))
job_ret["return"]["cmd_|-echo includeme_|-echo includeme_|-run"]["result"] = False
job_ret["return"]["cmd_|-echo applyme_|-echo applyme_|-run"]["result"] = False
with patch(
"saltext.prometheus.returners.prometheus_textfile.__opts__", opts, create=True
), patch("saltext.prometheus.returners.prometheus_textfile.__salt__", {}, create=True):
prometheus_textfile.returner(job_ret)
with salt.utils.files.fopen(
os.path.join(cache_dir, "prometheus_textfile", "salt.prom")
) as prom_file:
# Drop time-based fields for comparison
salt_prom = "\n".join(
sorted(
line[:-1]
for line in prom_file
if not line.startswith("salt_last_started")
and not line.startswith("salt_last_completed")
)
)
assert salt_prom == expected
promfile_lines = [
"# HELP salt_procs Number of salt minion processes running",
"# TYPE salt_procs gauge",
"salt_procs 0.0",
"# HELP salt_states_succeeded Number of successful states in the run",
"# TYPE salt_states_succeeded gauge",
"salt_states_succeeded 0.0",
"# HELP salt_states_failed Number of failed states in the run",
"# TYPE salt_states_failed gauge",
"salt_states_failed 2.0",
"# HELP salt_states_changed Number of changed states in the run",
"# TYPE salt_states_changed gauge",
"salt_states_changed 2.0",
"# HELP salt_states_total Total states in the run",
"# TYPE salt_states_total gauge",
"salt_states_total 2.0",
"# HELP salt_states_success_pct Percent of successful states in the run",
"# TYPE salt_states_success_pct gauge",
"salt_states_success_pct 0.0",
"# HELP salt_states_failure_pct Percent of failed states in the run",
"# TYPE salt_states_failure_pct gauge",
"salt_states_failure_pct 100.0",
"# HELP salt_states_changed_pct Percent of changed states in the run",
"# TYPE salt_states_changed_pct gauge",
"salt_states_changed_pct 100.0",
"# HELP salt_elapsed_time Time spent for all operations during the state run",
"# TYPE salt_elapsed_time gauge",
"salt_elapsed_time 13.695",
"# HELP salt_last_started Estimated time the state run started",
"# TYPE salt_last_started gauge",
"# HELP salt_last_completed Time of last state run completion",
"# TYPE salt_last_completed gauge",
"# HELP salt_version Version of installed Salt package",
"# TYPE salt_version gauge",
"salt_version {}".format( # pylint: disable=consider-using-f-string
salt.version.__version__.split("rc", maxsplit=1)[0]
),
"# HELP salt_version_tagged Version of installed Salt package as a tag",
"# TYPE salt_version_tagged gauge",
f'salt_version_tagged{{salt_version="{salt.version.__version__}"}} 1.0',
"# HELP salt_failed Information regarding state with failure condition",
"# TYPE salt_failed gauge",
'salt_failed{state_comment="Command echo in",state_id="echo includeme"} 1.0',
'salt_failed{state_comment="Command echo ap",state_id="echo applyme"} 1.0',
]
# Test two failed states with comment length limit of 15
opts.update({"fail_comment_length": 15})
expected = "\n".join(sorted(promfile_lines))
job_ret["return"]["cmd_|-echo includeme_|-echo includeme_|-run"]["result"] = False
job_ret["return"]["cmd_|-echo applyme_|-echo applyme_|-run"]["result"] = False
with patch(
"saltext.prometheus.returners.prometheus_textfile.__opts__", opts, create=True
), patch("saltext.prometheus.returners.prometheus_textfile.__salt__", {}, create=True):
prometheus_textfile.returner(job_ret)
with salt.utils.files.fopen(
os.path.join(cache_dir, "prometheus_textfile", "salt.prom")
) as prom_file:
# Drop time-based fields for comparison
salt_prom = "\n".join(
sorted(
line[:-1]
for line in prom_file
if not line.startswith("salt_last_started")
and not line.startswith("salt_last_completed")
)
)
assert salt_prom == expected
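# Illustrative sketch only (not part of this test module): the read-and-filter
# steps repeated above could be collapsed into a helper; the name is hypothetical
# and it assumes the module-level os and salt.utils.files imports used above.
def _read_prom_without_timestamps(cache_dir):
    prom_path = os.path.join(cache_dir, "prometheus_textfile", "salt.prom")
    with salt.utils.files.fopen(prom_path) as prom_file:
        # Drop the time-based samples and normalize ordering, as the tests do.
        return "\n".join(
            sorted(
                line[:-1]
                for line in prom_file
                if not line.startswith("salt_last_started")
                and not line.startswith("salt_last_completed")
            )
        )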
def test_prometheus_output_with_raw_version(job_ret, cache_dir, opts):
expected_version = "3004+12.g557e6cc0fc"
short_version = expected_version.split("+", maxsplit=1)[0]
float_version = str(float(short_version))
# raw_version == False
with patch(
"saltext.prometheus.returners.prometheus_textfile.__opts__", opts, create=True
), patch("saltext.prometheus.returners.prometheus_textfile.__salt__", {}, create=True), patch(
"saltext.prometheus.returners.prometheus_textfile.__grains__",
{"saltversion": expected_version},
create=True,
):
prometheus_textfile.returner(job_ret)
with salt.utils.files.fopen(
os.path.join(cache_dir, "prometheus_textfile", "salt.prom")
) as prom_file:
# Grab only the salt version
for line in prom_file:
if line.startswith("salt_version "):
salt_version = line.split()[1]
elif line.startswith("salt_version_tagged"):
expression_pattern = re.compile('salt_version="(.+)"')
version = expression_pattern.search(line)
salt_version_tagged = version.groups()[0]
assert salt_version == float_version
assert salt_version_tagged == short_version
# raw_version == True
opts.update({"raw_version": True})
with patch(
"saltext.prometheus.returners.prometheus_textfile.__opts__", opts, create=True
), patch("saltext.prometheus.returners.prometheus_textfile.__salt__", {}, create=True), patch(
"saltext.prometheus.returners.prometheus_textfile.__grains__",
{"saltversion": expected_version},
create=True,
):
prometheus_textfile.returner(job_ret)
with salt.utils.files.fopen(
os.path.join(cache_dir, "prometheus_textfile", "salt.prom")
) as prom_file:
# Grab only the salt version
for line in prom_file:
if line.startswith("salt_version "):
salt_version = line.split()[1]
elif line.startswith("salt_version_tagged"):
expression_pattern = re.compile('salt_version="(.+)"')
version = expression_pattern.search(line)
salt_version_tagged = version.groups()[0]
assert salt_version == float_version
assert salt_version_tagged == expected_version
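# Illustrative recap (values mirror the fixture above): with raw_version unset,
# the tagged label carries the short version and the numeric sample its float form:
#
#   "3004+12.g557e6cc0fc".split("+", maxsplit=1)[0]  # -> "3004"   (salt_version_tagged label)
#   str(float("3004"))                               # -> "3004.0" (salt_version sample)
#
# With raw_version set to True, the label keeps the full "3004+12.g557e6cc0fc" string.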
def test_requisite_handling(cache_dir, opts):
job_ret = {
"fun": "state.apply",
"fun_args": ["prom_ret"],
"id": "d10-saltgit-01.example.local",
"jid": "20230312161844491819",
"out": "highstate",
"retcode": 2,
"return": {
"test_|-failure_|-failure_|-fail_without_changes": {
"__id__": "failure",
"__run_num__": 0,
"__sls__": "prom_ret",
"changes": {},
"comment": "Failure!",
"duration": 1.074,
"name": "failure",
"result": False,
"start_time": "16:18:44.771989",
},
"test_|-wont_run_|-wont_run_|-succeed_without_changes": {
"__run_num__": 1,
"__sls__": "prom_ret",
"changes": {},
"comment": "One or more requisite failed: prom_ret.failure",
"duration": 0.005,
"result": False,
"start_time": "16:18:44.773443",
},
},
"success": True,
}
opts.update({"abort_state_ids": ["echo includeme"]})
with patch(
"saltext.prometheus.returners.prometheus_textfile.__opts__", opts, create=True
), patch("saltext.prometheus.returners.prometheus_textfile.__salt__", {}, create=True):
prometheus_textfile.returner(job_ret)
assert Path(os.path.join(cache_dir, "prometheus_textfile", "salt.prom")).exists()
@pytest.mark.skip_on_windows(reason="mode setting not available on Windows")
def test_mode_passed_to_set_mode(cache_dir, job_ret, opts):
mock_set_mode = MagicMock(return_value=True)
opts.update({"mode": "0644"})
with patch(
"saltext.prometheus.returners.prometheus_textfile.__opts__", opts, create=True
), patch("saltext.prometheus.returners.prometheus_textfile.__salt__", {}, create=True), patch(
"salt.modules.file.set_mode", mock_set_mode
):
prometheus_textfile.returner(job_ret)
mock_set_mode.assert_called_with(
os.path.join(cache_dir, "prometheus_textfile", "salt.prom"), "0644"
)
@pytest.mark.parametrize(
"state_name,filename,expected_filename",
[
("aaa", "one", "one-aaa"),
("bbb", "one.two", "one-bbb.two"),
("ccc", "one.two.three", "one.two-ccc.three"),
("ddd", "one.two.three.four", "one.two.three-ddd.four"),
],
)
def test_add_state_name_adds_salt_aborted_label(
state_name,
filename,
expected_filename,
opts,
cache_dir,
job_ret,
):
job_ret["fun_args"][0] = state_name
job_ret["return"]["cmd_|-echo includeme_|-echo includeme_|-run"]["result"] = False
opts.update(
{
"add_state_name": True,
"filename": os.path.join(cache_dir, filename),
"abort_state_ids": ["echo includeme"],
}
)
expected = "\n".join(
sorted(
[
"# HELP salt_procs Number of salt minion processes running",
"# TYPE salt_procs gauge",
f'salt_procs{{state="{state_name}"}} 0.0',
"# HELP salt_states_succeeded Number of successful states in the run",
"# TYPE salt_states_succeeded gauge",
f'salt_states_succeeded{{state="{state_name}"}} 1.0',
"# HELP salt_states_failed Number of failed states in the run",
"# TYPE salt_states_failed gauge",
f'salt_states_failed{{state="{state_name}"}} 1.0',
"# HELP salt_states_changed Number of changed states in the run",
"# TYPE salt_states_changed gauge",
f'salt_states_changed{{state="{state_name}"}} 2.0',
"# HELP salt_states_total Total states in the run",
"# TYPE salt_states_total gauge",
f'salt_states_total{{state="{state_name}"}} 2.0',
"# HELP salt_states_success_pct Percent of successful states in the run",
"# TYPE salt_states_success_pct gauge",
f'salt_states_success_pct{{state="{state_name}"}} 50.0',
"# HELP salt_states_failure_pct Percent of failed states in the run",
"# TYPE salt_states_failure_pct gauge",
f'salt_states_failure_pct{{state="{state_name}"}} 50.0',
"# HELP salt_states_changed_pct Percent of changed states in the run",
"# TYPE salt_states_changed_pct gauge",
f'salt_states_changed_pct{{state="{state_name}"}} 100.0',
"# HELP salt_elapsed_time Time spent for all operations during the state run",
"# TYPE salt_elapsed_time gauge",
f'salt_elapsed_time{{state="{state_name}"}} 13.695',
"# HELP salt_last_started Estimated time the state run started",
"# TYPE salt_last_started gauge",
"# HELP salt_last_completed Time of last state run completion",
"# TYPE salt_last_completed gauge",
"# HELP salt_version Version of installed Salt package",
"# TYPE salt_version gauge",
'salt_version{{state="{}"}} {}'.format( # pylint: disable=consider-using-f-string
state_name, salt.version.__version__.split("rc", maxsplit=1)[0]
),
"# HELP salt_version_tagged Version of installed Salt package as a tag",
"# TYPE salt_version_tagged gauge",
'salt_version_tagged{{salt_version="{}",state="{}"}} 1.0'.format( # pylint: disable=consider-using-f-string
salt.version.__version__, state_name
),
"# HELP salt_aborted Flag to show that a specific abort state failed",
"# TYPE salt_aborted gauge",
f'salt_aborted{{state="{state_name}",state_id="echo includeme"}} 1.0',
]
)
)
with patch(
"saltext.prometheus.returners.prometheus_textfile.__opts__", opts, create=True
), patch("saltext.prometheus.returners.prometheus_textfile.__salt__", {}, create=True):
prometheus_textfile.returner(job_ret)
with salt.utils.files.fopen(os.path.join(cache_dir, expected_filename)) as prom_file:
# Use line[:-1] to strip the trailing newline, and only that one character;
# iterating over the file yields each line with its newline still attached.
salt_prom = "\n".join(
sorted(
line[:-1]
for line in prom_file
if not line.startswith("salt_last_started")
and not line.startswith("salt_last_completed")
)
)
assert salt_prom == expected
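# Illustrative summary (hypothetical dict, not used by the tests): the
# prometheus_textfile options exercised in this module, with example values.
_EXAMPLE_OPTS = {
    "show_failed_states": True,  # emit per-state salt_failed samples
    "fail_comment_length": 15,  # truncate state_comment labels (None = no limit)
    "raw_version": True,  # keep the full version string in the salt_version_tagged label
    "abort_state_ids": ["echo includeme"],  # failed states that raise the salt_aborted flag
    "add_state_name": True,  # add a state="<name>" label and embed the name in the filename
    "filename": "/path/to/textfile_dir/salt.prom",  # hypothetical output path override
    "mode": "0644",  # file mode handed to salt.modules.file.set_mode
}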
07070100000065000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000002400000000test-repo-1-0.1/saltext_mysql-1.0.007070100000066000081A400000000000000000000000167471E9C00000365000000000000000000000000000000000000003800000000test-repo-1-0.1/saltext_mysql-1.0.0/.copier-answers.yml# Autogenerated. Do not edit this by hand, use `copier update`.
---
_commit: 0.3.7
_src_path: https://github.com/salt-extensions/salt-extension-copier
author: Salt Core Team
author_email: saltproject@vmware.com
coc_contact: ''
copyright_begin: 2023
deploy_docs: rolling
docs_url: https://salt-extensions.github.io/saltext-mysql/
integration_name: MySQL
license: apache
loaders:
- auth
- cache
- module
- pillar
- returner
- state
max_salt_version: 3007
no_saltext_namespace: false
package_name: mysql
project_name: mysql
python_requires: '3.8'
salt_version: '3006'
source_url: https://github.com/salt-extensions/saltext-mysql
ssh_fixtures: false
summary: Salt Extension for interacting with MySQL
test_containers: true
tracker_url: https://github.com/salt-extensions/saltext-mysql/issues
url: https://github.com/salt-extensions/saltext-mysql
workflows: org
07070100000067000081A400000000000000000000000167471E9C000002C0000000000000000000000000000000000000003000000000test-repo-1-0.1/saltext_mysql-1.0.0/.coveragerc[run]
branch = True
cover_pylib = False
relative_files = True
parallel = True
concurrency = multiprocessing
omit =
    .nox/*
    setup.py

[report]
# Regexes for lines to exclude from consideration
exclude_lines =
    # Have to re-enable the standard pragma
    pragma: no cover

    # Don't complain about missing debug-only code:
    def __repr__

    # Don't complain if tests don't hit defensive assertion code:
    raise AssertionError
    raise NotImplementedError

    # Don't complain if non-runnable code isn't run:
    if 0:
    if False:
    if __name__ == .__main__.:

omit =
    .nox/*
    setup.py

ignore_errors = True

[paths]
source =
    saltext/mysql
    src/saltext/mysql
testsuite =
    tests/
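# For illustration (hypothetical snippet): a block such as
#     if __name__ == "__main__":
#         main()
# is excluded from the coverage report because its first line matches one of the
# exclude_lines patterns above.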
07070100000068000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000002C00000000test-repo-1-0.1/saltext_mysql-1.0.0/.github07070100000069000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003B00000000test-repo-1-0.1/saltext_mysql-1.0.0/.github/ISSUE_TEMPLATE0707010000006A000081A400000000000000000000000167471E9C000001CF000000000000000000000000000000000000003E00000000test-repo-1-0.1/saltext_mysql-1.0.0/.github/ISSUE_TEMPLATE.md### Description of Issue
<!-- Note: Please direct questions to the salt-users google group, Slack, IRC, etc. Only post issues and feature requests here -->
### Setup
(Please provide relevant configs and/or SLS files (Be sure to remove sensitive info).)
### Steps to Reproduce Issue
(Include debug logs if possible and relevant.)
### Versions Report
(Provided by running `salt --versions-report`. Please also mention any differences in master/minion versions.)
0707010000006B000081A400000000000000000000000167471E9C00000504000000000000000000000000000000000000004900000000test-repo-1-0.1/saltext_mysql-1.0.0/.github/ISSUE_TEMPLATE/bug_report.md---
name: Bug report
about: Create a report to help us improve
title: "[BUG]"
labels: bug, needs-triage
assignees: ''
---
**Description**
A clear and concise description of what the bug is.
**Setup**
(Please provide relevant configs and/or SLS files (be sure to remove sensitive info). There is no general set-up of Salt.)
Please be as specific as possible and give set-up details.
- [ ] on-prem machine
- [ ] VM (Virtualbox, KVM, etc. please specify)
- [ ] VM running on a cloud service, please be explicit and add details
- [ ] container (Kubernetes, Docker, containerd, etc. please specify)
- [ ] or a combination, please be explicit
- [ ] jails if it is FreeBSD
- [ ] classic packaging
- [ ] onedir packaging
- [ ] used bootstrap to install
**Steps to Reproduce the behavior**
(Include debug logs if possible and relevant)
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Versions Report**
<details><summary>salt --versions-report</summary>
(Provided by running salt --versions-report. Please also mention any differences in master/minion versions.)
```yaml
PASTE HERE
```
</details>
**Additional context**
Add any other context about the problem here.
0707010000006C000081A400000000000000000000000167471E9C00000232000000000000000000000000000000000000004600000000test-repo-1-0.1/saltext_mysql-1.0.0/.github/ISSUE_TEMPLATE/config.ymlblank_issues_enabled: true
contact_links:
  - name: Salt Community Slack
    url: https://saltstackcommunity.slack.com/
    about: Please ask and answer questions here.
  - name: Salt-Users Forum
    url: https://groups.google.com/forum/#!forum/salt-users
    about: Please ask and answer questions here.
  - name: Salt on LiberaChat
    url: https://web.libera.chat/#salt
    about: Please ask and answer questions here.
  - name: Security vulnerabilities
    email: saltproject-security.pdl@broadcom.com
    about: Please report security vulnerabilities here.
0707010000006D000081A400000000000000000000000167471E9C0000022A000000000000000000000000000000000000004300000000test-repo-1-0.1/saltext_mysql-1.0.0/.github/ISSUE_TEMPLATE/docs.md---
name: Docs
about: Issue related to Salt Documentation
title: "[DOCS]"
labels: documentation, needs-triage
assignees: ''
---
**Description**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Suggested Fix**
What did you expect to see in the documentation that is missing or needs updating?
**Type of documentation**
This could be module documentation or a guide.
**Location or format of documentation**
Insert page URL if applicable.
**Additional context**
Add any other context or screenshots here.
0707010000006E000081A400000000000000000000000167471E9C00000277000000000000000000000000000000000000004E00000000test-repo-1-0.1/saltext_mysql-1.0.0/.github/ISSUE_TEMPLATE/feature_request.md---
name: Feature request
about: Suggest an idea for this project
title: "[FEATURE REQUEST]"
labels: feature, needs-triage
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.
0707010000006F000081A400000000000000000000000167471E9C000001AF000000000000000000000000000000000000004800000000test-repo-1-0.1/saltext_mysql-1.0.0/.github/ISSUE_TEMPLATE/tech-debt.md---
name: Tech Debt
about: Issue is related to tech debt. This includes compatibility changes for newer versions of software and OSes that Salt interacts with.
title: "[TECH DEBT]"
labels: tech-debt
assignees: ''
---
### Description of the tech debt to be addressed, include links and screenshots
### Versions Report
(Provided by running `salt --versions-report`. Please also mention any differences in master/minion versions.)
07070100000070000081A400000000000000000000000167471E9C000003A5000000000000000000000000000000000000004500000000test-repo-1-0.1/saltext_mysql-1.0.0/.github/PULL_REQUEST_TEMPLATE.md### What does this PR do?
### What issues does this PR fix or reference?
Fixes:
### Previous Behavior
Remove this section if not relevant
### New Behavior
Remove this section if not relevant
### Merge requirements satisfied?
**[NOTICE] Bug fixes or features added to Salt require tests.**
<!-- Please review the [test documentation](https://docs.saltproject.io/en/master/topics/tutorials/writing_tests.html) for details on how to implement tests into Salt's test suite. -->
- [ ] Docs
- [ ] Changelog - https://docs.saltproject.io/en/master/topics/development/changelog.html
- [ ] Tests written/updated
### Commits signed with GPG?
Yes/No
Please review [Salt's Contributing Guide](https://docs.saltproject.io/en/master/topics/development/contributing.html) for best practices.
See GitHub's [page on GPG signing](https://help.github.com/articles/signing-commits-using-gpg/) for more information about signing commits with GPG.
07070100000071000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003600000000test-repo-1-0.1/saltext_mysql-1.0.0/.github/workflows07070100000072000081A400000000000000000000000167471E9C000001A8000000000000000000000000000000000000003D00000000test-repo-1-0.1/saltext_mysql-1.0.0/.github/workflows/pr.ymlname: Pull Request or Push
on:
  push:
    branches:
      - 'main'  # Run on pushes to main
    tags-ignore:
      - '*'  # Ignore pushes to tags
  pull_request:

jobs:
  call_central_workflow:
    name: CI
    uses: salt-extensions/central-artifacts/.github/workflows/ci.yml@main
    with:
      deploy-docs: true
    permissions:
      contents: write
      id-token: write
      pages: write
      pull-requests: read
07070100000073000081A400000000000000000000000167471E9C00000327000000000000000000000000000000000000003E00000000test-repo-1-0.1/saltext_mysql-1.0.0/.github/workflows/tag.ymlname: Tagged Releases
on:
  push:
    tags:
      - "v*"  # Only tags starting with "v" for "v1.0.0", etc.

jobs:
  get_tag_version:
    runs-on: ubuntu-latest
    outputs:
      version: ${{ steps.get_version.outputs.version }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Extract tag name
        id: get_version
        run: echo "version=$(echo ${GITHUB_REF#refs/tags/v})" >> $GITHUB_OUTPUT

  call_central_workflow:
    needs: get_tag_version
    uses: salt-extensions/central-artifacts/.github/workflows/ci.yml@main
    with:
      deploy-docs: true
      release: true
      version: ${{ needs.get_tag_version.outputs.version }}
    permissions:
      contents: write
      id-token: write
      pages: write
      pull-requests: read
    secrets: inherit
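# Note on the "Extract tag name" step above: ${GITHUB_REF#refs/tags/v} strips the
# shortest "refs/tags/v" prefix, so a push of the tag "v1.0.0" yields version
# "1.0.0" (the inner echo is redundant but harmless).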
07070100000074000081A400000000000000000000000167471E9C0000078B000000000000000000000000000000000000002F00000000test-repo-1-0.1/saltext_mysql-1.0.0/.gitignore# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# Ignore the setuptools_scm auto-generated version module
src/saltext/mysql/version.py
# Ignore CI generated artifacts
artifacts/
07070100000075000081ED00000000000000000000000167471E9C0000112F000000000000000000000000000000000000003C00000000test-repo-1-0.1/saltext_mysql-1.0.0/.pre-commit-config.yaml---
minimum_pre_commit_version: 2.4.0
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.6.0
hooks:
- id: check-merge-conflict # Check for files that contain merge conflict strings.
- id: trailing-whitespace # Trims trailing whitespace.
args: [--markdown-linebreak-ext=md]
- id: mixed-line-ending # Replaces or checks mixed line ending.
args: [--fix=lf]
- id: end-of-file-fixer # Makes sure files end in a newline and only a newline.
- id: check-merge-conflict # Check for files that contain merge conflict strings.
- id: check-ast # Simply check whether files parse as valid python.
# ----- Formatting ---------------------------------------------------------------------------->
- repo: https://github.com/saltstack/pre-commit-remove-import-headers
rev: 1.1.0
hooks:
- id: remove-import-headers
- repo: local
hooks:
- id: check-cli-examples
name: Check CLI examples on execution modules
entry: python .pre-commit-hooks/check-cli-examples.py
language: system
files: ^src/saltext/mysql/modules/.*\.py$
- repo: local
hooks:
- id: check-docs
name: Check rST doc files exist for modules/states
entry: python .pre-commit-hooks/make-autodocs.py
language: system
pass_filenames: false
- repo: https://github.com/s0undt3ch/salt-rewrite
# Automatically rewrite code with known rules
rev: 2.5.2
hooks:
- id: salt-rewrite
alias: rewrite-docstrings
name: Salt extensions docstrings auto-fixes
files: ^src/saltext/mysql/.*\.py$
args: [--silent]
- repo: https://github.com/s0undt3ch/salt-rewrite
# Automatically rewrite code with known rules
rev: 2.5.2
hooks:
- id: salt-rewrite
alias: rewrite-tests
name: Rewrite the test suite
files: ^tests/.*\.py$
args: [--silent, -E, fix_docstrings]
- repo: https://github.com/asottile/pyupgrade
rev: v3.15.2
hooks:
- id: pyupgrade
name: Rewrite Code to be Py3.8+
args: [
--py38-plus
]
exclude: src/saltext/mysql/version.py
- repo: https://github.com/PyCQA/isort
rev: 5.13.2
hooks:
- id: isort
args: [
--py 38,
]
exclude: src/saltext/mysql/(__init__|version).py
- repo: https://github.com/psf/black
rev: 24.2.0
hooks:
- id: black
args: [-l 100]
exclude: src/saltext/mysql/version.py
- repo: https://github.com/adamchainz/blacken-docs
rev: 1.16.0
hooks:
- id: blacken-docs
args: [--skip-errors]
files: ^(docs/.*\.rst|src/saltext/mysql/.*\.py)$
additional_dependencies:
- black==24.2.0
# <---- Formatting -----------------------------------------------------------------------------
# ----- Security ------------------------------------------------------------------------------>
- repo: https://github.com/PyCQA/bandit
rev: 1.7.8
hooks:
- id: bandit
alias: bandit-salt
name: Run bandit against the code base
args: [--silent, -lll, --skip, B701]
exclude: src/saltext/mysql/version.py
- repo: https://github.com/PyCQA/bandit
rev: 1.7.8
hooks:
- id: bandit
alias: bandit-tests
name: Run bandit against the test suite
args: [--silent, -lll, --skip, B701]
files: ^tests/.*
# <---- Security -------------------------------------------------------------------------------
# ----- Code Analysis ------------------------------------------------------------------------->
- repo: https://github.com/saltstack/mirrors-nox
rev: v2022.11.21
hooks:
- id: nox
alias: lint-src
name: Lint Source Code
files: ^((setup|noxfile)|src/.*)\.py$
require_serial: true
args:
- -e
- lint-code-pre-commit
- --
- repo: https://github.com/saltstack/mirrors-nox
rev: v2022.11.21
hooks:
- id: nox
alias: lint-tests
name: Lint Tests
files: ^tests/.*\.py$
require_serial: true
args:
- -e
- lint-tests-pre-commit
- --
# <---- Code Analysis --------------------------------------------------------------------------
07070100000076000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003600000000test-repo-1-0.1/saltext_mysql-1.0.0/.pre-commit-hooks07070100000077000081A400000000000000000000000167471E9C00000821000000000000000000000000000000000000004C00000000test-repo-1-0.1/saltext_mysql-1.0.0/.pre-commit-hooks/check-cli-examples.pyimport ast
import pathlib
import re
import sys

CODE_ROOT = pathlib.Path(__file__).resolve().parent.parent
EXECUTION_MODULES_PATH = CODE_ROOT / "src" / "saltext" / "mysql" / "modules"


def check_cli_examples(files):
    """
    Check that every function on every execution module provides a CLI example
    """
    errors = 0
    for file in files:
        path = pathlib.Path(file).resolve()
        try:
            relpath = path.relative_to(EXECUTION_MODULES_PATH)
            if str(relpath.parent) != ".":
                # We don't want to check nested packages
                continue
        except ValueError:
            # We're only interested in execution modules
            continue
        module = ast.parse(path.read_text(), filename=str(path))
        for funcdef in [node for node in module.body if isinstance(node, ast.FunctionDef)]:
            if funcdef.name.startswith("_"):
                # We're not interested in internal functions
                continue

            docstring = ast.get_docstring(funcdef, clean=False)
            if not docstring:
                errors += 1
                print(
                    "The function {!r} on '{}' does not have a docstring".format(
                        funcdef.name,
                        path.relative_to(CODE_ROOT),
                    ),
                    file=sys.stderr,
                )
                continue

            if _check_cli_example_present(docstring) is False:
                errors += 1
                print(
                    "The function {!r} on '{}' does not have a 'CLI Example:' in its docstring".format(
                        funcdef.name,
                        path.relative_to(CODE_ROOT),
                    ),
                    file=sys.stderr,
                )
                continue

    sys.exit(errors)


CLI_EXAMPLE_PRESENT_RE = re.compile(r"CLI Example(?:s)?:")


def _check_cli_example_present(docstring):
    return CLI_EXAMPLE_PRESENT_RE.search(docstring) is not None


if __name__ == "__main__":
    check_cli_examples(sys.argv[1:])
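# Illustrative only (hypothetical execution module function): a docstring that
# passes this check because it contains a "CLI Example:" section.
#
#     def ping():
#         """
#         Return True if the MySQL server answers.
#
#         CLI Example:
#
#         .. code-block:: bash
#
#             salt '*' mysql.ping
#         """
#         return True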
07070100000078000081A400000000000000000000000167471E9C00000E25000000000000000000000000000000000000004700000000test-repo-1-0.1/saltext_mysql-1.0.0/.pre-commit-hooks/make-autodocs.pyimport ast
import os.path
import subprocess
from pathlib import Path

repo_path = Path(subprocess.check_output(["git", "rev-parse", "--show-toplevel"]).decode().strip())
src_dir = repo_path / "src" / "saltext" / "mysql"
doc_dir = repo_path / "docs"

docs_by_kind = {}
changed_something = False


def _find_virtualname(path):
    tree = ast.parse(path.read_text())
    for node in ast.walk(tree):
        if isinstance(node, ast.Assign):
            for target in node.targets:
                if isinstance(target, ast.Name) and target.id == "__virtualname__":
                    if isinstance(node.value, ast.Str):
                        virtualname = node.value.s
                        break
            else:
                continue
            break
    else:
        virtualname = path.with_suffix("").name
    return virtualname


def write_module(rst_path, path, use_virtualname=True):
    if use_virtualname:
        virtualname = "``" + _find_virtualname(path) + "``"
    else:
        virtualname = make_import_path(path)
    module_contents = f"""\
{virtualname}
{'='*len(virtualname)}

.. automodule:: {make_import_path(path)}
    :members:
"""
    if not rst_path.exists() or rst_path.read_text() != module_contents:
        print(rst_path)
        rst_path.write_text(module_contents)
        return True
    return False


def write_index(index_rst, import_paths, kind):
    if kind == "utils":
        header_text = "Utilities"
        common_path = os.path.commonpath(tuple(x.replace(".", "/") for x in import_paths)).replace(
            "/", "."
        )
        if any(x == common_path for x in import_paths):
            common_path = common_path[: common_path.rfind(".")]
    else:
        header_text = (
            "execution modules" if kind.lower() == "modules" else kind.rstrip("s") + " modules"
        )
        common_path = import_paths[0][: import_paths[0].rfind(".")]
    header = f"{'_'*len(header_text)}\n{header_text.title()}\n{'_'*len(header_text)}"
    index_contents = f"""\
.. all-saltext.mysql.{kind}:

{header}

.. currentmodule:: {common_path}

.. autosummary::
    :toctree:

{chr(10).join(sorted(' '+p[len(common_path)+1:] for p in import_paths))}
"""
    if not index_rst.exists() or index_rst.read_text() != index_contents:
        print(index_rst)
        index_rst.write_text(index_contents)
        return True
    return False


def make_import_path(path):
    if path.name == "__init__.py":
        path = path.parent
    return ".".join(path.relative_to(repo_path / "src").with_suffix("").parts)


for path in src_dir.glob("*/*.py"):
    if path.name != "__init__.py":
        kind = path.parent.name
        if kind != "utils":
            docs_by_kind.setdefault(kind, set()).add(path)

# Utils can have subdirectories, treat them separately
for path in (src_dir / "utils").rglob("*.py"):
    if path.name == "__init__.py" and not path.read_text():
        continue
    docs_by_kind.setdefault("utils", set()).add(path)

for kind in docs_by_kind:
    kind_path = doc_dir / "ref" / kind
    index_rst = kind_path / "index.rst"
    import_paths = []
    for path in sorted(docs_by_kind[kind]):
        import_path = make_import_path(path)
        import_paths.append(import_path)
        rst_path = kind_path / (import_path + ".rst")
        rst_path.parent.mkdir(parents=True, exist_ok=True)
        change = write_module(rst_path, path, use_virtualname=kind != "utils")
        changed_something = changed_something or change
    write_index(index_rst, import_paths, kind)

# Ensure pre-commit realizes we did something
if changed_something:
    exit(2)
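# Worked examples (hypothetical paths) for make_import_path() above:
#   src/saltext/mysql/modules/mysql_mod.py    -> "saltext.mysql.modules.mysql_mod"
#   src/saltext/mysql/utils/mysql/__init__.py -> "saltext.mysql.utils.mysql"
# write_module() then writes an ``.. automodule::`` stub for each import path.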
07070100000079000081ED00000000000000000000000167471E9C0000565C000000000000000000000000000000000000002E00000000test-repo-1-0.1/saltext_mysql-1.0.0/.pylintrc[MAIN]
# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no
# Clear in-memory caches upon conclusion of linting. Useful if running pylint
# in a server-like mode.
clear-cache-post-run=no
# Load and enable all available extensions. Use --list-extensions to see a list
# all available extensions.
#enable-all-extensions=
# In error mode, messages with a category besides ERROR or FATAL are
# suppressed, and no reports are done by default. Error mode is compatible with
# disabling specific errors.
#errors-only=
# Always return a 0 (non-error) status code, even if lint errors are found.
# This is primarily useful in continuous integration scripts.
#exit-zero=
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code.
extension-pkg-allow-list=
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code. (This is an alternative name to extension-pkg-allow-list
# for backward compatibility.)
extension-pkg-whitelist=
# Return non-zero exit code if any of these messages/categories are detected,
# even if score is above --fail-under value. Syntax same as enable. Messages
# specified are enabled, while categories only check already-enabled messages.
fail-on=
# Specify a score threshold under which the program will exit with error.
fail-under=10
# Interpret the stdin as a python script, whose filename needs to be passed as
# the module_or_package argument.
#from-stdin=
# Files or directories to be skipped. They should be base names, not paths.
ignore=CVS
# Add files or directories matching the regular expressions patterns to the
# ignore-list. The regex matches against paths and can be in Posix or Windows
# format. Because '\\' represents the directory delimiter on Windows systems,
# it can't be used as an escape character.
ignore-paths=
# Files or directories matching the regular expression patterns are skipped.
# The regex matches against base names, not paths. The default value ignores
# Emacs file locks
ignore-patterns=^\.#
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis). It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
# number of processors available to use, and will cap the count on Windows to
# avoid hangs.
jobs=0
# Control the amount of potential inferred values when inferring a single
# object. This can help the performance when dealing with large functions or
# complex, nested conditions.
limit-inference-results=100
# List of plugins (as comma separated values of python module names) to load,
# usually to register additional checkers.
load-plugins=
# Pickle collected data for later comparisons.
persistent=yes
# Minimum Python version to use for version dependent checks. Will default to
# the version used to run pylint.
py-version=3.10
# Discover python modules and packages in the file system subtree.
recursive=no
# Add paths to the list of the source roots. Supports globbing patterns. The
# source root is an absolute path or a path relative to the current working
# directory used to determine a package namespace for modules located under the
# source root.
source-roots=
# When enabled, pylint would attempt to guess common misconfiguration and emit
# user-friendly hints instead of false-positive error messages.
suggestion-mode=yes
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
# In verbose mode, extra non-checker-related info will be displayed.
#verbose=
[BASIC]
# Naming style matching correct argument names.
argument-naming-style=snake_case
# Regular expression matching correct argument names. Overrides argument-
# naming-style. If left empty, argument names will be checked with the set
# naming style.
#argument-rgx=
# Naming style matching correct attribute names.
attr-naming-style=snake_case
# Regular expression matching correct attribute names. Overrides attr-naming-
# style. If left empty, attribute names will be checked with the set naming
# style.
#attr-rgx=
# Bad variable names which should always be refused, separated by a comma.
bad-names=foo,
          bar,
          baz,
          toto,
          tutu,
          tata
# Bad variable names regexes, separated by a comma. If names match any regex,
# they will always be refused
bad-names-rgxs=
# Naming style matching correct class attribute names.
class-attribute-naming-style=any
# Regular expression matching correct class attribute names. Overrides class-
# attribute-naming-style. If left empty, class attribute names will be checked
# with the set naming style.
#class-attribute-rgx=
# Naming style matching correct class constant names.
class-const-naming-style=UPPER_CASE
# Regular expression matching correct class constant names. Overrides class-
# const-naming-style. If left empty, class constant names will be checked with
# the set naming style.
#class-const-rgx=
# Naming style matching correct class names.
class-naming-style=PascalCase
# Regular expression matching correct class names. Overrides class-naming-
# style. If left empty, class names will be checked with the set naming style.
#class-rgx=
# Naming style matching correct constant names.
const-naming-style=UPPER_CASE
# Regular expression matching correct constant names. Overrides const-naming-
# style. If left empty, constant names will be checked with the set naming
# style.
#const-rgx=
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
# Naming style matching correct function names.
function-naming-style=snake_case
# Regular expression matching correct function names. Overrides function-
# naming-style. If left empty, function names will be checked with the set
# naming style.
#function-rgx=
# Good variable names which should always be accepted, separated by a comma.
good-names=i,
           j,
           k,
           ex,
           Run,
           _
# Good variable names regexes, separated by a comma. If names match any regex,
# they will always be accepted
good-names-rgxs=
# Include a hint for the correct naming format with invalid-name.
include-naming-hint=no
# Naming style matching correct inline iteration names.
inlinevar-naming-style=any
# Regular expression matching correct inline iteration names. Overrides
# inlinevar-naming-style. If left empty, inline iteration names will be checked
# with the set naming style.
#inlinevar-rgx=
# Naming style matching correct method names.
method-naming-style=snake_case
# Regular expression matching correct method names. Overrides method-naming-
# style. If left empty, method names will be checked with the set naming style.
#method-rgx=
# Naming style matching correct module names.
module-naming-style=snake_case
# Regular expression matching correct module names. Overrides module-naming-
# style. If left empty, module names will be checked with the set naming style.
#module-rgx=
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
# These decorators are taken in consideration only for invalid-name.
property-classes=abc.abstractproperty
# Regular expression matching correct type alias names. If left empty, type
# alias names will be checked with the set naming style.
#typealias-rgx=
# Regular expression matching correct type variable names. If left empty, type
# variable names will be checked with the set naming style.
#typevar-rgx=
# Naming style matching correct variable names.
variable-naming-style=snake_case
# Regular expression matching correct variable names. Overrides variable-
# naming-style. If left empty, variable names will be checked with the set
# naming style.
#variable-rgx=
[CLASSES]
# Warn about protected attribute access inside special methods
check-protected-access-in-special-methods=no
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,
                      __new__,
                      setUp,
                      asyncSetUp,
                      __post_init__
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs
[DESIGN]
# List of regular expressions of class ancestor names to ignore when counting
# public methods (see R0903)
exclude-too-few-public-methods=
# List of qualified class names to ignore when counting class parents (see
# R0901)
ignored-parents=
# Maximum number of arguments for function / method.
max-args=15
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Maximum number of boolean expressions in an if statement (see R0916).
max-bool-expr=5
# Maximum number of branch for function / method body.
max-branches=12
# Maximum number of locals for function / method body.
max-locals=15
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of public methods for a class (see R0904).
max-public-methods=25
# Maximum number of return / yield for function / method body.
max-returns=6
# Maximum number of statements in function / method body.
max-statements=50
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
[EXCEPTIONS]
# Exceptions that will emit a warning when caught.
overgeneral-exceptions=builtins.BaseException,builtins.Exception
[FORMAT]
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
# Maximum number of characters on a single line.
max-line-length=100
# Maximum number of lines in a module.
max-module-lines=2000
# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
single-line-class-stmt=no
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
[IMPORTS]
# List of modules that can be imported at any level, not just the top level
# one.
allow-any-import-level=
# Allow explicit reexports by alias from a package __init__.
allow-reexport-from-package=no
# Allow wildcard imports from modules that define __all__.
allow-wildcard-with-all=no
# Deprecated modules which should not be used, separated by a comma.
deprecated-modules=
# Output a graph (.gv or any supported image format) of external dependencies
# to the given file (report RP0402 must not be disabled).
ext-import-graph=
# Output a graph (.gv or any supported image format) of all (i.e. internal and
# external) dependencies to the given file (report RP0402 must not be
# disabled).
import-graph=
# Output a graph (.gv or any supported image format) of internal dependencies
# to the given file (report RP0402 must not be disabled).
int-import-graph=
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
# Couples of modules and preferred modules, separated by a comma.
preferred-modules=
[LOGGING]
# The type of string formatting that logging methods do. `old` means using %
# formatting, `new` is for `{}` formatting.
logging-format-style=old
# Logging modules to check that the string format arguments are in logging
# function parameter format.
logging-modules=logging
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE,
# UNDEFINED.
confidence=HIGH,
           CONTROL_FLOW,
           INFERENCE,
           INFERENCE_FAILURE,
           UNDEFINED
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once). You can also use "--disable=all" to
# disable everything first and then re-enable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W".
disable=R,
        locally-disabled,
        file-ignored,
        unexpected-special-method-signature,
        import-error,
        no-member,
        unsubscriptable-object,
        blacklisted-name,
        invalid-name,
        missing-docstring,
        empty-docstring,
        unidiomatic-typecheck,
        wrong-import-order,
        ungrouped-imports,
        wrong-import-position,
        bad-mcs-method-argument,
        bad-mcs-classmethod-argument,
        line-too-long,
        too-many-lines,
        bad-continuation,
        exec-used,
        attribute-defined-outside-init,
        protected-access,
        reimported,
        fixme,
        global-statement,
        unused-variable,
        unused-argument,
        redefined-outer-name,
        redefined-builtin,
        undefined-loop-variable,
        logging-format-interpolation,
        invalid-format-index,
        line-too-long,
        import-outside-toplevel,
        deprecated-method,
        keyword-arg-before-vararg,
        consider-using-f-string,
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=c-extension-no-member
[METHOD_ARGS]
# List of qualified names (i.e., library.method) which require a timeout
# parameter e.g. 'requests.api.get,requests.api.post'
timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,
      XXX,
      TODO
# Regular expression of note tags to take in consideration.
notes-rgx=
[REFACTORING]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
# Complete name of functions that never returns. When checking for
# inconsistent-return-statements if a never returning function is called then
# it will be considered as an explicit return statement and no message will be
# printed.
never-returning-functions=sys.exit,argparse.parse_error
[REPORTS]
# Python expression which should return a score less than or equal to 10. You
# have access to the variables 'fatal', 'error', 'warning', 'refactor',
# 'convention', and 'info' which contain the number of messages in each
# category, as well as 'statement' which is the total number of statements
# analyzed. This score is used by the global evaluation report (RP0004).
evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))
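# Worked example (hypothetical counts): with fatal=0, error=1, warning=2,
# refactor=0, convention=1 and statement=100, the score is
#   10.0 - ((5*1 + 2 + 0 + 1) / 100) * 10 = 10.0 - 0.8 = 9.2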
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details.
msg-template=
# Set the output format. Available formats are text, parseable, colorized, json
# and msvs (visual studio). You can also give a reporter class, e.g.
# mypackage.mymodule.MyReporterClass.
#output-format=
# Tells whether to display a full report or only the messages.
reports=no
# Activate the evaluation score.
score=yes
[SIMILARITIES]
# Comments are removed from the similarity computation
ignore-comments=yes
# Docstrings are removed from the similarity computation
ignore-docstrings=yes
# Imports are removed from the similarity computation
ignore-imports=yes
# Signatures are removed from the similarity computation
ignore-signatures=yes
# Minimum lines number of a similarity.
min-similarity-lines=4
[SPELLING]
# Limits count of emitted suggestions for spelling mistakes.
max-spelling-suggestions=4
# Spelling dictionary name. No available dictionaries: you need to install the
# system dependency for enchant to work.
spelling-dict=
# List of comma separated words that should be considered directives if they
# appear at the beginning of a comment and should not be checked.
spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:
# List of comma separated words that should not be checked.
spelling-ignore-words=
# A path to a file that contains the private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to the private dictionary (see the
# --spelling-private-dict-file option) instead of raising a message.
spelling-store-unknown-words=no
[STRING]
# This flag controls whether inconsistent-quotes generates a warning when the
# character used as a quote delimiter is used inconsistently within a module.
check-quote-consistency=no
# This flag controls whether the implicit-str-concat should generate a warning
# on implicit string concatenation in sequences defined over several lines.
check-str-concat-over-line-jumps=no
[TYPECHECK]
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=
# Tells whether to warn about missing members when the owner of the attribute
# is inferred to be None.
ignore-none=yes
# This flag controls whether pylint should warn about no-member and similar
# checks whenever an opaque object is returned when inferring. The inference
# can return multiple potential results while evaluating a Python object, but
# some branches might not be evaluated, which results in partial inference. In
# that case, it might be useful to still emit no-member and other checks for
# the rest of the inferred objects.
ignore-on-opaque-inference=yes
# List of symbolic message names to ignore for Mixin members.
ignored-checks-for-mixins=no-member,
                          not-async-context-manager,
                          not-context-manager,
                          attribute-defined-outside-init
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace
# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
missing-member-hint=yes
# The minimum edit distance a name should have in order to be considered a
# similar match for a missing member name.
missing-member-hint-distance=1
# The total number of similar names that should be taken in consideration when
# showing a hint for a missing member.
missing-member-max-choices=1
# Regex pattern to define which classes are considered mixins.
mixin-class-rgx=.*[Mm]ixin
# List of decorators that change the signature of a decorated function.
signature-mutators=
[VARIABLES]
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid defining new builtins when possible.
additional-builtins=__opts__,
                    __salt__,
                    __pillar__,
                    __grains__,
                    __context__,
                    __runner__,
                    __ret__,
                    __env__,
                    __low__,
                    __states__,
                    __lowstate__,
                    __running__,
                    __active_provider_name__,
                    __master_opts__,
                    __jid_event__,
                    __instance_id__,
                    __salt_system_encoding__,
                    __proxy__,
                    __serializers__,
                    __reg__,
                    __executors__,
                    __events__
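# For example (illustrative), execution module code such as
#     __salt__["test.ping"]()
# would otherwise be reported as referencing an undefined name.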
# Tells whether unused global variables should be treated as a violation.
allow-global-unused-variables=yes
# List of names allowed to shadow builtins
allowed-redefined-builtins=
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,
          _cb
# A regular expression matching the name of dummy variables (i.e. expected to
# not be used).
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
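# For example (illustrative), names such as "_", "dummy", "ignored_result" and
# "unused_arg" match this pattern and are therefore not reported as unused.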
# Argument names that match this expression will be ignored.
ignored-argument-names=_.*|^ignored_|^unused_
# Tells whether we should check for unused import in __init__ files.
init-import=no
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
0707010000007A000081A400000000000000000000000167471E9C0000014E000000000000000000000000000000000000003100000000test-repo-1-0.1/saltext_mysql-1.0.0/CHANGELOG.mdThe changelog format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
This project uses [Semantic Versioning](https://semver.org/) - MAJOR.MINOR.PATCH
# Changelog
## v1.0.0 (2024-08-08)
Initial release of `saltext-mysql`. This release tracks the functionality in the core Salt code base as of version 3007.1.
0707010000007B000081A400000000000000000000000167471E9C0000147B000000000000000000000000000000000000003700000000test-repo-1-0.1/saltext_mysql-1.0.0/CODE-OF-CONDUCT.md# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in Salt
Extension Modules for MySQL project and our community a
harassment-free experience for everyone, regardless of age, body size, visible
or invisible disability, ethnicity, sex characteristics, gender identity and
expression, level of experience, education, socio-economic status, nationality,
personal appearance, race, religion, or sexual identity and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series
of actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.
0707010000007C000081A400000000000000000000000167471E9C000002AE000000000000000000000000000000000000003400000000test-repo-1-0.1/saltext_mysql-1.0.0/CONTRIBUTING.mdThanks for your interest in contributing to the Salt Extension Modules for
MySQL! We welcome any contribution, large or small - from
adding a new feature to fixing a single letter typo.
This is a companion to the Salt Project and the [Salt Contributing
Guide][salt-contributing] should be considered the default for this project.
Where this project disagrees with the Salt Project, the guidelines here take
precedence. Where this project is silent, the Salt guidelines should be used.
See the **Contributing** section in the [README][README.md] for a quickstart.
[README.md]: README.md
[salt-contributing]: https://docs.saltproject.io/en/master/topics/development/contributing.html
0707010000007D000081A400000000000000000000000167471E9C00002C50000000000000000000000000000000000000002C00000000test-repo-1-0.1/saltext_mysql-1.0.0/LICENSE Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2023 Salt Core Team
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
0707010000007E000081A400000000000000000000000167471E9C000001C8000000000000000000000000000000000000002B00000000test-repo-1-0.1/saltext_mysql-1.0.0/NOTICESalt Extension Modules for MySQL
Copyright 2023 Salt Core Team
This product is licensed to you under the Apache 2.0 license (the "License").
You may not use this product except in compliance with the Apache 2.0 License.
This product may include a number of subcomponents with separate copyright
notices and license terms. Your use of these subcomponents is subject to the
terms and conditions of the subcomponent's license, as noted in the LICENSE
file.
0707010000007F000081A400000000000000000000000167471E9C0000158C000000000000000000000000000000000000002D00000000test-repo-1-0.1/saltext_mysql-1.0.0/PKG-INFOMetadata-Version: 2.1
Name: saltext.mysql
Version: 1.0.0
Summary: Salt Extension for interacting with MySQL
Author-email: Salt Core Team <saltproject@vmware.com>
License: Apache Software License
Project-URL: Homepage, https://github.com/salt-extensions/saltext-mysql
Project-URL: Documentation, https://salt-extensions.github.io/saltext-mysql/
Project-URL: Source, https://github.com/salt-extensions/saltext-mysql
Project-URL: Tracker, https://github.com/salt-extensions/saltext-mysql/issues
Keywords: salt-extension
Platform: any
Classifier: Programming Language :: Python
Classifier: Programming Language :: Cython
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Development Status :: 4 - Beta
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: Apache Software License
Requires-Python: >=3.8
Description-Content-Type: text/markdown
License-File: LICENSE
License-File: NOTICE
Requires-Dist: salt>=3006
Requires-Dist: sqlparse
Provides-Extra: changelog
Requires-Dist: towncrier==22.12.0; extra == "changelog"
Provides-Extra: dev
Requires-Dist: nox; extra == "dev"
Requires-Dist: pre-commit>=2.4.0; extra == "dev"
Requires-Dist: pylint; extra == "dev"
Requires-Dist: saltpylint; extra == "dev"
Provides-Extra: docs
Requires-Dist: sphinx; extra == "docs"
Requires-Dist: sphinx-prompt; extra == "docs"
Requires-Dist: sphinxcontrib-spelling; extra == "docs"
Requires-Dist: sphinx-copybutton; extra == "docs"
Requires-Dist: towncrier==22.12.0; extra == "docs"
Requires-Dist: sphinxcontrib-towncrier; extra == "docs"
Requires-Dist: myst_parser; extra == "docs"
Requires-Dist: furo; extra == "docs"
Requires-Dist: sphinx-inline-tabs; extra == "docs"
Provides-Extra: docsauto
Requires-Dist: sphinx-autobuild; extra == "docsauto"
Provides-Extra: lint
Requires-Dist: pylint; extra == "lint"
Requires-Dist: saltpylint; extra == "lint"
Provides-Extra: tests
Requires-Dist: pytest>=7.2.0; extra == "tests"
Requires-Dist: pytest-salt-factories>=1.0.0; sys_platform == "win32" and extra == "tests"
Requires-Dist: pytest-salt-factories[docker]>=1.0.0; sys_platform != "win32" and extra == "tests"
Requires-Dist: pytest-subtests; extra == "tests"
Requires-Dist: pymysql; extra == "tests"
Requires-Dist: cryptography; extra == "tests"
Provides-Extra: pymysql
Requires-Dist: pymysql; extra == "pymysql"
Provides-Extra: mysqlclient
Requires-Dist: mysqlclient; extra == "mysqlclient"
# Salt Extension for MySQL
Salt Extension for interacting with MySQL
## Security
If you think you have found a security vulnerability, see
[Salt's security guide][security].
## User Documentation
This README is for people aiming to contribute to the project.
If you just want to get started with the extension, check out the
[User Documentation][docs].
## Contributing
The saltext-mysql project team welcomes contributions from the community.
The [Salt Contributing guide][salt-contributing] has a lot of relevant
information, but if you'd like to jump right in here's how to get started:
```bash
# Clone the repo
git clone --origin salt git@github.com:salt-extensions/saltext-mysql.git
# Change to the repo dir
cd saltext-mysql
# Create a new venv
python3 -m venv env --prompt saltext-mysql
source env/bin/activate
# On macOS, you may need to upgrade pip
python -m pip install --upgrade pip
# On WSL or some flavors of Linux you may need to install the `enchant`
# library in order to build the docs
sudo apt-get install -y enchant
# Install extension + test/dev/doc dependencies into your environment
python -m pip install -e '.[tests,dev,docs]'
# Run tests!
python -m nox -e tests-3
# skip requirements install for next time
export SKIP_REQUIREMENTS_INSTALL=1
# Build the docs, serve, and view in your web browser:
python -m nox -e docs && (cd docs/_build/html; python -m webbrowser localhost:8000; python -m http.server; cd -)
```
Writing code isn't the only way to contribute! We value contributions in any of
these areas:
* Documentation - especially examples of how to use this module to solve
specific problems.
* Triaging [issues][issues] and participating in [discussions][discussions]
* Reviewing [Pull Requests][PRs] (we really like
[Conventional Comments][comments]!)
You could also contribute in other ways:
* Writing blog posts
* Posting on social media about how you used Salt + MySQL to solve your
problems, including videos
* Giving talks at conferences
* Publishing videos
* Asking/answering questions in IRC, Discord or email groups
Any of these things are super valuable to our community, and we sincerely
appreciate every contribution!
For more information, build the docs and head over to http://localhost:8000/ —
that's where you'll find the rest of the documentation.
[security]: https://github.com/saltstack/salt/blob/master/SECURITY.md
[salt-contributing]: https://docs.saltproject.io/en/master/topics/development/contributing.html
[issues]: https://github.com/salt-extensions/saltext-mysql/issues
[PRs]: https://github.com/salt-extensions/saltext-mysql/pulls
[discussions]: https://github.com/salt-extensions/saltext-mysql/discussions
[comments]: https://conventionalcomments.org/
[docs]: https://salt-extensions.github.io/saltext-mysql/
07070100000080000081A400000000000000000000000167471E9C00000AEF000000000000000000000000000000000000002E00000000test-repo-1-0.1/saltext_mysql-1.0.0/README.md# Salt Extension for MySQL
Salt Extension for interacting with MySQL
## Security
If you think you have found a security vulnerability, see
[Salt's security guide][security].
## User Documentation
This README is for people aiming to contribute to the project.
If you just want to get started with the extension, check out the
[User Documentation][docs].
## Contributing
The saltext-mysql project team welcomes contributions from the community.
The [Salt Contributing guide][salt-contributing] has a lot of relevant
information, but if you'd like to jump right in here's how to get started:
```bash
# Clone the repo
git clone --origin salt git@github.com:salt-extensions/saltext-mysql.git
# Change to the repo dir
cd saltext-mysql
# Create a new venv
python3 -m venv env --prompt saltext-mysql
source env/bin/activate
# On macOS, you may need to upgrade pip
python -m pip install --upgrade pip
# On WSL or some flavors of Linux you may need to install the `enchant`
# library in order to build the docs
sudo apt-get install -y enchant
# Install extension + test/dev/doc dependencies into your environment
python -m pip install -e '.[tests,dev,docs]'
# Run tests!
python -m nox -e tests-3
# skip requirements install for next time
export SKIP_REQUIREMENTS_INSTALL=1
# Build the docs, serve, and view in your web browser:
python -m nox -e docs && (cd docs/_build/html; python -m webbrowser localhost:8000; python -m http.server; cd -)
```
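The `tests` nox session forwards anything after `--` straight through to pytest (see the
posargs handling in `noxfile.py`), which is handy when iterating on a single module.
A minimal sketch, with a hypothetical test path:
```bash
# Forward pytest options/paths through nox (the path below is hypothetical)
python -m nox -e tests-3 -- -x tests/unit/modules/test_mysql.py
```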
Writing code isn't the only way to contribute! We value contributions in any of
these areas:
* Documentation - especially examples of how to use this module to solve
specific problems.
* Triaging [issues][issues] and participating in [discussions][discussions]
* Reviewing [Pull Requests][PRs] (we really like
[Conventional Comments][comments]!)
You could also contribute in other ways:
* Writing blog posts
* Posting on social media about how you used Salt + MySQL to solve your
problems, including videos
* Giving talks at conferences
* Publishing videos
* Asking/answering questions in IRC, Discord or email groups
Any of these things are super valuable to our community, and we sincerely
appreciate every contribution!
For more information, build the docs and head over to http://localhost:8000/ —
that's where you'll find the rest of the documentation.
[security]: https://github.com/saltstack/salt/blob/master/SECURITY.md
[salt-contributing]: https://docs.saltproject.io/en/master/topics/development/contributing.html
[issues]: https://github.com/salt-extensions/saltext-mysql/issues
[PRs]: https://github.com/salt-extensions/saltext-mysql/pulls
[discussions]: https://github.com/salt-extensions/saltext-mysql/discussions
[comments]: https://conventionalcomments.org/
[docs]: https://salt-extensions.github.io/saltext-mysql/
07070100000081000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000002E00000000test-repo-1-0.1/saltext_mysql-1.0.0/changelog07070100000082000081A400000000000000000000000167471E9C00000135000000000000000000000000000000000000003E00000000test-repo-1-0.1/saltext_mysql-1.0.0/changelog/.template.jinja{% if sections[""] %}
{% for category, val in definitions.items() if category in sections[""] %}
### {{ definitions[category]['name'] }}
{% for text, values in sections[""][category].items() %}
- {{ text }} {{ values|join(', ') }}
{% endfor %}
{% endfor %}
{% else %}
No significant changes.
{% endif %}
07070100000083000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000002900000000test-repo-1-0.1/saltext_mysql-1.0.0/docs07070100000084000081A400000000000000000000000167471E9C0000027A000000000000000000000000000000000000003200000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/Makefile# Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR = .
BUILDDIR = _build
# Put it first so that "make" without argument is like "make help".
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
07070100000085000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000002E00000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/_ext07070100000086000081A400000000000000000000000167471E9C000001EE000000000000000000000000000000000000003C00000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/_ext/saltdomain.py"""
Copied/distilled from Salt doc/_ext/saltdomain.py in order to be able
to use Salt's custom doc refs.
"""
def setup(app):
app.add_crossref_type(
directivename="conf_master",
rolename="conf_master",
indextemplate="pair: %s; conf/master",
)
app.add_crossref_type(
directivename="conf_minion",
rolename="conf_minion",
indextemplate="pair: %s; conf/minion",
)
return {"parallel_read_safe": True, "parallel_write_safe": True}
07070100000087000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003100000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/_static07070100000088000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000003A00000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/_static/.gitkeep07070100000089000081A400000000000000000000000167471E9C0000011A000000000000000000000000000000000000003600000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/changelog.md# Changelog
The changelog format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
This project uses [Semantic Versioning](https://semver.org/) - MAJOR.MINOR.PATCH
```{towncrier-draft-entries}
```
```{include} ../CHANGELOG.md
:start-after: '# Changelog'
```
0707010000008A000081ED00000000000000000000000167471E9C0000188A000000000000000000000000000000000000003100000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/conf.py# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import datetime
import email.policy
import os
import sys
from pathlib import Path
try:
from importlib_metadata import distribution
except ImportError:
from importlib.metadata import distribution
try:
docs_basepath = os.path.abspath(os.path.dirname(__file__))
except NameError:
# sphinx-intl and six execute some code which will raise this NameError
# assume we're in the doc/ directory
docs_basepath = os.path.abspath(os.path.dirname("."))
PROJECT_ROOT_DIR = Path(docs_basepath).parent
addtl_paths = (
os.path.join(os.pardir, "src"), # saltext.mysql itself (for autodoc)
"_ext", # custom Sphinx extensions
)
for addtl_path in addtl_paths:
sys.path.insert(0, os.path.abspath(os.path.join(docs_basepath, addtl_path)))
dist = distribution("saltext.mysql")
# -- Project information -----------------------------------------------------
this_year = datetime.datetime.today().year
if this_year == 2023:
copyright_year = "2023"
else:
copyright_year = f"2023 - {this_year}"
project = dist.metadata["Summary"]
author = dist.metadata.get("Author")
if author is None:
# Core metadata is serialized differently with pyproject.toml:
# https://packaging.python.org/en/latest/specifications/pyproject-toml/#authors-maintainers
author_email = dist.metadata["Author-email"]
em = email.message_from_string(
f"To: {author_email}",
policy=email.policy.default,
)
if em["To"].addresses and em["To"].addresses[0]:
author = em["To"].addresses[0].display_name
author = author or ""
copyright = f"{copyright_year}, {author}"
# The full version, including alpha/beta/rc tags
release = dist.version
# Variables to pass into the docs from sitevars.rst for rst substitution
with open("sitevars.rst") as site_vars_file:
site_vars = site_vars_file.read().splitlines()
rst_prolog = """
{}
""".format(
"\n".join(site_vars[:])
)
# -- General configuration ---------------------------------------------------
linkcheck_ignore = [r"http://localhost:\d+"]
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.autosummary",
"sphinx.ext.napoleon",
"sphinx.ext.intersphinx",
"sphinx.ext.viewcode",
"sphinx.ext.todo",
"sphinx.ext.coverage",
"sphinx_copybutton",
"sphinxcontrib.spelling",
"saltdomain",
"sphinxcontrib.towncrier.ext",
"myst_parser",
"sphinx_inline_tabs",
]
myst_enable_extensions = [
"colon_fence",
"deflist",
"tasklist",
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = [
"_build",
"Thumbs.db",
".DS_Store",
".vscode",
".venv",
".git",
".gitlab-ci",
".gitignore",
"sitevars.rst",
]
autosummary_generate = False
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "furo"
html_title = project
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = ""
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large. Favicons can be up to at least 228x228. PNG
# format is supported as well, not just .ico.
html_favicon = ""
# Sphinx Napoleon Config
napoleon_google_docstring = True
napoleon_numpy_docstring = False
napoleon_include_init_with_doc = True
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True
# ----- Intersphinx Config ---------------------------------------------------------------------------------------->
intersphinx_mapping = {
"python": ("https://docs.python.org/3", None),
"pytest": ("https://docs.pytest.org/en/stable", None),
"salt": ("https://docs.saltproject.io/en/latest", None),
}
# <---- Intersphinx Config -----------------------------------------------------------------------------------------
# ----- Autodoc Config ---------------------------------------------------------------------------------------------->
autodoc_default_options = {"member-order": "bysource"}
autodoc_mock_imports = ["salt"]
# <---- Autodoc Config -----------------------------------------------------------------------------------------------
# Towncrier draft config
towncrier_draft_autoversion_mode = "sphinx-release"
towncrier_draft_include_empty = True
towncrier_draft_working_directory = str(PROJECT_ROOT_DIR)
def setup(app):
app.add_crossref_type(
directivename="fixture",
rolename="fixture",
indextemplate="pair: %s; fixture",
)
# Allow linking to pytest's confvals.
app.add_object_type(
"confval",
"pytest-confval",
objname="configuration value",
indextemplate="pair: %s; configuration value",
)
0707010000008B000081A400000000000000000000000167471E9C00000238000000000000000000000000000000000000003300000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/index.rst``saltext-mysql``: Integrate Salt with Mysql
============================================
Salt Extension for interacting with MySQL
.. toctree::
:maxdepth: 2
:caption: Guides
:hidden:
topics/installation
.. toctree::
:maxdepth: 2
:caption: Provided Modules
:hidden:
ref/auth/index
ref/cache/index
ref/modules/index
ref/pillar/index
ref/returners/index
ref/states/index
.. toctree::
:maxdepth: 2
:caption: Reference
:hidden:
changelog
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
0707010000008C000081A400000000000000000000000167471E9C000002F8000000000000000000000000000000000000003200000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/make.bat@ECHO OFF
pushd %~dp0
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=_build
if "%1" == "" goto help
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.http://sphinx-doc.org/
exit /b 1
)
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end
:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
:end
popd
0707010000008D000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000002D00000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/ref0707010000008E000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003200000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/ref/auth0707010000008F000081A400000000000000000000000167471E9C00000095000000000000000000000000000000000000003C00000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/ref/auth/index.rst.. all-saltext.mysql.auth:
____________
Auth Modules
____________
.. currentmodule:: saltext.mysql.auth
.. autosummary::
:toctree:
mysql
07070100000090000081A400000000000000000000000167471E9C0000004C000000000000000000000000000000000000004F00000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/ref/auth/saltext.mysql.auth.mysql.rst``mysql``
=========
.. automodule:: saltext.mysql.auth.mysql
:members:
07070100000091000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003300000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/ref/cache07070100000092000081A400000000000000000000000167471E9C000000A0000000000000000000000000000000000000003D00000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/ref/cache/index.rst.. all-saltext.mysql.cache:
_____________
Cache Modules
_____________
.. currentmodule:: saltext.mysql.cache
.. autosummary::
:toctree:
mysql_cache
07070100000093000081A400000000000000000000000167471E9C00000053000000000000000000000000000000000000005700000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/ref/cache/saltext.mysql.cache.mysql_cache.rst``mysql``
=========
.. automodule:: saltext.mysql.cache.mysql_cache
:members:
07070100000094000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003500000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/ref/modules07070100000095000081A400000000000000000000000167471E9C000000AA000000000000000000000000000000000000003F00000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/ref/modules/index.rst.. all-saltext.mysql.modules:
_________________
Execution Modules
_________________
.. currentmodule:: saltext.mysql.modules
.. autosummary::
:toctree:
mysql
07070100000096000081A400000000000000000000000167471E9C0000004F000000000000000000000000000000000000005500000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/ref/modules/saltext.mysql.modules.mysql.rst``mysql``
=========
.. automodule:: saltext.mysql.modules.mysql
:members:
07070100000097000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003400000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/ref/pillar07070100000098000081A400000000000000000000000167471E9C0000009F000000000000000000000000000000000000003E00000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/ref/pillar/index.rst.. all-saltext.mysql.pillar:
______________
Pillar Modules
______________
.. currentmodule:: saltext.mysql.pillar
.. autosummary::
:toctree:
mysql
07070100000099000081A400000000000000000000000167471E9C0000004E000000000000000000000000000000000000005300000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/ref/pillar/saltext.mysql.pillar.mysql.rst``mysql``
=========
.. automodule:: saltext.mysql.pillar.mysql
:members:
0707010000009A000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003700000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/ref/returners0707010000009B000081A400000000000000000000000167471E9C000000AB000000000000000000000000000000000000004100000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/ref/returners/index.rst.. all-saltext.mysql.returners:
________________
Returner Modules
________________
.. currentmodule:: saltext.mysql.returners
.. autosummary::
:toctree:
mysql
0707010000009C000081A400000000000000000000000167471E9C00000051000000000000000000000000000000000000005900000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/ref/returners/saltext.mysql.returners.mysql.rst``mysql``
=========
.. automodule:: saltext.mysql.returners.mysql
:members:
0707010000009D000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003400000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/ref/states0707010000009E000081A400000000000000000000000167471E9C000000D5000000000000000000000000000000000000003E00000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/ref/states/index.rst.. all-saltext.mysql.states:
_____________
State Modules
_____________
.. currentmodule:: saltext.mysql.states
.. autosummary::
:toctree:
mysql_database
mysql_grants
mysql_query
mysql_user
0707010000009F000081A400000000000000000000000167471E9C00000069000000000000000000000000000000000000005C00000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/ref/states/saltext.mysql.states.mysql_database.rst``mysql_database``
==================
.. automodule:: saltext.mysql.states.mysql_database
:members:
070701000000A0000081A400000000000000000000000167471E9C00000063000000000000000000000000000000000000005A00000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/ref/states/saltext.mysql.states.mysql_grants.rst``mysql_grants``
================
.. automodule:: saltext.mysql.states.mysql_grants
:members:
070701000000A1000081A400000000000000000000000167471E9C00000060000000000000000000000000000000000000005900000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/ref/states/saltext.mysql.states.mysql_query.rst``mysql_query``
===============
.. automodule:: saltext.mysql.states.mysql_query
:members:
070701000000A2000081A400000000000000000000000167471E9C0000005D000000000000000000000000000000000000005800000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/ref/states/saltext.mysql.states.mysql_user.rst``mysql_user``
==============
.. automodule:: saltext.mysql.states.mysql_user
:members:
070701000000A3000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000003600000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/sitevars.rst070701000000A4000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003000000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/topics070701000000A5000081A400000000000000000000000167471E9C00000211000000000000000000000000000000000000004000000000test-repo-1-0.1/saltext_mysql-1.0.0/docs/topics/installation.md# Installation
Generally, extensions need to be installed into the same Python environment Salt uses.
:::{tab} State
```yaml
Install Salt Mysql extension:
pip.installed:
- name: saltext-mysql
```
:::
:::{tab} Onedir installation
```bash
salt-pip install saltext-mysql
```
:::
:::{tab} Regular installation
```bash
pip install saltext-mysql
```
:::
:::{hint}
Saltexts are not distributed automatically via the fileserver like custom modules; they need to be installed
on each node where you want them to be available.
:::
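A quick way to confirm the extension is visible to the Salt loader after installation
(a sketch; the exact output depends on your installation):
```bash
# The extension's execution modules should appear in the loader's module list
salt-call --local sys.list_modules | grep mysql
```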
070701000000A6000081ED00000000000000000000000167471E9C00004528000000000000000000000000000000000000002F00000000test-repo-1-0.1/saltext_mysql-1.0.0/noxfile.py# pylint: disable=missing-module-docstring,import-error,protected-access,missing-function-docstring
import datetime
import json
import os
import pathlib
import shutil
import sys
import tempfile
from pathlib import Path
import nox
from nox.command import CommandFailed
from nox.virtualenv import VirtualEnv
# Nox options
# Reuse existing virtualenvs
nox.options.reuse_existing_virtualenvs = True
# Don't fail on missing interpreters
nox.options.error_on_missing_interpreters = False
# Python versions to test against
PYTHON_VERSIONS = ("3", "3.8", "3.9", "3.10", "3.11", "3.12")
# Be verbose when running under a CI context
CI_RUN = (
os.environ.get("JENKINS_URL") or os.environ.get("CI") or os.environ.get("DRONE") is not None
)
PIP_INSTALL_SILENT = CI_RUN is False
SKIP_REQUIREMENTS_INSTALL = os.environ.get("SKIP_REQUIREMENTS_INSTALL", "0") == "1"
EXTRA_REQUIREMENTS_INSTALL = os.environ.get("EXTRA_REQUIREMENTS_INSTALL")
COVERAGE_REQUIREMENT = os.environ.get("COVERAGE_REQUIREMENT") or "coverage==7.5.1"
SALT_REQUIREMENT = os.environ.get("SALT_REQUIREMENT") or "salt>=3006"
if SALT_REQUIREMENT == "salt==master":
SALT_REQUIREMENT = "git+https://github.com/saltstack/salt.git@master"
# Prevent Python from writing bytecode
os.environ["PYTHONDONTWRITEBYTECODE"] = "1"
# Global Path Definitions
REPO_ROOT = pathlib.Path(__file__).resolve().parent
# Change current directory to REPO_ROOT
os.chdir(str(REPO_ROOT))
ARTIFACTS_DIR = REPO_ROOT / "artifacts"
# Make sure the artifacts directory exists
ARTIFACTS_DIR.mkdir(parents=True, exist_ok=True)
CUR_TIME = datetime.datetime.now().strftime("%Y%m%d%H%M%S.%f")
RUNTESTS_LOGFILE = ARTIFACTS_DIR / f"runtests-{CUR_TIME}.log"
COVERAGE_REPORT_DB = REPO_ROOT / ".coverage"
COVERAGE_REPORT_PROJECT = ARTIFACTS_DIR.relative_to(REPO_ROOT) / "coverage-project.xml"
COVERAGE_REPORT_TESTS = ARTIFACTS_DIR.relative_to(REPO_ROOT) / "coverage-tests.xml"
JUNIT_REPORT = ARTIFACTS_DIR.relative_to(REPO_ROOT) / "junit-report.xml"
def _get_session_python_version_info(session):
try:
version_info = session._runner._real_python_version_info
except AttributeError:
session_py_version = session.run_always(
"python",
"-c",
'import sys; sys.stdout.write("{}.{}.{}".format(*sys.version_info))',
silent=True,
log=False,
)
version_info = tuple(int(part) for part in session_py_version.split(".") if part.isdigit())
session._runner._real_python_version_info = version_info
return version_info
def _get_pydir(session):
version_info = _get_session_python_version_info(session)
if version_info < (3, 8):
session.error("Only Python >= 3.8 is supported")
return f"py{version_info[0]}.{version_info[1]}"
def _install_requirements(
session,
*passed_requirements, # pylint: disable=unused-argument
install_coverage_requirements=True,
install_test_requirements=True,
install_source=False,
install_salt=True,
install_extras=None,
):
install_extras = install_extras or []
if SKIP_REQUIREMENTS_INSTALL is False:
# Always have the wheel package installed
session.install("--progress-bar=off", "wheel", silent=PIP_INSTALL_SILENT)
if install_coverage_requirements:
session.install("--progress-bar=off", COVERAGE_REQUIREMENT, silent=PIP_INSTALL_SILENT)
if install_salt:
session.install("--progress-bar=off", SALT_REQUIREMENT, silent=PIP_INSTALL_SILENT)
if install_test_requirements:
install_extras.append("tests")
if EXTRA_REQUIREMENTS_INSTALL:
session.log(
"Installing the following extra requirements because the "
"EXTRA_REQUIREMENTS_INSTALL environment variable was set: "
"EXTRA_REQUIREMENTS_INSTALL='%s'",
EXTRA_REQUIREMENTS_INSTALL,
)
install_command = ["--progress-bar=off"]
install_command += [req.strip() for req in EXTRA_REQUIREMENTS_INSTALL.split()]
session.install(*install_command, silent=PIP_INSTALL_SILENT)
if install_source:
pkg = "."
if install_extras:
pkg += f"[{','.join(install_extras)}]"
session.install("-e", pkg, silent=PIP_INSTALL_SILENT)
elif install_extras:
pkg = f".[{','.join(install_extras)}]"
session.install(pkg, silent=PIP_INSTALL_SILENT)
@nox.session(python=PYTHON_VERSIONS)
def tests(session):
_install_requirements(session, install_source=True)
sitecustomize_dir = session.run("salt-factories", "--coverage", silent=True, log=False)
python_path_env_var = os.environ.get("PYTHONPATH") or None
if python_path_env_var is None:
python_path_env_var = sitecustomize_dir
else:
python_path_entries = python_path_env_var.split(os.pathsep)
if sitecustomize_dir in python_path_entries:
python_path_entries.remove(sitecustomize_dir)
python_path_entries.insert(0, sitecustomize_dir)
python_path_env_var = os.pathsep.join(python_path_entries)
env = {
# The updated python path so that sitecustomize is importable
"PYTHONPATH": python_path_env_var,
# The full path to the .coverage data file. Makes sure we always write
# them to the same directory
"COVERAGE_FILE": str(COVERAGE_REPORT_DB),
# Instruct sub processes to also run under coverage
"COVERAGE_PROCESS_START": str(REPO_ROOT / ".coveragerc"),
}
session.run("coverage", "erase")
args = [
"--rootdir",
str(REPO_ROOT),
f"--log-file={RUNTESTS_LOGFILE.relative_to(REPO_ROOT)}",
"--log-file-level=debug",
"--show-capture=no",
f"--junitxml={JUNIT_REPORT}",
"--showlocals",
"-ra",
"-s",
]
if session._runner.global_config.forcecolor:
args.append("--color=yes")
if not session.posargs:
args.append("tests/")
else:
for arg in session.posargs:
if arg.startswith("--color") and args[0].startswith("--color"):
args.pop(0)
args.append(arg)
for arg in session.posargs:
if arg.startswith("-"):
continue
if arg.startswith(f"tests{os.sep}"):
break
try:
pathlib.Path(arg).resolve().relative_to(REPO_ROOT / "tests")
break
except ValueError:
continue
else:
args.append("tests/")
try:
session.run("coverage", "run", "-m", "pytest", *args, env=env)
finally:
# Always combine and generate the XML coverage report
try:
session.run("coverage", "combine")
except CommandFailed:
# Sometimes some of the coverage files are corrupt which would
# trigger a CommandFailed exception
pass
# Generate report for salt code coverage
session.run(
"coverage",
"xml",
"-o",
str(COVERAGE_REPORT_PROJECT),
"--omit=tests/*",
"--include=src/saltext/mysql/*",
)
# Generate report for tests code coverage
session.run(
"coverage",
"xml",
"-o",
str(COVERAGE_REPORT_TESTS),
"--omit=src/saltext/mysql/*",
"--include=tests/*",
)
try:
session.run("coverage", "report", "--show-missing", "--include=src/saltext/mysql/*")
# If you also want to display the code coverage report on the CLI
# for the tests, comment the call above and uncomment the line below
# session.run(
# "coverage", "report", "--show-missing",
# "--include=src/saltext/mysql/*,tests/*"
# )
finally:
# Move the coverage DB to artifacts/coverage in order for it to be archived by CI
if COVERAGE_REPORT_DB.exists():
shutil.move(str(COVERAGE_REPORT_DB), str(ARTIFACTS_DIR / COVERAGE_REPORT_DB.name))
class Tee:
"""
Python class to mimic linux tee behaviour
"""
def __init__(self, first, second):
self._first = first
self._second = second
def write(self, buf):
wrote = self._first.write(buf)
self._first.flush()
self._second.write(buf)
self._second.flush()
return wrote
def fileno(self):
return self._first.fileno()
def _lint(session, rcfile, flags, paths, tee_output=True):
_install_requirements(
session,
install_salt=False,
install_coverage_requirements=False,
install_test_requirements=False,
install_extras=["dev", "tests"],
)
if tee_output:
session.run("pylint", "--version")
pylint_report_path = os.environ.get("PYLINT_REPORT")
cmd_args = ["pylint", f"--rcfile={rcfile}"] + list(flags) + list(paths)
src_path = str(REPO_ROOT / "src")
python_path_env_var = os.environ.get("PYTHONPATH") or None
if python_path_env_var is None:
python_path_env_var = src_path
else:
python_path_entries = python_path_env_var.split(os.pathsep)
if src_path in python_path_entries:
python_path_entries.remove(src_path)
python_path_entries.insert(0, src_path)
python_path_env_var = os.pathsep.join(python_path_entries)
env = {
# The updated python path so that the project is importable without installing it
"PYTHONPATH": python_path_env_var,
"PYTHONUNBUFFERED": "1",
}
cmd_kwargs = {"env": env}
if tee_output:
stdout = tempfile.TemporaryFile(mode="w+b")
cmd_kwargs["stdout"] = Tee(stdout, sys.__stdout__)
try:
session.run(*cmd_args, **cmd_kwargs)
finally:
if tee_output:
stdout.seek(0)
contents = stdout.read()
if contents:
contents = contents.decode("utf-8")
sys.stdout.write(contents)
sys.stdout.flush()
if pylint_report_path:
# Write report
with open(pylint_report_path, "w", encoding="utf-8") as wfh:
wfh.write(contents)
session.log("Report file written to %r", pylint_report_path)
stdout.close()
def _lint_pre_commit(session, rcfile, flags, paths):
if "VIRTUAL_ENV" not in os.environ:
session.error(
"This should be running from within a virtualenv and "
"'VIRTUAL_ENV' was not found as an environment variable."
)
if "pre-commit" not in os.environ["VIRTUAL_ENV"]:
session.error(
"This should be running from within a pre-commit virtualenv and "
f"'VIRTUAL_ENV'({os.environ['VIRTUAL_ENV']}) does not appear to be a pre-commit virtualenv."
)
# Let's patch nox to make it run inside the pre-commit virtualenv
session._runner.venv = VirtualEnv(
os.environ["VIRTUAL_ENV"],
interpreter=session._runner.func.python,
reuse_existing=True,
venv=True,
)
_lint(session, rcfile, flags, paths, tee_output=False)
@nox.session(python="3")
def lint(session):
"""
Run PyLint against the code and the test suite. Set PYLINT_REPORT to a path to capture output.
"""
session.notify(f"lint-code-{session.python}")
session.notify(f"lint-tests-{session.python}")
@nox.session(python="3", name="lint-code")
def lint_code(session):
"""
Run PyLint against the code. Set PYLINT_REPORT to a path to capture output.
"""
flags = ["--disable=I"]
if session.posargs:
paths = session.posargs
else:
paths = ["setup.py", "noxfile.py", "src/"]
_lint(session, ".pylintrc", flags, paths)
@nox.session(python="3", name="lint-tests")
def lint_tests(session):
"""
Run PyLint against the test suite. Set PYLINT_REPORT to a path to capture output.
"""
flags = [
"--disable=I,redefined-outer-name,missing-function-docstring,no-member,missing-module-docstring"
]
if session.posargs:
paths = session.posargs
else:
paths = ["tests/"]
_lint(session, ".pylintrc", flags, paths)
@nox.session(python=False, name="lint-code-pre-commit")
def lint_code_pre_commit(session):
"""
Run PyLint against the code. Set PYLINT_REPORT to a path to capture output.
"""
flags = ["--disable=I"]
if session.posargs:
paths = session.posargs
else:
paths = ["setup.py", "noxfile.py", "src/"]
_lint_pre_commit(session, ".pylintrc", flags, paths)
@nox.session(python=False, name="lint-tests-pre-commit")
def lint_tests_pre_commit(session):
"""
Run PyLint against the code and the test suite. Set PYLINT_REPORT to a path to capture output.
"""
flags = [
"--disable=I,redefined-outer-name,missing-function-docstring,no-member,missing-module-docstring",
]
if session.posargs:
paths = session.posargs
else:
paths = ["tests/"]
_lint_pre_commit(session, ".pylintrc", flags, paths)
@nox.session(python="3")
def docs(session):
"""
Build Docs
"""
_install_requirements(
session,
install_coverage_requirements=False,
install_test_requirements=False,
install_source=True,
install_extras=["docs"],
)
os.chdir("docs/")
session.run("make", "clean", external=True)
session.run("make", "linkcheck", "SPHINXOPTS=-W", external=True)
session.run("make", "coverage", "SPHINXOPTS=-W", external=True)
docs_coverage_file = os.path.join("_build", "html", "python.txt")
if os.path.exists(docs_coverage_file):
with open(docs_coverage_file) as rfh: # pylint: disable=unspecified-encoding
contents = rfh.readlines()[2:]
if contents:
session.error("\n" + "".join(contents))
session.run("make", "html", "SPHINXOPTS=-W", external=True)
os.chdir(str(REPO_ROOT))
@nox.session(name="docs-html", python="3")
@nox.parametrize("clean", [False, True])
@nox.parametrize("include_api_docs", [False, True])
def docs_html(session, clean, include_api_docs):
"""
Build Sphinx HTML Documentation
TODO: Add option for `make linkcheck` and `make coverage`
calls via Sphinx. Ran into problems with two when
using Furo theme and latest Sphinx.
"""
_install_requirements(
session,
install_coverage_requirements=False,
install_test_requirements=False,
install_source=True,
install_extras=["docs"],
)
if include_api_docs:
gen_api_docs(session)
build_dir = Path("docs", "_build", "html")
sphinxopts = "-Wn"
if clean:
sphinxopts += "E"
args = [sphinxopts, "--keep-going", "docs", str(build_dir)]
session.run("sphinx-build", *args, external=True)
@nox.session(name="docs-dev", python="3")
@nox.parametrize("clean", [False, True])
def docs_dev(session, clean) -> None:
"""
Build and serve the Sphinx HTML documentation, with live reloading on file changes, via sphinx-autobuild.
Note: Only use this in INTERACTIVE DEVELOPMENT MODE. This SHOULD NOT be called
in CI/CD pipelines, as it will hang.
"""
_install_requirements(
session,
install_coverage_requirements=False,
install_test_requirements=False,
install_source=True,
install_extras=["docs", "docsauto"],
)
# Launching LIVE reloading Sphinx session
build_dir = Path("docs", "_build", "html")
args = ["--watch", ".", "--open-browser", "docs", str(build_dir)]
if clean and build_dir.exists():
shutil.rmtree(build_dir)
session.run("sphinx-autobuild", *args)
@nox.session(name="docs-crosslink-info", python="3")
def docs_crosslink_info(session):
"""
Report intersphinx cross links information
"""
_install_requirements(
session,
install_coverage_requirements=False,
install_test_requirements=False,
install_source=True,
install_extras=["docs"],
)
os.chdir("docs/")
intersphinx_mapping = json.loads(
session.run(
"python",
"-c",
"import json; import conf; print(json.dumps(conf.intersphinx_mapping))",
silent=True,
log=False,
)
)
intersphinx_mapping_list = ", ".join(list(intersphinx_mapping))
try:
mapping_entry = intersphinx_mapping[session.posargs[0]]
except IndexError:
session.error(
f"You need to pass at least one argument whose value must be one of: {intersphinx_mapping_list}"
)
except KeyError:
session.error(f"Only acceptable values for first argument are: {intersphinx_mapping_list}")
session.run(
"python", "-m", "sphinx.ext.intersphinx", mapping_entry[0].rstrip("/") + "/objects.inv"
)
os.chdir(str(REPO_ROOT))
@nox.session(name="gen-api-docs", python="3")
def gen_api_docs(session):
"""
Generate API Docs
"""
_install_requirements(
session,
install_coverage_requirements=False,
install_test_requirements=False,
install_source=True,
install_extras=["docs"],
)
try:
shutil.rmtree("docs/ref")
except FileNotFoundError:
pass
session.run(
"sphinx-apidoc",
"--implicit-namespaces",
"--module-first",
"-o",
"docs/ref/",
"src/saltext",
"src/saltext/mysql/config/schemas",
)
070701000000A7000081A400000000000000000000000167471E9C00000EA8000000000000000000000000000000000000003300000000test-repo-1-0.1/saltext_mysql-1.0.0/pyproject.toml[build-system]
requires = [
"wheel",
"setuptools>=50.3.2",
"setuptools_scm[toml]>=3.4",
]
build-backend = "setuptools.build_meta"
[tool.setuptools_scm]
write_to = "src/saltext/mysql/version.py"
write_to_template = "__version__ = \"{version}\""
[project]
name = "saltext.mysql"
description = "Salt Extension for interacting with MySQL"
authors = [
{name = "Salt Core Team", email = "saltproject@vmware.com"},
]
keywords = [
"salt-extension",
]
license = {text = "Apache Software License"}
classifiers = [
"Programming Language :: Python",
"Programming Language :: Cython",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
]
requires-python = ">= 3.8"
dynamic = ["version"]
dependencies = [
"salt>=3006",
"sqlparse",
]
[project.readme]
file = "README.md"
content-type = "text/markdown"
[project.urls]
Homepage = "https://github.com/salt-extensions/saltext-mysql"
Documentation = "https://salt-extensions.github.io/saltext-mysql/"
Source = "https://github.com/salt-extensions/saltext-mysql"
Tracker = "https://github.com/salt-extensions/saltext-mysql/issues"
[project.optional-dependencies]
changelog = ["towncrier==22.12.0"]
dev = [
"nox",
"pre-commit>=2.4.0",
"pylint",
"saltpylint",
]
docs = [
"sphinx",
"sphinx-prompt",
"sphinxcontrib-spelling",
"sphinx-copybutton",
"towncrier==22.12.0",
"sphinxcontrib-towncrier",
"myst_parser",
"furo",
"sphinx-inline-tabs",
]
docsauto = ["sphinx-autobuild"]
lint = [
"pylint",
"saltpylint",
]
tests = [
"pytest>=7.2.0",
"pytest-salt-factories>=1.0.0; sys_platform == 'win32'",
"pytest-salt-factories[docker]>=1.0.0; sys_platform != 'win32'",
"pytest-subtests",
"pymysql", # Let's use a pure Python module for testing.
"cryptography", # required by pymysql for sha256_password/caching_sha2_password auth method
]
pymysql = [
"pymysql",
]
mysqlclient = [
"mysqlclient",
]
[project.entry-points."salt.loader"]
"saltext.mysql" = "saltext.mysql"
[tool.setuptools]
zip-safe = false
include-package-data = true
platforms = ["any"]
[tool.setuptools.packages.find]
where = ["src"]
exclude = ["tests"]
[tool.distutils.bdist_wheel]
# Use this option if your package is pure-python
universal = 1
[tool.distutils.sdist]
owner = "root"
group = "root"
[tool.build_sphinx]
source_dir = "docs"
build_dir = "build/sphinx"
[tool.black]
line-length = 100
[tool.isort]
force_single_line = true
skip = ["src/saltext/mysql/__init__.py"]
profile = "black"
line_length = 100
[tool.towncrier]
package = "saltext.mysql"
filename = "CHANGELOG.md"
template = "changelog/.template.jinja"
directory = "changelog/"
start_string = "# Changelog\n"
underlines = ["", "", ""]
title_format = "## {version} ({project_date})"
issue_format = "[#{issue}](https://github.com/salt-extensions/saltext-mysql/issues/{issue})"
[[tool.towncrier.type]]
directory = "removed"
name = "Removed"
showcontent = true
[[tool.towncrier.type]]
directory = "deprecated"
name = "Deprecated"
showcontent = true
[[tool.towncrier.type]]
directory = "changed"
name = "Changed"
showcontent = true
[[tool.towncrier.type]]
directory = "fixed"
name = "Fixed"
showcontent = true
[[tool.towncrier.type]]
directory = "added"
name = "Added"
showcontent = true
[[tool.towncrier.type]]
directory = "security"
name = "Security"
showcontent = true
070701000000A8000081A400000000000000000000000167471E9C00000026000000000000000000000000000000000000002E00000000test-repo-1-0.1/saltext_mysql-1.0.0/setup.cfg[egg_info]
tag_build =
tag_date = 0
070701000000A9000081A400000000000000000000000167471E9C00000084000000000000000000000000000000000000002D00000000test-repo-1-0.1/saltext_mysql-1.0.0/setup.py# pylint: disable=missing-module-docstring
import setuptools
if __name__ == "__main__":
setuptools.setup(use_scm_version=True)
070701000000AA000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000002800000000test-repo-1-0.1/saltext_mysql-1.0.0/src070701000000AB000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003000000000test-repo-1-0.1/saltext_mysql-1.0.0/src/saltext070701000000AC000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003600000000test-repo-1-0.1/saltext_mysql-1.0.0/src/saltext/mysql070701000000AD000081A400000000000000000000000167471E9C0000035F000000000000000000000000000000000000004200000000test-repo-1-0.1/saltext_mysql-1.0.0/src/saltext/mysql/__init__.py# pylint: disable=missing-module-docstring
import pathlib
PACKAGE_ROOT = pathlib.Path(__file__).resolve().parent
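# Version resolution (descriptive note): fall back to the setuptools_scm-generated
# version module first, then prefer the installed distribution metadata
# (importlib.metadata on Python 3.8+, pkg_resources otherwise) when the package
# is actually installed.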
try:
from .version import __version__
except ImportError: # pragma: no cover
__version__ = "0.0.0.not-installed"
try:
from importlib.metadata import version, PackageNotFoundError
try:
__version__ = version(__name__)
except PackageNotFoundError:
# package is not installed
pass
except ImportError:
try:
from pkg_resources import get_distribution, DistributionNotFound
try:
__version__ = get_distribution(__name__).version
except DistributionNotFound:
# package is not installed
pass
except ImportError:
# pkg resources isn't even available?!
pass
070701000000AE000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003B00000000test-repo-1-0.1/saltext_mysql-1.0.0/src/saltext/mysql/auth070701000000AF000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004700000000test-repo-1-0.1/saltext_mysql-1.0.0/src/saltext/mysql/auth/__init__.py070701000000B0000081A400000000000000000000000167471E9C00000C32000000000000000000000000000000000000004400000000test-repo-1-0.1/saltext_mysql-1.0.0/src/saltext/mysql/auth/mysql.py"""
Provide authentication using MySQL.
When using MySQL as an authentication backend, you will need to create or
use an existing table that has a username and a password column.
To get started, create a simple table that holds just a username and
a password. The password field will hold a SHA256 checksum.
.. code-block:: sql
CREATE TABLE `users` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`username` varchar(25) DEFAULT NULL,
`password` varchar(70) DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=latin1;
To create a user within MySQL, execute the following statement.
.. code-block:: sql
INSERT INTO users VALUES (NULL, 'diana', SHA2('secret', 256))
.. code-block:: yaml
mysql_auth:
hostname: localhost
database: SaltStack
username: root
password: letmein
auth_sql: 'SELECT username FROM users WHERE username = "{0}" AND password = SHA2("{1}", 256)'
The `auth_sql` contains the SQL that will validate a user to ensure they are
correctly authenticated. This is where you can specify other SQL queries to
authenticate users.
Enable MySQL authentication.
.. code-block:: yaml
external_auth:
mysql:
damian:
- test.*
:depends: - MySQLdb (mysqlclient) or PyMySQL Python module
"""
import logging
log = logging.getLogger(__name__)
try:
# Trying to import MySQLdb
import MySQLdb
import MySQLdb.converters
import MySQLdb.cursors
from MySQLdb.connections import OperationalError
except ImportError:
try:
# MySQLdb import failed, try to import PyMySQL
import pymysql
pymysql.install_as_MySQLdb()
import MySQLdb
import MySQLdb.converters
import MySQLdb.cursors
from MySQLdb.err import OperationalError
except ImportError:
MySQLdb = None
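# __virtual__ below returns a (status, reason) tuple as Salt's loader expects:
# (True, "") when a MySQL client library could be imported, and
# (False, "No python mysql client installed.") otherwise.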
def __virtual__():
"""
Confirm that a python mysql client is installed.
"""
return bool(MySQLdb), "No python mysql client installed." if MySQLdb is None else ""
def __get_connection_info():
"""
Grab MySQL Connection Details
"""
conn_info = {}
try:
conn_info["hostname"] = __opts__["mysql_auth"]["hostname"]
conn_info["username"] = __opts__["mysql_auth"]["username"]
conn_info["password"] = __opts__["mysql_auth"]["password"]
conn_info["database"] = __opts__["mysql_auth"]["database"]
conn_info["auth_sql"] = __opts__["mysql_auth"]["auth_sql"]
except KeyError as e:
log.error("%s does not exist", e)
return None
return conn_info
def auth(username, password):
"""
Authenticate using a MySQL user table
"""
_info = __get_connection_info()
if _info is None:
return False
try:
conn = MySQLdb.connect(
_info["hostname"], _info["username"], _info["password"], _info["database"]
)
except OperationalError as e:
log.error(e)
return False
cur = conn.cursor()
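# The configured auth_sql is filled in with str.format(): placeholder {0}
# receives the username and {1} the password, as in the example configuration
# in the module docstring.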
cur.execute(_info["auth_sql"].format(username, password))
if cur.rowcount == 1:
return True
return False
070701000000B1000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003C00000000test-repo-1-0.1/saltext_mysql-1.0.0/src/saltext/mysql/cache070701000000B2000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004800000000test-repo-1-0.1/saltext_mysql-1.0.0/src/saltext/mysql/cache/__init__.py070701000000B3000081A400000000000000000000000167471E9C000029C4000000000000000000000000000000000000004B00000000test-repo-1-0.1/saltext_mysql-1.0.0/src/saltext/mysql/cache/mysql_cache.py"""
Minion data cache plugin for MySQL database.
.. versionadded:: 2018.3.0
It is up to the system administrator to set up and configure the MySQL
infrastructure. All that is needed for this plugin is a working MySQL server.
.. warning::
The mysql.database and mysql.table_name will be directly added into certain
queries. Salt treats these as trusted input.
The module requires the database (default ``salt_cache``) to exist but creates
its own table if needed. The keys are indexed using the ``bank`` and
``etcd_key`` columns.
To enable this cache plugin, the master will need the python client for
MySQL installed. This can be easily installed with pip:
.. code-block:: bash
pip install pymysql
Optionally, depending on the MySQL server configuration, the following values
can be set in the master config. These are the defaults:
.. code-block:: yaml
mysql.host: 127.0.0.1
mysql.port: 3306
mysql.user: None
mysql.password: None
mysql.database: salt_cache
mysql.table_name: cache
# This may be enabled to create a fresh connection on every call
mysql.fresh_connection: false
To use MySQL as a minion data cache backend, set the master ``cache`` config
value to ``mysql``:
.. code-block:: yaml
cache: mysql
"""
import copy
import logging
import time
import salt.payload
import salt.utils.stringutils
from salt.exceptions import SaltCacheError
try:
# Trying to import MySQLdb
import MySQLdb
import MySQLdb.converters
import MySQLdb.cursors
from MySQLdb.connections import OperationalError
# Define the interface error as a subclass of exception
# It will never be thrown/used, it is defined to support the pymysql error below
class InterfaceError(Exception):
pass
except ImportError:
try:
# MySQLdb import failed, try to import PyMySQL
import pymysql
from pymysql.err import InterfaceError
pymysql.install_as_MySQLdb()
import MySQLdb
import MySQLdb.converters
import MySQLdb.cursors
from MySQLdb.err import OperationalError
except ImportError:
MySQLdb = None
_DEFAULT_DATABASE_NAME = "salt_cache"
_DEFAULT_CACHE_TABLE_NAME = "cache"
_RECONNECT_INTERVAL_SEC = 0.050
log = logging.getLogger(__name__)
# Module properties
__virtualname__ = "mysql"
__func_alias__ = {"ls": "list"}
def __virtual__():
"""
Confirm that a python mysql client is installed.
"""
return bool(MySQLdb), "No python mysql client installed." if MySQLdb is None else ""
def force_reconnect():
"""
Force a reconnection to the MySQL database, by removing the client from
Salt's __context__.
"""
__context__.pop("mysql_client", None)
def run_query(conn, query, args=None, retries=3):
"""
Get a cursor and run a query. Reconnect up to ``retries`` times if
needed.
Returns: cursor, affected rows counter
Raises: SaltCacheError, AttributeError, OperationalError, InterfaceError
"""
if __context__.get("mysql_fresh_connection"):
# Create a new connection if configured
conn = MySQLdb.connect(**__context__["mysql_kwargs"])
__context__["mysql_client"] = conn
if conn is None:
conn = __context__.get("mysql_client")
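# Fall back to the client cached in __context__ by a previous call; on the
# very first query this may still be None, in which case conn.cursor() below
# raises AttributeError and the reconnect branch creates the client.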
try:
cur = conn.cursor()
if not args:
log.debug("Doing query: %s", query)
out = cur.execute(query)
else:
log.debug("Doing query: %s args: %s ", query, repr(args))
out = cur.execute(query, args)
return cur, out
except (AttributeError, OperationalError, InterfaceError) as e:
if retries == 0:
raise
# reconnect creating new client
time.sleep(_RECONNECT_INTERVAL_SEC)
if conn is None:
log.debug("mysql_cache: creating db connection")
else:
log.info("mysql_cache: recreating db connection due to: %r", e)
__context__["mysql_client"] = MySQLdb.connect(**__context__["mysql_kwargs"])
return run_query(
conn=__context__.get("mysql_client"),
query=query,
args=args,
retries=(retries - 1),
)
except Exception as e: # pylint: disable=broad-except
if len(query) > 150:
query = query[:150] + "<...>"
raise SaltCacheError(
"Error running {}{}: {}".format(query, f"- args: {args}" if args else "", e)
) from e
def _create_table():
"""
Create table if needed
"""
# Explicitly check if the table already exists as the library logs a
# warning on CREATE TABLE
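# Overall flow: check whether the cache table exists; if it does, make sure it
# has the last_update column (older tables are ALTERed to add it); otherwise
# create the table from scratch.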
query = """SELECT COUNT(TABLE_NAME) FROM information_schema.tables
WHERE table_schema = %s AND table_name = %s"""
cur, _ = run_query(
__context__.get("mysql_client"),
query,
args=(__context__["mysql_kwargs"]["db"], __context__["mysql_table_name"]),
)
r = cur.fetchone()
cur.close()
if r[0] == 1:
query = """
SELECT COUNT(TABLE_NAME)
FROM
information_schema.columns
WHERE
table_schema = %s
AND table_name = %s
AND column_name = 'last_update'
"""
cur, _ = run_query(
__context__["mysql_client"],
query,
args=(__context__["mysql_kwargs"]["db"], __context__["mysql_table_name"]),
)
r = cur.fetchone()
cur.close()
if r[0] == 1:
return
else:
query = """
ALTER TABLE {}.{}
ADD COLUMN last_update TIMESTAMP NOT NULL
DEFAULT CURRENT_TIMESTAMP
ON UPDATE CURRENT_TIMESTAMP
""".format(
__context__["mysql_kwargs"]["db"], __context__["mysql_table_name"]
)
cur, _ = run_query(__context__["mysql_client"], query)
cur.close()
return
query = """CREATE TABLE IF NOT EXISTS {} (
bank CHAR(255),
etcd_key CHAR(255),
data MEDIUMBLOB,
last_update TIMESTAMP NOT NULL
DEFAULT CURRENT_TIMESTAMP
ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY(bank, etcd_key)
);""".format(
__context__["mysql_table_name"]
)
log.info("mysql_cache: creating table %s", __context__["mysql_table_name"])
cur, _ = run_query(__context__.get("mysql_client"), query)
cur.close()
def _init_client():
"""Initialize connection and create table if needed"""
if __context__.get("mysql_client") is not None:
return
opts = copy.deepcopy(__opts__)
mysql_kwargs = {
"autocommit": True,
"host": opts.pop("mysql.host", "127.0.0.1"),
"user": opts.pop("mysql.user", None),
"passwd": opts.pop("mysql.password", None),
"db": opts.pop("mysql.database", _DEFAULT_DATABASE_NAME),
"port": opts.pop("mysql.port", 3306),
"unix_socket": opts.pop("mysql.unix_socket", None),
"connect_timeout": opts.pop("mysql.connect_timeout", None),
}
mysql_kwargs["autocommit"] = True
__context__["mysql_table_name"] = opts.pop("mysql.table_name", _DEFAULT_CACHE_TABLE_NAME)
__context__["mysql_fresh_connection"] = opts.pop("mysql.fresh_connection", False)
# Gather up any additional MySQL configuration options
for k in opts:
if k.startswith("mysql."):
_key = k.split(".")[1]
mysql_kwargs[_key] = opts.get(k)
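# For example, a master config entry such as "mysql.charset: utf8mb4"
# (illustrative) ends up as charset="utf8mb4" in the MySQLdb.connect() call
# that run_query() builds from __context__["mysql_kwargs"].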
# TODO: handle SSL connection parameters
for k, v in copy.deepcopy(mysql_kwargs).items():
if v is None:
mysql_kwargs.pop(k)
kwargs_copy = mysql_kwargs.copy()
kwargs_copy["passwd"] = "<hidden>"
log.info("mysql_cache: Setting up client with params: %r", kwargs_copy)
__context__["mysql_kwargs"] = mysql_kwargs
# The MySQL client is created later on by run_query
_create_table()
def store(bank, key, data):
"""
Store a key value.
"""
_init_client()
data = salt.payload.dumps(data)
query = "REPLACE INTO {} (bank, etcd_key, data) values(%s,%s,%s)".format(
__context__["mysql_table_name"]
)
args = (bank, key, data)
cur, cnt = run_query(__context__.get("mysql_client"), query, args=args)
cur.close()
if cnt not in (1, 2):
raise SaltCacheError(f"Error storing {bank} {key} returned {cnt}")
def fetch(bank, key):
"""
Fetch a key value.
"""
_init_client()
query = "SELECT data FROM {} WHERE bank=%s AND etcd_key=%s".format(
__context__["mysql_table_name"]
)
cur, _ = run_query(__context__.get("mysql_client"), query, args=(bank, key))
r = cur.fetchone()
cur.close()
if r is None:
return {}
return salt.payload.loads(r[0])
def flush(bank, key=None):
"""
Remove the key from the cache bank with all the key content.
"""
_init_client()
query = "DELETE FROM {} WHERE bank=%s".format(__context__["mysql_table_name"])
if key is None:
data = (bank,)
else:
data = (bank, key)
query += " AND etcd_key=%s"
cur, _ = run_query(__context__.get("mysql_client"), query, args=data)
cur.close()
def ls(bank):
"""
Return an iterable object containing all entries stored in the specified
bank.
"""
_init_client()
query = "SELECT etcd_key FROM {} WHERE bank=%s".format(__context__["mysql_table_name"])
cur, _ = run_query(__context__.get("mysql_client"), query, args=(bank,))
out = [row[0] for row in cur.fetchall()]
cur.close()
return out
def contains(bank, key):
"""
Checks if the specified bank contains the specified key.
"""
_init_client()
if key is None:
data = (bank,)
query = "SELECT COUNT(data) FROM {} WHERE bank=%s".format(__context__["mysql_table_name"])
else:
data = (bank, key)
query = "SELECT COUNT(data) FROM {} WHERE bank=%s AND etcd_key=%s".format(
__context__["mysql_table_name"]
)
cur, _ = run_query(__context__.get("mysql_client"), query, args=data)
r = cur.fetchone()
cur.close()
return r[0] == 1
def updated(bank, key):
"""
Return the integer Unix epoch update timestamp of the specified bank and
key.
"""
_init_client()
query = "SELECT UNIX_TIMESTAMP(last_update) FROM {} WHERE bank=%s AND etcd_key=%s".format(
__context__["mysql_table_name"]
)
data = (bank, key)
cur, _ = run_query(__context__.get("mysql_client"), query=query, args=data)
r = cur.fetchone()
cur.close()
return int(r[0]) if r else r
070701000000B4000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003E00000000test-repo-1-0.1/saltext_mysql-1.0.0/src/saltext/mysql/modules070701000000B5000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004A00000000test-repo-1-0.1/saltext_mysql-1.0.0/src/saltext/mysql/modules/__init__.py070701000000B6000081A400000000000000000000000167471E9C000168AC000000000000000000000000000000000000004700000000test-repo-1-0.1/saltext_mysql-1.0.0/src/saltext/mysql/modules/mysql.py"""
Module to provide MySQL compatibility to salt.
:depends: - Python module: MySQLdb, mysqlclient, or PyMySQL
:configuration: In order to connect to MySQL, certain configuration is required
in either the relevant minion config (/etc/salt/minion), or pillar.
Some sample configs might look like::
mysql.host: 'localhost'
mysql.port: 3306
mysql.user: 'root'
mysql.pass: ''
mysql.db: 'mysql'
mysql.unix_socket: '/tmp/mysql.sock'
mysql.charset: 'utf8'
You can also use a defaults file::
mysql.default_file: '/etc/mysql/debian.cnf'
.. versionchanged:: 2014.1.0
\'charset\' connection argument added. This is a MySQL charset, not a python one.
.. versionchanged:: 0.16.2
Connection arguments from the minion config file can be overridden on the
CLI by using the arguments defined :mod:`here <salt.states.mysql_user>`.
Additionally, it is now possible to set up a user with no password.
"""
import copy
import hashlib
import logging
import os
import re
import shlex
import sys
import time
import salt.utils.data
import salt.utils.files
import salt.utils.stringutils
import salt.utils.versions
try:
# Trying to import MySQLdb
import MySQLdb
import MySQLdb.converters
import MySQLdb.cursors
from MySQLdb import OperationalError
from MySQLdb.constants import CLIENT
from MySQLdb.constants import FIELD_TYPE
from MySQLdb.constants import FLAG
except ImportError:
try:
# MySQLdb import failed, try to import PyMySQL
import pymysql
pymysql.install_as_MySQLdb()
import MySQLdb
import MySQLdb.converters
import MySQLdb.cursors
from MySQLdb import OperationalError
from MySQLdb.constants import CLIENT
from MySQLdb.constants import FIELD_TYPE
from MySQLdb.constants import FLAG
except ImportError:
MySQLdb = None
try:
import sqlparse
HAS_SQLPARSE = True
except ImportError:
HAS_SQLPARSE = False
log = logging.getLogger(__name__)
__grants__ = [
"ALL PRIVILEGES",
"ALTER",
"ALTER ROUTINE",
"BACKUP_ADMIN",
"BINLOG_ADMIN",
"BINLOG ADMIN", # MariaDB since 10.5.2
"BINLOG MONITOR", # MariaDB since 10.5.2
"BINLOG REPLAY", # MariaDB since 10.5.2
"CONNECTION_ADMIN",
"CONNECTION ADMIN", # MariaDB since 10.5.2
"CREATE",
"CREATE ROLE",
"CREATE ROUTINE",
"CREATE TABLESPACE",
"CREATE TEMPORARY TABLES",
"CREATE USER",
"CREATE VIEW",
"DELETE",
"DELETE HISTORY", # MariaDB since 10.3.4
"DROP",
"DROP ROLE",
"ENCRYPTION_KEY_ADMIN",
"EVENT",
"EXECUTE",
"FEDERATED ADMIN", # MariaDB since 10.5.2
"FILE",
"GRANT OPTION",
"GROUP_REPLICATION_ADMIN",
"INDEX",
"INSERT",
"LOCK TABLES",
"PERSIST_RO_VARIABLES_ADMIN",
"PROCESS",
"READ_ONLY ADMIN", # MariaDB since 10.5.2
"REFERENCES",
"RELOAD",
"REPLICA MONITOR", # MariaDB since 10.5.9
"REPLICATION CLIENT",
"REPLICATION MASTER ADMIN", # MariaDB since 10.5.2
"REPLICATION REPLICA", # MariaDB since 10.5.1
"REPLICATION SLAVE",
"REPLICATION_SLAVE_ADMIN",
"REPLICATION SLAVE ADMIN", # MariaDB since 10.5.2
"RESOURCE_GROUP_ADMIN",
"RESOURCE_GROUP_USER",
"ROLE_ADMIN",
"SELECT",
"SET USER", # MariaDB since 10.5.2
"SET_USER_ID",
"SERVICE_CONNECTION_ADMIN", # # MySQL since 8.0.14
"SHOW DATABASES",
"SHOW VIEW",
"SHUTDOWN",
"SLAVE MONITOR", # MariaDB since 10.5.9
"SUPER",
"SYSTEM_VARIABLES_ADMIN",
"TRIGGER",
"UPDATE",
"USAGE",
"XA_RECOVER_ADMIN",
]
__ssl_options_parameterized__ = ["CIPHER", "ISSUER", "SUBJECT"]
__ssl_options__ = __ssl_options_parameterized__ + ["SSL", "X509"]
__all_privileges__ = [
"ALTER",
"ALTER ROUTINE",
"BACKUP_ADMIN",
"BINLOG_ADMIN",
"CONNECTION_ADMIN",
"CREATE",
"CREATE ROLE",
"CREATE ROUTINE",
"CREATE TABLESPACE",
"CREATE TEMPORARY TABLES",
"CREATE USER",
"CREATE VIEW",
"DELETE",
"DROP",
"DROP ROLE",
"ENCRYPTION_KEY_ADMIN",
"EVENT",
"EXECUTE",
"FILE",
"GROUP_REPLICATION_ADMIN",
"INDEX",
"INSERT",
"LOCK TABLES",
"PERSIST_RO_VARIABLES_ADMIN",
"PROCESS",
"REFERENCES",
"RELOAD",
"REPLICATION CLIENT",
"REPLICATION SLAVE",
"REPLICATION_SLAVE_ADMIN",
"RESOURCE_GROUP_ADMIN",
"RESOURCE_GROUP_USER",
"ROLE_ADMIN",
"SELECT",
"SET_USER_ID",
"SHOW DATABASES",
"SHOW VIEW",
"SHUTDOWN",
"SUPER",
"SYSTEM_VARIABLES_ADMIN",
"TRIGGER",
"UPDATE",
"XA_RECOVER_ADMIN",
]
# The empty docstring is needed to ignore the developer note during
# docs rendering (quick fix). There might be better ways.
"""
"""
# pylint: disable=pointless-string-statement
r'''
DEVELOPER NOTE: ABOUT arguments management, escapes, formats, arguments and
security of SQL.
A general rule of SQL security is to use queries with _execute call in this
code using args parameter to let MySQLdb manage the arguments proper escaping.
Another way of escaping values arguments could be '{0!r}'.format(), using
__repr__ to ensure things get properly used as strings. But this could lead
to three problems:
* In ANSI mode, which is available on MySQL, but not by default, double
quotes " should not be used as a string delimiters, in ANSI mode this is an
identifier delimiter (like `).
* Some rare exploits with bad multibyte management, either on Python or
MySQL, could defeat this barrier; the bindings' internal escape functions
should manage these cases.
* Unicode strings in Python 2 will include the 'u' before the repr'ed string,
like so:
Python 2.7.10 (default, May 26 2015, 04:16:29)
[GCC 5.1.0] on linux2
Type "help", "copyright", "credits" or "license" for more information.
>>> u'something something {0!r}'.format(u'foo')
u"something something u'foo'"
So query with arguments should use a paramstyle defined in PEP249:
http://www.python.org/dev/peps/pep-0249/#paramstyle
We use pyformat, which means 'SELECT * FROM foo WHERE bar=%(myval)s'
used with {'myval': 'some user input'}
So far so good. But this cannot be used for identifier escapes. Identifiers
are database names, table names and column names. These names are not values
and do not follow the same escape rules (see the quote_identifier function for
details on `_ and % escape policies on identifiers). Using value escaping on
an identifier could fool the SQL engine (badly escaping quotes and not doubling
` characters). So for identifiers a call to quote_identifier should be done and
these identifiers should then be added to strings with format, but without the
__repr__ filter.
Note also that when using a query with arguments in _execute all '%' characters
used in the query should get escaped to '%%' for MySQLdb, but should not be
escaped if the query runs without arguments. This is managed by _execute() and
quote_identifier. This is not the same as escaping '%' to '\%' or '_' to '\_'
when using a LIKE query (example in db_exists), as that escape is there to
avoid having _ or % characters interpreted in LIKE queries. The string part
of the first query could become (still used with an args dictionary for myval):
'SELECT * FROM {0} WHERE bar=%(myval)s'.format(quote_identifier('user input'))
Check the integration tests if you find a hole in these strings and escape rules.
Finally some examples to sum up.
Given a name f_o%o`b'a"r, in python that would be """f_o%o`b'a"r""". I'll
avoid python syntax for clarity:
The MySQL way of writing this name is:
value : 'f_o%o`b\'a"r' (managed by MySQLdb)
identifier : `f_o%o``b'a"r`
db identifier in general GRANT: `f\_o\%o``b'a"r`
db identifier in table GRANT : `f_o%o``b'a"r`
in mySQLdb, query with args : `f_o%%o``b'a"r` (as identifier)
in mySQLdb, query without args: `f_o%o``b'a"r` (as identifier)
value in a LIKE query : 'f\_o\%o`b\'a"r' (quotes managed by MySQLdb)
And these can be mixed; in a LIKE query, a value used with args: 'f\_o\%%o`b\'a"r'
'''
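# Illustrative sketch of the convention described in the note above
# (hypothetical table and values, never executed): identifiers go through
# quote_identifier() and are interpolated with str.format(), while values stay
# as pyformat arguments so the MySQLdb bindings escape them:
#
#     s_db = quote_identifier("my`db")  # -> `my``db`
#     qry = "SELECT * FROM {}.users WHERE name = %(name)s".format(s_db)
#     _execute(cur, qry, {"name": "o'brien"})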
def __virtual__():
"""
Confirm that a python mysql client is installed.
"""
return bool(MySQLdb), "No python mysql client installed." if MySQLdb is None else ""
def __mysql_hash_password(password):
# It's actually used for security purposes, but that's the way
# the MySQL native password plugin works and is why it's deprecated
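# The resulting hash is "*" followed by the uppercase hex of SHA1(SHA1(password)),
# i.e. what SELECT PASSWORD('...') returned on servers that still had PASSWORD().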
_password = hashlib.sha1(password.encode()).digest() # nosec
_password = f"*{hashlib.sha1(_password).hexdigest().upper()}" # nosec
return _password
def __check_table(name, table, **connection_args):
dbc = _connect(**connection_args)
if dbc is None:
return {}
cur = dbc.cursor(MySQLdb.cursors.DictCursor)
s_name = quote_identifier(name)
s_table = quote_identifier(table)
# identifiers cannot be used as values
qry = f"CHECK TABLE {s_name}.{s_table}"
_execute(cur, qry)
results = cur.fetchall()
log.debug(results)
return results
def __repair_table(name, table, **connection_args):
dbc = _connect(**connection_args)
if dbc is None:
return {}
cur = dbc.cursor(MySQLdb.cursors.DictCursor)
s_name = quote_identifier(name)
s_table = quote_identifier(table)
# identifiers cannot be used as values
qry = f"REPAIR TABLE {s_name}.{s_table}"
_execute(cur, qry)
results = cur.fetchall()
log.debug(results)
return results
def __optimize_table(name, table, **connection_args):
dbc = _connect(**connection_args)
if dbc is None:
return {}
cur = dbc.cursor(MySQLdb.cursors.DictCursor)
s_name = quote_identifier(name)
s_table = quote_identifier(table)
# identifiers cannot be used as values
qry = f"OPTIMIZE TABLE {s_name}.{s_table}"
_execute(cur, qry)
results = cur.fetchall()
log.debug(results)
return results
def __password_column(**connection_args):
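# Newer MySQL servers dropped the mysql.user Password column in favour of
# authentication_string; probe which one exists and cache the answer in
# __context__ so the lookup only runs once.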
if "mysql.password_column" in __context__:
return __context__["mysql.password_column"]
dbc = _connect(**connection_args)
if dbc is None:
return "Password"
cur = dbc.cursor()
qry = (
"SELECT column_name from information_schema.COLUMNS "
"WHERE table_schema=%(schema)s and table_name=%(table)s "
"and column_name=%(column)s"
)
args = {"schema": "mysql", "table": "user", "column": "Password"}
_execute(cur, qry, args)
if int(cur.rowcount) > 0:
__context__["mysql.password_column"] = "Password"
else:
__context__["mysql.password_column"] = "authentication_string"
return __context__["mysql.password_column"]
def __get_auth_plugin(user, host, **connection_args):
dbc = _connect(**connection_args)
if dbc is None:
return []
cur = dbc.cursor(MySQLdb.cursors.DictCursor)
try:
qry = "SELECT plugin FROM mysql.user WHERE User=%(user)s and Host=%(host)s"
args = {"user": user, "host": host}
_execute(cur, qry, args)
except MySQLdb.OperationalError as exc:
err = "MySQL Error {}: {}".format(*exc.args)
__context__["mysql.error"] = err
log.error(err)
return "mysql_native_password"
results = cur.fetchall()
log.debug(results)
if results:
return results[0].get("plugin", "mysql_native_password")
else:
return "mysql_native_password"
def _connect(**kwargs):
"""
wrap authentication credentials here
"""
connargs = dict()
def _connarg(name, key=None, get_opts=True):
"""
Add key to connargs, only if name exists in our kwargs or,
if get_opts is true, as mysql.<name> in __opts__ or __pillar__
If get_opts is true, evaluate in said order - kwargs, opts
then pillar. To avoid collision with other functions,
kwargs-based connection arguments are prefixed with 'connection_'
(i.e. 'connection_host', 'connection_user', etc.).
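For example, _connarg('connection_host', 'host') first looks for a
connection_host keyword argument; failing that, it looks up ``mysql.host``
via config.option (minion config, then pillar) and stores the result as
connargs['host'].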
"""
if key is None:
key = name
if name in kwargs:
connargs[key] = kwargs[name]
elif get_opts:
prefix = "connection_"
if name.startswith(prefix):
try:
name = name[len(prefix) :]
except IndexError:
return
val = __salt__["config.option"](f"mysql.{name}", None)
if val is not None:
connargs[key] = val
# If a default file is explicitly passed to kwargs, don't grab the
# opts/pillar settings, as it can override info in the defaults file
if "connection_default_file" in kwargs:
get_opts = False
else:
get_opts = True
connargs["client_flag"] = 0
available_client_flags = {}
for flag in dir(CLIENT):
if not flag.startswith("__"):
available_client_flags[flag.lower()] = getattr(CLIENT, flag)
for flag in kwargs.get("client_flags", []):
if available_client_flags.get(flag):
connargs["client_flag"] |= available_client_flags[flag]
else:
log.error("MySQL client flag %s not valid, ignoring.", flag)
_connarg("connection_host", "host", get_opts)
_connarg("connection_user", "user", get_opts)
_connarg("connection_pass", "passwd", get_opts)
_connarg("connection_port", "port", get_opts)
_connarg("connection_db", "db", get_opts)
_connarg("connection_conv", "conv", get_opts)
_connarg("connection_unix_socket", "unix_socket", get_opts)
_connarg("connection_default_file", "read_default_file", get_opts)
_connarg("connection_default_group", "read_default_group", get_opts)
# MySQLdb states that this is required for charset usage
# but in fact it is more than that: it is internally activated
# when charset is used. Activating use_unicode here would
# retrieve utf8 strings as unicode() objects in salt
# and we do not want that.
# _connarg('connection_use_unicode', 'use_unicode')
connargs["use_unicode"] = False
_connarg("connection_charset", "charset")
# Ensure MySQLdb knows the format we use for queries with arguments
MySQLdb.paramstyle = "pyformat"
for key in copy.deepcopy(connargs):
if not connargs[key]:
del connargs[key]
if connargs.get("passwd", True) is None: # If present but set to None. (Extreme edge case.)
log.warning("MySQL password of None found. Attempting passwordless login.")
connargs.pop("passwd")
try:
dbc = MySQLdb.connect(**connargs)
except OperationalError as exc:
err = "MySQL Error {}: {}".format(*exc.args)
__context__["mysql.error"] = err
log.error(err)
return None
except MySQLdb.err.InternalError as exc:
err = "MySQL Error {}: {}".format(*exc.args)
__context__["mysql.error"] = err
log.error(err)
return None
dbc.autocommit(True)
return dbc
def _grant_to_tokens(grant):
"""
This should correspond fairly closely to the YAML rendering of a
mysql_grants state which comes out as follows:
OrderedDict([
('whatever_identifier',
OrderedDict([
('mysql_grants.present',
[
OrderedDict([('database', 'testdb.*')]),
OrderedDict([('user', 'testuser')]),
OrderedDict([('grant', 'ALTER, SELECT, LOCK TABLES')]),
OrderedDict([('host', 'localhost')])
]
)
])
)
])
:param grant: An un-parsed MySQL GRANT statement str, like
"GRANT SELECT, ALTER, LOCK TABLES ON `mydb`.* TO 'testuser'@'localhost'"
or a dictionary with 'qry' and 'args' keys for 'user' and 'host'.
:return:
A Python dict with the following keys/values:
- user: MySQL User
- host: MySQL host
- grant: [grant1, grant2] (ala SELECT, USAGE, etc)
- database: MySQL DB
"""
log.debug("_grant_to_tokens entry '%s'", grant)
dict_mode = False
if isinstance(grant, dict):
dict_mode = True
# Everything coming in dictionary form was made for a MySQLdb execute
# call and contains a '%%' escaping of '%' characters for MySQLdb
# that we should remove here.
grant_sql = grant.get("qry", "undefined").replace("%%", "%")
sql_args = grant.get("args", {})
host = sql_args.get("host", "undefined")
user = sql_args.get("user", "undefined")
else:
grant_sql = grant
user = ""
# the replace part is for presence of ` character in the db name
# the shell escape is \` but mysql escape is ``. Spaces should not be
# exploded as users or db names could contain spaces.
# Examples of splitting:
# "GRANT SELECT, LOCK TABLES, UPDATE, CREATE ON `test ``(:=saltdb)`.*
# TO 'foo'@'localhost' WITH GRANT OPTION"
# ['GRANT', 'SELECT', ',', 'LOCK', 'TABLES', ',', 'UPDATE', ',', 'CREATE',
# 'ON', '`test `', '`(:=saltdb)`', '.', '*', 'TO', "'foo'", '@',
# "'localhost'", 'WITH', 'GRANT', 'OPTION']
#
# 'GRANT SELECT, INSERT, UPDATE, CREATE ON `te s.t\'"sa;ltdb`.`tbl ``\'"xx`
# TO \'foo \' bar\'@\'localhost\''
# ['GRANT', 'SELECT', ',', 'INSERT', ',', 'UPDATE', ',', 'CREATE', 'ON',
# '`te s.t\'"sa;ltdb`', '.', '`tbl `', '`\'"xx`', 'TO', "'foo '", "bar'",
# '@', "'localhost'"]
#
# "GRANT USAGE ON *.* TO 'user \";--,?:&/\\'@'localhost'"
# ['GRANT', 'USAGE', 'ON', '*', '.', '*', 'TO', '\'user ";--,?:&/\\\'',
# '@', "'localhost'"]
lex = shlex.shlex(grant_sql)
lex.quotes = "'`"
lex.whitespace_split = False
lex.commenters = ""
lex.wordchars += '"'
exploded_grant = list(lex)
grant_tokens = []
multiword_statement = []
position_tracker = 1 # Skip the initial 'GRANT' word token
database = ""
phrase = "grants"
column = False
current_grant = ""
for token in exploded_grant[position_tracker:]:
if token == "," and phrase == "grants":
position_tracker += 1
continue
if token == "(" and phrase == "grants":
position_tracker += 1
column = True
continue
if token == ")" and phrase == "grants":
position_tracker += 1
column = False
continue
if token == "ON" and phrase == "grants":
phrase = "db"
position_tracker += 1
continue
elif token == "TO" and phrase == "tables":
phrase = "user"
position_tracker += 1
continue
elif token == "@" and phrase == "pre-host":
phrase = "host"
position_tracker += 1
continue
if phrase == "grants":
# Read-ahead
if (
exploded_grant[position_tracker + 1] == ","
or exploded_grant[position_tracker + 1] == "ON"
or exploded_grant[position_tracker + 1] in ["(", ")"]
):
# End of token detected
if multiword_statement:
multiword_statement.append(token)
grant_tokens.append(" ".join(multiword_statement))
multiword_statement = []
else:
if not column:
current_grant = token
else:
token = f"{current_grant}.{token}"
grant_tokens.append(token)
else: # This is a multi-word, ala LOCK TABLES
multiword_statement.append(token)
elif phrase == "db":
# the shlex splitter may have split on special database characters `
database += token
# Read-ahead
try:
if exploded_grant[position_tracker + 1] == ".":
phrase = "tables"
except IndexError:
break
elif phrase == "tables":
database += token
elif phrase == "user":
if dict_mode:
break
else:
user += token
# Read-ahead
if exploded_grant[position_tracker + 1] == "@":
phrase = "pre-host"
elif phrase == "host":
host = token
break
position_tracker += 1
try:
if not dict_mode:
user = user.strip("'")
host = host.strip("'")
log.debug("grant to token '%s'::'%s'::'%s'::'%s'", user, host, grant_tokens, database)
except UnboundLocalError:
host = ""
return dict(user=user, host=host, grant=grant_tokens, database=database)
def _resolve_grant_aliases(grants, server_version):
"""
There can be a situation where the database supports grants "A" and "B", where
"B" is an alias for "A". In that case, when you want to grant "B" to a user,
the database will actually report it added "A". We need to resolve those
aliases to not report (wrong) errors.
:param grants: the tokenized grants
:param server_version: version string of the connected database
"""
if "MariaDB" not in server_version:
return grants
mariadb_version_compare_replication_replica = "10.5.1"
mariadb_version_compare_binlog_monitor = "10.5.2"
mariadb_version_compare_slave_monitor = "10.5.9"
resolved_tokens = []
for token in grants:
if (
salt.utils.versions.version_cmp(
server_version, mariadb_version_compare_replication_replica
)
>= 0
):
if token == "REPLICATION REPLICA":
# https://mariadb.com/kb/en/grant/#replication-replica
resolved_tokens.append("REPLICATION SLAVE")
continue
if (
salt.utils.versions.version_cmp(server_version, mariadb_version_compare_binlog_monitor)
>= 0
):
if token == "REPLICATION CLIENT":
# https://mariadb.com/kb/en/grant/#replication-client
resolved_tokens.append("BINLOG MONITOR")
continue
if (
salt.utils.versions.version_cmp(server_version, mariadb_version_compare_slave_monitor)
>= 0
):
if token == "REPLICA MONITOR":
# https://mariadb.com/kb/en/grant/#replica-monitor
resolved_tokens.append("SLAVE MONITOR")
continue
resolved_tokens.append(token)
return resolved_tokens
def quote_identifier(identifier, for_grants=False):
r"""
Return an identifier name (column, table, database, etc) escaped for MySQL
This means surrounded by "`" character and escaping this character inside.
It also means doubling the '%' character for MySQLdb internal usage.
:param identifier: the table, column or database identifier
:param for_grants: is False by default, when using database names on grant
queries you should set it to True to also escape "_" and "%" characters as
requested by MySQL. Note that these characters should only be escaped when
requesting grants on the database level (`my\_\%db`.*) but not for table
level grants (`my_%db`.`foo`)
CLI Example:
.. code-block:: bash
salt '*' mysql.quote_identifier 'foo`bar'
"""
if for_grants:
return "`" + identifier.replace("`", "``").replace("_", r"\_").replace("%", r"%%") + "`"
else:
return "`" + identifier.replace("`", "``").replace("%", "%%") + "`"
def _execute(cur, qry, args=None):
"""
Internal wrapper around MySQLdb cursor.execute() function
MySQLdb does not apply the same filters when arguments are used with the
query. For example, '%' characters in the query must be encoded as '%%' and
will be restored as '%' when arguments are applied. But when there are no
arguments the '%%' is not managed. We cannot apply identifier quoting in a
predictable way if queries do not always apply the same filters, so this
wrapper reverts the '%%' escaping when the query is run without arguments.
"""
if args is None or args == {}:
qry = qry.replace("%%", "%")
log.debug("Doing query: %s", qry)
return cur.execute(qry)
else:
log.debug("Doing query: %s args: %s ", qry, repr(args))
return cur.execute(qry, args)
def _sanitize_comments(content):
# Remove comments which might affect line by line parsing
# Regex should remove any text beginning with # (or --) not inside of ' or "
if not HAS_SQLPARSE:
log.error("_sanitize_comments unavailable, no python sqlparse library installed.")
return content
return sqlparse.format(content, strip_comments=True)
def query(database, query, **connection_args):
"""
Run an arbitrary SQL query and return the results or
the number of affected rows.
CLI Example:
.. code-block:: bash
salt '*' mysql.query mydb "UPDATE mytable set myfield=1 limit 1"
Return data:
.. code-block:: python
{"query time": {"human": "39.0ms", "raw": "0.03899"}, "rows affected": 1}
CLI Example:
.. code-block:: bash
salt '*' mysql.query mydb "SELECT id,name,cash from users limit 3"
Return data:
.. code-block:: python
{
"columns": ("id", "name", "cash"),
"query time": {"human": "1.0ms", "raw": "0.001"},
"results": (
(1, "User 1", Decimal("110.000000")),
(2, "User 2", Decimal("215.636756")),
(3, "User 3", Decimal("0.040000")),
),
"rows returned": 3,
}
CLI Example:
.. code-block:: bash
salt '*' mysql.query mydb 'INSERT into users values (null,"user 4", 5)'
Return data:
.. code-block:: python
{"query time": {"human": "25.6ms", "raw": "0.02563"}, "rows affected": 1}
CLI Example:
.. code-block:: bash
salt '*' mysql.query mydb 'DELETE from users where id = 4 limit 1'
Return data:
.. code-block:: python
{"query time": {"human": "39.0ms", "raw": "0.03899"}, "rows affected": 1}
Jinja Example: Run a query on ``mydb`` and use row 0, column 0's data.
.. code-block:: jinja
{{ salt['mysql.query']('mydb', 'SELECT info from mytable limit 1')['results'][0][0] }}
"""
# Doesn't do anything about sql warnings, e.g. empty values on an insert.
# I don't think it handles multiple queries at once, so adding "commit"
# might not work.
# The following 3 lines stop MySQLdb from converting the MySQL results
# into Python objects. It leaves them as strings.
orig_conv = MySQLdb.converters.conversions
conv_iter = iter(orig_conv)
conv = dict(zip(conv_iter, [str] * len(orig_conv)))
# some converters are lists, do not break these
conv_mysqldb = {"MYSQLDB": True}
if conv_mysqldb.get(MySQLdb.__package__.upper()):
conv[FIELD_TYPE.BLOB] = [
(FLAG.BINARY, str),
]
conv[FIELD_TYPE.STRING] = [
(FLAG.BINARY, str),
]
conv[FIELD_TYPE.VAR_STRING] = [
(FLAG.BINARY, str),
]
conv[FIELD_TYPE.VARCHAR] = [
(FLAG.BINARY, str),
]
connection_args.update({"connection_db": database, "connection_conv": conv})
dbc = _connect(**connection_args)
if dbc is None:
return {}
cur = dbc.cursor()
start = time.time()
log.debug("Using db: %s to run query %s", database, query)
try:
affected = _execute(cur, query)
except OperationalError as exc:
err = "MySQL Error {}: {}".format(*exc.args)
__context__["mysql.error"] = err
log.error(err)
return False
results = cur.fetchall()
elapsed = time.time() - start
if elapsed < 0.200:
elapsed_h = str(round(elapsed * 1000, 1)) + "ms"
else:
elapsed_h = str(round(elapsed, 2)) + "s"
ret = {}
ret["query time"] = {"human": elapsed_h, "raw": str(round(elapsed, 5))}
select_keywords = ["SELECT", "SHOW", "DESC"]
select_query = False
for keyword in select_keywords:
if query.upper().strip().startswith(keyword):
select_query = True
break
if select_query:
ret["rows returned"] = affected
columns = ()
for column in cur.description:
columns += (column[0],)
ret["columns"] = columns
ret["results"] = results
return ret
else:
ret["rows affected"] = affected
return ret
def file_query(database, file_name, **connection_args):
"""
Run an arbitrary SQL query from the specified file and return the
number of affected rows.
.. versionadded:: 2017.7.0
database
database to run script inside
file_name
File name of the script. This can be on the minion, or a file that is reachable by the fileserver
CLI Example:
.. code-block:: bash
salt '*' mysql.file_query mydb file_name=/tmp/sqlfile.sql
salt '*' mysql.file_query mydb file_name=salt://sqlfile.sql
Return data:
.. code-block:: python
{"query time": {"human": "39.0ms", "raw": "0.03899"}, "rows affected": 1}
"""
if not HAS_SQLPARSE:
log.error("mysql.file_query unavailable, no python sqlparse library installed.")
return False
if any(
file_name.startswith(proto)
for proto in ("salt://", "http://", "https://", "swift://", "s3://")
):
file_name = __salt__["cp.cache_file"](file_name)
if os.path.exists(file_name):
with salt.utils.files.fopen(file_name, "r") as ifile:
contents = salt.utils.stringutils.to_unicode(ifile.read())
else:
log.error('File "%s" does not exist', file_name)
return False
query_string = ""
ret = {
"rows returned": 0,
"columns": [],
"results": [],
"rows affected": 0,
"query time": {"raw": 0},
}
contents = _sanitize_comments(contents)
# Walk each line of the sql file to get accurate rows-affected results
for line in contents.splitlines():
if not re.search(r"[^-;]+;", line): # keep appending lines that don't end in ;
query_string = query_string + line
else:
query_string = query_string + line # append lines that end with ; and run query
query_result = query(database, query_string, **connection_args)
query_string = ""
if query_result is False:
# Fail out on error
return False
if "query time" in query_result:
ret["query time"]["raw"] += float(query_result["query time"]["raw"])
if "rows returned" in query_result:
ret["rows returned"] += query_result["rows returned"]
if "columns" in query_result:
ret["columns"].append(query_result["columns"])
if "results" in query_result:
ret["results"].append(query_result["results"])
if "rows affected" in query_result:
ret["rows affected"] += query_result["rows affected"]
ret["query time"]["human"] = str(round(float(ret["query time"]["raw"]), 2)) + "s"
ret["query time"]["raw"] = round(float(ret["query time"]["raw"]), 5)
# Remove empty keys in ret
ret = {k: v for k, v in ret.items() if v}
return ret
def status(**connection_args):
"""
Return the status of a MySQL server using the output from the ``SHOW
STATUS`` query.
CLI Example:
.. code-block:: bash
salt '*' mysql.status
"""
dbc = _connect(**connection_args)
if dbc is None:
return {}
cur = dbc.cursor()
qry = "SHOW STATUS"
try:
_execute(cur, qry)
except OperationalError as exc:
err = "MySQL Error {}: {}".format(*exc.args)
__context__["mysql.error"] = err
log.error(err)
return {}
ret = {}
for _ in range(cur.rowcount):
row = cur.fetchone()
ret[row[0]] = row[1]
return ret
def version(**connection_args):
"""
Return the version of a MySQL server using the output from the ``SELECT
VERSION()`` query.
CLI Example:
.. code-block:: bash
salt '*' mysql.version
"""
if "mysql.version" in __context__:
return __context__["mysql.version"]
dbc = _connect(**connection_args)
if dbc is None:
return ""
cur = dbc.cursor()
qry = "SELECT VERSION()"
try:
_execute(cur, qry)
except MySQLdb.OperationalError as exc:
err = "MySQL Error {}: {}".format(*exc.args)
__context__["mysql.error"] = err
log.error(err)
return ""
try:
__context__["mysql.version"] = salt.utils.data.decode(cur.fetchone()[0])
return __context__["mysql.version"]
except IndexError:
return ""
def slave_lag(**connection_args):
"""
Return the number of seconds that a slave SQL server is lagging behind the
master. If the host is not a slave, -1 will be returned. If the server is
configured to be a slave for replication but slave IO is not running then
-2 will be returned. If there was an error connecting to the database or
checking the slave status, -3 will be returned.
CLI Example:
.. code-block:: bash
salt '*' mysql.slave_lag
"""
dbc = _connect(**connection_args)
if dbc is None:
return -3
cur = dbc.cursor(MySQLdb.cursors.DictCursor)
qry = "show slave status"
try:
_execute(cur, qry)
except MySQLdb.OperationalError as exc:
err = "MySQL Error {}: {}".format(*exc.args)
__context__["mysql.error"] = err
log.error(err)
return -3
results = cur.fetchone()
if cur.rowcount == 0:
# Server is not a slave if master is not defined. Return -1
# in this case. Could probably check to see if Slave_IO_Running and
# Slave_SQL_Running are both set to 'Yes' as well to be really really
# sure that it is a slave.
return -1
else:
if results["Slave_IO_Running"] == "Yes":
return results["Seconds_Behind_Master"]
else:
# Replication is broken if you get here.
return -2
def free_slave(**connection_args):
"""
Frees a slave from its master. This is a WIP, do not use.
CLI Example:
.. code-block:: bash
salt '*' mysql.free_slave
"""
slave_db = _connect(**connection_args)
if slave_db is None:
return ""
slave_cur = slave_db.cursor(MySQLdb.cursors.DictCursor)
slave_cur.execute("show slave status")
slave_status = slave_cur.fetchone()
master = {"host": slave_status["Master_Host"]}
try:
# Try to connect to the master and flush logs before promoting to
# master. This may fail if the master is no longer available.
# I am also assuming that the admin password is the same on both
# servers here, and only overriding the host option in the connect
# function.
master_db = _connect(**master)
if master_db is None:
return ""
master_cur = master_db.cursor()
master_cur.execute("flush logs")
master_db.close()
except MySQLdb.OperationalError:
pass
slave_cur.execute("stop slave")
slave_cur.execute("reset master")
slave_cur.execute("change master to MASTER_HOST=")
slave_cur.execute("show slave status")
results = slave_cur.fetchone()
if results is None:
return "promoted"
else:
return "failed"
# Database related actions
def db_list(**connection_args):
"""
Return a list of databases of a MySQL server using the output
from the ``SHOW DATABASES`` query.
CLI Example:
.. code-block:: bash
salt '*' mysql.db_list
"""
dbc = _connect(**connection_args)
if dbc is None:
return []
cur = dbc.cursor()
qry = "SHOW DATABASES"
try:
_execute(cur, qry)
except MySQLdb.OperationalError as exc:
err = "MySQL Error {}: {}".format(*exc.args)
__context__["mysql.error"] = err
log.error(err)
return []
ret = []
results = cur.fetchall()
for dbs in results:
ret.append(dbs[0])
log.debug(ret)
return ret
def alter_db(name, character_set=None, collate=None, **connection_args):
"""
Modify database using ``ALTER DATABASE %(dbname)s CHARACTER SET %(charset)s
COLLATE %(collation)s;`` query.
CLI Example:
.. code-block:: bash
salt '*' mysql.alter_db testdb charset='latin1'
"""
dbc = _connect(**connection_args)
if dbc is None:
return []
cur = dbc.cursor()
existing = db_get(name, **connection_args)
# escaping database name is not required because of backticks in query expression
qry = "ALTER DATABASE `{}` CHARACTER SET {} COLLATE {};".format(
name,
character_set or existing.get("character_set"),
collate or existing.get("collate"),
)
args = {}
try:
if _execute(cur, qry, args):
log.info("DB '%s' altered", name)
return True
except MySQLdb.OperationalError as exc:
err = "MySQL Error {}: {}".format(*exc.args)
__context__["mysql.error"] = err
log.error(err)
return False
def db_get(name, **connection_args):
"""
Return the character set and collation of a MySQL database using the output
from the ``SELECT DEFAULT_CHARACTER_SET_NAME, DEFAULT_COLLATION_NAME FROM
INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME='dbname';`` query.
CLI Example:
.. code-block:: bash
salt '*' mysql.db_get test
"""
dbc = _connect(**connection_args)
if dbc is None:
return []
cur = dbc.cursor()
qry = (
"SELECT DEFAULT_CHARACTER_SET_NAME, DEFAULT_COLLATION_NAME FROM "
"INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME=%(dbname)s;"
)
args = {"dbname": name}
try:
_execute(cur, qry, args)
except MySQLdb.OperationalError as exc:
err = "MySQL Error {}: {}".format(*exc.args)
__context__["mysql.error"] = err
log.error(err)
return []
if cur.rowcount:
rows = cur.fetchall()
return {"character_set": rows[0][0], "collate": rows[0][1]}
return {}
def db_tables(name, **connection_args):
"""
Shows the tables in the given MySQL database (if it exists)
CLI Example:
.. code-block:: bash
salt '*' mysql.db_tables 'database'
"""
if not db_exists(name, **connection_args):
log.info("Database '%s' does not exist", name)
return False
dbc = _connect(**connection_args)
if dbc is None:
return []
cur = dbc.cursor()
s_name = quote_identifier(name)
# identifiers cannot be used as values
qry = f"SHOW TABLES IN {s_name}"
try:
_execute(cur, qry)
except MySQLdb.OperationalError as exc:
err = "MySQL Error {}: {}".format(*exc.args)
__context__["mysql.error"] = err
log.error(err)
return []
ret = []
results = cur.fetchall()
for table in results:
ret.append(table[0])
log.debug(ret)
return ret
def db_exists(name, **connection_args):
"""
Checks if a database exists on the MySQL server.
CLI Example:
.. code-block:: bash
salt '*' mysql.db_exists 'dbname'
"""
dbc = _connect(**connection_args)
if dbc is None:
return False
cur = dbc.cursor()
# Warn: here the db identifier is not backticked but should be
# escaped as a string value. Note also that LIKE special characters
# '_' and '%' should also be escaped.
args = {"dbname": name}
qry = "SHOW DATABASES LIKE %(dbname)s;"
try:
_execute(cur, qry, args)
except MySQLdb.OperationalError as exc:
err = "MySQL Error {}: {}".format(*exc.args)
__context__["mysql.error"] = err
log.error(err)
return False
cur.fetchall()
return cur.rowcount == 1
def db_create(name, character_set=None, collate=None, **connection_args):
"""
Adds a database to the MySQL server.
name
The name of the database to manage
character_set
The character set, if left empty the MySQL default will be used
collate
The collation, if left empty the MySQL default will be used
CLI Example:
.. code-block:: bash
salt '*' mysql.db_create 'dbname'
salt '*' mysql.db_create 'dbname' 'utf8' 'utf8_general_ci'
"""
# check if db exists
if db_exists(name, **connection_args):
log.info("DB '%s' already exists", name)
return False
# db doesn't exist, proceed
dbc = _connect(**connection_args)
if dbc is None:
return False
cur = dbc.cursor()
s_name = quote_identifier(name)
# identifiers cannot be used as values
qry = f"CREATE DATABASE IF NOT EXISTS {s_name}"
args = {}
if character_set is not None:
qry += " CHARACTER SET %(character_set)s"
args["character_set"] = character_set
if collate is not None:
qry += " COLLATE %(collate)s"
args["collate"] = collate
qry += ";"
try:
if _execute(cur, qry, args):
log.info("DB '%s' created", name)
return True
except MySQLdb.OperationalError as exc:
err = "MySQL Error {}: {}".format(*exc.args)
__context__["mysql.error"] = err
log.error(err)
return False
def db_remove(name, **connection_args):
"""
Removes a database from the MySQL server.
CLI Example:
.. code-block:: bash
salt '*' mysql.db_remove 'dbname'
"""
# check if db exists
if not db_exists(name, **connection_args):
log.info("DB '%s' does not exist", name)
return False
if name in ("mysql", "information_schema"):
log.info("DB '%s' may not be removed", name)
return False
# db does exists, proceed
dbc = _connect(**connection_args)
if dbc is None:
return False
cur = dbc.cursor()
s_name = quote_identifier(name)
# identifiers cannot be used as values
qry = f"DROP DATABASE {s_name};"
try:
_execute(cur, qry)
except MySQLdb.OperationalError as exc:
err = "MySQL Error {}: {}".format(*exc.args)
__context__["mysql.error"] = err
log.error(err)
return False
if not db_exists(name, **connection_args):
log.info("Database '%s' has been removed", name)
return True
log.info("Database '%s' has not been removed", name)
return False
# User related actions
def user_list(**connection_args):
"""
Return a list of users on a MySQL server
CLI Example:
.. code-block:: bash
salt '*' mysql.user_list
"""
dbc = _connect(**connection_args)
if dbc is None:
return []
cur = dbc.cursor(MySQLdb.cursors.DictCursor)
try:
qry = "SELECT User,Host FROM mysql.user"
_execute(cur, qry)
except MySQLdb.OperationalError as exc:
err = "MySQL Error {}: {}".format(*exc.args)
__context__["mysql.error"] = err
log.error(err)
return []
results = cur.fetchall()
log.debug(results)
return results
def _mysql_user_exists(
user,
host="localhost",
password=None,
password_hash=None,
passwordless=False,
unix_socket=False,
password_column=None,
auth_plugin="mysql_native_password",
**connection_args,
):
server_version = salt.utils.data.decode(version(**connection_args))
compare_version = "8.0.11"
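# MySQL 8.0.11 removed the PASSWORD() SQL function, so for newer servers the
# mysql_native_password hash is computed client-side (__mysql_hash_password)
# and compared directly; older servers still use PASSWORD() in the query below.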
qry = "SELECT User,Host FROM mysql.user WHERE User = %(user)s AND Host = %(host)s"
args = {}
args["user"] = user
args["host"] = host
if salt.utils.data.is_true(passwordless):
if salt.utils.data.is_true(unix_socket):
qry += " AND plugin=%(unix_socket)s"
args["unix_socket"] = "auth_socket"
else:
qry += " AND " + password_column + " = ''"
elif password:
if salt.utils.versions.version_cmp(server_version, compare_version) >= 0:
if auth_plugin == "mysql_native_password":
_password = __mysql_hash_password(str(password))
qry += " AND " + password_column + " = %(password)s"
args["password"] = str(_password)
else:
err = "Unable to verify password."
log.error(err)
__context__["mysql.error"] = err
else:
qry += " AND " + password_column + " = PASSWORD(%(password)s)"
args["password"] = str(password)
elif password_hash:
qry += " AND " + password_column + " = %(password)s"
args["password"] = password_hash
return qry, args
def _mariadb_user_exists(
user,
host="localhost",
password=None,
password_hash=None,
passwordless=False,
unix_socket=False,
password_column=None,
auth_plugin="mysql_native_password",
**connection_args,
):
qry = "SELECT User,Host FROM mysql.user WHERE User = %(user)s AND Host = %(host)s"
args = {}
args["user"] = user
args["host"] = host
if salt.utils.data.is_true(passwordless):
if salt.utils.data.is_true(unix_socket):
qry += " AND plugin=%(unix_socket)s"
args["unix_socket"] = "unix_socket"
else:
qry += " AND " + password_column + " = ''"
elif password:
qry += " AND " + password_column + " = PASSWORD(%(password)s)"
args["password"] = str(password)
elif password_hash:
qry += " AND " + password_column + " = %(password)s"
args["password"] = password_hash
return qry, args
def user_exists(
user,
host="localhost",
password=None,
password_hash=None,
passwordless=False,
unix_socket=False,
password_column=None,
**connection_args,
):
"""
Checks if a user exists on the MySQL server. A login can be checked to see
if passwordless login is permitted by omitting ``password`` and
``password_hash``, and using ``passwordless=True``.
.. versionadded:: 0.16.2
The ``passwordless`` option was added.
CLI Example:
.. code-block:: bash
salt '*' mysql.user_exists 'username' 'hostname' 'password'
salt '*' mysql.user_exists 'username' 'hostname' password_hash='hash'
salt '*' mysql.user_exists 'username' passwordless=True
salt '*' mysql.user_exists 'username' password_column='authentication_string'
"""
run_verify = False
server_version = salt.utils.data.decode(version(**connection_args))
if not server_version and password:
# Did we fail to connect with the user we are checking
# Its password might have previously changed with the same command/state
# Clear the previous error
__context__["mysql.error"] = None
connection_args["connection_pass"] = password
server_version = salt.utils.data.decode(version(**connection_args))
if not server_version:
last_err = __context__["mysql.error"]
err = (
"MySQL Error: Unable to fetch current server version. Last error was:"
' "{}"'.format(last_err)
)
log.error(err)
return False
dbc = _connect(**connection_args)
# Did we fail to connect with the user we are checking
# Its password might have previously changed with the same command/state
if (
dbc is None
and __context__["mysql.error"].startswith(
f"MySQL Error 1045: Access denied for user '{user}'@"
)
and password
):
# Clear the previous error
__context__["mysql.error"] = None
connection_args["connection_pass"] = password
dbc = _connect(**connection_args)
if dbc is None:
return False
if not password_column:
password_column = __password_column(**connection_args)
auth_plugin = __get_auth_plugin(user, host, **connection_args)
cur = dbc.cursor()
if "MariaDB" in server_version:
qry, args = _mariadb_user_exists(
user,
host,
password,
password_hash,
passwordless,
unix_socket,
password_column=password_column,
auth_plugin=auth_plugin,
**connection_args,
)
else:
qry, args = _mysql_user_exists(
user,
host,
password,
password_hash,
passwordless,
unix_socket,
password_column=password_column,
auth_plugin=auth_plugin,
**connection_args,
)
try:
_execute(cur, qry, args)
except MySQLdb.OperationalError as exc:
err = "MySQL Error {}: {}".format(*exc.args)
__context__["mysql.error"] = err
log.error(err)
return False
return cur.rowcount == 1
def user_info(user, host="localhost", **connection_args):
"""
Get full info on a MySQL user
CLI Example:
.. code-block:: bash
salt '*' mysql.user_info root localhost
"""
dbc = _connect(**connection_args)
if dbc is None:
return False
cur = dbc.cursor(MySQLdb.cursors.DictCursor)
qry = "SELECT * FROM mysql.user WHERE User = %(user)s AND Host = %(host)s"
args = {}
args["user"] = user
args["host"] = host
try:
_execute(cur, qry, args)
except MySQLdb.OperationalError as exc:
err = "MySQL Error {}: {}".format(*exc.args)
__context__["mysql.error"] = err
log.error(err)
return False
result = cur.fetchone()
log.debug(result)
return result
def _mysql_user_create(
user,
host="localhost",
password=None,
password_hash=None,
allow_passwordless=False,
unix_socket=False,
password_column=None,
auth_plugin="mysql_native_password",
**connection_args,
):
server_version = salt.utils.data.decode(version(**connection_args))
compare_version = "8.0.11"
qry = "CREATE USER %(user)s@%(host)s"
args = {}
args["user"] = user
args["host"] = host
if unix_socket:
if not plugin_status("auth_socket", **connection_args):
err = "The auth_socket plugin is not enabled."
log.error(err)
__context__["mysql.error"] = err
qry = False
else:
if host == "localhost":
qry += " IDENTIFIED WITH auth_socket"
else:
log.error("Auth via unix_socket can be set only for host=localhost")
__context__["mysql.error"] = err
qry = False
else:
if not salt.utils.data.is_true(allow_passwordless):
if password is not None:
if salt.utils.versions.version_cmp(server_version, compare_version) >= 0:
args["auth_plugin"] = auth_plugin
qry += " IDENTIFIED WITH %(auth_plugin)s BY %(password)s"
else:
qry += " IDENTIFIED BY %(password)s"
args["password"] = str(password)
elif password_hash is not None:
if salt.utils.versions.version_cmp(server_version, compare_version) >= 0:
args["auth_plugin"] = auth_plugin
qry += " IDENTIFIED WITH %(auth_plugin)s AS %(password)s"
else:
qry += " IDENTIFIED BY PASSWORD %(password)s"
args["password"] = password_hash
else:
log.error(
"password or password_hash must be specified, unless allow_passwordless=True"
)
qry = False
return qry, args
def _mariadb_user_create(
user,
host="localhost",
password=None,
password_hash=None,
allow_passwordless=False,
unix_socket=False,
password_column=None,
auth_plugin="mysql_native_password",
**connection_args,
):
qry = "CREATE USER %(user)s@%(host)s"
args = {}
args["user"] = user
args["host"] = host
if unix_socket:
if not plugin_status("unix_socket", **connection_args):
err = "The unix_socket plugin is not enabled."
log.error(err)
__context__["mysql.error"] = err
qry = False
else:
if host == "localhost":
qry += " IDENTIFIED VIA unix_socket"
else:
log.error("Auth via unix_socket can be set only for host=localhost")
__context__["mysql.error"] = err
qry = False
else:
if not salt.utils.data.is_true(allow_passwordless):
if password is not None:
qry += " IDENTIFIED BY %(password)s"
args["password"] = str(password)
elif password_hash is not None:
qry += " IDENTIFIED BY PASSWORD %(password)s"
args["password"] = password_hash
else:
log.error(
"password or password_hash must be specified, unless allow_passwordless=True"
)
qry = False
return qry, args
def user_create(
user,
host="localhost",
password=None,
password_hash=None,
allow_passwordless=False,
unix_socket=False,
password_column=None,
auth_plugin="mysql_native_password",
**connection_args,
):
"""
Creates a MySQL user
host
Host for which this user/password combo applies
password
The password to use for the new user. Will take precedence over the
``password_hash`` option if both are specified.
password_hash
The password in hashed form. Be sure to quote the password because YAML
doesn't like the ``*``. A password hash can be obtained from the mysql
command-line client like so::
mysql> SELECT PASSWORD('mypass');
+-------------------------------------------+
| PASSWORD('mypass') |
+-------------------------------------------+
| *6C8989366EAF75BB670AD8EA7A7FC1176A95CEF4 |
+-------------------------------------------+
1 row in set (0.00 sec)
allow_passwordless
If ``True``, then ``password`` and ``password_hash`` can be omitted (or
set to ``None``) to permit a passwordless login.
unix_socket
If ``True`` and allow_passwordless is ``True`` then will be used unix_socket auth plugin.
password_column
The password column to use in the user table.
auth_plugin
The authentication plugin to use, default is to use the mysql_native_password plugin.
.. versionadded:: 0.16.2
The ``allow_passwordless`` option was added.
CLI Examples:
.. code-block:: bash
salt '*' mysql.user_create 'username' 'hostname' 'password'
salt '*' mysql.user_create 'username' 'hostname' password_hash='hash'
salt '*' mysql.user_create 'username' 'hostname' allow_passwordless=True
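# The authentication plugin can also be chosen explicitly on MySQL >= 8.0.11
# (the plugin name below is only an illustration):
salt '*' mysql.user_create 'username' 'hostname' 'password' auth_plugin='mysql_native_password'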
"""
server_version = salt.utils.data.decode(version(**connection_args))
if not server_version and password:
# Did we fail to connect with the user we are checking?
# Its password might have previously been changed by the same command/state.
# Clear the previous error
__context__["mysql.error"] = None
connection_args["connection_pass"] = password
server_version = salt.utils.data.decode(version(**connection_args))
if not server_version:
last_err = __context__["mysql.error"]
err = (
"MySQL Error: Unable to fetch current server version. Last error was:"
' "{}"'.format(last_err)
)
log.error(err)
return False
if user_exists(user, host, **connection_args):
log.info("User '%s'@'%s' already exists", user, host)
return False
dbc = _connect(**connection_args)
if dbc is None:
return False
if not password_column:
password_column = __password_column(**connection_args)
cur = dbc.cursor()
if "MariaDB" in server_version:
qry, args = _mariadb_user_create(
user,
host,
password,
password_hash,
allow_passwordless,
unix_socket,
password_column=password_column,
auth_plugin=auth_plugin,
**connection_args,
)
else:
qry, args = _mysql_user_create(
user,
host,
password,
password_hash,
allow_passwordless,
unix_socket,
password_column=password_column,
auth_plugin=auth_plugin,
**connection_args,
)
if isinstance(qry, bool):
return qry
try:
_execute(cur, qry, args)
except MySQLdb.OperationalError as exc:
err = "MySQL Error {}: {}".format(*exc.args)
__context__["mysql.error"] = err
log.error(err)
return False
if user_exists(
user, host, password, password_hash, password_column=password_column, **connection_args
):
msg = f"User '{user}'@'{host}' has been created"
if not any((password, password_hash)):
msg += " with passwordless login"
log.info(msg)
return True
log.info("User '%s'@'%s' was not created", user, host)
return False
def _mysql_user_chpass(
user,
host="localhost",
password=None,
password_hash=None,
allow_passwordless=False,
unix_socket=None,
password_column=None,
auth_plugin="mysql_native_password",
**connection_args,
):
server_version = salt.utils.data.decode(version(**connection_args))
compare_version = "8.0.11"
args = {}
if password is not None:
if salt.utils.versions.version_cmp(server_version, compare_version) >= 0:
password_sql = "%(password)s"
else:
password_sql = "PASSWORD(%(password)s)"
args["password"] = password
elif password_hash is not None:
password_sql = "%(password)s"
args["password"] = password_hash
elif not salt.utils.data.is_true(allow_passwordless):
log.error("password or password_hash must be specified, unless allow_passwordless=True")
return False
else:
password_sql = "''"
args["user"] = user
args["host"] = host
if salt.utils.versions.version_cmp(server_version, compare_version) >= 0:
args["auth_plugin"] = auth_plugin
qry = "ALTER USER %(user)s@%(host)s IDENTIFIED WITH %(auth_plugin)s "
if password is not None:
qry += "BY %(password)s;"
elif password_hash is not None:
qry += "AS %(password)s;"
else:
qry = (
"UPDATE mysql.user SET "
+ password_column
+ "="
+ password_sql
+ " WHERE User=%(user)s AND Host = %(host)s;"
)
if salt.utils.data.is_true(allow_passwordless) and salt.utils.data.is_true(unix_socket):
if host == "localhost":
if not plugin_status("auth_socket", **connection_args):
err = "The auth_socket plugin is not enabled."
log.error(err)
__context__["mysql.error"] = err
qry = False
else:
args["unix_socket"] = "auth_socket"
if salt.utils.versions.version_cmp(server_version, compare_version) >= 0:
qry = (
"ALTER USER %(user)s@%(host)s IDENTIFIED WITH %(unix_socket)s"
" AS %(user)s;"
)
else:
qry = (
"UPDATE mysql.user SET "
+ password_column
+ "="
+ password_sql
+ ", plugin=%(unix_socket)s"
+ " WHERE User=%(user)s AND Host = %(host)s;"
)
else:
log.error("Auth via unix_socket can be set only for host=localhost")
return qry, args
def _mariadb_user_chpass(
user,
host="localhost",
password=None,
password_hash=None,
allow_passwordless=False,
unix_socket=None,
password_column=None,
auth_plugin="mysql_native_password",
**connection_args,
):
server_version = salt.utils.data.decode(version(**connection_args))
compare_version = "10.4"
args = {}
if password is not None:
password_sql = "PASSWORD(%(password)s)"
args["password"] = password
elif password_hash is not None:
password_sql = "%(password)s"
args["password"] = password_hash
elif not salt.utils.data.is_true(allow_passwordless):
log.error("password or password_hash must be specified, unless allow_passwordless=True")
return False
else:
password_sql = "''"
args["user"] = user
args["host"] = host
if salt.utils.versions.version_cmp(server_version, compare_version) >= 0:
args["auth_plugin"] = auth_plugin
qry = "ALTER USER %(user)s@%(host)s IDENTIFIED VIA %(auth_plugin)s USING "
qry += password_sql
else:
qry = (
"UPDATE mysql.user SET "
+ password_column
+ "="
+ password_sql
+ " WHERE User=%(user)s AND Host = %(host)s;"
)
if salt.utils.data.is_true(allow_passwordless) and salt.utils.data.is_true(unix_socket):
if host == "localhost":
if not plugin_status("unix_socket", **connection_args):
err = "The unix_socket plugin is not enabled."
log.error(err)
__context__["mysql.error"] = err
qry = False
else:
args["unix_socket"] = "unix_socket"
qry = (
"UPDATE mysql.user SET "
+ password_column
+ "="
+ password_sql
+ ", plugin=%(unix_socket)s"
+ " WHERE User=%(user)s AND Host = %(host)s;"
)
else:
log.error("Auth via unix_socket can be set only for host=localhost")
return qry, args
def user_chpass(
user,
host="localhost",
password=None,
password_hash=None,
allow_passwordless=False,
unix_socket=None,
password_column=None,
**connection_args,
):
"""
Change password for a MySQL user
host
Host for which this user/password combo applies
password
The password to set for the new user. Will take precedence over the
``password_hash`` option if both are specified.
password_hash
The password in hashed form. Be sure to quote the password because YAML
doesn't like the ``*``. A password hash can be obtained from the mysql
command-line client like so::
mysql> SELECT PASSWORD('mypass');
+-------------------------------------------+
| PASSWORD('mypass') |
+-------------------------------------------+
| *6C8989366EAF75BB670AD8EA7A7FC1176A95CEF4 |
+-------------------------------------------+
1 row in set (0.00 sec)
allow_passwordless
If ``True``, then ``password`` and ``password_hash`` can be omitted (or
set to ``None``) to permit a passwordless login.
.. versionadded:: 0.16.2
The ``allow_passwordless`` option was added.
CLI Examples:
.. code-block:: bash
salt '*' mysql.user_chpass frank localhost newpassword
salt '*' mysql.user_chpass frank localhost password_hash='hash'
salt '*' mysql.user_chpass frank localhost allow_passwordless=True
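# Switching a user to socket authentication can (illustratively) combine the
# passwordless and unix_socket options:
salt '*' mysql.user_chpass frank localhost allow_passwordless=True unix_socket=True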
"""
server_version = salt.utils.data.decode(version(**connection_args))
if not server_version and password:
# Did we fail to connect with the user we are checking?
# Its password might have previously been changed by the same command/state.
# Clear the previous error
__context__["mysql.error"] = None
connection_args["connection_pass"] = password
server_version = salt.utils.data.decode(version(**connection_args))
if not server_version:
last_err = __context__["mysql.error"]
err = (
"MySQL Error: Unable to fetch current server version. Last error was:"
' "{}"'.format(last_err)
)
log.error(err)
return False
if not user_exists(user, host, **connection_args):
log.info("User '%s'@'%s' does not exists", user, host)
return False
dbc = _connect(**connection_args)
if dbc is None:
return False
if not password_column:
password_column = __password_column(**connection_args)
auth_plugin = __get_auth_plugin(user, host, **connection_args)
cur = dbc.cursor()
if "MariaDB" in server_version:
qry, args = _mariadb_user_chpass(
user,
host,
password,
password_hash,
allow_passwordless,
unix_socket,
password_column=password_column,
auth_plugin=auth_plugin,
**connection_args,
)
else:
qry, args = _mysql_user_chpass(
user,
host,
password,
password_hash,
allow_passwordless,
unix_socket,
password_column=password_column,
auth_plugin=auth_plugin,
**connection_args,
)
try:
result = _execute(cur, qry, args)
except MySQLdb.OperationalError as exc:
err = "MySQL Error {}: {}".format(*exc.args)
__context__["mysql.error"] = err
log.error(err)
return False
compare_version = "10.4.0" if "MariaDB" in server_version else "8.0.11"
res = False
if salt.utils.versions.version_cmp(server_version, compare_version) >= 0:
_execute(cur, "FLUSH PRIVILEGES;")
res = True
else:
if result:
_execute(cur, "FLUSH PRIVILEGES;")
res = True
if res:
log.info(
"Password for user '%s'@'%s' has been %s",
user,
host,
"changed" if any((password, password_hash)) else "cleared",
)
return True
else:
log.info(
"Password for user '%s'@'%s' was not %s",
user,
host,
"changed" if any((password, password_hash)) else "cleared",
)
return False
def user_remove(user, host="localhost", **connection_args):
"""
Delete MySQL user
CLI Example:
.. code-block:: bash
salt '*' mysql.user_remove frank localhost
"""
if not user_exists(user, host, **connection_args):
err = "User '%s'@'%s' does not exists", user, host
__context__["mysql.error"] = err
log.info(err)
return False
dbc = _connect(**connection_args)
if dbc is None:
return False
cur = dbc.cursor()
qry = "DROP USER %(user)s@%(host)s"
args = {}
args["user"] = user
args["host"] = host
try:
_execute(cur, qry, args)
except MySQLdb.OperationalError as exc:
err = "MySQL Error {}: {}".format(*exc.args)
__context__["mysql.error"] = err
log.error(err)
return False
if not user_exists(user, host, **connection_args):
log.info("User '%s'@'%s' has been removed", user, host)
return True
log.info("User '%s'@'%s' has NOT been removed", user, host)
return False
def tokenize_grant(grant):
"""
External wrapper around the internal grant tokenizer.
:param grant: The MySQL GRANT statement to tokenize
:return: dict of the parsed grant tokens
CLI Example:
.. code-block:: bash
salt '*' mysql.tokenize_grant \
"GRANT SELECT, INSERT ON testdb.* TO 'testuser'@'localhost'"
"""
return _grant_to_tokens(grant)
# Maintenance
def db_check(name, table=None, **connection_args):
"""
Checks the full database or just a given table
CLI Example:
.. code-block:: bash
salt '*' mysql.db_check dbname
salt '*' mysql.db_check dbname dbtable
"""
ret = []
if table is None:
# we need to check all tables
tables = db_tables(name, **connection_args)
for table in tables:
log.info("Checking table '%s' in db '%s'..", name, table)
ret.append(__check_table(name, table, **connection_args))
else:
log.info("Checking table '%s' in db '%s'..", name, table)
ret = __check_table(name, table, **connection_args)
return ret
def db_repair(name, table=None, **connection_args):
"""
Repairs the full database or just a given table
CLI Example:
.. code-block:: bash
salt '*' mysql.db_repair dbname
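# A single table can also be repaired (the table name is illustrative):
salt '*' mysql.db_repair dbname dbtable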
"""
ret = []
if table is None:
# we need to repair all tables
tables = db_tables(name, **connection_args)
for table in tables:
log.info("Repairing table '%s' in db '%s'..", name, table)
ret.append(__repair_table(name, table, **connection_args))
else:
log.info("Repairing table '%s' in db '%s'..", name, table)
ret = __repair_table(name, table, **connection_args)
return ret
def db_optimize(name, table=None, **connection_args):
"""
Optimizes the full database or just a given table
CLI Example:
.. code-block:: bash
salt '*' mysql.db_optimize dbname
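# A single table can also be optimized (the table name is illustrative):
salt '*' mysql.db_optimize dbname dbtable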
"""
ret = []
if table is None:
# we need to optimize all tables
tables = db_tables(name, **connection_args)
for table in tables:
log.info("Optimizing table '%s' in db '%s'..", name, table)
ret.append(__optimize_table(name, table, **connection_args))
else:
log.info("Optimizing table '%s' in db '%s'..", name, table)
ret = __optimize_table(name, table, **connection_args)
return ret
# Grants
def __grant_normalize(grant):
# MySQL normalizes ALL to ALL PRIVILEGES, we do the same so that
# grant_exists and grant_add ALL work correctly
if grant.strip().upper() == "ALL":
grant = "ALL PRIVILEGES"
# Grants are pasted directly into the SQL, so they must be validated
exploded_grants = __grant_split(grant)
for chkgrant, _ in exploded_grants:
if chkgrant.strip().upper() not in __grants__:
raise ValueError(f"Invalid grant : '{chkgrant}'")
return grant
def __grant_split(grant):
pattern = re.compile(r"([\w\s]+)(\([^)(]*\))?\s*,?")
return pattern.findall(grant)
def __ssl_option_sanitize(ssl_option):
new_ssl_option = []
# Like most other "salt dsl" YAML structures, ssl_option is a list of single-element dicts
for opt in ssl_option:
key = next(iter(opt.keys()))
normal_key = key.strip().upper()
if normal_key not in __ssl_options__:
raise ValueError(f"Invalid SSL option : '{key}'")
if normal_key in __ssl_options_parameterized__:
# SSL option parameters (cipher, issuer, subject) are pasted directly to SQL so
# we need to sanitize for single quotes...
new_ssl_option.append("{} '{}'".format(normal_key, opt[key].replace("'", "")))
# omit if falsey
elif opt[key]:
new_ssl_option.append(normal_key)
return " REQUIRE " + " AND ".join(new_ssl_option)
def __grant_generate(
grant,
database,
user,
host="localhost",
grant_option=False,
escape=True,
ssl_option=False,
):
"""
Validate grants and build the query that could set the given grants
Note that this query contains arguments for user and host but not for
grants or database.
"""
# TODO: Re-order the grant so it is according to the
# SHOW GRANTS for xxx@yyy query (SELECT comes first, etc)
grant = re.sub(r"\s*,\s*", ", ", grant).upper()
grant = __grant_normalize(grant)
db_part = database.rpartition(".")
dbc = db_part[0]
table = db_part[2]
if escape:
if dbc != "*":
# _ and % are authorized on GRANT queries and should get escaped
# on the db name, but only if not requesting a table level grant
dbc = quote_identifier(dbc, for_grants=table == "*")
if table != "*":
table = quote_identifier(table)
# identifiers cannot be used as values, and same thing for grants
qry = f"GRANT {grant} ON {dbc}.{table} TO %(user)s@%(host)s"
args = {}
args["user"] = user
args["host"] = host
if ssl_option and isinstance(ssl_option, list):
qry += __ssl_option_sanitize(ssl_option)
if salt.utils.data.is_true(grant_option):
qry += " WITH GRANT OPTION"
log.debug("Grant Query generated: %s args %s", qry, repr(args))
return {"qry": qry, "args": args}
def user_grants(user, host="localhost", **connection_args):
"""
Shows the grants for the given MySQL user (if it exists)
CLI Example:
.. code-block:: bash
salt '*' mysql.user_grants 'frank' 'localhost'
"""
if not user_exists(user, host, **connection_args):
log.info("User '%s'@'%s' does not exist", user, host)
return False
dbc = _connect(**connection_args)
if dbc is None:
return False
cur = dbc.cursor()
qry = "SHOW GRANTS FOR %(user)s@%(host)s"
args = {}
args["user"] = user
args["host"] = host
try:
_execute(cur, qry, args)
except MySQLdb.OperationalError as exc:
err = "MySQL Error {}: {}".format(*exc.args)
__context__["mysql.error"] = err
log.error(err)
return False
ret = []
results = salt.utils.data.decode(cur.fetchall())
for grant in results:
tmp = grant[0].split(" IDENTIFIED BY")[0]
if "WITH GRANT OPTION" in grant[0] and "WITH GRANT OPTION" not in tmp:
tmp = f"{tmp} WITH GRANT OPTION"
ret.append(tmp)
log.debug(ret)
return ret
def grant_exists(
grant, database, user, host="localhost", grant_option=False, escape=True, **connection_args
):
"""
Checks to see if a grant exists in the database
CLI Example:
.. code-block:: bash
salt '*' mysql.grant_exists \
'SELECT,INSERT,UPDATE,...' 'database.*' 'frank' 'localhost'
"""
server_version = salt.utils.data.decode(version(**connection_args))
if not server_version:
last_err = __context__["mysql.error"]
err = 'MySQL Error: Unable to fetch current server version. Last error was: "{}"'.format(
last_err
)
log.error(err)
return False
if "ALL" in grant.upper():
if (
salt.utils.versions.version_cmp(server_version, "8.0") >= 0
and "MariaDB" not in server_version
and database == "*.*"
):
grant = ",".join([i for i in __all_privileges__])
else:
grant = "ALL PRIVILEGES"
try:
target = __grant_generate(grant, database, user, host, grant_option, escape)
except Exception as exc: # pylint: disable=broad-except
log.error("Error during grant generation.")
log.error(exc)
return False
grants = user_grants(user, host, **connection_args)
if grants is False:
log.error(
"Grant does not exist or may not be ordered properly. In some cases, "
"this could also indicate a connection error. Check your configuration."
)
return False
# Combine grants that match the same database
_grants = {}
for grant in grants:
grant_token = _grant_to_tokens(grant)
grant_token["grant"] = _resolve_grant_aliases(grant_token["grant"], server_version)
if grant_token["database"] not in _grants:
_grants[grant_token["database"]] = {
"user": grant_token["user"],
"database": grant_token["database"],
"host": grant_token["host"],
"grant": grant_token["grant"],
}
else:
_grants[grant_token["database"]]["grant"].extend(grant_token["grant"])
target_tokens = _grant_to_tokens(target)
target_tokens["grant"] = _resolve_grant_aliases(target_tokens["grant"], server_version)
for database, grant_tokens in _grants.items():
try:
_grant_tokens = {}
_target_tokens = {}
_grant_matches = [
True if i in grant_tokens["grant"] else False for i in target_tokens["grant"]
]
for item in ["user", "database", "host"]:
_grant_tokens[item] = (
grant_tokens[item].replace('"', "").replace("\\", "").replace("`", "")
)
_target_tokens[item] = (
target_tokens[item].replace('"', "").replace("\\", "").replace("`", "")
)
if (
_grant_tokens["user"] == _target_tokens["user"]
and _grant_tokens["database"] == _target_tokens["database"]
and _grant_tokens["host"] == _target_tokens["host"]
and all(_grant_matches)
):
return True
else:
log.debug("grants mismatch '%s'<>'%s'", grant_tokens, target_tokens)
except Exception as exc: # pylint: disable=broad-except
# Fallback to strict parsing
log.exception(exc)
if grants is not False and target in grants:
log.debug("Grant exists.")
return True
log.debug("Grant does not exist, or is perhaps not ordered properly?")
return False
def grant_add(
grant,
database,
user,
host="localhost",
grant_option=False,
escape=True,
ssl_option=False,
**connection_args,
):
"""
Adds a grant to the MySQL server.
For database, make sure you specify database.table or database.*
CLI Example:
.. code-block:: bash
salt '*' mysql.grant_add \
'SELECT,INSERT,UPDATE,...' 'database.*' 'frank' 'localhost'
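# The WITH GRANT OPTION flag can be added as well (illustrative):
salt '*' mysql.grant_add 'SELECT,INSERT,UPDATE' 'database.*' 'frank' 'localhost' grant_option=True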
"""
dbc = _connect(**connection_args)
if dbc is None:
return False
cur = dbc.cursor()
# Avoid spaces problems
grant = grant.strip()
try:
qry = __grant_generate(grant, database, user, host, grant_option, escape, ssl_option)
except Exception: # pylint: disable=broad-except
log.error("Error during grant generation")
return False
try:
_execute(cur, qry["qry"], qry["args"])
except (MySQLdb.OperationalError, MySQLdb.ProgrammingError) as exc:
err = "MySQL Error {}: {}".format(*exc.args)
__context__["mysql.error"] = err
log.error(err)
return False
if grant_exists(grant, database, user, host, grant_option, escape, **connection_args):
log.info("Grant '%s' on '%s' for user '%s' has been added", grant, database, user)
return True
log.info("Grant '%s' on '%s' for user '%s' has NOT been added", grant, database, user)
return False
def grant_revoke(
grant, database, user, host="localhost", grant_option=False, escape=True, **connection_args
):
"""
Removes a grant from the MySQL server.
CLI Example:
.. code-block:: bash
salt '*' mysql.grant_revoke \
'SELECT,INSERT,UPDATE' 'database.*' 'frank' 'localhost'
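# Revoking the GRANT OPTION along with the privileges (illustrative):
salt '*' mysql.grant_revoke 'SELECT,INSERT,UPDATE' 'database.*' 'frank' 'localhost' grant_option=True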
"""
dbc = _connect(**connection_args)
if dbc is None:
return False
cur = dbc.cursor()
grant = __grant_normalize(grant)
if salt.utils.data.is_true(grant_option):
grant += ", GRANT OPTION"
db_part = database.rpartition(".")
dbc = db_part[0]
table = db_part[2]
if dbc != "*":
# _ and % are authorized on GRANT queries and should get escaped
# on the db name, but only if not requesting a table level grant
s_database = quote_identifier(dbc, for_grants=table == "*")
else:
# Add support for revoking on *.*
# Before this change, the query sent to MySQL looked like:
# REVOKE SELECT ON `*`.* FROM %(user)s@%(host)s
s_database = dbc
if table != "*":
table = quote_identifier(table)
# identifiers cannot be used as values, same thing for grants
qry = f"REVOKE {grant} ON {s_database}.{table} FROM %(user)s@%(host)s;"
args = {}
args["user"] = user
args["host"] = host
try:
_execute(cur, qry, args)
except MySQLdb.OperationalError as exc:
err = "MySQL Error {}: {}".format(*exc.args)
__context__["mysql.error"] = err
log.error(err)
return False
if not grant_exists(grant, database, user, host, grant_option, escape, **connection_args):
log.info(
"Grant '%s' on '%s' for user '%s' has been revoked",
grant,
database,
user,
)
return True
log.info(
"Grant '%s' on '%s' for user '%s' has NOT been revoked",
grant,
database,
user,
)
return False
def processlist(**connection_args):
"""
Retrieves the processlist from the MySQL server via
"SHOW FULL PROCESSLIST".
Returns: a list of dicts, with each dict representing a process:
.. code-block:: python
{
"Command": "Query",
"Host": "localhost",
"Id": 39,
"Info": "SHOW FULL PROCESSLIST",
"Rows_examined": 0,
"Rows_read": 1,
"Rows_sent": 0,
"State": None,
"Time": 0,
"User": "root",
"db": "mysql",
}
CLI Example:
.. code-block:: bash
salt '*' mysql.processlist
"""
ret = []
dbc = _connect(**connection_args)
if dbc is None:
return []
cur = dbc.cursor()
_execute(cur, "SHOW FULL PROCESSLIST")
hdr = [c[0] for c in cur.description]
for _ in range(cur.rowcount):
row = cur.fetchone()
idx_r = {}
for idx_j, col_name in enumerate(hdr):
idx_r[col_name] = row[idx_j]
ret.append(idx_r)
cur.close()
return ret
def __do_query_into_hash(conn, sql_str):
"""
Perform the query that is passed to it (sql_str).
Returns:
results as a list of dicts (one dict per row).
"""
mod = sys._getframe().f_code.co_name
log.debug("%s<--(%s)", mod, sql_str)
rtn_results = []
try:
cursor = conn.cursor()
except MySQLdb.MySQLError:
log.error("%s: Can't get cursor for SQL->%s", mod, sql_str)
log.debug("%s-->", mod)
return rtn_results
try:
_execute(cursor, sql_str)
except MySQLdb.MySQLError:
log.error("%s: try to execute : SQL->%s", mod, sql_str)
cursor.close()
log.debug("%s-->", mod)
return rtn_results
qrs = cursor.fetchall()
for row_data in qrs:
col_cnt = 0
row = {}
for col_data in cursor.description:
col_name = col_data[0]
row[col_name] = row_data[col_cnt]
col_cnt += 1
rtn_results.append(row)
cursor.close()
log.debug("%s-->", mod)
return rtn_results
def get_master_status(**connection_args):
"""
Retrieves the master status from the minion.
Returns::
{'host.domain.com': {'Binlog_Do_DB': '',
'Binlog_Ignore_DB': '',
'File': 'mysql-bin.000021',
'Position': 107}}
CLI Example:
.. code-block:: bash
salt '*' mysql.get_master_status
"""
mod = sys._getframe().f_code.co_name
log.debug("%s<--", mod)
conn = _connect(**connection_args)
if conn is None:
return []
rtnv = __do_query_into_hash(conn, "SHOW MASTER STATUS")
conn.close()
# If this minion is not a master, SHOW MASTER STATUS returns no rows
if not rtnv:
rtnv.append([])
log.debug("%s-->%s", mod, len(rtnv[0]))
return rtnv[0]
def get_slave_status(**connection_args):
"""
Retrieves the slave status from the minion.
Returns::
{'host.domain.com': {'Connect_Retry': 60,
'Exec_Master_Log_Pos': 107,
'Last_Errno': 0,
'Last_Error': '',
'Last_IO_Errno': 0,
'Last_IO_Error': '',
'Last_SQL_Errno': 0,
'Last_SQL_Error': '',
'Master_Host': 'comet.scion-eng.com',
'Master_Log_File': 'mysql-bin.000021',
'Master_Port': 3306,
'Master_SSL_Allowed': 'No',
'Master_SSL_CA_File': '',
'Master_SSL_CA_Path': '',
'Master_SSL_Cert': '',
'Master_SSL_Cipher': '',
'Master_SSL_Key': '',
'Master_SSL_Verify_Server_Cert': 'No',
'Master_Server_Id': 1,
'Master_User': 'replu',
'Read_Master_Log_Pos': 107,
'Relay_Log_File': 'klo-relay-bin.000071',
'Relay_Log_Pos': 253,
'Relay_Log_Space': 553,
'Relay_Master_Log_File': 'mysql-bin.000021',
'Replicate_Do_DB': '',
'Replicate_Do_Table': '',
'Replicate_Ignore_DB': '',
'Replicate_Ignore_Server_Ids': '',
'Replicate_Ignore_Table': '',
'Replicate_Wild_Do_Table': '',
'Replicate_Wild_Ignore_Table': '',
'Seconds_Behind_Master': 0,
'Skip_Counter': 0,
'Slave_IO_Running': 'Yes',
'Slave_IO_State': 'Waiting for master to send event',
'Slave_SQL_Running': 'Yes',
'Until_Condition': 'None',
'Until_Log_File': '',
'Until_Log_Pos': 0}}
CLI Example:
.. code-block:: bash
salt '*' mysql.get_slave_status
"""
mod = sys._getframe().f_code.co_name
log.debug("%s<--", mod)
conn = _connect(**connection_args)
if conn is None:
return []
rtnv = __do_query_into_hash(conn, "SHOW SLAVE STATUS")
conn.close()
# If this minion is not a slave, SHOW SLAVE STATUS returns no rows
if not rtnv:
rtnv.append([])
log.debug("%s-->%s", mod, len(rtnv[0]))
return rtnv[0]
def showvariables(**connection_args):
"""
Retrieves the show variables from the minion.
Returns::
show variables full dict
CLI Example:
.. code-block:: bash
salt '*' mysql.showvariables
"""
mod = sys._getframe().f_code.co_name
log.debug("%s<--", mod)
conn = _connect(**connection_args)
if conn is None:
return []
rtnv = __do_query_into_hash(conn, "SHOW VARIABLES")
conn.close()
if not rtnv:
rtnv.append([])
log.debug("%s-->%s", mod, len(rtnv[0]))
return rtnv
def showglobal(**connection_args):
"""
Retrieves the show global variables from the minion.
Returns::
show global variables full dict
CLI Example:
.. code-block:: bash
salt '*' mysql.showglobal
"""
mod = sys._getframe().f_code.co_name
log.debug("%s<--", mod)
conn = _connect(**connection_args)
if conn is None:
return []
rtnv = __do_query_into_hash(conn, "SHOW GLOBAL VARIABLES")
conn.close()
if not rtnv:
rtnv.append([])
log.debug("%s-->%s", mod, len(rtnv[0]))
return rtnv
def verify_login(user, password=None, **connection_args):
"""
Attempt to log in using the provided credentials.
If successful, return True. Otherwise, return False.
CLI Example:
.. code-block:: bash
salt '*' mysql.verify_login root password
"""
# Override the connection args for username and password
connection_args["connection_user"] = user
connection_args["connection_pass"] = password
dbc = _connect(**connection_args)
if dbc is None:
# Clear the mysql.error if unable to connect
# if the connection fails, we simply return False
if "mysql.error" in __context__:
del __context__["mysql.error"]
return False
return True
def plugins_list(**connection_args):
"""
Return a list of plugins and their status
from the ``SHOW PLUGINS`` query.
CLI Example:
.. code-block:: bash
salt '*' mysql.plugins_list
"""
dbc = _connect(**connection_args)
if dbc is None:
return []
cur = dbc.cursor()
qry = "SHOW PLUGINS"
try:
_execute(cur, qry)
except MySQLdb.OperationalError as exc:
err = "MySQL Error {}: {}".format(*exc.args)
__context__["mysql.error"] = err
log.error(err)
return []
ret = []
results = cur.fetchall()
for dbs in results:
ret.append({"name": dbs[0], "status": dbs[1]})
log.debug(ret)
return ret
def plugin_add(name, soname=None, **connection_args):
"""
Add a plugin.
CLI Example:
.. code-block:: bash
salt '*' mysql.plugin_add auth_socket
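# The shared object name can be given explicitly if it differs from <name>.so
# (the value below is only an illustration):
salt '*' mysql.plugin_add auth_socket soname='auth_socket.so'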
"""
if not name:
log.error("Plugin name is required.")
return False
if plugin_status(name, **connection_args):
log.error("Plugin %s is already installed.", name)
return True
dbc = _connect(**connection_args)
if dbc is None:
return False
cur = dbc.cursor()
qry = f"INSTALL PLUGIN {name}"
if soname:
qry += f' SONAME "{soname}"'
else:
qry += f' SONAME "{name}.so"'
try:
_execute(cur, qry)
except MySQLdb.OperationalError as exc:
err = "MySQL Error {}: {}".format(*exc.args)
__context__["mysql.error"] = err
log.error(err)
return False
return True
def plugin_remove(name, **connection_args):
"""
Remove a plugin.
CLI Example:
.. code-block:: bash
salt '*' mysql.plugin_remove auth_socket
"""
if not name:
log.error("Plugin name is required.")
return False
if not plugin_status(name, **connection_args):
log.error("Plugin %s is not installed.", name)
return True
dbc = _connect(**connection_args)
if dbc is None:
return False
cur = dbc.cursor()
qry = f"UNINSTALL PLUGIN {name}"
args = {}
args["name"] = name
try:
_execute(cur, qry)
except MySQLdb.OperationalError as exc:
err = "MySQL Error {}: {}".format(*exc.args)
__context__["mysql.error"] = err
log.error(err)
return False
return True
def plugin_status(name, **connection_args):
"""
Return the status of a plugin.
CLI Example:
.. code-block:: bash
salt '*' mysql.plugin_status auth_socket
"""
if not name:
log.error("Plugin name is required.")
return False
dbc = _connect(**connection_args)
if dbc is None:
return ""
cur = dbc.cursor()
qry = "SELECT PLUGIN_STATUS FROM INFORMATION_SCHEMA.PLUGINS WHERE PLUGIN_NAME = %(name)s"
args = {}
args["name"] = name
try:
_execute(cur, qry, args)
except MySQLdb.OperationalError as exc:
err = "MySQL Error {}: {}".format(*exc.args)
__context__["mysql.error"] = err
log.error(err)
return ""
try:
status = cur.fetchone()
if status is None:
return ""
else:
return status[0]
except IndexError:
return ""
070701000000B7000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003D00000000test-repo-1-0.1/saltext_mysql-1.0.0/src/saltext/mysql/pillar070701000000B8000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004900000000test-repo-1-0.1/saltext_mysql-1.0.0/src/saltext/mysql/pillar/__init__.py070701000000B9000081A400000000000000000000000167471E9C00000F12000000000000000000000000000000000000004600000000test-repo-1-0.1/saltext_mysql-1.0.0/src/saltext/mysql/pillar/mysql.py"""
Retrieve Pillar data by doing a MySQL query
MariaDB provides Python support through the MySQL Python package.
Therefore, you may use this module with either MySQL or MariaDB.
This module is a concrete implementation of the sql_base ext_pillar for MySQL.
:maturity: new
:depends: python-mysqldb
:platform: all
Configuring the mysql ext_pillar
================================
Use the 'mysql' key under ext_pillar for configuration of queries.
The MySQL configuration of the MySQL returner (mysql.db, mysql.user,
mysql.pass, mysql.port, mysql.host) is reused for database connection info.
Required python modules: MySQLdb
Complete example
================
.. code-block:: yaml
mysql:
user: 'salt'
pass: 'super_secret_password'
db: 'salt_db'
port: 3306
ssl:
cert: /etc/mysql/client-cert.pem
key: /etc/mysql/client-key.pem
ext_pillar:
- mysql:
fromdb:
query: 'SELECT col1,col2,col3,col4,col5,col6,col7
FROM some_random_table
WHERE minion_pattern LIKE %s'
depth: 5
as_list: True
with_lists: [1,3]
"""
import logging
from contextlib import contextmanager
from salt.pillar.sql_base import SqlBaseExtPillar
# Set up logging
log = logging.getLogger(__name__)
try:
# Trying to import MySQLdb
import MySQLdb
import MySQLdb.converters
import MySQLdb.cursors
except ImportError:
try:
# MySQLdb import failed, try to import PyMySQL
import pymysql
pymysql.install_as_MySQLdb()
import MySQLdb
import MySQLdb.converters
import MySQLdb.cursors
except ImportError:
MySQLdb = None
def __virtual__():
"""
Confirm that a python mysql client is installed.
"""
return bool(MySQLdb), "No python mysql client installed." if MySQLdb is None else ""
class MySQLExtPillar(SqlBaseExtPillar):
"""
This class receives and processes the database rows from MySQL.
"""
@classmethod
def _db_name(cls):
return "MySQL"
def _get_options(self):
"""
Returns options used for the MySQL connection.
"""
defaults = {
"host": "localhost",
"user": "salt",
"pass": "salt",
"db": "salt",
"port": 3306,
"ssl": {},
}
_options = {}
_opts = __opts__.get("mysql", {})
for attr, default in defaults.items():
if attr not in _opts:
log.debug("Using default for MySQL %s", attr)
_options[attr] = default
continue
_options[attr] = _opts[attr]
return _options
@contextmanager
def _get_cursor(self):
"""
Yield a MySQL cursor
"""
_options = self._get_options()
conn = MySQLdb.connect(
host=_options["host"],
user=_options["user"],
passwd=_options["pass"],
db=_options["db"],
port=_options["port"],
ssl=_options["ssl"],
)
cursor = conn.cursor()
try:
yield cursor
except MySQLdb.DatabaseError as err:
log.exception("Error in ext_pillar MySQL: %s", err.args)
finally:
conn.close()
def extract_queries(self, args, kwargs): # pylint: disable=useless-super-delegation
"""
This function normalizes the config block into a set of queries we
can use. The return is a list of consistently laid out dicts.
"""
return super().extract_queries(args, kwargs)
def ext_pillar(minion_id, pillar, *args, **kwargs):
"""
Execute queries against MySQL, merge and return as a dict
"""
return MySQLExtPillar().fetch(minion_id, pillar, *args, **kwargs)
070701000000BA000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000004000000000test-repo-1-0.1/saltext_mysql-1.0.0/src/saltext/mysql/returners070701000000BB000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004C00000000test-repo-1-0.1/saltext_mysql-1.0.0/src/saltext/mysql/returners/__init__.py070701000000BC000081A400000000000000000000000167471E9C0000491B000000000000000000000000000000000000004900000000test-repo-1-0.1/saltext_mysql-1.0.0/src/saltext/mysql/returners/mysql.py"""
Return data to a mysql server
:maintainer: Dave Boucha <dave@saltstack.com>, Seth House <shouse@saltstack.com>
:maturity: mature
:depends: python-mysqldb
:platform: all
To enable this returner, the minion will need the python client for mysql
installed and the following values configured in the minion or master
config. These are the defaults:
.. code-block:: yaml
mysql.host: 'salt'
mysql.user: 'salt'
mysql.pass: 'salt'
mysql.db: 'salt'
mysql.port: 3306
SSL is optional. The defaults are set to None. If you do not want to use SSL,
either exclude these options or set them to None.
.. code-block:: yaml
mysql.ssl_ca: None
mysql.ssl_cert: None
mysql.ssl_key: None
Alternative configuration values can be used by prefacing the configuration
with `alternative.`. Any values not found in the alternative configuration will
be pulled from the default location. As stated above, SSL configuration is
optional. The following ssl options are simply for illustration purposes:
.. code-block:: yaml
alternative.mysql.host: 'salt'
alternative.mysql.user: 'salt'
alternative.mysql.pass: 'salt'
alternative.mysql.db: 'salt'
alternative.mysql.port: 3306
alternative.mysql.ssl_ca: '/etc/pki/mysql/certs/localhost.pem'
alternative.mysql.ssl_cert: '/etc/pki/mysql/certs/localhost.crt'
alternative.mysql.ssl_key: '/etc/pki/mysql/certs/localhost.key'
Should you wish the returner data to be cleaned out every so often, set
`keep_jobs_seconds` to the number of seconds for the jobs to live in the
tables. Setting it to `0` will cause the data to stay in the tables. The
default setting for `keep_jobs_seconds` is `86400` (one day).
Should you wish to archive jobs in a different table for later processing,
set `archive_jobs` to True. Salt will create 3 archive tables
- `jids_archive`
- `salt_returns_archive`
- `salt_events_archive`
and move the contents of `jids`, `salt_returns`, and `salt_events` that are
more than `keep_jobs_seconds` seconds old to these tables.
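A minimal sketch of the corresponding master configuration (the values shown
are illustrative):
.. code-block:: yaml
keep_jobs_seconds: 86400
archive_jobs: True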
Use the following mysql database schema:
.. code-block:: sql
CREATE DATABASE `salt`
DEFAULT CHARACTER SET utf8
DEFAULT COLLATE utf8_general_ci;
USE `salt`;
--
-- Table structure for table `jids`
--
DROP TABLE IF EXISTS `jids`;
CREATE TABLE `jids` (
`jid` varchar(255) NOT NULL,
`load` mediumtext NOT NULL,
UNIQUE KEY `jid` (`jid`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `salt_returns`
--
DROP TABLE IF EXISTS `salt_returns`;
CREATE TABLE `salt_returns` (
`fun` varchar(50) NOT NULL,
`jid` varchar(255) NOT NULL,
`return` mediumtext NOT NULL,
`id` varchar(255) NOT NULL,
`success` varchar(10) NOT NULL,
`full_ret` mediumtext NOT NULL,
`alter_time` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
KEY `id` (`id`),
KEY `jid` (`jid`),
KEY `fun` (`fun`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `salt_events`
--
DROP TABLE IF EXISTS `salt_events`;
CREATE TABLE `salt_events` (
`id` BIGINT NOT NULL AUTO_INCREMENT,
`tag` varchar(255) NOT NULL,
`data` mediumtext NOT NULL,
`alter_time` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
`master_id` varchar(255) NOT NULL,
PRIMARY KEY (`id`),
KEY `tag` (`tag`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
Required python modules: MySQLdb
To use the mysql returner, append '--return mysql' to the salt command.
.. code-block:: bash
salt '*' test.ping --return mysql
To use the alternative configuration, append '--return_config alternative' to the salt command.
.. versionadded:: 2015.5.0
.. code-block:: bash
salt '*' test.ping --return mysql --return_config alternative
To override individual configuration items, append --return_kwargs '{"key:": "value"}' to the salt command.
.. versionadded:: 2016.3.0
.. code-block:: bash
salt '*' test.ping --return mysql --return_kwargs '{"db": "another-salt"}'
"""
import logging
import sys
from contextlib import contextmanager
import salt.exceptions
import salt.returners
import salt.utils.data
import salt.utils.job
import salt.utils.json
try:
# Trying to import MySQLdb
import MySQLdb
import MySQLdb.converters
import MySQLdb.cursors
from MySQLdb.connections import OperationalError
except ImportError:
try:
# MySQLdb import failed, try to import PyMySQL
import pymysql
pymysql.install_as_MySQLdb()
import MySQLdb
import MySQLdb.converters
import MySQLdb.cursors
from MySQLdb.err import OperationalError
except ImportError:
MySQLdb = None
log = logging.getLogger(__name__)
# Define the module's virtual name
__virtualname__ = "mysql"
def __virtual__():
"""
Confirm that a python mysql client is installed.
"""
return bool(MySQLdb), "No python mysql client installed." if MySQLdb is None else ""
def _get_options(ret=None):
"""
Returns options used for the MySQL connection.
"""
defaults = {
"host": "salt",
"user": "salt",
"pass": "salt",
"db": "salt",
"port": 3306,
"ssl_ca": None,
"ssl_cert": None,
"ssl_key": None,
}
attrs = {
"host": "host",
"user": "user",
"pass": "pass",
"db": "db",
"port": "port",
"ssl_ca": "ssl_ca",
"ssl_cert": "ssl_cert",
"ssl_key": "ssl_key",
}
_options = salt.returners.get_returner_options(
__virtualname__,
ret,
attrs,
__salt__=__salt__,
__opts__=__opts__,
defaults=defaults,
)
# post processing
for k, v in _options.items():
if isinstance(v, str) and v.lower() == "none":
# Ensure 'None' is rendered as None
_options[k] = None
if k == "port":
# Ensure port is an int
_options[k] = int(v)
return _options
@contextmanager
def _get_serv(ret=None, commit=False):
"""
Return a mysql cursor
"""
_options = _get_options(ret)
connect = True
if __context__ and "mysql_returner_conn" in __context__:
try:
log.debug("Trying to reuse MySQL connection pool")
conn = __context__["mysql_returner_conn"]
conn.ping()
connect = False
except OperationalError as exc:
log.debug("OperationalError on ping: %s", exc)
if connect:
log.debug("Generating new MySQL connection pool")
try:
# An empty ssl_options dictionary passed to MySQLdb.connect will
# effectively connect w/o SSL.
ssl_options = {}
if _options.get("ssl_ca"):
ssl_options["ca"] = _options.get("ssl_ca")
if _options.get("ssl_cert"):
ssl_options["cert"] = _options.get("ssl_cert")
if _options.get("ssl_key"):
ssl_options["key"] = _options.get("ssl_key")
conn = MySQLdb.connect(
host=_options.get("host"),
user=_options.get("user"),
passwd=_options.get("pass"),
db=_options.get("db"),
port=_options.get("port"),
ssl=ssl_options,
)
try:
__context__["mysql_returner_conn"] = conn
except TypeError:
pass
except OperationalError as exc:
raise salt.exceptions.SaltMasterError(
f"MySQL returner could not connect to database: {exc}"
)
cursor = conn.cursor()
try:
yield cursor
except MySQLdb.DatabaseError as err:
error = err.args
sys.stderr.write(str(error))
cursor.execute("ROLLBACK")
raise
else:
if commit:
cursor.execute("COMMIT")
else:
cursor.execute("ROLLBACK")
def returner(ret):
"""
Return data to a mysql server
"""
# if a minion is returning a standalone job, get a jobid
if ret["jid"] == "req":
ret["jid"] = prep_jid(nocache=ret.get("nocache", False))
save_load(ret["jid"], ret)
try:
with _get_serv(ret, commit=True) as cur:
sql = """INSERT INTO `salt_returns`
(`fun`, `jid`, `return`, `id`, `success`, `full_ret`)
VALUES (%s, %s, %s, %s, %s, %s)"""
cleaned_return = salt.utils.data.decode(ret)
cur.execute(
sql,
(
ret["fun"],
ret["jid"],
salt.utils.json.dumps(cleaned_return["return"]),
ret["id"],
ret.get("success", False),
salt.utils.json.dumps(cleaned_return),
),
)
except salt.exceptions.SaltMasterError as exc:
log.critical(exc)
log.critical("Could not store return with MySQL returner. MySQL server unavailable.")
def event_return(events):
"""
Return event to mysql server
Requires that configuration be enabled via 'event_return'
option in master config.
"""
with _get_serv(events, commit=True) as cur:
for event in events:
tag = event.get("tag", "")
data = event.get("data", "")
sql = """INSERT INTO `salt_events` (`tag`, `data`, `master_id`)
VALUES (%s, %s, %s)"""
cur.execute(sql, (tag, salt.utils.json.dumps(data), __opts__["id"]))
def save_load(jid, load, minions=None):
"""
Save the load to the specified jid id
"""
with _get_serv(commit=True) as cur:
sql = """INSERT INTO `jids` (`jid`, `load`) VALUES (%s, %s)"""
json_data = salt.utils.json.dumps(salt.utils.data.decode(load))
try:
cur.execute(sql, (jid, json_data))
except MySQLdb.IntegrityError:
# https://github.com/saltstack/salt/issues/22171
# Without this try/except we get tons of duplicate entry errors
# which result in job returns not being stored properly
pass
def save_minions(jid, minions, syndic_id=None): # pylint: disable=unused-argument
"""
Included for API consistency
"""
def get_load(jid):
"""
Return the load data that marks a specified jid
"""
with _get_serv(ret=None, commit=True) as cur:
sql = """SELECT `load` FROM `jids` WHERE `jid` = %s;"""
cur.execute(sql, (jid,))
data = cur.fetchone()
if data:
return salt.utils.json.loads(data[0])
return {}
def get_jid(jid):
"""
Return the information returned when the specified job id was executed
"""
with _get_serv(ret=None, commit=True) as cur:
sql = """SELECT id, full_ret FROM `salt_returns`
WHERE `jid` = %s"""
cur.execute(sql, (jid,))
data = cur.fetchall()
ret = {}
if data:
for minion, full_ret in data:
ret[minion] = salt.utils.json.loads(full_ret)
return ret
def get_fun(fun):
"""
Return a dict of the last function called for all minions
"""
with _get_serv(ret=None, commit=True) as cur:
sql = """SELECT s.id,s.jid, s.full_ret
FROM `salt_returns` s
JOIN ( SELECT MAX(`jid`) as jid
from `salt_returns` GROUP BY fun, id) max
ON s.jid = max.jid
WHERE s.fun = %s
"""
cur.execute(sql, (fun,))
data = cur.fetchall()
ret = {}
if data:
for minion, _, full_ret in data:
ret[minion] = salt.utils.json.loads(full_ret)
return ret
def get_jids():
"""
Return a list of all job ids
"""
with _get_serv(ret=None, commit=True) as cur:
sql = """SELECT DISTINCT `jid`, `load`
FROM `jids`"""
cur.execute(sql)
data = cur.fetchall()
ret = {}
for jid in data:
ret[jid[0]] = salt.utils.jid.format_jid_instance(jid[0], salt.utils.json.loads(jid[1]))
return ret
def get_jids_filter(count, filter_find_job=True):
"""
Return a list of all job ids
:param int count: show not more than the count of most recent jobs
:param bool filter_find_job: filter out 'saltutil.find_job' jobs
"""
with _get_serv(ret=None, commit=True) as cur:
sql = """SELECT * FROM (
SELECT DISTINCT `jid` ,`load` FROM `jids`
{0}
ORDER BY `jid` DESC limit {1}
) `tmp`
ORDER BY `jid`;"""
where = """WHERE `load` NOT LIKE '%"fun": "saltutil.find_job"%' """
cur.execute(sql.format(where if filter_find_job else "", count))
data = cur.fetchall()
ret = []
for jid in data:
ret.append(
salt.utils.jid.format_jid_instance_ext(jid[0], salt.utils.json.loads(jid[1]))
)
return ret
def get_minions():
"""
Return a list of minions
"""
with _get_serv(ret=None, commit=True) as cur:
sql = """SELECT DISTINCT id
FROM `salt_returns`"""
cur.execute(sql)
data = cur.fetchall()
ret = []
for minion in data:
ret.append(minion[0])
return ret
def prep_jid(nocache=False, passed_jid=None): # pylint: disable=unused-argument
"""
Do any work necessary to prepare a JID, including sending a custom id
"""
return passed_jid if passed_jid is not None else salt.utils.jid.gen_jid(__opts__)
def _purge_jobs(timestamp):
"""
Purge records from the returner tables.
:param timestamp: Purge jobs whose ``alter_time`` is older than this timestamp
:return:
"""
with _get_serv() as cur:
try:
sql = (
"delete from `jids` where jid in (select distinct jid from salt_returns"
" where alter_time < %s)"
)
cur.execute(sql, (timestamp,))
cur.execute("COMMIT")
except MySQLdb.Error as e:
log.error("mysql returner archiver was unable to delete contents of table 'jids'")
log.error(str(e))
raise salt.exceptions.SaltRunnerError(str(e))
try:
sql = "delete from `salt_returns` where alter_time < %s"
cur.execute(sql, (timestamp,))
cur.execute("COMMIT")
except MySQLdb.Error as e:
log.error(
"mysql returner archiver was unable to delete contents of table 'salt_returns'"
)
log.error(str(e))
raise salt.exceptions.SaltRunnerError(str(e))
try:
sql = "delete from `salt_events` where alter_time < %s"
cur.execute(sql, (timestamp,))
cur.execute("COMMIT")
except MySQLdb.Error as e:
log.error(
"mysql returner archiver was unable to delete contents of table 'salt_events'"
)
log.error(str(e))
raise salt.exceptions.SaltRunnerError(str(e))
return True
def _archive_jobs(timestamp):
"""
Copy rows to a set of backup tables, then purge rows.
:param timestamp: Archive rows older than this timestamp
:return:
"""
source_tables = ["jids", "salt_returns", "salt_events"]
with _get_serv() as cur:
target_tables = {}
for table_name in source_tables:
try:
tmp_table_name = table_name + "_archive"
sql = f"create table if not exists {tmp_table_name} like {table_name}"
cur.execute(sql)
cur.execute("COMMIT")
target_tables[table_name] = tmp_table_name
except MySQLdb.Error as e:
log.error("mysql returner archiver was unable to create the archive tables.")
log.error(str(e))
raise salt.exceptions.SaltRunnerError(str(e))
try:
sql = (
"insert into `{}` select * from `{}` where jid in (select distinct jid"
" from salt_returns where alter_time < %s)".format(target_tables["jids"], "jids")
)
cur.execute(sql, (timestamp,))
cur.execute("COMMIT")
except MySQLdb.Error as e:
log.error("mysql returner archiver was unable to copy contents of table 'jids'")
log.error(str(e))
raise salt.exceptions.SaltRunnerError(str(e))
except Exception as e: # pylint: disable=broad-except
log.error(e)
raise
try:
sql = "insert into `{}` select * from `{}` where alter_time < %s".format(
target_tables["salt_returns"], "salt_returns"
)
cur.execute(sql, (timestamp,))
cur.execute("COMMIT")
except MySQLdb.Error as e:
log.error("mysql returner archiver was unable to copy contents of table 'salt_returns'")
log.error(str(e))
raise salt.exceptions.SaltRunnerError(str(e))
try:
sql = "insert into `{}` select * from `{}` where alter_time < %s".format(
target_tables["salt_events"], "salt_events"
)
cur.execute(sql, (timestamp,))
cur.execute("COMMIT")
except MySQLdb.Error as e:
log.error("mysql returner archiver was unable to copy contents of table 'salt_events'")
log.error(str(e))
raise salt.exceptions.SaltRunnerError(str(e))
return _purge_jobs(timestamp)
def clean_old_jobs():
"""
Called in the master's event loop every loop_interval. Archives and/or
deletes the events and job details from the database.
:return:
"""
keep_jobs_seconds = int(salt.utils.job.get_keep_jobs_seconds(__opts__))
if keep_jobs_seconds > 0:
try:
with _get_serv() as cur:
sql = "select date_sub(now(), interval {} second) as stamp;".format(
keep_jobs_seconds
)
cur.execute(sql)
rows = cur.fetchall()
stamp = rows[0][0]
if __opts__.get("archive_jobs", False):
_archive_jobs(stamp)
else:
_purge_jobs(stamp)
except MySQLdb.Error as e:
log.error("Mysql returner was unable to get timestamp for purge/archive of jobs")
log.error(str(e))
raise salt.exceptions.SaltRunnerError(str(e))
070701000000BD000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003D00000000test-repo-1-0.1/saltext_mysql-1.0.0/src/saltext/mysql/states070701000000BE000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004900000000test-repo-1-0.1/saltext_mysql-1.0.0/src/saltext/mysql/states/__init__.py070701000000BF000081A400000000000000000000000167471E9C00001751000000000000000000000000000000000000004F00000000test-repo-1-0.1/saltext_mysql-1.0.0/src/saltext/mysql/states/mysql_database.py"""
Management of MySQL databases (schemas)
=======================================
:depends: - MySQLdb Python module
:configuration: See :py:mod:`salt.modules.mysql` for setup instructions.
The mysql_database module is used to create and manage MySQL databases.
Databases can be set as either absent or present.
.. code-block:: yaml
frank:
mysql_database.present
"""
import logging
import sys
log = logging.getLogger(__name__)
# pylint: disable=undefined-variable
def __virtual__():
"""
Only load if the mysql module is available in __salt__
"""
if "mysql.db_exists" in __salt__:
return True
return (False, "mysql module could not be loaded")
def _get_mysql_error():
"""
Look in module context for a MySQL error. Eventually we should make a less
ugly way of doing this.
"""
return sys.modules[__salt__["test.ping"].__module__].__context__.pop("mysql.error", None)
def present(name, character_set=None, collate=None, **connection_args):
"""
Ensure that the named database is present with the specified properties
name
The name of the database to manage
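The ``character_set`` and ``collate`` arguments can be managed as well; a
minimal sketch (the values are illustrative):
.. code-block:: yaml
frank:
  mysql_database.present:
    - character_set: utf8mb4
    - collate: utf8mb4_general_ci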
"""
ret = {
"name": name,
"changes": {},
"result": True,
"comment": f"Database {name} is already present",
}
# check if database exists
existing = __salt__["mysql.db_get"](name, **connection_args)
if existing:
alter_charset = False
alter_collate = False
existing_charset = bytes(str(existing.get("character_set")).encode()).decode()
if character_set and character_set != existing_charset:
alter_charset = True
log.debug(
"character set differes from %s : %s",
character_set,
existing_charset,
)
comment = "Database character set {} != {} needs to be updated".format(
character_set, existing_charset
)
if __opts__.get("test", False):
ret["result"] = None
ret["comment"] = comment
else:
ret["comment"] = comment
existing_collate = bytes(str(existing.get("collate")).encode()).decode()
if collate and collate != existing_collate:
alter_collate = True
log.debug(
"collate set differs from %s : %s",
collate,
existing_collate,
)
comment = "Database collate {} != {} needs to be updated".format(
collate, existing_collate
)
if __opts__.get("test", False):
ret["result"] = None
ret["comment"] += f"\n{comment}"
return ret
else:
ret["comment"] += f"\n{comment}"
if alter_charset or alter_collate:
if __opts__.get("test", False):
ret["comment"] += f"\nDatabase {name} is going to be updated"
else:
__salt__["mysql.alter_db"](
name, character_set=character_set, collate=collate, **connection_args
)
current = __salt__["mysql.db_get"](name, **connection_args)
if existing.get("collate", None) != current.get("collate", None):
ret["changes"].update(
{
"collate": {
"before": existing.get("collate", None),
"now": current.get("collate", None),
}
}
)
if existing.get("character_set", None) != current.get("character_set", None):
ret["changes"].update(
{
"character_set": {
"before": existing.get("character_set", None),
"now": current.get("character_set", None),
}
}
)
return ret
else:
err = _get_mysql_error()
if err is not None:
ret["comment"] = err
ret["result"] = False
return ret
if __opts__.get("test", False):
ret["result"] = None
ret["comment"] = f"Database {name} is not present and needs to be created"
return ret
# The database is not present, make it!
if __salt__["mysql.db_create"](
name, character_set=character_set, collate=collate, **connection_args
):
ret["comment"] = f"The database {name} has been created"
ret["changes"][name] = "Present"
else:
ret["comment"] = f"Failed to create database {name}"
err = _get_mysql_error()
if err is not None:
ret["comment"] += f" ({err})"
ret["result"] = False
return ret
def absent(name, **connection_args):
"""
Ensure that the named database is absent
name
The name of the database to remove
"""
ret = {"name": name, "changes": {}, "result": True, "comment": ""}
# check if db exists and remove it
if __salt__["mysql.db_exists"](name, **connection_args):
if __opts__.get("test", False):
ret["result"] = None
ret["comment"] = f"Database {name} is present and needs to be removed"
return ret
if __salt__["mysql.db_remove"](name, **connection_args):
ret["comment"] = f"Database {name} has been removed"
ret["changes"][name] = "Absent"
return ret
else:
err = _get_mysql_error()
if err is not None:
ret["comment"] = f"Unable to remove database {name} ({err})"
ret["result"] = False
return ret
else:
err = _get_mysql_error()
if err is not None:
ret["comment"] = err
ret["result"] = False
return ret
# fallback
ret["comment"] = f"Database {name} is not present, so it cannot be removed"
return ret
070701000000C0000081A400000000000000000000000167471E9C0000217F000000000000000000000000000000000000004D00000000test-repo-1-0.1/saltext_mysql-1.0.0/src/saltext/mysql/states/mysql_grants.py"""
Management of MySQL grants (user permissions)
=============================================
:depends: - MySQLdb Python module
:configuration: See :py:mod:`salt.modules.mysql` for setup instructions.
The mysql_grants module is used to grant and revoke MySQL permissions.
The ``name`` you pass in is purely symbolic and does not have anything to do
with the grant itself.
The ``database`` parameter needs to specify a 'priv_level' in the same
specification as defined in the MySQL documentation:
* \\*
* \\*.\\*
* db_name.\\*
* db_name.tbl_name
* etc...
This state is not able to set a password for the granted user on the
specified host. See :py:mod:`salt.states.mysql_user` for further
instructions.
.. code-block:: yaml
frank_exampledb:
mysql_grants.present:
- grant: select,insert,update
- database: exampledb.*
- user: frank
- host: localhost
frank_otherdb:
mysql_grants.present:
- grant: all privileges
- database: otherdb.*
- user: frank
restricted_singletable:
mysql_grants.present:
- grant: select
- database: somedb.sometable
- user: joe
"""
import sys
def __virtual__():
"""
Only load if the mysql module is available
"""
if "mysql.grant_exists" in __salt__:
return True
return (False, "mysql module could not be loaded")
def _get_mysql_error():
"""
Look in module context for a MySQL error. Eventually we should make a less
ugly way of doing this.
"""
return sys.modules[__salt__["test.ping"].__module__].__context__.pop("mysql.error", None)
def present(
name,
grant=None,
database=None,
user=None,
host="localhost",
grant_option=False,
escape=True,
revoke_first=False,
ssl_option=False,
**connection_args,
):
"""
Ensure that the grant is present with the specified properties
name
The name (key) of the grant to add
grant
The grant priv_type (e.g. select,insert,update OR all privileges)
database
The database priv_level (e.g. db.tbl OR db.*)
user
The user to apply the grant to
host
The network/host that the grant should apply to
grant_option
Adds the WITH GRANT OPTION to the defined grant. Default is ``False``
escape
Defines if the database value gets escaped or not. Default is ``True``
revoke_first
By default, MySQL will not do anything if you issue a command to grant
privileges that are more restrictive than what's already in place. This
effectively means that you cannot downgrade permissions without first
revoking the permissions already applied to a db.table/user pair.
To have Salt forcibly revoke permissions before applying a new grant, enable
the ``revoke_first`` option.
WARNING: This will *remove* permissions for a database before attempting
to apply new permissions. There is no guarantee that new permissions
will be applied correctly which can leave your database security in an
unknown and potentially dangerous state.
Use with caution!
Default is ``False``
ssl_option
Adds the specified ssl options for the connecting user as requirements for
this grant. Value is a list of single-element dicts corresponding to the
list of ssl options to use.
Possible key/value pairings for the dicts in the value:
.. code-block:: text
- SSL: True
- X509: True
- SUBJECT: <subject>
- ISSUER: <issuer>
- CIPHER: <cipher>
The non-boolean ssl options take a string value, as specified by the
MySQL documentation for these options.
Default is ``False`` (no ssl options will be used)
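For illustration only, a grant that also requires SSL options could be
declared as follows; the state ID, user, database, and SUBJECT value are
placeholders, not recommendations:
.. code-block:: yaml
    frank_ssl_exampledb:
      mysql_grants.present:
        - grant: select
        - database: exampledb.*
        - user: frank
        - host: localhost
        - ssl_option:
          - SSL: True
          - SUBJECT: '/CN=frank'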
"""
comment = "Grant {0} on {1} to {2}@{3} is already present"
ret = {
"name": name,
"changes": {},
"result": True,
"comment": comment.format(grant, database, user, host),
}
# check if grant exists
if __salt__["mysql.grant_exists"](
grant, database, user, host, grant_option, escape, **connection_args
):
return ret
else:
err = _get_mysql_error()
if err is not None:
ret["comment"] = err
ret["result"] = False
return ret
if revoke_first and not __opts__["test"]:
# For each grant, break it into tokens and see if it's on the same
# user/db/table as ours. (there is probably only one)
user_grants = __salt__["mysql.user_grants"](user, host, **connection_args)
if not user_grants:
user_grants = []
for user_grant in user_grants:
token_grants = __salt__["mysql.tokenize_grant"](user_grant)
db_part = database.rpartition(".")
my_db = db_part[0]
my_table = db_part[2]
my_db = __salt__["mysql.quote_identifier"](my_db, (my_table == "*"))
my_table = __salt__["mysql.quote_identifier"](my_table)
# Remove per-table grants when a database-level grant is requested
if token_grants["database"] == my_db:
grant_to_revoke = ",".join(token_grants["grant"]).rstrip(",")
__salt__["mysql.grant_revoke"](
grant=grant_to_revoke,
database=database,
user=user,
host=host,
grant_option=grant_option,
escape=escape,
**connection_args,
)
# The grant is not present, make it!
if __opts__["test"]:
# there are probably better things to do in test mode
ret["result"] = None
ret["comment"] = f"MySQL grant {name} is set to be created"
return ret
if __salt__["mysql.grant_add"](
grant, database, user, host, grant_option, escape, ssl_option, **connection_args
):
ret["comment"] = "Grant {0} on {1} to {2}@{3} has been added"
ret["comment"] = ret["comment"].format(grant, database, user, host)
ret["changes"][name] = "Present"
else:
ret["comment"] = 'Failed to execute: "GRANT {0} ON {1} TO {2}@{3}"'
ret["comment"] = ret["comment"].format(grant, database, user, host)
err = _get_mysql_error()
if err is not None:
ret["comment"] += f" ({err})"
ret["result"] = False
return ret
def absent(
name,
grant=None,
database=None,
user=None,
host="localhost",
grant_option=False,
escape=True,
**connection_args,
):
"""
Ensure that the grant is absent
name
The name (key) of the grant to add
grant
The grant priv_type (e.g. select,insert,update OR all privileges)
database
The database priv_level (e.g. db.tbl OR db.*)
user
The user to apply the grant to
host
The network/host that the grant should apply to
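For illustration, revoking the grant from the module-level example could be
written like this (values are placeholders):
.. code-block:: yaml
    frank_exampledb:
      mysql_grants.absent:
        - grant: select,insert,update
        - database: exampledb.*
        - user: frank
        - host: localhost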
"""
ret = {"name": name, "changes": {}, "result": True, "comment": ""}
# Check if grant exists, and if so, remove it
if __salt__["mysql.grant_exists"](
grant, database, user, host, grant_option, escape, **connection_args
):
if __opts__["test"]:
ret["result"] = None
ret["comment"] = f"MySQL grant {name} is set to be revoked"
return ret
if __salt__["mysql.grant_revoke"](
grant, database, user, host, grant_option, **connection_args
):
ret["comment"] = "Grant {} on {} for {}@{} has been revoked".format(
grant, database, user, host
)
ret["changes"][name] = "Absent"
return ret
else:
err = _get_mysql_error()
if err is not None:
ret["comment"] = "Unable to revoke grant {} on {} for {}@{} ({})".format(
grant, database, user, host, err
)
ret["result"] = False
return ret
else:
err = _get_mysql_error()
if err is not None:
ret["comment"] = "Unable to determine if grant {} on {} for {}@{} exists ({})".format(
grant, database, user, host, err
)
ret["result"] = False
return ret
# fallback
ret["comment"] = "Grant {} on {} to {}@{} is not present, so it cannot be revoked".format(
grant, database, user, host
)
return ret
070701000000C1000081A400000000000000000000000167471E9C00003164000000000000000000000000000000000000004C00000000test-repo-1-0.1/saltext_mysql-1.0.0/src/saltext/mysql/states/mysql_query.py"""
Execution of MySQL queries
==========================
.. versionadded:: 2014.7.0
:depends: - MySQLdb Python module
:configuration: See :py:mod:`salt.modules.mysql` for setup instructions.
The mysql_query module is used to execute queries on MySQL databases.
Its output may be stored in a file or in a grain.
.. code-block:: yaml
query_id:
mysql_query.run:
- database: my_database
- query: "SELECT * FROM table;"
- output: "/tmp/query_id.txt"
"""
import os.path
import sys
import salt.utils.files
import salt.utils.stringutils
def __virtual__():
"""
Only load if the mysql module is available in __salt__
"""
if "mysql.query" in __salt__:
return True
return (False, "mysql module could not be loaded")
def _get_mysql_error():
"""
Look in module context for a MySQL error. Eventually we should make a less
ugly way of doing this.
"""
return sys.modules[__salt__["test.ping"].__module__].__context__.pop("mysql.error", None)
def run_file(
name,
database,
query_file=None,
output=None,
grain=None,
key=None,
overwrite=True,
saltenv=None,
check_db_exists=True,
client_flags=None,
**connection_args,
):
"""
Execute an arbitrary query on the specified database
.. versionadded:: 2017.7.0
name
Used only as an ID
database
The name of the database to execute the query_file on
query_file
The file of mysql commands to run
output
grain: output in a grain
other: the file to store results
None: output to the result comment (default)
grain:
grain to store the output (need output=grain)
key:
the specified grain will be treated as a dictionary, and the result
of this state will be stored under the specified key.
overwrite:
The file or grain will be overwritten if it already exists (default)
saltenv:
The saltenv to pull the query_file from
check_db_exists:
The state run will check that the specified database exists (default=True)
before running any queries
client_flags:
A list of client flags to pass to the MySQL connection.
https://dev.mysql.com/doc/internals/en/capability-flags.html
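A sketch of a ``run_file`` state, assuming the query file is served from the
Salt fileserver (the state ID, database name, and paths are placeholders):
.. code-block:: yaml
    run_cleanup_queries:
      mysql_query.run_file:
        - database: my_database
        - query_file: salt://mysql/cleanup.sql
        - output: /tmp/cleanup_result.txt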
"""
ret = {
"name": name,
"changes": {},
"result": True,
"comment": f"Database {database} is already present",
}
if client_flags is None:
client_flags = []
connection_args["client_flags"] = client_flags
if not isinstance(client_flags, list):
ret["comment"] = "Error: client_flags must be a list."
ret["result"] = False
return ret
if any(
[
query_file.startswith(proto)
for proto in ["http://", "https://", "salt://", "s3://", "swift://"]
]
):
query_file = __salt__["cp.cache_file"](query_file, saltenv=saltenv or __env__)
if not os.path.exists(query_file):
ret["comment"] = f"File {query_file} does not exist"
ret["result"] = False
return ret
# check if database exists
if check_db_exists and not __salt__["mysql.db_exists"](database, **connection_args):
err = _get_mysql_error()
if err is not None:
ret["comment"] = err
ret["result"] = False
return ret
ret["result"] = None
ret["comment"] = f"Database {database} is not present"
return ret
# Check if execution needed
if output == "grain":
if grain is not None and key is None:
if not overwrite and grain in __salt__["grains.ls"]():
ret["comment"] = "No execution needed. Grain " + grain + " already set"
return ret
elif __opts__["test"]:
ret["result"] = None
ret["comment"] = f"Query would execute, storing result in grain: {grain}"
return ret
elif grain is not None:
if grain in __salt__["grains.ls"]():
grain_value = __salt__["grains.get"](grain)
else:
grain_value = {}
if not overwrite and key in grain_value:
ret["comment"] = "No execution needed. Grain " + grain + ":" + key + " already set"
return ret
elif __opts__["test"]:
ret["result"] = None
ret["comment"] = f"Query would execute, storing result in grain: {grain}:{key}"
return ret
else:
ret["result"] = False
ret["comment"] = "Error: output type 'grain' needs the grain parameter\n"
return ret
elif output is not None:
if not overwrite and os.path.isfile(output):
ret["comment"] = "No execution needed. File " + output + " already set"
return ret
elif __opts__["test"]:
ret["result"] = None
ret["comment"] = f"Query would execute, storing result in file: {output}"
return ret
elif __opts__["test"]:
ret["result"] = None
ret["comment"] = "Query would execute, not storing result"
return ret
# The database is present, execute the query
query_result = __salt__["mysql.file_query"](database, query_file, **connection_args)
if query_result is False:
ret["result"] = False
return ret
mapped_results = []
if "results" in query_result:
for res in query_result["results"]:
mapped_line = {}
for idx, col in enumerate(query_result["columns"]):
mapped_line[col] = res[idx]
mapped_results.append(mapped_line)
query_result["results"] = mapped_results
ret["comment"] = str(query_result)
if output == "grain":
if grain is not None and key is None:
__salt__["grains.setval"](grain, query_result)
ret["changes"]["query"] = "Executed. Output into grain: " + grain
elif grain is not None:
if grain in __salt__["grains.ls"]():
grain_value = __salt__["grains.get"](grain)
else:
grain_value = {}
grain_value[key] = query_result
__salt__["grains.setval"](grain, grain_value)
ret["changes"]["query"] = "Executed. Output into grain: " + grain + ":" + key
elif output is not None:
ret["changes"]["query"] = "Executed. Output into " + output
with salt.utils.files.fopen(output, "w") as output_file:
if "results" in query_result:
for res in query_result["results"]:
for col, val in res.items():
output_file.write(salt.utils.stringutils.to_str(col + ":" + val + "\n"))
else:
output_file.write(salt.utils.stringutils.to_str(query_result))
else:
ret["changes"]["query"] = "Executed"
return ret
def run(
name,
database,
query,
output=None,
grain=None,
key=None,
overwrite=True,
check_db_exists=True,
client_flags=None,
**connection_args,
):
"""
Execute an arbitrary query on the specified database
name
Used only as an ID
database
The name of the database to execute the query on
query
The query to execute
output
grain: output in a grain
other: the file to store results
None: output to the result comment (default)
grain:
grain to store the output (need output=grain)
key:
the specified grain will be treated as a dictionary, and the result
of this state will be stored under the specified key.
overwrite:
The file or grain will be overwritten if it already exists (default)
check_db_exists:
The state run will check that the specified database exists (default=True)
before running any queries
client_flags:
A list of client flags to pass to the MySQL connection.
https://dev.mysql.com/doc/internals/en/capability-flags.html
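For illustration, a query whose result is stored under a key of a grain
(the state ID, database, query, grain, and key names are placeholders):
.. code-block:: yaml
    count_users_query:
      mysql_query.run:
        - database: my_database
        - query: "SELECT COUNT(*) FROM users;"
        - output: grain
        - grain: mysql_query_results
        - key: user_count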
"""
ret = {
"name": name,
"changes": {},
"result": True,
"comment": f"Database {database} is already present",
}
if client_flags is None:
client_flags = []
connection_args["client_flags"] = client_flags
if not isinstance(client_flags, list):
ret["comment"] = "Error: client_flags must be a list."
ret["result"] = False
return ret
# check if database exists
if check_db_exists and not __salt__["mysql.db_exists"](database, **connection_args):
err = _get_mysql_error()
if err is not None:
ret["comment"] = err
ret["result"] = False
return ret
ret["result"] = None
ret["comment"] = f"Database {database} is not present"
return ret
# Check if execution needed
if output == "grain":
if grain is not None and key is None:
if not overwrite and grain in __salt__["grains.ls"]():
ret["comment"] = "No execution needed. Grain " + grain + " already set"
return ret
elif __opts__["test"]:
ret["result"] = None
ret["comment"] = f"Query would execute, storing result in grain: {grain}"
return ret
elif grain is not None:
if grain in __salt__["grains.ls"]():
grain_value = __salt__["grains.get"](grain)
else:
grain_value = {}
if not overwrite and key in grain_value:
ret["comment"] = "No execution needed. Grain " + grain + ":" + key + " already set"
return ret
elif __opts__["test"]:
ret["result"] = None
ret["comment"] = f"Query would execute, storing result in grain: {grain}:{key}"
return ret
else:
ret["result"] = False
ret["comment"] = "Error: output type 'grain' needs the grain parameter\n"
return ret
elif output is not None:
if not overwrite and os.path.isfile(output):
ret["comment"] = "No execution needed. File " + output + " already set"
return ret
elif __opts__["test"]:
ret["result"] = None
ret["comment"] = f"Query would execute, storing result in file: {output}"
return ret
elif __opts__["test"]:
ret["result"] = None
ret["comment"] = "Query would execute, not storing result"
return ret
# The database is present, execute the query
query_result = __salt__["mysql.query"](database, query, **connection_args)
mapped_results = []
if "results" in query_result:
for res in query_result["results"]:
mapped_line = {}
for idx, col in enumerate(query_result["columns"]):
mapped_line[col] = res[idx]
mapped_results.append(mapped_line)
query_result["results"] = mapped_results
ret["comment"] = str(query_result)
if output == "grain":
if grain is not None and key is None:
__salt__["grains.setval"](grain, query_result)
ret["changes"]["query"] = "Executed. Output into grain: " + grain
elif grain is not None:
if grain in __salt__["grains.ls"]():
grain_value = __salt__["grains.get"](grain)
else:
grain_value = {}
grain_value[key] = query_result
__salt__["grains.setval"](grain, grain_value)
ret["changes"]["query"] = "Executed. Output into grain: " + grain + ":" + key
elif output is not None:
ret["changes"]["query"] = "Executed. Output into " + output
with salt.utils.files.fopen(output, "w") as output_file:
if "results" in query_result:
for res in query_result["results"]:
for col, val in res.items():
output_file.write(salt.utils.stringutils.to_str(col + ":" + val + "\n"))
else:
if isinstance(query_result, str):
output_file.write(salt.utils.stringutils.to_str(query_result))
else:
for col, val in query_result.items():
output_file.write(salt.utils.stringutils.to_str(f"{col}:{val}\n"))
else:
ret["changes"]["query"] = "Executed"
return ret
070701000000C2000081A400000000000000000000000167471E9C00002525000000000000000000000000000000000000004B00000000test-repo-1-0.1/saltext_mysql-1.0.0/src/saltext/mysql/states/mysql_user.py"""
Management of MySQL users
=========================
:depends: - MySQLdb Python module
:configuration: See :py:mod:`salt.modules.mysql` for setup instructions.
.. code-block:: yaml
frank:
mysql_user.present:
- host: localhost
- password: bobcat
.. versionadded:: 0.16.2
Authentication overrides have been added.
The MySQL authentication information specified in the minion config file can be
overridden in states using the following arguments: ``connection_host``,
``connection_port``, ``connection_user``, ``connection_pass``,
``connection_db``, ``connection_unix_socket``, ``connection_default_file`` and
``connection_charset``.
.. code-block:: yaml
frank:
mysql_user.present:
- host: localhost
- password: "bob@cat"
- connection_user: someuser
- connection_pass: somepass
- connection_charset: utf8
- saltenv:
- LC_ALL: "en_US.utf8"
This state is not able to grant permissions for the user. See
:py:mod:`salt.states.mysql_grants` for further instructions.
"""
import sys
import salt.utils.data
def __virtual__():
"""
Only load if the mysql module is in __salt__
"""
if "mysql.user_create" in __salt__:
return True
return (False, "mysql module could not be loaded")
def _get_mysql_error():
"""
Look in module context for a MySQL error. Eventually we should make a less
ugly way of doing this.
"""
return sys.modules[__salt__["test.ping"].__module__].__context__.pop("mysql.error", None)
def present(
name,
host="localhost",
password=None,
password_hash=None,
allow_passwordless=False,
unix_socket=False,
password_column=None,
auth_plugin="mysql_native_password",
**connection_args,
):
"""
Ensure that the named user is present with the specified properties. A
passwordless user can be configured by omitting ``password`` and
``password_hash``, and setting ``allow_passwordless`` to ``True``.
name
The name of the user to manage
host
Host for which this user/password combo applies
password
The password to use for this user. Will take precedence over the
``password_hash`` option if both are specified.
password_hash
The password in hashed form. Be sure to quote the hash because YAML
doesn't like the ``*``. A password hash can be obtained from the mysql
command-line client like so::
mysql> SELECT PASSWORD('mypass');
+-------------------------------------------+
| PASSWORD('mypass') |
+-------------------------------------------+
| *6C8989366EAF75BB670AD8EA7A7FC1176A95CEF4 |
+-------------------------------------------+
1 row in set (0.00 sec)
allow_passwordless
If ``True``, then ``password`` and ``password_hash`` can be omitted to
permit a passwordless login.
.. versionadded:: 0.16.2
unix_socket
If ``True`` and allow_passwordless is ``True``, the unix_socket auth
plugin will be used.
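As an illustration, a user created from a pre-computed hash could be declared
like this, reusing the example hash shown above (the state ID is a
placeholder):
.. code-block:: yaml
    frank:
      mysql_user.present:
        - host: localhost
        - password_hash: '*6C8989366EAF75BB670AD8EA7A7FC1176A95CEF4'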
"""
ret = {
"name": name,
"changes": {},
"result": True,
"comment": f"User {name}@{host} is already present",
}
passwordless = not any((password, password_hash))
# check if user exists with the same password (or passwordless login)
if passwordless:
if not salt.utils.data.is_true(allow_passwordless) and not unix_socket:
ret["comment"] = (
"Either password or password_hash must be "
"specified, unless allow_passwordless is True"
)
ret["result"] = False
return ret
else:
if __salt__["mysql.user_exists"](
name,
host,
passwordless=True,
unix_socket=unix_socket,
password_column=password_column,
**connection_args,
):
if allow_passwordless:
ret["comment"] += " with passwordless login"
return ret
else:
err = _get_mysql_error()
if err is not None:
ret["comment"] = err
ret["result"] = False
return ret
else:
if __salt__["mysql.user_exists"](
name,
host,
password,
password_hash,
unix_socket=unix_socket,
password_column=password_column,
**connection_args,
):
if auth_plugin == "mysql_native_password":
ret["comment"] += " with the desired password"
if password_hash and not password:
ret["comment"] += " hash"
else:
ret["comment"] += ". Unable to verify password."
return ret
else:
err = _get_mysql_error()
if err is not None:
ret["comment"] = err
ret["result"] = False
return ret
# check if user exists with a different password
if __salt__["mysql.user_exists"](name, host, unix_socket=unix_socket, **connection_args):
# The user is present, change the password
if __opts__["test"]:
ret["comment"] = f"Password for user {name}@{host} is set to be "
ret["result"] = None
if passwordless:
ret["comment"] += "cleared"
if not salt.utils.data.is_true(allow_passwordless):
ret["comment"] += ", but allow_passwordless != True"
ret["result"] = False
else:
ret["comment"] += "changed"
return ret
if __salt__["mysql.user_chpass"](
name, host, password, password_hash, allow_passwordless, unix_socket, **connection_args
):
ret["comment"] = "Password for user {}@{} has been {}".format(
name, host, "cleared" if passwordless else "changed"
)
ret["changes"][name] = "Updated"
else:
ret["comment"] = "Failed to {} password for user {}@{}".format(
"clear" if passwordless else "change", name, host
)
err = _get_mysql_error()
if err is not None:
ret["comment"] += f" ({err})"
if passwordless and not salt.utils.data.is_true(allow_passwordless):
ret["comment"] += ". Note: allow_passwordless must be True to permit passwordless login."
ret["result"] = False
else:
err = _get_mysql_error()
if err is not None:
ret["comment"] = err
ret["result"] = False
return ret
# The user is not present, make it!
if __opts__["test"]:
ret["comment"] = f"User {name}@{host} is set to be added"
ret["result"] = None
if allow_passwordless:
ret["comment"] += " with passwordless login"
if not salt.utils.data.is_true(allow_passwordless):
ret["comment"] += ", but allow_passwordless != True"
ret["result"] = False
if unix_socket:
ret["comment"] += " using unix_socket"
return ret
if __salt__["mysql.user_create"](
name,
host,
password,
password_hash,
allow_passwordless,
unix_socket=unix_socket,
password_column=password_column,
auth_plugin=auth_plugin,
**connection_args,
):
ret["comment"] = f"The user {name}@{host} has been added"
if allow_passwordless:
ret["comment"] += " with passwordless login"
if unix_socket:
ret["comment"] += " using unix_socket"
ret["changes"][name] = "Present"
else:
ret["comment"] = f"Failed to create user {name}@{host}"
err = _get_mysql_error()
if err is not None:
ret["comment"] += f" ({err})"
ret["result"] = False
return ret
def absent(name, host="localhost", **connection_args):
"""
Ensure that the named user is absent
name
The name of the user to remove
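A minimal illustrative state (user name and host are placeholders):
.. code-block:: yaml
    frank:
      mysql_user.absent:
        - host: localhost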
"""
ret = {"name": name, "changes": {}, "result": True, "comment": ""}
# Check if user exists, and if so, remove it
if __salt__["mysql.user_exists"](name, host, **connection_args):
if __opts__["test"]:
ret["result"] = None
ret["comment"] = f"User {name}@{host} is set to be removed"
return ret
if __salt__["mysql.user_remove"](name, host, **connection_args):
ret["comment"] = f"User {name}@{host} has been removed"
ret["changes"][name] = "Absent"
return ret
else:
err = _get_mysql_error()
if err is not None:
ret["comment"] = err
ret["result"] = False
return ret
else:
err = _get_mysql_error()
if err is not None:
ret["comment"] = err
ret["result"] = False
return ret
# fallback
ret["comment"] = f"User {name}@{host} is not present, so it cannot be removed"
return ret
070701000000C3000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000002A00000000test-repo-1-0.1/saltext_mysql-1.0.0/tests070701000000C4000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000003600000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/__init__.py070701000000C5000081A400000000000000000000000167471E9C00000401000000000000000000000000000000000000003600000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/conftest.pyimport logging
import os
import pytest
from saltfactories.utils import random_string
from saltext.mysql import PACKAGE_ROOT
# Reset the root logger to its default level (because salt changed it)
logging.root.setLevel(logging.WARNING)
# This swallows all logging to stdout.
# To show select logs, set --log-cli-level=<level>
for handler in logging.root.handlers[:]:
logging.root.removeHandler(handler)
handler.close()
@pytest.fixture(scope="session")
def salt_factories_config():
"""
Return a dictionary with the keyword arguments for FactoriesManager
"""
return {
"code_dir": str(PACKAGE_ROOT),
"inject_sitecustomize": "COVERAGE_PROCESS_START" in os.environ,
"start_timeout": 120 if os.environ.get("CI") else 60,
}
@pytest.fixture(scope="package")
def master(salt_factories):
return salt_factories.salt_master_daemon(random_string("master-"))
@pytest.fixture(scope="package")
def minion(master):
return master.salt_minion_daemon(random_string("minion-"))
070701000000C6000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003500000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/functional070701000000C7000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004100000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/functional/__init__.py070701000000C8000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003A00000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/functional/auth070701000000C9000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004600000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/functional/auth/__init__.py070701000000CA000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003B00000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/functional/cache070701000000CB000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004700000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/functional/cache/__init__.py070701000000CC000081A400000000000000000000000167471E9C000024F8000000000000000000000000000000000000004600000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/functional/cache/helpers.pyimport time
from unittest.mock import MagicMock
from unittest.mock import patch
import pytest
import salt.cache
from salt.exceptions import SaltCacheError
def run_common_cache_tests(subtests, cache):
bank = "fnord/kevin/stuart"
# ^^^^ This bank can be just fnord, or fnord/foo, or any mildly reasonable
# or possibly unreasonably nested names.
#
# No. Seriously. Try import string; bank = '/'.join(string.ascii_letters)
# - it works!
# import string; bank = "/".join(string.ascii_letters)
good_key = "roscivs"
bad_key = "monkey"
with subtests.test("non-existent bank should be empty on cache start"):
assert not cache.contains(bank=bank)
assert cache.list(bank=bank) == []
with subtests.test("after storing key in bank it should be in cache list"):
cache.store(bank=bank, key=good_key, data=b"\x01\x04\x05fnordy data")
assert cache.list(bank) == [good_key]
with subtests.test("after storing value, it should be fetchable"):
expected_data = "trombone pleasantry"
cache.store(bank=bank, key=good_key, data=expected_data)
assert cache.fetch(bank=bank, key=good_key) == expected_data
with subtests.test("bad key should still be absent from cache"):
assert cache.fetch(bank=bank, key=bad_key) == {}
with subtests.test("storing new value should update it"):
# Double check that the data was still the old stuff
old_data = expected_data
assert cache.fetch(bank=bank, key=good_key) == old_data
new_data = "stromboli"
cache.store(bank=bank, key=good_key, data=new_data)
assert cache.fetch(bank=bank, key=good_key) == new_data
with subtests.test("storing complex object works"):
new_thing = {
"some": "data",
42: "wheee",
"some other": {"sub": {"objects": "here"}},
}
cache.store(bank=bank, key=good_key, data=new_thing)
actual_thing = cache.fetch(bank=bank, key=good_key)
if isinstance(cache, salt.cache.MemCache):
# MemCache should actually store the object - everything else
# should create a copy of it.
assert actual_thing is new_thing
else:
assert actual_thing is not new_thing
assert actual_thing == new_thing
with subtests.test("contains returns true if key in bank"):
assert cache.contains(bank=bank, key=good_key)
with subtests.test("contains returns true if bank exists and key is None"):
assert cache.contains(bank=bank, key=None)
with subtests.test("contains returns False when bank not in cache and/or key not in bank"):
assert not cache.contains(bank=bank, key=bad_key)
assert not cache.contains(bank="nonexistent", key=good_key)
assert not cache.contains(bank="nonexistent", key=bad_key)
assert not cache.contains(bank="nonexistent", key=None)
with subtests.test("flushing nonexistent key should not remove other keys"):
cache.flush(bank=bank, key=bad_key)
assert cache.contains(bank=bank, key=good_key)
with subtests.test("flushing existing key should not remove bank if no more keys exist"):
pytest.skip("This is impossible with redis. Should we make localfs behave the same way?")
cache.flush(bank=bank, key=good_key)
assert cache.contains(bank=bank)
assert cache.list(bank=bank) == []
with subtests.test(
"after existing key is flushed updated should not return a timestamp for that key"
):
cache.store(bank=bank, key=good_key, data="fnord")
cache.flush(bank=bank, key=good_key)
timestamp = cache.updated(bank=bank, key=good_key)
assert timestamp is None
with subtests.test(
"after flushing bank containing a good key, updated should not return a timestamp for that key"
):
cache.store(bank=bank, key=good_key, data="fnord")
cache.flush(bank=bank, key=None)
timestamp = cache.updated(bank=bank, key=good_key)
assert timestamp is None
with subtests.test("flushing bank with None as key should remove bank"):
cache.flush(bank=bank, key=None)
assert not cache.contains(bank=bank)
with subtests.test("Exception should happen when flushing None bank"):
# This bit is maybe an accidental API, but currently there is no
# protection at least with the localfs cache when bank is None. If
# bank is None we try to `os.path.normpath` the bank, which explodes
# and is at least the current behavior. If we want to change that
# this test should change. Or be removed altogether.
# TODO: this should actually not raise. Not sure if there's a test that we can do here... or just call the code which will fail if there's actually an exception. -W. Werner, 2021-09-28
pytest.skip(
"Skipping for now - etcd, redis, and mysql do not raise. Should ensure all backends behave consistently"
)
with pytest.raises(Exception):
cache.flush(bank=None, key=None)
with subtests.test("Updated for non-existent key should return None"):
timestamp = cache.updated(bank="nonexistent", key="whatever")
assert timestamp is None
with subtests.test("Updated for key should return a reasonable time"):
before_storage = int(time.time())
cache.store(bank="fnord", key="updated test part 2", data="fnord")
after_storage = int(time.time())
timestamp = cache.updated(bank="fnord", key="updated test part 2")
assert before_storage <= timestamp <= after_storage
with subtests.test("If the module raises SaltCacheError then it should make it out of updated"):
with patch.dict(
cache.modules._dict,
{f"{cache.driver}.updated": MagicMock(side_effect=SaltCacheError)},
), pytest.raises(SaltCacheError):
cache.updated(bank="kaboom", key="oops")
with subtests.test("cache.cache right after a value is cached should not update the cache"):
expected_value = "some cool value yo"
cache.store(bank=bank, key=good_key, data=expected_value)
result = cache.cache(
bank=bank,
key=good_key,
fun=lambda **kwargs: "bad bad value no good",
value="some other value?",
loop_fun=lambda x: "super very no good bad",
)
fetch_result = cache.fetch(bank=bank, key=good_key)
assert result == fetch_result == expected_value
with subtests.test(
"cache.cache should update the value with the result of fun when value was updated longer than expiration",
), patch(
"salt.cache.Cache.updated",
return_value=42, # Dec 31, 1969... time to update the cache!
autospec=True,
):
expected_value = "this is the return value woo woo woo"
cache.store(bank=bank, key=good_key, data="not this value")
cache_result = cache.cache(
bank=bank, key=good_key, fun=lambda *args, **kwargs: expected_value
)
fetch_result = cache.fetch(bank=bank, key=good_key)
assert cache_result == fetch_result == expected_value
with subtests.test(
"cache.cache should update the value with all of the outputs from loop_fun if loop_fun was provided",
), patch(
"salt.cache.Cache.updated",
return_value=42,
autospec=True,
):
expected_value = "SOME HUGE STRING OKAY?"
cache.store(bank=bank, key=good_key, data="nope, not me")
cache_result = cache.cache(
bank=bank,
key=good_key,
fun=lambda **kwargs: "some huge string okay?",
loop_fun=str.upper,
)
fetch_result = cache.fetch(bank=bank, key=good_key)
assert cache_result == fetch_result
assert "".join(fetch_result) == expected_value
with subtests.test(
"cache.cache should update the value if the stored value is empty but present and expiry is way in the future"
), patch(
"salt.cache.Cache.updated",
return_value=time.time() * 2,
autospec=True,
):
# Unclear if this was intended behavior: currently any falsey data will
# be updated by cache.cache. If this is incorrect, this test should
# be updated or removed.
expected_data = "some random string whatever"
for empty in ("", (), [], {}, 0, 0.0, False, None):
with subtests.test(empty=empty):
cache.store(bank=bank, key=good_key, data=empty) # empty chairs and empty data
cache_result = cache.cache(
bank=bank, key=good_key, fun=lambda **kwargs: expected_data
)
fetch_result = cache.fetch(bank=bank, key=good_key)
assert cache_result == fetch_result == expected_data
with subtests.test("cache.cache should store a value if it does not exist"):
expected_result = "some result plz"
cache.flush(bank=bank, key=None)
assert cache.fetch(bank=bank, key=good_key) == {}
cache_result = cache.cache(bank=bank, key=good_key, fun=lambda **kwargs: expected_result)
fetch_result = cache.fetch(bank=bank, key=good_key)
assert cache_result == fetch_result
assert fetch_result == expected_result
assert cache_result == fetch_result == expected_result
070701000000CD000081A400000000000000000000000167471E9C0000045A000000000000000000000000000000000000004900000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/functional/cache/test_mysql.pyimport logging
import pytest
import salt.cache
import salt.loader
from tests.functional.cache.helpers import run_common_cache_tests
from tests.support.mysql import * # pylint: disable=wildcard-import,unused-wildcard-import
docker = pytest.importorskip("docker")
log = logging.getLogger(__name__)
pytestmark = [
pytest.mark.skip_if_binaries_missing("dockerd"),
]
@pytest.fixture(scope="module")
def mysql_combo(create_mysql_combo): # pylint: disable=function-redefined
create_mysql_combo.mysql_database = "salt_cache"
return create_mysql_combo
@pytest.fixture
def cache(minion_opts, mysql_container):
opts = minion_opts.copy()
opts["cache"] = "mysql"
opts["mysql.host"] = "127.0.0.1"
opts["mysql.port"] = mysql_container.mysql_port
opts["mysql.user"] = mysql_container.mysql_user
opts["mysql.password"] = mysql_container.mysql_passwd
opts["mysql.database"] = mysql_container.mysql_database
opts["mysql.table_name"] = "cache"
cache = salt.cache.factory(opts)
return cache
def test_caching(subtests, cache):
run_common_cache_tests(subtests, cache)
070701000000CE000081A400000000000000000000000167471E9C00000DAC000000000000000000000000000000000000004100000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/functional/conftest.pyimport logging
import shutil
import pytest
from saltfactories.utils.functional import Loaders
log = logging.getLogger(__name__)
@pytest.fixture(scope="package")
def minion_id():
return "func-tests-minion-opts"
@pytest.fixture(scope="module")
def state_tree(tmp_path_factory):
state_tree_path = tmp_path_factory.mktemp("state-tree-base")
try:
yield state_tree_path
finally:
shutil.rmtree(str(state_tree_path), ignore_errors=True)
@pytest.fixture(scope="module")
def state_tree_prod(tmp_path_factory):
state_tree_path = tmp_path_factory.mktemp("state-tree-prod")
try:
yield state_tree_path
finally:
shutil.rmtree(str(state_tree_path), ignore_errors=True)
@pytest.fixture(scope="module")
def minion_config_defaults():
"""
Functional test modules can provide this fixture to tweak the default
configuration dictionary passed to the minion factory
"""
return {}
@pytest.fixture(scope="module")
def minion_config_overrides():
"""
Functional test modules can provide this fixture to tweak the configuration
overrides dictionary passed to the minion factory
"""
return {}
@pytest.fixture(scope="module")
def minion_opts(
salt_factories,
minion_id,
state_tree,
state_tree_prod,
minion_config_defaults,
minion_config_overrides,
):
minion_config_overrides.update(
{
"file_client": "local",
"file_roots": {
"base": [
str(state_tree),
],
"prod": [
str(state_tree_prod),
],
},
}
)
factory = salt_factories.salt_minion_daemon(
minion_id,
defaults=minion_config_defaults or None,
overrides=minion_config_overrides,
)
return factory.config.copy()
@pytest.fixture(scope="module")
def master_config_defaults():
"""
Functional test modules can provide this fixture to tweak the default
configuration dictionary passed to the master factory
"""
return {}
@pytest.fixture(scope="module")
def master_config_overrides():
"""
Functional test modules can provide this fixture to tweak the configuration
overrides dictionary passed to the master factory
"""
return {}
@pytest.fixture(scope="module")
def master_opts(
salt_factories,
state_tree,
state_tree_prod,
master_config_defaults,
master_config_overrides,
):
master_config_overrides.update(
{
"file_client": "local",
"file_roots": {
"base": [
str(state_tree),
],
"prod": [
str(state_tree_prod),
],
},
}
)
factory = salt_factories.salt_master_daemon(
"func-tests-master-opts",
defaults=master_config_defaults or None,
overrides=master_config_overrides,
)
return factory.config.copy()
@pytest.fixture(scope="module")
def loaders(minion_opts):
return Loaders(minion_opts, loaded_base_name=f"{__name__}.loaded")
@pytest.fixture(autouse=True)
def reset_loaders_state(loaders):
try:
# Run the tests
yield
finally:
# Reset the loaders state
loaders.reset_state()
@pytest.fixture(scope="module")
def modules(loaders):
return loaders.modules
@pytest.fixture(scope="module")
def states(loaders):
return loaders.states
070701000000CF000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003D00000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/functional/modules070701000000D0000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004900000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/functional/modules/__init__.py070701000000D1000081A400000000000000000000000167471E9C000000B2000000000000000000000000000000000000004900000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/functional/modules/conftest.pyimport pytest
@pytest.fixture(scope="module")
def modules(loaders):
return loaders.modules
@pytest.fixture(scope="module")
def states(loaders):
return loaders.states
070701000000D2000081A400000000000000000000000167471E9C00005300000000000000000000000000000000000000004B00000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/functional/modules/test_mysql.py"""
Test Salt MySQL module across various MySQL variants
"""
import logging
import time
import pytest
from pytestshellutils.utils import format_callback_to_string
from salt.utils.versions import version_cmp
from saltext.mysql.modules import mysql as mysqlmod
from tests.support.mysql import * # pylint: disable=wildcard-import,unused-wildcard-import
log = logging.getLogger(__name__)
pytestmark = [
pytest.mark.skip_if_binaries_missing("dockerd"),
pytest.mark.skipif(mysqlmod.MySQLdb is None, reason="No python mysql client installed."),
]
def _get_mysql_error(context):
return context.pop("mysql.error", None)
class CallWrapper:
def __init__(self, func, container, ctx):
self.func = func
self.container = container
self.ctx = ctx
def __call__(self, *args, **kwargs):
kwargs.update(self.container.get_credentials(**kwargs))
retry = 1
retries = 3
ret = None
while True:
ret = self.func(*list(args), **kwargs.copy())
mysql_error = _get_mysql_error(self.ctx)
if mysql_error is None:
break
retry += 1
if retry > retries:
break
time.sleep(0.5)
log.debug(
"Retrying (%s out of %s) %s because of the following error: %s",
retry,
retries,
format_callback_to_string(self.func, args, kwargs),
mysql_error,
)
return ret
@pytest.fixture(scope="module")
def mysql(modules, mysql_container, loaders):
for name in list(modules):
if not name.startswith("mysql."):
continue
modules._dict[name] = CallWrapper(
modules._dict[name],
mysql_container,
loaders.context,
)
return modules.mysql
def test_query(mysql):
ret = mysql.query("mysql", "SELECT 1")
assert ret
assert ret["results"] == (("1",),)
def test_version(mysql, mysql_container):
ret = mysql.version()
assert ret
assert mysql_container.mysql_version in ret
def test_status(mysql):
ret = mysql.status()
assert ret
def test_db_list(mysql):
ret = mysql.db_list()
assert ret
assert "mysql" in ret
def test_db_create_alter_remove(mysql):
ret = mysql.db_create("salt")
assert ret
ret = mysql.alter_db(
name="salt",
character_set="latin1",
collate="latin1_general_ci",
)
assert ret
ret = mysql.db_remove(name="salt")
assert ret
def test_user_list(mysql, mysql_combo):
ret = mysql.user_list()
assert ret
assert {
"User": mysql_combo.mysql_root_user,
"Host": mysql_combo.mysql_host,
} in ret
def test_user_exists(mysql, mysql_combo):
ret = mysql.user_exists(
mysql_combo.mysql_root_user,
host=mysql_combo.mysql_host,
password=mysql_combo.mysql_passwd,
)
assert ret
ret = mysql.user_exists(
"george",
"hostname",
"badpassword",
)
assert not ret
def test_user_info(mysql, mysql_combo):
ret = mysql.user_info(mysql_combo.mysql_root_user, host=mysql_combo.mysql_host)
assert ret
# Check that a subset of the information
# is available in the returned user information.
expected = {
"Host": mysql_combo.mysql_host,
"User": mysql_combo.mysql_root_user,
"Select_priv": "Y",
"Insert_priv": "Y",
"Update_priv": "Y",
"Delete_priv": "Y",
"Create_priv": "Y",
"Drop_priv": "Y",
"Reload_priv": "Y",
"Shutdown_priv": "Y",
"Process_priv": "Y",
"File_priv": "Y",
"Grant_priv": "Y",
"References_priv": "Y",
"Index_priv": "Y",
"Alter_priv": "Y",
"Show_db_priv": "Y",
"Super_priv": "Y",
"Create_tmp_table_priv": "Y",
"Lock_tables_priv": "Y",
"Execute_priv": "Y",
"Repl_slave_priv": "Y",
"Repl_client_priv": "Y",
"Create_view_priv": "Y",
"Show_view_priv": "Y",
"Create_routine_priv": "Y",
"Alter_routine_priv": "Y",
"Create_user_priv": "Y",
"Event_priv": "Y",
"Trigger_priv": "Y",
"Create_tablespace_priv": "Y",
}
data = ret.copy()
for key in list(data):
if key not in expected:
data.pop(key)
assert data == expected
def test_user_create_chpass_delete(mysql):
ret = mysql.user_create(
"george",
host="localhost",
password="badpassword",
)
assert ret
ret = mysql.user_chpass(
"george",
host="localhost",
password="different_password",
)
assert ret
ret = mysql.user_remove("george", host="localhost")
assert ret
def test_user_grants(mysql, mysql_combo):
ret = mysql.user_grants(mysql_combo.mysql_root_user, host=mysql_combo.mysql_host)
assert ret
def test_grant_add_revoke(mysql):
# Create the database
ret = mysql.db_create("salt")
assert ret
# Create a user
ret = mysql.user_create(
"george",
host="localhost",
password="badpassword",
)
assert ret
# Grant privileges to user to specific table
ret = mysql.grant_add(
grant="ALL PRIVILEGES",
database="salt.*",
user="george",
host="localhost",
)
assert ret
# Check the grant exists
ret = mysql.grant_exists(
grant="ALL PRIVILEGES",
database="salt.*",
user="george",
host="localhost",
)
assert ret
# Revoke the grant
ret = mysql.grant_revoke(
grant="ALL PRIVILEGES",
database="salt.*",
user="george",
host="localhost",
)
assert ret
# Check the grant does not exist
ret = mysql.grant_exists(
grant="ALL PRIVILEGES",
database="salt.*",
user="george",
host="localhost",
)
assert not ret
# Grant privileges to user globally
ret = mysql.grant_add(
grant="ALL PRIVILEGES",
database="*.*",
user="george",
host="localhost",
)
assert ret
# Check the global grant exists
ret = mysql.grant_exists(
grant="ALL PRIVILEGES",
database="*.*",
user="george",
host="localhost",
)
assert ret
# Revoke the global grant
ret = mysql.grant_revoke(
grant="ALL PRIVILEGES",
database="*.*",
user="george",
host="localhost",
)
assert ret
# Check the grant does not exist
ret = mysql.grant_exists(
grant="ALL PRIVILEGES",
database="salt.*",
user="george",
host="localhost",
)
assert not ret
# Remove the user
ret = mysql.user_remove("george", host="localhost")
assert ret
# Remove the database
ret = mysql.db_remove("salt")
assert ret
def test_grant_replication_replica_add_revoke(mysql, mysql_container):
# The REPLICATION REPLICA grant is only available for mariadb
if "mariadb" not in mysql_container.mysql_name:
pytest.skip(
"The REPLICATION REPLICA grant is unavailable "
f"for the {mysql_container.mysql_name}:{mysql_container.mysql_version} docker image."
)
# The REPLICATION REPLICA grant was added in mariadb 10.5.1
if version_cmp(mysql_container.mysql_version, "10.5.1") < 0:
pytest.skip(
"The REPLICATION REPLICA grant is unavailable "
f"for the {mysql_container.mysql_name}:{mysql_container.mysql_version} docker image."
)
# Create the database
ret = mysql.db_create("salt")
assert ret
# Create a user
ret = mysql.user_create(
"george",
host="localhost",
password="badpassword",
)
assert ret
# Grant privileges to user globally
ret = mysql.grant_add(
grant="REPLICATION REPLICA",
database="*.*",
user="george",
host="localhost",
)
assert ret
# Check the grant exists
ret = mysql.grant_exists(
grant="REPLICATION REPLICA",
database="*.*",
user="george",
host="localhost",
)
assert ret
# Revoke the global grant
ret = mysql.grant_revoke(
grant="REPLICATION REPLICA",
database="*.*",
user="george",
host="localhost",
)
assert ret
# Check the grant does not exist
ret = mysql.grant_exists(
grant="REPLICATION REPLICA",
database="*.*",
user="george",
host="localhost",
)
assert not ret
# Remove the user
ret = mysql.user_remove("george", host="localhost")
assert ret
# Remove the database
ret = mysql.db_remove("salt")
assert ret
def test_grant_replication_slave_add_revoke(mysql, mysql_container):
# Create the database
ret = mysql.db_create("salt")
assert ret
# Create a user
ret = mysql.user_create(
"george",
host="localhost",
password="badpassword",
)
assert ret
# Grant privileges to user globally
ret = mysql.grant_add(
grant="REPLICATION SLAVE",
database="*.*",
user="george",
host="localhost",
)
assert ret
# Check the grant exists
ret = mysql.grant_exists(
grant="REPLICATION SLAVE",
database="*.*",
user="george",
host="localhost",
)
assert ret
# Revoke the global grant
ret = mysql.grant_revoke(
grant="REPLICATION SLAVE",
database="*.*",
user="george",
host="localhost",
)
assert ret
# Check the grant does not exist
ret = mysql.grant_exists(
grant="REPLICATION SLAVE",
database="*.*",
user="george",
host="localhost",
)
assert not ret
# Remove the user
ret = mysql.user_remove("george", host="localhost")
assert ret
# Remove the database
ret = mysql.db_remove("salt")
assert ret
def test_grant_replication_client_add_revoke(mysql, mysql_container):
# Create the database
ret = mysql.db_create("salt")
assert ret
# Create a user
ret = mysql.user_create(
"george",
host="localhost",
password="badpassword",
)
assert ret
# Grant privileges to user globally
ret = mysql.grant_add(
grant="REPLICATION CLIENT",
database="*.*",
user="george",
host="localhost",
)
assert ret
# Check the grant exists
ret = mysql.grant_exists(
grant="REPLICATION CLIENT",
database="*.*",
user="george",
host="localhost",
)
assert ret
# Revoke the global grant
ret = mysql.grant_revoke(
grant="REPLICATION CLIENT",
database="*.*",
user="george",
host="localhost",
)
assert ret
# Check the grant does not exist
ret = mysql.grant_exists(
grant="REPLICATION CLIENT",
database="*.*",
user="george",
host="localhost",
)
assert not ret
# Remove the user
ret = mysql.user_remove("george", host="localhost")
assert ret
# Remove the database
ret = mysql.db_remove("salt")
assert ret
def test_grant_binlog_monitor_add_revoke(mysql, mysql_container):
# The BINLOG MONITOR grant is only available for mariadb
if "mariadb" not in mysql_container.mysql_name:
pytest.skip(
"The BINLOG MONITOR grant is unavailable "
f"for the {mysql_container.mysql_name}:{mysql_container.mysql_version} docker image."
)
# The BINLOG MONITOR grant was added in mariadb 10.5.2
if version_cmp(mysql_container.mysql_version, "10.5.2") < 0:
pytest.skip(
"The BINLOG MONITOR grant is unavailable "
f"for the {mysql_container.mysql_name}:{mysql_container.mysql_version} docker image."
)
# Create the database
ret = mysql.db_create("salt")
assert ret
# Create a user
ret = mysql.user_create(
"george",
host="localhost",
password="badpassword",
)
assert ret
# Grant privileges to user to specific table
ret = mysql.grant_add(
grant="BINLOG MONITOR",
database="salt.*",
user="george",
host="localhost",
)
assert ret
# Check the grant exists
ret = mysql.grant_exists(
grant="BINLOG MONITOR",
database="salt.*",
user="george",
host="localhost",
)
assert ret
# Revoke the global grant
ret = mysql.grant_revoke(
grant="BINLOG MONITOR",
database="*.*",
user="george",
host="localhost",
)
assert ret
# Check the grant does not exist
ret = mysql.grant_exists(
grant="BINLOG MONITOR",
database="salt.*",
user="george",
host="localhost",
)
assert not ret
# Remove the user
ret = mysql.user_remove("george", host="localhost")
assert ret
# Remove the database
ret = mysql.db_remove("salt")
assert ret
def test_grant_replica_monitor_add_revoke(mysql, mysql_container):
# The REPLICA MONITOR grant is only available for mariadb
if "mariadb" not in mysql_container.mysql_name:
pytest.skip(
"The REPLICA MONITOR grant is unavailable "
f"for the {mysql_container.mysql_name}:{mysql_container.mysql_version} docker image."
)
# The REPLICA MONITOR grant was added in mariadb 10.5.9
if version_cmp(mysql_container.mysql_version, "10.5.9") < 0:
pytest.skip(
"The REPLICA MONITOR grant is unavailable "
f"for the {mysql_container.mysql_name}:{mysql_container.mysql_version} docker image."
)
# Create the database
ret = mysql.db_create("salt")
assert ret
# Create a user
ret = mysql.user_create(
"george",
host="localhost",
password="badpassword",
)
assert ret
# Grant privileges to user globally
ret = mysql.grant_add(
grant="REPLICA MONITOR",
database="*.*",
user="george",
host="localhost",
)
assert ret
# Check the grant exists
ret = mysql.grant_exists(
grant="REPLICA MONITOR",
database="*.*",
user="george",
host="localhost",
)
assert ret
# Revoke the global grant
ret = mysql.grant_revoke(
grant="REPLICA MONITOR",
database="*.*",
user="george",
host="localhost",
)
assert ret
# Check the grant does not exist
ret = mysql.grant_exists(
grant="REPLICA MONITOR",
database="*.*",
user="george",
host="localhost",
)
assert not ret
# Remove the user
ret = mysql.user_remove("george", host="localhost")
assert ret
# Remove the database
ret = mysql.db_remove("salt")
assert ret
def test_grant_slave_monitor_add_revoke(mysql, mysql_container):
# The SLAVE MONITOR grant is only available for mariadb
if "mariadb" not in mysql_container.mysql_name:
pytest.skip(
"The SLAVE MONITOR grant is unavailable "
f"for the {mysql_container.mysql_name}:{mysql_container.mysql_version} docker image."
)
# The SLAVE MONITOR grant was added in mariadb 10.5.9
if version_cmp(mysql_container.mysql_version, "10.5.9") < 0:
pytest.skip(
"The SLAVE MONITOR grant is unavailable "
f"for the {mysql_container.mysql_name}:{mysql_container.mysql_version} docker image."
)
# Create the database
ret = mysql.db_create("salt")
assert ret
# Create a user
ret = mysql.user_create(
"george",
host="localhost",
password="badpassword",
)
assert ret
# Grant privileges to user globally
ret = mysql.grant_add(
grant="SLAVE MONITOR",
database="*.*",
user="george",
host="localhost",
)
assert ret
# Check the grant exists
ret = mysql.grant_exists(
grant="SLAVE MONITOR",
database="*.*",
user="george",
host="localhost",
)
assert ret
# Revoke the global grant
ret = mysql.grant_revoke(
grant="SLAVE MONITOR",
database="*.*",
user="george",
host="localhost",
)
assert ret
# Check the grant does not exist
ret = mysql.grant_exists(
grant="SLAVE MONITOR",
database="salt.*",
user="george",
host="localhost",
)
assert not ret
# Remove the user
ret = mysql.user_remove("george", host="localhost")
assert ret
# Remove the database
ret = mysql.db_remove("salt")
assert ret
def test_plugin_add_status_remove(mysql, mysql_combo):
if "mariadb" in mysql_combo.mysql_name:
plugin = "simple_password_check"
else:
plugin = "auth_socket"
ret = mysql.plugin_status(plugin, host=mysql_combo.mysql_host)
assert not ret
ret = mysql.plugin_add(plugin)
assert ret
ret = mysql.plugin_status(plugin, host=mysql_combo.mysql_host)
assert ret
assert ret == "ACTIVE"
ret = mysql.plugin_remove(plugin)
assert ret
ret = mysql.plugin_status(plugin, host=mysql_combo.mysql_host)
assert not ret
def test_plugin_list(mysql, mysql_container):
if "mariadb" in mysql_container.mysql_name:
plugin = "simple_password_check"
else:
plugin = "auth_socket"
ret = mysql.plugins_list()
assert {"name": plugin, "status": "ACTIVE"} not in ret
assert ret
ret = mysql.plugin_add(plugin)
assert ret
ret = mysql.plugins_list()
assert ret
assert {"name": plugin, "status": "ACTIVE"} in ret
ret = mysql.plugin_remove(plugin)
assert ret
def test_grant_add_revoke_password_hash(mysql):
# Create the database
ret = mysql.db_create("salt")
assert ret
# Create a user
ret = mysql.user_create(
"george",
host="%",
password_hash="*2470C0C06DEE42FD1618BB99005ADCA2EC9D1E19",
)
assert ret
# Grant privileges to user to specific table
ret = mysql.grant_add(
grant="ALL PRIVILEGES",
database="salt.*",
user="george",
host="%",
)
assert ret
# Check the grant exists
ret = mysql.grant_exists(
grant="ALL PRIVILEGES",
database="salt.*",
user="george",
host="%",
)
assert ret
# Check the grant exists via a query
ret = mysql.query(
database="salt",
query="SELECT 1",
connection_user="george",
connection_pass="password",
connection_db="salt",
)
assert ret
# Revoke the grant
ret = mysql.grant_revoke(
grant="ALL PRIVILEGES",
database="salt.*",
user="george",
host="%",
)
assert ret
# Check the grant does not exist
ret = mysql.grant_exists(
grant="ALL PRIVILEGES",
database="salt.*",
user="george",
host="%",
)
assert not ret
# Remove the user
ret = mysql.user_remove("george", host="%")
assert ret
# Remove the database
ret = mysql.db_remove("salt")
assert ret
def test_create_alter_password_hash(mysql):
# Create the database
ret = mysql.db_create("salt")
assert ret
# Create a user
ret = mysql.user_create(
"george",
host="%",
password_hash="*2470C0C06DEE42FD1618BB99005ADCA2EC9D1E19",
)
assert ret
# Grant privileges to user to specific table
ret = mysql.grant_add(
grant="ALL PRIVILEGES",
database="salt.*",
user="george",
host="%",
)
assert ret
# Check the grant exists
ret = mysql.grant_exists(
grant="ALL PRIVILEGES",
database="salt.*",
user="george",
host="%",
)
assert ret
# Check we can query as the new user
ret = mysql.query(
database="salt",
query="SELECT 1",
connection_user="george",
connection_pass="password",
connection_db="salt",
)
assert ret
# Change the user password
ret = mysql.user_chpass(
"george",
host="%",
password_hash="*F4A5147613F01DEC0C5226BF24CD1D5762E6AAF2",
)
assert ret
# Check we can query with the new password
ret = mysql.query(
database="salt",
query="SELECT 1",
connection_user="george",
connection_pass="badpassword",
connection_db="salt",
)
assert ret
# Revoke the grant
ret = mysql.grant_revoke(
grant="ALL PRIVILEGES",
database="salt.*",
user="george",
host="%",
)
assert ret
# Check the grant does not exist
ret = mysql.grant_exists(
grant="ALL PRIVILEGES",
database="salt.*",
user="george",
host="%",
)
assert not ret
# Remove the user
ret = mysql.user_remove("george", host="%")
assert ret
# Remove the database
ret = mysql.db_remove("salt")
assert ret
070701000000D3000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003C00000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/functional/pillar070701000000D4000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004800000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/functional/pillar/__init__.py070701000000D5000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003F00000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/functional/returners070701000000D6000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004B00000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/functional/returners/__init__.py070701000000D7000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003C00000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/functional/states070701000000D8000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004800000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/functional/states/__init__.py070701000000D9000081A400000000000000000000000167471E9C000000B2000000000000000000000000000000000000004800000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/functional/states/conftest.pyimport pytest
@pytest.fixture(scope="module")
def states(loaders):
return loaders.states
@pytest.fixture(scope="module")
def modules(loaders):
return loaders.modules
070701000000DA000081A400000000000000000000000167471E9C00003238000000000000000000000000000000000000004A00000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/functional/states/test_mysql.py"""
Test Salt MySQL state module across various MySQL variants
"""
import logging
import time
import pytest
from pytestshellutils.utils import format_callback_to_string
from saltfactories.utils.functional import StateResult
from saltext.mysql.modules import mysql as mysqlmod
from tests.support.mysql import * # pylint: disable=wildcard-import,unused-wildcard-import
log = logging.getLogger(__name__)
pytestmark = [
pytest.mark.skip_if_binaries_missing("dockerd"),
pytest.mark.skipif(mysqlmod.MySQLdb is None, reason="No python mysql client installed."),
]
def _get_mysql_error(context):
return context.pop("mysql.error", None)
class CallWrapper:
def __init__(self, func, container, ctx):
self.func = func
self.container = container
self.ctx = ctx
def __call__(self, *args, **kwargs):
kwargs.update(self.container.get_credentials(**kwargs))
retry = 1
retries = 3
ret = None
while True:
ret = self.func(*list(args), **kwargs.copy())
mysql_error = _get_mysql_error(self.ctx)
if mysql_error is None:
break
retry += 1
if retry > retries:
break
time.sleep(0.5)
log.debug(
"Retrying(%s out of %s) %s because of the following error: %s",
retry,
retries,
format_callback_to_string(self.func, args, kwargs),
mysql_error,
)
return ret
class StateSingleWrapper:
def __init__(self, func, container, ctx):
self.func = func
self.container = container
self.ctx = ctx
def __call__(self, statefunc, *args, **kwargs):
if statefunc.startswith("mysql_"):
kwargs.update(self.container.get_credentials(**kwargs))
retry = 1
retries = 3
ret = None
while True:
ret = self.func(statefunc, *list(args), **kwargs.copy())
if ret.result:
break
mysql_error = _get_mysql_error(self.ctx)
if mysql_error is None:
break
retry += 1
if retry > retries:
break
time.sleep(0.5)
log.debug(
"Retrying(%s out of %s) %s because of the following return: %s",
retry,
retries,
format_callback_to_string(statefunc, args, kwargs),
ret,
)
else:
# No retries for any other state function
ret = self.func(statefunc, *args, **kwargs)
if isinstance(ret, StateResult):
# Sadly, because we're wrapping, we need to return the raw
# attribute so that a StateResult instance can be recreated.
return ret.raw
return ret
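# Illustrative sketch (an assumption for clarity, not part of the upstream test
# suite): once state.single is wrapped, a hypothetical call such as
#     modules.state.single("mysql_database.present", name="db1")
# has the container credentials merged into its kwargs and is retried up to
# three times while a "mysql.error" entry keeps appearing in the loader
# context; non-mysql_* state functions are passed through unchanged.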
@pytest.fixture(scope="module")
def mysql(modules, mysql_container, loaders):
for name in list(modules):
if name.startswith("mysql."):
modules._dict[name] = CallWrapper(
modules._dict[name],
mysql_container,
loaders.context,
)
if name == "state.single":
modules._dict[name] = StateSingleWrapper(
modules._dict[name],
mysql_container,
loaders.context,
)
return modules.mysql
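# Illustrative usage note: with the wrapping above in place, the tests below
# call the execution module without any connection_* keyword arguments, e.g.
#     ret = mysql.db_create("salt")
#     assert ret
# CallWrapper injects the container credentials and retries transient errors
# reported through the "mysql.error" context key before returning.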
@pytest.fixture(scope="module")
def mysql_states(mysql, states, mysql_container):
# Just so we also have the container running
return states
@pytest.fixture(scope="module")
def mysql_user(mysql_states):
return mysql_states.mysql_user
@pytest.fixture(scope="module")
def mysql_query(mysql_states):
return mysql_states.mysql_query
@pytest.fixture(scope="module")
def mysql_grants(mysql_states):
return mysql_states.mysql_grants
@pytest.fixture(scope="module")
def mysql_database(mysql_states):
return mysql_states.mysql_database
def test_database_present_absent(mysql_database):
ret = mysql_database.present(name="test_database")
assert ret.changes
assert ret.changes == {"test_database": "Present"}
assert ret.comment
assert ret.comment == "The database test_database has been created"
ret = mysql_database.absent(name="test_database")
assert ret.changes
assert ret.changes == {"test_database": "Absent"}
assert ret.comment
assert ret.comment == "Database test_database has been removed"
def test_grants_present_absent(mysql, mysql_grants):
# Create the database
ret = mysql.db_create("salt")
assert ret
# Create a user
ret = mysql.user_create(
"george",
host="localhost",
password="badpassword",
)
assert ret
try:
ret = mysql_grants.present(
name="add_salt_grants",
grant="select,insert,update",
database="salt.*",
user="george",
host="localhost",
)
assert ret.changes
assert ret.changes == {"add_salt_grants": "Present"}
assert ret.comment
assert (
ret.comment == "Grant select,insert,update on salt.* to george@localhost has been added"
)
ret = mysql_grants.absent(
name="delete_salt_grants",
grant="select,insert,update",
database="salt.*",
user="george",
host="localhost",
)
assert ret.changes
assert ret.changes == {"delete_salt_grants": "Absent"}
assert ret.comment
assert (
ret.comment
== "Grant select,insert,update on salt.* for george@localhost has been revoked"
)
finally:
# Remove the user
ret = mysql.user_remove("george", host="localhost")
assert ret
# Remove the database
ret = mysql.db_remove("salt")
assert ret
def test_grants_present_absent_all_privileges(mysql, mysql_grants):
# Create the database
ret = mysql.db_create("salt_2_0")
assert ret
# Create a user
ret = mysql.user_create(
"george",
host="localhost",
password="badpassword",
)
assert ret
try:
ret = mysql_grants.present(
name="add_salt_grants",
grant="all privileges",
database="salt_2_0.*",
user="george",
host="localhost",
)
assert ret.changes
assert ret.changes == {"add_salt_grants": "Present"}
assert ret.comment
assert (
ret.comment == "Grant all privileges on salt_2_0.* to george@localhost has been added"
)
ret = mysql_grants.absent(
name="delete_salt_grants",
grant="all privileges",
database="salt_2_0.*",
user="george",
host="localhost",
)
assert ret.changes
assert ret.changes == {"delete_salt_grants": "Absent"}
assert ret.comment
assert (
ret.comment
== "Grant all privileges on salt_2_0.* for george@localhost has been revoked"
)
finally:
# Remove the user
ret = mysql.user_remove("george", host="localhost")
assert ret
# Remove the database
ret = mysql.db_remove("salt_2_0")
assert ret
def test_user_present_absent(mysql_user):
ret = mysql_user.present(
name="george",
host="localhost",
password="password",
)
assert ret.changes
assert ret.changes == {"george": "Present"}
assert ret.comment
assert ret.comment == "The user george@localhost has been added"
ret = mysql_user.absent(
name="george",
host="localhost",
)
assert ret.changes
assert ret.changes == {"george": "Absent"}
assert ret.comment
assert ret.comment == "User george@localhost has been removed"
def test_user_present_absent_passwordless(mysql_user):
ret = mysql_user.present(
name="george",
host="localhost",
allow_passwordless=True,
)
assert ret.changes
assert ret.changes == {"george": "Present"}
assert ret.comment
assert ret.comment == "The user george@localhost has been added with passwordless login"
ret = mysql_user.absent(
name="george",
host="localhost",
)
assert ret.changes
assert ret.changes == {"george": "Absent"}
assert ret.comment
assert ret.comment == "User george@localhost has been removed"
def test_user_present_absent_unixsocket(mysql, mysql_user, mysql_container):
# The auth_socket plugin on MariaDB is unavailable
# on versions 10.1 - 10.3
if "mariadb" in mysql_container.mysql_name:
if mysql_container.mysql_version in ("10.1", "10.2", "10.3"):
pytest.skip(
"The auth_socket plugin is unavaiable "
f"for the {mysql_container.mysql_name}:{mysql_container.mysql_version} docker image."
)
# enable the auth_socket plugin on MySQL
# already enabled on MariaDB > 10.3
try:
if "mariadb" not in mysql_container.mysql_name:
ret = mysql.plugin_add("auth_socket")
assert ret
ret = mysql_user.present(
name="george",
host="localhost",
unix_socket=True,
allow_passwordless=False,
)
assert ret.changes
assert ret.changes == {"george": "Present"}
assert ret.comment
assert ret.comment == "The user george@localhost has been added using unix_socket"
ret = mysql_user.absent(
name="george",
host="localhost",
)
assert ret.changes
assert ret.changes == {"george": "Absent"}
assert ret.comment
assert ret.comment == "User george@localhost has been removed"
finally:
if "mariadb" not in mysql_container.mysql_name:
ret = mysql.plugin_remove("auth_socket")
assert ret
def test_grants_present_absent_state_file(modules, mysql, mysql_grants, mysql_combo, state_tree):
content = """
sbclient_2_0:
mysql_user.present:
- host: localhost
- password: sbclient
- connection_user: {mysql_user}
- connection_pass: {mysql_pass}
- connection_db: mysql
- connection_port: {mysql_port}
mysql_database.present:
- connection_user: {mysql_user}
- connection_pass: {mysql_pass}
- connection_db: mysql
- connection_port: {mysql_port}
mysql_grants.present:
- grant: ALL PRIVILEGES
- user: sbclient_2_0
- database: sbclient_2_0.*
- host: localhost
- connection_user: {mysql_user}
- connection_pass: {mysql_pass}
- connection_db: mysql
- connection_port: {mysql_port}
""".format(
mysql_user=mysql_combo.mysql_root_user,
mysql_pass=mysql_combo.mysql_root_passwd,
mysql_port=mysql_combo.mysql_port,
)
try:
with pytest.helpers.temp_file("manage_mysql.sls", content, state_tree):
ret = modules.state.apply("manage_mysql")
# Check user creation
state = "mysql_user_|-sbclient_2_0_|-sbclient_2_0_|-present"
assert state in ret
assert "changes" in ret[state]
assert ret[state].changes == {"sbclient_2_0": "Present"}
# Check database creation
state = "mysql_database_|-sbclient_2_0_|-sbclient_2_0_|-present"
assert state in ret
assert "changes" in ret[state]
assert ret[state].changes == {"sbclient_2_0": "Present"}
# Check grant creation
state = "mysql_grants_|-sbclient_2_0_|-sbclient_2_0_|-present"
assert state in ret
assert "comment" in ret[state]
assert (
ret[state].comment
== "Grant ALL PRIVILEGES on sbclient_2_0.* to sbclient_2_0@localhost has been added"
)
ret = mysql_grants.absent(
name="delete_sbclient_grants",
grant="all privileges",
database="sbclient_2_0.*",
user="sbclient_2_0",
host="localhost",
)
assert ret.changes
assert ret.changes == {"delete_sbclient_grants": "Absent"}
assert ret.comment
assert (
ret.comment
== "Grant all privileges on sbclient_2_0.* for sbclient_2_0@localhost has been revoked"
)
finally:
# Remove the user
ret = mysql.user_remove("sbclient_2_0", host="localhost")
assert ret
# Remove the database
ret = mysql.db_remove("sbclient_2_0")
assert ret
070701000000DB000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003600000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/integration070701000000DC000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004200000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/integration/__init__.py070701000000DD000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003B00000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/integration/auth070701000000DE000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004700000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/integration/auth/__init__.py070701000000DF000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003C00000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/integration/cache070701000000E0000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004800000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/integration/cache/__init__.py070701000000E1000081A400000000000000000000000167471E9C000001BD000000000000000000000000000000000000004200000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/integration/conftest.pyimport pytest
@pytest.fixture(scope="package")
def master(master):
with master.started():
yield master
@pytest.fixture(scope="package")
def minion(minion):
with minion.started():
yield minion
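# The package-scoped overrides above ensure the master and minion daemons are
# started once and kept running for every test in this package; the CLI
# fixtures below simply return clients bound to those running daemons.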
@pytest.fixture
def salt_run_cli(master):
return master.salt_run_cli()
@pytest.fixture
def salt_cli(master):
return master.salt_cli()
@pytest.fixture
def salt_call_cli(minion):
return minion.salt_call_cli()
070701000000E2000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003C00000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/integration/files070701000000E3000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000004100000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/integration/files/file070701000000E4000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000004600000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/integration/files/file/base070701000000E5000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000004C00000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/integration/files/file/base/mysql070701000000E6000081A400000000000000000000000167471E9C000000F3000000000000000000000000000000000000005D00000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/integration/files/file/base/mysql/select_query.sqlCREATE TABLE test_select (a INT);
insert into test_select values (1);
insert into test_select values (3);
insert into test_select values (4);
insert into test_select values (5);
update test_select set a=2 where a=1;
select * from test_select;
070701000000E7000081A400000000000000000000000167471E9C000000D6000000000000000000000000000000000000005D00000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/integration/files/file/base/mysql/update_query.sql/*
multiline
comment
*/
CREATE TABLE test_update (a INT); # end of line comment
# example comment
insert into test_update values (1); -- ending comment
-- another comment type
update test_update set a=2 where a=1;
070701000000E8000081A400000000000000000000000167471E9C000002E1000000000000000000000000000000000000005500000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/integration/files/file/base/mysql_utf8.sls# -*- coding: utf-8 -*-
#
# We all agree that a real-world example would take credentials from pillar;
# this is simply a utf-8 test
A:
mysql_database.present:
- name: "foo 準`bar"
- character_set: utf8
- collate: utf8_general_ci
- connection_user: root
- connection_pass: poney
- connection_use_unicode: True
- connection_charset: utf8
- saltenv:
- LC_ALL: "en_US.utf8"
B:
mysql_database.absent:
- name: "foo 準`bar"
- character_set: utf8
- collate: utf8_general_ci
- connection_user: root
- connection_pass: poney
- connection_use_unicode: True
- connection_charset: utf8
- saltenv:
- LC_ALL: "en_US.utf8"
- require:
- mysql_database: A
070701000000E9000081A400000000000000000000000167471E9C0000257C000000000000000000000000000000000000005D00000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/integration/files/mysql_returner_archiver_data.sql-- MySQL dump 10.15 Distrib 10.0.22-MariaDB, for Linux (x86_64)
--
-- Host: localhost Database: salt
-- ------------------------------------------------------
-- Server version 10.0.22-MariaDB
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
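-- The /*!40101 ... */ directives above are MySQL conditional comments: servers at
-- or above the embedded version (here 4.1.1) execute their contents, while older
-- servers and other SQL dialects treat them as ordinary comments.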
--
-- Table structure for table `jids`
--
CREATE DATABASE if not exists `salt`
DEFAULT CHARACTER SET utf8
DEFAULT COLLATE utf8_general_ci;
USE `salt`;
DROP TABLE IF EXISTS `jids`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `jids` (
`jid` varchar(255) NOT NULL,
`load` mediumtext NOT NULL,
UNIQUE KEY `jid` (`jid`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `jids`
--
LOCK TABLES `jids` WRITE;
/*!40000 ALTER TABLE `jids` DISABLE KEYS */;
INSERT INTO `jids` VALUES ('20160719134843873492','{\"tgt_type\": \"compound\", \"jid\": \"20160719134843873492\", \"tgt\": \"G@virtual:physical and G@os:smartos\", \"cmd\": \"publish\", \"ret\": \"\", \"user\": \"root\", \"arg\": [], \"fun\": \"test.ping\"}'),('20160719134848959936','{\"tgt_type\": \"compound\", \"jid\": \"20160719134848959936\", \"tgt\": \"G@virtual:physical and G@os:smartos\", \"cmd\": \"publish\", \"ret\": \"\", \"user\": \"root\", \"arg\": [\"20160719134843873492\"], \"fun\": \"saltutil.find_job\"}'),('20160719134910163074','{\"tgt_type\": \"glob\", \"jid\": \"20160719134910163074\", \"cmd\": \"publish\", \"tgt\": \"twd\", \"kwargs\": {\"delimiter\": \":\", \"show_timeout\": true, \"show_jid\": false}, \"ret\": \"\", \"user\": \"root\", \"arg\": [], \"fun\": \"test.ping\"}'),('20160719134919147347','{\"tgt_type\": \"glob\", \"jid\": \"20160719134919147347\", \"cmd\": \"publish\", \"tgt\": \"twd\", \"kwargs\": {\"delimiter\": \":\", \"show_timeout\": true, \"show_jid\": false}, \"ret\": \"\", \"user\": \"root\", \"arg\": [], \"fun\": \"network.interfaces\"}'),('20160719135029732667','{\"tgt_type\": \"glob\", \"jid\": \"20160719135029732667\", \"cmd\": \"publish\", \"tgt\": \"twd\", \"kwargs\": {\"delimiter\": \":\", \"show_timeout\": true, \"show_jid\": false}, \"ret\": \"\", \"user\": \"root\", \"arg\": [{\"refresh\": true, \"__kwarg__\": true}], \"fun\": \"pkg.upgrade\"}'),('20160719135034878238','{\"tgt_type\": \"glob\", \"jid\": \"20160719135034878238\", \"cmd\": \"publish\", \"tgt\": \"twd\", \"kwargs\": {\"delimiter\": \":\"}, \"ret\": \"\", \"user\": \"root\", \"arg\": [\"20160719135029732667\"], \"fun\": \"saltutil.find_job\"}'),('20160719135044921491','{\"tgt_type\": \"glob\", \"jid\": \"20160719135044921491\", \"cmd\": \"publish\", \"tgt\": \"twd\", \"kwargs\": {\"delimiter\": \":\"}, \"ret\": \"\", \"user\": \"root\", \"arg\": [\"20160719135029732667\"], \"fun\": \"saltutil.find_job\"}');
/*!40000 ALTER TABLE `jids` ENABLE KEYS */;
UNLOCK TABLES;
--
-- Table structure for table `salt_events`
--
DROP TABLE IF EXISTS `salt_events`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `salt_events` (
`id` bigint(20) NOT NULL AUTO_INCREMENT,
`tag` varchar(255) NOT NULL,
`data` mediumtext NOT NULL,
`alter_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
`master_id` varchar(255) NOT NULL,
PRIMARY KEY (`id`),
KEY `tag` (`tag`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `salt_events`
--
LOCK TABLES `salt_events` WRITE;
/*!40000 ALTER TABLE `salt_events` DISABLE KEYS */;
/*!40000 ALTER TABLE `salt_events` ENABLE KEYS */;
UNLOCK TABLES;
--
-- Table structure for table `salt_returns`
--
DROP TABLE IF EXISTS `salt_returns`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `salt_returns` (
`fun` varchar(50) NOT NULL,
`jid` varchar(255) NOT NULL,
`return` mediumtext NOT NULL,
`id` varchar(255) NOT NULL,
`success` varchar(10) NOT NULL,
`full_ret` mediumtext NOT NULL,
`alter_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
KEY `id` (`id`),
KEY `jid` (`jid`),
KEY `fun` (`fun`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `salt_returns`
--
LOCK TABLES `salt_returns` WRITE;
/*!40000 ALTER TABLE `salt_returns` DISABLE KEYS */;
INSERT INTO `salt_returns` VALUES ('test.ping','20160719134910163074','true','twd','1','{\"fun_args\": [], \"jid\": \"20160719134910163074\", \"return\": true, \"retcode\": 0, \"success\": true, \"cmd\": \"_return\", \"_stamp\": \"2016-07-19T19:49:10.295047\", \"fun\": \"test.ping\", \"id\": \"twd\"}','2016-07-19 19:49:10'),('network.interfaces','20160719134919147347','{\"lo\": {\"hwaddr\": \"00:00:00:00:00:00\", \"up\": true, \"inet\": [{\"broadcast\": null, \"netmask\": \"255.0.0.0\", \"address\": \"127.0.0.1\", \"label\": \"lo\"}], \"inet6\": [{\"prefixlen\": \"128\", \"scope\": \"host\", \"address\": \"::1\"}]}, \"docker0\": {\"hwaddr\": \"02:42:bb:e2:f6:7e\", \"up\": true, \"inet\": [{\"broadcast\": null, \"netmask\": \"255.255.0.0\", \"address\": \"172.17.0.1\", \"label\": \"docker0\"}]}, \"eno16777984\": {\"hwaddr\": \"00:0c:29:e3:6b:c8\", \"up\": true}, \"br0\": {\"hwaddr\": \"00:0c:29:e3:6b:c8\", \"up\": true, \"inet\": [{\"broadcast\": \"172.16.207.255\", \"netmask\": \"255.255.255.0\", \"address\": \"172.16.207.136\", \"label\": \"br0\"}], \"inet6\": [{\"prefixlen\": \"64\", \"scope\": \"link\", \"address\": \"fe80::20c:29ff:fee3:6bc8\"}]}}','twd','1','{\"fun_args\": [], \"jid\": \"20160719134919147347\", \"return\": {\"lo\": {\"hwaddr\": \"00:00:00:00:00:00\", \"up\": true, \"inet\": [{\"broadcast\": null, \"netmask\": \"255.0.0.0\", \"address\": \"127.0.0.1\", \"label\": \"lo\"}], \"inet6\": [{\"prefixlen\": \"128\", \"scope\": \"host\", \"address\": \"::1\"}]}, \"docker0\": {\"hwaddr\": \"02:42:bb:e2:f6:7e\", \"up\": true, \"inet\": [{\"broadcast\": null, \"netmask\": \"255.255.0.0\", \"address\": \"172.17.0.1\", \"label\": \"docker0\"}]}, \"eno16777984\": {\"hwaddr\": \"00:0c:29:e3:6b:c8\", \"up\": true}, \"br0\": {\"hwaddr\": \"00:0c:29:e3:6b:c8\", \"up\": true, \"inet\": [{\"broadcast\": \"172.16.207.255\", \"netmask\": \"255.255.255.0\", \"address\": \"172.16.207.136\", \"label\": \"br0\"}], \"inet6\": [{\"prefixlen\": \"64\", \"scope\": \"link\", \"address\": \"fe80::20c:29ff:fee3:6bc8\"}]}}, \"retcode\": 0, \"success\": true, \"cmd\": \"_return\", \"_stamp\": \"2016-07-19T19:49:19.222588\", \"fun\": \"network.interfaces\", \"id\": \"twd\"}','2016-07-19 19:49:19'),('saltutil.find_job','20160719135034878238','{\"tgt_type\": \"glob\", \"jid\": \"20160719135029732667\", \"tgt\": \"twd\", \"pid\": 5557, \"ret\": \"\", \"user\": \"root\", \"arg\": [{\"refresh\": true, \"__kwarg__\": true}], \"fun\": \"pkg.upgrade\"}','twd','1','{\"fun_args\": [\"20160719135029732667\"], \"jid\": \"20160719135034878238\", \"return\": {\"tgt_type\": \"glob\", \"jid\": \"20160719135029732667\", \"tgt\": \"twd\", \"pid\": 5557, \"ret\": \"\", \"user\": \"root\", \"arg\": [{\"refresh\": true, \"__kwarg__\": true}], \"fun\": \"pkg.upgrade\"}, \"retcode\": 0, \"success\": true, \"cmd\": \"_return\", \"_stamp\": \"2016-07-19T19:50:34.967377\", \"fun\": \"saltutil.find_job\", \"id\": \"twd\"}','2016-07-19 19:50:34'),('saltutil.find_job','20160719135044921491','{\"tgt_type\": \"glob\", \"jid\": \"20160719135029732667\", \"tgt\": \"twd\", \"pid\": 5557, \"ret\": \"\", \"user\": \"root\", \"arg\": [{\"refresh\": true, \"__kwarg__\": true}], \"fun\": \"pkg.upgrade\"}','twd','1','{\"fun_args\": [\"20160719135029732667\"], \"jid\": \"20160719135044921491\", \"return\": {\"tgt_type\": \"glob\", \"jid\": \"20160719135029732667\", \"tgt\": \"twd\", \"pid\": 5557, \"ret\": \"\", \"user\": \"root\", \"arg\": [{\"refresh\": true, \"__kwarg__\": true}], \"fun\": \"pkg.upgrade\"}, \"retcode\": 0, \"success\": true, 
\"cmd\": \"_return\", \"_stamp\": \"2016-07-19T19:50:45.034813\", \"fun\": \"saltutil.find_job\", \"id\": \"twd\"}','2016-07-19 19:50:45'),('pkg.upgrade','20160719135029732667','{\"comment\": \"\", \"changes\": {}, \"result\": true}','twd','1','{\"fun_args\": [{\"refresh\": true}], \"jid\": \"20160719135029732667\", \"return\": {\"comment\": \"\", \"changes\": {}, \"result\": true}, \"retcode\": 0, \"success\": true, \"cmd\": \"_return\", \"_stamp\": \"2016-07-19T19:50:52.016142\", \"fun\": \"pkg.upgrade\", \"id\": \"twd\"}','2016-07-19 19:50:52');
/*!40000 ALTER TABLE `salt_returns` ENABLE KEYS */;
UNLOCK TABLES;
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-- Dump completed on 2016-07-19 13:52:18
070701000000EA000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003E00000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/integration/modules070701000000EB000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004A00000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/integration/modules/__init__.py070701000000EC000081A400000000000000000000000167471E9C0000E4D5000000000000000000000000000000000000004C00000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/integration/modules/test_mysql.py# pylint: disable-all
import logging
import pytest
import salt.utils.path
from saltext.mysql.modules import mysql as mysqlmod
# from tests.support.case import ModuleCase
# from tests.support.mixins import SaltReturnAssertsMixin
log = logging.getLogger(__name__)
NO_MYSQL = False
try:
import MySQLdb # pylint: disable=import-error,unused-import
except Exception: # pylint: disable=broad-except
NO_MYSQL = True
if not salt.utils.path.which("mysqladmin"):
NO_MYSQL = True
pytest.skip(reason="These tests were not converted to pytest yet", allow_module_level=True)
@pytest.mark.skipif(
NO_MYSQL,
reason="Please install MySQL bindings and a MySQL Server before running "
"MySQL integration tests.",
)
@pytest.mark.windows_whitelisted
class MysqlModuleDbTest: # (ModuleCase, SaltReturnAssertsMixin):
"""
Module testing database creation on a real MySQL Server.
"""
user = "root"
password = "poney"
@pytest.mark.destructive_test
def setUp(self):
"""
Test presence of MySQL server, enforce a root password
"""
super().setUp()
NO_MYSQL_SERVER = True
# now ensure we know the mysql root password
# at least one of these two should work
ret1 = self.run_state(
"cmd.run",
name='mysqladmin --host="localhost" -u '
+ self.user
+ ' flush-privileges password "'
+ self.password
+ '"',
)
ret2 = self.run_state(
"cmd.run",
name='mysqladmin --host="localhost" -u '
+ self.user
+ ' --password="'
+ self.password
+ '" flush-privileges password "'
+ self.password
+ '"',
)
key, value = ret2.popitem()
if value["result"]:
NO_MYSQL_SERVER = False
else:
self.skipTest("No MySQL Server running, or no root access on it.")
def _db_creation_loop(self, db_name, returning_name, test_conn=False, **kwargs):
"""
Used in the db test cases: create, check that it exists, check it in the list, and remove.
"""
ret = self.run_function("mysql.db_create", name=db_name, **kwargs)
assert True == ret, f"Problem while creating db for db name: '{db_name}'"
# test db exists
ret = self.run_function("mysql.db_exists", name=db_name, **kwargs)
assert True == ret, f"Problem while testing db exists for db name: '{db_name}'"
# List db names to ensure db is created with the right utf8 string
ret = self.run_function("mysql.db_list", **kwargs)
if not isinstance(ret, list):
raise AssertionError(
(
"Unexpected query result while retrieving databases list" " '{}' for '{}' test"
).format(ret, db_name)
)
assert returning_name in ret, (
"Problem while testing presence of db name in db lists"
" for db name: '{}' in list '{}'"
).format(db_name, ret)
if test_conn:
# test connections on database with root user
ret = self.run_function("mysql.query", database=db_name, query="SELECT 1", **kwargs)
if not isinstance(ret, dict) or "results" not in ret:
raise AssertionError(
"Unexpected result while testing connection on database : {}".format(
repr(db_name)
)
)
assert [["1"]] == ret["results"]
# Now remove database
ret = self.run_function("mysql.db_remove", name=db_name, **kwargs)
assert True == ret, f"Problem while removing db for db name: '{db_name}'"
@pytest.mark.destructive_test
def test_database_creation_level1(self):
"""
Create database, test presence, then drop db. All of these with complex names.
"""
# name with space
db_name = "foo 1"
self._db_creation_loop(
db_name=db_name,
returning_name=db_name,
test_conn=True,
connection_user=self.user,
connection_pass=self.password,
)
# ```````
# create
# also with character_set and collate only
ret = self.run_function(
"mysql.db_create",
name="foo`2",
character_set="utf8",
collate="utf8_general_ci",
connection_user=self.user,
connection_pass=self.password,
)
assert True == ret
# test db exists
ret = self.run_function(
"mysql.db_exists",
name="foo`2",
connection_user=self.user,
connection_pass=self.password,
)
assert True == ret
# redoing the same should fail
# even with other character sets or collations
ret = self.run_function(
"mysql.db_create",
name="foo`2",
character_set="utf8",
collate="utf8_general_ci",
connection_user=self.user,
connection_pass=self.password,
)
assert False == ret
# redoing the same should fail
ret = self.run_function(
"mysql.db_create",
name="foo`2",
character_set="utf8",
collate="utf8_general_ci",
connection_user=self.user,
connection_pass=self.password,
)
assert False == ret
# Now remove database
ret = self.run_function(
"mysql.db_remove",
name="foo`2",
connection_user=self.user,
connection_pass=self.password,
)
assert True == ret
# '''''''
# create
# also with character_set only
db_name = "foo'3"
self._db_creation_loop(
db_name=db_name,
returning_name=db_name,
test_conn=True,
character_set="utf8",
connection_user=self.user,
connection_pass=self.password,
)
# """"""""
# also with collate only
db_name = 'foo"4'
self._db_creation_loop(
db_name=db_name,
returning_name=db_name,
test_conn=True,
collate="utf8_general_ci",
connection_user=self.user,
connection_pass=self.password,
)
# fuzzy
db_name = '<foo` --"5>'
self._db_creation_loop(
db_name=db_name,
returning_name=db_name,
test_conn=True,
connection_user=self.user,
connection_pass=self.password,
)
@pytest.mark.destructive_test
def test_mysql_dbname_character_percent(self):
"""
Exercise the '%' character corner cases.
This character should be escaped in the form '%%' in queries, but only
when these queries have arguments. It is also a special character
in LIKE SQL queries. Finally it is used to indicate query arguments.
"""
db_name1 = "foo%1_"
db_name2 = "foo%12"
ret = self.run_function(
"mysql.db_create",
name=db_name1,
character_set="utf8",
collate="utf8_general_ci",
connection_user=self.user,
connection_pass=self.password,
)
assert True == ret
ret = self.run_function(
"mysql.db_create",
name=db_name2,
connection_user=self.user,
connection_pass=self.password,
)
assert True == ret
ret = self.run_function(
"mysql.db_remove",
name=db_name1,
connection_user=self.user,
connection_pass=self.password,
)
assert True == ret
ret = self.run_function(
"mysql.db_exists",
name=db_name1,
connection_user=self.user,
connection_pass=self.password,
)
assert False == ret
ret = self.run_function(
"mysql.db_exists",
name=db_name2,
connection_user=self.user,
connection_pass=self.password,
)
assert True == ret
ret = self.run_function(
"mysql.db_remove",
name=db_name2,
connection_user=self.user,
connection_pass=self.password,
)
assert True == ret
@pytest.mark.destructive_test
def test_database_creation_utf8(self):
"""
Test support of utf8 in database names
"""
# Simple accents : using utf8 string
db_name_unicode = "notam\xe9rican"
# same as 'notamérican' because of file encoding
# but make it explicit in this test
db_name_utf8 = "notam\xc3\xa9rican"
# FIXME: MySQLdb problems with connection strings containing
# utf-8 in the user name or db name prevent the connection test
self._db_creation_loop(
db_name=db_name_utf8,
returning_name=db_name_utf8,
test_conn=False,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
# test unicode entry will also return utf8 name
self._db_creation_loop(
db_name=db_name_unicode,
returning_name=db_name_utf8,
test_conn=False,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
# Using more complex unicode characters:
db_name_unicode = "\u6a19\u6e96\u8a9e"
# same as '標準語' because of file encoding
# but make it explicit in this test
db_name_utf8 = "\xe6\xa8\x99\xe6\xba\x96\xe8\xaa\x9e"
self._db_creation_loop(
db_name=db_name_utf8,
returning_name=db_name_utf8,
test_conn=False,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
# test unicode entry will also return utf8 name
self._db_creation_loop(
db_name=db_name_unicode,
returning_name=db_name_utf8,
test_conn=False,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
@pytest.mark.destructive_test
def test_database_maintenance(self):
"""
Test maintenance operations on a created database
"""
dbname = "foo%'-- `\"'"
# create database
# but first silently try to remove it
# in case of previous test failures
ret = self.run_function(
"mysql.db_remove",
name=dbname,
connection_user=self.user,
connection_pass=self.password,
)
ret = self.run_function(
"mysql.db_create",
name=dbname,
character_set="utf8",
collate="utf8_general_ci",
connection_user=self.user,
connection_pass=self.password,
)
assert True == ret
# test db exists
ret = self.run_function(
"mysql.db_exists",
name=dbname,
connection_user=self.user,
connection_pass=self.password,
)
assert True == ret
# Create 3 tables
tablenames = {
'A%table "`1': "MYISAM",
"B%table '`2": "InnoDB",
"Ctable --`3": "MEMORY",
}
for tablename, engine in sorted(tablenames.items()):
# prepare queries
create_query = (
"CREATE TABLE {tblname} ("
" id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,"
" data VARCHAR(100)) ENGINE={engine};".format(
tblname=mysqlmod.quote_identifier(tablename),
engine=engine,
)
)
insert_query = "INSERT INTO {tblname} (data) VALUES ".format(
tblname=mysqlmod.quote_identifier(tablename)
)
delete_query = "DELETE from {tblname} order by rand() limit 50;".format(
tblname=mysqlmod.quote_identifier(tablename)
)
for x in range(100):
insert_query += "('foo" + str(x) + "'),"
insert_query += "('bar');"
# populate database
log.info("Adding table '%s'", tablename)
ret = self.run_function(
"mysql.query",
database=dbname,
query=create_query,
connection_user=self.user,
connection_pass=self.password,
)
if not isinstance(ret, dict) or "rows affected" not in ret:
raise AssertionError(
("Unexpected query result while populating test table" " '{}' : '{}'").format(
tablename,
ret,
)
)
assert ret["rows affected"] == 0
log.info("Populating table '%s'", tablename)
ret = self.run_function(
"mysql.query",
database=dbname,
query=insert_query,
connection_user=self.user,
connection_pass=self.password,
)
if not isinstance(ret, dict) or "rows affected" not in ret:
raise AssertionError(
("Unexpected query result while populating test table" " '{}' : '{}'").format(
tablename,
ret,
)
)
assert ret["rows affected"] == 101
log.info("Removing some rows on table'%s'", tablename)
ret = self.run_function(
"mysql.query",
database=dbname,
query=delete_query,
connection_user=self.user,
connection_pass=self.password,
)
if not isinstance(ret, dict) or "rows affected" not in ret:
raise AssertionError(
(
"Unexpected query result while removing rows on test table" " '{}' : '{}'"
).format(
tablename,
ret,
)
)
assert ret["rows affected"] == 50
# test check/repair/optimize on one table
tablename = 'A%table "`1'
ret = self.run_function(
"mysql.db_check",
name=dbname,
table=tablename,
connection_user=self.user,
connection_pass=self.password,
)
# Note that the returned result does not apply quote_identifier to the table and db names
assert ret == [
{
"Table": dbname + "." + tablename,
"Msg_text": "OK",
"Msg_type": "status",
"Op": "check",
}
]
ret = self.run_function(
"mysql.db_repair",
name=dbname,
table=tablename,
connection_user=self.user,
connection_pass=self.password,
)
# Note that the returned result does not apply quote_identifier to the table and db names
assert ret == [
{
"Table": dbname + "." + tablename,
"Msg_text": "OK",
"Msg_type": "status",
"Op": "repair",
}
]
ret = self.run_function(
"mysql.db_optimize",
name=dbname,
table=tablename,
connection_user=self.user,
connection_pass=self.password,
)
# Note that the returned result does not apply quote_identifier to the table and db names
assert ret == [
{
"Table": dbname + "." + tablename,
"Msg_text": "OK",
"Msg_type": "status",
"Op": "optimize",
}
]
# test check/repair/optimize on all tables
ret = self.run_function(
"mysql.db_check",
name=dbname,
connection_user=self.user,
connection_pass=self.password,
)
expected = []
for tablename, engine in sorted(tablenames.items()):
if engine == "MEMORY":
expected.append(
[
{
"Table": dbname + "." + tablename,
"Msg_text": ("The storage engine for the table doesn't support check"),
"Msg_type": "note",
"Op": "check",
}
]
)
else:
expected.append(
[
{
"Table": dbname + "." + tablename,
"Msg_text": "OK",
"Msg_type": "status",
"Op": "check",
}
]
)
assert ret == expected
ret = self.run_function(
"mysql.db_repair",
name=dbname,
connection_user=self.user,
connection_pass=self.password,
)
expected = []
for tablename, engine in sorted(tablenames.items()):
if engine == "MYISAM":
expected.append(
[
{
"Table": dbname + "." + tablename,
"Msg_text": "OK",
"Msg_type": "status",
"Op": "repair",
}
]
)
else:
expected.append(
[
{
"Table": dbname + "." + tablename,
"Msg_text": (
"The storage engine for the table doesn't" " support repair"
),
"Msg_type": "note",
"Op": "repair",
}
]
)
assert ret == expected
ret = self.run_function(
"mysql.db_optimize",
name=dbname,
connection_user=self.user,
connection_pass=self.password,
)
expected = []
for tablename, engine in sorted(tablenames.items()):
if engine == "MYISAM":
expected.append(
[
{
"Table": dbname + "." + tablename,
"Msg_text": "OK",
"Msg_type": "status",
"Op": "optimize",
}
]
)
elif engine == "InnoDB":
expected.append(
[
{
"Table": dbname + "." + tablename,
"Msg_text": (
"Table does not support optimize, "
"doing recreate + analyze instead"
),
"Msg_type": "note",
"Op": "optimize",
},
{
"Table": dbname + "." + tablename,
"Msg_text": "OK",
"Msg_type": "status",
"Op": "optimize",
},
]
)
elif engine == "MEMORY":
expected.append(
[
{
"Table": dbname + "." + tablename,
"Msg_text": (
"The storage engine for the table doesn't" " support optimize"
),
"Msg_type": "note",
"Op": "optimize",
}
]
)
assert ret == expected
# Teardown, remove database
ret = self.run_function(
"mysql.db_remove",
name=dbname,
connection_user=self.user,
connection_pass=self.password,
)
assert True == ret
@pytest.mark.skipif(
NO_MYSQL,
reason="Please install MySQL bindings and a MySQL Server before running "
"MySQL integration tests.",
)
@pytest.mark.windows_whitelisted
class MysqlModuleUserTest: # (ModuleCase, SaltReturnAssertsMixin):
"""
User Creation and connection tests
"""
user = "root"
password = "poney"
@pytest.mark.destructive_test
def setUp(self):
"""
Test presence of MySQL server, enforce a root password
"""
super().setUp()
NO_MYSQL_SERVER = True
# now ensure we know the mysql root password
# at least one of these two should work
ret1 = self.run_state(
"cmd.run",
name='mysqladmin --host="localhost" -u '
+ self.user
+ ' flush-privileges password "'
+ self.password
+ '"',
)
ret2 = self.run_state(
"cmd.run",
name='mysqladmin --host="localhost" -u '
+ self.user
+ ' --password="'
+ self.password
+ '" flush-privileges password "'
+ self.password
+ '"',
)
key, value = ret2.popitem()
if value["result"]:
NO_MYSQL_SERVER = False
else:
self.skipTest("No MySQL Server running, or no root access on it.")
def _userCreationLoop(
self,
uname,
host,
password=None,
new_password=None,
new_password_hash=None,
**kwargs,
):
"""
Perform some tests around creation of the given user
"""
# First silently remove it, in case it is left over from a previous run
ret = self.run_function("mysql.user_remove", user=uname, host=host, **kwargs)
# creation
ret = self.run_function(
"mysql.user_create", user=uname, host=host, password=password, **kwargs
)
assert True == ret, "Calling user_create on user '{}' did not return True: {}".format(
uname, repr(ret)
)
# double creation failure
ret = self.run_function(
"mysql.user_create", user=uname, host=host, password=password, **kwargs
)
assert False == ret, (
"Calling user_create a second time on" " user '{}' did not return False: {}"
).format(uname, repr(ret))
# Alter password
if new_password is not None or new_password_hash is not None:
ret = self.run_function(
"mysql.user_chpass",
user=uname,
host=host,
password=new_password,
password_hash=new_password_hash,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
assert True == ret, "Calling user_chpass on user '{}' did not return True: {}".format(
uname, repr(ret)
)
def _chck_userinfo(self, user, host, check_user, check_hash):
"""
Internal routine to check user_info returned results
"""
ret = self.run_function(
"mysql.user_info",
user=user,
host=host,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
if not isinstance(ret, dict):
raise AssertionError(f"Unexpected result while retrieving user_info for '{user}'")
assert ret["Host"] == host
assert ret["Password"] == check_hash
assert ret["User"] == check_user
def _chk_remove_user(self, user, host, **kwargs):
"""
Internal routine to check user_remove
"""
ret = self.run_function("mysql.user_remove", user=user, host=host, **kwargs)
assert True == ret, "Assertion failed while removing user '{}' on host '{}': {}".format(
user, host, repr(ret)
)
@pytest.mark.destructive_test
def test_user_management(self):
"""
Test various users creation settings
"""
# Create users with rights on this database
# and rights on other databases
user1 = "user '1"
user1_pwd = "pwd`'\"1b"
user1_pwd_hash = "*4DF33B3B12E43384677050A818327877FAB2F4BA"
# this is : user "2'標
user2 = "user \"2'\xe6\xa8\x99"
user2_pwd = "user \"2'\xe6\xa8\x99b"
user2_pwd_hash = "*3A38A7B94B024B983687BB9B44FB60B7AA38FE61"
user3 = 'user "3;,?:@=&/'
user3_pwd = 'user "3;,?:@=&/'
user3_pwd_hash = "*AA3B1D4105A45D381C23A5C221C47EA349E1FD7D"
# this is : user ":=;4標 in unicode instead of utf-8
# if unicode char is counted as 1 char we hit the max user
# size (16)
user4 = 'user":;,?:@=&/4\u6a19'
user4_utf8 = 'user":;,?:@=&/4\xe6\xa8\x99'
user4_pwd = 'user "4;,?:@=&/'
user4_pwd_hash = "*FC8EF8DBF27628E4E113359F8E7478D5CF3DD57C"
user5 = 'user ``"5'
user5_utf8 = 'user ``"5'
# this is 標標標\
user5_pwd = "\xe6\xa8\x99\xe6\xa8\x99\\"
# this is password('標標\\')
user5_pwd_hash = "*3752E65CDD8751AF8D889C62CFFC6C998B12C376"
user6 = 'user %--"6'
user6_utf8 = 'user %--"6'
# this is : --'"% SIX標b
user6_pwd_u = " --'\"% SIX\u6a19b"
user6_pwd_utf8 = " --'\"% SIX\xe6\xa8\x99b"
# this is password(' --\'"% SIX標b')
user6_pwd_hash = "*90AE800593E2D407CD9E28CCAFBE42D17EEA5369"
self._userCreationLoop(
uname=user1,
host="localhost",
password="pwd`'\"1",
new_password="pwd`'\"1b",
connection_user=self.user,
connection_pass=self.password,
)
# Now check for results
ret = self.run_function(
"mysql.user_exists",
user=user1,
host="localhost",
password=user1_pwd,
password_hash=None,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
assert True == ret, "Testing final user '{}' on host '{}' existence failed".format(
user1, "localhost"
)
self._userCreationLoop(
uname=user2,
host="localhost",
password=None,
# this is his name hash : user "2'標
password_hash="*EEF6F854748ACF841226BB1C2422BEC70AE7F1FF",
# and this is the same with a 'b' added
new_password_hash=user2_pwd_hash,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
# user2 can connect from other places with other password
self._userCreationLoop(
uname=user2,
host="10.0.0.1",
allow_passwordless=True,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
self._userCreationLoop(
uname=user2,
host="10.0.0.2",
allow_passwordless=True,
unix_socket=True,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
# Now check for results
ret = self.run_function(
"mysql.user_exists",
user=user2,
host="localhost",
password=None,
password_hash=user2_pwd_hash,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
assert True == ret, "Testing final user '{}' on host '{}' failed".format(user2, "localhost")
ret = self.run_function(
"mysql.user_exists",
user=user2,
host="10.0.0.1",
allow_passwordless=True,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
assert True == ret, "Testing final user '{}' on host '{}' without password failed".format(
user2, "10.0.0.1"
)
ret = self.run_function(
"mysql.user_exists",
user=user2,
host="10.0.0.2",
allow_passwordless=True,
unix_socket=True,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
assert True == ret, "Testing final user '{}' on host '{}' without password failed".format(
user2, "10.0.0.2"
)
# Empty password is not passwordless (or is it a bug?)
self._userCreationLoop(
uname=user3,
host="localhost",
password="",
connection_user=self.user,
connection_pass=self.password,
)
# user 3 on another host with a password
self._userCreationLoop(
uname=user3,
host="%",
password="foo",
new_password=user3_pwd,
connection_user=self.user,
connection_pass=self.password,
)
# Now check for results
ret = self.run_function(
"mysql.user_exists",
user=user3,
host="localhost",
password="",
connection_user=self.user,
connection_pass=self.password,
)
assert (
True == ret
), "Testing final user '{}' on host '{}' without empty password failed".format(
user3, "localhost"
)
ret = self.run_function(
"mysql.user_exists",
user=user3,
host="%",
password=user3_pwd,
connection_user=self.user,
connection_pass=self.password,
)
assert True == ret, "Testing final user '{}' on host '{}' with password failed".format(
user3, "%"
)
# check unicode name, and password > password_hash
self._userCreationLoop(
uname=user4,
host="%",
password=user4_pwd,
# this is password('foo')
password_hash="*F3A2A51A9B0F2BE2468926B4132313728C250DBF",
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
# Now check for results
ret = self.run_function(
"mysql.user_exists",
user=user4_utf8,
host="%",
password=user4_pwd,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
assert True == ret, (
"Testing final user '{}' on host '{}'"
" with password take from password and not password_hash"
" failed"
).format(user4_utf8, "%")
self._userCreationLoop(
uname=user5,
host="localhost",
password="\xe6\xa8\x99\xe6\xa8\x99",
new_password=user5_pwd,
unix_socket=True,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
ret = self.run_function(
"mysql.user_exists",
user=user5_utf8,
host="localhost",
password=user5_pwd,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
assert True == ret, "Testing final user '{}' on host '{}' with utf8 password failed".format(
user5_utf8, "localhost"
)
# for this one we give password in unicode and check it in utf-8
self._userCreationLoop(
uname=user6,
host="10.0.0.1",
password=" foobar",
new_password=user6_pwd_u,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
# Now check for results
ret = self.run_function(
"mysql.user_exists",
user=user6_utf8,
host="10.0.0.1",
password=user6_pwd_utf8,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
assert (
True == ret
), "Testing final user '{}' on host '{}' with unicode password failed".format(
user6_utf8, "10.0.0.1"
)
# Final result should be:
# mysql> select Host, User, Password from user where user like 'user%';
# +--------------------+-----------+-------------------------------+
# | User | Host | Password |
# +--------------------+-----------+-------------------------------+
# | user "2'標 | 10.0.0.1 | |
# | user "2'標 | 10.0.0.2 | |
# | user "2'標 | localhost | *3A38A7B94B0(...)60B7AA38FE61 |
# | user "3;,?:@=&/ | % | *AA3B1D4105(...)47EA349E1FD7D |
# | user "3;,?:@=&/ | localhost | |
# | user %--"6 | 10.0.0.1 | *90AE800593(...)E42D17EEA5369 |
# | user '1 | localhost | *4DF33B3B1(...)327877FAB2F4BA |
# | user ``"5 | localhost | *3752E65CD(...)FC6C998B12C376 |
# | user":;,?:@=&/4標 | % | *FC8EF8DBF(...)7478D5CF3DD57C |
# +--------------------+-----------+-------------------------------+
self._chck_userinfo(user=user2, host="10.0.0.1", check_user=user2, check_hash="")
self._chck_userinfo(user=user2, host="10.0.0.2", check_user=user2, check_hash="")
self._chck_userinfo(
user=user2, host="localhost", check_user=user2, check_hash=user2_pwd_hash
)
self._chck_userinfo(user=user3, host="%", check_user=user3, check_hash=user3_pwd_hash)
self._chck_userinfo(user=user3, host="localhost", check_user=user3, check_hash="")
self._chck_userinfo(user=user4, host="%", check_user=user4_utf8, check_hash=user4_pwd_hash)
self._chck_userinfo(
user=user6,
host="10.0.0.1",
check_user=user6_utf8,
check_hash=user6_pwd_hash,
)
self._chck_userinfo(
user=user1, host="localhost", check_user=user1, check_hash=user1_pwd_hash
)
self._chck_userinfo(
user=user5,
host="localhost",
check_user=user5_utf8,
check_hash=user5_pwd_hash,
)
# check user_list function
ret = self.run_function(
"mysql.user_list",
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
assert {"Host": "localhost", "User": user1} in ret
assert {"Host": "localhost", "User": user2} in ret
assert {"Host": "10.0.0.1", "User": user2} in ret
assert {"Host": "10.0.0.2", "User": user2} in ret
assert {"Host": "%", "User": user3} in ret
assert {"Host": "localhost", "User": user3} in ret
assert {"Host": "%", "User": user4_utf8} in ret
assert {"Host": "localhost", "User": user5_utf8} in ret
assert {"Host": "10.0.0.1", "User": user6_utf8} in ret
# And finally, test connections to MySQL with these users
ret = self.run_function(
"mysql.query",
database="information_schema",
query="SELECT 1",
connection_user=user1,
connection_pass="pwd`'\"1b",
connection_host="localhost",
)
if not isinstance(ret, dict) or "results" not in ret:
raise AssertionError(
"Unexpected result while testing connection with user '{}': {}".format(
user1, repr(ret)
)
)
assert [["1"]] == ret["results"]
# FIXME: still failing, but works by hand...
# mysql --user="user \"2'標" --password="user \"2'標b" information_schema
# Seems to be a python-mysql library problem with user names containing
# utf8 characters
# @see https://github.com/farcepest/MySQLdb1/issues/40
# import urllib
# ret = self.run_function(
# 'mysql.query',
# database='information_schema',
# query='SELECT 1',
# connection_user=urllib.quote_plus(user2),
# connection_pass=urllib.quote_plus(user2_pwd),
# connection_host='localhost',
# connection_charset='utf8',
# saltenv={"LC_ALL": "en_US.utf8"}
# )
# if not isinstance(ret, dict) or 'results' not in ret:
# raise AssertionError(
# ('Unexpected result while testing connection'
# ' with user \'{0}\': {1}').format(
# user2,
# repr(ret)
# )
# )
# self.assertEqual([['1']], ret['results'])
ret = self.run_function(
"mysql.query",
database="information_schema",
query="SELECT 1",
connection_user=user3,
connection_pass="",
connection_host="localhost",
)
if not isinstance(ret, dict) or "results" not in ret:
raise AssertionError(
"Unexpected result while testing connection with user '{}': {}".format(
user3, repr(ret)
)
)
assert [["1"]] == ret["results"]
# FIXME: Failing
# ret = self.run_function(
# 'mysql.query',
# database='information_schema',
# query='SELECT 1',
# connection_user=user4_utf8,
# connection_pass=user4_pwd,
# connection_host='localhost',
# connection_charset='utf8',
# saltenv={"LC_ALL": "en_US.utf8"}
# )
# if not isinstance(ret, dict) or 'results' not in ret:
# raise AssertionError(
# ('Unexpected result while testing connection'
# ' with user \'{0}\': {1}').format(
# user4_utf8,
# repr(ret)
# )
# )
# self.assertEqual([['1']], ret['results'])
ret = self.run_function(
"mysql.query",
database="information_schema",
query="SELECT 1",
connection_user=user5_utf8,
connection_pass=user5_pwd,
connection_host="localhost",
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
if not isinstance(ret, dict) or "results" not in ret:
raise AssertionError(
"Unexpected result while testing connection with user '{}': {}".format(
user5_utf8, repr(ret)
)
)
assert [["1"]] == ret["results"]
# Teardown by deleting with user_remove
self._chk_remove_user(
user=user2,
host="10.0.0.1",
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
self._chk_remove_user(
user=user2,
host="10.0.0.2",
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
self._chk_remove_user(
user=user2,
host="localhost",
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
self._chk_remove_user(
user=user3,
host="%",
connection_user=self.user,
connection_pass=self.password,
)
self._chk_remove_user(
user=user3,
host="localhost",
connection_user=self.user,
connection_pass=self.password,
)
self._chk_remove_user(
user=user4,
host="%",
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
self._chk_remove_user(
user=user6,
host="10.0.0.1",
connection_user=self.user,
connection_pass=self.password,
)
self._chk_remove_user(
user=user1,
host="localhost",
connection_user=self.user,
connection_pass=self.password,
)
self._chk_remove_user(
user=user5,
host="localhost",
connection_user=self.user,
connection_pass=self.password,
)
# Final verification of the cleanup
ret = self.run_function(
"mysql.user_list",
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
assert {"Host": "localhost", "User": user1} not in ret
assert {"Host": "localhost", "User": user2} not in ret
assert {"Host": "10.0.0.1", "User": user2} not in ret
assert {"Host": "10.0.0.2", "User": user2} not in ret
assert {"Host": "%", "User": user3} not in ret
assert {"Host": "localhost", "User": user3} not in ret
assert {"Host": "%", "User": user4_utf8} not in ret
assert {"Host": "localhost", "User": user5_utf8} not in ret
assert {"Host": "10.0.0.1", "User": user6_utf8} not in ret
@pytest.mark.skipif(
NO_MYSQL,
reason="Please install MySQL bindings and a MySQL Server before running "
"MySQL integration tests.",
)
@pytest.mark.windows_whitelisted
class MysqlModuleUserGrantTest: # (ModuleCase, SaltReturnAssertsMixin):
"""
User Creation and connection tests
"""
user = "root"
password = "poney"
# yep, these are valid MySQL db names
# the very special chars are _, % and .
testdb1 = "tes.t'\"saltdb"
testdb2 = "t_st `(:=salt%b)"
testdb3 = "test `(:=salteeb)"
test_file_query_db = "test_query"
table1 = "foo"
table2 = "foo `'%_bar"
users = {
"user1": {"name": "foo", "pwd": "bar"},
"user2": {"name": 'user ";--,?:&/\\', "pwd": '";--(),?:@=&/\\'},
# this is : passwd 標標
"user3": {"name": "user( @ )=foobar", "pwd": "\xe6\xa8\x99\xe6\xa8\x99"},
# this is : user/password containing 標標
"user4": {"name": "user \xe6\xa8\x99", "pwd": "\xe6\xa8\x99\xe6\xa8\x99"},
}
@pytest.mark.destructive_test
def setUp(self):
"""
Test presence of MySQL server, enforce a root password, create users
"""
super().setUp()
NO_MYSQL_SERVER = True
# now ensure we know the mysql root password
# at least one of these two should work
ret1 = self.run_state(
"cmd.run",
name='mysqladmin --host="localhost" -u '
+ self.user
+ ' flush-privileges password "'
+ self.password
+ '"',
)
ret2 = self.run_state(
"cmd.run",
name='mysqladmin --host="localhost" -u '
+ self.user
+ ' --password="'
+ self.password
+ '" flush-privileges password "'
+ self.password
+ '"',
)
key, value = ret2.popitem()
if value["result"]:
NO_MYSQL_SERVER = False
else:
self.skipTest("No MySQL Server running, or no root access on it.")
# Create some users and a test db
for user, userdef in self.users.items():
self._userCreation(uname=userdef["name"], password=userdef["pwd"])
self.run_function(
"mysql.db_create",
name=self.testdb1,
connection_user=self.user,
connection_pass=self.password,
)
self.run_function(
"mysql.db_create",
name=self.testdb2,
connection_user=self.user,
connection_pass=self.password,
)
create_query = (
"CREATE TABLE {tblname} ("
" id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,"
" data VARCHAR(100)) ENGINE={engine};".format(
tblname=mysqlmod.quote_identifier(self.table1),
engine="MYISAM",
)
)
log.info("Adding table '%s'", self.table1)
self.run_function(
"mysql.query",
database=self.testdb2,
query=create_query,
connection_user=self.user,
connection_pass=self.password,
)
create_query = (
"CREATE TABLE {tblname} ("
" id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,"
" data VARCHAR(100)) ENGINE={engine};".format(
tblname=mysqlmod.quote_identifier(self.table2),
engine="MYISAM",
)
)
log.info("Adding table '%s'", self.table2)
self.run_function(
"mysql.query",
database=self.testdb2,
query=create_query,
connection_user=self.user,
connection_pass=self.password,
)
@pytest.mark.destructive_test
def tearDown(self):
"""
Removes created users and db
"""
for user, userdef in self.users.items():
self._userRemoval(uname=userdef["name"], password=userdef["pwd"])
self.run_function(
"mysql.db_remove",
name=self.testdb1,
connection_user=self.user,
connection_pass=self.password,
)
self.run_function(
"mysql.db_remove",
name=self.testdb2,
connection_user=self.user,
connection_pass=self.password,
)
self.run_function(
"mysql.db_remove",
name=self.test_file_query_db,
connection_user=self.user,
connection_pass=self.password,
)
def _userCreation(self, uname, password=None):
"""
Create a test user
"""
self.run_function(
"mysql.user_create",
user=uname,
host="localhost",
password=password,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
def _userRemoval(self, uname, password=None):
"""
Removes a test user
"""
self.run_function(
"mysql.user_remove",
user=uname,
host="localhost",
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
def _addGrantRoutine(self, grant, user, db, grant_option=False, escape=True, **kwargs):
"""
Perform some tests around creation of the given grants
"""
ret = self.run_function(
"mysql.grant_add",
grant=grant,
database=db,
user=user,
grant_option=grant_option,
escape=escape,
**kwargs,
)
assert True == ret, (
"Calling grant_add on user '{}' and grants '{}' did not return True: {}"
).format(user, grant, repr(ret))
ret = self.run_function(
"mysql.grant_exists",
grant=grant,
database=db,
user=user,
grant_option=grant_option,
escape=escape,
**kwargs,
)
assert True == ret, (
"Calling grant_exists on" " user '{}' and grants '{}' did not return True: {}"
).format(user, grant, repr(ret))
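    # Note on the helper above: any extra **kwargs (e.g. connection_user,
    # connection_pass, connection_charset) are forwarded unchanged to both
    # mysql.grant_add and mysql.grant_exists, so each call in testGrants
    # below exercises the add and the check against the same connection
    # settings.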
@pytest.mark.destructive_test
def testGrants(self):
"""
Test user grant methods
"""
self._addGrantRoutine(
grant="SELECT, INSERT,UPDATE, CREATE",
user=self.users["user1"]["name"],
db=self.testdb1 + ".*",
grant_option=True,
escape=True,
connection_user=self.user,
connection_pass=self.password,
)
self._addGrantRoutine(
grant="INSERT, SELECT",
user=self.users["user1"]["name"],
db=self.testdb2 + "." + self.table1,
grant_option=True,
escape=True,
connection_user=self.user,
connection_pass=self.password,
)
self._addGrantRoutine(
grant=" SELECT, UPDATE,DELETE, CREATE TEMPORARY TABLES",
user=self.users["user2"]["name"],
db=self.testdb1 + ".*",
grant_option=True,
escape=True,
connection_user=self.user,
connection_pass=self.password,
)
self._addGrantRoutine(
grant="select, ALTER,CREATE TEMPORARY TABLES, EXECUTE ",
user=self.users["user3"]["name"],
db=self.testdb1 + ".*",
grant_option=True,
escape=True,
connection_user=self.user,
connection_pass=self.password,
)
self._addGrantRoutine(
grant="SELECT, INSERT",
user=self.users["user4"]["name"],
db=self.testdb2 + "." + self.table2,
grant_option=False,
escape=True,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
)
self._addGrantRoutine(
grant="CREATE",
user=self.users["user4"]["name"],
db=self.testdb2 + ".*",
grant_option=False,
escape=True,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
)
self._addGrantRoutine(
grant="SELECT, INSERT",
user=self.users["user4"]["name"],
db=self.testdb2 + "." + self.table1,
grant_option=False,
escape=True,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
)
# '' is valid for anonymous users
self._addGrantRoutine(
grant="DELETE",
user="",
db=self.testdb3 + ".*",
grant_option=False,
escape=True,
connection_user=self.user,
connection_pass=self.password,
)
# Check result for users
ret = self.run_function(
"mysql.user_grants",
user=self.users["user1"]["name"],
host="localhost",
connection_user=self.user,
connection_pass=self.password,
)
assert ret == [
"GRANT USAGE ON *.* TO 'foo'@'localhost'",
"GRANT SELECT, INSERT, UPDATE, CREATE ON "
"`tes.t'\"saltdb`.* TO 'foo'@'localhost' WITH GRANT OPTION",
"GRANT SELECT, INSERT ON `t_st ``(:=salt%b)`.`foo`"
" TO 'foo'@'localhost' WITH GRANT OPTION",
]
ret = self.run_function(
"mysql.user_grants",
user=self.users["user2"]["name"],
host="localhost",
connection_user=self.user,
connection_pass=self.password,
)
assert ret == [
"GRANT USAGE ON *.* TO 'user \";--,?:&/\\'@'localhost'",
"GRANT SELECT, UPDATE, DELETE, CREATE TEMPORARY TABLES ON `tes.t'"
"\"saltdb`.* TO 'user \";--,?:&/\\'@'localhost'"
" WITH GRANT OPTION",
]
ret = self.run_function(
"mysql.user_grants",
user=self.users["user3"]["name"],
host="localhost",
connection_user=self.user,
connection_pass=self.password,
)
assert ret == [
"GRANT USAGE ON *.* TO 'user( @ )=foobar'@'localhost'",
"GRANT SELECT, ALTER, CREATE TEMPORARY TABLES, EXECUTE ON "
"`tes.t'\"saltdb`.* TO 'user( @ )=foobar'@'localhost' "
"WITH GRANT OPTION",
]
ret = self.run_function(
"mysql.user_grants",
user=self.users["user4"]["name"],
host="localhost",
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
)
assert ret == [
"GRANT USAGE ON *.* TO 'user \xe6\xa8\x99'@'localhost'",
(r"GRANT CREATE ON `t\_st ``(:=salt\%b)`.* TO 'user \xe6\xa8\x99'@'localhost'"),
"GRANT SELECT, INSERT ON `t_st ``(:=salt%b)`.`foo ``'%_bar` TO "
"'user \xe6\xa8\x99'@'localhost'",
"GRANT SELECT, INSERT ON `t_st ``(:=salt%b)`.`foo` TO "
"'user \xe6\xa8\x99'@'localhost'",
]
ret = self.run_function(
"mysql.user_grants",
user="",
host="localhost",
connection_user=self.user,
connection_pass=self.password,
)
assert ret == [
"GRANT USAGE ON *.* TO ''@'localhost'",
"GRANT DELETE ON `test ``(:=salteeb)`.* TO ''@'localhost'",
]
@pytest.mark.skipif(
NO_MYSQL,
reason="Please install MySQL bindings and a MySQL Server before running "
"MySQL integration tests.",
)
@pytest.mark.windows_whitelisted
class MysqlModuleFileQueryTest: # (ModuleCase, SaltReturnAssertsMixin):
"""
Test file query module
"""
user = "root"
password = "poney"
testdb = "test_file_query"
@pytest.mark.destructive_test
def setUp(self):
"""
        Test presence of MySQL server, enforce a root password, create a test db
"""
super().setUp()
NO_MYSQL_SERVER = True
# now ensure we know the mysql root password
        # at least one of these two should work
ret1 = self.run_state(
"cmd.run",
name='mysqladmin --host="localhost" -u '
+ self.user
+ ' flush-privileges password "'
+ self.password
+ '"',
)
ret2 = self.run_state(
"cmd.run",
name='mysqladmin --host="localhost" -u '
+ self.user
+ ' --password="'
+ self.password
+ '" flush-privileges password "'
+ self.password
+ '"',
)
key, value = ret2.popitem()
if value["result"]:
NO_MYSQL_SERVER = False
else:
self.skipTest("No MySQL Server running, or no root access on it.")
        # Create a test db
self.run_function(
"mysql.db_create",
name=self.testdb,
connection_user=self.user,
connection_pass=self.password,
connection_db="mysql",
)
@pytest.mark.destructive_test
def tearDown(self):
"""
        Removes the created db
"""
self.run_function(
"mysql.db_remove",
name=self.testdb,
connection_user=self.user,
connection_pass=self.password,
connection_db="mysql",
)
@pytest.mark.destructive_test
def test_update_file_query(self):
"""
Test query without any output
"""
ret = self.run_function(
"mysql.file_query",
database=self.testdb,
file_name="salt://mysql/update_query.sql",
character_set="utf8",
collate="utf8_general_ci",
connection_user=self.user,
connection_pass=self.password,
)
assert "query time" in ret
ret.pop("query time")
assert ret == {"rows affected": 2}
@pytest.mark.destructive_test
def test_select_file_query(self):
"""
Test query with table output
"""
ret = self.run_function(
"mysql.file_query",
database=self.testdb,
file_name="salt://mysql/select_query.sql",
character_set="utf8",
collate="utf8_general_ci",
connection_user=self.user,
connection_pass=self.password,
)
expected = {
"rows affected": 5,
"rows returned": 4,
"results": [[["2"], ["3"], ["4"], ["5"]]],
"columns": [["a"]],
}
assert "query time" in ret
ret.pop("query time")
assert ret == expected
070701000000ED000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003D00000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/integration/pillar070701000000EE000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004900000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/integration/pillar/__init__.py070701000000EF000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000004000000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/integration/returners070701000000F0000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004C00000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/integration/returners/__init__.py070701000000F1000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003D00000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/integration/states070701000000F2000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004900000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/integration/states/__init__.py070701000000F3000081A400000000000000000000000167471E9C000019D2000000000000000000000000000000000000005400000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/integration/states/test_mysql_database.py# pylint: disable-all
"""
Tests for the MySQL states
"""
import pytest
import salt.utils.path
# from tests.support.case import ModuleCase
# from tests.support.mixins import SaltReturnAssertsMixin
NO_MYSQL = False
try:
import MySQLdb # pylint: disable=import-error,unused-import
except ImportError:
NO_MYSQL = True
if not salt.utils.path.which("mysqladmin"):
NO_MYSQL = True
pytest.skip(reason="These tests were not converted to pytest yet", allow_module_level=True)
@pytest.mark.skipif(
NO_MYSQL,
reason="Please install MySQL bindings and a MySQL Server before running "
"MySQL integration tests.",
)
class MysqlDatabaseStateTest: # (ModuleCase, SaltReturnAssertsMixin):
"""
Validate the mysql_database state
"""
user = "root"
password = "poney"
@pytest.mark.destructive_test
def setUp(self):
"""
Test presence of MySQL server, enforce a root password
"""
super().setUp()
NO_MYSQL_SERVER = True
# now ensure we know the mysql root password
        # at least one of these two should work
ret1 = self.run_state(
"cmd.run",
name='mysqladmin --host="localhost" -u '
+ self.user
+ ' flush-privileges password "'
+ self.password
+ '"',
)
ret2 = self.run_state(
"cmd.run",
name='mysqladmin --host="localhost" -u '
+ self.user
+ ' --password="'
+ self.password
+ '" flush-privileges password "'
+ self.password
+ '"',
)
key, value = ret2.popitem()
if value["result"]:
NO_MYSQL_SERVER = False
else:
self.skipTest("No MySQL Server running, or no root access on it.")
def _test_database(self, db_name, second_db_name, test_conn, **kwargs):
"""
Create db two times, test conn, remove it two times
"""
# In case of...
ret = self.run_state("mysql_database.absent", name=db_name, **kwargs)
ret = self.run_state("mysql_database.present", name=db_name, **kwargs)
self.assertSaltTrueReturn(ret)
self.assertInSaltComment("The database " + db_name + " has been created", ret)
# 2nd run
ret = self.run_state("mysql_database.present", name=second_db_name, **kwargs)
self.assertSaltTrueReturn(ret)
self.assertInSaltComment("Database " + db_name + " is already present", ret)
if test_conn:
# test root connection
ret = self.run_function("mysql.query", database=db_name, query="SELECT 1", **kwargs)
if not isinstance(ret, dict) or "results" not in ret:
raise AssertionError(
("Unexpected result while testing connection on db '{}': {}").format(
db_name, repr(ret)
)
)
assert [["1"]] == ret["results"]
# Now removing databases
kwargs.pop("character_set")
kwargs.pop("collate")
ret = self.run_state("mysql_database.absent", name=db_name, **kwargs)
self.assertSaltTrueReturn(ret)
self.assertInSaltComment("Database " + db_name + " has been removed", ret)
# 2nd run
ret = self.run_state("mysql_database.absent", name=second_db_name, **kwargs)
self.assertSaltTrueReturn(ret)
self.assertInSaltComment(
"Database " + db_name + " is not present, so it cannot be removed", ret
)
self.assertSaltStateChangesEqual(ret, {})
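    # Flow of the helper above: a defensive `absent`, `present` twice to
    # cover creation and idempotency, an optional root-connection check via
    # mysql.query, then `absent` twice to cover removal and idempotency.
    # character_set/collate are popped from kwargs before the `absent`
    # calls, presumably because mysql_database.absent does not take them.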
@pytest.mark.destructive_test
def test_present_absent(self):
"""
        mysql_database.present and mysql_database.absent
"""
self._test_database(
"testdb1",
"testdb1",
test_conn=True,
character_set="utf8",
collate="utf8_general_ci",
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
)
# TODO: test with variations on collate and charset, check for db alter
# once it will be done in mysql_database.present state
@pytest.mark.destructive_test
def test_present_absent_fuzzy(self):
"""
        mysql_database.present with utf-8 and fuzzy db names
"""
# this is : ":() ;,?@=`&'\
dbname_fuzzy = "\":() ;,?@=`&/'\\"
# \xe6\xa8\x99\ = \u6a19 = 標
# this is : "();,?:@=`&/標'\
dbname_utf8 = "\"();,?@=`&//\xe6\xa8\x99'\\"
dbname_unicode = "\"();,?@=`&//\u6a19'\\"
self._test_database(
dbname_fuzzy,
dbname_fuzzy,
test_conn=True,
character_set="utf8",
collate="utf8_general_ci",
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
)
# FIXME: MySQLdb bugs on dbnames with utf-8?
self._test_database(
dbname_utf8,
dbname_unicode,
test_conn=False,
character_set="utf8",
collate="utf8_general_ci",
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
# saltenv={"LC_ALL": "en_US.utf8"}
)
@pytest.mark.destructive_test
@pytest.mark.skip(reason="This tests needs issue #8947 to be fixed first")
def test_utf8_from_sls_file(self):
"""
        Try to create/destroy a utf-8 database name from an sls file #8947
"""
expected_result = {
"mysql_database_|-A_|-foo \xe6\xba\x96`bar_|-present": {
"__run_num__": 0,
"comment": "The database foo \xe6\xba\x96`bar has been created",
"result": True,
},
"mysql_database_|-B_|-foo \xe6\xba\x96`bar_|-absent": {
"__run_num__": 1,
"comment": "Database foo \xe6\xba\x96`bar has been removed",
"result": True,
},
}
result = {}
ret = self.run_function("state.sls", mods="mysql_utf8")
if not isinstance(ret, dict):
raise AssertionError(
("Unexpected result while testing external mysql utf8 sls: {}").format(repr(ret))
)
for item, descr in ret.items():
result[item] = {
"__run_num__": descr["__run_num__"],
"comment": descr["comment"],
"result": descr["result"],
}
assert expected_result == result
070701000000F4000081A400000000000000000000000167471E9C000022B0000000000000000000000000000000000000005200000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/integration/states/test_mysql_grants.py# pylint: disable-all
"""
Tests for the MySQL states
"""
import logging
import pytest
import salt.utils.path
from salt.modules import mysql as mysqlmod
# from tests.support.case import ModuleCase
# from tests.support.mixins import SaltReturnAssertsMixin
log = logging.getLogger(__name__)
NO_MYSQL = False
try:
import MySQLdb # pylint: disable=import-error,unused-import
except ImportError:
NO_MYSQL = True
if not salt.utils.path.which("mysqladmin"):
NO_MYSQL = True
pytest.skip(reason="These tests were not converted to pytest yet", allow_module_level=True)
@pytest.mark.skipif(
NO_MYSQL,
reason="Please install MySQL bindings and a MySQL Server before running "
"MySQL integration tests.",
)
class MysqlGrantsStateTest: # (ModuleCase, SaltReturnAssertsMixin):
"""
Validate the mysql_grants states
"""
user = "root"
password = "poney"
    # yep, these are valid MySQL db names
# very special chars are _ % and .
testdb1 = "tes.t'\"saltdb"
testdb2 = "t_st `(:=salt%b)"
testdb3 = "test `(:=salteeb)"
table1 = "foo"
table2 = "foo `'%_bar"
users = {
"user1": {"name": "foo", "pwd": "bar"},
"user2": {"name": 'user ";--,?:&/\\', "pwd": '";--(),?:@=&/\\'},
# this is : passwd 標標
"user3": {"name": "user( @ )=foobar", "pwd": "\xe6\xa8\x99\xe6\xa8\x99"},
# this is : user/password containing 標標
"user4": {"name": "user \xe6\xa8\x99", "pwd": "\xe6\xa8\x99\xe6\xa8\x99"},
}
@pytest.mark.destructive_test
def setUp(self):
"""
Test presence of MySQL server, enforce a root password
"""
super().setUp()
NO_MYSQL_SERVER = True
# now ensure we know the mysql root password
        # at least one of these two should work
ret1 = self.run_state(
"cmd.run",
name='mysqladmin --host="localhost" -u '
+ self.user
+ ' flush-privileges password "'
+ self.password
+ '"',
)
ret2 = self.run_state(
"cmd.run",
name='mysqladmin --host="localhost" -u '
+ self.user
+ ' --password="'
+ self.password
+ '" flush-privileges password "'
+ self.password
+ '"',
)
key, value = ret2.popitem()
if value["result"]:
NO_MYSQL_SERVER = False
else:
self.skipTest("No MySQL Server running, or no root access on it.")
# Create some users and a test db
for user, userdef in self.users.items():
self._userCreation(uname=userdef["name"], password=userdef["pwd"])
self.run_state(
"mysql_database.present",
name=self.testdb1,
character_set="utf8",
collate="utf8_general_ci",
connection_user=self.user,
connection_pass=self.password,
)
self.run_state(
"mysql_database.present",
name=self.testdb2,
character_set="utf8",
collate="utf8_general_ci",
connection_user=self.user,
connection_pass=self.password,
)
create_query = (
"CREATE TABLE {tblname} ("
" id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,"
" data VARCHAR(100)) ENGINE={engine};".format(
tblname=mysqlmod.quote_identifier(self.table1),
engine="MYISAM",
)
)
log.info("Adding table '%s'", self.table1)
self.run_function(
"mysql.query",
database=self.testdb2,
query=create_query,
connection_user=self.user,
connection_pass=self.password,
)
create_query = (
"CREATE TABLE {tblname} ("
" id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,"
" data VARCHAR(100)) ENGINE={engine};".format(
tblname=mysqlmod.quote_identifier(self.table2),
engine="MYISAM",
)
)
log.info("Adding table '%s'", self.table2)
self.run_function(
"mysql.query",
database=self.testdb2,
query=create_query,
connection_user=self.user,
connection_pass=self.password,
)
@pytest.mark.destructive_test
def tearDown(self):
"""
Removes created users and db
"""
for user, userdef in self.users.items():
self._userRemoval(uname=userdef["name"], password=userdef["pwd"])
self.run_state(
"mysql_database.absent",
name=self.testdb1,
connection_user=self.user,
connection_pass=self.password,
)
        self.run_state(
"mysql_database.absent",
name=self.testdb2,
connection_user=self.user,
connection_pass=self.password,
)
def _userCreation(self, uname, password=None):
"""
Create a test user
"""
self.run_state(
"mysql_user.present",
name=uname,
host="localhost",
password=password,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
def _userRemoval(self, uname, password=None):
"""
Removes a test user
"""
self.run_state(
"mysql_user.absent",
name=uname,
host="localhost",
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
saltenv={"LC_ALL": "en_US.utf8"},
)
@pytest.mark.destructive_test
def test_grant_present_absent(self):
"""
        mysql_grants.present and mysql_grants.absent
"""
ret = self.run_state(
"mysql_grants.present",
name="grant test 1",
grant="SELECT, INSERT",
database=self.testdb1 + ".*",
user=self.users["user1"]["name"],
host="localhost",
grant_option=True,
revoke_first=True,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
)
self.assertSaltTrueReturn(ret)
ret = self.run_state(
"mysql_grants.present",
name="grant test 2",
grant="SELECT, ALTER,CREATE TEMPORARY tables, execute",
database=self.testdb1 + ".*",
user=self.users["user1"]["name"],
host="localhost",
grant_option=True,
revoke_first=True,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
)
self.assertSaltTrueReturn(ret)
ret = self.run_state(
"mysql_grants.present",
name="grant test 3",
grant="SELECT, INSERT",
database=self.testdb2 + "." + self.table2,
user=self.users["user2"]["name"],
host="localhost",
grant_option=True,
revoke_first=True,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
)
self.assertSaltTrueReturn(ret)
ret = self.run_state(
"mysql_grants.present",
name="grant test 4",
grant="SELECT, INSERT",
database=self.testdb2 + "." + self.table2,
user=self.users["user2"]["name"],
host="localhost",
grant_option=True,
revoke_first=True,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
)
self.assertSaltTrueReturn(ret)
ret = self.run_state(
"mysql_grants.present",
name="grant test 5",
grant="SELECT, UPDATE",
database=self.testdb2 + ".*",
user=self.users["user1"]["name"],
host="localhost",
grant_option=True,
revoke_first=False,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
)
self.assertSaltTrueReturn(ret)
ret = self.run_state(
"mysql_grants.absent",
name="grant test 6",
grant="SELECT,update",
database=self.testdb2 + ".*",
user=self.users["user1"]["name"],
host="localhost",
grant_option=True,
revoke_first=False,
connection_user=self.user,
connection_pass=self.password,
connection_charset="utf8",
)
self.assertSaltTrueReturn(ret)
070701000000F5000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003200000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/support070701000000F6000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000003E00000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/support/__init__.py070701000000F7000081A400000000000000000000000167471E9C00001766000000000000000000000000000000000000003B00000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/support/mysql.pyimport logging
import time
import attr
import pytest
from pytestskipmarkers.utils import platform
from saltfactories.utils import random_string
# This `pytest.importorskip` here actually works because this module
# is imported by the test modules; otherwise the skipping would just fail.
pytest.importorskip("docker")
import docker.errors # isort:skip pylint:disable=wrong-import-position
log = logging.getLogger(__name__)
@attr.s(kw_only=True, slots=True)
class MySQLImage:
name = attr.ib()
tag = attr.ib()
container_id = attr.ib()
def __str__(self):
return f"{self.name}:{self.tag}"
@attr.s(kw_only=True, slots=True)
class MySQLCombo:
mysql_name = attr.ib()
mysql_version = attr.ib()
mysql_port = attr.ib(default=None)
mysql_host = attr.ib(default="%")
mysql_user = attr.ib()
mysql_passwd = attr.ib()
mysql_database = attr.ib(default=None)
mysql_root_user = attr.ib(default="root")
mysql_root_passwd = attr.ib()
container = attr.ib(default=None)
container_id = attr.ib()
@container_id.default
def _default_container_id(self):
return random_string(
"{}-{}-".format( # pylint: disable=consider-using-f-string
self.mysql_name.replace("/", "-"),
self.mysql_version,
)
)
@mysql_root_passwd.default
def _default_mysql_root_user_passwd(self):
return self.mysql_passwd
def get_credentials(self, **kwargs):
return {
"connection_user": kwargs.get("connection_user") or self.mysql_root_user,
"connection_pass": kwargs.get("connection_pass") or self.mysql_root_passwd,
"connection_db": kwargs.get("connection_db") or "mysql",
"connection_port": kwargs.get("connection_port") or self.mysql_port,
}
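    # Rough usage sketch (hypothetical values): for a combo built with
    # mysql_passwd="Pa55w0rd!" and a bound port of 33060,
    # get_credentials(connection_db="salt") would return
    #   {"connection_user": "root", "connection_pass": "Pa55w0rd!",
    #    "connection_db": "salt", "connection_port": 33060}
    # since the root user/password default to mysql_root_user and
    # mysql_root_passwd (which itself defaults to mysql_passwd).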
def get_test_versions():
test_versions = []
name = "mysql-server"
for version in ("5.5", "5.6", "5.7", "8.0"):
test_versions.append(
MySQLImage(
name=name,
tag=version,
container_id=random_string(f"mysql-{version}-"),
)
)
name = "mariadb"
for version in ("10.3", "10.4", "10.5"):
test_versions.append(
MySQLImage(
name=name,
tag=version,
container_id=random_string(f"mariadb-{version}-"),
)
)
name = "percona"
for version in ("5.6", "5.7", "8.0"):
test_versions.append(
MySQLImage(
name=name,
tag=version,
container_id=random_string(f"percona-{version}-"),
)
)
return test_versions
def get_test_version_id(value):
return f"container={value}"
@pytest.fixture(scope="module", params=get_test_versions(), ids=get_test_version_id)
def mysql_image(request):
return request.param
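# Because mysql_image is module-scoped and parametrized over
# get_test_versions(), every test module that pulls in the container
# fixtures below runs once per listed mysql-server, mariadb and percona
# image.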
@pytest.fixture(scope="module")
def create_mysql_combo(mysql_image):
if platform.is_fips_enabled():
if mysql_image.name in ("mysql-server", "percona") and mysql_image.tag == "8.0":
pytest.skip(f"These tests fail on {mysql_image.name}:{mysql_image.tag}")
return MySQLCombo(
mysql_name=mysql_image.name,
mysql_version=mysql_image.tag,
mysql_user="salt-mysql-user",
mysql_passwd="Pa55w0rd!",
container_id=mysql_image.container_id,
)
@pytest.fixture(scope="module")
def mysql_combo(create_mysql_combo):
return create_mysql_combo
def check_container_started(timeout_at, container, combo):
sleeptime = 0.5
while time.time() <= timeout_at:
try:
if not container.is_running():
log.warning("%s is no longer running", container)
return False
ret = container.run(
"mysql",
f"--user={combo.mysql_user}",
f"--password={combo.mysql_passwd}",
"-e",
"SELECT 1",
)
if ret.returncode == 0:
break
except docker.errors.APIError:
log.exception("Failed to run start check")
time.sleep(sleeptime)
sleeptime *= 2
else:
return False
time.sleep(0.5)
return True
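# check_container_started polls `SELECT 1` through the mysql client inside
# the container, doubling the sleep between attempts (0.5s, 1s, 2s, ...)
# until timeout_at; it returns True once the query succeeds and False if
# the container stops running or the deadline passes.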
def set_container_name_before_start(container):
"""
    This is useful if the container has to be restarted and the old
    container, under the same name, was left running but in a bad shape.
"""
container.name = random_string(
"{}-".format(container.name.rsplit("-", 1)[0]) # pylint: disable=consider-using-f-string
)
container.display_name = None
return container
@pytest.fixture(scope="module")
def mysql_container(salt_factories, mysql_combo):
container_environment = {
"MYSQL_ROOT_PASSWORD": mysql_combo.mysql_passwd,
"MYSQL_ROOT_HOST": mysql_combo.mysql_host,
"MYSQL_USER": mysql_combo.mysql_user,
"MYSQL_PASSWORD": mysql_combo.mysql_passwd,
}
if mysql_combo.mysql_database:
container_environment["MYSQL_DATABASE"] = mysql_combo.mysql_database
container = salt_factories.get_container(
mysql_combo.container_id,
"ghcr.io/saltstack/salt-ci-containers/{}:{}".format( # pylint: disable=consider-using-f-string
mysql_combo.mysql_name, mysql_combo.mysql_version
),
pull_before_start=True,
skip_on_pull_failure=True,
skip_if_docker_client_not_connectable=True,
container_run_kwargs={
"ports": {"3306/tcp": None},
"environment": container_environment,
},
)
container.before_start(set_container_name_before_start, container)
container.container_start_check(check_container_started, container, mysql_combo)
with container.started():
mysql_combo.container = container
mysql_combo.mysql_port = container.get_host_port_binding(3306, protocol="tcp", ipv6=False)
yield mysql_combo
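# The fixture above starts the container, stores the host port Docker bound
# to 3306/tcp on mysql_combo.mysql_port, and yields the combo; tests can
# then build connection kwargs with mysql_combo.get_credentials().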
070701000000F8000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000002F00000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/unit070701000000F9000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000003B00000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/unit/__init__.py070701000000FA000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003400000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/unit/auth070701000000FB000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004000000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/unit/auth/__init__.py070701000000FC000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003500000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/unit/cache070701000000FD000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004100000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/unit/cache/__init__.py070701000000FE000081A400000000000000000000000167471E9C00002076000000000000000000000000000000000000004900000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/unit/cache/test_mysql_cache.py"""
unit tests for the mysql_cache cache
"""
import logging
from unittest.mock import MagicMock
from unittest.mock import call
from unittest.mock import patch
import pytest
from salt.exceptions import SaltCacheError
from saltext.mysql.cache import mysql_cache
log = logging.getLogger(__name__)
pytestmark = [
pytest.mark.skipif(mysql_cache.MySQLdb is None, reason="No python mysql client installed.")
]
@pytest.fixture
def configure_loader_modules():
return {mysql_cache: {}}
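# configure_loader_modules is the hook the Salt loader mocking looks for:
# the empty dict just registers mysql_cache, and the individual tests patch
# __context__/__opts__ themselves via patch.dict below.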
def test_run_query():
"""
    Tests that run_query executes the given query on the provided connection.
"""
with patch("MySQLdb.connect", MagicMock()) as mock_connect:
expected_calls = call.cursor().execute("SELECT 1;")
mysql_cache.run_query(conn=mock_connect, query="SELECT 1;")
mock_connect.assert_has_calls((expected_calls,), True)
def test_store():
"""
    Tests that the store function serializes the data and writes it to the cache table.
"""
mock_connect_client = MagicMock()
with patch.object(mysql_cache, "_init_client") as mock_init_client:
with patch.dict(
mysql_cache.__context__,
{
"mysql_table_name": "salt",
"mysql_client": mock_connect_client,
},
):
with patch.object(mysql_cache, "run_query") as mock_run_query:
mock_run_query.return_value = (MagicMock(), 1)
expected_calls = [
call(
mock_connect_client,
"REPLACE INTO salt (bank, etcd_key, data) values(%s,%s,%s)",
args=("minions/minion", "key1", b"\xa4data"),
)
]
try:
mysql_cache.store(bank="minions/minion", key="key1", data="data")
except SaltCacheError:
pytest.fail("This test should not raise an exception")
mock_run_query.assert_has_calls(expected_calls, True)
with patch.object(mysql_cache, "run_query") as mock_run_query:
mock_run_query.return_value = (MagicMock(), 2)
expected_calls = [
call(
mock_connect_client,
"REPLACE INTO salt (bank, etcd_key, data) values(%s,%s,%s)",
args=("minions/minion", "key2", b"\xa4data"),
)
]
try:
mysql_cache.store(bank="minions/minion", key="key2", data="data")
except SaltCacheError:
pytest.fail("This test should not raise an exception")
mock_run_query.assert_has_calls(expected_calls, True)
with patch.object(mysql_cache, "run_query") as mock_run_query:
mock_run_query.return_value = (MagicMock(), 0)
with pytest.raises(SaltCacheError) as exc_info:
mysql_cache.store(bank="minions/minion", key="data", data="data")
expected = "Error storing minions/minion data returned 0"
assert expected in str(exc_info.value)
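# As exercised above, run_query returns a (cursor, affected_rows) tuple;
# store() accepts 1 (new row) or 2 (REPLACE of an existing row) affected
# rows and raises SaltCacheError when 0 rows are reported.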
def test_fetch():
"""
    Tests that the fetch function reads the data back from the cache and deserializes it.
"""
with patch.object(mysql_cache, "_init_client") as mock_init_client:
with patch("MySQLdb.connect") as mock_connect:
mock_connection = mock_connect.return_value
cursor = mock_connection.cursor.return_value
cursor.fetchone.return_value = (b"\xa5hello",)
with patch.dict(
mysql_cache.__context__,
{
"mysql_client": mock_connection,
"mysql_table_name": "salt",
},
):
ret = mysql_cache.fetch(bank="bank", key="key")
assert ret == "hello"
def test_flush():
"""
Tests the flush function in mysql_cache.
"""
mock_connect_client = MagicMock()
with patch.object(mysql_cache, "_init_client") as mock_init_client:
with patch.dict(
mysql_cache.__context__,
{"mysql_client": mock_connect_client, "mysql_table_name": "salt"},
):
with patch.object(mysql_cache, "run_query") as mock_run_query:
expected_calls = [
call(
mock_connect_client,
"DELETE FROM salt WHERE bank=%s",
args=("bank",),
),
]
mock_run_query.return_value = (MagicMock(), "")
mysql_cache.flush(bank="bank")
mock_run_query.assert_has_calls(expected_calls, True)
expected_calls = [
call(
mock_connect_client,
"DELETE FROM salt WHERE bank=%s AND etcd_key=%s",
args=("bank", "key"),
)
]
mysql_cache.flush(bank="bank", key="key")
mock_run_query.assert_has_calls(expected_calls, True)
def test_init_client():
"""
Tests that the _init_client places the correct information in __context__
"""
with patch.dict(
mysql_cache.__opts__,
{"mysql.max_allowed_packet": 100000},
):
with patch.object(mysql_cache, "_create_table") as mock_create_table:
mysql_cache._init_client()
assert "mysql_table_name" in mysql_cache.__context__
assert mysql_cache.__context__["mysql_table_name"] == "salt"
assert "mysql_kwargs" in mysql_cache.__context__
assert mysql_cache.__context__["mysql_kwargs"]["autocommit"]
assert mysql_cache.__context__["mysql_kwargs"]["host"] == "127.0.0.1"
assert mysql_cache.__context__["mysql_kwargs"]["db"] == "salt_cache"
assert mysql_cache.__context__["mysql_kwargs"]["port"] == 3306
assert mysql_cache.__context__["mysql_kwargs"]["max_allowed_packet"] == 100000
assert not mysql_cache.__context__["mysql_fresh_connection"]
with patch.dict(
mysql_cache.__opts__,
{
"mysql.max_allowed_packet": 100000,
"mysql.db": "salt_mysql_db",
"mysql.host": "mysql-host",
"mysql.fresh_connection": True,
},
):
with patch.object(mysql_cache, "_create_table") as mock_create_table:
mysql_cache._init_client()
assert "mysql_table_name" in mysql_cache.__context__
assert mysql_cache.__context__["mysql_table_name"] == "salt"
assert "mysql_kwargs" in mysql_cache.__context__
assert mysql_cache.__context__["mysql_kwargs"]["autocommit"]
assert mysql_cache.__context__["mysql_kwargs"]["host"] == "mysql-host"
assert mysql_cache.__context__["mysql_kwargs"]["db"] == "salt_mysql_db"
assert mysql_cache.__context__["mysql_kwargs"]["port"] == 3306
assert mysql_cache.__context__["mysql_kwargs"]["max_allowed_packet"] == 100000
assert mysql_cache.__context__["mysql_fresh_connection"]
def test_create_table():
"""
    Tests that _create_table issues the expected CREATE TABLE statement
"""
mock_connect_client = MagicMock()
with patch.dict(
mysql_cache.__context__,
{
"mysql_table_name": "salt",
"mysql_client": mock_connect_client,
"mysql_kwargs": {"db": "salt_cache"},
},
):
with patch.object(mysql_cache, "run_query") as mock_run_query:
mock_run_query.return_value = (MagicMock(), 1)
sql_call = """CREATE TABLE IF NOT EXISTS salt (
bank CHAR(255),
etcd_key CHAR(255),
data MEDIUMBLOB,
last_update TIMESTAMP NOT NULL
DEFAULT CURRENT_TIMESTAMP
ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY(bank, etcd_key)
);"""
expected_calls = [call(mock_connect_client, sql_call)]
try:
mysql_cache._create_table()
except SaltCacheError:
pytest.fail("This test should not raise an exception")
mock_run_query.assert_has_calls(expected_calls, True)
070701000000FF000081A400000000000000000000000167471E9C00000761000000000000000000000000000000000000003B00000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/unit/conftest.pyimport os
import pytest
import salt.config
@pytest.fixture
def minion_opts(tmp_path):
"""
Default minion configuration with relative temporary paths to not
require root permissions.
"""
root_dir = tmp_path / "minion"
opts = salt.config.DEFAULT_MINION_OPTS.copy()
opts["__role"] = "minion"
opts["root_dir"] = str(root_dir)
for name in ("cachedir", "pki_dir", "sock_dir", "conf_dir"):
dirpath = root_dir / name
dirpath.mkdir(parents=True)
opts[name] = str(dirpath)
opts["log_file"] = "logs/minion.log"
opts["conf_file"] = os.path.join(opts["conf_dir"], "minion")
return opts
@pytest.fixture
def master_opts(tmp_path):
"""
Default master configuration with relative temporary paths to not
require root permissions.
"""
root_dir = tmp_path / "master"
opts = salt.config.master_config(None)
opts["__role"] = "master"
opts["root_dir"] = str(root_dir)
for name in ("cachedir", "pki_dir", "sock_dir", "conf_dir"):
dirpath = root_dir / name
dirpath.mkdir(parents=True)
opts[name] = str(dirpath)
opts["log_file"] = "logs/master.log"
opts["conf_file"] = os.path.join(opts["conf_dir"], "master")
return opts
@pytest.fixture
def syndic_opts(tmp_path):
"""
    Default syndic configuration with relative temporary paths to not
require root permissions.
"""
root_dir = tmp_path / "syndic"
opts = salt.config.DEFAULT_MINION_OPTS.copy()
opts["syndic_master"] = "127.0.0.1"
opts["__role"] = "minion"
opts["root_dir"] = str(root_dir)
for name in ("cachedir", "pki_dir", "sock_dir", "conf_dir"):
dirpath = root_dir / name
dirpath.mkdir(parents=True)
opts[name] = str(dirpath)
opts["log_file"] = "logs/syndic.log"
opts["conf_file"] = os.path.join(opts["conf_dir"], "syndic")
return opts
07070100000100000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003700000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/unit/modules07070100000101000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004300000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/unit/modules/__init__.py07070100000102000081A400000000000000000000000167471E9C0000818A000000000000000000000000000000000000004500000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/unit/modules/test_mysql.py"""
:codeauthor: Mike Place (mp@saltstack.com)
tests.unit.modules.mysql
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
import logging
from textwrap import dedent
from unittest.mock import MagicMock
from unittest.mock import call
from unittest.mock import mock_open
from unittest.mock import patch
import pytest
from saltext.mysql.modules import mysql
try:
import pymysql
HAS_PYMYSQL = True
except ImportError:
HAS_PYMYSQL = False
log = logging.getLogger(__name__)
__all_privileges__ = [
"ALTER",
"ALTER ROUTINE",
"BACKUP_ADMIN",
"BINLOG_ADMIN",
"CONNECTION_ADMIN",
"CREATE",
"CREATE ROLE",
"CREATE ROUTINE",
"CREATE TABLESPACE",
"CREATE TEMPORARY TABLES",
"CREATE USER",
"CREATE VIEW",
"DELETE",
"DROP",
"DROP ROLE",
"ENCRYPTION_KEY_ADMIN",
"EVENT",
"EXECUTE",
"FILE",
"GROUP_REPLICATION_ADMIN",
"INDEX",
"INSERT",
"LOCK TABLES",
"PERSIST_RO_VARIABLES_ADMIN",
"PROCESS",
"REFERENCES",
"RELOAD",
"REPLICATION CLIENT",
"REPLICATION SLAVE",
"REPLICATION_SLAVE_ADMIN",
"RESOURCE_GROUP_ADMIN",
"RESOURCE_GROUP_USER",
"ROLE_ADMIN",
"SELECT",
"SET_USER_ID",
"SHOW DATABASES",
"SHOW VIEW",
"SHUTDOWN",
"SUPER",
"SYSTEM_VARIABLES_ADMIN",
"TRIGGER",
"UPDATE",
"XA_RECOVER_ADMIN",
]
pytestmark = [
pytest.mark.skipif(mysql.MySQLdb is None, reason="No python mysql client installed."),
]
class MockMySQLConnect:
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
    def autocommit(self, *args, **kwargs):
return True
@pytest.fixture
def configure_loader_modules():
return {mysql: {}}
def test_user_exists():
"""
Test to see if mysql module properly forms the MySQL query to see if a user exists
Do it before test_user_create_when_user_exists mocks the user_exists call
"""
with patch.object(mysql, "version", return_value="8.0.10"):
_test_call(
mysql.user_exists,
{
"sql": (
"SELECT User,Host FROM mysql.user WHERE "
"User = %(user)s AND Host = %(host)s AND "
"Password = PASSWORD(%(password)s)"
),
"sql_args": {
"host": "localhost",
"password": "BLUECOW",
"user": "mytestuser",
},
},
user="mytestuser",
host="localhost",
password="BLUECOW",
)
with patch.object(mysql, "version", return_value="10.1.38-MariaDB"):
_test_call(
mysql.user_exists,
{
"sql": (
"SELECT User,Host FROM mysql.user WHERE "
"User = %(user)s AND Host = %(host)s AND "
"Password = PASSWORD(%(password)s)"
),
"sql_args": {
"host": "localhost",
"password": "BLUECOW",
"user": "mytestuser",
},
},
user="mytestuser",
host="localhost",
password="BLUECOW",
)
with patch.object(mysql, "version", return_value="8.0.11"):
_test_call(
mysql.user_exists,
{
"sql": (
"SELECT User,Host FROM mysql.user WHERE " "User = %(user)s AND Host = %(host)s"
),
"sql_args": {"host": "localhost", "user": "mytestuser"},
},
user="mytestuser",
host="localhost",
password="BLUECOW",
)
with patch.object(mysql, "version", return_value="8.0.11"):
with patch.object(
mysql,
"__get_auth_plugin",
MagicMock(return_value="mysql_native_password"),
):
_test_call(
mysql.user_exists,
{
"sql": (
"SELECT User,Host FROM mysql.user WHERE "
"User = %(user)s AND Host = %(host)s AND "
"Password = %(password)s"
),
"sql_args": {
"host": "%",
"password": "*1A01CF8FBE6425398935FB90359AD8B817399102",
"user": "mytestuser",
},
},
user="mytestuser",
host="%",
password="BLUECOW",
)
with patch.object(mysql, "version", return_value="10.2.21-MariaDB"):
_test_call(
mysql.user_exists,
{
"sql": (
"SELECT User,Host FROM mysql.user WHERE "
"User = %(user)s AND Host = %(host)s AND "
"Password = PASSWORD(%(password)s)"
),
"sql_args": {
"host": "localhost",
"password": "BLUECOW",
"user": "mytestuser",
},
},
user="mytestuser",
host="localhost",
password="BLUECOW",
)
with patch.object(mysql, "version", side_effect=["", "10.2.21-MariaDB", "10.2.21-MariaDB"]):
_test_call(
mysql.user_exists,
{
"sql": (
"SELECT User,Host FROM mysql.user WHERE "
"User = %(user)s AND Host = %(host)s AND "
"Password = PASSWORD(%(password)s)"
),
"sql_args": {
"host": "localhost",
"password": "new_pass",
"user": "root",
},
},
user="root",
host="localhost",
password="new_pass",
connection_user="root",
connection_pass="old_pass",
)
# test_user_create_when_user_exists():
# ensure we don't try to create a user when one already exists
# mock the version of MySQL
with patch.object(mysql, "version", return_value="8.0.10"):
with patch.object(mysql, "user_exists", MagicMock(return_value=True)):
with patch.dict(mysql.__salt__, {"config.option": MagicMock()}):
ret = mysql.user_create("testuser")
assert not ret
# test_user_create_when_user_exists():
# ensure we don't try to create a user when one already exists
# mock the version of MySQL
with patch.object(mysql, "version", return_value="8.0.11"):
with patch.object(mysql, "user_exists", MagicMock(return_value=True)):
with patch.object(mysql, "verify_login", MagicMock(return_value=True)):
with patch.dict(mysql.__salt__, {"config.option": MagicMock()}):
ret = mysql.user_create("testuser")
                    assert not ret
def test_user_create():
"""
Test the creation of a MySQL user in mysql exec module
"""
with patch.object(mysql, "version", return_value="8.0.10"):
with patch.object(
mysql,
"__get_auth_plugin",
MagicMock(return_value="mysql_native_password"),
):
_test_call(
mysql.user_create,
{
"sql": "CREATE USER %(user)s@%(host)s IDENTIFIED BY %(password)s",
"sql_args": {
"password": "BLUECOW",
"user": "testuser",
"host": "localhost",
},
},
"testuser",
password="BLUECOW",
)
with patch.object(mysql, "version", return_value="8.0.11"):
with patch.object(
mysql,
"__get_auth_plugin",
MagicMock(return_value="mysql_native_password"),
):
_test_call(
mysql.user_create,
{
"sql": "CREATE USER %(user)s@%(host)s IDENTIFIED WITH %(auth_plugin)s BY %(password)s",
"sql_args": {
"password": "BLUECOW",
"auth_plugin": "mysql_native_password",
"user": "testuser",
"host": "localhost",
},
},
"testuser",
password="BLUECOW",
)
    # Test creating a user with allow_passwordless=True and unix_socket=True
with patch.object(mysql, "version", return_value="8.0.10"):
with patch.object(mysql, "plugin_status", MagicMock(return_value="ACTIVE")):
_test_call(
mysql.user_create,
{
"sql": "CREATE USER %(user)s@%(host)s IDENTIFIED WITH auth_socket",
"sql_args": {"user": "testuser", "host": "localhost"},
},
"testuser",
allow_passwordless=True,
unix_socket=True,
)
with patch.object(mysql, "version", return_value="10.2.21-MariaDB"):
with patch.object(mysql, "plugin_status", MagicMock(return_value="ACTIVE")):
_test_call(
mysql.user_create,
{
"sql": "CREATE USER %(user)s@%(host)s IDENTIFIED VIA unix_socket",
"sql_args": {"user": "testuser", "host": "localhost"},
},
"testuser",
allow_passwordless=True,
unix_socket=True,
)
with patch.object(mysql, "version", side_effect=["", "8.0.10", "8.0.10"]):
with patch.object(mysql, "user_exists", MagicMock(return_value=False)), patch.object(
mysql,
"__get_auth_plugin",
MagicMock(return_value="mysql_native_password"),
):
_test_call(
mysql.user_create,
{
"sql": "CREATE USER %(user)s@%(host)s IDENTIFIED BY %(password)s",
"sql_args": {
"password": "new_pass",
"user": "root",
"host": "localhost",
},
},
"root",
password="new_pass",
connection_user="root",
connection_pass="old_pass",
)
def test_user_chpass():
"""
Test changing a MySQL user password in mysql exec module
"""
connect_mock = MagicMock()
with patch.object(mysql, "_connect", connect_mock):
with patch.object(mysql, "version", return_value="8.0.10"):
with patch.object(mysql, "user_exists", MagicMock(return_value=True)):
with patch.dict(mysql.__salt__, {"config.option": MagicMock()}):
mysql.user_chpass("testuser", password="BLUECOW")
calls = (
call()
.cursor()
.execute(
"UPDATE mysql.user SET Password=PASSWORD(%(password)s) WHERE User=%(user)s AND Host = %(host)s;",
{
"password": "BLUECOW",
"user": "testuser",
"host": "localhost",
},
),
call().cursor().execute("FLUSH PRIVILEGES;"),
)
connect_mock.assert_has_calls(calls, any_order=True)
connect_mock = MagicMock()
with patch.object(mysql, "_connect", connect_mock):
with patch.object(mysql, "version", return_value="8.0.11"):
with patch.object(mysql, "user_exists", MagicMock(return_value=True)):
with patch.object(
mysql,
"__get_auth_plugin",
MagicMock(return_value="mysql_native_password"),
):
with patch.dict(mysql.__salt__, {"config.option": MagicMock()}):
mysql.user_chpass("testuser", password="BLUECOW")
calls = (
call()
.cursor()
.execute(
"ALTER USER %(user)s@%(host)s IDENTIFIED WITH %(auth_plugin)s BY %(password)s;",
{
"password": "BLUECOW",
"user": "testuser",
"host": "localhost",
"auth_plugin": "mysql_native_password",
},
),
call().cursor().execute("FLUSH PRIVILEGES;"),
)
connect_mock.assert_has_calls(calls, any_order=True)
connect_mock = MagicMock()
with patch.object(mysql, "_connect", connect_mock):
with patch.object(mysql, "version", side_effect=["", "8.0.11", "8.0.11"]):
with patch.object(mysql, "user_exists", MagicMock(return_value=True)):
with patch.object(
mysql,
"__get_auth_plugin",
MagicMock(return_value="mysql_native_password"),
):
with patch.dict(mysql.__salt__, {"config.option": MagicMock()}):
mysql.user_chpass(
"root",
password="new_pass",
connection_user="root",
connection_pass="old_pass",
)
calls = (
call()
.cursor()
.execute(
"ALTER USER %(user)s@%(host)s IDENTIFIED WITH %(auth_plugin)s BY %(password)s;",
{
"password": "new_pass",
"user": "root",
"host": "localhost",
"auth_plugin": "mysql_native_password",
},
),
call().cursor().execute("FLUSH PRIVILEGES;"),
)
connect_mock.assert_has_calls(calls, any_order=True)
def test_user_remove():
"""
Test the removal of a MySQL user in mysql exec module
"""
with patch.object(mysql, "user_exists", MagicMock(return_value=True)):
_test_call(
mysql.user_remove,
{
"sql": "DROP USER %(user)s@%(host)s",
"sql_args": {"user": "testuser", "host": "localhost"},
},
"testuser",
)
def test_db_check():
"""
Test MySQL db check function in mysql exec module
"""
_test_call(
mysql.db_check,
"CHECK TABLE `test``'\" db`.`my``'\" table`",
"test`'\" db",
"my`'\" table",
)
def test_db_repair():
"""
Test MySQL db repair function in mysql exec module
"""
_test_call(
mysql.db_repair,
"REPAIR TABLE `test``'\" db`.`my``'\" table`",
"test`'\" db",
"my`'\" table",
)
def test_db_optimize():
"""
Test MySQL db optimize function in mysql exec module
"""
_test_call(
mysql.db_optimize,
"OPTIMIZE TABLE `test``'\" db`.`my``'\" table`",
"test`'\" db",
"my`'\" table",
)
def test_db_remove():
"""
Test MySQL db remove function in mysql exec module
"""
with patch.object(mysql, "db_exists", MagicMock(return_value=True)):
_test_call(mysql.db_remove, "DROP DATABASE `test``'\" db`;", "test`'\" db")
def test_db_tables():
"""
Test MySQL db_tables function in mysql exec module
"""
with patch.object(mysql, "db_exists", MagicMock(return_value=True)):
_test_call(mysql.db_tables, "SHOW TABLES IN `test``'\" db`", "test`'\" db")
def test_db_exists():
"""
Test MySQL db_exists function in mysql exec module
"""
_test_call(
mysql.db_exists,
{
"sql": "SHOW DATABASES LIKE %(dbname)s;",
"sql_args": {"dbname": r"""test%_`" db"""},
},
'test%_`" db',
)
def test_db_create():
"""
Test MySQL db_create function in mysql exec module
"""
_test_call(
mysql.db_create,
"CREATE DATABASE IF NOT EXISTS `test``'\" db`;",
"test`'\" db",
)
def test_alter_db():
"""
Test MySQL alter_db function in mysql exec module
"""
mock_get_db = {
"character_set": "utf8",
"collate": "utf8_unicode_ci",
"name": "my_test",
}
mock = MagicMock(return_value=mock_get_db)
with patch.object(mysql, "db_get", return_value=mock) as mock_db_get:
_test_call(
mysql.alter_db,
"ALTER DATABASE `my_test` CHARACTER SET utf8 COLLATE utf8_unicode_ci;",
"my_test",
"utf8",
"utf8_unicode_ci",
)
def test_user_list():
"""
Test MySQL user_list function in mysql exec module
"""
_test_call(mysql.user_list, "SELECT User,Host FROM mysql.user")
def test_user_info():
"""
Test to see if the mysql execution module correctly forms the SQL for information on a MySQL user.
"""
_test_call(
mysql.user_info,
{
"sql": "SELECT * FROM mysql.user WHERE User = %(user)s AND Host = %(host)s",
"sql_args": {"host": "localhost", "user": "mytestuser"},
},
"mytestuser",
)
def test_user_grants():
"""
Test to ensure the mysql user_grants function returns properly formed SQL for a basic query
"""
with patch.object(mysql, "user_exists", MagicMock(return_value=True)):
_test_call(
mysql.user_grants,
{
"sql": "SHOW GRANTS FOR %(user)s@%(host)s",
"sql_args": {"host": "localhost", "user": "testuser"},
},
"testuser",
)
def test_grant_exists_true():
"""
Test to ensure that we can find a grant that exists
"""
mock_grants = [
"GRANT USAGE ON *.* TO 'testuser'@'%'",
"GRANT SELECT, INSERT, UPDATE ON `testdb`.`testtableone` TO 'testuser'@'%'",
"GRANT SELECT(column1,column2) ON `testdb`.`testtableone` TO 'testuser'@'%'",
"GRANT SELECT(column1,column2), INSERT(column1,column2) ON `testdb`.`testtableone` TO 'testuser'@'%'",
"GRANT SELECT(column1,column2), UPDATE ON `testdb`.`testtableone` TO 'testuser'@'%'",
"GRANT SELECT ON `testdb`.`testtabletwo` TO 'testuser'@'%'",
"GRANT SELECT ON `testdb`.`testtablethree` TO 'testuser'@'%'",
]
with patch.object(mysql, "version", return_value="5.6.41"):
mock = MagicMock(return_value=mock_grants)
with patch.object(mysql, "user_grants", return_value=mock_grants) as mock_user_grants:
ret = mysql.grant_exists(
"SELECT, INSERT, UPDATE", "testdb.testtableone", "testuser", "%"
)
assert ret
def test_grant_exists_false():
"""
Test to ensure that we don't find a grant that doesn't exist
"""
mock_grants = [
"GRANT USAGE ON *.* TO 'testuser'@'%'",
"GRANT SELECT, INSERT, UPDATE ON `testdb`.`testtableone` TO 'testuser'@'%'",
"GRANT SELECT(column1,column2) ON `testdb`.`testtableone` TO 'testuser'@'%'",
"GRANT SELECT(column1,column2), UPDATE ON `testdb`.`testtableone` TO 'testuser'@'%'",
"GRANT SELECT ON `testdb`.`testtablethree` TO 'testuser'@'%'",
]
with patch.object(mysql, "version", return_value="5.6.41"):
mock = MagicMock(return_value=mock_grants)
with patch.object(mysql, "user_grants", return_value=mock_grants) as mock_user_grants:
ret = mysql.grant_exists("SELECT", "testdb.testtabletwo", "testuser", "%")
assert not ret
def test_grant_exists_all():
"""
Test to ensure that we can find a grant that exists
"""
mock_grants = ["GRANT ALL PRIVILEGES ON testdb.testtableone TO `testuser`@`%`"]
with patch.object(mysql, "version", return_value="8.0.10"):
mock = MagicMock(return_value=mock_grants)
with patch.object(mysql, "user_grants", return_value=mock_grants) as mock_user_grants:
ret = mysql.grant_exists("ALL", "testdb.testtableone", "testuser", "%")
assert ret
with patch.object(mysql, "version", return_value="8.0.10"):
mock = MagicMock(return_value=mock_grants)
with patch.object(mysql, "user_grants", return_value=mock_grants) as mock_user_grants:
ret = mysql.grant_exists("all privileges", "testdb.testtableone", "testuser", "%")
assert ret
mock_grants = ["GRANT ALL PRIVILEGES ON testdb.testtableone TO `testuser`@`%`"]
with patch.object(mysql, "version", return_value="5.6.41"):
mock = MagicMock(return_value=mock_grants)
with patch.object(mysql, "user_grants", return_value=mock_grants) as mock_user_grants:
ret = mysql.grant_exists("ALL PRIVILEGES", "testdb.testtableone", "testuser", "%")
assert ret
mock_grants = [
"GRANT SELECT, INSERT, UPDATE, DELETE, CREATE, DROP, RELOAD, SHUTDOWN, PROCESS, FILE, REFERENCES, INDEX, ALTER, SHOW DATABASES, SUPER, CREATE TEMPORARY TABLES, LOCK TABLES, EXECUTE, REPLICATION SLAVE, REPLICATION CLIENT, CREATE VIEW, SHOW VIEW, CREATE ROUTINE, ALTER ROUTINE, CREATE USER, EVENT, TRIGGER, CREATE TABLESPACE, CREATE ROLE, DROP ROLE ON *.* TO `testuser`@`%`",
"GRANT BACKUP_ADMIN,BINLOG_ADMIN,CONNECTION_ADMIN,ENCRYPTION_KEY_ADMIN,GROUP_REPLICATION_ADMIN,PERSIST_RO_VARIABLES_ADMIN,REPLICATION_SLAVE_ADMIN,RESOURCE_GROUP_ADMIN,RESOURCE_GROUP_USER,ROLE_ADMIN,SET_USER_ID,SYSTEM_VARIABLES_ADMIN,XA_RECOVER_ADMIN ON *.* TO `testuser`@`%`",
]
with patch.object(mysql, "version", return_value="8.0.10"):
mock = MagicMock(return_value=mock_grants)
with patch.object(mysql, "user_grants", return_value=mock_grants) as mock_user_grants:
ret = mysql.grant_exists("ALL", "*.*", "testuser", "%")
assert ret
with patch.object(mysql, "version", return_value="8.0.10"):
mock = MagicMock(return_value=mock_grants)
with patch.object(mysql, "user_grants", return_value=mock_grants) as mock_user_grants:
ret = mysql.grant_exists("all privileges", "*.*", "testuser", "%")
assert ret
@pytest.mark.skipif(True, reason="TODO: Mock up user_grants()")
def test_grant_add():
"""
Test grant_add function in mysql exec module
"""
_test_call(
mysql.grant_add,
"",
"SELECT,INSERT,UPDATE",
"database.*",
"frank",
"localhost",
)
@pytest.mark.skipif(True, reason="TODO: Mock up user_grants()")
def test_grant_revoke():
"""
Test grant revoke in mysql exec module
"""
_test_call(
mysql.grant_revoke,
"",
"SELECT,INSERT,UPDATE",
"database.*",
"frank",
"localhost",
)
def test_processlist():
"""
Test processlist function in mysql exec module
"""
_test_call(mysql.processlist, "SHOW FULL PROCESSLIST")
def test_get_master_status():
"""
Test get_master_status in the mysql execution module
"""
_test_call(mysql.get_master_status, "SHOW MASTER STATUS")
def test_get_slave_status():
"""
Test get_slave_status in the mysql execution module
"""
_test_call(mysql.get_slave_status, "SHOW SLAVE STATUS")
def test_get_slave_status_bad_server():
"""
Test get_slave_status in the mysql execution module, simulating a broken server
"""
connect_mock = MagicMock(return_value=None)
with patch.object(mysql, "_connect", connect_mock):
with patch.dict(mysql.__salt__, {"config.option": MagicMock()}):
rslt = mysql.get_slave_status()
connect_mock.assert_has_calls([call()])
assert rslt == []
@pytest.mark.skip(reason="MySQL module claims this function is not ready for production")
def test_free_slave():
pass
def test_query():
_test_call(mysql.query, "SELECT * FROM testdb", "testdb", "SELECT * FROM testdb")
@pytest.mark.skipif(not HAS_PYMYSQL, reason="Could not import pymysql")
def test_query_error():
connect_mock = MagicMock()
with patch.object(mysql, "_connect", connect_mock):
with patch.dict(mysql.__salt__, {"config.option": MagicMock()}):
# Use the OperationalError from the salt mysql module because that
# exception can come from either MySQLdb or pymysql
side_effect = mysql.OperationalError(9999, "Something Went Wrong")
with patch.object(mysql, "_execute", MagicMock(side_effect=side_effect)):
mysql.query("testdb", "SELECT * FROM testdb")
assert "mysql.error" in mysql.__context__
expected = "MySQL Error 9999: Something Went Wrong"
assert mysql.__context__["mysql.error"] == expected
def test_plugin_add():
"""
    Test adding/installing a MySQL / MariaDB plugin
"""
with patch.object(mysql, "plugin_status", MagicMock(return_value="")):
_test_call(
mysql.plugin_add,
'INSTALL PLUGIN auth_socket SONAME "auth_socket.so"',
"auth_socket",
)
def test_plugin_remove():
"""
    Test removing/uninstalling a MySQL / MariaDB plugin
"""
with patch.object(mysql, "plugin_status", MagicMock(return_value="ACTIVE")):
_test_call(
mysql.plugin_remove,
"UNINSTALL PLUGIN auth_socket",
"auth_socket",
)
def test_plugin_status():
"""
Test checking the status of a MySQL / MariaDB plugin
"""
_test_call(
mysql.plugin_status,
{
"sql": "SELECT PLUGIN_STATUS FROM INFORMATION_SCHEMA.PLUGINS WHERE PLUGIN_NAME = %(name)s",
"sql_args": {"name": "auth_socket"},
},
"auth_socket",
)
def test_sanitize_comment():
"""
Test comment sanitization
"""
input_data = dedent(
"""
/*
multiline
comment
*/
CREATE TABLE test_update (a VARCHAR(25)); # end of line comment
# example comment
insert into test_update values ("some #hash value"); -- ending comment
insert into test_update values ("crazy -- not comment"); -- another ending comment
-- another comment type
"""
).strip()
expected_response = dedent(
"""\
CREATE TABLE test_update (a VARCHAR(25));
insert into test_update values ("some #hash value");
insert into test_update values ("crazy -- not comment");
"""
)
output = mysql._sanitize_comments(input_data)
assert output == expected_response
input_data = dedent(
"""
-- --------------------------------------------------------
-- SQL Commands to set up the pmadb as described in the documentation.
--
-- This file is meant for use with MySQL 5 and above!
--
-- This script expects the user pma to already be existing. If we would put a
-- line here to create them too many users might just use this script and end
-- up with having the same password for the controluser.
--
-- This user "pma" must be defined in config.inc.php (controluser/controlpass)
--
-- Please don't forget to set up the tablenames in config.inc.php
--
-- --------------------------------------------------------
--
CREATE DATABASE IF NOT EXISTS `phpmyadmin`
DEFAULT CHARACTER SET utf8 COLLATE utf8_bin;
USE phpmyadmin;
"""
).strip()
expected_response = dedent(
"""
CREATE DATABASE IF NOT EXISTS `phpmyadmin`
DEFAULT CHARACTER SET utf8 COLLATE utf8_bin;
USE phpmyadmin;
"""
).strip()
output = mysql._sanitize_comments(input_data)
assert output == expected_response
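# Shared helper for the execution-module tests in this file: it patches
# mysql._connect with a MagicMock, invokes the function under test, and then
# asserts that the expected SQL (optionally together with its bound
# parameters) was executed through the mocked cursor.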
def _test_call(function, expected_sql, *args, **kwargs):
connect_mock = MagicMock()
with patch.object(mysql, "_connect", connect_mock):
with patch.dict(mysql.__salt__, {"config.option": MagicMock()}):
function(*args, **kwargs)
if isinstance(expected_sql, dict):
calls = call().cursor().execute(expected_sql["sql"], expected_sql["sql_args"])
else:
calls = call().cursor().execute(expected_sql)
connect_mock.assert_has_calls((calls,), True)
def test_file_query():
"""
Test file_query
"""
with patch.object(mysql, "HAS_SQLPARSE", False):
ret = mysql.file_query("database", "filename")
assert not ret
file_data = """-- --------------------------------------------------------
-- SQL Commands to set up the pmadb as described in the documentation.
--
-- This file is meant for use with MySQL 5 and above!
--
-- This script expects the user pma to already be existing. If we would put a
-- line here to create them too many users might just use this script and end
-- up with having the same password for the controluser.
--
-- This user "pma" must be defined in config.inc.php (controluser/controlpass)
--
-- Please don't forget to set up the tablenames in config.inc.php
--
-- --------------------------------------------------------
--
USE phpmyadmin;
--
-- Table structure for table `pma__bookmark`
--
CREATE TABLE IF NOT EXISTS `pma__bookmark` (
`id` int(10) unsigned NOT NULL auto_increment,
`dbase` varchar(255) NOT NULL default '',
`user` varchar(255) NOT NULL default '',
`label` varchar(255) COLLATE utf8_general_ci NOT NULL default '',
`query` text NOT NULL,
PRIMARY KEY (`id`)
)
COMMENT='Bookmarks'
DEFAULT CHARACTER SET utf8 COLLATE utf8_bin;
"""
side_effect = [
{"query time": {"human": "0.4ms", "raw": "0.00038"}, "rows affected": 0},
{"query time": {"human": "8.9ms", "raw": "0.00893"}, "rows affected": 0},
]
expected = {
"query time": {"human": "8.9ms", "raw": "0.00893"},
"rows affected": 0,
}
with patch("os.path.exists", MagicMock(return_value=True)):
with patch("salt.utils.files.fopen", mock_open(read_data=file_data)):
with patch.object(mysql, "query", side_effect=side_effect):
ret = mysql.file_query("database", "filename")
assert ret, expected
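# The two _connect error tests below patch MySQLdb.connect so that it raises
# an access-denied error, once as pymysql.err.InternalError and once as the
# module's own OperationalError, and verify that _connect does not propagate
# the exception but records a normalized "MySQL Error 1698: ..." message in
# __context__["mysql.error"].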
@pytest.mark.skipif(not HAS_PYMYSQL, reason="Could not import pymysql")
def test__connect_pymysql_exception():
"""
Test the _connect function in the MySQL module when pymysql raises an access-denied error
"""
with patch.dict(mysql.__salt__, {"config.option": MagicMock()}):
with patch(
"MySQLdb.connect",
side_effect=pymysql.err.InternalError(
1698, "Access denied for user 'root'@'localhost'"
),
):
ret = mysql._connect()
assert "mysql.error" in mysql.__context__
assert (
mysql.__context__["mysql.error"]
== "MySQL Error 1698: Access denied for user 'root'@'localhost'"
)
def test__connect_mysqldb_exception():
"""
Test the _connect function in the MySQL module when MySQLdb raises an OperationalError
"""
with patch.dict(mysql.__salt__, {"config.option": MagicMock()}):
with patch(
"MySQLdb.connect",
side_effect=mysql.OperationalError(1698, "Access denied for user 'root'@'localhost'"),
):
ret = mysql._connect()
assert "mysql.error" in mysql.__context__
assert (
mysql.__context__["mysql.error"]
== "MySQL Error 1698: Access denied for user 'root'@'localhost'"
)
def test__connect_mysqldb():
"""
Test the _connect function in the MySQL module when the connection succeeds
"""
mysqldb_connect_mock = MagicMock(autospec=True, return_value=MockMySQLConnect())
with patch.dict(mysql.__salt__, {"config.option": MagicMock()}):
with patch("MySQLdb.connect", mysqldb_connect_mock):
mysql._connect()
assert "mysql.error" not in mysql.__context__
07070100000103000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003600000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/unit/pillar07070100000104000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004200000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/unit/pillar/__init__.py07070100000105000081A400000000000000000000000167471E9C00006D23000000000000000000000000000000000000004400000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/unit/pillar/test_mysql.pyimport pytest
from saltext.mysql.pillar import mysql
pytestmark = [pytest.mark.skipif(mysql.MySQLdb is None, reason="MySQL-python module not installed")]
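# The extract_queries tests below check that every supported input form
# (plain string, tuple, dict, and the keyword-argument variant) is normalized
# into a [key, settings] pair carrying the defaults depth=0, as_list=False,
# as_json=False, with_lists=None and ignore_null=False, and that empty or
# malformed entries are dropped along the way.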
def test_001_extract_queries_legacy():
return_data = mysql.MySQLExtPillar()
args, kwargs = ["SELECT blah"], {}
qbuffer = return_data.extract_queries(args, kwargs)
assert [
[
None,
{
"query": "SELECT blah",
"depth": 0,
"as_list": False,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
]
] == qbuffer
def test_002_extract_queries_list():
return_data = mysql.MySQLExtPillar()
args, kwargs = (
[
"SELECT blah",
"SELECT blah2",
("SELECT blah3",),
("SELECT blah4", 2),
{"query": "SELECT blah5"},
{"query": "SELECT blah6", "depth": 2},
{"query": "SELECT blah7", "as_list": True},
{"query": "SELECT blah8", "with_lists": "1"},
{"query": "SELECT blah9", "with_lists": "1,2"},
{"query": "SELECT json1", "as_json": True},
],
{},
)
qbuffer = return_data.extract_queries(args, kwargs)
assert [
[
None,
{
"query": "SELECT blah",
"depth": 0,
"as_list": False,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
[
None,
{
"query": "SELECT blah2",
"depth": 0,
"as_list": False,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
[
None,
{
"query": "SELECT blah3",
"depth": 0,
"as_list": False,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
[
None,
{
"query": "SELECT blah4",
"depth": 2,
"as_list": False,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
[
None,
{
"query": "SELECT blah5",
"depth": 0,
"as_list": False,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
[
None,
{
"query": "SELECT blah6",
"depth": 2,
"as_list": False,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
[
None,
{
"query": "SELECT blah7",
"depth": 0,
"as_list": True,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
[
None,
{
"query": "SELECT blah8",
"depth": 0,
"as_list": False,
"as_json": False,
"with_lists": [1],
"ignore_null": False,
},
],
[
None,
{
"query": "SELECT blah9",
"depth": 0,
"as_list": False,
"as_json": False,
"with_lists": [1, 2],
"ignore_null": False,
},
],
[
None,
{
"query": "SELECT json1",
"depth": 0,
"as_list": False,
"as_json": True,
"with_lists": None,
"ignore_null": False,
},
],
] == qbuffer
def test_003_extract_queries_kwarg():
return_data = mysql.MySQLExtPillar()
args, kwargs = (
[],
{
"1": "SELECT blah",
"2": "SELECT blah2",
"3": ("SELECT blah3",),
"4": ("SELECT blah4", 2),
"5": {"query": "SELECT blah5"},
"6": {"query": "SELECT blah6", "depth": 2},
"7": {"query": "SELECT blah7", "as_list": True},
"8": {"query": "SELECT json1", "as_json": True},
},
)
qbuffer = return_data.extract_queries(args, kwargs)
assert [
[
"1",
{
"query": "SELECT blah",
"depth": 0,
"as_list": False,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
[
"2",
{
"query": "SELECT blah2",
"depth": 0,
"as_list": False,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
[
"3",
{
"query": "SELECT blah3",
"depth": 0,
"as_list": False,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
[
"4",
{
"query": "SELECT blah4",
"depth": 2,
"as_list": False,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
[
"5",
{
"query": "SELECT blah5",
"depth": 0,
"as_list": False,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
[
"6",
{
"query": "SELECT blah6",
"depth": 2,
"as_list": False,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
[
"7",
{
"query": "SELECT blah7",
"depth": 0,
"as_list": True,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
[
"8",
{
"query": "SELECT json1",
"depth": 0,
"as_list": False,
"as_json": True,
"with_lists": None,
"ignore_null": False,
},
],
] == qbuffer
def test_004_extract_queries_mixed():
return_data = mysql.MySQLExtPillar()
args, kwargs = (
[
"SELECT blah1",
("SELECT blah2", 2),
{"query": "SELECT blah3", "as_list": True},
],
{
"1": "SELECT blah1",
"2": ("SELECT blah2", 2),
"3": {"query": "SELECT blah3", "as_list": True},
},
)
qbuffer = return_data.extract_queries(args, kwargs)
assert [
[
None,
{
"query": "SELECT blah1",
"depth": 0,
"as_list": False,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
[
None,
{
"query": "SELECT blah2",
"depth": 2,
"as_list": False,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
[
None,
{
"query": "SELECT blah3",
"depth": 0,
"as_list": True,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
[
"1",
{
"query": "SELECT blah1",
"depth": 0,
"as_list": False,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
[
"2",
{
"query": "SELECT blah2",
"depth": 2,
"as_list": False,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
[
"3",
{
"query": "SELECT blah3",
"depth": 0,
"as_list": True,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
] == qbuffer
def test_005_extract_queries_bogus_list():
# This test is specifically checking that empty queries are dropped
return_data = mysql.MySQLExtPillar()
args, kwargs = (
[
"SELECT blah",
"",
"SELECT blah2",
("SELECT blah3",),
("",),
("SELECT blah4", 2),
tuple(),
("SELECT blah5",),
{"query": "SELECT blah6"},
{"query": ""},
{"query": "SELECT blah7", "depth": 2},
{"not_a_query": "in sight!"},
{"query": "SELECT blah8", "as_list": True},
],
{},
)
qbuffer = return_data.extract_queries(args, kwargs)
assert [
[
None,
{
"query": "SELECT blah",
"depth": 0,
"as_list": False,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
[
None,
{
"query": "SELECT blah2",
"depth": 0,
"as_list": False,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
[
None,
{
"query": "SELECT blah3",
"depth": 0,
"as_list": False,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
[
None,
{
"query": "SELECT blah4",
"depth": 2,
"as_list": False,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
[
None,
{
"query": "SELECT blah5",
"depth": 0,
"as_list": False,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
[
None,
{
"query": "SELECT blah6",
"depth": 0,
"as_list": False,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
[
None,
{
"query": "SELECT blah7",
"depth": 2,
"as_list": False,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
[
None,
{
"query": "SELECT blah8",
"depth": 0,
"as_list": True,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
] == qbuffer
def test_006_extract_queries_bogus_kwargs():
# This test is cut down, since most of the code path matches test_*_bogus_list
return_data = mysql.MySQLExtPillar()
args, kwargs = [], {"1": "SELECT blah", "2": "", "3": "SELECT blah2"}
qbuffer = return_data.extract_queries(args, kwargs)
assert [
[
"1",
{
"query": "SELECT blah",
"depth": 0,
"as_list": False,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
[
"3",
{
"query": "SELECT blah2",
"depth": 0,
"as_list": False,
"as_json": False,
"with_lists": None,
"ignore_null": False,
},
],
] == qbuffer
def test_011_enter_root():
return_data = mysql.MySQLExtPillar()
return_data.enter_root("test")
assert return_data.result["test"] == return_data.focus
return_data.enter_root(None)
assert return_data.result == return_data.focus
def test_021_process_fields():
return_data = mysql.MySQLExtPillar()
return_data.process_fields(["a", "b"], 0)
assert return_data.num_fields == 2
assert return_data.depth == 1
return_data.process_fields(["a", "b"], 2)
assert return_data.num_fields == 2
assert return_data.depth == 1
return_data.process_fields(["a", "b", "c", "d"], 0)
assert return_data.num_fields == 4
assert return_data.depth == 3
return_data.process_fields(["a", "b", "c", "d"], 1)
assert return_data.num_fields == 4
assert return_data.depth == 1
return_data.process_fields(["a", "b", "c", "d"], 2)
assert return_data.num_fields == 4
assert return_data.depth == 2
return_data.process_fields(["a", "b", "c", "d"], 3)
assert return_data.num_fields == 4
assert return_data.depth == 3
return_data.process_fields(["a", "b", "c", "d"], 4)
assert return_data.num_fields == 4
assert return_data.depth == 3
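# As the assertions above show, process_fields() keeps the number of supplied
# column names in num_fields and clamps the requested depth: a value of 0, or
# anything at or beyond num_fields, falls back to the maximum useful depth of
# num_fields - 1.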
def test_111_process_results_legacy():
return_data = mysql.MySQLExtPillar()
return_data.process_fields(["a", "b"], 0)
return_data.with_lists = []
return_data.process_results([[1, 2]])
assert {1: 2} == return_data.result
def test_112_process_results_legacy_multiple():
return_data = mysql.MySQLExtPillar()
return_data.process_fields(["a", "b"], 0)
return_data.with_lists = []
return_data.process_results([[1, 2], [3, 4], [5, 6]])
assert {1: 2, 3: 4, 5: 6} == return_data.result
def test_121_process_results_depth_0():
return_data = mysql.MySQLExtPillar()
return_data.process_fields(["a", "b", "c", "d"], 0)
return_data.with_lists = []
return_data.enter_root(None)
return_data.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
assert {1: {2: {3: 4}}, 5: {6: {7: 8}}} == return_data.result
def test_122_process_results_depth_1():
return_data = mysql.MySQLExtPillar()
return_data.process_fields(["a", "b", "c", "d"], 1)
return_data.with_lists = []
return_data.enter_root(None)
return_data.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
assert {
1: {"b": 2, "c": 3, "d": 4},
5: {"b": 6, "c": 7, "d": 8},
} == return_data.result
def test_123_process_results_depth_2():
return_data = mysql.MySQLExtPillar()
return_data.process_fields(["a", "b", "c", "d"], 2)
return_data.with_lists = []
return_data.enter_root(None)
return_data.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
assert {1: {2: {"c": 3, "d": 4}}, 5: {6: {"c": 7, "d": 8}}} == return_data.result
def test_124_process_results_depth_3():
return_data = mysql.MySQLExtPillar()
return_data.process_fields(["a", "b", "c", "d"], 3)
return_data.with_lists = []
return_data.enter_root(None)
return_data.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
assert {1: {2: {3: 4}}, 5: {6: {7: 8}}} == return_data.result
def test_125_process_results_depth_4():
return_data = mysql.MySQLExtPillar()
return_data.process_fields(["a", "b", "c", "d"], 4)
return_data.with_lists = []
return_data.enter_root(None)
return_data.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
assert {1: {2: {3: 4}}, 5: {6: {7: 8}}} == return_data.result
def test_131_process_results_overwrite_legacy_multiple():
return_data = mysql.MySQLExtPillar()
return_data.process_fields(["a", "b"], 0)
return_data.with_lists = []
return_data.process_results([[1, 2], [3, 4], [1, 6]])
assert {1: 6, 3: 4} == return_data.result
def test_132_process_results_merge_depth_0():
return_data = mysql.MySQLExtPillar()
return_data.process_fields(["a", "b", "c", "d"], 0)
return_data.with_lists = []
return_data.enter_root(None)
return_data.process_results([[1, 2, 3, 4], [1, 6, 7, 8]])
assert {1: {2: {3: 4}, 6: {7: 8}}} == return_data.result
def test_133_process_results_overwrite_depth_0():
return_data = mysql.MySQLExtPillar()
return_data.process_fields(["a", "b", "c", "d"], 0)
return_data.with_lists = []
return_data.enter_root(None)
return_data.process_results([[1, 2, 3, 4], [1, 2, 3, 8]])
assert {1: {2: {3: 8}}} == return_data.result
def test_134_process_results_deepmerge_depth_0():
return_data = mysql.MySQLExtPillar()
return_data.process_fields(["a", "b", "c", "d"], 0)
return_data.with_lists = []
return_data.enter_root(None)
return_data.process_results([[1, 2, 3, 4], [1, 2, 7, 8]])
assert {1: {2: {3: 4, 7: 8}}} == return_data.result
def test_135_process_results_overwrite_depth_1():
return_data = mysql.MySQLExtPillar()
return_data.process_fields(["a", "b", "c", "d"], 1)
return_data.with_lists = []
return_data.enter_root(None)
return_data.process_results([[1, 2, 3, 4], [1, 6, 7, 8]])
assert {1: {"b": 6, "c": 7, "d": 8}} == return_data.result
def test_136_process_results_merge_depth_2():
return_data = mysql.MySQLExtPillar()
return_data.process_fields(["a", "b", "c", "d"], 2)
return_data.with_lists = []
return_data.enter_root(None)
return_data.process_results([[1, 2, 3, 4], [1, 6, 7, 8]])
assert {1: {2: {"c": 3, "d": 4}, 6: {"c": 7, "d": 8}}} == return_data.result
def test_137_process_results_overwrite_depth_2():
return_data = mysql.MySQLExtPillar()
return_data.process_fields(["a", "b", "c", "d"], 2)
return_data.with_lists = []
return_data.enter_root(None)
return_data.process_results([[1, 2, 3, 4], [1, 2, 7, 8]])
assert {1: {2: {"c": 7, "d": 8}}} == return_data.result
def test_201_process_results_complexity_multiresults():
return_data = mysql.MySQLExtPillar()
return_data.process_fields(["a", "b", "c", "d"], 2)
return_data.with_lists = []
return_data.enter_root(None)
return_data.process_results([[1, 2, 3, 4]])
return_data.process_results([[1, 2, 7, 8]])
assert {1: {2: {"c": 7, "d": 8}}} == return_data.result
def test_202_process_results_complexity_as_list():
return_data = mysql.MySQLExtPillar()
return_data.process_fields(["a", "b", "c", "d"], 2)
return_data.with_lists = []
return_data.enter_root(None)
return_data.as_list = True
return_data.process_results([[1, 2, 3, 4]])
return_data.process_results([[1, 2, 7, 8]])
assert {1: {2: {"c": [3, 7], "d": [4, 8]}}} == return_data.result
def test_203_process_results_complexity_as_list_deeper():
return_data = mysql.MySQLExtPillar()
return_data.process_fields(["a", "b", "c", "d"], 0)
return_data.with_lists = []
return_data.enter_root(None)
return_data.as_list = True
return_data.process_results([[1, 2, 3, 4]])
return_data.process_results([[1, 2, 3, 8]])
assert {1: {2: {3: [4, 8]}}} == return_data.result
def test_204_process_results_complexity_as_list_mismatch_depth():
return_data = mysql.MySQLExtPillar()
return_data.as_list = True
return_data.with_lists = []
return_data.enter_root(None)
return_data.process_fields(["a", "b", "c", "d"], 0)
return_data.process_results([[1, 2, 3, 4]])
return_data.process_results([[1, 2, 3, 5]])
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
return_data.process_results([[1, 2, 3, 6, 7]])
assert {1: {2: {3: [4, 5, {6: 7}]}}} == return_data.result
def test_205_process_results_complexity_as_list_mismatch_depth_reversed():
return_data = mysql.MySQLExtPillar()
return_data.as_list = True
return_data.with_lists = []
return_data.enter_root(None)
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
return_data.process_results([[1, 2, 3, 6, 7]])
return_data.process_results([[1, 2, 3, 8, 9]])
return_data.process_fields(["a", "b", "c", "d"], 0)
return_data.process_results([[1, 2, 3, 4]])
return_data.process_results([[1, 2, 3, 5]])
assert {1: {2: {3: [{6: 7, 8: 9}, 4, 5]}}} == return_data.result
def test_206_process_results_complexity_as_list_mismatch_depth_weird_order():
return_data = mysql.MySQLExtPillar()
return_data.as_list = True
return_data.with_lists = []
return_data.enter_root(None)
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
return_data.process_results([[1, 2, 3, 6, 7]])
return_data.process_fields(["a", "b", "c", "d"], 0)
return_data.process_results([[1, 2, 3, 4]])
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
return_data.process_results([[1, 2, 3, 8, 9]])
return_data.process_fields(["a", "b", "c", "d"], 0)
return_data.process_results([[1, 2, 3, 5]])
assert {1: {2: {3: [{6: 7}, 4, {8: 9}, 5]}}} == return_data.result
def test_207_process_results_complexity_collision_mismatch_depth():
return_data = mysql.MySQLExtPillar()
return_data.as_list = False
return_data.with_lists = []
return_data.enter_root(None)
return_data.process_fields(["a", "b", "c", "d"], 0)
return_data.process_results([[1, 2, 3, 4]])
return_data.process_results([[1, 2, 3, 5]])
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
return_data.process_results([[1, 2, 3, 6, 7]])
assert {1: {2: {3: {6: 7}}}} == return_data.result
def test_208_process_results_complexity_collision_mismatch_depth_reversed():
return_data = mysql.MySQLExtPillar()
return_data.as_list = False
return_data.with_lists = []
return_data.enter_root(None)
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
return_data.process_results([[1, 2, 3, 6, 7]])
return_data.process_results([[1, 2, 3, 8, 9]])
return_data.process_fields(["a", "b", "c", "d"], 0)
return_data.process_results([[1, 2, 3, 4]])
return_data.process_results([[1, 2, 3, 5]])
assert {1: {2: {3: 5}}} == return_data.result
def test_209_process_results_complexity_collision_mismatch_depth_weird_order():
return_data = mysql.MySQLExtPillar()
return_data.as_list = False
return_data.with_lists = []
return_data.enter_root(None)
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
return_data.process_results([[1, 2, 3, 6, 7]])
return_data.process_fields(["a", "b", "c", "d"], 0)
return_data.process_results([[1, 2, 3, 4]])
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
return_data.process_results([[1, 2, 3, 8, 9]])
return_data.process_fields(["a", "b", "c", "d"], 0)
return_data.process_results([[1, 2, 3, 5]])
assert {1: {2: {3: 5}}} == return_data.result
def test_20A_process_results_complexity_as_list_vary():
return_data = mysql.MySQLExtPillar()
return_data.as_list = True
return_data.with_lists = []
return_data.enter_root(None)
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
return_data.process_results([[1, 2, 3, 6, 7]])
return_data.process_results([[1, 2, 3, 8, 9]])
return_data.process_fields(["a", "b", "c", "d"], 0)
return_data.process_results([[1, 2, 3, 4]])
return_data.as_list = False
return_data.process_results([[1, 2, 3, 5]])
assert {1: {2: {3: 5}}} == return_data.result
def test_207_process_results_complexity_roots_collision():
return_data = mysql.MySQLExtPillar()
return_data.as_list = False
return_data.with_lists = []
return_data.enter_root(None)
return_data.process_fields(["a", "b", "c", "d"], 0)
return_data.process_results([[1, 2, 3, 4]])
return_data.enter_root(1)
return_data.process_results([[5, 6, 7, 8]])
assert {1: {5: {6: {7: 8}}}} == return_data.result
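# The two with_lists tests below push the same four rows through
# process_results() with different with_lists settings. Judging by the
# expected structures spelled out in their docstrings, the levels named in
# with_lists are rendered as lists of the child dictionaries instead of keyed
# dictionaries, so the key values at those levels ("b"/"z" and "d"/"f"/"y"
# here) no longer appear in the result.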
def test_301_process_results_with_lists():
"""
Validates the following results:
{'a': [
{'c': [
{'e': 1},
{'g': 2}
]
},
{'h': [
{'j': 3, 'k': 4}
]
}
]}
"""
return_data = mysql.MySQLExtPillar()
return_data.as_list = False
return_data.with_lists = [1, 3]
return_data.enter_root(None)
return_data.process_fields(["a", "b", "c", "d", "e", "v"], 0)
return_data.process_results(
[
["a", "b", "c", "d", "e", 1],
["a", "b", "c", "f", "g", 2],
["a", "z", "h", "y", "j", 3],
["a", "z", "h", "y", "k", 4],
]
)
assert "a" in return_data.result
for x in return_data.result["a"]:
if "c" in x:
assert list(x.keys()) == ["c"], x.keys()
for y in x["c"]:
if "e" in y:
assert list(y.keys()) == ["e"]
assert y["e"] == 1
elif "g" in y:
assert list(y.keys()) == ["g"]
assert y["g"] == 2
else:
raise ValueError(f"Unexpected value {y}")
elif "h" in x:
assert len(x["h"]) == 1
for y in x["h"]:
if "j" in y:
assert len(y.keys()) == 2
assert y["j"] == 3
elif "h" in y:
assert len(y.keys()) == 2
assert y["k"] == 4
else:
raise ValueError(f"Unexpected value {y}")
else:
raise ValueError(f"Unexpected value {x}")
def test_302_process_results_with_lists_consecutive():
"""
Validates the following results:
{'a': [
[[
{'e': 1},
{'g': 2}
]
],
[[
{'j': 3, 'k': 4}
]
]
]}
"""
return_data = mysql.MySQLExtPillar()
return_data.as_list = False
return_data.with_lists = [1, 2, 3]
return_data.enter_root(None)
return_data.process_fields(["a", "b", "c", "d", "e", "v"], 0)
return_data.process_results(
[
["a", "b", "c", "d", "e", 1],
["a", "b", "c", "f", "g", 2],
["a", "z", "h", "y", "j", 3],
["a", "z", "h", "y", "k", 4],
]
)
assert "a" in return_data.result
for x in return_data.result["a"]:
assert len(x) == 1
if len(x[0][0]) == 1:
for y in x[0]:
if "e" in y:
assert list(y.keys()) == ["e"]
assert y["e"] == 1
elif "g" in y:
assert list(y.keys()) == ["g"]
assert y["g"] == 2
else:
raise ValueError(f"Unexpected value {y}")
elif len(x[0][0]) == 2:
for y in x[0]:
if "j" in y:
assert len(y.keys()) == 2
assert y["j"] == 3
elif "k" in y:
assert len(y.keys()) == 2
assert y["k"] == 4
else:
raise ValueError(f"Unexpected value {len(x[0][0])}")
else:
raise ValueError(f"Unexpected value {x}")
07070100000106000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003900000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/unit/returners07070100000107000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004500000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/unit/returners/__init__.py07070100000108000081A400000000000000000000000167471E9C00000342000000000000000000000000000000000000004700000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/unit/returners/test_mysql.pyfrom unittest.mock import patch
import pytest
from saltext.mysql.returners import mysql
def test_returner_with_bytes():
ret = {
"success": True,
"return": b"bytes",
"retcode": 0,
"jid": "20221101172203459989",
"fun": "file.read",
"fun_args": ["/fake/path", {"binary": True}],
"id": "minion-1",
}
with patch.object(mysql, "_get_serv"):
try:
mysql.returner(ret)
except TypeError:
pytest.fail("Data not decoded properly")
def test_save_load_with_bytes():
load = {
"return": b"bytes",
"jid": "20221101172203459989",
}
with patch.object(mysql, "_get_serv"):
try:
mysql.save_load(load["jid"], load)
except TypeError:
pytest.fail("Data not decoded properly")
07070100000109000041ED00000000000000000000000267471E9C00000000000000000000000000000000000000000000003600000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/unit/states0707010000010A000081A400000000000000000000000167471E9C00000000000000000000000000000000000000000000004200000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/unit/states/__init__.py0707010000010B000081A400000000000000000000000167471E9C000015D2000000000000000000000000000000000000004700000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/unit/states/test_database.py"""
This test checks mysql_database salt state
"""
from unittest.mock import MagicMock
from unittest.mock import patch
import pytest
from saltext.mysql.states import mysql_database
@pytest.fixture
def configure_loader_modules():
return {mysql_database: {}}
def test_present():
"""
Test to ensure that the named database is present with
the specified properties.
"""
dbname = "my_test"
charset = "utf8"
collate = "utf8_unicode_ci"
ret = {"name": dbname, "result": False, "comment": "", "changes": {}}
mock_result = {
"character_set": charset,
"collate": collate,
"name": dbname,
}
mock_result_alter_db = {True}
mock = MagicMock(return_value=mock_result)
mock_a = MagicMock(return_value=mock_result_alter_db)
mock_failed = MagicMock(return_value=False)
mock_err = MagicMock(return_value="salt")
mock_no_err = MagicMock(return_value=None)
mock_create = MagicMock(return_value=True)
mock_create_failed = MagicMock(return_value=False)
with patch.dict(mysql_database.__salt__, {"mysql.db_get": mock, "mysql.alter_db": mock_a}):
mod_charset = "ascii"
mod_collate = "ascii_general_ci"
with patch.dict(mysql_database.__opts__, {"test": True}):
comt = [
f"Database character set {mod_charset} != {charset} needs to be updated",
f"Database {dbname} is going to be updated",
]
ret.update({"comment": "\n".join(comt)})
ret.update({"result": None})
assert mysql_database.present(dbname, character_set=mod_charset) == ret
with patch.dict(mysql_database.__opts__, {"test": True}):
comt = [
f"Database {dbname} is already present",
f"Database collate {mod_collate} != {collate} needs to be updated",
]
ret.update({"comment": "\n".join(comt)})
ret.update({"result": None})
assert mysql_database.present(dbname, character_set=charset, collate=mod_collate) == ret
with patch.dict(mysql_database.__opts__, {}):
comt = [
f"Database character set {mod_charset} != {charset} needs to be updated",
f"Database collate {mod_collate} != {collate} needs to be updated",
]
ret.update({"comment": "\n".join(comt)})
ret.update({"result": True})
assert (
mysql_database.present(dbname, character_set=mod_charset, collate=mod_collate)
== ret
)
with patch.dict(mysql_database.__opts__, {"test": False}):
comt = f"Database {dbname} is already present"
ret.update({"comment": comt})
ret.update({"result": True})
assert mysql_database.present(dbname, character_set=charset, collate=collate) == ret
with patch.dict(mysql_database.__salt__, {"mysql.db_get": mock_failed}):
with patch.dict(mysql_database.__salt__, {"mysql.db_create": mock_create}):
with patch.object(mysql_database, "_get_mysql_error", mock_err):
ret.update({"comment": "salt", "result": False})
assert mysql_database.present(dbname) == ret
with patch.object(mysql_database, "_get_mysql_error", mock_no_err):
comt = f"The database {dbname} has been created"
ret.update({"comment": comt, "result": True})
ret.update({"changes": {dbname: "Present"}})
assert mysql_database.present(dbname) == ret
with patch.dict(mysql_database.__salt__, {"mysql.db_create": mock_create_failed}):
ret["comment"] = ""
with patch.object(mysql_database, "_get_mysql_error", mock_no_err):
ret.update({"changes": {}})
comt = f"Failed to create database {dbname}"
ret.update({"comment": comt, "result": False})
assert mysql_database.present(dbname) == ret
def test_absent():
"""
Test to ensure that the named database is absent.
"""
dbname = "my_test"
ret = {"name": dbname, "result": True, "comment": "", "changes": {}}
mock_db_exists = MagicMock(return_value=True)
mock_remove = MagicMock(return_value=True)
mock_remove_fail = MagicMock(return_value=False)
mock_err = MagicMock(return_value="salt")
with patch.dict(
mysql_database.__salt__,
{"mysql.db_exists": mock_db_exists, "mysql.db_remove": mock_remove},
):
with patch.dict(mysql_database.__opts__, {"test": True}):
comt = f"Database {dbname} is present and needs to be removed"
ret.update({"comment": comt, "result": None})
assert mysql_database.absent(dbname) == ret
with patch.dict(mysql_database.__opts__, {}):
comt = f"Database {dbname} has been removed"
ret.update({"comment": comt, "result": True})
ret.update({"changes": {dbname: "Absent"}})
assert mysql_database.absent(dbname) == ret
with patch.dict(
mysql_database.__salt__,
{"mysql.db_exists": mock_db_exists, "mysql.db_remove": mock_remove_fail},
):
with patch.dict(mysql_database.__opts__, {}):
with patch.object(mysql_database, "_get_mysql_error", mock_err):
ret["changes"] = {}
comt = f"Unable to remove database {dbname} (salt)"
ret.update({"comment": comt, "result": False})
assert mysql_database.absent(dbname) == ret
0707010000010C000081A400000000000000000000000167471E9C00000E1A000000000000000000000000000000000000004500000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/unit/states/test_grants.py"""
:codeauthor: Jayesh Kariya <jayeshk@saltstack.com>
"""
from unittest.mock import MagicMock
from unittest.mock import patch
import pytest
from saltext.mysql.states import mysql_grants
@pytest.fixture
def configure_loader_modules():
return {mysql_grants: {}}
def test_present():
"""
Test to ensure that the grant is present with the specified properties.
"""
name = "frank_exampledb"
ret = {"name": name, "result": True, "comment": "", "changes": {}}
mock = MagicMock(side_effect=[True, False, False, False])
mock_t = MagicMock(return_value=True)
mock_str = MagicMock(return_value="salt")
mock_none = MagicMock(return_value=None)
with patch.dict(
mysql_grants.__salt__,
{"mysql.grant_exists": mock, "mysql.grant_add": mock_t},
):
comt = "Grant None on None to None@localhost is already present"
ret.update({"comment": comt})
assert mysql_grants.present(name) == ret
with patch.object(mysql_grants, "_get_mysql_error", mock_str):
ret.update({"comment": "salt", "result": False})
assert mysql_grants.present(name) == ret
with patch.object(mysql_grants, "_get_mysql_error", mock_none):
with patch.dict(mysql_grants.__opts__, {"test": True}):
comt = "MySQL grant frank_exampledb is set to be created"
ret.update({"comment": comt, "result": None})
assert mysql_grants.present(name) == ret
with patch.dict(mysql_grants.__opts__, {"test": False}):
comt = "Grant None on None to None@localhost has been added"
ret.update({"comment": comt, "result": True, "changes": {name: "Present"}})
assert mysql_grants.present(name) == ret
def test_absent():
"""
Test to ensure that the grant is absent.
"""
name = "frank_exampledb"
ret = {"name": name, "result": True, "comment": "", "changes": {}}
mock = MagicMock(side_effect=[True, False])
mock_t = MagicMock(side_effect=[True, True, True, False, False])
mock_str = MagicMock(return_value="salt")
mock_none = MagicMock(return_value=None)
with patch.dict(
mysql_grants.__salt__,
{"mysql.grant_exists": mock_t, "mysql.grant_revoke": mock},
):
with patch.dict(mysql_grants.__opts__, {"test": True}):
comt = "MySQL grant frank_exampledb is set to be revoked"
ret.update({"comment": comt, "result": None})
assert mysql_grants.absent(name) == ret
with patch.dict(mysql_grants.__opts__, {"test": False}):
comt = "Grant None on None for None@localhost has been revoked"
ret.update({"comment": comt, "result": True, "changes": {name: "Absent"}})
assert mysql_grants.absent(name) == ret
with patch.object(mysql_grants, "_get_mysql_error", mock_str):
comt = "Unable to revoke grant None on None for None@localhost (salt)"
ret.update({"comment": comt, "result": False, "changes": {}})
assert mysql_grants.absent(name) == ret
comt = "Unable to determine if grant None on None for None@localhost exists (salt)"
ret.update({"comment": comt})
assert mysql_grants.absent(name) == ret
with patch.object(mysql_grants, "_get_mysql_error", mock_none):
comt = "Grant None on None to None@localhost is not present, so it cannot be revoked"
ret.update({"comment": comt, "result": True})
assert mysql_grants.absent(name) == ret
0707010000010D000081A400000000000000000000000167471E9C0000163F000000000000000000000000000000000000004400000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/unit/states/test_query.py"""
:codeauthor: Jayesh Kariya <jayeshk@saltstack.com>
"""
import logging
import os
from unittest.mock import MagicMock
from unittest.mock import patch
import pytest
from saltext.mysql.modules import mysql as mysql_mod
from saltext.mysql.states import mysql_query
log = logging.getLogger(__name__)
MySQLdb = pytest.importorskip("MySQLdb")
pymysql = pytest.importorskip("pymysql")
pymysql.install_as_MySQLdb()
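# Minimal stand-in for a MySQLdb connection object: it records the connect
# arguments and hands out MagicMock cursors, so mysql_query.run() can execute
# its statements without a real server.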
class MockMySQLConnect:
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
def autocommit(self, *args, **kwargs):
return True
def cursor(self, *args, **kwargs):
return MagicMock()
@pytest.fixture
def configure_loader_modules():
return {mysql_query: {}, mysql_mod: {}}
def test_run():
"""
Test to execute an arbitrary query on the specified database.
"""
name = "query_id"
database = "my_database"
query = "SELECT * FROM table;"
ret = {"name": name, "result": True, "comment": "", "changes": {}}
mock_t = MagicMock(return_value=True)
mock_f = MagicMock(return_value=False)
mock_str = MagicMock(return_value="salt")
mock_none = MagicMock(return_value=None)
mock_dict = MagicMock(return_value={"salt": "SALT"})
mock_lst = MagicMock(return_value=["grain"])
with patch.dict(mysql_query.__salt__, {"mysql.db_exists": mock_f}):
with patch.object(mysql_query, "_get_mysql_error", mock_str):
ret.update({"comment": "salt", "result": False})
assert mysql_query.run(name, database, query) == ret
with patch.object(mysql_query, "_get_mysql_error", mock_none):
comt = f"Database {name} is not present"
ret.update({"comment": comt, "result": None})
assert mysql_query.run(name, database, query) == ret
with patch.dict(
mysql_query.__salt__,
{
"mysql.db_exists": mock_t,
"grains.ls": mock_lst,
"grains.get": mock_dict,
"mysql.query": mock_str,
},
):
comt = "No execution needed. Grain grain already set"
ret.update({"comment": comt, "result": True})
assert (
mysql_query.run(
name,
database,
query,
output="grain",
grain="grain",
overwrite=False,
)
== ret
)
with patch.dict(mysql_query.__opts__, {"test": True}):
comt = "Query would execute, storing result in grain: grain"
ret.update({"comment": comt, "result": None})
assert mysql_query.run(name, database, query, output="grain", grain="grain") == ret
comt = "Query would execute, storing result in grain: grain:salt"
ret.update({"comment": comt})
assert (
mysql_query.run(name, database, query, output="grain", grain="grain", key="salt")
== ret
)
comt = "Query would execute, storing result in file: salt"
ret.update({"comment": comt})
assert mysql_query.run(name, database, query, output="salt", grain="grain") == ret
comt = "Query would execute, not storing result"
ret.update({"comment": comt})
assert mysql_query.run(name, database, query) == ret
comt = "No execution needed. Grain grain:salt already set"
ret.update({"comment": comt, "result": True})
assert (
mysql_query.run(
name,
database,
query,
output="grain",
grain="grain",
key="salt",
overwrite=False,
)
== ret
)
comt = "Error: output type 'grain' needs the grain parameter\n"
ret.update({"comment": comt, "result": False})
assert mysql_query.run(name, database, query, output="grain") == ret
with patch.object(os.path, "isfile", mock_t):
comt = "No execution needed. File salt already set"
ret.update({"comment": comt, "result": True})
assert (
mysql_query.run(
name,
database,
query,
output="salt",
grain="grain",
overwrite=False,
)
== ret
)
with patch.dict(mysql_query.__opts__, {"test": False}):
ret.update({"comment": "salt", "changes": {"query": "Executed"}})
assert mysql_query.run(name, database, query) == ret
def test_run_multiple_statements():
"""
Test to execute an arbitrary query on the specified database
and ensure that the correct multi_statements flag is passed along
to MySQLdb.connect.
"""
name = "query_id"
database = "my_database"
query = "SELECT * FROM table; SELECT * from another_table;"
mock_t = MagicMock(return_value=True)
with patch.dict(mysql_query.__salt__, {"mysql.db_exists": mock_t}), patch.dict(
mysql_query.__opts__, {"test": False}
), patch.dict(mysql_query.__salt__, {"mysql.query": mysql_mod.query}), patch.dict(
mysql_query.__salt__, {"mysql._execute": MagicMock()}
), patch.dict(
mysql_mod.__salt__, {"config.option": MagicMock()}
), patch(
"MySQLdb.connect", return_value=MockMySQLConnect()
) as mock_connect:
ret = mysql_query.run(name, database, query, client_flags=["multi_statements"])
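# CLIENT_MULTI_STATEMENTS is bit 16 of the MySQL client capability flags, so
# requesting client_flags=["multi_statements"] should surface as
# client_flag=65536 (0x10000) in the single MySQLdb.connect call checked below.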
assert 1 == len(mock_connect.mock_calls)
assert "client_flag=65536" in str(mock_connect.mock_calls[0])
0707010000010E000081A400000000000000000000000167471E9C0000160B000000000000000000000000000000000000004300000000test-repo-1-0.1/saltext_mysql-1.0.0/tests/unit/states/test_user.py"""
:codeauthor: Jayesh Kariya <jayeshk@saltstack.com>
"""
from unittest.mock import MagicMock
from unittest.mock import patch
import pytest
import salt.utils.data
from saltext.mysql.states import mysql_user
@pytest.fixture
def configure_loader_modules():
return {mysql_user: {}}
def test_present():
"""
Test to ensure that the named user is present with
the specified properties.
"""
name = "frank"
password = "bob@cat"
ret = {"name": name, "result": False, "comment": "", "changes": {}}
mock = MagicMock(
side_effect=[
True,
False,
True,
False,
False,
True,
False,
False,
False,
False,
False,
True,
]
)
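# The side_effect list above feeds mysql.user_exists its successive return
# values, so the order of the True/False entries has to line up with the
# sequence of present() calls made further down in this test.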
mock_t = MagicMock(return_value=True)
mock_f = MagicMock(return_value=False)
mock_str = MagicMock(return_value="salt")
mock_none = MagicMock(return_value=None)
mock_sn = MagicMock(side_effect=[None, "salt", None, None, None])
with patch.object(salt.utils.data, "is_true", mock_f):
comt = (
"Either password or password_hash must be specified,"
" unless allow_passwordless is True"
)
ret.update({"comment": comt})
assert mysql_user.present(name) == ret
with patch.dict(
mysql_user.__salt__,
{"mysql.user_exists": mock, "mysql.user_chpass": mock_t},
):
with patch.object(salt.utils.data, "is_true", mock_t):
comt = "User frank@localhost is already present with passwordless login"
ret.update({"comment": comt, "result": True})
assert mysql_user.present(name, allow_passwordless=True) == ret
with patch.object(mysql_user, "_get_mysql_error", mock_str):
ret.update({"comment": "salt", "result": False})
assert mysql_user.present(name) == ret
with patch.object(mysql_user, "_get_mysql_error", mock_str):
comt = "User frank@localhost is already present with the desired password"
ret.update({"comment": comt, "result": True})
assert mysql_user.present(name, password=password) == ret
with patch.object(mysql_user, "_get_mysql_error", mock_str):
ret.update({"comment": "salt", "result": False})
assert mysql_user.present(name, password=password) == ret
with patch.object(mysql_user, "_get_mysql_error", mock_none):
with patch.dict(mysql_user.__opts__, {"test": True}):
comt = "Password for user frank@localhost is set to be changed"
ret.update({"comment": comt, "result": None})
assert mysql_user.present(name, password=password) == ret
with patch.object(mysql_user, "_get_mysql_error", mock_sn):
with patch.dict(mysql_user.__opts__, {"test": False}):
ret.update({"comment": "salt", "result": False})
assert mysql_user.present(name, password=password) == ret
with patch.dict(mysql_user.__opts__, {"test": True}):
comt = "User frank@localhost is set to be added"
ret.update({"comment": comt, "result": None})
assert mysql_user.present(name, password=password) == ret
with patch.dict(mysql_user.__opts__, {"test": False}):
comt = "Password for user frank@localhost has been changed"
ret.update({"comment": comt, "result": True, "changes": {name: "Updated"}})
assert mysql_user.present(name, password=password) == ret
def test_absent():
"""
Test to ensure that the named user is absent.
"""
name = "frank_exampledb"
ret = {"name": name, "result": True, "comment": "", "changes": {}}
mock = MagicMock(side_effect=[True, True, True, False, False, False])
mock_t = MagicMock(side_effect=[True, False])
mock_str = MagicMock(return_value="salt")
mock_none = MagicMock(return_value=None)
with patch.dict(
mysql_user.__salt__,
{"mysql.user_exists": mock, "mysql.user_remove": mock_t},
):
with patch.dict(mysql_user.__opts__, {"test": True}):
comt = "User frank_exampledb@localhost is set to be removed"
ret.update({"comment": comt, "result": None})
assert mysql_user.absent(name) == ret
with patch.dict(mysql_user.__opts__, {"test": False}):
comt = "User frank_exampledb@localhost has been removed"
ret.update(
{
"comment": comt,
"result": True,
"changes": {"frank_exampledb": "Absent"},
}
)
assert mysql_user.absent(name) == ret
with patch.object(mysql_user, "_get_mysql_error", mock_str):
comt = "User frank_exampledb@localhost has been removed"
ret.update({"comment": "salt", "result": False, "changes": {}})
assert mysql_user.absent(name) == ret
comt = "User frank_exampledb@localhost has been removed"
ret.update({"comment": "salt"})
assert mysql_user.absent(name) == ret
with patch.object(mysql_user, "_get_mysql_error", mock_none):
comt = "User frank_exampledb@localhost is not present, so it cannot be removed"
ret.update({"comment": comt, "result": True, "changes": {}})
assert mysql_user.absent(name) == ret
07070100000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000B00000000TRAILER!!!1436 blocks