diff --git a/.github/workflows/build-test-release.yml b/.github/workflows/build-test-release.yml
index 3912cf3be..6fed15e50 100644
--- a/.github/workflows/build-test-release.yml
+++ b/.github/workflows/build-test-release.yml
@@ -180,6 +180,15 @@ jobs:
   test-splunk-matrix:
     name: Test Matrix
+    needs:
+      - pre-commit
+      - compliance-dependencies
+      - compliance-copyrights
+      - test-splunk-external
+      - test-splunk-doc
+      - test-splunk-unit
+      - review_secrets
+      - review-dog-misspell
     runs-on: ubuntu-latest
     strategy:
       fail-fast: false
@@ -213,25 +222,20 @@
           curl -sSL https://install.python-poetry.org | python3 -
           poetry install
           poetry run coverage run --source=./pytest_splunk_addon/standard_lib -m pytest -v tests/unit
-          poetry run coverage html
-      - name: Archive test coverage results
-        uses: actions/upload-artifact@v2
+          poetry run coverage xml
+      - name: Upload coverage to Codecov
+        uses: codecov/codecov-action@v2
         with:
-          name: code-coverage-report-unit-tests
-          path: htmlcov
-
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: ./coverage.xml
+          directory: ./coverage/reports/
+          env_vars: OS,PYTHON
+          fail_ci_if_error: true
+          verbose: true
   publish:
     name: publish
     needs:
-      - pre-commit
-      - compliance-dependencies
-      - compliance-copyrights
-      - test-splunk-external
-      - test-splunk-doc
       - test-splunk-matrix
-      - test-splunk-unit
-      - review_secrets
-      - review-dog-misspell
     runs-on: ubuntu-latest
     steps:
      - uses: actions/checkout@v2
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 6a8005bbc..34ebc1688 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -18,6 +18,7 @@ repos:
     rev: v4.1.0
     hooks:
       - id: check-merge-conflict
+      - id: debug-statements
   - repo: https://github.com/psf/black
     rev: 21.12b0
     hooks:
diff --git a/README.rst b/README.rst
index bf3a3a7d8..74f2dfa0d 100644
--- a/README.rst
+++ b/README.rst
@@ -42,13 +42,11 @@ Developing
 
 Note: Must install docker desktop, vscode or pycharm pro optional
 
-Note2: Appinspect requires libmagic verify this has been installed correctly each time a new workstation/vm is used https://dev.splunk.com/enterprise/docs/releaseapps/appinspect/splunkappinspectclitool/installappinspect
-
 .. code:: bash
 
     $ git clone --recurse-submodules -j8 git@github.com:splunk/pytest-splunk-addon.git
 
-    $ #setup python venv must be 3.7
+    $ #setup python venv must be 3.7 
     $ /Library/Frameworks/Python.framework/Versions/3.7/bin/python3 -m venv .venv
     $ source .venv/bin/activate
@@ -57,8 +55,6 @@ Note2: Appinspect requires libmagic verify this has been installed correctly eac
 
     $ pip3 install -r requirements.txt
 
-    $ pip3 install https://download.splunk.com/misc/appinspect/splunk-appinspect-latest.tar.gz
-
     $ python setup.py develop
 
diff --git a/docs/api_reference/sample_generation.rst b/docs/api_reference/sample_generation.rst
index 31cf1cd4d..99a155752 100644
--- a/docs/api_reference/sample_generation.rst
+++ b/docs/api_reference/sample_generation.rst
@@ -1,9 +1,9 @@
 DataGenerator
 ------------------
 
-EventgenParser
-~~~~~~~~~~~~~~~~~~
-.. automodule:: standard_lib.sample_generation.eventgen_parser
+PytestSplunkAddonDataParser
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+.. automodule:: standard_lib.sample_generation.pytest_splunk_addon_data_parser
     :members:
     :show-inheritance:
diff --git a/docs/generate_conf.rst b/docs/generate_conf.rst
index 68d91dd37..b3c568533 100644
--- a/docs/generate_conf.rst
+++ b/docs/generate_conf.rst
@@ -4,7 +4,11 @@ Generate Conf Utility
 .. _generate_conf:
 
 Overview
-""""""""""
+""""""""
+
+.. note::
+
+   This utility is deprecated since `pytest-splunk-addon` v1.12.0; the latest version in which it is available is v1.11.4.
 
 * The utility helps in creating the `pytest-splunk-addon-data.conf` from the existing `eventgen.conf` of the add-on.
 * The utility adds the following metadata required for the index-time tests in the new conf file:
diff --git a/docs/index.rst b/docs/index.rst
index 3d1fecb5b..907fb7ed1 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -19,7 +19,6 @@ pytest-splunk-addon documentation
    index_time_tests
    sample_generator
    generate_conf
-   release_history
    api_reference/api_reference
    troubleshoot
diff --git a/docs/overview.rst b/docs/overview.rst
index 90f593b52..e215cb131 100644
--- a/docs/overview.rst
+++ b/docs/overview.rst
@@ -23,7 +23,7 @@ Features
 
 Release notes
 -------------
-Find details about all the releases here: :ref:`Release History Page`
+Find details about all the releases `here <https://github.com/splunk/pytest-splunk-addon/releases>`_.
 
 Installation
 ------------
diff --git a/docs/release_history.rst b/docs/release_history.rst
deleted file mode 100644
index b7741004d..000000000
--- a/docs/release_history.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-.. _release_history:
-
-=================
-Release History
-=================
-
-All releases can be found `here <https://github.com/splunk/pytest-splunk-addon/releases>`_.
\ No newline at end of file
diff --git a/docs/sample_generator.rst b/docs/sample_generator.rst
index b7b156855..0019a72be 100644
--- a/docs/sample_generator.rst
+++ b/docs/sample_generator.rst
@@ -1,7 +1,7 @@
 Data Generator
 ===============
 
-To ingest samples into Splunk, plugin takes `pytest-splunk-addon-data.conf` or `eventgen.conf` as input.
+To ingest samples into Splunk, the plugin takes `pytest-splunk-addon-data.conf` as input.
 The sample generation & ingestion takes place before executing the testcases.
 
 For index-time test cases, there are multiple metadata required about the sample file for which `pytest-splunk-addon-data.conf` must be created and provided to the pytest command.
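To make the `pytest-splunk-addon-data.conf` input described above concrete, here is a minimal, illustrative stanza. The stanza name, sourcetype, and token values are hypothetical; the key names follow the conf format documented by the plugin, so treat this as a sketch rather than a copy-paste template:

.. code:: ini

    # Hypothetical stanza; one stanza per sample file shipped with the add-on.
    [sample.log]
    sourcetype = vendor:product:log
    input_type = file_monitor
    index = main
    sample_count = 2

    # Metadata consumed by the index-time tests:
    host_type = plugin
    timestamp_type = event

    # Tokens are placeholders in the sample that are replaced before ingestion.
    token.0.token = ##timestamp##
    token.0.replacementType = timestamp
    token.0.replacement = %Y-%m-%dT%H:%M:%S

Keys like `host_type` and `timestamp_type` carry the index-time metadata referenced in `docs/generate_conf.rst`, which a plain `eventgen.conf` does not provide; that is why the plugin now accepts only `pytest-splunk-addon-data.conf`.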
diff --git a/poetry.lock b/poetry.lock index d63c62fd9..5addc61ed 100644 --- a/poetry.lock +++ b/poetry.lock @@ -24,17 +24,17 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "attrs" -version = "21.2.0" +version = "21.4.0" description = "Classes Without Boilerplate" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] [[package]] name = "babel" @@ -47,21 +47,6 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [package.dependencies] pytz = ">=2015.7" -[[package]] -name = "beautifulsoup4" -version = "4.10.0" -description = "Screen-scraping library" -category = "main" -optional = false -python-versions = ">3.0.0" - -[package.dependencies] -soupsieve = ">1.2" - -[package.extras] -html5lib = ["html5lib"] -lxml = ["lxml"] - [[package]] name = "certifi" version = "2021.10.8" @@ -70,17 +55,9 @@ category = "main" optional = false python-versions = "*" -[[package]] -name = "chardet" -version = "3.0.4" -description = "Universal encoding detector for Python 2 and 3" -category = "main" -optional = false -python-versions = "*" - [[package]] name = "charset-normalizer" -version = "2.0.9" +version = "2.0.12" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
category = "main" optional = false @@ -89,14 +66,6 @@ python-versions = ">=3.5.0" [package.extras] unicode_backport = ["unicodedata2"] -[[package]] -name = "click" -version = "7.1.2" -description = "Composable command line interface toolkit" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - [[package]] name = "colorama" version = "0.4.4" @@ -107,7 +76,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "coverage" -version = "6.3" +version = "6.3.1" description = "Code coverage measurement for Python" category = "dev" optional = false @@ -119,17 +88,6 @@ tomli = {version = "*", optional = true, markers = "extra == \"toml\""} [package.extras] toml = ["tomli"] -[[package]] -name = "croniter" -version = "1.1.0" -description = "croniter provides iteration for datetime object with cron like format" -category = "main" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[package.dependencies] -python-dateutil = "*" - [[package]] name = "docutils" version = "0.17.1" @@ -138,14 +96,6 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -[[package]] -name = "enum34" -version = "1.1.10" -description = "Python 3.4 Enum backported to 3.3, 3.2, 3.1, 2.7, 2.6, 2.5, and 2.4" -category = "main" -optional = false -python-versions = "*" - [[package]] name = "execnet" version = "1.9.0" @@ -159,7 +109,7 @@ testing = ["pre-commit"] [[package]] name = "faker" -version = "11.0.0" +version = "11.4.0" description = "Faker is a Python package that generates fake data for you." category = "main" optional = false @@ -172,11 +122,11 @@ typing-extensions = {version = ">=3.10.0.2", markers = "python_version < \"3.8\" [[package]] name = "filelock" -version = "3.4.0" +version = "3.4.2" description = "A platform independent file lock." 
category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.extras] docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"] @@ -201,25 +151,6 @@ category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -[[package]] -name = "futures" -version = "3.1.1" -description = "Backport of the concurrent.futures package from Python 3.2" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "futures-then" -version = "0.1.1" -description = "Python Futures made then-able" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -futures = "*" - [[package]] name = "idna" version = "3.3" @@ -238,11 +169,11 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "importlib-metadata" -version = "4.8.2" +version = "4.11.0" description = "Read metadata from Python packages" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} @@ -251,7 +182,7 @@ zipp = ">=0.5" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] perf = ["ipython"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] [[package]] name = "importlib-resources" @@ -276,19 +207,11 @@ category = "main" optional = false python-versions = "*" -[[package]] -name = "ipaddress" -version = "1.0.23" -description = "IPv4/IPv6 manipulation library" -category = "main" -optional = false -python-versions = "*" - [[package]] name = "jinja2" version = "3.0.3" description = "A very fast and expressive template engine." -category = "main" +category = "dev" optional = false python-versions = ">=3.6" @@ -298,20 +221,9 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] -[[package]] -name = "jsoncomment" -version = "0.3.3" -description = "A wrapper to JSON parsers allowing comments, multiline strings and trailing commas" -category = "main" -optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*" - -[package.extras] -ujson = ["ujson (>=1.30)"] - [[package]] name = "jsonschema" -version = "4.3.3" +version = "4.4.0" description = "An implementation of JSON Schema validation for Python" category = "main" optional = false @@ -330,7 +242,7 @@ format_nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- [[package]] name = "junitparser" -version = "2.3.0" +version = "2.4.2" description = "Manipulates JUnit/xUnit Result XML files" category = "main" optional = false @@ -339,20 +251,9 @@ python-versions = "*" [package.dependencies] future = "*" -[[package]] -name = "langdetect" -version = "1.0.9" -description = "Language detection library ported from Google's language-detection." -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -six = "*" - [[package]] name = "lovely-pytest-docker" -version = "0.2.1" +version = "0.3.0" description = "Pytest testing utilities with docker containers." 
category = "main" optional = false @@ -360,55 +261,13 @@ python-versions = "*" [package.dependencies] pytest = "*" - -[[package]] -name = "lxml" -version = "4.6.5" -description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" - -[package.extras] -cssselect = ["cssselect (>=0.7)"] -html5 = ["html5lib"] -htmlsoup = ["beautifulsoup4"] -source = ["Cython (>=0.29.7)"] - -[[package]] -name = "mako" -version = "1.1.6" -description = "A super-fast templating language that borrows the best ideas from the existing templating languages." -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[package.dependencies] -MarkupSafe = ">=0.9.2" - -[package.extras] -babel = ["babel"] -lingua = ["lingua"] - -[[package]] -name = "markdown" -version = "3.3.6" -description = "Python implementation of Markdown." -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} - -[package.extras] -testing = ["coverage", "pyyaml"] +six = "*" [[package]] name = "markupsafe" version = "2.0.1" description = "Safely add untrusted strings to HTML/XML markup." -category = "main" +category = "dev" optional = false python-versions = ">=3.6" @@ -423,22 +282,6 @@ python-versions = ">=3.6" [package.dependencies] pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" -[[package]] -name = "painter" -version = "0.3.1" -description = "Your own expressive painter who colors text in your terminal." -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "pillow" -version = "8.3.2" -description = "Python Imaging Library (Fork)" -category = "main" -optional = false -python-versions = ">=3.6" - [[package]] name = "pluggy" version = "1.0.0" @@ -464,7 +307,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "pygments" -version = "2.10.0" +version = "2.11.2" description = "Pygments is a syntax highlighting package written in Python." 
category = "dev" optional = false @@ -472,7 +315,7 @@ python-versions = ">=3.5" [[package]] name = "pyparsing" -version = "3.0.6" +version = "3.0.7" description = "Python parsing module" category = "main" optional = false @@ -483,11 +326,11 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pyrsistent" -version = "0.18.0" +version = "0.18.1" description = "Persistent/Functional/Immutable data structures" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "pytest" @@ -528,11 +371,11 @@ testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtuale [[package]] name = "pytest-forked" -version = "1.3.0" +version = "1.4.0" description = "run tests in isolated forked subprocesses" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.6" [package.dependencies] py = "*" @@ -540,11 +383,11 @@ pytest = ">=3.10" [[package]] name = "pytest-mock" -version = "3.6.1" +version = "3.7.0" description = "Thin-wrapper around the mock package for easier use with pytest" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] pytest = ">=5.0" @@ -592,14 +435,6 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" [package.dependencies] six = ">=1.5" -[[package]] -name = "python-magic" -version = "0.4.18" -description = "File type identification using libmagic" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - [[package]] name = "pytz" version = "2021.3" @@ -608,25 +443,9 @@ category = "dev" optional = false python-versions = "*" -[[package]] -name = "pyyaml" -version = "5.4.1" -description = "YAML parser and emitter for Python" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" - -[[package]] -name = "regex" -version = "2019.06.08" -description = "Alternative regular expression module, to replace re." -category = "main" -optional = false -python-versions = "*" - [[package]] name = "requests" -version = "2.26.0" +version = "2.27.1" description = "Python HTTP for Humans." category = "main" optional = false @@ -658,14 +477,6 @@ six = "*" fixture = ["fixtures"] test = ["fixtures", "mock", "purl", "pytest", "sphinx", "testrepository (>=0.0.18)", "testtools"] -[[package]] -name = "semver" -version = "2.13.0" -description = "Python helper for Semantic Versioning (http://semver.org/)" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - [[package]] name = "six" version = "1.16.0" @@ -682,17 +493,9 @@ category = "dev" optional = false python-versions = "*" -[[package]] -name = "soupsieve" -version = "2.3.1" -description = "A modern CSS selector implementation for Beautiful Soup." 
-category = "main" -optional = false -python-versions = ">=3.6" - [[package]] name = "sphinx" -version = "4.3.1" +version = "4.4.0" description = "Python documentation generator" category = "dev" optional = false @@ -704,6 +507,7 @@ babel = ">=1.3" colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} docutils = ">=0.14,<0.18" imagesize = "*" +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} Jinja2 = ">=2.3" packaging = "*" Pygments = ">=2.0" @@ -718,7 +522,7 @@ sphinxcontrib-serializinghtml = ">=1.1.5" [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.900)", "docutils-stubs", "types-typed-ast", "types-pkg-resources", "types-requests"] +lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.931)", "docutils-stubs", "types-typed-ast", "types-requests"] test = ["pytest", "pytest-cov", "html5lib", "cython", "typed-ast"] [[package]] @@ -825,37 +629,6 @@ python-versions = ">=3.5" lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest"] -[[package]] -name = "splunk-appinspect" -version = "2.12.0" -description = "Automatic validation checks for Splunk Apps" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -beautifulsoup4 = ">=4.8.1,<5.0.0" -chardet = "3.0.4" -click = ">=7.0.0,<8.0.0" -croniter = ">0.3.34,<2" -enum34 = ">=1.1.6,<2.0.0" -future = ">=0.18.0,<1.0.0" -futures-then = ">=0.1.1,<1.0.0" -ipaddress = ">=1.0.22,<2.0.0" -jinja2 = ">=2.11.3,<4" -jsoncomment = "0.3.3" -langdetect = ">=1.0.7,<2.0.0" -lxml = ">=4.6.0,<5.0.0" -mako = ">=1.0.12,<2.0.0" -markdown = ">=3.1.1,<4.0.0" -painter = ">=0.3.1,<1.0.0" -pillow = "8.3.2" -python-magic = "0.4.18" -pyyaml = ">=5.4.0,<6.0.0" -regex = "2019.6.8" -semver = ">=2.13.0" -six = ">=1.12.0,<2.0.0" - [[package]] name = "splunk-sdk" version = "1.6.18" @@ -882,15 +655,15 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "tomli" -version = "1.2.2" +version = "2.0.1" description = "A lil' TOML parser" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "typing-extensions" -version = "4.0.1" +version = "4.1.1" description = "Backported and Experimental Type Hints for Python 3.6+" category = "main" optional = false @@ -898,7 +671,7 @@ python-versions = ">=3.6" [[package]] name = "urllib3" -version = "1.26.7" +version = "1.26.8" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false @@ -911,15 +684,15 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "zipp" -version = "3.6.0" +version = "3.7.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] [extras] docker = ["lovely-pytest-docker"] @@ -927,7 +700,7 @@ docker = ["lovely-pytest-docker"] [metadata] lock-version = "1.1" python-versions = "^3.7" -content-hash = "5b67b58ef9e2fc44d0d4c642c6ecea1a63cfa3a7718f36fe04a3345a38fe1ea6" +content-hash = "cef4948753d2ac99208a6f1ce903cf06bf9c89c23e1aa3087bd22abe0a2df0db" [metadata.files] addonfactory-splunk-conf-parser-lib = [ @@ -943,107 +716,83 @@ atomicwrites = [ {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, ] attrs = [ - {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, - {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, + {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, + {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, ] babel = [ {file = "Babel-2.9.1-py2.py3-none-any.whl", hash = "sha256:ab49e12b91d937cd11f0b67cb259a57ab4ad2b59ac7a3b41d6c06c0ac5b0def9"}, {file = "Babel-2.9.1.tar.gz", hash = "sha256:bc0c176f9f6a994582230df350aa6e05ba2ebe4b3ac317eab29d9be5d2768da0"}, ] -beautifulsoup4 = [ - {file = "beautifulsoup4-4.10.0-py3-none-any.whl", hash = "sha256:9a315ce70049920ea4572a4055bc4bd700c940521d36fc858205ad4fcde149bf"}, - {file = "beautifulsoup4-4.10.0.tar.gz", hash = "sha256:c23ad23c521d818955a4151a67d81580319d4bf548d3d49f4223ae041ff98891"}, -] certifi = [ {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, ] -chardet = [ - {file = "chardet-3.0.4-py2.py3-none-any.whl", hash = "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"}, - {file = "chardet-3.0.4.tar.gz", hash = "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"}, -] charset-normalizer = [ - {file = "charset-normalizer-2.0.9.tar.gz", hash = "sha256:b0b883e8e874edfdece9c28f314e3dd5badf067342e42fb162203335ae61aa2c"}, - {file = "charset_normalizer-2.0.9-py3-none-any.whl", hash = "sha256:1eecaa09422db5be9e29d7fc65664e6c33bd06f9ced7838578ba40d58bdf3721"}, -] -click = [ - {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, - {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, + {file = "charset-normalizer-2.0.12.tar.gz", hash = 
"sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, + {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, ] colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] coverage = [ - {file = "coverage-6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e8071e7d9ba9f457fc674afc3de054450be2c9b195c470147fbbc082468d8ff7"}, - {file = "coverage-6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:86c91c511853dfda81c2cf2360502cb72783f4b7cebabef27869f00cbe1db07d"}, - {file = "coverage-6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c4ce3b647bd1792d4394f5690d9df6dc035b00bcdbc5595099c01282a59ae01"}, - {file = "coverage-6.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a491e159294d756e7fc8462f98175e2d2225e4dbe062cca7d3e0d5a75ba6260"}, - {file = "coverage-6.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d008e0f67ac800b0ca04d7914b8501312c8c6c00ad8c7ba17754609fae1231a"}, - {file = "coverage-6.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4578728c36de2801c1deb1c6b760d31883e62e33f33c7ba8f982e609dc95167d"}, - {file = "coverage-6.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7ee317486593193e066fc5e98ac0ce712178c21529a85c07b7cb978171f25d53"}, - {file = "coverage-6.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2bc85664b06ba42d14bb74d6ddf19d8bfc520cb660561d2d9ce5786ae72f71b5"}, - {file = "coverage-6.3-cp310-cp310-win32.whl", hash = "sha256:27a94db5dc098c25048b0aca155f5fac674f2cf1b1736c5272ba28ead2fc267e"}, - {file = "coverage-6.3-cp310-cp310-win_amd64.whl", hash = "sha256:bde4aeabc0d1b2e52c4036c54440b1ad05beeca8113f47aceb4998bb7471e2c2"}, - {file = "coverage-6.3-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:509c68c3e2015022aeda03b003dd68fa19987cdcf64e9d4edc98db41cfc45d30"}, - {file = "coverage-6.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e4ff163602c5c77e7bb4ea81ba5d3b793b4419f8acd296aae149370902cf4e92"}, - {file = "coverage-6.3-cp311-cp311-win_amd64.whl", hash = "sha256:d1675db48490e5fa0b300f6329ecb8a9a37c29b9ab64fa9c964d34111788ca2d"}, - {file = "coverage-6.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7eed8459a2b81848cafb3280b39d7d49950d5f98e403677941c752e7e7ee47cb"}, - {file = "coverage-6.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b4285fde5286b946835a1a53bba3ad41ef74285ba9e8013e14b5ea93deaeafc"}, - {file = "coverage-6.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4748349734110fd32d46ff8897b561e6300d8989a494ad5a0a2e4f0ca974fc7"}, - {file = "coverage-6.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:823f9325283dc9565ba0aa2d240471a93ca8999861779b2b6c7aded45b58ee0f"}, - {file = "coverage-6.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:fff16a30fdf57b214778eff86391301c4509e327a65b877862f7c929f10a4253"}, - {file = "coverage-6.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:da1a428bdbe71f9a8c270c7baab29e9552ac9d0e0cba5e7e9a4c9ee6465d258d"}, - {file = 
"coverage-6.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7d82c610a2e10372e128023c5baf9ce3d270f3029fe7274ff5bc2897c68f1318"}, - {file = "coverage-6.3-cp37-cp37m-win32.whl", hash = "sha256:11e61c5548ecf74ea1f8b059730b049871f0e32b74f88bd0d670c20c819ad749"}, - {file = "coverage-6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:8e0c3525b1a182c8ffc9bca7e56b521e0c2b8b3e82f033c8e16d6d721f1b54d6"}, - {file = "coverage-6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a189036c50dcd56100746139a459f0d27540fef95b09aba03e786540b8feaa5f"}, - {file = "coverage-6.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:32168001f33025fd756884d56d01adebb34e6c8c0b3395ca8584cdcee9c7c9d2"}, - {file = "coverage-6.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5d79c9af3f410a2b5acad91258b4ae179ee9c83897eb9de69151b179b0227f5"}, - {file = "coverage-6.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:85c5fc9029043cf8b07f73fbb0a7ab6d3b717510c3b5642b77058ea55d7cacde"}, - {file = "coverage-6.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7596aa2f2b8fa5604129cfc9a27ad9beec0a96f18078cb424d029fdd707468d"}, - {file = "coverage-6.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ce443a3e6df90d692c38762f108fc4c88314bf477689f04de76b3f252e7a351c"}, - {file = "coverage-6.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:012157499ec4f135fc36cd2177e3d1a1840af9b236cbe80e9a5ccfc83d912a69"}, - {file = "coverage-6.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0a34d313105cdd0d3644c56df2d743fe467270d6ab93b5d4a347eb9fec8924d6"}, - {file = "coverage-6.3-cp38-cp38-win32.whl", hash = "sha256:6e78b1e25e5c5695dea012be473e442f7094d066925604be20b30713dbd47f89"}, - {file = "coverage-6.3-cp38-cp38-win_amd64.whl", hash = "sha256:433b99f7b0613bdcdc0b00cc3d39ed6d756797e3b078d2c43f8a38288520aec6"}, - {file = "coverage-6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9ed3244b415725f08ca3bdf02ed681089fd95e9465099a21c8e2d9c5d6ca2606"}, - {file = "coverage-6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab4fc4b866b279740e0d917402f0e9a08683e002f43fa408e9655818ed392196"}, - {file = "coverage-6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8582e9280f8d0f38114fe95a92ae8d0790b56b099d728cc4f8a2e14b1c4a18c"}, - {file = "coverage-6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c72bb4679283c6737f452eeb9b2a0e570acaef2197ad255fb20162adc80bea76"}, - {file = "coverage-6.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca29c352389ea27a24c79acd117abdd8a865c6eb01576b6f0990cd9a4e9c9f48"}, - {file = "coverage-6.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:152cc2624381df4e4e604e21bd8e95eb8059535f7b768c1fb8b8ae0b26f47ab0"}, - {file = "coverage-6.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:51372e24b1f7143ee2df6b45cff6a721f3abe93b1e506196f3ffa4155c2497f7"}, - {file = "coverage-6.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:72d9d186508325a456475dd05b1756f9a204c7086b07fffb227ef8cee03b1dc2"}, - {file = "coverage-6.3-cp39-cp39-win32.whl", hash = "sha256:649df3641eb351cdfd0d5533c92fc9df507b6b2bf48a7ef8c71ab63cbc7b5c3c"}, - {file = "coverage-6.3-cp39-cp39-win_amd64.whl", hash = "sha256:e67ccd53da5958ea1ec833a160b96357f90859c220a00150de011b787c27b98d"}, - {file = "coverage-6.3-pp36.pp37.pp38-none-any.whl", hash = 
"sha256:27ac7cb84538e278e07569ceaaa6f807a029dc194b1c819a9820b9bb5dbf63ab"}, - {file = "coverage-6.3.tar.gz", hash = "sha256:987a84ff98a309994ca77ed3cc4b92424f824278e48e4bf7d1bb79a63cfe2099"}, -] -croniter = [ - {file = "croniter-1.1.0-py2.py3-none-any.whl", hash = "sha256:d30dd147d1daec39d015a15b8cceb3069b9780291b9c141e869c32574a8eeacb"}, - {file = "croniter-1.1.0.tar.gz", hash = "sha256:4023e4d18ced979332369964351e8f4f608c1f7c763e146b1d740002c4245247"}, + {file = "coverage-6.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeffd96882d8c06d31b65dddcf51db7c612547babc1c4c5db6a011abe9798525"}, + {file = "coverage-6.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:621f6ea7260ea2ffdaec64fe5cb521669984f567b66f62f81445221d4754df4c"}, + {file = "coverage-6.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84f2436d6742c01136dd940ee158bfc7cf5ced3da7e4c949662b8703b5cd8145"}, + {file = "coverage-6.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de73fca6fb403dd72d4da517cfc49fcf791f74eee697d3219f6be29adf5af6ce"}, + {file = "coverage-6.3.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78fbb2be068a13a5d99dce9e1e7d168db880870f7bc73f876152130575bd6167"}, + {file = "coverage-6.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f5a4551dfd09c3bd12fca8144d47fe7745275adf3229b7223c2f9e29a975ebda"}, + {file = "coverage-6.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7bff3a98f63b47464480de1b5bdd80c8fade0ba2832c9381253c9b74c4153c27"}, + {file = "coverage-6.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a06c358f4aed05fa1099c39decc8022261bb07dfadc127c08cfbd1391b09689e"}, + {file = "coverage-6.3.1-cp310-cp310-win32.whl", hash = "sha256:9fff3ff052922cb99f9e52f63f985d4f7a54f6b94287463bc66b7cdf3eb41217"}, + {file = "coverage-6.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:276b13cc085474e482566c477c25ed66a097b44c6e77132f3304ac0b039f83eb"}, + {file = "coverage-6.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:56c4a409381ddd7bbff134e9756077860d4e8a583d310a6f38a2315b9ce301d0"}, + {file = "coverage-6.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eb494070aa060ceba6e4bbf44c1bc5fa97bfb883a0d9b0c9049415f9e944793"}, + {file = "coverage-6.3.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e15d424b8153756b7c903bde6d4610be0c3daca3986173c18dd5c1a1625e4cd"}, + {file = "coverage-6.3.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61d47a897c1e91f33f177c21de897267b38fbb45f2cd8e22a710bcef1df09ac1"}, + {file = "coverage-6.3.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:25e73d4c81efa8ea3785274a2f7f3bfbbeccb6fcba2a0bdd3be9223371c37554"}, + {file = "coverage-6.3.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fac0bcc5b7e8169bffa87f0dcc24435446d329cbc2b5486d155c2e0f3b493ae1"}, + {file = "coverage-6.3.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:72128176fea72012063200b7b395ed8a57849282b207321124d7ff14e26988e8"}, + {file = "coverage-6.3.1-cp37-cp37m-win32.whl", hash = "sha256:1bc6d709939ff262fd1432f03f080c5042dc6508b6e0d3d20e61dd045456a1a0"}, + {file = "coverage-6.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:618eeba986cea7f621d8607ee378ecc8c2504b98b3fdc4952b30fe3578304687"}, + {file = "coverage-6.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:d5ed164af5c9078596cfc40b078c3b337911190d3faeac830c3f1274f26b8320"}, + {file = "coverage-6.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:352c68e233409c31048a3725c446a9e48bbff36e39db92774d4f2380d630d8f8"}, + {file = "coverage-6.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:448d7bde7ceb6c69e08474c2ddbc5b4cd13c9e4aa4a717467f716b5fc938a734"}, + {file = "coverage-6.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9fde6b90889522c220dd56a670102ceef24955d994ff7af2cb786b4ba8fe11e4"}, + {file = "coverage-6.3.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e647a0be741edbb529a72644e999acb09f2ad60465f80757da183528941ff975"}, + {file = "coverage-6.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a5cdc3adb4f8bb8d8f5e64c2e9e282bc12980ef055ec6da59db562ee9bdfefa"}, + {file = "coverage-6.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2dd70a167843b4b4b2630c0c56f1b586fe965b4f8ac5da05b6690344fd065c6b"}, + {file = "coverage-6.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9ad0a117b8dc2061ce9461ea4c1b4799e55edceb236522c5b8f958ce9ed8fa9a"}, + {file = "coverage-6.3.1-cp38-cp38-win32.whl", hash = "sha256:e92c7a5f7d62edff50f60a045dc9542bf939758c95b2fcd686175dd10ce0ed10"}, + {file = "coverage-6.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:482fb42eea6164894ff82abbcf33d526362de5d1a7ed25af7ecbdddd28fc124f"}, + {file = "coverage-6.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c5b81fb37db76ebea79aa963b76d96ff854e7662921ce742293463635a87a78d"}, + {file = "coverage-6.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a4f923b9ab265136e57cc14794a15b9dcea07a9c578609cd5dbbfff28a0d15e6"}, + {file = "coverage-6.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56d296cbc8254a7dffdd7bcc2eb70be5a233aae7c01856d2d936f5ac4e8ac1f1"}, + {file = "coverage-6.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245ab82e8554fa88c4b2ab1e098ae051faac5af829efdcf2ce6b34dccd5567c"}, + {file = "coverage-6.3.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f2b05757c92ad96b33dbf8e8ec8d4ccb9af6ae3c9e9bd141c7cc44d20c6bcba"}, + {file = "coverage-6.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9e3dd806f34de38d4c01416344e98eab2437ac450b3ae39c62a0ede2f8b5e4ed"}, + {file = "coverage-6.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d651fde74a4d3122e5562705824507e2f5b2d3d57557f1916c4b27635f8fbe3f"}, + {file = "coverage-6.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:704f89b87c4f4737da2860695a18c852b78ec7279b24eedacab10b29067d3a38"}, + {file = "coverage-6.3.1-cp39-cp39-win32.whl", hash = "sha256:2aed4761809640f02e44e16b8b32c1a5dee5e80ea30a0ff0912158bde9c501f2"}, + {file = "coverage-6.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:9976fb0a5709988778ac9bc44f3d50fccd989987876dfd7716dee28beed0a9fa"}, + {file = "coverage-6.3.1-pp36.pp37.pp38-none-any.whl", hash = "sha256:463e52616ea687fd323888e86bf25e864a3cc6335a043fad6bbb037dbf49bbe2"}, + {file = "coverage-6.3.1.tar.gz", hash = "sha256:6c3f6158b02ac403868eea390930ae64e9a9a2a5bbfafefbb920d29258d9f2f8"}, ] docutils = [ {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, {file = "docutils-0.17.1.tar.gz", hash = 
"sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, ] -enum34 = [ - {file = "enum34-1.1.10-py2-none-any.whl", hash = "sha256:a98a201d6de3f2ab3db284e70a33b0f896fbf35f8086594e8c9e74b909058d53"}, - {file = "enum34-1.1.10-py3-none-any.whl", hash = "sha256:c3858660960c984d6ab0ebad691265180da2b43f07e061c0f8dca9ef3cffd328"}, - {file = "enum34-1.1.10.tar.gz", hash = "sha256:cce6a7477ed816bd2542d03d53db9f0db935dd013b70f336a95c73979289f248"}, -] execnet = [ {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"}, {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"}, ] faker = [ - {file = "Faker-11.0.0-py3-none-any.whl", hash = "sha256:8a90e8b015d9e2f5556293d817c3711481ca288b28b94f55ad6327cdc2cfdaad"}, - {file = "Faker-11.0.0.tar.gz", hash = "sha256:5bb6a3decfa08fa7d86691d3b6d48482d5fcfb4ccf24990c7ddced74c77c9b4b"}, + {file = "Faker-11.4.0-py3-none-any.whl", hash = "sha256:c9e8557368706a094b5caf52113dd21412f8c9f06fcf00acd08dacb1cf014ed4"}, + {file = "Faker-11.4.0.tar.gz", hash = "sha256:b548a1f48f6d95b6ce57b0523c78c6cfd48fdb786267e448403f242c2678c4c8"}, ] filelock = [ - {file = "filelock-3.4.0-py3-none-any.whl", hash = "sha256:2e139a228bcf56dd8b2274a65174d005c4a6b68540ee0bdbb92c76f43f29f7e8"}, - {file = "filelock-3.4.0.tar.gz", hash = "sha256:93d512b32a23baf4cac44ffd72ccf70732aeff7b8050fcaf6d3ec406d954baf4"}, + {file = "filelock-3.4.2-py3-none-any.whl", hash = "sha256:cf0fc6a2f8d26bd900f19bf33915ca70ba4dd8c56903eeb14e1e7a2fd7590146"}, + {file = "filelock-3.4.2.tar.gz", hash = "sha256:38b4f4c989f9d06d44524df1b24bd19e167d851f19b50bf3e3559952dddc5b80"}, ] freezegun = [ {file = "freezegun-1.1.0-py2.py3-none-any.whl", hash = "sha256:2ae695f7eb96c62529f03a038461afe3c692db3465e215355e1bb4b0ab408712"}, @@ -1052,14 +801,6 @@ freezegun = [ future = [ {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, ] -futures = [ - {file = "futures-3.1.1-py2-none-any.whl", hash = "sha256:c4884a65654a7c45435063e14ae85280eb1f111d94e542396717ba9828c4337f"}, - {file = "futures-3.1.1-py3-none-any.whl", hash = "sha256:3a44f286998ae64f0cc083682fcfec16c406134a81a589a5de445d7bb7c2751b"}, - {file = "futures-3.1.1.tar.gz", hash = "sha256:51ecb45f0add83c806c68e4b06106f90db260585b25ef2abfcda0bd95c0132fd"}, -] -futures-then = [ - {file = "futures_then-0.1.1.tar.gz", hash = "sha256:976f684e5b336a1a13c8c2f342e28352519febf6591175aeb3bbc5ce60dde04a"}, -] idna = [ {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, @@ -1069,8 +810,8 @@ imagesize = [ {file = "imagesize-1.3.0.tar.gz", hash = "sha256:cd1750d452385ca327479d45b64d9c7729ecf0b3969a58148298c77092261f9d"}, ] importlib-metadata = [ - {file = "importlib_metadata-4.8.2-py3-none-any.whl", hash = "sha256:53ccfd5c134223e497627b9815d5030edf77d2ed573922f7a0b8f8bb81a1c100"}, - {file = "importlib_metadata-4.8.2.tar.gz", hash = "sha256:75bdec14c397f528724c1bfd9709d660b33a4d2e77387a3358f20b848bb5e5fb"}, + {file = "importlib_metadata-4.11.0-py3-none-any.whl", hash = "sha256:6affcdb3aec542dd98df8211e730bba6c5f2bec8288d47bacacde898f548c9ad"}, + {file = "importlib_metadata-4.11.0.tar.gz", hash = "sha256:9e5e553bbba1843cb4a00823014b907616be46ee503d2b9ba001d214a8da218f"}, ] importlib-resources = [ {file = 
"importlib_resources-5.4.0-py3-none-any.whl", hash = "sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45"}, @@ -1080,126 +821,28 @@ iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] -ipaddress = [ - {file = "ipaddress-1.0.23-py2.py3-none-any.whl", hash = "sha256:6e0f4a39e66cb5bb9a137b00276a2eff74f93b71dcbdad6f10ff7df9d3557fcc"}, - {file = "ipaddress-1.0.23.tar.gz", hash = "sha256:b7f8e0369580bb4a24d5ba1d7cc29660a4a6987763faf1d8a8046830e020e7e2"}, -] jinja2 = [ {file = "Jinja2-3.0.3-py3-none-any.whl", hash = "sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8"}, {file = "Jinja2-3.0.3.tar.gz", hash = "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7"}, ] -jsoncomment = [ - {file = "jsoncomment-0.3.3-py3-none-any.whl", hash = "sha256:ec5e16b609724f60b33b86509cad56aa1dd4ccdf30ebe936d2f0d0daa8c43151"}, - {file = "jsoncomment-0.3.3.tar.gz", hash = "sha256:98093db601c735804b8da6d999f117727fa4cd31c0fa18cfde3cc993d27e5a1e"}, -] jsonschema = [ - {file = "jsonschema-4.3.3-py3-none-any.whl", hash = "sha256:eb7a69801beb7325653aa8fd373abbf9ff8f85b536ab2812e5e8287b522fb6a2"}, - {file = "jsonschema-4.3.3.tar.gz", hash = "sha256:f210d4ce095ed1e8af635d15c8ee79b586f656ab54399ba87b8ab87e5bff0ade"}, + {file = "jsonschema-4.4.0-py3-none-any.whl", hash = "sha256:77281a1f71684953ee8b3d488371b162419767973789272434bbc3f29d9c8823"}, + {file = "jsonschema-4.4.0.tar.gz", hash = "sha256:636694eb41b3535ed608fe04129f26542b59ed99808b4f688aa32dcf55317a83"}, ] junitparser = [ - {file = "junitparser-2.3.0-py2.py3-none-any.whl", hash = "sha256:e8c558e8de0e2a4542f73821e2f84a8a5c102c6f07645f4dbe8f53dc05c8d94c"}, - {file = "junitparser-2.3.0.tar.gz", hash = "sha256:604f58a47dfe3a6da26fa0136461eec7730a3abb31d05c38fa20763154828c67"}, -] -langdetect = [ - {file = "langdetect-1.0.9-py2-none-any.whl", hash = "sha256:7cbc0746252f19e76f77c0b1690aadf01963be835ef0cd4b56dddf2a8f1dfc2a"}, - {file = "langdetect-1.0.9.tar.gz", hash = "sha256:cbc1fef89f8d062739774bd51eda3da3274006b3661d199c2655f6b3f6d605a0"}, + {file = "junitparser-2.4.2-py2.py3-none-any.whl", hash = "sha256:c2d9c83bef4712da18090935cbed430e6041915986c097f7a58f35797ddf6065"}, + {file = "junitparser-2.4.2.tar.gz", hash = "sha256:35f8f5df8fe988435c3d378befff98fa8b44724427f1ad11c7f510fa3830f50f"}, ] lovely-pytest-docker = [ - {file = "lovely-pytest-docker-0.2.1.tar.gz", hash = "sha256:944ecf259c2144e1857e9ff1ac79cfaef88bd6aabf4e99cc16d64783345826f0"}, -] -lxml = [ - {file = "lxml-4.6.5-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:abcf7daa5ebcc89328326254f6dd6d566adb483d4d00178892afd386ab389de2"}, - {file = "lxml-4.6.5-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3884476a90d415be79adfa4e0e393048630d0d5bcd5757c4c07d8b4b00a1096b"}, - {file = "lxml-4.6.5-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:add017c5bd6b9ec3a5f09248396b6ee2ce61c5621f087eb2269c813cd8813808"}, - {file = "lxml-4.6.5-cp27-cp27m-win32.whl", hash = "sha256:a702005e447d712375433ed0499cb6e1503fadd6c96a47f51d707b4d37b76d3c"}, - {file = "lxml-4.6.5-cp27-cp27m-win_amd64.whl", hash = "sha256:da07c7e7fc9a3f40446b78c54dbba8bfd5c9100dfecb21b65bfe3f57844f5e71"}, - {file = "lxml-4.6.5-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:a708c291900c40a7ecf23f1d2384ed0bc0604e24094dd13417c7e7f8f7a50d93"}, - {file = "lxml-4.6.5-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f33d8efb42e4fc2b31b3b4527940b25cdebb3026fb56a80c1c1c11a4271d2352"}, - {file = "lxml-4.6.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:f6befb83bca720b71d6bd6326a3b26e9496ae6649e26585de024890fe50f49b8"}, - {file = "lxml-4.6.5-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:59d77bfa3bea13caee95bc0d3f1c518b15049b97dd61ea8b3d71ce677a67f808"}, - {file = "lxml-4.6.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:68a851176c931e2b3de6214347b767451243eeed3bea34c172127bbb5bf6c210"}, - {file = "lxml-4.6.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a7790a273225b0c46e5f859c1327f0f659896cc72eaa537d23aa3ad9ff2a1cc1"}, - {file = "lxml-4.6.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6548fc551de15f310dd0564751d9dc3d405278d45ea9b2b369ed1eccf142e1f5"}, - {file = "lxml-4.6.5-cp310-cp310-win32.whl", hash = "sha256:dc8a0dbb2a10ae8bb609584f5c504789f0f3d0d81840da4849102ec84289f952"}, - {file = "lxml-4.6.5-cp310-cp310-win_amd64.whl", hash = "sha256:1ccbfe5d17835db906f2bab6f15b34194db1a5b07929cba3cf45a96dbfbfefc0"}, - {file = "lxml-4.6.5-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca9a40497f7e97a2a961c04fa8a6f23d790b0521350a8b455759d786b0bcb203"}, - {file = "lxml-4.6.5-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e5b4b0d9440046ead3bd425eb2b852499241ee0cef1ae151038e4f87ede888c4"}, - {file = "lxml-4.6.5-cp35-cp35m-win32.whl", hash = "sha256:87f8f7df70b90fbe7b49969f07b347e3f978f8bd1046bb8ecae659921869202b"}, - {file = "lxml-4.6.5-cp35-cp35m-win_amd64.whl", hash = "sha256:ce52aad32ec6e46d1a91ff8b8014a91538800dd533914bfc4a82f5018d971408"}, - {file = "lxml-4.6.5-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:8021eeff7fabde21b9858ed058a8250ad230cede91764d598c2466b0ba70db8b"}, - {file = "lxml-4.6.5-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:cab343b265e38d4e00649cbbad9278b734c5715f9bcbb72c85a1f99b1a58e19a"}, - {file = "lxml-4.6.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:3534d7c468c044f6aef3c0aff541db2826986a29ea73f2ca831f5d5284d9b570"}, - {file = "lxml-4.6.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdb98f4c9e8a1735efddfaa995b0c96559792da15d56b76428bdfc29f77c4cdb"}, - {file = "lxml-4.6.5-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:5ea121cb66d7e5cb396b4c3ca90471252b94e01809805cfe3e4e44be2db3a99c"}, - {file = "lxml-4.6.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:121fc6f71c692b49af6c963b84ab7084402624ffbe605287da362f8af0668ea3"}, - {file = "lxml-4.6.5-cp36-cp36m-win32.whl", hash = "sha256:1a2a7659b8eb93c6daee350a0d844994d49245a0f6c05c747f619386fb90ba04"}, - {file = "lxml-4.6.5-cp36-cp36m-win_amd64.whl", hash = "sha256:2f77556266a8fe5428b8759fbfc4bd70be1d1d9c9b25d2a414f6a0c0b0f09120"}, - {file = "lxml-4.6.5-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:558485218ee06458643b929765ac1eb04519ca3d1e2dcc288517de864c747c33"}, - {file = "lxml-4.6.5-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:ba0006799f21d83c3717fe20e2707a10bbc296475155aadf4f5850f6659b96b9"}, - {file = 
"lxml-4.6.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:916d457ad84e05b7db52700bad0a15c56e0c3000dcaf1263b2fb7a56fe148996"}, - {file = "lxml-4.6.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c580c2a61d8297a6e47f4d01f066517dbb019be98032880d19ece7f337a9401d"}, - {file = "lxml-4.6.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a21b78af7e2e13bec6bea12fc33bc05730197674f3e5402ce214d07026ccfebd"}, - {file = "lxml-4.6.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:46515773570a33eae13e451c8fcf440222ef24bd3b26f40774dd0bd8b6db15b2"}, - {file = "lxml-4.6.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:124f09614f999551ac65e5b9875981ce4b66ac4b8e2ba9284572f741935df3d9"}, - {file = "lxml-4.6.5-cp37-cp37m-win32.whl", hash = "sha256:b4015baed99d046c760f09a4c59d234d8f398a454380c3cf0b859aba97136090"}, - {file = "lxml-4.6.5-cp37-cp37m-win_amd64.whl", hash = "sha256:12ae2339d32a2b15010972e1e2467345b7bf962e155671239fba74c229564b7f"}, - {file = "lxml-4.6.5-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:76b6c296e4f7a1a8a128aec42d128646897f9ae9a700ef6839cdc9b3900db9b5"}, - {file = "lxml-4.6.5-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:534032a5ceb34bba1da193b7d386ac575127cc39338379f39a164b10d97ade89"}, - {file = "lxml-4.6.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:60aeb14ff9022d2687ef98ce55f6342944c40d00916452bb90899a191802137a"}, - {file = "lxml-4.6.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9801bcd52ac9c795a7d81ea67471a42cffe532e46cfb750cd5713befc5c019c0"}, - {file = "lxml-4.6.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3b95fb7e6f9c2f53db88f4642231fc2b8907d854e614710996a96f1f32018d5c"}, - {file = "lxml-4.6.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:642eb4cabd997c9b949a994f9643cd8ae00cf4ca8c5cd9c273962296fadf1c44"}, - {file = "lxml-4.6.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:af4139172ff0263d269abdcc641e944c9de4b5d660894a3ec7e9f9db63b56ac9"}, - {file = "lxml-4.6.5-cp38-cp38-win32.whl", hash = "sha256:57cf05466917e08f90e323f025b96f493f92c0344694f5702579ab4b7e2eb10d"}, - {file = "lxml-4.6.5-cp38-cp38-win_amd64.whl", hash = "sha256:4f415624cf8b065796649a5e4621773dc5c9ea574a944c76a7f8a6d3d2906b41"}, - {file = "lxml-4.6.5-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:7679bb6e4d9a3978a46ab19a3560e8d2b7265ef3c88152e7fdc130d649789887"}, - {file = "lxml-4.6.5-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c34234a1bc9e466c104372af74d11a9f98338a3f72fae22b80485171a64e0144"}, - {file = "lxml-4.6.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4b9390bf973e3907d967b75be199cf1978ca8443183cf1e78ad80ad8be9cf242"}, - {file = "lxml-4.6.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fcc849b28f584ed1dbf277291ded5c32bb3476a37032df4a1d523b55faa5f944"}, - {file = "lxml-4.6.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:46f21f2600d001af10e847df9eb3b832e8a439f696c04891bcb8a8cedd859af9"}, - {file = "lxml-4.6.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:99cf827f5a783038eb313beee6533dddb8bdb086d7269c5c144c1c952d142ace"}, - {file = 
"lxml-4.6.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:925174cafb0f1179a7fd38da90302555d7445e34c9ece68019e53c946be7f542"}, - {file = "lxml-4.6.5-cp39-cp39-win32.whl", hash = "sha256:12d8d6fe3ddef629ac1349fa89a638b296a34b6529573f5055d1cb4e5245f73b"}, - {file = "lxml-4.6.5-cp39-cp39-win_amd64.whl", hash = "sha256:a52e8f317336a44836475e9c802f51c2dc38d612eaa76532cb1d17690338b63b"}, - {file = "lxml-4.6.5-pp37-pypy37_pp73-macosx_10_14_x86_64.whl", hash = "sha256:11ae552a78612620afd15625be9f1b82e3cc2e634f90d6b11709b10a100cba59"}, - {file = "lxml-4.6.5-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:473701599665d874919d05bb33b56180447b3a9da8d52d6d9799f381ce23f95c"}, - {file = "lxml-4.6.5-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:7f00cc64b49d2ef19ddae898a3def9dd8fda9c3d27c8a174c2889ee757918e71"}, - {file = "lxml-4.6.5-pp38-pypy38_pp73-macosx_10_14_x86_64.whl", hash = "sha256:73e8614258404b2689a26cb5d002512b8bc4dfa18aca86382f68f959aee9b0c8"}, - {file = "lxml-4.6.5-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:ff44de36772b05c2eb74f2b4b6d1ae29b8f41ed5506310ce1258d44826ee38c1"}, - {file = "lxml-4.6.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:5d5254c815c186744c8f922e2ce861a2bdeabc06520b4b30b2f7d9767791ce6e"}, - {file = "lxml-4.6.5.tar.gz", hash = "sha256:6e84edecc3a82f90d44ddee2ee2a2630d4994b8471816e226d2b771cda7ac4ca"}, -] -mako = [ - {file = "Mako-1.1.6-py2.py3-none-any.whl", hash = "sha256:afaf8e515d075b22fad7d7b8b30e4a1c90624ff2f3733a06ec125f5a5f043a57"}, - {file = "Mako-1.1.6.tar.gz", hash = "sha256:4e9e345a41924a954251b95b4b28e14a301145b544901332e658907a7464b6b2"}, -] -markdown = [ - {file = "Markdown-3.3.6-py3-none-any.whl", hash = "sha256:9923332318f843411e9932237530df53162e29dc7a4e2b91e35764583c46c9a3"}, - {file = "Markdown-3.3.6.tar.gz", hash = "sha256:76df8ae32294ec39dcf89340382882dfa12975f87f45c3ed1ecdb1e8cefc7006"}, + {file = "lovely-pytest-docker-0.3.0.tar.gz", hash = "sha256:b8333ae52faefc92ffaeed3e02d7664b7b02df999f621f1635cb6e3fbb7e145c"}, ] markupsafe = [ - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, @@ -1208,27 +851,14 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, - {file = 
"MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, @@ -1238,12 +868,6 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, {file 
= "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, @@ -1252,64 +876,6 @@ packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] -painter = [ - {file = "painter-0.3.1.tar.gz", hash = "sha256:3373463d584ba9bbbb23d570c37893e7930b93704fe1149df88e9d2ef906fc88"}, -] -pillow = [ - {file = "Pillow-8.3.2-cp310-cp310-macosx_10_10_universal2.whl", hash = "sha256:c691b26283c3a31594683217d746f1dad59a7ae1d4cfc24626d7a064a11197d4"}, - {file = "Pillow-8.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f514c2717012859ccb349c97862568fdc0479aad85b0270d6b5a6509dbc142e2"}, - {file = "Pillow-8.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be25cb93442c6d2f8702c599b51184bd3ccd83adebd08886b682173e09ef0c3f"}, - {file = "Pillow-8.3.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d675a876b295afa114ca8bf42d7f86b5fb1298e1b6bb9a24405a3f6c8338811c"}, - {file = "Pillow-8.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59697568a0455764a094585b2551fd76bfd6b959c9f92d4bdec9d0e14616303a"}, - {file = "Pillow-8.3.2-cp310-cp310-win32.whl", hash = "sha256:2d5e9dc0bf1b5d9048a94c48d0813b6c96fccfa4ccf276d9c36308840f40c228"}, - {file = "Pillow-8.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:11c27e74bab423eb3c9232d97553111cc0be81b74b47165f07ebfdd29d825875"}, - {file = "Pillow-8.3.2-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:11eb7f98165d56042545c9e6db3ce394ed8b45089a67124298f0473b29cb60b2"}, - {file = "Pillow-8.3.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f23b2d3079522fdf3c09de6517f625f7a964f916c956527bed805ac043799b8"}, - {file = 
"Pillow-8.3.2-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19ec4cfe4b961edc249b0e04b5618666c23a83bc35842dea2bfd5dfa0157f81b"}, - {file = "Pillow-8.3.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5a31c07cea5edbaeb4bdba6f2b87db7d3dc0f446f379d907e51cc70ea375629"}, - {file = "Pillow-8.3.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:15ccb81a6ffc57ea0137f9f3ac2737ffa1d11f786244d719639df17476d399a7"}, - {file = "Pillow-8.3.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:8f284dc1695caf71a74f24993b7c7473d77bc760be45f776a2c2f4e04c170550"}, - {file = "Pillow-8.3.2-cp36-cp36m-win32.whl", hash = "sha256:4abc247b31a98f29e5224f2d31ef15f86a71f79c7f4d2ac345a5d551d6393073"}, - {file = "Pillow-8.3.2-cp36-cp36m-win_amd64.whl", hash = "sha256:a048dad5ed6ad1fad338c02c609b862dfaa921fcd065d747194a6805f91f2196"}, - {file = "Pillow-8.3.2-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:06d1adaa284696785375fa80a6a8eb309be722cf4ef8949518beb34487a3df71"}, - {file = "Pillow-8.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd24054aaf21e70a51e2a2a5ed1183560d3a69e6f9594a4bfe360a46f94eba83"}, - {file = "Pillow-8.3.2-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a330bf7014ee034046db43ccbb05c766aa9e70b8d6c5260bfc38d73103b0ba"}, - {file = "Pillow-8.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13654b521fb98abdecec105ea3fb5ba863d1548c9b58831dd5105bb3873569f1"}, - {file = "Pillow-8.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a1bd983c565f92779be456ece2479840ec39d386007cd4ae83382646293d681b"}, - {file = "Pillow-8.3.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4326ea1e2722f3dc00ed77c36d3b5354b8fb7399fb59230249ea6d59cbed90da"}, - {file = "Pillow-8.3.2-cp37-cp37m-win32.whl", hash = "sha256:085a90a99404b859a4b6c3daa42afde17cb3ad3115e44a75f0d7b4a32f06a6c9"}, - {file = "Pillow-8.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:18a07a683805d32826c09acfce44a90bf474e6a66ce482b1c7fcd3757d588df3"}, - {file = "Pillow-8.3.2-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:4e59e99fd680e2b8b11bbd463f3c9450ab799305d5f2bafb74fefba6ac058616"}, - {file = "Pillow-8.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4d89a2e9219a526401015153c0e9dd48319ea6ab9fe3b066a20aa9aee23d9fd3"}, - {file = "Pillow-8.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56fd98c8294f57636084f4b076b75f86c57b2a63a8410c0cd172bc93695ee979"}, - {file = "Pillow-8.3.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b11c9d310a3522b0fd3c35667914271f570576a0e387701f370eb39d45f08a4"}, - {file = "Pillow-8.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0412516dcc9de9b0a1e0ae25a280015809de8270f134cc2c1e32c4eeb397cf30"}, - {file = "Pillow-8.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bcb04ff12e79b28be6c9988f275e7ab69f01cc2ba319fb3114f87817bb7c74b6"}, - {file = "Pillow-8.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0b9911ec70731711c3b6ebcde26caea620cbdd9dcb73c67b0730c8817f24711b"}, - {file = "Pillow-8.3.2-cp38-cp38-win32.whl", hash = "sha256:ce2e5e04bb86da6187f96d7bab3f93a7877830981b37f0287dd6479e27a10341"}, - {file = "Pillow-8.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:35d27687f027ad25a8d0ef45dd5208ef044c588003cdcedf05afb00dbc5c2deb"}, - {file = 
"Pillow-8.3.2-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:04835e68ef12904bc3e1fd002b33eea0779320d4346082bd5b24bec12ad9c3e9"}, - {file = "Pillow-8.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:10e00f7336780ca7d3653cf3ac26f068fa11b5a96894ea29a64d3dc4b810d630"}, - {file = "Pillow-8.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cde7a4d3687f21cffdf5bb171172070bb95e02af448c4c8b2f223d783214056"}, - {file = "Pillow-8.3.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c3ff00110835bdda2b1e2b07f4a2548a39744bb7de5946dc8e95517c4fb2ca6"}, - {file = "Pillow-8.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35d409030bf3bd05fa66fb5fdedc39c521b397f61ad04309c90444e893d05f7d"}, - {file = "Pillow-8.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6bff50ba9891be0a004ef48828e012babaaf7da204d81ab9be37480b9020a82b"}, - {file = "Pillow-8.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7dbfbc0020aa1d9bc1b0b8bcf255a7d73f4ad0336f8fd2533fcc54a4ccfb9441"}, - {file = "Pillow-8.3.2-cp39-cp39-win32.whl", hash = "sha256:963ebdc5365d748185fdb06daf2ac758116deecb2277ec5ae98139f93844bc09"}, - {file = "Pillow-8.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:cc9d0dec711c914ed500f1d0d3822868760954dce98dfb0b7382a854aee55d19"}, - {file = "Pillow-8.3.2-pp36-pypy36_pp73-macosx_10_10_x86_64.whl", hash = "sha256:2c661542c6f71dfd9dc82d9d29a8386287e82813b0375b3a02983feac69ef864"}, - {file = "Pillow-8.3.2-pp36-pypy36_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:548794f99ff52a73a156771a0402f5e1c35285bd981046a502d7e4793e8facaa"}, - {file = "Pillow-8.3.2-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8b68f565a4175e12e68ca900af8910e8fe48aaa48fd3ca853494f384e11c8bcd"}, - {file = "Pillow-8.3.2-pp36-pypy36_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:838eb85de6d9307c19c655c726f8d13b8b646f144ca6b3771fa62b711ebf7624"}, - {file = "Pillow-8.3.2-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:feb5db446e96bfecfec078b943cc07744cc759893cef045aa8b8b6d6aaa8274e"}, - {file = "Pillow-8.3.2-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:fc0db32f7223b094964e71729c0361f93db43664dd1ec86d3df217853cedda87"}, - {file = "Pillow-8.3.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fd4fd83aa912d7b89b4b4a1580d30e2a4242f3936882a3f433586e5ab97ed0d5"}, - {file = "Pillow-8.3.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d0c8ebbfd439c37624db98f3877d9ed12c137cadd99dde2d2eae0dab0bbfc355"}, - {file = "Pillow-8.3.2-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6cb3dd7f23b044b0737317f892d399f9e2f0b3a02b22b2c692851fb8120d82c6"}, - {file = "Pillow-8.3.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a66566f8a22561fc1a88dc87606c69b84fa9ce724f99522cf922c801ec68f5c1"}, - {file = "Pillow-8.3.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ce651ca46d0202c302a535d3047c55a0131a720cf554a578fc1b8a2aff0e7d96"}, - {file = "Pillow-8.3.2.tar.gz", hash = "sha256:dde3f3ed8d00c72631bc19cbfff8ad3b6215062a5eed402381ad365f82f0c18c"}, -] pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, @@ -1319,35 +885,35 @@ 
py = [ {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] pygments = [ - {file = "Pygments-2.10.0-py3-none-any.whl", hash = "sha256:b8e67fe6af78f492b3c4b3e2970c0624cbf08beb1e493b2c99b9fa1b67a20380"}, - {file = "Pygments-2.10.0.tar.gz", hash = "sha256:f398865f7eb6874156579fdf36bc840a03cab64d1cde9e93d68f46a425ec52c6"}, + {file = "Pygments-2.11.2-py3-none-any.whl", hash = "sha256:44238f1b60a76d78fc8ca0528ee429702aae011c265fe6a8dd8b63049ae41c65"}, + {file = "Pygments-2.11.2.tar.gz", hash = "sha256:4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a"}, ] pyparsing = [ - {file = "pyparsing-3.0.6-py3-none-any.whl", hash = "sha256:04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4"}, - {file = "pyparsing-3.0.6.tar.gz", hash = "sha256:d9bdec0013ef1eb5a84ab39a3b3868911598afa494f5faa038647101504e2b81"}, + {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"}, + {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"}, ] pyrsistent = [ - {file = "pyrsistent-0.18.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f4c8cabb46ff8e5d61f56a037974228e978f26bfefce4f61a4b1ac0ba7a2ab72"}, - {file = "pyrsistent-0.18.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:da6e5e818d18459fa46fac0a4a4e543507fe1110e808101277c5a2b5bab0cd2d"}, - {file = "pyrsistent-0.18.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5e4395bbf841693eaebaa5bb5c8f5cdbb1d139e07c975c682ec4e4f8126e03d2"}, - {file = "pyrsistent-0.18.0-cp36-cp36m-win32.whl", hash = "sha256:527be2bfa8dc80f6f8ddd65242ba476a6c4fb4e3aedbf281dfbac1b1ed4165b1"}, - {file = "pyrsistent-0.18.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2aaf19dc8ce517a8653746d98e962ef480ff34b6bc563fc067be6401ffb457c7"}, - {file = "pyrsistent-0.18.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58a70d93fb79dc585b21f9d72487b929a6fe58da0754fa4cb9f279bb92369396"}, - {file = "pyrsistent-0.18.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4916c10896721e472ee12c95cdc2891ce5890898d2f9907b1b4ae0f53588b710"}, - {file = "pyrsistent-0.18.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:73ff61b1411e3fb0ba144b8f08d6749749775fe89688093e1efef9839d2dcc35"}, - {file = "pyrsistent-0.18.0-cp37-cp37m-win32.whl", hash = "sha256:b29b869cf58412ca5738d23691e96d8aff535e17390128a1a52717c9a109da4f"}, - {file = "pyrsistent-0.18.0-cp37-cp37m-win_amd64.whl", hash = "sha256:097b96f129dd36a8c9e33594e7ebb151b1515eb52cceb08474c10a5479e799f2"}, - {file = "pyrsistent-0.18.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:772e94c2c6864f2cd2ffbe58bb3bdefbe2a32afa0acb1a77e472aac831f83427"}, - {file = "pyrsistent-0.18.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:c1a9ff320fa699337e05edcaae79ef8c2880b52720bc031b219e5b5008ebbdef"}, - {file = "pyrsistent-0.18.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cd3caef37a415fd0dae6148a1b6957a8c5f275a62cca02e18474608cb263640c"}, - {file = "pyrsistent-0.18.0-cp38-cp38-win32.whl", hash = "sha256:e79d94ca58fcafef6395f6352383fa1a76922268fa02caa2272fff501c2fdc78"}, - {file = "pyrsistent-0.18.0-cp38-cp38-win_amd64.whl", hash = "sha256:a0c772d791c38bbc77be659af29bb14c38ced151433592e326361610250c605b"}, - {file = "pyrsistent-0.18.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d5ec194c9c573aafaceebf05fc400656722793dac57f254cd4741f3c27ae57b4"}, - {file = "pyrsistent-0.18.0-cp39-cp39-manylinux1_i686.whl", hash = 
"sha256:6b5eed00e597b5b5773b4ca30bd48a5774ef1e96f2a45d105db5b4ebb4bca680"}, - {file = "pyrsistent-0.18.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:48578680353f41dca1ca3dc48629fb77dfc745128b56fc01096b2530c13fd426"}, - {file = "pyrsistent-0.18.0-cp39-cp39-win32.whl", hash = "sha256:f3ef98d7b76da5eb19c37fda834d50262ff9167c65658d1d8f974d2e4d90676b"}, - {file = "pyrsistent-0.18.0-cp39-cp39-win_amd64.whl", hash = "sha256:404e1f1d254d314d55adb8d87f4f465c8693d6f902f67eb6ef5b4526dc58e6ea"}, - {file = "pyrsistent-0.18.0.tar.gz", hash = "sha256:773c781216f8c2900b42a7b638d5b517bb134ae1acbebe4d1e8f1f41ea60eb4b"}, + {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"}, + {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"}, + {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e"}, + {file = "pyrsistent-0.18.1-cp310-cp310-win32.whl", hash = "sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6"}, + {file = "pyrsistent-0.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-win32.whl", hash = "sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286"}, + {file = "pyrsistent-0.18.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6"}, + {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec"}, + {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c"}, + {file = "pyrsistent-0.18.1-cp38-cp38-win32.whl", hash = "sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca"}, + {file = "pyrsistent-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a"}, + {file = "pyrsistent-0.18.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5"}, + {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045"}, + {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c"}, + {file = 
"pyrsistent-0.18.1-cp39-cp39-win32.whl", hash = "sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc"}, + {file = "pyrsistent-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07"}, + {file = "pyrsistent-0.18.1.tar.gz", hash = "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"}, ] pytest = [ {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, @@ -1358,12 +924,12 @@ pytest-cov = [ {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, ] pytest-forked = [ - {file = "pytest-forked-1.3.0.tar.gz", hash = "sha256:6aa9ac7e00ad1a539c41bec6d21011332de671e938c7637378ec9710204e37ca"}, - {file = "pytest_forked-1.3.0-py2.py3-none-any.whl", hash = "sha256:dc4147784048e70ef5d437951728825a131b81714b398d5d52f17c7c144d8815"}, + {file = "pytest-forked-1.4.0.tar.gz", hash = "sha256:8b67587c8f98cbbadfdd804539ed5455b6ed03802203485dd2f53c1422d7440e"}, + {file = "pytest_forked-1.4.0-py3-none-any.whl", hash = "sha256:bbbb6717efc886b9d64537b41fb1497cfaf3c9601276be8da2cccfea5a3c8ad8"}, ] pytest-mock = [ - {file = "pytest-mock-3.6.1.tar.gz", hash = "sha256:40217a058c52a63f1042f0784f62009e976ba824c418cced42e88d5f40ab0e62"}, - {file = "pytest_mock-3.6.1-py3-none-any.whl", hash = "sha256:30c2f2cc9759e76eee674b81ea28c9f0b94f8f0445a1b87762cadf774f0df7e3"}, + {file = "pytest-mock-3.7.0.tar.gz", hash = "sha256:5112bd92cc9f186ee96e1a92efc84969ea494939c3aead39c50f421c4cc69534"}, + {file = "pytest_mock-3.7.0-py3-none-any.whl", hash = "sha256:6cff27cec936bf81dc5ee87f07132b807bcda51106b5ec4b90a04331cba76231"}, ] pytest-ordering = [ {file = "pytest-ordering-0.6.tar.gz", hash = "sha256:561ad653626bb171da78e682f6d39ac33bb13b3e272d406cd555adb6b006bda6"}, @@ -1378,70 +944,18 @@ python-dateutil = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, ] -python-magic = [ - {file = "python-magic-0.4.18.tar.gz", hash = "sha256:b757db2a5289ea3f1ced9e60f072965243ea43a2221430048fd8cacab17be0ce"}, - {file = "python_magic-0.4.18-py2.py3-none-any.whl", hash = "sha256:356efa93c8899047d1eb7d3eb91e871ba2f5b1376edbaf4cc305e3c872207355"}, -] pytz = [ {file = "pytz-2021.3-py2.py3-none-any.whl", hash = "sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c"}, {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, ] -pyyaml = [ - {file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"}, - {file = "PyYAML-5.4.1-cp27-cp27m-win32.whl", hash = "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393"}, - {file = "PyYAML-5.4.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8"}, - {file = "PyYAML-5.4.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185"}, - {file = "PyYAML-5.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253"}, - {file = "PyYAML-5.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = 
"sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc"}, - {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347"}, - {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541"}, - {file = "PyYAML-5.4.1-cp36-cp36m-win32.whl", hash = "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5"}, - {file = "PyYAML-5.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df"}, - {file = "PyYAML-5.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018"}, - {file = "PyYAML-5.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63"}, - {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa"}, - {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0"}, - {file = "PyYAML-5.4.1-cp37-cp37m-win32.whl", hash = "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b"}, - {file = "PyYAML-5.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf"}, - {file = "PyYAML-5.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46"}, - {file = "PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb"}, - {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247"}, - {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc"}, - {file = "PyYAML-5.4.1-cp38-cp38-win32.whl", hash = "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc"}, - {file = "PyYAML-5.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696"}, - {file = "PyYAML-5.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77"}, - {file = "PyYAML-5.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183"}, - {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122"}, - {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6"}, - {file = "PyYAML-5.4.1-cp39-cp39-win32.whl", hash = "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10"}, - {file = "PyYAML-5.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db"}, - {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, -] -regex = [ - {file = "regex-2019.06.08-cp27-none-win32.whl", hash = "sha256:38e6486c7e14683cd1b17a4218760f0ea4c015633cf1b06f7c190fb882a51ba7"}, - {file = "regex-2019.06.08-cp27-none-win_amd64.whl", hash = "sha256:80dde4ff10b73b823da451687363cac93dd3549e059d2dc19b72a02d048ba5aa"}, - {file = 
"regex-2019.06.08-cp35-none-win32.whl", hash = "sha256:2948310c01535ccb29bb600dd033b07b91f36e471953889b7f3a1e66b39d0c19"}, - {file = "regex-2019.06.08-cp35-none-win_amd64.whl", hash = "sha256:1c70ccb8bf4ded0cbe53092e9f56dcc9d6b0efcf6e80b6ef9b0ece8a557d6635"}, - {file = "regex-2019.06.08-cp36-none-win32.whl", hash = "sha256:2ab13db0411cb308aa590d33c909ea4efeced40188d8a4a7d3d5970657fe73bc"}, - {file = "regex-2019.06.08-cp36-none-win_amd64.whl", hash = "sha256:ca4f47131af28ef168ff7c80d4b4cad019cb4cabb5fa26143f43aa3dbd60389c"}, - {file = "regex-2019.06.08-cp37-none-win32.whl", hash = "sha256:dd4e8924915fa748e128864352875d3d0be5f4597ab1b1d475988b8e3da10dd7"}, - {file = "regex-2019.06.08-cp37-none-win_amd64.whl", hash = "sha256:f2c65530255e4010a5029eb11138f5ecd5aa70363f57a3444d83b3253b0891be"}, - {file = "regex-2019.06.08-cp38-none-win32.whl", hash = "sha256:b98e5876ca1e63b41c4aa38d7d5cc04a736415d4e240e9ae7ebc4f780083c7d5"}, - {file = "regex-2019.06.08-cp38-none-win_amd64.whl", hash = "sha256:cf7838110d3052d359da527372666429b9485ab739286aa1a11ed482f037a88c"}, - {file = "regex-2019.06.08.tar.gz", hash = "sha256:84daedefaa56320765e9c4d43912226d324ef3cc929f4d75fa95f8c579a08211"}, -] requests = [ - {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"}, - {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"}, + {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, + {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, ] requests-mock = [ {file = "requests-mock-1.9.3.tar.gz", hash = "sha256:8d72abe54546c1fc9696fa1516672f1031d72a55a1d66c85184f972a24ba0eba"}, {file = "requests_mock-1.9.3-py2.py3-none-any.whl", hash = "sha256:0a2d38a117c08bb78939ec163522976ad59a6b7fdd82b709e23bb98004a44970"}, ] -semver = [ - {file = "semver-2.13.0-py2.py3-none-any.whl", hash = "sha256:ced8b23dceb22134307c1b8abfa523da14198793d9787ac838e70e29e77458d4"}, - {file = "semver-2.13.0.tar.gz", hash = "sha256:fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f"}, -] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, @@ -1450,13 +964,9 @@ snowballstemmer = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] -soupsieve = [ - {file = "soupsieve-2.3.1-py3-none-any.whl", hash = "sha256:1a3cca2617c6b38c0343ed661b1fa5de5637f257d4fe22bd9f1338010a1efefb"}, - {file = "soupsieve-2.3.1.tar.gz", hash = "sha256:b8d49b1cd4f037c7082a9683dfa1801aa2597fb11c3a1155b7a5b94829b4f1f9"}, -] sphinx = [ - {file = "Sphinx-4.3.1-py3-none-any.whl", hash = "sha256:048dac56039a5713f47a554589dc98a442b39226a2b9ed7f82797fcb2fe9253f"}, - {file = "Sphinx-4.3.1.tar.gz", hash = "sha256:32a5b3e9a1b176cc25ed048557d4d3d01af635e6b76c5bc7a43b0a34447fbd45"}, + {file = "Sphinx-4.4.0-py3-none-any.whl", hash = "sha256:5da895959511473857b6d0200f56865ed62c31e8f82dd338063b84ec022701fe"}, + {file = "Sphinx-4.4.0.tar.gz", hash = 
"sha256:6caad9786055cb1fa22b4a365c1775816b876f91966481765d7d50e9f0dd35cc"}, ] sphinx-panels = [ {file = "sphinx-panels-0.6.0.tar.gz", hash = "sha256:d36dcd26358117e11888f7143db4ac2301ebe90873ac00627bf1fe526bf0f058"}, @@ -1490,9 +1000,6 @@ sphinxcontrib-serializinghtml = [ {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, ] -splunk-appinspect = [ - {file = "splunk-appinspect-2.12.0.tar.gz", hash = "sha256:9f5f88cbe62fccc946a932484d45f1ed22f8a18420624568177f039121f5d2d9"}, -] splunk-sdk = [ {file = "splunk-sdk-1.6.18.tar.gz", hash = "sha256:edc0959786f5dcab225ba98633c310dbf7584977849f6c2152a0e5090b5e2561"}, ] @@ -1505,18 +1012,18 @@ toml = [ {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] tomli = [ - {file = "tomli-1.2.2-py3-none-any.whl", hash = "sha256:f04066f68f5554911363063a30b108d2b5a5b1a010aa8b6132af78489fe3aade"}, - {file = "tomli-1.2.2.tar.gz", hash = "sha256:c6ce0015eb38820eaf32b5db832dbc26deb3dd427bd5f6556cf0acac2c214fee"}, + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] typing-extensions = [ - {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, - {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"}, + {file = "typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"}, + {file = "typing_extensions-4.1.1.tar.gz", hash = "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"}, ] urllib3 = [ - {file = "urllib3-1.26.7-py2.py3-none-any.whl", hash = "sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"}, - {file = "urllib3-1.26.7.tar.gz", hash = "sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece"}, + {file = "urllib3-1.26.8-py2.py3-none-any.whl", hash = "sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed"}, + {file = "urllib3-1.26.8.tar.gz", hash = "sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c"}, ] zipp = [ - {file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"}, - {file = "zipp-3.6.0.tar.gz", hash = "sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"}, + {file = "zipp-3.7.0-py3-none-any.whl", hash = "sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375"}, + {file = "zipp-3.7.0.tar.gz", hash = "sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d"}, ] diff --git a/pyproject.toml b/pyproject.toml index 8980eadca..2304ccb53 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,7 +36,6 @@ python = "^3.7" pytest = ">5.4.0,<6.3" splunk-sdk = "^1.6" requests = "^2" -splunk_appinspect = "^2" jsonschema = "^4.2.1" faker = ">=4.1,<12.0" pytest-xdist = "*" @@ -65,7 +64,6 @@ pytest11 = { plugin = "pytest_splunk_addon.plugin", "splunk" = "pytest_splunk_ad [tool.poetry.scripts] cim-report = 'pytest_splunk_addon.standard_lib.utilities.junit_parser:main' 
-generate-indextime-conf = 'pytest_splunk_addon.standard_lib.utilities.create_new_eventgen:main' cim-field-report = 'pytest_splunk_addon.tools.cim_field_report:main' [build-system] diff --git a/pytest_splunk_addon/splunk.py b/pytest_splunk_addon/splunk.py index 19671604c..75b45d6c8 100644 --- a/pytest_splunk_addon/splunk.py +++ b/pytest_splunk_addon/splunk.py @@ -16,7 +16,6 @@ # -*- coding: utf-8 -*- """ Module usage: -- splunk_appinspect: To parse the configuration files from Add-on package - helmut : To connect to a Splunk instance. source: splunk-sdk - helmut_lib: Provides various Utility functions to search on Splunk. Source: splunk-sdk """ diff --git a/pytest_splunk_addon/standard_lib/addon_parser/__init__.py b/pytest_splunk_addon/standard_lib/addon_parser/__init__.py index de13eeeec..dcf97bbf0 100644 --- a/pytest_splunk_addon/standard_lib/addon_parser/__init__.py +++ b/pytest_splunk_addon/standard_lib/addon_parser/__init__.py @@ -19,14 +19,10 @@ parse the knowledge objects from an Add-on's configuration files Supports: fields from props & transforms, tags, eventtypes - -Dependencies: - splunk_appinspect.App: To parse the configuration files """ import os import re import logging -from splunk_appinspect import App from .fields import convert_to_fields, Field from .transforms_parser import TransformsParser @@ -49,25 +45,15 @@ class AddonParser(object): def __init__(self, splunk_app_path): self.splunk_app_path = splunk_app_path - LOGGER.info( - f"Initializing the splunk_appinspect.App from path={splunk_app_path}" - ) - self._app = None self._props_parser = None self._tags_parser = None self._eventtype_parser = None self._savedsearch_parser = None - @property - def app(self): - if not self._app: - self._app = App(self.splunk_app_path, python_analyzer_enable=False) - return self._app - @property def props_parser(self): if not self._props_parser: - self._props_parser = PropsParser(self.splunk_app_path, self.app) + self._props_parser = PropsParser(self.splunk_app_path) return self._props_parser @property @@ -79,13 +65,13 @@ def tags_parser(self): @property def eventtype_parser(self): if not self._eventtype_parser: - self._eventtype_parser = EventTypeParser(self.splunk_app_path, self.app) + self._eventtype_parser = EventTypeParser(self.splunk_app_path) return self._eventtype_parser @property def savedsearch_parser(self): if not self._savedsearch_parser: - self._savedsearch_parser = SavedSearchParser(self.splunk_app_path, self.app) + self._savedsearch_parser = SavedSearchParser(self.splunk_app_path) return self._savedsearch_parser def get_props_fields(self): diff --git a/pytest_splunk_addon/standard_lib/addon_parser/eventtype_parser.py b/pytest_splunk_addon/standard_lib/addon_parser/eventtype_parser.py index c259d734d..524266a2a 100644 --- a/pytest_splunk_addon/standard_lib/addon_parser/eventtype_parser.py +++ b/pytest_splunk_addon/standard_lib/addon_parser/eventtype_parser.py @@ -13,11 +13,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -# -*- coding: utf-8 -*- """ Provides eventtypes.conf parsing mechanism """ +from typing import Dict +from typing import Generator +from typing import Optional import logging +import os + +import addonfactory_splunk_conf_parser_lib as conf_parser LOGGER = logging.getLogger("pytest-splunk-addon") @@ -28,26 +33,26 @@ class EventTypeParser(object): Args: splunk_app_path (str): Path of the Splunk app - app (splunk_appinspect.App): Object of Splunk app """ - def __init__(self, splunk_app_path, app): - self.app = app + def __init__(self, splunk_app_path: str): + self._conf_parser = conf_parser.TABConfigParser() self.splunk_app_path = splunk_app_path self._eventtypes = None @property - def eventtypes(self): - try: - if not self._eventtypes: - LOGGER.info("Parsing eventtypes.conf") - self._eventtypes = self.app.eventtypes_conf() + def eventtypes(self) -> Optional[Dict]: + if self._eventtypes is not None: return self._eventtypes - except OSError: - LOGGER.warning("eventtypes.conf not found.") - return None + eventtypes_conf_path = os.path.join( + self.splunk_app_path, "default", "eventtypes.conf" + ) + LOGGER.info("Parsing eventtypes.conf") + self._conf_parser.read(eventtypes_conf_path) + self._eventtypes = self._conf_parser.item_dict() + return self._eventtypes if self._eventtypes else None - def get_eventtypes(self): + def get_eventtypes(self) -> Optional[Generator]: """ Parse the App configuration files & yield eventtypes @@ -56,6 +61,6 @@ def get_eventtypes(self): """ if not self.eventtypes: return None - for eventtype_section in self.eventtypes.sects: - LOGGER.info("Parsing eventtype stanza=%s", eventtype_section) - yield {"stanza": eventtype_section} + for stanza_key in self.eventtypes.keys(): + LOGGER.info("Parsing eventtype stanza=%s", stanza_key) + yield {"stanza": stanza_key} diff --git a/pytest_splunk_addon/standard_lib/addon_parser/fields.py b/pytest_splunk_addon/standard_lib/addon_parser/fields.py index de4161c65..cf0874277 100644 --- a/pytest_splunk_addon/standard_lib/addon_parser/fields.py +++ b/pytest_splunk_addon/standard_lib/addon_parser/fields.py @@ -51,6 +51,15 @@ def __init__(self, field_json=None): def __str__(self): return str(self.name) + def __eq__(self, other: "Field"): + return self.__dict__ == other.__dict__ + + def __lt__(self, other: "Field"): + return self.name < other.name + + def __repr__(self): + return f"<Field name={self.name}; type={self.type}>" + def get_type(self): return self.type diff --git a/pytest_splunk_addon/standard_lib/addon_parser/props_parser.py b/pytest_splunk_addon/standard_lib/addon_parser/props_parser.py index ab98bd096..12e60a287 100644 --- a/pytest_splunk_addon/standard_lib/addon_parser/props_parser.py +++ b/pytest_splunk_addon/standard_lib/addon_parser/props_parser.py @@ -13,15 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # -# -*- coding: utf-8 -*- """ Provides props.conf parsing mechanism """ +from typing import Dict +from typing import Generator +from typing import Optional import logging +import os import re from itertools import product -from . import convert_to_fields, Field -from . 
import TransformsParser + +import addonfactory_splunk_conf_parser_lib as conf_parser + +from .fields import convert_to_fields +from .transforms_parser import TransformsParser LOGGER = logging.getLogger("pytest-splunk-addon") @@ -32,25 +38,23 @@ class PropsParser(object): Args: splunk_app_path (str): Path of the Splunk app - app (splunk_appinspect.App): Object of Splunk app """ - def __init__(self, splunk_app_path, app): - self.app = app + def __init__(self, splunk_app_path: str): + self._conf_parser = conf_parser.TABConfigParser() self.splunk_app_path = splunk_app_path self._props = None - self.transforms_parser = TransformsParser(self.splunk_app_path, self.app) + self.transforms_parser = TransformsParser(self.splunk_app_path) @property - def props(self): - try: - if not self._props: - LOGGER.info("Parsing props.conf") - self._props = self.app.props_conf() + def props(self) -> Optional[Dict]: + if self._props is not None: return self._props - except OSError: - LOGGER.warning("props.conf not found.") - return None + props_conf_path = os.path.join(self.splunk_app_path, "default", "props.conf") + LOGGER.info("Parsing props.conf") + self._conf_parser.read(props_conf_path) + self._props = self._conf_parser.item_dict() + return self._props if self._props else None def get_props_fields(self): """ @@ -59,40 +63,33 @@ def get_props_fields(self): Yields: generator of all the supported fields """ - for stanza_type, stanza_name, stanza in self.get_props_stanzas(): - for classname in stanza.options: - LOGGER.info( - "Parsing parameter=%s of stanza=%s", - classname, - stanza_name, - ) - props_property = stanza.options[classname] - if not re.match("REPORT", classname, re.IGNORECASE): - LOGGER.info("Trying to parse classname=%s", classname) - parsing_method = self.get_props_method(classname) + for stanza_type, stanza_name, stanza_values in self._get_props_stanzas(): + for key, value in stanza_values.items(): + LOGGER.info(f"Parsing parameter={key} of stanza={stanza_name}") + if not re.match("REPORT", key, re.IGNORECASE): + LOGGER.info(f"Trying to parse classname={key}") + parsing_method = self._get_props_method(key) if parsing_method: - field_list = list(parsing_method(props_property)) + field_list = list(parsing_method(key, value)) if field_list: yield { "stanza": stanza_name, "stanza_type": stanza_type, - "classname": classname, + "classname": key, "fields": field_list, } else: - for transform_stanza, fields in self.get_report_fields( - props_property - ): + for transform_stanza, fields in self._get_report_fields(key, value): field_list = list(fields) if field_list: yield { "stanza": stanza_name, "stanza_type": stanza_type, - "classname": f"{classname}::{transform_stanza}", + "classname": f"{key}::{transform_stanza}", "fields": field_list, } - def get_props_method(self, class_name): + def _get_props_method(self, class_name: str): """ Get the parsing method depending on classname @@ -103,19 +100,19 @@ def get_props_method(self, class_name): instance method to parse the property """ method_mapping = { - "EXTRACT": self.get_extract_fields, - "EVAL": self.get_eval_fields, - "FIELDALIAS": self.get_fieldalias_fields, - "LOOKUP": self.get_lookup_fields, + "EXTRACT": self._get_extract_fields, + "EVAL": self._get_eval_fields, + "FIELDALIAS": self._get_fieldalias_fields, + "LOOKUP": self._get_lookup_fields, } for each_type in method_mapping: if re.match(each_type, class_name, re.IGNORECASE): - LOGGER.info("Matched method of type=%s", each_type) + LOGGER.info(f"Matched method of type={each_type}") return 
method_mapping[each_type] else: - LOGGER.warning("No parser available for %s. Skipping...", class_name) + LOGGER.warning(f"No parser available for {class_name}. Skipping...") - def get_props_stanzas(self): + def _get_props_stanzas(self) -> Optional[Generator]: """ Parse the props.conf of the App & yield stanzas. For source with | (OR), it will return all combinations @@ -125,21 +122,20 @@ """ if not self.props: return - for stanza_name in self.props.sects: - stanza = self.props.sects[stanza_name] - if stanza.name.startswith("host::"): + for stanza_name, stanza_values in self.props.items(): + if stanza_name.startswith("host::"): LOGGER.warning("Host stanza is not supported. Skipping..") continue - if stanza.name.startswith("source::"): - LOGGER.info("Parsing Source based stanza: %s", stanza.name) + if stanza_name.startswith("source::"): + LOGGER.info(f"Parsing Source based stanza: {stanza_name}") for each_source in self.get_list_of_sources(stanza_name): - yield "source", each_source, stanza + yield "source", each_source, stanza_values else: - LOGGER.info("Parsing Sourcetype based stanza: %s", stanza.name) - yield "sourcetype", stanza.name, stanza + LOGGER.info(f"Parsing Sourcetype based stanza: {stanza_name}") + yield "sourcetype", stanza_name, stanza_values @staticmethod - def get_list_of_sources(source): + def get_list_of_sources(source: str) -> Generator: """ For source with | (OR), it will return all combinations. Uses itertools.product to list the combinations @@ -174,32 +170,8 @@ yield template.format(*each_permutation) LOGGER.debug("Found %d combinations", count) - def get_sourcetype_assignments(self, props_property): - """ - Get the sourcetype assigned for the source - - Example:: - - [source::/splunk/var/log/splunkd.log] - sourcetype = splunkd - - Args: - props_property (splunk_appinspect.configuration_file.ConfigurationSetting): - The configuration setting object of REPORT. - properties used: - - * name : key in the configuration settings - * value : value of the respective name in the configuration - - Yields: - the sourcetype field with possible value - """ - yield Field( - {"name": props_property.name, "expected_values": [props_property.value]} - ) - @convert_to_fields - def get_extract_fields(self, props_property): + @convert_to_fields + def _get_extract_fields(self, name: str, value: str): """ Returns the fields parsed from EXTRACT @@ -208,12 +180,8 @@ EXTRACT-one = regex with (?<field_name>.*) Args: - props_property (splunk_appinspect.configuration_file.ConfigurationSetting): - The configuration setting object of EXTRACT. - properties used: - - * name : key in the configuration settings - * value : value of the respective name in the configuration + name: key in the configuration settings + value: value of the respective name in the configuration Regex: Parse the fields from a regex. Examples, @@ -227,23 +195,21 @@ """ regex = r"\(\?P?(?:[<'])([^\>'\s]+)[\>']" fields_group = [] - for field in re.findall(regex, props_property.value): + for field in re.findall(regex, value): if not field.startswith(("_KEY_", "_VAL_")): fields_group.append(field) yield field # If SOURCE_KEY is used in EXTRACT, generate the test for the same.
regex_for_source_key = r"(?:(?i)in\s+(\w+))\s*$" - extract_source_key = re.search( - regex_for_source_key, props_property.value, re.MULTILINE - ) + extract_source_key = re.search(regex_for_source_key, value, re.MULTILINE) if extract_source_key: - LOGGER.info("Found a source key in %s", props_property.name) + LOGGER.info(f"Found a source key in {name}") yield extract_source_key.group(1) fields_group.insert(0, extract_source_key.group(1)) @convert_to_fields - def get_eval_fields(self, props_property): + def _get_eval_fields(self, name, value): """ Return the fields parsed from EVAL @@ -252,22 +218,18 @@ EVAL-action = if(isnull(action), "unknown", action) Args: - props_property (splunk_appinspect.configuration_file.ConfigurationSetting): - The configuration setting object of eval - properties used: - - * name : key in the configuration settings - * value : value of the respective name in the configuration + name: key in the configuration settings + value: value of the respective name in the configuration Yields: generator of fields """ regex = r"EVAL-(?P<FIELD>.*)" - if not props_property.value == "null()": - yield from re.findall(regex, props_property.name, re.IGNORECASE) + if not value == "null()": + yield from re.findall(regex, name, re.IGNORECASE) @convert_to_fields - def get_fieldalias_fields(self, props_property): + def _get_fieldalias_fields(self, name: str, value: str): """ Return the fields parsed from FIELDALIAS @@ -276,12 +238,8 @@ FIELDALIAS-class = source AS dest, sc2 AS dest2 Args: - props_property (splunk_appinspect.configuration_file.ConfigurationSetting): - The configuration setting object of FIELDALIAS - properties used: - - * name : key in the configuration settings - * value : value of the respective name in the configuration + name: key in the configuration settings + value: value of the respective name in the configuration Regex: Description: @@ -303,11 +261,10 @@ r"\s+(?i)(?:as(?:new)?)\s+" r"(\"(?:\\\"|[^\"])*\"|\'(?:\\\'|[^\'])*\'|[^\s,]+)" ) - fields_tuples = re.findall(regex, props_property.value, re.IGNORECASE) - # Convert list of tuples into list + fields_tuples = re.findall(regex, value, re.IGNORECASE) return list(set([item for t in fields_tuples for item in t])) - def get_report_fields(self, props_property): + def _get_report_fields(self, name: str, value: str): """ Returns the fields parsed from REPORT @@ -315,21 +272,14 @@ transforms.conf and returns the list Args: - props_property (splunk_appinspect.configuration_file.ConfigurationSetting): - - The configuration setting object of REPORT. 
- properties used: - - * name : key in the configuration settings - * value : value of the respective name in the configuration + name: key in the configuration settings + value: value of the respective name in the configuration Yields: generator of (transform_stanza ,fields) parsed from transforms.conf """ - transforms_itr = ( - each_stanza.strip() for each_stanza in props_property.value.split(",") - ) + transforms_itr = (each_stanza.strip() for each_stanza in value.split(",")) for transforms_section in transforms_itr: yield ( transforms_section, @@ -337,22 +287,18 @@ def get_report_fields(self, props_property): ) @convert_to_fields - def get_lookup_fields(self, props_property): + def _get_lookup_fields(self, name: str, value: str): """ Extracts the lookup fields Args: - props_property (splunk_appinspect.configuration_file.ConfigurationSetting): - The configuration setting object of eval - properties used: - - * name : key in the configuration settings - * value : value of the respective name in the configuration + name: key in the configuration settings + value: value of the respective name in the configuration Returns: List of lookup fields """ - parsed_fields = self.parse_lookup_str(props_property.value) + parsed_fields = self._parse_lookup(value) lookup_field_list = ( parsed_fields["input_fields"] + parsed_fields["output_fields"] ) @@ -361,7 +307,7 @@ def get_lookup_fields(self, props_property): if not parsed_fields["output_fields"]: LOGGER.info( "OUTPUT fields not found classname=%s. Parsing the lookup csv file", - props_property.name, + name, ) lookup_field_list += list( self.transforms_parser.get_lookup_csv_fields( @@ -370,7 +316,7 @@ def get_lookup_fields(self, props_property): ) return list(set(lookup_field_list)) - def parse_lookup_str(self, lookup_str): + def _parse_lookup(self, lookup: str): """ Get list of lookup fields by parsing the lookup string. If a field is aliased to another field, take the aliased field into consideration @@ -395,8 +341,8 @@ def parse_lookup_str(self, lookup_str): """ input_output_field_list = [] - lookup_stanza = lookup_str.split(" ")[0] - lookup_str = " ".join(lookup_str.split(" ")[1:]) + lookup_stanza = lookup.split(" ")[0] + lookup_str = " ".join(lookup.split(" ")[1:]) # 0: Take the left side of the OUTPUT as input fields # -1: Take the right side of the OUTPUT as output fields diff --git a/pytest_splunk_addon/standard_lib/addon_parser/savedsearches_parser.py b/pytest_splunk_addon/standard_lib/addon_parser/savedsearches_parser.py index c16130069..b56005c96 100644 --- a/pytest_splunk_addon/standard_lib/addon_parser/savedsearches_parser.py +++ b/pytest_splunk_addon/standard_lib/addon_parser/savedsearches_parser.py @@ -13,10 +13,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -# -*- coding: utf-8 -*- """ Provides savedsearches.conf parsing mechanism """ +from typing import Dict +from typing import Generator +from typing import Optional +import logging +import os + +import addonfactory_splunk_conf_parser_lib as conf_parser + +LOGGER = logging.getLogger("pytest-splunk-addon") class SavedSearchParser(object): @@ -25,24 +33,26 @@ class SavedSearchParser(object): Args: splunk_app_path (str): Path of the Splunk app - app (splunk_appinspect.App): Object of Splunk app """ - def __init__(self, splunk_app_path, app): - self.app = app + def __init__(self, splunk_app_path: str): + self._conf_parser = conf_parser.TABConfigParser() self.splunk_app_path = splunk_app_path self._savedsearches = None @property - def savedsearches(self): - try: - if not self._savedsearches: - self._savedsearches = self.app.get_config("savedsearches.conf") + def savedsearches(self) -> Optional[Dict]: + if self._savedsearches is not None: return self._savedsearches - except OSError: - return None + savedsearches_conf_path = os.path.join( + self.splunk_app_path, "default", "savedsearches.conf" + ) + LOGGER.info("Parsing savedsearches.conf") + self._conf_parser.read(savedsearches_conf_path) + self._savedsearches = self._conf_parser.item_dict() + return self._savedsearches if self._savedsearches else None - def get_savedsearches(self): + def get_savedsearches(self) -> Optional[Generator]: """ Parse the App configuration files & yield savedsearches @@ -51,29 +61,17 @@ def get_savedsearches(self): """ if not self.savedsearches: return None - for stanza in self.savedsearches.sects: - savedsearch_sections = self.savedsearches.sects[stanza] + for stanza_key, stanza_values in self.savedsearches.items(): + LOGGER.info(f"Parsing savedsearches of stanza={stanza_key}") savedsearch_container = { - "stanza": stanza, + "stanza": stanza_key, "search": 'index = "main"', "dispatch.earliest_time": "0", "dispatch.latest_time": "now", } - for key in savedsearch_sections.options: - empty_value = ["None", "", " "] - if ( - key == "search" - and savedsearch_sections.options[key].value not in empty_value - ): - savedsearch_container[key] = savedsearch_sections.options[key].value - elif ( - key == "dispatch.earliest_time" - and savedsearch_sections.options[key].value not in empty_value - ): - savedsearch_container[key] = savedsearch_sections.options[key].value - elif ( - key == "dispatch.latest_time" - and savedsearch_sections.options[key].value not in empty_value - ): - savedsearch_container[key] = savedsearch_sections.options[key].value + empty_value = ["None", "", " "] + for key, value in stanza_values.items(): + if key in ("search", "dispatch.earliest_time", "dispatch.latest_time"): + if value not in empty_value: + savedsearch_container[key] = value yield savedsearch_container diff --git a/pytest_splunk_addon/standard_lib/addon_parser/tags_parser.py b/pytest_splunk_addon/standard_lib/addon_parser/tags_parser.py index 425452bbe..147d37b34 100644 --- a/pytest_splunk_addon/standard_lib/addon_parser/tags_parser.py +++ b/pytest_splunk_addon/standard_lib/addon_parser/tags_parser.py @@ -43,8 +43,8 @@ def __init__(self, splunk_app_path: str): def tags(self) -> Optional[Dict]: if self._tags is not None: return self._tags - LOGGER.info("Parsing tags.conf") tags_conf_path = os.path.join(self.splunk_app_path, "default", "tags.conf") + LOGGER.info("Parsing tags.conf") self._conf_parser.read(tags_conf_path) self._tags = self._conf_parser.item_dict() return self._tags if self._tags else None diff --git 
a/pytest_splunk_addon/standard_lib/addon_parser/transforms_parser.py b/pytest_splunk_addon/standard_lib/addon_parser/transforms_parser.py index d0db7952a..2ace3becb 100644 --- a/pytest_splunk_addon/standard_lib/addon_parser/transforms_parser.py +++ b/pytest_splunk_addon/standard_lib/addon_parser/transforms_parser.py @@ -13,15 +13,18 @@ # See the License for the specific language governing permissions and # limitations under the License. # -# -*- coding: utf-8 -*- """ Provides transforms.conf parsing mechanism """ +from typing import Dict +from typing import Generator +from typing import Optional import logging import re import os import csv -from urllib.parse import unquote + +import addonfactory_splunk_conf_parser_lib as conf_parser LOGGER = logging.getLogger("pytest-splunk-addon") @@ -34,29 +37,29 @@ class TransformsParser(object): Args: splunk_app_path (str): Path of the Splunk app - app (splunk_appinspect.App): Object of Splunk app """ - def __init__(self, splunk_app_path, app): - self.app = app + def __init__(self, splunk_app_path: str): + self._conf_parser = conf_parser.TABConfigParser() self.splunk_app_path = splunk_app_path self._transforms = None @property - def transforms(self): - try: - if not self._transforms: - LOGGER.info("Parsing transforms.conf") - self._transforms = self.app.transforms_conf() + def transforms(self) -> Optional[Dict]: + if self._transforms is not None: return self._transforms - except OSError: - LOGGER.warning("transforms.conf not found.") - return None + transforms_conf_path = os.path.join( + self.splunk_app_path, "default", "transforms.conf" + ) + LOGGER.info("Parsing transforms.conf") + self._conf_parser.read(transforms_conf_path) + self._transforms = self._conf_parser.item_dict() + return self._transforms if self._transforms else None @convert_to_fields - def get_transform_fields(self, transforms_stanza): + def get_transform_fields(self, transforms_stanza: str) -> Optional[Generator]: """ - Parse the tranforms.conf of the App & yield fields of + Parse the transforms.conf of the App & yield fields of a specific stanza. 
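
        For instance (an illustrative stanza, not taken from a real
        add-on), a REGEX-based transform such as::

            [fiction-tsc-regex]
            REGEX = group=(?<extracted_group>[^,]+)

        would make this method yield the field ``extracted_group``.
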
Supported extractions from transforms.conf are

@@ -83,45 +86,36 @@ def get_transform_fields(self, transforms_stanza):
         try:
             if not self.transforms:
                 return
-            transforms_section = self.transforms.sects[transforms_stanza]
-            if "SOURCE_KEY" in transforms_section.options:
-                LOGGER.info("Parsing source_key of %s", transforms_stanza)
-                yield transforms_section.options["SOURCE_KEY"].value
-
-            if "REGEX" in transforms_section.options:
-                LOGGER.info("Parsing REGEX of %s", transforms_stanza)
+            transforms_values = self.transforms[transforms_stanza]
+            if "SOURCE_KEY" in transforms_values:
+                LOGGER.info(f"Parsing source_key of {transforms_stanza}")
+                yield transforms_values["SOURCE_KEY"]
+            if "REGEX" in transforms_values:
+                LOGGER.info(f"Parsing REGEX of {transforms_stanza}")
                 regex = r"\(\?P?[<'](?!_KEY|_VAL)([A-Za-z0-9_]+)[>']"
-                match_fields = re.findall(
-                    regex, transforms_section.options["REGEX"].value
-                )
+                match_fields = re.findall(regex, transforms_values["REGEX"])
                 for each_field in match_fields:
                     if not each_field.startswith(("_KEY_", "_VAL_")):
                         yield each_field.strip()
-
-            if "FIELDS" in transforms_section.options:
-                LOGGER.info("Parsing FIELDS of %s", transforms_stanza)
-                for each_field in transforms_section.options["FIELDS"].value.split(","):
+            if "FIELDS" in transforms_values:
+                LOGGER.info(f"Parsing FIELDS of {transforms_stanza}")
+                fields_values = transforms_values["FIELDS"]
+                for each_field in fields_values.split(","):
                     yield each_field.strip()
-
-            if "FORMAT" in transforms_section.options:
-                LOGGER.info("Parsing FORMAT of %s", transforms_stanza)
+            if "FORMAT" in transforms_values:
+                LOGGER.info(f"Parsing FORMAT of {transforms_stanza}")
                 regex = r"(\S*)::"
-                match_fields = re.findall(
-                    regex, transforms_section.options["FORMAT"].value
-                )
+                match_fields = re.findall(regex, transforms_values["FORMAT"])
                 for each_field in match_fields:
-                    if not "$" in each_field:
+                    if "$" not in each_field:
                         yield each_field.strip()
-
         except KeyError:
             LOGGER.error(
-                "The stanza {} does not exists in transforms.conf.".format(
-                    transforms_stanza
-                ),
+                f"The stanza {transforms_stanza} does not exist in transforms.conf."
) - def get_lookup_csv_fields(self, lookup_stanza): + def get_lookup_csv_fields(self, lookup_stanza: str) -> Optional[Generator]: """ Parse the fields from a lookup file for a specific lookup_stanza @@ -133,15 +127,15 @@ def get_lookup_csv_fields(self, lookup_stanza): """ if not self.transforms: return - if lookup_stanza in self.transforms.sects: - stanza = self.transforms.sects[lookup_stanza] - if "filename" in stanza.options: - lookup_file = stanza.options["filename"].value + if lookup_stanza in self.transforms.keys(): + stanza_values = self.transforms[lookup_stanza] + if "filename" in stanza_values: + lookup_file = stanza_values["filename"] try: location = os.path.join( self.splunk_app_path, "lookups", lookup_file ) - with open(location, "r") as csv_file: + with open(location) as csv_file: reader = csv.DictReader(csv_file) fieldnames = reader.fieldnames for items in fieldnames: diff --git a/pytest_splunk_addon/standard_lib/index_tests/test_generator.py b/pytest_splunk_addon/standard_lib/index_tests/test_generator.py index 498d26bae..456193550 100644 --- a/pytest_splunk_addon/standard_lib/index_tests/test_generator.py +++ b/pytest_splunk_addon/standard_lib/index_tests/test_generator.py @@ -49,11 +49,12 @@ def generate_tests(self, store_events, app_path, config_path, test_type): store_sample = sample_generator.get_samples(store_events) tokenized_events = store_sample.get("tokenized_events") if not store_sample.get("conf_name") == "psa-data-gen": - LOGGER.warning( - "Index Time tests cannot be executed using eventgen.conf, pytest-splunk-addon-data.conf is required." + msg = ( + "Index time tests cannot be executed without " + "pytest-splunk-addon-data.conf" ) - return " Index Time tests cannot be executed using eventgen.conf,\ - pytest-splunk-addon-data.conf is required." + LOGGER.warning(msg) + return msg if test_type == "line_breaker": LOGGER.info("Generating line breaker test") diff --git a/pytest_splunk_addon/standard_lib/sample_generation/__init__.py b/pytest_splunk_addon/standard_lib/sample_generation/__init__.py index 76bd62a9a..ef65f346a 100644 --- a/pytest_splunk_addon/standard_lib/sample_generation/__init__.py +++ b/pytest_splunk_addon/standard_lib/sample_generation/__init__.py @@ -16,7 +16,7 @@ from .sample_event import SampleEvent from .rule import Rule, raise_warning from .sample_stanza import SampleStanza -from .eventgen_parser import EventgenParser +from .pytest_splunk_addon_data_parser import PytestSplunkAddonDataParser from .sample_event import SampleEvent from .sample_generator import SampleGenerator from .sample_xdist_generator import SampleXdistGenerator diff --git a/pytest_splunk_addon/standard_lib/sample_generation/eventgen_parser.py b/pytest_splunk_addon/standard_lib/sample_generation/eventgen_parser.py deleted file mode 100644 index 3c629d545..000000000 --- a/pytest_splunk_addon/standard_lib/sample_generation/eventgen_parser.py +++ /dev/null @@ -1,192 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import re -import logging -from splunk_appinspect import App -from .rule import Rule, raise_warning -from . import SampleStanza - -LOGGER = logging.getLogger("pytest-splunk-addon") -import warnings - - -class EventgenParser: - """ - This class represents the entire eventgen.conf file and handles parsing mechanism of eventgen and the rules. - - Args: - addon_path (str): Path to the Splunk App - """ - - conf_name = " " - - def __init__(self, addon_path, config_path=None): - self._app = App(addon_path, python_analyzer_enable=False) - self.config_path = config_path - self._eventgen = None - self.addon_path = addon_path - self.match_stanzas = set() - - @property - def path_to_samples(self): - if os.path.exists(os.path.join(self.config_path, "samples")): - LOGGER.info( - "Samples path is: {}".format(os.path.join(self.config_path, "samples")) - ) - return os.path.join(self.config_path, "samples") - elif os.path.exists( - os.path.join( - os.path.abspath(os.path.join(self.config_path, os.pardir)), "samples" - ) - ): - LOGGER.info( - "Samples path is: {}".format( - os.path.join( - os.path.abspath(os.path.join(self.config_path, os.pardir)), - "samples", - ) - ) - ) - return os.path.join( - os.path.abspath(os.path.join(self.config_path, os.pardir)), "samples" - ) - else: - LOGGER.info( - "Samples path is: {}".format(os.path.join(self.addon_path, "samples")) - ) - return os.path.join(self.addon_path, "samples") - - @property - def eventgen(self): - try: - relative_path = os.path.relpath(self.config_path, self.addon_path) - if os.path.exists( - os.path.join(self.config_path, "pytest-splunk-addon-data.conf") - ): - self._eventgen = self._app.get_config( - "pytest-splunk-addon-data.conf", dir=relative_path - ) - self.conf_name = "psa-data-gen" - path = self._app.get_filename( - relative_path, "pytest-splunk-addon-data.conf" - ) - - elif os.path.exists(os.path.join(self.config_path, "eventgen.conf")): - - self._eventgen = self._app.get_config( - "eventgen.conf", dir=relative_path - ) - self.conf_name = "eventgen" - path = self._app.get_filename(relative_path, "eventgen.conf") - - else: - self._eventgen = self._app.get_config("eventgen.conf") - self.conf_name = "eventgen" - path = self._app.get_filename("default", "eventgen.conf") - LOGGER.info( - "Using Eventgen path: {e}\nUsing Conf file name: {c}".format( - e=path, c=self.conf_name - ) - ) - return self._eventgen - - except OSError: - LOGGER.warning("pytest-splunk-addon-data.conf/eventgen.conf not Found") - raise FileNotFoundError( - "pytest-splunk-addon-data.conf/eventgen.conf not Found" - ) - - def get_sample_stanzas(self): - """ - Converts a stanza in eventgen.conf to an object of SampleStanza. - - Yields: - SampleStanza Object - """ - eventgen_dict = self.get_eventgen_stanzas() - self.check_samples() - for sample_name, stanza_params in sorted(eventgen_dict.items()): - sample_path = os.path.join(self.path_to_samples, sample_name) - yield SampleStanza( - sample_path, - stanza_params, - ) - - def get_eventgen_stanzas(self): - """ - Parses the eventgen.conf file and converts it into a dictionary. - - Format:: - - { - "sample_file_name": # Not Stanza name - { - "input_type": "str", - "tokens": - { - 1: - { - token: #One# - replacementType: random - replacement: static - } - } - } - } - - Return: - Dictionary representing eventgen.conf in the above format. 
- """ - eventgen_dict = {} - if os.path.exists(self.path_to_samples): - for sample_file in os.listdir(self.path_to_samples): - for stanza in sorted(self.eventgen.sects): - stanza_match_obj = re.search(stanza, sample_file) - if stanza_match_obj and stanza_match_obj.group(0) == sample_file: - self.match_stanzas.add(stanza) - eventgen_sections = self.eventgen.sects[stanza] - eventgen_dict.setdefault((sample_file), {"tokens": {}}) - for stanza_param in eventgen_sections.options: - eventgen_property = eventgen_sections.options[stanza_param] - if eventgen_property.name.startswith("token"): - _, token_id, token_param = eventgen_property.name.split( - "." - ) - token_key = "{}_{}".format(stanza, token_id) - if ( - not token_key - in eventgen_dict[sample_file]["tokens"].keys() - ): - eventgen_dict[sample_file]["tokens"][token_key] = {} - eventgen_dict[sample_file]["tokens"][token_key][ - token_param - ] = eventgen_property.value - else: - eventgen_dict[sample_file][ - eventgen_property.name - ] = eventgen_property.value - return eventgen_dict - - def check_samples(self): - """ - Gives a user warning when sample file is not found for the stanza peresent in the configuration file. - """ - if os.path.exists(self.path_to_samples): - for stanza in self.eventgen.sects: - if stanza not in self.match_stanzas: - raise_warning("No sample file found for stanza : {}".format(stanza)) - LOGGER.info("Sample file found for stanza : {}".format(stanza)) diff --git a/pytest_splunk_addon/standard_lib/sample_generation/pytest_splunk_addon_data_parser.py b/pytest_splunk_addon/standard_lib/sample_generation/pytest_splunk_addon_data_parser.py new file mode 100644 index 000000000..348372b78 --- /dev/null +++ b/pytest_splunk_addon/standard_lib/sample_generation/pytest_splunk_addon_data_parser.py @@ -0,0 +1,160 @@ +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import re +import logging +from .rule import raise_warning +from . import SampleStanza + +import addonfactory_splunk_conf_parser_lib as conf_parser + +LOGGER = logging.getLogger("pytest-splunk-addon") + + +PSA_DATA_CONFIG_FILE = "pytest-splunk-addon-data.conf" + + +class PytestSplunkAddonDataParser: + """ + This class parses pytest-splunk-addon-data.conf file. 
+ + Args: + addon_path: Path to the Splunk App + """ + + conf_name = " " + + def __init__(self, addon_path: str, config_path: str): + self._conf_parser = conf_parser.TABConfigParser() + self.config_path = config_path + self._psa_data = None + self.addon_path = addon_path + self.match_stanzas = set() + + def _path_to_samples(self): + if os.path.exists(os.path.join(self.config_path, "samples")): + LOGGER.info( + "Samples path is: {}".format(os.path.join(self.config_path, "samples")) + ) + return os.path.join(self.config_path, "samples") + elif os.path.exists( + os.path.join( + os.path.abspath(os.path.join(self.config_path, os.pardir)), "samples" + ) + ): + LOGGER.info( + "Samples path is: {}".format( + os.path.join( + os.path.abspath(os.path.join(self.config_path, os.pardir)), + "samples", + ) + ) + ) + return os.path.join( + os.path.abspath(os.path.join(self.config_path, os.pardir)), "samples" + ) + else: + LOGGER.info( + "Samples path is: {}".format(os.path.join(self.addon_path, "samples")) + ) + return os.path.join(self.addon_path, "samples") + + @property + def psa_data(self): + psa_data_path = os.path.join(self.config_path, PSA_DATA_CONFIG_FILE) + if os.path.exists(psa_data_path): + self._conf_parser.read(psa_data_path) + self.conf_name = "psa-data-gen" + self._psa_data = self._conf_parser.item_dict() + return self._psa_data + else: + LOGGER.warning(f"{PSA_DATA_CONFIG_FILE} not found") + raise FileNotFoundError(f"{PSA_DATA_CONFIG_FILE} not found") + + def get_sample_stanzas(self): + """ + Converts a stanza in pytest-splunk-addon-data.conf to an object of SampleStanza. + + Returns: + List of SampleStanza objects. + """ + _psa_data = self._get_psa_data_stanzas() + self._check_samples() + results = [] + for sample_name, stanza_params in sorted(_psa_data.items()): + sample_path = os.path.join(self._path_to_samples(), sample_name) + results.append(SampleStanza(sample_path, stanza_params)) + return results + + def _get_psa_data_stanzas(self): + """ + Parses the pytest-splunk-addon-data.conf file and converts it into a dictionary. + + Format:: + + { + "sample_file_name": # Not Stanza name + { + "input_type": "str", + "tokens": + { + 1: + { + token: #One# + replacementType: random + replacement: static + } + } + } + } + + Return: + Dictionary representing pytest-splunk-addon-data.conf in the above format. + """ + psa_data_dict = {} + if os.path.exists(self._path_to_samples()): + for sample_file in os.listdir(self._path_to_samples()): + for stanza, fields in sorted(self.psa_data.items()): + stanza_match_obj = re.search(stanza, sample_file) + if stanza_match_obj and stanza_match_obj.group(0) == sample_file: + self.match_stanzas.add(stanza) + psa_data_dict.setdefault(sample_file, {"tokens": {}}) + for key, value in fields.items(): + if key.startswith("token"): + _, token_id, token_param = key.split(".") + token_key = f"{stanza}_{token_id}" + if ( + not token_key + in psa_data_dict[sample_file]["tokens"].keys() + ): + psa_data_dict[sample_file]["tokens"][token_key] = {} + psa_data_dict[sample_file]["tokens"][token_key][ + token_param + ] = value + else: + psa_data_dict[sample_file][key] = value + return psa_data_dict + + def _check_samples(self): + """ + Gives a user warning when sample file is not found for the stanza + present in the configuration file. 
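+
+        Stanza names are matched against sample file names as regular
+        expressions; an illustrative stanza (an assumption, not from a real
+        add-on) that would trigger the warning when no matching file exists
+        under the samples directory::
+
+            [missing_sample_.*\.log]
+            input_type = default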
+ """ + if os.path.exists(self._path_to_samples()): + for stanza in self.psa_data.keys(): + if stanza not in self.match_stanzas: + raise_warning(f"No sample file found for stanza : {stanza}") + LOGGER.info(f"Sample file found for stanza : {stanza}") diff --git a/pytest_splunk_addon/standard_lib/sample_generation/rule.py b/pytest_splunk_addon/standard_lib/sample_generation/rule.py index 823627f71..d01532e28 100644 --- a/pytest_splunk_addon/standard_lib/sample_generation/rule.py +++ b/pytest_splunk_addon/standard_lib/sample_generation/rule.py @@ -16,7 +16,6 @@ """ Provides Rules for all possible replacements for tokens. """ -import csv import re import string import uuid @@ -62,7 +61,7 @@ class Rule: Args: token (dict): Dictionary containing token and its data - eventgen_params (dict): Eventgen stanzas dictionary + psa_data_params (dict): PSA data stanzas dictionary sample_path (str): Path to the samples directory """ @@ -70,24 +69,24 @@ class Rule: src_header = ["host", "ipv4", "ipv6", "fqdn"] token_value = namedtuple("token_value", ["key", "value"]) - def __init__(self, token, eventgen_params=None, sample_path=None): + def __init__(self, token, psa_data_params=None, sample_path=None): self.token = token["token"] self.replacement = token["replacement"] self.replacement_type = token["replacementType"] self.field = token.get("field", self.token.strip("#")) - self.eventgen_params = eventgen_params + self.psa_data_params = psa_data_params self.sample_path = sample_path self.fake = Faker() self.file_count = 0 @classmethod - def parse_rule(cls, token, eventgen_params, sample_path): + def parse_rule(cls, token, psa_data_params, sample_path): """ Returns appropriate Rule object as per replacement type of token. Args: token (dict): Dictionary containing token and its data - eventgen_params (dict): Eventgen stanzas dictionary + psa_data_params (dict): PSA data stanzas dictionary sample_path (str): Path to the samples directory """ rule_book = { @@ -135,7 +134,7 @@ def parse_rule(cls, token, eventgen_params, sample_path): if replacement_type == "static": return StaticRule(token) elif replacement_type == "timestamp": - return TimeRule(token, eventgen_params) + return TimeRule(token, psa_data_params) elif replacement_type == "random" or replacement_type == "all": for each_rule in rule_book: if replacement.lower().startswith(each_rule): @@ -619,9 +618,9 @@ def replace(self, sample, token_count): sample (SampleEvent): Instance containing event info token_count (int): No. of token in sample event where rule is applicable """ - earliest = self.eventgen_params.get("earliest") - latest = self.eventgen_params.get("latest") - timezone_time = self.eventgen_params.get("timezone", "0000") + earliest = self.psa_data_params.get("earliest") + latest = self.psa_data_params.get("latest") + timezone_time = self.psa_data_params.get("timezone", "0000") random_time = datetime.utcnow() time_parser = time_parse() time_delta = datetime.now().timestamp() - datetime.utcnow().timestamp() diff --git a/pytest_splunk_addon/standard_lib/sample_generation/sample_generator.py b/pytest_splunk_addon/standard_lib/sample_generation/sample_generator.py index 54c42783f..72b724aa1 100644 --- a/pytest_splunk_addon/standard_lib/sample_generation/sample_generator.py +++ b/pytest_splunk_addon/standard_lib/sample_generation/sample_generator.py @@ -13,10 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import time -from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor +from concurrent.futures import ThreadPoolExecutor -from . import EventgenParser +from . import PytestSplunkAddonDataParser from . import SampleStanza from itertools import cycle @@ -47,14 +46,13 @@ def get_samples(self): Generate SampleEvent object """ if not SampleGenerator.sample_stanzas: - eventgen_parser = EventgenParser( + psa_data_parser = PytestSplunkAddonDataParser( self.addon_path, config_path=self.config_path ) - sample_stanzas = list(eventgen_parser.get_sample_stanzas()) - SampleGenerator.conf_name = eventgen_parser.conf_name + sample_stanzas = psa_data_parser.get_sample_stanzas() + SampleGenerator.conf_name = psa_data_parser.conf_name with ThreadPoolExecutor(min(20, max(len(sample_stanzas), 1))) as t: t.map(SampleStanza.get_raw_events, sample_stanzas) - # with ProcessPoolExecutor(self.process_count) as p: _ = list( map( SampleStanza.tokenize, diff --git a/pytest_splunk_addon/standard_lib/sample_generation/sample_stanza.py b/pytest_splunk_addon/standard_lib/sample_generation/sample_stanza.py index 88570ca85..a3cf663fc 100644 --- a/pytest_splunk_addon/standard_lib/sample_generation/sample_stanza.py +++ b/pytest_splunk_addon/standard_lib/sample_generation/sample_stanza.py @@ -29,25 +29,25 @@ class SampleStanza(object): """ - This class represents a stanza of the eventgen.conf. + This class represents a stanza of the pytest-splunk-addon-data.conf. It contains all the parameters for the stanza such as: * Sample Name * Tokens * Sample file's raw data - * Tokenised events + * Tokenized events * Sample ingestion type Args: sample_path (str): Path to the sample file - eventgen_params (dict): Dictionary representing eventgen.conf + psa_data_params (dict): Dictionary representing pytest-splunk-addon-data.conf """ - def __init__(self, sample_path, eventgen_params): + def __init__(self, sample_path, psa_data_params): self.sample_path = sample_path self.sample_name = os.path.basename(sample_path) - self.metadata = self._parse_meta(eventgen_params) - self.sample_rules = list(self._parse_rules(eventgen_params, self.sample_path)) + self.metadata = self._parse_meta(psa_data_params) + self.sample_rules = list(self._parse_rules(psa_data_params, self.sample_path)) self.input_type = self.metadata.get("input_type", "default") self.host_count = 0 @@ -73,7 +73,7 @@ def tokenize(self, conf_name): Tokenizes the raw events by replacing all the tokens in it. Args: - conf_name (str): Name of the conf file, "eventgen" or "psa-data-gen" + conf_name (str): Name of the conf file, "psa-data-gen" """ if conf_name == "eventgen": required_event_count = self.metadata.get("count") @@ -116,19 +116,19 @@ def tokenize(self, conf_name): self.tokenized_events = bulk_event - def _parse_rules(self, eventgen_params, sample_path): + def _parse_rules(self, psa_data_params, sample_path): """ Yield the rule instance based token replacement type. 
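
        For instance (illustrative values), a token with
        ``replacementType = static`` resolves to a ``StaticRule``, while
        ``replacementType = timestamp`` resolves to a ``TimeRule`` built
        with the stanza parameters.
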
Args: - eventgen_params (dict): Eventgen stanzas dictionary + psa_data_params (dict): PSA data stanzas dictionary sample_path (str): Path to the sample file """ token_list = self._sort_tokens_by_replacement_type_all( - eventgen_params["tokens"] + psa_data_params["tokens"] ) for each_token, token_value in token_list: - applied_rule = Rule.parse_rule(token_value, eventgen_params, sample_path) + applied_rule = Rule.parse_rule(token_value, psa_data_params, sample_path) if not applied_rule: raise_warning( "Unidentified Rule: '{}' for token '{}'".format( @@ -138,15 +138,15 @@ def _parse_rules(self, eventgen_params, sample_path): else: yield applied_rule - def _parse_meta(self, eventgen_params): + def _parse_meta(self, psa_data_params): """ - Return the metadata from eventgen stanzas. + Return the metadata from PSA data stanzas. Args: - eventgen_params (dict): Eventgen stanzas dictionary + psa_data_params (dict): PSA data stanzas dictionary """ metadata = { - key: eventgen_params[key] for key in eventgen_params if key != "tokens" + key: psa_data_params[key] for key in psa_data_params if key != "tokens" } metadata.update(host=self.sample_name) if ( @@ -192,7 +192,7 @@ def _parse_meta(self, eventgen_params): ) ) metadata.update(timezone="0000") - eventgen_params.update(timezone="0000") + psa_data_params.update(timezone="0000") if metadata.get("timestamp_type") not in ["event", "plugin", None]: raise_warning( "Invalid value for timestamp_type: '{}' using timestamp_type = plugin.".format( diff --git a/pytest_splunk_addon/standard_lib/utilities/create_new_eventgen.py b/pytest_splunk_addon/standard_lib/utilities/create_new_eventgen.py deleted file mode 100644 index 6a5bc8683..000000000 --- a/pytest_splunk_addon/standard_lib/utilities/create_new_eventgen.py +++ /dev/null @@ -1,310 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import re -import os - -from splunk_appinspect import App -from .mapping import FIELD_MAPPING, FILE_MAPPING -import logging - -LOGGER = logging.getLogger("pytest-splunk-addon") - - -class UpdateEventgen: - """Update eventgen file""" - - def __init__(self, addon_path): - self._app = App(addon_path, python_analyzer_enable=False) - self._eventgen = None - self.path_to_samples = os.path.join(addon_path, "samples") - - @property - def eventgen(self): - try: - if not self._eventgen: - self._eventgen = self._app.get_config("eventgen.conf") - return self._eventgen - except OSError: - LOGGER.error("Eventgen.conf not found") - raise Exception("Eventgen.conf not found") - - def get_eventgen_stanzas(self): - """ - To get eventgen stanza and create a dictionary. - If stanza contains regex for multiple sample files, then it creates stanza for each sample file. - - Return: - eventgen_dict (dict): - { - "stanza_name": - { - "other metadata": "source, sourcetype, etc." 
- "sample_count" : int - "tokens": - { - 0: { - token: #One# - replacementType: random - replacement: static - } - } - } - } - """ - eventgen_dict = {} - for stanza in self.eventgen.sects: - eventgen_sections = self.eventgen.sects[stanza] - eventgen_dict.setdefault( - (stanza), - { - "tokens": {}, - }, - ) - - try: - events_in_file = len( - open(os.path.join(self.path_to_samples, stanza)).readlines() - ) - eventgen_dict[stanza]["sample_count"] = events_in_file - - except: - pass - - for stanza_param in eventgen_sections.options: - eventgen_property = eventgen_sections.options[stanza_param] - if eventgen_property.name.startswith("token"): - _, token_id, token_param = eventgen_property.name.split(".") - if not token_id in eventgen_dict[stanza]["tokens"].keys(): - eventgen_dict[stanza]["tokens"][token_id] = {} - eventgen_dict[stanza]["tokens"][token_id][ - token_param - ] = eventgen_property.value - - else: - eventgen_dict[stanza][ - eventgen_property.name - ] = eventgen_property.value - - for sample_file in os.listdir(self.path_to_samples): - - if re.search(stanza, sample_file): - - events_in_file = len( - open( - os.path.join(self.path_to_samples, sample_file) - ).readlines() - ) - if sample_file not in eventgen_dict.keys(): - eventgen_dict.setdefault((sample_file), {}) - eventgen_dict[sample_file]["sample_count"] = events_in_file - eventgen_dict[sample_file]["add_comment"] = True - eventgen_dict[sample_file]["tokens"] = {} - return eventgen_dict - - # update the stanzas in dict - def update_eventgen_stanzas(self, eventgen_dict): - """ - Updates the eventgen_dict by adding new metadata - New Metadata: ["input_type", "host_type", "sourcetype_to_search", "timestamp_type"] - And update the tokens if possible based on the new Data-Generator rules. - Input: - eventgen_dict (dict) : eventgen dictionary in following format. 
- - Return: - eventgen_dict (dict): Updated Eventgen stanzas dictionary - """ - - metadata = ["input_type", "host_type", "sourcetype_to_search", "timestamp_type"] - review_comments = { - "metadata": "#REVIEW : Update metadata as per addon's requirement", - "replacement": "# REVIEW : Possible value in list : ", - "field": "# REVIEW : Check if the field is extracted from the events, else remove this field parameter", - "mapping": "# REVIEW : Please check if it can be replace with %s rule", - "sample_count": "# REVIEW : Please check for the events per stanza and update sample_count accordingly", - } - - for stanza_name, stanza_data in eventgen_dict.items(): - # adding metadata - for data in metadata: - eventgen_dict[stanza_name][data] = ( - f"<<{data}>> " f"{review_comments['metadata']}" - ) - - if eventgen_dict[stanza_name].get("index"): - eventgen_dict[stanza_name]["index"] = ( - f"{eventgen_dict[stanza_name]['index']} " - f"{review_comments['metadata']}" - ) - - eventgen_dict[stanza_name]["source"] = eventgen_dict[stanza_name].get( - "source", - f"pytest-splunk-addon:{eventgen_dict[stanza_name]['input_type']}", - ) - - for _, token_data in stanza_data.get("tokens", {}).items(): - token_name = token_data.get("token").strip("#()").lower() - for _, new_token_values in FIELD_MAPPING.items(): - - if token_name in new_token_values.get("token"): - new_replacement_type = new_token_values.get("replacementType") - new_replacement = new_token_values.get("replacement") - - token_data["replacementType"] = new_replacement_type - token_data["replacement"] = new_replacement - if new_token_values.get("possible_replacement"): - token_data["replacement"] = ( - f"{new_replacement} " - f"{review_comments['replacement']} " - f"{new_token_values.get('possible_replacement')}" - ) - - if new_token_values.get("field"): - token_data["field"] = ( - f"{new_token_values.get('field')} " - f"{review_comments['field']}" - ) - - if token_data.get("replacementType").lower() == "timestamp": - token_data["field"] = f"_time {review_comments['field']}" - - elif token_data.get("replacementType").lower() in ["file", "mvfile"]: - file_name = ( - token_data.get("replacement").split("/")[-1].split(":")[0] - ) - token_data["replacement"] = f"file[{token_data.get('replacement')}]" - token_data["replacementType"] = "random" - - for key_fields, mapped_files in FILE_MAPPING.items(): - replacement_type = FIELD_MAPPING.get(key_fields).get( - "replacementType" - ) - replacement = FIELD_MAPPING.get(key_fields).get("replacement") - replacement_type_values = FIELD_MAPPING.get(key_fields).get( - "possible_replacement" - ) - field_value = FIELD_MAPPING.get(key_fields).get("field") - - if file_name in mapped_files: - if "SA-Eventgen" in token_data["replacement"]: - token_data["replacementType"] = ( - f"{replacement_type} " - f"{review_comments['mapping']%key_fields}" - ) - - token_data["replacement"] = ( - f"{replacement} " - f"{review_comments['mapping']%key_fields}" - ) - - if replacement_type_values: - token_data["replacement"] = ( - f"{token_data['replacement']} " - f"{review_comments['replacement']} " - f"{replacement_type_values}" - ) - - if field_value: - token_data["field"] = ( - f"{field_value} " - f"{review_comments['mapping']%key_fields}" - ) - - # for assigning sample_count at the end of metadata - if eventgen_dict.get(stanza_name).get("sample_count"): - event_count = eventgen_dict[stanza_name].pop("sample_count") - eventgen_dict[stanza_name]["sample_count"] = ( - f"{event_count}" f" {review_comments['sample_count']}" - ) - - # for 
assigning tokens at the end of metadata - if eventgen_dict.get(stanza_name).get("tokens"): - token_dict = eventgen_dict[stanza_name].pop("tokens") - eventgen_dict[stanza_name]["tokens"] = token_dict - - return eventgen_dict - - def create_new_eventgen(self, updated_eventgen_dict, new_conf_path): - """ - Writes the new values in a new conf file - params: - updated_eventgen_dict (dict) : Containing all the new values for eventgen.conf - new_conf_path : file path for creating new conf file - """ - with open(new_conf_path, "w") as new_eventgen: - LOGGER.info("created new file {}".format(new_conf_path)) - # writing file metadata in new eventgen file - comment = "## Stanza gets metadata from main stanza" - for file_metadata in self.eventgen.headers: - new_eventgen.write(file_metadata + "\n") - - for stanza_name, stanza_data in updated_eventgen_dict.items(): - new_eventgen.write(f"\n[{stanza_name}]\n") - for metadata_name, metadata_value in stanza_data.items(): - - if metadata_name == "add_comment": - new_eventgen.write(f"{comment}\n") - - elif metadata_name != "tokens": - new_eventgen.write(f"{metadata_name} = {metadata_value}\n") - else: - new_eventgen.write("\n") - for tokens_id, tokens_value in stanza_data.get( - "tokens" - ).items(): - new_eventgen.write( - f"token.{tokens_id}.token = {tokens_value['token']}\n" - ) - new_eventgen.write( - f"token.{tokens_id}.replacementType = {tokens_value['replacementType']}\n" - ) - new_eventgen.write( - f"token.{tokens_id}.replacement = {tokens_value['replacement']}\n" - ) - if tokens_value.get("field"): - new_eventgen.write( - f'token.{tokens_id}.field = {tokens_value.get("field")}\n' - ) - new_eventgen.write("\n") - - -def main(): - ap = argparse.ArgumentParser() - ap.add_argument( - "addon_path", - help="Path to the addon for which eventgen.conf has to be converted. Must contains samples folder", - metavar="addon-path", - ) - ap.add_argument( - "new_conf_path", - help="Path to Save the new conf file", - metavar="new-conf-path", - nargs="?", - default="pytest-splunk-addon-data.conf", - ) - args = ap.parse_args() - - addon_path = args.addon_path - new_conf_path = args.new_conf_path - - update_eventgen = UpdateEventgen(addon_path) - eventgen_dict = update_eventgen.get_eventgen_stanzas() - updated_eventgen_dict = update_eventgen.update_eventgen_stanzas(eventgen_dict) - update_eventgen.create_new_eventgen(updated_eventgen_dict, new_conf_path) - - -if __name__ == "__main__": - main() diff --git a/pytest_splunk_addon/standard_lib/utilities/mapping.py b/pytest_splunk_addon/standard_lib/utilities/mapping.py deleted file mode 100644 index 1b3633aff..000000000 --- a/pytest_splunk_addon/standard_lib/utilities/mapping.py +++ /dev/null @@ -1,243 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -""" -Replaced value of tokens -""" -FIELD_MAPPING = { - "src": { - "token": [ - "src", - "srcaddr", - "src_addr", - "src-addr", - "srcip", - "src_ip", - "src-ip", - "srcaddress", - "src_address", - "src-address", - "source", - "sourceaddr", - "source_addr", - "source-addr", - "sourceip", - "source_ip", - "source-ip", - "sourceaddress", - "source_address", - "source-address", - "srcfqdn", - "src_fqdn", - "src-fqdn", - "sourcefqdn", - "source_fqdn", - "source-fqdn", - ], - "replacementType": "random", - "replacement": "src[]", - "field": "src", - "possible_replacement": ["ipv4", "ipv6", "host", "fqdn"], - }, - "dest": { - "token": [ - "dest", - "destaddr", - "dest_addr", - "dest-addr", - "destip", - "dest_ip", - "dest-ip", - "destaddress", - "dest_address", - "dest-address", - "destination", - "destinationaddr", - "destination_addr", - "destination-addr", - "destinationip", - "destination_ip", - "destination-ip", - "destinationaddress", - "destination_address", - "destination-address", - "destfqdn", - "dest_fqdn", - "dest-fqdn", - "destinationfqdn", - "destination_fqdn", - "destination-fqdn", - ], - "replacementType": "random", - "replacement": "dest[]", - "field": "dest", - "possible_replacement": ["ipv4", "ipv6", "host", "fqdn"], - }, - "user": { - "token": ["user", "username", "usr", "user_name", "user-name", "users"], - "replacementType": "random", - "replacement": "user[]", - "field": "user", - "possible_replacement": ["name", "email", "domain_user", "distinquised_name"], - }, - "src_port": { - "token": [ - "src_port", - "src-port", - "source_port", - "source-port", - "sourceport", - "srcport", - ], - "replacementType": "random", - "replacement": "src_port", - "field": "src_port", - }, - "dest_port": { - "token": [ - "dest_port", - "dest-port", - "destination_port", - "destination-port", - "destinationport", - "destport", - ], - "replacementType": "random", - "replacement": "dest_port", - "field": "dest_port", - }, - "dvc": { - "token": ["dvc"], - "replacementType": "random", - "replacement": "dvc[]", - "field": "dvc", - "possible_replacement": ["ipv4", "ipv6", "host", "fqdn"], - }, - "url": { - "token": ["url", "uri"], - "replacementType": "random", - "replacement": "url[]", - "field": "url", - "possible_replacement": ["ip_host", "fqdn_host", "path", "query", "protocol"], - }, - "guid": {"token": ["guid"], "replacementType": "random", "replacement": "guid"}, - "host": { - "token": [ - "host", - "hostaddr", - "host_addr", - "host-addr", - "hostaddress", - "host_address", - "host-address", - "httphost", - "http_host", - "http-host", - "hostname", - "host_name", - "host-name", - ], - "replacementType": "random", - "replacement": "host[] # REVIEW : ", - "field": "host", - "possible_replacement": ["host", "ipv4", "ipv6", "fqdn"], - }, - "ipv4": { - "token": [ - "ip", - "ipv4", - "ipaddr", - "ip_addr", - "ip-addr", - "ipaddress", - "ip_address", - "ip-address", - ], - "replacementType": "random", - "replacement": "ipv4", - }, - "ipv6": {"token": ["ipv6"], "replacementType": "random", "replacement": "ipv6"}, - "hex": { - "token": ["hex", "puid"], - "replacementType": "random", - "replacement": "hex(20)", - }, - "email": { - "token": [ - "email", - "e-mail", - "e_mail", - "mail", - "mailid", - "mail_id", - "mail-id", - "emailid", - "email_id", - "email-id", - "emailaddr", - "email_addr", - "email-addr", - "emailaddress", - "email_address", - "email-address", - ], - "replacementType": "random", - "replacement": "email", - }, - "mac": { - "token": [ - "mac", - "macaddr", - "mac_addr", - 
"mac-addr", - "macaddress", - "mac_address", - "mac-address", - "macname", - "mac_name", - "mac-name", - ], - "replacementType": "random", - "replacement": "mac", - }, -} - - -FILE_MAPPING = { - "ipv4": [ - "anomalous.ip_address.sample", - "ip_address.sample", - "webhosts.sample", - "ip.sample", - "ipaddress.sample", - ], - "mac": [ - "anomalous.mac_address.sample", - "mac_address.sample", - "mac.sample", - "remote_mac.sample", - ], - "host": [ - "anomalous.hostname.sample", - "hostname.sample", - "linux.host.sample", - "computer_name.sample", - "host_name.sample", - ], - "user": ["userName.sample", "mac_user.sample", "user_name.sample"], - "dvc": ["dvc.sample", "dvc_ids.sample"], - "url": ["uri.sample", "url.sample"], - "email": ["email_address.sample"], -} diff --git a/pytest_splunk_addon/tools/cim_field_report.py b/pytest_splunk_addon/tools/cim_field_report.py index 5a3854a2a..656b699a8 100644 --- a/pytest_splunk_addon/tools/cim_field_report.py +++ b/pytest_splunk_addon/tools/cim_field_report.py @@ -18,7 +18,6 @@ import logging import json import argparse -import re import time import traceback @@ -498,7 +497,8 @@ def get_addon_eventtypes(addon_path): parser = AddonParser(addon_path) eventtypes = { - eventtype: [] for eventtype in parser.eventtype_parser.eventtypes.sects + eventtype["stanza"]: [] + for eventtype in parser.eventtype_parser.get_eventtypes() } for item in parser.tags_parser.get_tags(): diff --git a/tests/unit/tests_standard_lib/test_addon_parser/conftest.py b/tests/unit/tests_standard_lib/test_addon_parser/conftest.py deleted file mode 100644 index 4a5b046e2..000000000 --- a/tests/unit/tests_standard_lib/test_addon_parser/conftest.py +++ /dev/null @@ -1,22 +0,0 @@ -import pytest -from unittest.mock import Mock - - -@pytest.fixture -def parser(configuration_file): - def create_parser( - parser_class, func_to_be_mocked, parsed_output, headers=None, props_conf=None - ): - headers = headers if headers else [] - FakeApp = Mock() - attrs = { - "{}.return_value".format(func_to_be_mocked): configuration_file( - headers=headers, sects=parsed_output, errors=[] - ) - } - FakeApp.configure_mock(**attrs) - if props_conf is not None: - FakeApp.props_conf.return_value = props_conf - return parser_class("fake_path", FakeApp) - - return create_parser diff --git a/tests/unit/tests_standard_lib/test_addon_parser/test_eventtype_parser.py b/tests/unit/tests_standard_lib/test_addon_parser/test_eventtype_parser.py index 94f52624b..2b826848c 100644 --- a/tests/unit/tests_standard_lib/test_addon_parser/test_eventtype_parser.py +++ b/tests/unit/tests_standard_lib/test_addon_parser/test_eventtype_parser.py @@ -1,61 +1,33 @@ -import pytest -from unittest.mock import patch, PropertyMock +from unittest.mock import patch, mock_open from pytest_splunk_addon.standard_lib.addon_parser.eventtype_parser import ( EventTypeParser, ) +TEST_EVENTTYPES = """[fiction_is_splunkd] +search = index=_internal sourcetype=splunkd -output_to_build = { - "fiction_is_splunkd": {"search": "index=_internal sourcetype=splunkd"}, - "fiction_for_tags_positive": {"search": "sourcetype=splunkd"}, - "fiction_is_splunkd-%host%": {"search": "index=_internal sourcetype=splunkd"}, -} +[fiction_for_tags_positive] +search = sourcetype=splunkd +[fiction_is_splunkd-%host%] +search = index=_internal sourcetype=splunkd +""" -def test_eventtypes_can_be_parsed_and_extracted(parser_instance): - assert list(parser_instance.eventtypes.sects.keys()) == [ - "fiction_is_splunkd", - "fiction_for_tags_positive", - "fiction_is_splunkd-%host%", - ], 
"eventypes can not be called or does not have sects attribute" +def test_eventtypes_can_be_parsed_and_returned(): + expected_outputs = [ + {"stanza": "fiction_is_splunkd"}, + {"stanza": "fiction_for_tags_positive"}, + {"stanza": "fiction_is_splunkd-%host%"}, + ] + eventtypes_parser = EventTypeParser("unused_path") + with patch("builtins.open", new_callable=mock_open, read_data=TEST_EVENTTYPES): + output = eventtypes_parser.get_eventtypes() + assert expected_outputs == list(output) -def test_eventtypes_can_be_parsed_and_returned(parsed_output, parser_instance): - expected_outputs = [{"stanza": x} for x in parsed_output.keys()] - for i, event in enumerate(parser_instance.get_eventtypes()): - assert event == expected_outputs[i], "expeceted event {} not found".format( - expected_outputs[i] - ) - -def test_get_eventtypes_calls_app_get_config(parser_instance): - for _ in parser_instance.get_eventtypes(): - pass - parser_instance.app.eventtypes_conf.assert_called_once() - - -def test_no_eventtype_config_file(parser_instance): - parser_instance.app.eventtypes_conf.side_effect = OSError - assert ( - parser_instance.eventtypes is None - ), "eventtypes created when no config file exists" - - -def test_nothing_returned_when_no_tags_config_file(parser): - with patch.object( - EventTypeParser, "eventtypes", new_callable=PropertyMock - ) as eventtypes_mock: - eventtypes_mock.return_value = None - parser_instance = parser(EventTypeParser, "eventtypes_conf", {}) - output = [tag for tag in parser_instance.get_eventtypes() if tag] - assert output == [], "eventtypes returned when no config file exists" - - -@pytest.fixture(scope="module") -def parsed_output(build_parsed_output): - return build_parsed_output(output_to_build) - - -@pytest.fixture() -def parser_instance(parsed_output, parser): - return parser(EventTypeParser, "eventtypes_conf", parsed_output) +def test_no_eventtypes_config_file(): + eventtypes_parser = EventTypeParser("unused_path") + with patch("builtins.open", mock_open()) as mock_file: + mock_file.side_effect = OSError() + assert eventtypes_parser.eventtypes is None diff --git a/tests/unit/tests_standard_lib/test_addon_parser/test_props_parser.py b/tests/unit/tests_standard_lib/test_addon_parser/test_props_parser.py index 6e158c24a..b72044781 100644 --- a/tests/unit/tests_standard_lib/test_addon_parser/test_props_parser.py +++ b/tests/unit/tests_standard_lib/test_addon_parser/test_props_parser.py @@ -1,438 +1,207 @@ -import importlib -import pytest -from collections import namedtuple -from unittest.mock import MagicMock, patch, call - -# helpers variables to make test input/outup easier to change -FIELD = "field" -FIELDS = f"{FIELD}s" -FIELD1 = f"{FIELD}1" -FIELD2 = f"{FIELD}2" -FIELD3 = f"{FIELD}3" -FIELD4 = f"{FIELD}4" -FIELD5 = f"{FIELD}5" -FIELD6 = f"{FIELD}6" -FIELD7 = f"{FIELD}7" -FIELD8 = f"{FIELD}8" -NAME = "Name" -OUTPUTNEW = "OUTPUTNEW" -OUTPUT = "OUTPUT" -INPUT_FIELDS = "input_fields" -OUTPUT_FIELDS = "output_fields" -STANZA = "stanza" -LOOKUP_STANZA = f"lookup_{STANZA}" -STANZA_TYPE = f"{STANZA}_type" -CLASSNAME = "classname" - -PropsProperty = namedtuple("PropsProperty", ["name", "value"]) - - -@pytest.fixture(scope="session") -def field_mock(): - return MagicMock() - - -@pytest.fixture(scope="session") -def transforms_parser(): - return MagicMock - - -@pytest.fixture(scope="session") -def pp(field_mock, transforms_parser): - with patch( - "pytest_splunk_addon.standard_lib.addon_parser.Field", field_mock - ), patch( - 
"pytest_splunk_addon.standard_lib.addon_parser.convert_to_fields", - lambda x: x, - ), patch( - "pytest_splunk_addon.standard_lib.addon_parser.TransformsParser", - transforms_parser, - ): - import pytest_splunk_addon.standard_lib.addon_parser.props_parser - - importlib.reload(pytest_splunk_addon.standard_lib.addon_parser.props_parser) - return pytest_splunk_addon.standard_lib.addon_parser.props_parser.PropsParser - - -@pytest.fixture -def headers(): - return [ - "## SPDX-FileCopyrightText: 2020 Splunk, Inc. ", - "## SPDX-License-Identifier: LicenseRef-Splunk-1-2020", - ] - - -@pytest.fixture -def sect_value(mocker): - SectValue = namedtuple("SectValue", ["header", "lineo", "name", "options"]) - options = {"REPORT": "report_value", "NON_report": "non_report_value"} - - def func(sect): - return SectValue(mocker.ANY, mocker.ANY, sect, options) - - return func +import os +import pytest -@pytest.fixture -def sects(sect_value): - scs = [ - "host::snow" "(?:::){0}snow:*", - "snow:incident", - "source::...ta_snow_setup.log*", - "source::...ta_snow_ticket.log*", - ] - return {s: sect_value(s) for s in scs} - - -@pytest.fixture -def default_props_parser(parser, pp, sects, headers): - return parser(pp, "props_conf", sects, headers) - - -@pytest.fixture -def props_parser_empty_conf(parser, pp): - return parser(pp, "props_conf", [], props_conf=[]) - - -@pytest.fixture -def props_parser_props_conf_erroring(parser, pp): - parser_instance = parser(pp, "props_conf", []) - parser_instance.app.props_conf.side_effect = OSError - return parser_instance - +from pytest_splunk_addon.standard_lib.addon_parser.props_parser import ( + PropsParser, +) +from pytest_splunk_addon.standard_lib.addon_parser.fields import Field -@pytest.fixture -def get_sects(sect_value): - def func(sects_keys, sv=None): - if sv is None: - sv = sect_value - return {k: sv for k in sects_keys} - return func +@pytest.fixture() +def default_props_parser() -> PropsParser: + props_conf_path = os.path.join(os.path.dirname(__file__), "testdata") + return PropsParser(props_conf_path) -@pytest.fixture -def get_props_stanza_results(sect_value): - return [ - ("sourcetype", "snow:incident", sect_value("snow:incident")), - ("source", "*ta_snow_setup.log*1", sect_value("source::...ta_snow_setup.log*")), - ("source", "*ta_snow_setup.log*2", sect_value("source::...ta_snow_setup.log*")), +@pytest.mark.parametrize( + "src, expected", + [ + ("source::...setup.log*", {"*setup.log*"}), + ("source::...set...up.log*", {"*set*up.log*"}), + ("source::...se_(tT)_a_(pP).log*", {"*se_tT_a_pP.log*"}), ( - "source", - "*ta_snow_ticket.log*", - sect_value("source::...ta_snow_ticket.log*"), + "source::...s_(e|E)_t_(a|A)p.log*", + { + "*s_e_t_ap.log*", + "*s_e_t_Ap.log*", + "*s_E_t_ap.log*", + "*s_E_t_Ap.log*", + }, ), - ] - - -def test_props_property(default_props_parser, headers, sects): - assert default_props_parser.props.headers == headers - assert default_props_parser.props.sects == sects - assert default_props_parser.props.errors == [] - - -def test_props_property_with_error(props_parser_props_conf_erroring, caplog): - assert props_parser_props_conf_erroring.props is None - assert caplog.messages == ["Parsing props.conf", "props.conf not found."] + ], +) +def test_get_list_of_sources(default_props_parser, src, expected): + assert set(list(default_props_parser.get_list_of_sources(src))) == expected -def test_get_props_fields(default_props_parser, get_props_stanza_results): - gps = MagicMock() - gps.return_value = get_props_stanza_results - 
default_props_parser.get_props_stanzas = gps
-    gpm = MagicMock()
-    gpm.return_value = lambda x: (FIELD1, FIELD2)
-    default_props_parser.get_props_method = gpm
-    grf = MagicMock()
-    grf.return_value = [("transform_string", (FIELD3, FIELD4))]
-    default_props_parser.get_report_fields = grf
-    assert list(default_props_parser.get_props_fields()) == [
+def test_get_props_stanzas(default_props_parser):
+    expected = [
+        {
+            "stanza": "sourcetype:test",
+            "stanza_type": "sourcetype",
+            "classname": "REPORT-field::fiction-tsc-regex-format",
+            "fields": [
+                Field({"name": "size1"}),
+                Field({"name": "size2"}),
+            ],
+        },
+        {
+            "stanza": "sourcetype:test",
+            "stanza_type": "sourcetype",
+            "classname": "FIELDALIAS-fields",
+            "fields": [
+                Field({"name": "field_aliased_1"}),
+                Field({"name": "field_2"}),
+                Field({"name": "field_aliased_2"}),
+                Field({"name": "field_1"}),
+            ],
+        },
+        {
+            "stanza": "sourcetype:test",
+            "stanza_type": "sourcetype",
+            "classname": "FIELDALIAS-fieldalias_1",
+            "fields": [
+                Field({"name": "field1"}),
+                Field({"name": "field2"}),
+            ],
+        },
+        {
+            "stanza": "sourcetype:test",
+            "stanza_type": "sourcetype",
+            "classname": "FIELDALIAS-fieldalias_2",
+            "fields": [
+                Field({"name": "field1"}),
+                Field({"name": "field2"}),
+            ],
+        },
+        {
+            "stanza": "sourcetype:test",
+            "stanza_type": "sourcetype",
+            "classname": "FIELDALIAS-fieldalias_3",
+            "fields": [
+                Field({"name": "field1"}),
+                Field({"name": "field2"}),
+                Field({"name": "field5"}),
+                Field({"name": "field6"}),
+            ],
+        },
         {
-            STANZA: "snow:incident",
-            STANZA_TYPE: "sourcetype",
-            CLASSNAME: "REPORT::transform_string",
-            FIELDS: [FIELD3, FIELD4],
+            "stanza": "sourcetype:test",
+            "stanza_type": "sourcetype",
+            "classname": "FIELDALIAS-fieldalias_4",
+            "fields": [
+                Field({"name": "field2"}),
+                Field({"name": "OUTPUTNEW"}),
+                Field({"name": "field3"}),
+                Field({"name": "fieldx"}),
+                Field({"name": "field4"}),
+                Field({"name": "field5"}),
+                Field({"name": "field7"}),
+                Field({"name": "field8"}),
+            ],
         },
         {
-            STANZA: "snow:incident",
-            STANZA_TYPE: "sourcetype",
-            CLASSNAME: "NON_report",
-            FIELDS: [FIELD1, FIELD2],
+            "stanza": "sourcetype:test",
+            "stanza_type": "sourcetype",
+            "classname": "LOOKUP-lookup_name_1",
+            "fields": [
+                Field({"name": "field2"}),
+                Field({"name": "field3"}),
+                Field({"name": "field5"}),
+            ],
         },
         {
-            STANZA: "*ta_snow_setup.log*1",
-            STANZA_TYPE: "source",
-            CLASSNAME: "REPORT::transform_string",
-            FIELDS: [FIELD3, FIELD4],
+            "stanza": "sourcetype:test",
+            "stanza_type": "sourcetype",
+            "classname": "LOOKUP-lookup_name_2",
+            "fields": [
+                Field({"name": "field2"}),
+                Field({"name": "field3"}),
+                Field({"name": "field5"}),
+            ],
         },
         {
-            STANZA: "*ta_snow_setup.log*1",
-            STANZA_TYPE: "source",
-            CLASSNAME: "NON_report",
-            FIELDS: [FIELD1, FIELD2],
+            "stanza": "sourcetype:test",
+            "stanza_type": "sourcetype",
+            "classname": "LOOKUP-lookup_name_3",
+            "fields": [
+                Field({"name": "field2"}),
+                Field({"name": "field3"}),
+                Field({"name": "field5"}),
+            ],
         },
         {
-            STANZA: "*ta_snow_setup.log*2",
-            STANZA_TYPE: "source",
-            CLASSNAME: "REPORT::transform_string",
-            FIELDS: [FIELD3, FIELD4],
+            "stanza": "sourcetype:test",
+            "stanza_type": "sourcetype",
+            "classname": "LOOKUP-lookup_name_4",
+            "fields": [
+                Field({"name": "field2"}),
+                Field({"name": "field3"}),
+                Field({"name": "field5"}),
+                Field({"name": "field7"}),
+            ],
         },
         {
-            STANZA: "*ta_snow_setup.log*2",
-            STANZA_TYPE: "source",
-            CLASSNAME: "NON_report",
-            FIELDS: [FIELD1, FIELD2],
+            "stanza": "sourcetype:test",
+            "stanza_type": "sourcetype",
+            "classname": "LOOKUP-lookup_name_5",
+            "fields": [
+                Field({"name": "field2"}),
+                Field({"name": "field7"}),
+            ],
         },
         {
-            STANZA: "*ta_snow_ticket.log*",
-            STANZA_TYPE: "source",
-            CLASSNAME: "REPORT::transform_string",
-            FIELDS: [FIELD3, FIELD4],
+            "stanza": "sourcetype:test",
+            "stanza_type": "sourcetype",
+            "classname": "EXTRACT-extract_fields1",
+            "fields": [
+                Field({"name": "field1"}),
+                Field({"name": "to_extract"}),
+            ],
         },
         {
-            STANZA: "*ta_snow_ticket.log*",
-            STANZA_TYPE: "source",
-            CLASSNAME: "NON_report",
-            FIELDS: [FIELD1, FIELD2],
+            "stanza": "sourcetype:test",
+            "stanza_type": "sourcetype",
+            "classname": "EXTRACT-extract_fields2",
+            "fields": [
+                Field({"name": "to_extract"}),
+            ],
+        },
+        {
+            "stanza": "sourcetype:test",
+            "stanza_type": "sourcetype",
+            "classname": "EXTRACT-extract_fields3",
+            "fields": [
+                Field({"name": "to_extract"}),
+            ],
         },
     ]
-
-
-@pytest.mark.parametrize(
-    "class_name, expected",
-    [
-        ("extract", "extract_fields"),
-        ("EVAL", "eval_fields"),
-        ("FIELDalias", "fieldalias_fields"),
-        ("lookup", "lookup_fields"),
-        ("something", None),
-    ],
-)
-def test_get_props_method(default_props_parser, class_name, expected, caplog):
-    default_props_parser.get_extract_fields = "extract_fields"
-    default_props_parser.get_eval_fields = "eval_fields"
-    default_props_parser.get_fieldalias_fields = "fieldalias_fields"
-    default_props_parser.get_lookup_fields = "lookup_fields"
-    if expected is None:
-        assert default_props_parser.get_props_method(class_name) is expected
-        assert caplog.messages == ["No parser available for something. Skipping..."]
-    else:
-        assert default_props_parser.get_props_method(class_name) == expected
-
-
-def test_get_props_stanzas(default_props_parser, get_props_stanza_results):
-    default_props_parser.get_list_of_sources = MagicMock()
-    default_props_parser.get_list_of_sources.side_effect = [
-        ("*ta_snow_setup.log*1", "*ta_snow_setup.log*2"),
-        ("*ta_snow_ticket.log*",),
+    result = list(default_props_parser.get_props_fields())
+    for i, r in enumerate(result[: len(expected)]):
+        assert expected[i]["stanza"] == r["stanza"]
+        assert expected[i]["stanza_type"] == r["stanza_type"]
+        assert expected[i]["classname"] == r["classname"]
+        assert sorted(expected[i]["fields"]) == sorted(r["fields"])
+
+
+def test_get_props_stanzas_no_fields_for_key_val_extractions(default_props_parser):
+    expected_classnames_without_extractions = [
+        "EXTRACT-extract_fields4",
+        "EXTRACT-extract_fields5",
     ]
-    assert list(default_props_parser.get_props_stanzas()) == get_props_stanza_results
+    result = list(default_props_parser.get_props_fields())
+    for elem in result:
+        assert elem["classname"] not in expected_classnames_without_extractions


-def test_get_props_stanzas_empty_props(props_parser_empty_conf):
-    assert list(props_parser_empty_conf.get_props_stanzas()) == []
+def test_get_props_method_unknown_classname(default_props_parser):
+    unknown_classname = "UNKNOWN_CLASS_NAME-key"
+    result = list(default_props_parser.get_props_fields())
+    for elem in result:
+        assert elem["classname"] != unknown_classname


-@pytest.mark.parametrize(
-    "src, expected",
-    [
-        ("source::...ta_snow_setup.log*", {"*ta_snow_setup.log*"}),
-        ("source::...ta_snow_se...tup.log*", {"*ta_snow_se*tup.log*"}),
-        ("source::...ta_(aa)_snow_(bb)setup.log*", {"*ta_aa_snow_bbsetup.log*"}),
-        (
-            "source::...ta_(aa|cc)_snow_(bb|mm)setup.log*",
-            {
-                "*ta_aa_snow_bbsetup.log*",
-                "*ta_aa_snow_mmsetup.log*",
-                "*ta_cc_snow_bbsetup.log*",
-                "*ta_cc_snow_mmsetup.log*",
-            },
-        ),
-    ],
-)
-def test_get_list_of_sources(default_props_parser, src, expected):
-    assert set(list(default_props_parser.get_list_of_sources(src))) == expected
-
-
-def test_get_sourcetype_assignments(default_props_parser, field_mock):
-    pp = PropsProperty(NAME, "Value")
-    assert len(list(default_props_parser.get_sourcetype_assignments(pp))) == 1
-    field_mock.assert_called_once()
-    field_mock.assert_called_with({"name": NAME, "expected_values": ["Value"]})
-
-
-@pytest.mark.parametrize(
-    "prop, expected",
-    [
-        (PropsProperty(NAME, "(?P<_KEY_df>the rest"), []),
-        (PropsProperty(NAME, "(?<_VAL_df>the rest"), []),
-        (PropsProperty(NAME, "(?<to_extract>the rest"), ["to_extract"]),
-        (PropsProperty(NAME, "(?P'to_extract'the rest"), ["to_extract"]),
-        (
-            PropsProperty(NAME, f"(?P<to_extract>the rest In {FIELD1} "),
-            ["to_extract", FIELD1],
-        ),
-    ],
-)
-def test_get_extract_fields(default_props_parser, prop, expected):
-    assert list(default_props_parser.get_extract_fields(prop)) == expected
-
-
-@pytest.mark.parametrize(
-    "prop, expected",
-    [
-        (PropsProperty("key EVAL-val", f"EVAL-333 {FIELD1}"), ["val"]),
-        (PropsProperty("key EVAL-val", "null()"), []),
-    ],
-)
-def test_get_eval_fields(default_props_parser, prop, expected):
-    assert list(default_props_parser.get_eval_fields(prop)) == expected
-
-
-@pytest.mark.parametrize(
-    "prop, expected",
-    [
-        (
-            PropsProperty(
-                NAME,
-                f"{FIELD1} as {FIELD2} {FIELD8}",
-            ),
-            (FIELD1, FIELD2),
-        ),
-        (
-            PropsProperty(
-                NAME,
-                f"{FIELD1} AS {FIELD2} {FIELD8}",
-            ),
-            (FIELD1, FIELD2),
-        ),
-        (
-            PropsProperty(
-                NAME,
-                f"{FIELD1} ASNEW {FIELD2} {FIELD8} {FIELD5} asnew {FIELD6}",
-            ),
-            (FIELD1, FIELD2, FIELD5, FIELD6),
-        ),
-        (
-            PropsProperty(
-                NAME,
-                f"{FIELD1} {FIELD2} ASNEW {OUTPUTNEW} {FIELD3} asnew fieldx {FIELD4} AS {FIELD5} {FIELD6} {FIELD7} as {FIELD8}",
-            ),
-            (
-                FIELD2,
-                OUTPUTNEW,
-                FIELD3,
-                "fieldx",
-                FIELD4,
-                FIELD5,
-                FIELD7,
-                FIELD8,
-            ),
-        ),
-    ],
-)
-def test_get_fieldalias_fields(default_props_parser, prop, expected):
-    fieldaliases = default_props_parser.get_fieldalias_fields(prop)
-    assert len(fieldaliases) == len(expected)
-    assert all(field in fieldaliases for field in expected)
-
-
-def test_get_report_fields(default_props_parser, transforms_parser, mocker):
-    pp = PropsProperty(NAME, "Value, YYYYY, AAAAAA GGGGG UUUUUUU , IIIIII")
-    transforms_parser.get_transform_fields = MagicMock()
-    assert list(default_props_parser.get_report_fields(pp)) == [
-        ("Value", mocker.ANY),
-        ("YYYYY", mocker.ANY),
-        ("AAAAAA GGGGG UUUUUUU", mocker.ANY),
-        ("IIIIII", mocker.ANY),
-    ]
-    transforms_parser.get_transform_fields.assert_has_calls(
-        [call("Value"), call("YYYYY"), call("AAAAAA GGGGG UUUUUUU"), call("IIIIII")]
-    )
-
-
-def test_get_lookup_fields(default_props_parser):
-    pp = PropsProperty(
-        NAME,
-        f"{FIELD1} {FIELD2} {OUTPUTNEW} {FIELD3} {FIELD4} as {FIELD5} {OUTPUT} {FIELD6} as {FIELD7}",
-    )
-    default_props_parser.parse_lookup_str = MagicMock(
-        return_value={
-            INPUT_FIELDS: [FIELD2],
-            OUTPUT_FIELDS: [FIELD7],
-            LOOKUP_STANZA: FIELD1,
-        }
-    )
-    fields_list = default_props_parser.get_lookup_fields(pp)
-    assert len(fields_list) == 2
-    assert FIELD2 in fields_list
-    assert FIELD7 in fields_list
-
-
-def test_get_lookup_fields_no_output_fields(default_props_parser):
-    pp = PropsProperty(NAME, f"{FIELD1} {FIELD2} {FIELD3} {FIELD4} as {FIELD5}")
-    default_props_parser.parse_lookup_str = MagicMock(
-        return_value={
-            INPUT_FIELDS: [FIELD2, FIELD3, FIELD5],
-            OUTPUT_FIELDS: [],
-            LOOKUP_STANZA: FIELD1,
-        }
-    )
-    default_props_parser.transforms_parser.get_lookup_csv_fields = MagicMock(
-        return_value=("csv_field",)
-    )
-    fields_list = default_props_parser.get_lookup_fields(pp)
-    assert len(fields_list) == 4
-    assert FIELD2 in fields_list
-    assert FIELD3 in fields_list
-    assert FIELD5 in fields_list
-    assert "csv_field" in fields_list
-
-
-@pytest.mark.parametrize(
-    "lookup_str, expected",
-    [
-        (
-            f"{FIELD1} {FIELD2} {OUTPUTNEW} {FIELD3} {FIELD4} as {FIELD5}",
-            {
-                INPUT_FIELDS: [FIELD2],
-                OUTPUT_FIELDS: [FIELD3, FIELD5],
-                LOOKUP_STANZA: FIELD1,
-            },
-        ),
-        (
-            f"{FIELD1} {FIELD2} {FIELD3} {FIELD4} as {FIELD5}",
-            {
-                INPUT_FIELDS: [FIELD2, FIELD3, FIELD5],
-                OUTPUT_FIELDS: [],
-                LOOKUP_STANZA: FIELD1,
-            },
-        ),
-        (
-            f"{FIELD1} {FIELD2} {OUTPUT} {FIELD3} {FIELD4} as {FIELD5}",
-            {
-                INPUT_FIELDS: [FIELD2],
-                OUTPUT_FIELDS: [FIELD3, FIELD5],
-                LOOKUP_STANZA: FIELD1,
-            },
-        ),
-        (
-            f"{FIELD1} {FIELD2} {OUTPUTNEW} {FIELD3} {FIELD4} as {FIELD5} {FIELD6} as {FIELD7}",
-            {
-                INPUT_FIELDS: [FIELD2],
-                OUTPUT_FIELDS: [FIELD3, FIELD5, FIELD7],
-                LOOKUP_STANZA: FIELD1,
-            },
-        ),
-        (
-            f"{FIELD1} {FIELD2} {OUTPUTNEW} {FIELD3} {FIELD4} as {FIELD5} {OUTPUT} {FIELD6} as {FIELD7}",
-            {
-                INPUT_FIELDS: [FIELD2],
-                OUTPUT_FIELDS: [FIELD7],
-                LOOKUP_STANZA: FIELD1,
-            },
-        ),
-    ],
-)
-def test_parse_lookup_str(default_props_parser, lookup_str, expected):
-    default_props_parser.parse_lookup_str(lookup_str)
+def test_no_props_config_file():
+    props_parser = PropsParser("unused_path")
+    assert props_parser.props is None
diff --git a/tests/unit/tests_standard_lib/test_addon_parser/test_pytest_addon_init.py b/tests/unit/tests_standard_lib/test_addon_parser/test_pytest_addon_init.py
index 1bbef5fc5..037138f1a 100644
--- a/tests/unit/tests_standard_lib/test_addon_parser/test_pytest_addon_init.py
+++ b/tests/unit/tests_standard_lib/test_addon_parser/test_pytest_addon_init.py
@@ -3,7 +3,6 @@
 from unittest.mock import MagicMock, patch, PropertyMock

 EXAMPLE_PATH = "Example_Path"
-APP_RETURN_VALUE = "App_return_value"
 PROPS_RETURN_VALUE = "Props_return_value"
 TAGS_RETURN_VALUE = "Tags_return_value"
 EVENTTYPE_RETURN_VALUE = "Eventtype_return_value"
@@ -14,16 +13,13 @@
 @pytest.fixture
 def addonparser():
-    with patch("splunk_appinspect.App") as app_mock, patch(
-        f"{ADDON_PARSER_PATH}.props_parser.PropsParser"
-    ) as props_mock, patch(
+    with patch(f"{ADDON_PARSER_PATH}.props_parser.PropsParser") as props_mock, patch(
         f"{ADDON_PARSER_PATH}.tags_parser.TagsParser"
     ) as tags_mock, patch(
         f"{ADDON_PARSER_PATH}.eventtype_parser.EventTypeParser"
     ) as eventtype_mock, patch(
         f"{ADDON_PARSER_PATH}.savedsearches_parser.SavedSearchParser"
     ) as savedsearch_mock:
-        app_mock.return_value = APP_RETURN_VALUE
         props_mock.return_value = PROPS_RETURN_VALUE
         tags_mock.return_value = TAGS_RETURN_VALUE
         eventtype_mock.return_value = EVENTTYPE_RETURN_VALUE
@@ -37,7 +33,6 @@ def addonparser():
 def test_addonparser_init(addonparser):
     ap = addonparser(EXAMPLE_PATH)
     assert ap.splunk_app_path == EXAMPLE_PATH
-    assert ap.app == APP_RETURN_VALUE
     assert ap.props_parser == PROPS_RETURN_VALUE
     assert ap.tags_parser == TAGS_RETURN_VALUE
     assert ap.eventtype_parser == EVENTTYPE_RETURN_VALUE
diff --git a/tests/unit/tests_standard_lib/test_addon_parser/test_savedsearches_parser.py b/tests/unit/tests_standard_lib/test_addon_parser/test_savedsearches_parser.py
index 601d72afb..b1ea73e33 100644
--- a/tests/unit/tests_standard_lib/test_addon_parser/test_savedsearches_parser.py
+++ b/tests/unit/tests_standard_lib/test_addon_parser/test_savedsearches_parser.py
@@ -1,51 +1,25 @@
-import pytest
-from unittest.mock import patch, PropertyMock
+from unittest.mock import patch, mock_open

 from pytest_splunk_addon.standard_lib.addon_parser.savedsearches_parser import (
     SavedSearchParser,
 )

-output_to_build = {
-    "basic_search": {
-        "search": "_internal | stats count by sourcetype",
-    },
-    "search_earliest_time": {
-        "search": "index = _internal | stats count by sourcetype | outputlookup saved_search_data.csv",
-        "dispatch.earliest_time": "-4d",
-    },
-    "empty_search_latest_time": {
-        "search": "",
-        "dispatch.latest_time": "-1s",
-    },
-}
+TEST_SAVEDSEARCHES = """[basic_search]
+search = _internal | stats count by sourcetype

+[search_earliest_time]
+search = index = _internal | stats count by sourcetype | outputlookup saved_search_data.csv
+dispatch.earliest_time = -4d

-@pytest.fixture(scope="module")
-def parsed_output(build_parsed_output):
-    return build_parsed_output(output_to_build)
+[empty_search_latest_time]
+search =
+dispatch.latest_time = -1s

+[empty_search]
+"""

-@pytest.fixture()
-def parser_instance(parsed_output, parser):
-    return parser(SavedSearchParser, "get_config", parsed_output)
-
-
-def test_savedsearches(parser_instance):
-    assert list(parser_instance.savedsearches.sects.keys()) == [
-        "basic_search",
-        "search_earliest_time",
-        "empty_search_latest_time",
-    ]
-    parser_instance.app.get_config.assert_called_once_with("savedsearches.conf")
-
-
-def test_no_savedsearches_config_file(parser_instance):
-    parser_instance.app.get_config.side_effect = OSError
-    assert parser_instance.savedsearches is None
-
-
-def test_get_savedsearches(parser_instance):
-    out = list(parser_instance.get_savedsearches())
-    assert out == [
+def test_get_savedsearches():
+    expected_outputs = [
         {
             "stanza": "basic_search",
             "search": "_internal | stats count by sourcetype",
@@ -64,14 +38,21 @@ def test_get_savedsearches(parser_instance):
             "dispatch.earliest_time": "0",
             "dispatch.latest_time": "-1s",
         },
+        {
+            "stanza": "empty_search",
+            "search": 'index = "main"',
+            "dispatch.earliest_time": "0",
+            "dispatch.latest_time": "now",
+        },
     ]
+    savedsearches_parser = SavedSearchParser("unused_path")
+    with patch("builtins.open", new_callable=mock_open, read_data=TEST_SAVEDSEARCHES):
+        output = savedsearches_parser.get_savedsearches()
+        assert expected_outputs == list(output)


-def test_get_savedsearches_without_config_file(parser):
-    with patch.object(
-        SavedSearchParser, "savedsearches", new_callable=PropertyMock
-    ) as savedsearches_mock:
-        savedsearches_mock.return_value = None
-        parser_instance = parser(SavedSearchParser, "get_config", {})
-        output = [search for search in parser_instance.get_savedsearches() if search]
-        assert output == [], "savedsearches returned when no config file exists"
+def test_no_savedsearches_config_file():
+    savedsearches_parser = SavedSearchParser("unused_path")
+    with patch("builtins.open", mock_open()) as mock_file:
+        mock_file.side_effect = OSError()
+        assert savedsearches_parser.savedsearches is None
diff --git a/tests/unit/tests_standard_lib/test_addon_parser/test_transforms_parser.py b/tests/unit/tests_standard_lib/test_addon_parser/test_transforms_parser.py
index 88306d793..dba60a1ca 100644
--- a/tests/unit/tests_standard_lib/test_addon_parser/test_transforms_parser.py
+++ b/tests/unit/tests_standard_lib/test_addon_parser/test_transforms_parser.py
@@ -1,192 +1,84 @@
-import importlib
+import os
+
 import pytest
-from unittest.mock import patch, mock_open, PropertyMock
+from unittest.mock import patch, mock_open
 from collections import namedtuple

-with patch(
-    "pytest_splunk_addon.standard_lib.addon_parser.convert_to_fields",
-    lambda x: x,
-):
-    import pytest_splunk_addon.standard_lib.addon_parser.transforms_parser
-
-    importlib.reload(pytest_splunk_addon.standard_lib.addon_parser.transforms_parser)
-
 from pytest_splunk_addon.standard_lib.addon_parser.transforms_parser import (
     TransformsParser,
 )
-
-
-output_to_build = {
-    "ta_fiction_lookup": {
-        "filename": "ta_fiction_splund_component.csv",
-        "case_sensitive_match": "false",
-    },
-    "fiction-tsc-delim-fields": {
-        "DELIMS": '","',
-        "FIELDS": "day_id, event_id, end_time, start_time",
-    },
-    "fiction-tsc-sk-delim-format": {
-        "SOURCE_KEY": "event_id",
-        "DELIMS": '"="',
-        "FIELDS": "server_contact_mode, dest",
-    },
-    "fiction-tsc-sk-regex-format": {
-        "SOURCE_KEY": "component",
-        "REGEX": "(.+)",
-        "FORMAT": 'comp::"$1"',
-    },
-    "fiction-tsc-regex-format": {
-        "REGEX": "(\w*)=(.*)",
-        "FORMAT": "size1::$1 size2::$2",
-    },
-    "fiction-tsc-regex": {"REGEX": "group=(?[^,]+)"},
-    "fiction-tsc-regex-key-n": {
-        "REGEX": "(?:^| )(?<_KEY_1>XXXXXX[^=]*)=(?! )(?<_VAL_1>.+?)(?=(?: [^ ]*(?.+?)(?=(?: [^ ]*(?.+?)(?=(?: [^ ]*(?
diff --git a/tests/unit/tests_standard_lib/test_addon_parser/testdata/default/props.conf b/tests/unit/tests_standard_lib/test_addon_parser/testdata/default/props.conf
new file mode 100644
--- /dev/null
+++ b/tests/unit/tests_standard_lib/test_addon_parser/testdata/default/props.conf
+EXTRACT-extract_fields1 = (?P<to_extract>the rest In field1
+EXTRACT-extract_fields2 = (?P'to_extract'the rest
+EXTRACT-extract_fields3 = (?<to_extract>the rest
+EXTRACT-extract_fields4 = (?<_VAL_df>the rest
+EXTRACT-extract_fields5 = (?P<_KEY_df>the rest
+
+[host::not-supported]
+EVAL-not = supported
+
+[source::...setup.log*]
+EVAL-not = used
+
+[source::...set...up.log*]
+EVAL-not = used
+
+[source::...se_(tT)_a_(pP).log*]
+EVAL-not = used
+
+[source::...s_(e|E)_t_(a|A)p.log*]
+EVAL-not = used
+
+[sourcetype:unknown-class-name]
+UNKNOWN_CLASS_NAME-key = value
diff --git a/tests/unit/tests_standard_lib/test_addon_parser/testdata/default/transforms.conf b/tests/unit/tests_standard_lib/test_addon_parser/testdata/default/transforms.conf
new file mode 100644
index 000000000..0e50afff2
--- /dev/null
+++ b/tests/unit/tests_standard_lib/test_addon_parser/testdata/default/transforms.conf
@@ -0,0 +1,34 @@
+[ta_fiction_lookup]
+filename = ta_fiction_splunkd_component.csv
+case_sensitive_match = false
+
+[ta_lookup_does_not_exits]
+filename = does_not_exist_lookup.csv
+case_sensitive_match = false
+
+[fiction-tsc-delim-fields]
+DELIMS = ","
+FIELDS = day_id, event_id, end_time, start_time
+
+[fiction-tsc-sk-delim-format]
+SOURCE_KEY = event_id
+DELIMS = "="
+FIELDS = server_contact_mode, dest
+
+[fiction-tsc-sk-regex-format]
+SOURCE_KEY = component
+REGEX = (.+)
+FORMAT = comp::"$1"
+
+[fiction-tsc-regex-format]
+REGEX = (\w*)=(.*)
+FORMAT = size1::$1 size2::$2
+
+[fiction-tsc-regex]
+REGEX = group=(?[^,]+)
+
+[fiction-tsc-regex-key-n]
+REGEX = (?:^| )(?<_KEY_1>XXXXXX[^=]*)=(?! )(?<_VAL_1>.+?)(?=(?: [^ ]*(?.+?)(?=(?: [^ ]*(?.+?)(?=(?: [^ ]*(?
-                "input_type": "<> #REVIEW : Update metadata as per addon's requirement",
-                "host_type": "<> #REVIEW : Update metadata as per addon's requirement",
-                "sourcetype_to_search": "<> "
-                "#REVIEW : Update metadata as per addon's requirement",
-                "timestamp_type": "<> "
-                "#REVIEW : Update metadata as per addon's requirement",
-                "sample_count": "1 # REVIEW : Please check for the events per stanza "
-                "and update sample_count accordingly",
-                "index": "main #REVIEW : Update metadata as per addon's requirement",
-                "search": "index=_internal sourcetype=splunkd",
-                "source": "pytest-splunk-addon:<> #REVIEW : Update metadata as per addon's requirement",
-                "tokens": {
-                    "101": {
-                        "field": "_time # REVIEW : Check if the field is extracted from the events, "
-                        "else remove this field parameter",
-                        "token": "##replacement_token##",
-                        "replacementType": "timestamp",
-                        "replacement": 'list["a", "b"]',
-                    },
-                },
-            },
-        },
-        ),
-        (
-            {
-                "sample_file.samples": {
-                    "tokens": {
-                        "102": {
-                            "token": "##Dest##",
-                            "replacementType": "static",
-                            "replacement": "src",
-                        },
-                    },
-                    "source": "utility.log",
-                },
-            },
-            {
-                "sample_file.samples": {
-                    "input_type": "<> #REVIEW : Update metadata as per addon's requirement",
-                    "host_type": "<> #REVIEW : Update metadata as per addon's requirement",
-                    "sourcetype_to_search": "<> "
-                    "#REVIEW : Update metadata as per addon's requirement",
-                    "timestamp_type": "<> #REVIEW : Update metadata as per addon's requirement",
-                    "source": "utility.log",
-                    "tokens": {
-                        "102": {
-                            "field": "dest # REVIEW : Check if the field is extracted from the events, "
-                            "else remove this field parameter",
-                            "token": "##Dest##",
-                            "replacementType": "random",
-                            "replacement": "dest[] "
-                            "# REVIEW : Possible value in list : ['ipv4', 'ipv6', 'host', 'fqdn']",
-                        },
-                    },
-                },
-            },
-        ),
-        (
-            {
-                "sample_file.samples": {
-                    "tokens": {
-                        "103": {
-                            "token": "##token_user_file##",
-                            "replacementType": "file",
-                            "replacement": "$SPLUNK_HOME/fake_path/samples/user_name.sample:1",
-                        },
-                    },
-                    "source": "user.log",
-                },
-            },
-            {
-                "sample_file.samples": {
-                    "input_type": "<> #REVIEW : Update metadata as per addon's requirement",
-                    "host_type": "<> "
-                    "#REVIEW : Update metadata as per addon's requirement",
-                    "sourcetype_to_search": "<> "
-                    "#REVIEW : Update metadata as per addon's requirement",
-                    "timestamp_type": "<> "
-                    "#REVIEW : Update metadata as per addon's requirement",
-                    "source": "user.log",
-                    "tokens": {
-                        "103": {
-                            "field": "user # REVIEW : Please check if it can be replace with user rule",
-                            "token": "##token_user_file##",
-                            "replacementType": "random",
-                            "replacement": "file[$SPLUNK_HOME/fake_path/samples/user_name.sample:1] "
-                            "# REVIEW : Possible value in list : "
-                            "['name', 'email', 'domain_user', 'distinquised_name']",
-                        },
-                    },
-                },
-            },
-        ),
-        (
-            {
-                "sample_file.samples": {
-                    "tokens": {
-                        "104": {
-                            "token": "##token_email_log##",
-                            "replacementType": "file",
-                            "replacement": "SA-Eventgen/email_address.sample:2",
-                        },
-                    },
-                    "source": "email.log",
-                },
-            },
-            {
-                "sample_file.samples": {
-                    "input_type": "<> #REVIEW : Update metadata as per addon's requirement",
-                    "host_type": "<> #REVIEW : Update metadata as per addon's requirement",
-                    "sourcetype_to_search": "<> "
-                    "#REVIEW : Update metadata as per addon's requirement",
-                    "timestamp_type": "<> #REVIEW : Update metadata as per addon's requirement",
-                    "source": "email.log",
-                    "tokens": {
-                        "104": {
-                            "token": "##token_email_log##",
-                            "replacementType": "random # REVIEW : Please check if it can be replace with email rule",
-                            "replacement": "email # REVIEW : Please check if it can be replace with email rule",
-                        },
-                    },
-                },
-            },
-        ),
-    ],
-)
-def test_update_eventgen_stanzas(
-    update_eventgen_instance, eventgen_dict, expected_output
-):
-    out = update_eventgen_instance.update_eventgen_stanzas(eventgen_dict)
-    assert out == expected_output
-
-
-def test_create_new_eventgen(
-    open_mock,
-    eventgen_mock,
-    build_parsed_output,
-    update_eventgen_instance,
-    configuration_file,
-):
-    eventgen_mock.return_value = configuration_file(["## Splunk", "## Data"], [], [])
-    updated_d = {
-        "sample_file.samples": {
-            "add_comment": True,
-            "source": "utility.log",
-            "tokens": {
-                "102": {
-                    "field": "dest",
-                    "token": "##Dest##",
-                    "replacementType": "random",
-                    "replacement": "dest[]",
-                },
-            },
-        },
-    }
-    update_eventgen_instance.create_new_eventgen(updated_d, "fake_path")
-    open_mock.assert_called_once_with("fake_path", "w")
-    open_mock().assert_has_calls(
-        [
-            call.__enter__(),
-            call.write("## Splunk\n"),
-            call.write("## Data\n"),
-            call.write("\n[sample_file.samples]\n"),
-            call.write("## Stanza gets metadata from main stanza\n"),
-            call.write("source = utility.log\n"),
-            call.write("\n"),
-            call.write("token.102.token = ##Dest##\n"),
-            call.write("token.102.replacementType = random\n"),
-            call.write("token.102.replacement = dest[]\n"),
-            call.write("token.102.field = dest\n"),
-            call.write("\n"),
-            call.__exit__(None, None, None),
-        ]
-    )
-
-
-def test_main(argparse_mock, update_eventgen_mock):
-    update_eventgen_mock.return_value = update_eventgen_mock
-    update_eventgen_mock.get_eventgen_stanzas.return_value = {"fake": "init"}
-    update_eventgen_mock.update_eventgen_stanzas.return_value = {"fake": "updated"}
-    args = namedtuple("Namespace", ["addon_path", "new_conf_path"])
-    argparse_mock.parse_args.return_value = args(
-        "fake_addon_path", "fake_new_conf_path"
-    )
-    main()
-    update_eventgen_mock.assert_has_calls(
-        [
-            call("fake_addon_path"),
-            call.get_eventgen_stanzas(),
-            call.update_eventgen_stanzas({"fake": "init"}),
-            call.create_new_eventgen({"fake": "updated"}, "fake_new_conf_path"),
-        ]
-    )
diff --git a/tests/unit/tests_standard_lib/tests_sample_generation/test_data/with_samples/pytest-splunk-addon-data.conf b/tests/unit/tests_standard_lib/tests_sample_generation/test_data/with_samples/pytest-splunk-addon-data.conf
new file mode 100644
index 000000000..c8b2aea29
--- /dev/null
+++ b/tests/unit/tests_standard_lib/tests_sample_generation/test_data/with_samples/pytest-splunk-addon-data.conf
@@ -0,0 +1,44 @@
+[test1.samples]
+sourcetype = test:sourcetype
+source = source://test
+input_type = modinput
+host_type = plugin
+sourcetype_to_search = test:sourcetype
+timestamp_type = event
+sample_count = 1
+
+token.0.token = ##Timestamp##
+token.0.replacementType = timestamp
+token.0.replacement = %Y-%m-%dT%H:%M:%S
+token.0.field = _time
+
+token.1.token = ##user##
+token.1.replacementType = random
+token.1.replacement = list["user1@email.com","user2@email.com"]
+
+token.2.token = ##ip##
+token.2.replacementType = random
+token.2.replacement = src["ipv4"]
+token.2.field = src
+
+token.3.token = ##number##
+token.3.replacementType = random
+token.3.replacement = integer[100000000000000000000:999999999999999999999]
+
+[test2.samples]
+sourcetype = test:sourcetype
+source = source://test:text
+input_type = modinput
+host_type = plugin
+sourcetype_to_search = test:sourcetype
+timestamp_type = event
+sample_count = 1
+
+token.1.token = ##user##
+token.1.replacementType = random
+token.1.replacement = list["user1@email.com","user2@email.com"]
+
+token.2.token = ##ip##
+token.2.replacementType = random
+token.2.replacement = src["ipv4"]
+token.2.field = src
diff --git a/tests/unit/tests_standard_lib/tests_sample_generation/test_data/with_samples/samples/test1.samples b/tests/unit/tests_standard_lib/tests_sample_generation/test_data/with_samples/samples/test1.samples
new file mode 100644
index 000000000..97c8c35a9
--- /dev/null
+++ b/tests/unit/tests_standard_lib/tests_sample_generation/test_data/with_samples/samples/test1.samples
@@ -0,0 +1 @@
+{"number": "##number##", "ip": "##ip##", "user": "##user##", "time": "##Timestamp##"}
\ No newline at end of file
diff --git a/tests/unit/tests_standard_lib/tests_sample_generation/test_data/with_samples/samples/test2.samples b/tests/unit/tests_standard_lib/tests_sample_generation/test_data/with_samples/samples/test2.samples
new file mode 100644
index 000000000..2e73e9bb2
--- /dev/null
+++ b/tests/unit/tests_standard_lib/tests_sample_generation/test_data/with_samples/samples/test2.samples
@@ -0,0 +1 @@
+Src ip address ##ip## for user ##user##
\ No newline at end of file
diff --git a/tests/unit/tests_standard_lib/tests_sample_generation/test_data/without_samples/pytest-splunk-addon-data.conf b/tests/unit/tests_standard_lib/tests_sample_generation/test_data/without_samples/pytest-splunk-addon-data.conf
new file mode 100644
index 000000000..c8b2aea29
--- /dev/null
+++ b/tests/unit/tests_standard_lib/tests_sample_generation/test_data/without_samples/pytest-splunk-addon-data.conf
@@ -0,0 +1,44 @@
+[test1.samples]
+sourcetype = test:sourcetype
+source = source://test
+input_type = modinput
+host_type = plugin
+sourcetype_to_search = test:sourcetype
+timestamp_type = event
+sample_count = 1
+
+token.0.token = ##Timestamp##
+token.0.replacementType = timestamp
+token.0.replacement = %Y-%m-%dT%H:%M:%S
+token.0.field = _time
+
+token.1.token = ##user##
+token.1.replacementType = random
+token.1.replacement = list["user1@email.com","user2@email.com"]
+
+token.2.token = ##ip##
+token.2.replacementType = random
+token.2.replacement = src["ipv4"]
+token.2.field = src
+
+token.3.token = ##number##
+token.3.replacementType = random
+token.3.replacement = integer[100000000000000000000:999999999999999999999]
+
+[test2.samples]
+sourcetype = test:sourcetype
+source = source://test:text
+input_type = modinput
+host_type = plugin
+sourcetype_to_search = test:sourcetype
+timestamp_type = event
+sample_count = 1
+
+token.1.token = ##user##
+token.1.replacementType = random
+token.1.replacement = list["user1@email.com","user2@email.com"]
+
+token.2.token = ##ip##
+token.2.replacementType = random
+token.2.replacement = src["ipv4"]
+token.2.field = src
diff --git a/tests/unit/tests_standard_lib/tests_sample_generation/test_eventgen_parser.py b/tests/unit/tests_standard_lib/tests_sample_generation/test_eventgen_parser.py
deleted file mode 100644
index bce71d3ad..000000000
--- a/tests/unit/tests_standard_lib/tests_sample_generation/test_eventgen_parser.py
+++ /dev/null
@@ -1,184 +0,0 @@
-import pytest
-from unittest.mock import MagicMock, patch, PropertyMock, call
-from collections import namedtuple
-from splunk_appinspect import App
-
-from pytest_splunk_addon.standard_lib.sample_generation.eventgen_parser import (
-    EventgenParser,
-)
-
-ADDON_PATH = "/add/on/path"
-CONFIG_PATH = "/config/path"
-PARENT_DIR = "pardir"
-DATA_CONFIG = "data_config"
-FILE_1 = "file_1"
-FILE_2 = "file_2"
-VALUE_1 = "value_1"
-VALUE_2 = "value_2"
-OPTION_1 = "option_1"
-SAMPLE_STANZA = "sample_stanza"
-PTS = "pts"
-
-sects = namedtuple("Sects", ["sects"])
-
-
-def get_exists_mock_func(path):
-    def func(input):
-        if input == path:
-            return True
-        return False
-
-    return func
-
-
-class AttrDict(dict):
-    def __init__(self, *args, **kwargs):
-        super(AttrDict, self).__init__(*args, **kwargs)
-        self.__dict__ = self
-
-
-class TestEventgenParser:
-    @pytest.fixture(scope="session")
-    def eventgen_parser(self):
-        def func(*args):
-            if not args:
-                return EventgenParser("path")
-            return EventgenParser(*args)
-
-        return func
-
-    def test_init(self, eventgen_parser):
-        ep = eventgen_parser(ADDON_PATH, CONFIG_PATH)
-        assert ep.config_path == CONFIG_PATH
-        assert ep.addon_path == ADDON_PATH
-        assert ep.match_stanzas == set()
-
-    @pytest.mark.parametrize(
-        "exist_path",
-        [
-            (f"{CONFIG_PATH}/samples"),
-            (f"{CONFIG_PATH}/{PARENT_DIR}/samples"),
-            (f"{ADDON_PATH}/samples"),
-        ],
-    )
-    def test_path_to_samples(self, eventgen_parser, exist_path):
-        ep = eventgen_parser(ADDON_PATH, CONFIG_PATH)
-        path = exist_path
-        with patch("os.path.exists", get_exists_mock_func(path)), patch(
-            "os.pardir", PARENT_DIR
-        ), patch("os.path.abspath", lambda x: x), patch("os.sep", "/"):
-            assert ep.path_to_samples == path
-
-    @pytest.mark.parametrize(
-        "exist_path, expected, args, kwargs",
-        [
-            (
-                f"{CONFIG_PATH}/pytest-splunk-addon-data.conf",
-                DATA_CONFIG,
-                ("pytest-splunk-addon-data.conf",),
-                {"dir": "relpath_/config/path"},
-            ),
-            (
-                f"{CONFIG_PATH}/eventgen.conf",
-                DATA_CONFIG,
-                ("eventgen.conf",),
-                {"dir": "relpath_/config/path"},
-            ),
-            (f"{CONFIG_PATH}/other", DATA_CONFIG, ("eventgen.conf",), {}),
-        ],
-    )
-    def test_eventgen(self, eventgen_parser, exist_path, expected, args, kwargs):
-        ep = eventgen_parser(ADDON_PATH, CONFIG_PATH)
-        path = exist_path
-        app_mock = MagicMock(spec=App)
-        app_mock.get_config.return_value = DATA_CONFIG
-        app_mock.get_filename.return_value = "filename"
-        with patch("os.path.exists", get_exists_mock_func(path)), patch(
-            "os.path.relpath", lambda x, _: f"relpath_{x}"
-        ), patch("os.sep", "/"), patch.object(ep, "_app", app_mock):
-            assert ep.eventgen == expected
-            app_mock.get_config.assert_called_with(*args, **kwargs)
-
-    def test_eventgen_os_error(self, eventgen_parser):
-        ep = eventgen_parser(ADDON_PATH, CONFIG_PATH)
-        with patch("os.path.exists", MagicMock(side_effect=OSError)):
-            with pytest.raises(
-                FileNotFoundError,
-                match="pytest-splunk-addon-data.conf/eventgen.conf not Found",
-            ):
-                ep.eventgen
-
-    def test_get_sample_stanzas(self):
-        with patch.object(
-            EventgenParser,
-            "get_eventgen_stanzas",
-            MagicMock(return_value=AttrDict(file_1=VALUE_1, file_2=VALUE_2)),
-        ), patch("os.sep", "/"), patch.object(
-            EventgenParser,
-            "path_to_samples",
-            new_callable=PropertyMock(return_value=PTS),
-        ), patch(
-            "pytest_splunk_addon.standard_lib.sample_generation.eventgen_parser.SampleStanza",
-            MagicMock(return_value=SAMPLE_STANZA),
-        ) as sample_stanza_mock:
-            assert list(
-                EventgenParser(ADDON_PATH, CONFIG_PATH).get_sample_stanzas()
-            ) == [SAMPLE_STANZA, SAMPLE_STANZA]
-            sample_stanza_mock.assert_has_calls(
-                [call("pts/file_1", "value_1"), call("pts/file_2", "value_2")]
-            )
-
-    def test_get_eventgen_stanzas(self):
-        with patch.object(
-            EventgenParser,
-            "path_to_samples",
-            new_callable=PropertyMock(return_value=""),
-        ), patch.object(
-            EventgenParser,
-            "eventgen",
-            new_callable=PropertyMock(
-                return_value=sects(
-                    {
-                        FILE_1: AttrDict(
-                            options=AttrDict(
-                                option1=AttrDict(name="token.option.1", value=VALUE_1)
-                            )
-                        ),
-                        FILE_2: AttrDict(
-                            options=AttrDict(
-                                option2=AttrDict(name=OPTION_1, value=VALUE_2)
-                            )
-                        ),
-                    }
-                )
-            ),
-        ), patch(
-            "os.path.exists", MagicMock(return_value=True)
-        ), patch(
-            "os.listdir", MagicMock(return_value=[FILE_1, FILE_2, "file_3"])
-        ):
-            assert EventgenParser(ADDON_PATH, CONFIG_PATH).get_eventgen_stanzas() == {
-                FILE_1: {"tokens": {"file_1_option": {"1": VALUE_1}}},
-                FILE_2: {OPTION_1: VALUE_2, "tokens": {}},
-            }
-
-    def test_check_samples(self, caplog):
-        with patch.object(
-            EventgenParser,
-            "path_to_samples",
-            new_callable=PropertyMock(return_value=""),
-        ), patch("os.path.exists", MagicMock(return_value=True)), patch.object(
-            EventgenParser,
-            "eventgen",
-            new_callable=PropertyMock(return_value=sects([FILE_1, FILE_2])),
-        ):
-            assert EventgenParser(ADDON_PATH, CONFIG_PATH).check_samples() is None
-            assert all(
-                message in caplog.messages
-                for message in [
-                    "No sample file found for stanza : file_1",
-                    "Sample file found for stanza : file_1",
-                    "No sample file found for stanza : file_2",
-                    "Sample file found for stanza : file_2",
-                ]
-            )
diff --git a/tests/unit/tests_standard_lib/tests_sample_generation/test_pytest_splunk_addon_data_parser.py b/tests/unit/tests_standard_lib/tests_sample_generation/test_pytest_splunk_addon_data_parser.py
new file mode 100644
index 000000000..3c7ba9290
--- /dev/null
+++ b/tests/unit/tests_standard_lib/tests_sample_generation/test_pytest_splunk_addon_data_parser.py
@@ -0,0 +1,107 @@
+import os
+import tempfile
+
+import pytest
+
+from pytest_splunk_addon.standard_lib.sample_generation import (
+    PytestSplunkAddonDataParser,
+)
+
+
+def test_psa_data_when_no_config():
+    with tempfile.TemporaryDirectory() as tempdir:
+        with pytest.raises(FileNotFoundError):
+            psa_data_parser = PytestSplunkAddonDataParser(tempdir, tempdir)
+            _ = psa_data_parser.psa_data
+
+
+def test_path_to_samples():
+    path = os.path.join(os.path.dirname(__file__), "test_data")
+    psa_data_parser = PytestSplunkAddonDataParser(
+        path,
+        path,
+    )
+    assert os.path.join(path, "samples") == psa_data_parser._path_to_samples()
+
+
+def test_get_psa_data_stanzas_with_samples():
+    path = os.path.join(os.path.dirname(__file__), "test_data", "with_samples")
+    psa_data_parser = PytestSplunkAddonDataParser(
+        path,
+        path,
+    )
+    expected_result = {
+        "test1.samples": {
+            "sourcetype": "test:sourcetype",
+            "source": "source://test",
+            "input_type": "modinput",
+            "host_type": "plugin",
+            "sourcetype_to_search": "test:sourcetype",
+            "timestamp_type": "event",
+            "sample_count": "1",
+            "tokens": {
+                "test1.samples_0": {
+                    "token": "##Timestamp##",
+                    "replacementType": "timestamp",
+                    "replacement": "%Y-%m-%dT%H:%M:%S",
+                    "field": "_time",
+                },
+                "test1.samples_1": {
+                    "token": "##user##",
+                    "replacementType": "random",
+                    "replacement": 'list["user1@email.com","user2@email.com"]',
+                },
+                "test1.samples_2": {
+                    "token": "##ip##",
+                    "replacementType": "random",
+                    "replacement": 'src["ipv4"]',
+                    "field": "src",
+                },
+                "test1.samples_3": {
+                    "token": "##number##",
+                    "replacementType": "random",
+                    "replacement": "integer[100000000000000000000:999999999999999999999]",
+                },
+            },
+        },
+        "test2.samples": {
+            "sourcetype": "test:sourcetype",
+            "source": "source://test:text",
+            "input_type": "modinput",
+            "host_type": "plugin",
+            "sourcetype_to_search": "test:sourcetype",
+            "timestamp_type": "event",
+            "sample_count": "1",
+            "tokens": {
+                "test2.samples_1": {
+                    "token": "##user##",
+                    "replacementType": "random",
+                    "replacement": 'list["user1@email.com","user2@email.com"]',
+                },
+                "test2.samples_2": {
+                    "token": "##ip##",
+                    "replacementType": "random",
+                    "replacement": 'src["ipv4"]',
+                    "field": "src",
+                },
+            },
+        },
+    }
+    result = psa_data_parser._get_psa_data_stanzas()
+    assert expected_result == result
+
+
+def test_get_sample_stanzas_without_samples(caplog):
+    with tempfile.TemporaryDirectory() as tempdir:
+        samples_path = os.path.join(tempdir, "samples")
+        os.mkdir(samples_path)
+        config_path = os.path.join(
+            os.path.dirname(__file__), "test_data", "without_samples"
+        )
+        parser = PytestSplunkAddonDataParser(
+            tempdir,
+            config_path,
+        )
+        parser.get_sample_stanzas()
+        assert "No sample file found for stanza : test1.samples" in caplog.messages
+        assert "No sample file found for stanza : test2.samples" in caplog.messages
diff --git a/tests/unit/tests_standard_lib/tests_sample_generation/test_rule.py b/tests/unit/tests_standard_lib/tests_sample_generation/test_rule.py
index f1f60a367..2162031c9 100644
--- a/tests/unit/tests_standard_lib/tests_sample_generation/test_rule.py
+++ b/tests/unit/tests_standard_lib/tests_sample_generation/test_rule.py
@@ -9,7 +9,7 @@
 TOKEN_DATA = "token_data"
 FIELD = "Field"
-EVENTGEN_PARAMS = {"eventgen_params": "eventgen_params_value"}
+PSA_DATA_PARAMS = {"psa_data_params": "psa_data_params_value"}
 SAMPLE_PATH = "sample_path"
 SAMPLE_NAME = "Sample_name"
 RETURN_VALUE = "Return_value"
@@ -138,7 +138,7 @@ def func(class_to_mock):
         (
             "TimeRule",
             token(replacement_type="timestamp"),
-            [token(replacement_type="timestamp"), EVENTGEN_PARAMS],
+            [token(replacement_type="timestamp"), PSA_DATA_PARAMS],
             {},
         ),
         (
@@ -175,14 +175,14 @@
 )
 def test_parse_rule(self, rule, mock_class, rule_name, _token, params, params_dict):
     static_mock = mock_class(rule_name)
-    assert rule.parse_rule(_token, EVENTGEN_PARAMS, SAMPLE_PATH) == RETURN_VALUE
+    assert rule.parse_rule(_token, PSA_DATA_PARAMS, SAMPLE_PATH) == RETURN_VALUE
     static_mock.assert_called_once_with(*params, **params_dict)

 def test_parse_rule_other_repl_type(self, rule):
     assert (
         rule.parse_rule(
             token(replacement_type="other", replacement=DEST),
-            EVENTGEN_PARAMS,
+            PSA_DATA_PARAMS,
             SAMPLE_PATH,
         )
         is None
@@ -707,7 +707,7 @@ class TestTimeRule:
     def test_replace(self, event, earliest, latest, expected):
         eve = event()
         rule = get_rule_class(TIME)(
-            token(), eventgen_params={"earliest": earliest, "latest": latest}
+            token(), psa_data_params={"earliest": earliest, "latest": latest}
         )
         with get_patch("time_parse.convert_to_time", mocked_datetime), get_patch(
             "randint", 1439905910
@@ -743,7 +743,7 @@ def test_replace_local_timezone(self, event, timezone, replacement, expected):
         eve = event()
         rule = get_rule_class(TIME)(
             token(replacement=replacement),
-            eventgen_params={"earliest": "24h", "latest": "6h", "timezone": timezone},
+            psa_data_params={"earliest": "24h", "latest": "6h", "timezone": timezone},
         )
         with get_patch("time_parse.convert_to_time", mocked_datetime), get_patch(
             "randint", 1616801099
diff --git a/tests/unit/tests_standard_lib/tests_sample_generation/test_sample_generator.py b/tests/unit/tests_standard_lib/tests_sample_generation/test_sample_generator.py
index 3c24fa68a..2d5771de0 100644
--- a/tests/unit/tests_standard_lib/tests_sample_generation/test_sample_generator.py
+++ b/tests/unit/tests_standard_lib/tests_sample_generation/test_sample_generator.py
@@ -25,16 +25,17 @@ def test_get_samples(self):
         tks_2 = "tokenized_sample_2"
         sample_mock = MagicMock()
         sample_mock.get_tokenized_events.return_value = [tks_1, tks_2]
-        eventgen_mock = MagicMock()
-        eventgen_mock.get_sample_stanzas = MagicMock(
+        psa_data_mock = MagicMock()
+        psa_data_mock.get_sample_stanzas = MagicMock(
             return_value=[sample_mock, sample_mock]
         )
         with patch(
-            f"{MODULE_PATH}.EventgenParser", MagicMock(return_value=eventgen_mock)
+            f"{MODULE_PATH}.PytestSplunkAddonDataParser",
+            MagicMock(return_value=psa_data_mock),
         ), patch(f"{MODULE_PATH}.SampleStanza", MagicMock()) as sample_stanza_mock:
             sample_stanza_mock.get_raw_events = ["event_1", "event_2"]
             sample_stanza_mock.tokenize = lambda x, y: (x, y)
-            eventgen_mock.conf_name = CONFIG_PATH
+            psa_data_mock.conf_name = CONFIG_PATH
             sg = SampleGenerator(ADDON_PATH)
             assert list(sg.get_samples()) == [tks_1, tks_2, tks_1, tks_2]
diff --git a/tests/unit/tests_standard_lib/tests_sample_generation/test_sample_stanza.py b/tests/unit/tests_standard_lib/tests_sample_generation/test_sample_stanza.py
index 9e309ddc0..1d312e528 100644
--- a/tests/unit/tests_standard_lib/tests_sample_generation/test_sample_stanza.py
+++ b/tests/unit/tests_standard_lib/tests_sample_generation/test_sample_stanza.py
@@ -59,14 +59,14 @@ class TestSampleStanza:
     @pytest.fixture
     def sample_stanza(self):
         def func(
-            eventgen_params={"tokens": tokens},
+            psa_data_params={"tokens": tokens},
             rule_mock_value="Test_rule",
         ):
             with patch.object(os, "sep", "/"), patch(
                 "pytest_splunk_addon.standard_lib.sample_generation.sample_stanza.Rule",
                 MagicMock(return_value=rule_mock_value),
             ):
-                ss = SampleStanza(SAMPLE_PATH, eventgen_params)
+                ss = SampleStanza(SAMPLE_PATH, psa_data_params)
                 return ss

         return func
@@ -98,7 +98,7 @@ def test_get_tokenized_events(self, sample_stanza):
             assert m.key_fields == "three"

     @pytest.mark.parametrize(
-        "eventgen_params, conf_name, expected",
+        "psa_data_params, conf_name, expected",
         [
             (
                 {"tokens": tokens},
@@ -107,7 +107,7 @@
             ),
             (
                 {"tokens": tokens, "count": "1"},
-                "eventgen",
+                "psa_data",
                 [rule_obj({"breaker": 1, "expected_event_count": 1})],
             ),
             (
@@ -115,15 +115,10 @@
                 "som",
                 [rule_obj({"breaker": 1, "sample_count": 1})],
             ),
-            (
-                {"tokens": tokens, "count": "0"},
-                "eventgen",
-                [rule_obj({"breaker": 1, "expected_event_count": 250})] * 250,
-            ),
         ],
     )
-    def test_tokenize(self, sample_stanza, eventgen_params, conf_name, expected):
-        ss = sample_stanza(eventgen_params=eventgen_params)
+    def test_tokenize(self, sample_stanza, psa_data_params, conf_name, expected):
+        ss = sample_stanza(psa_data_params=psa_data_params)
         ss._get_raw_sample = MagicMock(return_value=[rule_obj({})])
         rule = MagicMock()
         rule.apply.return_value = [rule_obj({"breaker": 1})]
@@ -223,7 +218,7 @@ def test_get_eventmetadata(self, sample_stanza):
     )
     def test_break_events(self, sample_stanza, sample_raw, expected):
         ss = sample_stanza(
-            eventgen_params={
+            psa_data_params={
                 "tokens": tokens,
                 "breaker": "aa",
             }
@@ -231,7 +226,7 @@ def test_break_events(self, sample_stanza, sample_raw, expected):
         assert ss.break_events(sample_raw) == expected

     @pytest.mark.parametrize(
-        "eventgen_params, sample_event_params",
+        "psa_data_params, sample_event_params",
         [
             (
                 {
@@ -251,8 +246,8 @@
             *get_params_for_get_raw_sample(),
         ],
     )
-    def test_get_raw_sample(self, sample_stanza, eventgen_params, sample_event_params):
-        ss = sample_stanza(eventgen_params=eventgen_params)
+    def test_get_raw_sample(self, sample_stanza, psa_data_params, sample_event_params):
+        ss = sample_stanza(psa_data_params=psa_data_params)
         data = "sample_raw"
         with patch("builtins.open", mock_open(read_data=data)), patch(
             "pytest_splunk_addon.standard_lib.sample_generation.sample_stanza.SampleEvent",
@@ -263,7 +258,7 @@

     def test_get_raw_sample_empty_event(self, sample_stanza):
         ss = sample_stanza(
-            eventgen_params={
+            psa_data_params={
                 "tokens": tokens,
                 "input_type": "file_monitor",
             }
@@ -278,7 +273,7 @@

     def test_break_events_exception(self, sample_stanza, caplog):
         ss = sample_stanza(
-            eventgen_params={
+            psa_data_params={
                 "tokens": {
                     "token_1": {"replacementType": "all"},
                     "token_2": {"replacementType": "random"},