diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 29bce69c..88a44dc5 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -17,32 +17,41 @@ jobs: - windows-latest # - macos-latest python-version: - - 3.8 - 3.9 - "3.10" - "3.11" - "3.12" + - "3.13" steps: + # https://github.com/actions/setup-python - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install tox tox-gh-actions + # https://github.com/astral-sh/setup-uv + - name: Install uv + uses: astral-sh/setup-uv@v6 + with: + enable-cache: true + - name: pin python version + run: uv python pin ${{ matrix.python-version }} + - name: Install tox and its plugins + run: uv tool install tox --with tox-uv --with tox-gh-actions + - name: Install some additional dependencies + run: uv pip install --system coveralls - name: Test with tox run: tox env: PLATFORM: ${{ matrix.platform }} - - uses: codecov/codecov-action@v3 + - uses: codecov/codecov-action@v4 # .. seealso:: https://github.com/marketplace/actions/codecov#usage + # .. seealso:: https://docs.codecov.com/reference/overview with: - # token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos + token: ${{ secrets.CODECOV_TOKEN }} # files: ./coverage1.xml,./coverage2.xml # optional # flags: unittests # optional # name: codecov-umbrella # optional - fail_ci_if_error: true # optional (default = false) + #fail_ci_if_error: true # optional (default = false) verbose: true # optional (default = false) diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 00000000..adf85d7e --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,14 @@ +# .. seealso:: https://docs.readthedocs.io/en/stable/config-file/v2.html +# .. 
seealso:: https://github.com/readthedocs-examples/example-sphinx-basic/blob/main/.readthedocs.yaml +version: 2 +build: + os: ubuntu-22.04 + tools: + python: "3.12" +sphinx: + configuration: docs/conf.py +python: + install: + - requirements: docs/requirements.txt + - method: pip + path: . diff --git a/NEWS b/NEWS index 04c418c3..80629b51 100644 --- a/NEWS +++ b/NEWS @@ -1,3 +1,124 @@ +* Mon Feb 16 2026 Satoru SATOH - 0.15.1-1 +- fix: [doc] disable the badge of scrutinizer as it does not seem to work +- fix: [doc] add an extra empty line to avoid rst syntax error +- fix: [rpm] corrections for rpm stuff to avoid build time errors +- fix: remove deps to pytest-clarity as it's not available in fedora +- fix: limit deps to toml if python < 3.11 +- fix: correct path to SRPMs +- fix: try to make coveralls installed only in github ci env +- fix: [rpm] move a hack to tweak pyproject.toml before %generate_buildrequires +- fix/change: [rpm] test build before uploading to copr +- enhancement: [tox] check sdist just in case in dists environment +- enhancement: [tox] add an environment 'rpms' to test to build rpms with mock + +* Mon Feb 16 2026 Satoru SATOH - 0.15.0-1 +- fix: correct deps; tox-uv is needed at build time +- fix/change: migrate test cases from unittest.TestCase to pytest based with + new test data loader (#173) +- fix: enable all rules by ruff linter by default +- fix/change: [tox] switch to ruff linter from flake8 and pylint +- fix: correction for various type of ruff errors, + - blank-line-after-summary (D205) + - camelcase-imported-as-acronym (N817) + - commented-out-code (ERA001) + - future-rewritable-type-annotation (FA100) + - no-explicit-stacklevel (B028) + - non-self-return-type (PYI034) + - one-blank-line-after-class (D204) + - open-file-with-context-handler (SIM115) + - quoted-annotation (UP037) + - unnecessary-generator-dict (C402) + - unnecessary-generator-list (C400) + - unnecessary-generator-set (C401) + - blanket-type-ignore (PGH003) + - 
mutable-class-default (RUF012) + - missing-type-args (ANN002) + - unused-method-argument (ARG002) + - builtin-open (PTH123) + - collapsible-if (SIM102) + - collection-literal-concatenation (RUF005) + - f-string-in-exception (EM102) + - future-rewritable-type-annotation (FA100) + - if-else-block-instead-of-if-exp (SIM108) + - multi-line-summary-first-line (D212) + - quoted-annotation (UP037) + - raise-without-from-inside-except (B904) + - raw-string-in-exception (EM101) + - redefined-loop-name (PLW2901) + - suppressible-exception (SIM105) + - typing-only-first-party-import (TCH001) + - unnecessary-paren-on-raise-exception (RSE102) + - yield-in-for-loop (UP028) + - workaround for a ruff error, B018 + - workaround for blind-except (BLE001) by ruff + - workaround for builtin-variable-shadowing (A001) by ruff + - workaround for future-rewritable-type-annotation (FA100) by ruff + - workaround for open-file-with-context-handler (SIM115) by ruff + - workaround for raw-string-in-exception (EM101) by ruff + - workaround for some corner cases along with corrections for type hints + - workarounds for hardcoded-sql-expression (S608) by ruff + - workarounds for jinja2-autoescape-false (S701) by ruff + - workarounds for unused-function-argument (ARG001) by ruff + - workaround (suppress error) for try-except-in-loop (PERF203) by ruff +- fix: add a configuration file for readthedocs.org +- fix: add and correct type hints +- fix: add build.os configuration for the readthedocs.org +- fix: add build time dependency for the docs +- fix: add full dependency file +- fix: add missing test cases for json.stdlib to test option, 'sort_keys' +- fix: add missing type hints, correct some type hints and add TypeGuards and + checks to make them robust +- fix: check expected warnings were raised in a test case for shellvars backend +- fix: cleanup the badge links in the header +- fix: disable specific ruff rules to avoid conflicts +- Fix/flake8 (#184) +- Fix/pytest migration (#176) +- fix: remove 
deprecated old test data collector module (#178) +- fix: remove obsolete code not needed any more +- fix: remove unused block, import, test data +- fix: some flake8 errors and warnings (#182) +- fix: [tox] enable 'lint' and 'type-check' targets run by default +- fix: update configurations for flake8 +- fix: update gha for codecov and configure token +- fix: update the list of ruff linter's rules to ignore (#183) +- fix: use standard collections as type annotations (#171) +- fix: workaround to suppress a flake8-bugbear error +- fix: workaround to suppress a mypy error +- fix: workaround to suppress errors that only happen with tomllib +- fix: remove rpm related hack as bdist_rpm was deprecated in the latest setuptools +- fix: [tox] add missing dependencies in srpm target +- fix: [rpm] add hack to avoid false-positive errors in pyproject.toml +- fix: [tox] remove env var not used any more +- change: drop python 3.8.x support +- change: enable all ruff rules with minimal exceptions +- change: add new requirements.txt to list tox-uv +- change: migration from unittest to pytest with pytest.mark.parametrize +- change: add type check time dependency to typing-extensions for TypeGuard + if python < 3.10 +- change: add some exports of data types used internally +- change: find test file for tests.backend.test_common dynamically if possible +- change/fix: [api] make some arguments of the API load{,s} keyword-only +- change/fix: [api] make some more arguments of the APIs keyword-only +- change: re-organize requirements.txt files +- change: re-organize schema module (#172) +- change: switch to new test data loader based on tests.common.tdc (#177) +- Change: switch to use uv as a package and project manager (#181) +- change: use more strict typed arguments in a couple of test cases +- enhancement: add python 3.13 support +- enhancement: add lint-next target to prepare for ruff-by-default configuration +- enhancement: add new test loader and its test cases, for test cases of backends 
+- enhancement: add test data for xml backend with options +- enhancement: [tox] add 'full' target to test together with optional + dependencies also +- Migrate from setup.cfg to pyproject.toml (#168) +- refactor: make items of envlist listed line by line to make those maintenance easier +- refactor: move constants from tests.backend.test_common to tests.backend.constants +- refactor: simplify test cases for anyconfig.backend.base.*Mixin.*open with + pytest.mark.parametrize +- enhancement: add azure-pipelines.yml for Azure Pipelines +- enhancement: add a script to build SRPM fom src dist and pkg/package.spec.in +- enhancement: [tox] add environments, upload and upload-copr + * Sun Jan 14 2024 Satoru SATOH - 0.14.0-1 - change: drop the support py37 and add py311 - change: drop python 3.6.x support diff --git a/README.rst b/README.rst index 09222ad2..56ba9fda 100644 --- a/README.rst +++ b/README.rst @@ -20,26 +20,18 @@ python-anyconfig :target: https://github.com/ssato/python-anyconfig/actions?query=workflow%3ATests :alt: [Github Actions: Test status] +.. image:: https://dev.azure.com/satorusatoh0471/python-anyconfig/_apis/build/status/ssato.python-anyconfig?branchName=next + :target: https://dev.azure.com/satorusatoh0471/python-anyconfig/_build/latest?definitionId=1 + :alt: [Azure Pipelines Status] + .. image:: https://img.shields.io/coveralls/ssato/python-anyconfig.svg :target: https://coveralls.io/r/ssato/python-anyconfig :alt: [Coverage Status] -.. .. image:: https://landscape.io/github/ssato/python-anyconfig/master/landscape.svg?style=flat - :target: https://landscape.io/github/ssato/python-anyconfig/master - :alt: [Code Health] - -.. image:: https://scrutinizer-ci.com/g/ssato/python-anyconfig/badges/quality-score.png?b=master +.. .. image:: https://scrutinizer-ci.com/g/ssato/python-anyconfig/badges/quality-score.png?b=next :target: https://scrutinizer-ci.com/g/ssato/python-anyconfig :alt: [Code Quality by Scrutinizer] -.. .. 
image:: https://img.shields.io/lgtm/grade/python/g/ssato/python-anyconfig.svg - :target: https://lgtm.com/projects/g/ssato/python-anyconfig/context:python - :alt: [Code Quality by LGTM] - -.. .. image:: https://www.openhub.net/p/python-anyconfig/widgets/project_thin_badge.gif - :target: https://www.openhub.net/p/python-anyconfig - :alt: [Open HUB] - .. image:: https://readthedocs.org/projects/python-anyconfig/badge/?version=latest :target: http://python-anyconfig.readthedocs.io/en/latest/?badge=latest :alt: [Doc Status] diff --git a/azure-pipelines.yml b/azure-pipelines.yml new file mode 100644 index 00000000..9c79037b --- /dev/null +++ b/azure-pipelines.yml @@ -0,0 +1,48 @@ +--- +# yamllint disable-line rule:line-length +# Based on https://github.com/Azure-Samples/azure-pipelines-python/blob/master/.azure-pipelines/simple_package.1-multi-target.yml +# another ref. https://github.com/tox-dev/azure-pipelines-template +variables: + package: pre_commit_hooks_for_ansible + srcDirectory: src/ + testsDirectory: tests/$(package) + +trigger: + branches: + include: + - '*' + +jobs: + - job: Build + strategy: + matrix: + python39: + pythonVersion: 3.9 + python310: + pythonVersion: 3.10 + python311: + pythonVersion: 3.11 + python312: + pythonVersion: 3.12 + + pool: + vmImage: 'ubuntu-latest' + + variables: + TOXENV: py${{ replace(variables['pythonVersion'], '.', '') }} + + steps: + - task: UsePythonVersion@0 + displayName: Use Python $(pythonVersion) + inputs: + versionSpec: $(pythonVersion) + + - script: | + sudo apt-get update + sudo apt-get install -y graphviz + python -m pip install --upgrade pip + pip install tox tox-gh-actions + displayName: Install some more test time dependencies + + - bash: tox + displayName: Run tests diff --git a/docs/conf.py b/docs/conf.py index f5c10c85..a026cbaf 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,26 +1,24 @@ -# -*- coding: utf-8 -*- # # pylint:disable=invalid-name """conf.py for sphinx.""" -import sys import pathlib +import sys 
-sys.path.insert(0, str(pathlib.Path(__file__).parent.resolve() / 'src')) +sys.path.insert(0, str(pathlib.Path(__file__).parent.resolve() / "src")) extensions = [ - 'sphinx.ext.autodoc', - 'sphinx_autodoc_typehints' + "sphinx.ext.autodoc", + "sphinx_autodoc_typehints", ] -source_suffix = '.rst' -master_doc = 'index' +source_suffix = ".rst" +master_doc = "index" -project = u'python-anyconfig' -copyright = u'2021, Satoru SATOH ' -version = '0.10.0' +project = "python-anyconfig" +version = "3.13.11" release = version exclude_patterns = [] -html_theme = 'default' +html_theme = "default" -autodoc_member_order = 'bysource' +autodoc_member_order = "bysource" diff --git a/pkg/buildsrpm.sh b/pkg/buildsrpm.sh new file mode 100755 index 00000000..45b2a385 --- /dev/null +++ b/pkg/buildsrpm.sh @@ -0,0 +1,90 @@ +#! /bin/bash +# +# Build source RPM from source and RPM SPEC file. +# +set -e -o pipefail + +usage="Usage: $0 [NAME [RPMSPECIN [BUILDDIR [SRCDIR]]]]" + +self=$0 +selfdir="${self%/*}" + +RPMSPECIN="${selfdir:?}/package.spec.in" + +BUILDDIR="${selfdir}/../build" +SRCDIR="${selfdir}/../src" +DISTDIR="${selfdir}/../dist" + +# Set or detect name. +NAME=${1} +[[ -z ${NAME} ]] && { + NAME=$(sed -nr 's/^name.*=.*"(\S+)"$/\1/p' pyproject.toml) + [[ -z ${NAME} ]] && { + cat << EOM +[Error] No name was provided, and could not be automatically detected. +EOM + echo "${usage}" + exit 1 + } || : +} + +RPMSPECIN=${2:-${RPMSPECIN}} +BUILDDIR=${3:-${BUILDDIR}} +SRCDIR=${4:-${SRCDIR}} +RELEASE=${5:-1} + +test -f ${RPMSPECIN:?} && test -d ${SRCDIR:?} && test -d ${DISTDIR:?} || { + echo "[Error] NOT found: ${RPMSPECIN} and/or ${SRCDIR} and/or ${DISTDIR}" + echo "${usage}" + exit 1 +} + +test -d ${BUILDDIR} || mkdir -p ${BUILDDIR} + +# Detect version info. 
+candidates=$( +find ${SRCDIR} -type f -iregex '.*/__init__.py' +) + +for f in ${candidates:?} +do + VERSION=$( + grep -q __version__ $f && + sed -nr 's/^__version__ = .(.+)./\1/p' $f || : +) + [[ -z ${VERSION} ]] || break +done + +[[ -z ${VERSION} ]] && { + cat << EOM +[Error] Could NOT find version string from ${SRCDIR}/**/__init__.py +EOM + exit 1 +} || : + +# Find src dist. +SRCDIST=$(ls -1 ${DISTDIR}/${NAME}-${VERSION}.*) +[[ -z ${SRCDIST} ]] && { + echo "[Error] Cound NOT find src dist. Build it in advance." +} || : +cp -f ${SRCDIST} ${BUILDDIR} + +# Generate the RPM SPEC file from ${RPMSPECIN}. +RPMSPECIN_fn=${RPMSPECIN##*/} +RPMSPEC=${BUILDDIR}/${RPMSPECIN_fn/.in/} + +sed -r " +s/@NAME@/${NAME}/g +s/@VERSION@/${VERSION}/g +s/@RELEASE@/${RELEASE}/g +" ${RPMSPECIN} > ${RPMSPEC} + +_rpmbuild () { + rpmbuild --define "_topdir ${BUILDDIR}" \ + --define "_srcrpmdir ${BUILDDIR}" \ + --define "_sourcedir ${BUILDDIR}" \ + --define "_buildroot ${BUILDDIR}" \ + -bs $@ +} + +_rpmbuild ${RPMSPEC} diff --git a/pkg/copr-build.sh b/pkg/copr-build.sh index 12b92655..c56a319a 100755 --- a/pkg/copr-build.sh +++ b/pkg/copr-build.sh @@ -9,12 +9,28 @@ set -ex curdir=${0%/*} topdir=${curdir}/../ -srpmdir=${topdir}/dist +srpmdir=${topdir}/build + +check=$1 + +# see also +# - /etc/fedora-release +# - https://copr.fedorainfracloud.org/coprs/ssato/python-anyconfig/ +dists=" +epel-10-x86_64 +fedora-42-x86_64 +fedora-rawhide-x86_64 +" copr_project=ssato/python-anyconfig srpm="$(ls -1 ${srpmdir:?}/*.src.rpm | sort -Vr | head -n 1)" # FIXME -test -f ~/.config/copr -copr-cli build ${copr_project:?} "${srpm:?}" - -# vim:sw=4:ts=4:et: +[[ -z ${check} ]] && { + test -f ~/.config/copr + copr-cli build ${copr_project:?} "${srpm:?}" +} || { + for dist in ${dists:?}; do + mock -r ${dist:?} "${srpm:?}" & + done + wait +} diff --git a/pkg/header.rst b/pkg/header.rst index 256ff04b..e4673503 100644 --- a/pkg/header.rst +++ b/pkg/header.rst @@ -20,26 +20,18 @@ python-anyconfig :target: 
https://github.com/ssato/python-anyconfig/actions?query=workflow%3ATests :alt: [Github Actions: Test status] +.. image:: https://dev.azure.com/satorusatoh0471/python-anyconfig/_apis/build/status/ssato.python-anyconfig?branchName=next + :target: https://dev.azure.com/satorusatoh0471/python-anyconfig/_build/latest?definitionId=1 + :alt: [Azure Pipelines Status] + .. image:: https://img.shields.io/coveralls/ssato/python-anyconfig.svg :target: https://coveralls.io/r/ssato/python-anyconfig :alt: [Coverage Status] -.. .. image:: https://landscape.io/github/ssato/python-anyconfig/master/landscape.svg?style=flat - :target: https://landscape.io/github/ssato/python-anyconfig/master - :alt: [Code Health] - -.. image:: https://scrutinizer-ci.com/g/ssato/python-anyconfig/badges/quality-score.png?b=master +.. .. image:: https://scrutinizer-ci.com/g/ssato/python-anyconfig/badges/quality-score.png?b=next :target: https://scrutinizer-ci.com/g/ssato/python-anyconfig :alt: [Code Quality by Scrutinizer] -.. .. image:: https://img.shields.io/lgtm/grade/python/g/ssato/python-anyconfig.svg - :target: https://lgtm.com/projects/g/ssato/python-anyconfig/context:python - :alt: [Code Quality by LGTM] - -.. .. image:: https://www.openhub.net/p/python-anyconfig/widgets/project_thin_badge.gif - :target: https://www.openhub.net/p/python-anyconfig - :alt: [Open HUB] - .. image:: https://readthedocs.org/projects/python-anyconfig/badge/?version=latest :target: http://python-anyconfig.readthedocs.io/en/latest/?badge=latest :alt: [Doc Status] diff --git a/pkg/nose.cfg b/pkg/nose.cfg deleted file mode 100644 index 82c0d61b..00000000 --- a/pkg/nose.cfg +++ /dev/null @@ -1,10 +0,0 @@ -[nosetests] -verbosity=2 -with-doctest=1 -#all-modules=1 -# Requires that nosetest processes multiple modules' test cases at once. 
-# processes=4 - -# coverage: -cover-package=anyconfig -cover-branches=1 diff --git a/pkg/package.spec.in b/pkg/package.spec.in index 5aaf725f..d69e7f86 100644 --- a/pkg/package.spec.in +++ b/pkg/package.spec.in @@ -16,7 +16,8 @@ Release: @RELEASE@ Summary: Python library to load and dump configuration files in various formats License: MIT URL: https://github.com/ssato/python-anyconfig -Source0: %{url}/archive/RELEASE_%{version}.tar.gz +# Source0: %%{url}/archive/RELEASE_%%{version}.tar.gz +Source0: %{pkgname}-%{version}.tar.gz BuildArch: noarch %if %{with doc} @@ -24,7 +25,6 @@ BuildRequires: make BuildRequires: python3-docutils BuildRequires: python3-sphinx %endif -BuildRequires: python3-toml BuildRequires: python3-yaml BuildRequires: python3-devel BuildRequires: python3-setuptools @@ -42,7 +42,7 @@ Requires: python3-ruamel-yaml %endif %if %{with optionals} Requires: python3-configobj -Requires: python3-toml +Requires: python3-tomli-w %endif %{?python_provide:%python_provide python3-%{pkgname}} @@ -58,9 +58,15 @@ HTML documentation for %{name}. %prep %autosetup -n %{pkgname}-%{version} +# hack: To avoid build-time error with the later license identifier format in +# pyproject.toml since PEP 639. 
+sed -i.save -r '/^license = /,/^]/s/^.*/# &/g' pyproject.toml + +%generate_buildrequires +%pyproject_buildrequires -t %build -%py3_build +%pyproject_wheel %if %{with doc} make -C docs/ html @@ -69,11 +75,11 @@ rm -frv docs/build/html/_sources %endif %install -%py3_install +%pyproject_install %if %{with tests} %check -tox -e py$(python -c "import sys; sys.stdout.write(sys.version[:3].replace('.', ''))") +%tox %endif %files -n python3-%{pkgname} @@ -90,6 +96,11 @@ tox -e py$(python -c "import sys; sys.stdout.write(sys.version[:3].replace('.', %endif %changelog +* Mon Feb 16 2026 Satoru SATOH - 0.15.0-1 +- new upstream release +- fix dependencies to deprecated python3-toml package +- see NEWS fore more details + * Mon Jan 15 2024 Satoru SATOH - 0.14.0-1 - new upstream release - see NEWS fore more details diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..46159dfa --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,263 @@ +# .. seealso:: https://packaging.python.org/en/latest/guides/writing-pyproject-toml/ +[project] +name = "anyconfig" +requires-python = ">=3.9" +dynamic = [ + "version", +] +description = "Library provides common APIs to load and dump configuration files in various formats" +readme = "README.rst" +authors = [ + {"name" = "Satoru SATOH", email = "satoru.satoh@gmail.com"}, +] +maintainers = [ + {"name" = "Satoru SATOH", email = "satoru.satoh@gmail.com"}, +] +license = "MIT" +license-files = [ + "LICENSE.MIT", "AUTHORS.txt" +] +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Environment :: Console", + "Operating System :: OS Independent", + "Topic :: Software Development :: Libraries :: Python 
Modules", + "Topic :: Text Processing :: Markup", + "Topic :: Utilities", +] +# .. note:: It requires nothing at minimum. +#dependencies = [ +#] + +[tool.setuptools.dynamic.version] +attr = "anyconfig.api.__version__" + +[project.urls] +Homepage = "https://github.com/ssato/python-anyconfig" +Documentation = "http://python-anyconfig.readthedocs.org" +Repository = "https://github.com/ssato/python-anyconfig" +Issues = "https://github.com/ssato/python-anyconfig/issues" +Changelog = "https://github.com/ssato/python-anyconfig/blob/next/NEWS" +Download = "https://pypi.python.org/pypi/anyconfig" +"Download RPMs" = "https://copr.fedoraproject.org/coprs/ssato/python-anyconfig/" +CI = "https://github.com/ssato/python-anyconfig/actions" + +[build-system] +requires = [ + "setuptools >= 61.0", + "wheel", + "tox-uv", +] +build-backend = "setuptools.build_meta" + +[tool.setuptools.package-dir] +"" = "src" + +[tool.setuptools.package-data] +anyconfig = [ + "py.typed", +] + +[tool.setuptools.packages.find] +where = [ + "src" +] + +# [tool.setuptools.data_files] +# share/man/man1 = [ +# "docs/anyconfig_cli.1", +# ] + +[project.optional-dependencies] +yaml = [ + "pyyaml", +] +toml = [ + "tomli; python_version < '3.11'", + "tomli-w", +] +query = [ + "jmespath", +] +schema = [ + "jsonschema", +] +template = [ + "Jinja2", +] + +[project.scripts] +anyconfig_cli = "anyconfig.cli:main" + +# .. seealso:: https://docs.pytest.org/en/stable/reference/customize.html#pyproject-toml +[tool.pytest.ini_options] +testpaths = [ + "tests", +] +# addopts = "--cov=src -vv -rxXs --cov-report xml" +addopts = "--cov=src -v -ra --cov-report xml -n auto" + +[tool.pylint."MESSAGES CONTROL"] +disable = [ + "import-error", + "invalid-name", + "locally-disabled", + "too-many-ancestors", + "too-many-return-statements", +] + +# .. 
seealso:: https://docs.astral.sh/ruff/settings/#top-level +[tool.ruff] +exclude = [ + "tests/__init__.py", + "tests/*/__init__.py", + "tests/*/*/__init__.py", + "tests/res/*/*/*/*.py", + "tests/res/*/*/*/*/*.py", + "tests/res/*/*/*/*/*/*.py", + "tests/res/*/*/*/*/*/*/*.py", + "tests/res/*/*/*/*/*/*/*/*.py", +] +src = [ + "src", + "tests", +] + +[tool.ruff.lint] +select = [ + "ALL", +] +# This setting is intentional, although it breaks the rules. +ignore = [ + "ANN401", # https://docs.astral.sh/ruff/rules/any-type/ + "D203", # Because it looks conflict with D211. + # https://docs.astral.sh/ruff/rules/incorrect-blank-line-before-class/ + "D213", # Because it looks conflict with D212. + # https://docs.astral.sh/ruff/rules/multi-line-summary-second-line/ + "FIX002", # https://docs.astral.sh/ruff/rules/line-contains-todo/ + "I001", # https://docs.astral.sh/ruff/rules/unsorted-imports/ + "ICN001", # https://docs.astral.sh/ruff/rules/unconventional-import-alias/ + "RUF022", # https://docs.astral.sh/ruff/rules/unsorted-dunder-all/ + "TID252", # https://docs.astral.sh/ruff/rules/relative-imports/ +] + +[tool.ruff.lint.extend-per-file-ignores] +"src/anyconfig/query/default.py" = ["ARG001"] +"src/anyconfig/schema/default.py" = ["ARG001"] +# "src/anyconfig/template/__init__.py" = ["ARG001"] +"src/anyconfig/template/jinja2.py" = ["BLE001"] + +# .. 
todo:: security hardening +# https://docs.astral.sh/ruff/rules/suspicious-xml-element-tree-usage/ +"src/anyconfig/backend/xml/etree.py" = ["S314"] + +"docs/*.py" = [ + "INP001", +] + +# TODO: +"src/anyconfig/backend/yaml/pyyaml.py" = [ + "TD003", # https://docs.astral.sh/ruff/rules/missing-todo-link/ + "FIX002", # https://docs.astral.sh/ruff/rules/line-contains-todo/ +] + +"src/anyconfig/parser.py" = [ + # https://docs.astral.sh/ruff/rules/non-pep604-annotation-optional/ + "UP045", +] + +"src/anyconfig/schema/datatypes.py" = [ + "UP045", +] + +# Ignored: +"tests/*.py" = [ + # https://docs.astral.sh/ruff/rules/missing-type-function-argument/ + "ANN001", + "D103", # https://docs.astral.sh/ruff/rules/undocumented-public-function/ + "I001", # https://docs.astral.sh/ruff/rules/unsorted-imports/ + # https://docs.astral.sh/ruff/rules/lowercase-imported-as-non-lowercase/ + "N812", + "S101", # https://docs.astral.sh/ruff/rules/assert/ + "TID252", # https://docs.astral.sh/ruff/rules/relative-imports/ +] +"tests/*/*.py" = [ + "ANN001", + "D103", + "I001", + "N812", + "S101", + "TID252", +] +"tests/*/*/*.py" = [ + "ANN001", + "D103", + "I001", + "N812", + "S101", + "TID252", +] +"tests/*/*/*/*.py" = [ + "ANN001", + "D103", + "I001", + "N812", + "S101", + "TID252", +] + +#[tool.ruff.lint.flake8-annotations] +#[tool.ruff.lint.flake8-bandit] +#[tool.ruff.lint.flake8-bugbear] +[tool.ruff.lint.flake8-builtins] +ignorelist = [ + "open", # for anyconfig.api.open +] + +# .. note:: +# +# The following options are not supported by pyproject.toml or I don't know +# how to port from setup.cfg. +# +# +# [meta] +# long_description = +# python-anyconfig is a python library provides common APIs to load and dump +# configuration files in various formats with some useful features such as +# contents merge, templates, query, schema validation and generation support. 
+# +# - Home: https://github.com/ssato/python-anyconfig +# - Author: Satoru SATOH +# - License: MIT + +#[aliases] +# dists = clean --all sdist bdist_wheel + +# .. note:: It's not well supported. +# .. seealso:: https://github.com/pypa/packaging-problems/issues/72 +# +# Disabled because it does not work in CentOS 8. +# [options.data_files] +# share/man/man1 = +# docs/anyconfig_cli.1 + +# .. seealso:: https://stackoverflow.com/a/64487610 +# +# [flake8] +# per-file-ignores = +# tests/res/*/*/*/*.py: W605, B018 +# tests/res/*/*/*/*/*.py: W605, B018 +# +# max-complexity = 10 +# select = C,E,F,W,B +# +# # .. seealso:: https://www.flake8rules.com/rules/W504.html +# ignore = W503 diff --git a/requirements.txt b/requirements.txt index 7f931485..d7998ff8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,10 +1,9 @@ --r requirements_min.txt Jinja2 PyYAML jmespath jsonschema ruamel.yaml simplejson -toml +toml; python_version < "3.11" tomli; python_version < "3.11" -tomli-w +tomli-w; python_version >= "3.11" diff --git a/requirements_min.txt b/requirements_min.txt deleted file mode 100644 index 49fe098d..00000000 --- a/requirements_min.txt +++ /dev/null @@ -1 +0,0 @@ -setuptools diff --git a/setup.cfg b/setup.cfg index 4a64c22a..cfec1c58 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,109 +1,18 @@ # .. seealso:: https://setuptools.readthedocs.io/en/latest/setuptools.html#configuring-setup-using-setup-cfg-files -# .. 
seealso:: https://wheel.readthedocs.io/en/stable/ -[bdist_wheel] -universal = 1 - [aliases] dists = clean --all sdist bdist_wheel -[metadata] -name = anyconfig -#version = attr: anyconfig.__version__ -description = Library provides common APIs to load and dump configuration files in various formats -project_urls = - CI: Github Actions = https://github.com/ssato/python-anyconfig/actions - Download = https://pypi.python.org/pypi/anyconfig - Download RPMs = https://copr.fedoraproject.org/coprs/ssato/python-anyconfig/ - Documentation = http://python-anyconfig.readthedocs.org - Bug Tracker = https://github.com/ssato/python-anyconfig/issues - Source = https://github.com/ssato/python-anyconfig -long_description = - python-anyconfig is a python library provides common APIs to load and dump - configuration files in various formats with some useful features such as - contents merge, templates, query, schema validation and generation support. - - - Home: https://github.com/ssato/python-anyconfig - - Author: Satoru SATOH - - License: MIT - -author = Satoru SATOH -author_email = satoru.satoh@gmail.com -maintainer = Satoru SATOH -maintainer_email = satoru.satoh@gmail.com -license = MIT -url = https://github.com/ssato/python-anyconfig -classifiers = - Development Status :: 4 - Beta - Intended Audience :: Developers - Programming Language :: Python - Programming Language :: Python :: 3 - Programming Language :: Python :: 3.8 - Programming Language :: Python :: 3.9 - Programming Language :: Python :: 3.10 - Programming Language :: Python :: 3.11 - Programming Language :: Python :: 3.12 - Environment :: Console - Operating System :: OS Independent - Topic :: Software Development :: Libraries :: Python Modules - Topic :: Text Processing :: Markup - Topic :: Utilities - License :: OSI Approved :: MIT License - -[options] -include_package_data = True -packages = find: -package_dir = - = src - -# Disabled because it does not work in CentOS 8. 
-# [options.data_files] -# share/man/man1 = -# docs/anyconfig_cli.1 - -[options.package_data] -anyconfig = py.typed - -[options.extras_require] -yaml = - pyyaml -toml = - tomli; python_version < "3.11" - tomli-w -query = - jmespath -schema = - jsonschema -template = - Jinja2 - -[options.packages.find] -where = src -exclude = - tests - tests.* - -[options.entry_points] -console_scripts = - anyconfig_cli = anyconfig.cli:main - [flake8] per-file-ignores = - tests/res/*/*/*/*.py: W605, B018 - tests/res/*/*/*/*/*.py: W605, B018 + tests/res/*/*/*/*.py: W605 + tests/res/*/*/*/*/*.py: W605 + tests/res/*/*/*/*/*/*.py: W605 max-complexity = 10 select = C,E,F,W,B # .. seealso:: https://www.flake8rules.com/rules/W504.html -ignore = W503 - -[tool:pytest] -testpaths = - tests - -python_files = - test_*.py - Test*.py - -addopts = --cov=src -vv -rxXs --cov --cov-report xml +ignore = + W503 + B018 diff --git a/setup.py b/setup.py index dee047f1..489a748a 100644 --- a/setup.py +++ b/setup.py @@ -3,53 +3,20 @@ import pathlib import re import setuptools -import setuptools.command.bdist_rpm # It might throw IndexError and so on. 
-VERSION = '0.1.0' -VER_REG = re.compile(r"^__version__ = '([^']+)'") +VERSION = os.getenv("_PKG_VERSION", default="0.1.0") +VER_REG = re.compile(r"^__version__ = \"([^']+)\"") -for fpath in pathlib.Path('src').glob('**/__init__.py'): +for fpath in pathlib.Path("src").glob("**/__init__.py"): for line in fpath.open(): match = VER_REG.match(line) if match: VERSION = match.groups()[0] break -# For daily snapshot versioning mode: -RELEASE = "1%{?dist}" -if os.environ.get("_SNAPSHOT_BUILD", None) is not None: - import datetime - RELEASE = RELEASE.replace('1', - datetime.datetime.now().strftime("%Y%m%d")) - - -def _replace(line): - """Replace some strings in the RPM SPEC template.""" - if "@VERSION@" in line: - return line.replace("@VERSION@", VERSION) - - if "@RELEASE@" in line: - return line.replace("@RELEASE@", RELEASE) - - if "Source0:" in line: # Dirty hack - return "Source0: %{pkgname}-%{version}.tar.gz" - - return line - - -class bdist_rpm(setuptools.command.bdist_rpm.bdist_rpm): - """Override the default content of the RPM SPEC.""" - - spec_tmpl = pathlib.Path('pkg/package.spec.in').resolve() - - def _make_spec_file(self): - """Generate the RPM SPEC file.""" - return [_replace(line.rstrip()) for line in self.spec_tmpl.open()] - - -setuptools.setup(version=VERSION, cmdclass=dict(bdist_rpm=bdist_rpm), - data_files=[("share/man/man1", ["docs/anyconfig_cli.1"])]) - -# vim:sw=4:ts=4:et: +setuptools.setup( + version=VERSION, + data_files=[("share/man/man1", ["docs/anyconfig_cli.1"])], +) diff --git a/src/anyconfig/__init__.py b/src/anyconfig/__init__.py index e431dcad..0a69aa79 100644 --- a/src/anyconfig/__init__.py +++ b/src/anyconfig/__init__.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # For 'anyconfig.open': @@ -31,32 +31,43 @@ load_plugins, list_types, list_by_cid, list_by_type, list_by_extension, findall, find, try_query, - validate, is_valid, gen_schema + validate, 
is_valid, gen_schema, ) __all__ = [ - 'dump', 'dumps', - 'single_load', 'multi_load', 'load', 'loads', - 'open', 'version', + "dump", "dumps", # dump APIs. + "single_load", "multi_load", "load", "loads", # load APIs. + "get", "set_", # accessor APIs. + "merge", "open", "version", # other APIs. # anyconfig.common - 'UnknownParserTypeError', 'UnknownProcessorTypeError', - 'UnknownFileTypeError', 'ValidationError', + "UnknownFileTypeError", + "UnknownParserTypeError", + "UnknownProcessorTypeError", + "ValidationError", # anyconfig.dicsts - 'MS_REPLACE', 'MS_NO_REPLACE', 'MS_DICTS', 'MS_DICTS_AND_LISTS', - 'MERGE_STRATEGIES', 'merge', 'get', 'set_', + "MERGE_STRATEGIES", + "MS_DICTS", + "MS_DICTS_AND_LISTS", + "MS_NO_REPLACE", + "MS_REPLACE", # anyconfig.parsers - 'load_plugins', 'list_types', 'list_by_cid', 'list_by_type', - 'list_by_extension', 'findall', 'find', + "find", + "findall", + "list_by_cid", + "list_by_extension", + "list_by_type", + "list_types", + "load_plugins", # anyconfig.query - 'try_query', + "try_query", # anyconfig.validate - 'validate', 'is_valid', 'gen_schema' + "validate", "is_valid", "gen_schema", # validation APIs. 
] # vim:sw=4:ts=4:et: diff --git a/src/anyconfig/api/__init__.py b/src/anyconfig/api/__init__.py index 3ef64e2b..1f3a83f5 100644 --- a/src/anyconfig/api/__init__.py +++ b/src/anyconfig/api/__init__.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2012 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=unused-import,import-error,invalid-name @@ -83,14 +83,14 @@ - Export factory method (create) of anyconfig.mergeabledict.MergeableDict """ -import typing +from __future__ import annotations from .datatypes import MaybeDataT from ._dump import ( - dump, dumps + dump, dumps, ) from ._load import ( - single_load, multi_load, load, loads + single_load, multi_load, load, loads, ) from ._open import open # pylint: disable=redefined-builtin @@ -99,7 +99,7 @@ from ..common import ( InDataT, InDataExT, UnknownFileTypeError, UnknownParserTypeError, - UnknownProcessorTypeError, ValidationError + UnknownProcessorTypeError, ValidationError, ) from ..dicts import ( MS_REPLACE, MS_NO_REPLACE, MS_DICTS, MS_DICTS_AND_LISTS, MERGE_STRATEGIES, @@ -110,57 +110,55 @@ ) from ..parsers import ( load_plugins, list_types, list_by_cid, list_by_type, list_by_extension, - findall, find, MaybeParserT + findall, find, MaybeParserT, ) from ..query import try_query from ..schema import ( - validate, is_valid, gen_schema + validate, is_valid, gen_schema, ) -__version__ = '0.14.0' +__version__ = "0.15.1" -def version() -> typing.List[str]: +def version() -> list[str]: """Version info. :return: A list of version info, [major, minor, release[, e.g. 
[0, 8, 2] """ - return __version__.split('.') + return __version__.split(".") __all__ = [ - 'MaybeDataT', - 'dump', 'dumps', - 'single_load', 'multi_load', 'load', 'loads', - 'open', 'version', + "MaybeDataT", + "dump", "dumps", + "single_load", "multi_load", "load", "loads", + "open", "version", # anyconfig.backend - 'ParserT', + "ParserT", # anyconfig.common - 'InDataT', 'InDataExT', - 'UnknownFileTypeError', 'UnknownParserTypeError', - 'UnknownProcessorTypeError', 'ValidationError', + "InDataT", "InDataExT", + "UnknownFileTypeError", "UnknownParserTypeError", + "UnknownProcessorTypeError", "ValidationError", # anyconfig.dicsts - 'MS_REPLACE', 'MS_NO_REPLACE', 'MS_DICTS', 'MS_DICTS_AND_LISTS', - 'MERGE_STRATEGIES', - 'merge', 'get', 'set_', + "MS_REPLACE", "MS_NO_REPLACE", "MS_DICTS", "MS_DICTS_AND_LISTS", + "MERGE_STRATEGIES", + "merge", "get", "set_", # anyconfig.ioinfo - 'IOInfo', 'ioinfo_make', 'ioinfo_makes', + "IOInfo", "ioinfo_make", "ioinfo_makes", # anyconfig.parsers - 'load_plugins', 'list_types', 'list_by_cid', 'list_by_type', - 'list_by_extension', 'findall', 'find', - 'MaybeParserT', + "load_plugins", "list_types", "list_by_cid", "list_by_type", + "list_by_extension", "findall", "find", + "MaybeParserT", # anyconfig.query - 'try_query', + "try_query", # anyconfig.validate - 'validate', 'is_valid', 'gen_schema' + "validate", "is_valid", "gen_schema", ] - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/api/_dump.py b/src/anyconfig/api/_dump.py index 98505114..76753fd8 100644 --- a/src/anyconfig/api/_dump.py +++ b/src/anyconfig/api/_dump.py @@ -1,15 +1,23 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Provides the API to dump (serialize) objects.""" +from __future__ import annotations + +import typing + from .. import common, ioinfo, parsers -from . 
import datatypes +if typing.TYPE_CHECKING: + from .datatypes import ParserT -def dump(data: common.InDataExT, out: ioinfo.PathOrIOInfoT, - ac_parser: parsers.MaybeParserT = None, **options - ) -> None: + +def dump( + data: common.InDataExT, out: ioinfo.PathOrIOInfoT, + ac_parser: parsers.MaybeParserT = None, + **options: typing.Any, +) -> None: """Save ``data`` to ``out`` in specified or detected format. :param data: A mapping object may have configurations data to dump @@ -25,13 +33,14 @@ def dump(data: common.InDataExT, out: ioinfo.PathOrIOInfoT, :raises: ValueError, UnknownProcessorTypeError, UnknownFileTypeError """ ioi = ioinfo.make(out) - psr: datatypes.ParserT = parsers.find(ioi, forced_type=ac_parser) + psr: ParserT = parsers.find(ioi, forced_type=ac_parser) psr.dump(data, ioi, **options) -def dumps(data: common.InDataExT, - ac_parser: parsers.MaybeParserT = None, - **options) -> str: +def dumps( + data: common.InDataExT, ac_parser: parsers.MaybeParserT = None, + **options: typing.Any, +) -> str: """Return a str representation of ``data`` in specified format. :param data: Config data object to dump @@ -41,7 +50,5 @@ def dumps(data: common.InDataExT, :return: Backend-specific string representation for the given data :raises: ValueError, UnknownProcessorTypeError """ - psr: datatypes.ParserT = parsers.find(None, forced_type=ac_parser) + psr: ParserT = parsers.find(None, forced_type=ac_parser) return psr.dumps(data, **options) - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/api/_load.py b/src/anyconfig/api/_load.py index 9e86fa0b..d05046ba 100644 --- a/src/anyconfig/api/_load.py +++ b/src/anyconfig/api/_load.py @@ -1,19 +1,18 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=unused-import,import-error,invalid-name """Provides the API to load objects from given files.""" +from __future__ import annotations + import typing import warnings from .. 
import ioinfo -from ..common import ( - InDataT, InDataExT -) from ..dicts import ( convert_to as dicts_convert_to, - merge as dicts_merge + merge as dicts_merge, ) from ..parsers import find as parsers_find from ..query import try_query @@ -21,16 +20,23 @@ from ..template import try_render from ..utils import is_dict_like from .datatypes import ( - ParserT + ParserT, ) from .utils import are_same_file_types +if typing.TYPE_CHECKING: + import collections.abc + + from ..common import ( + InDataT, InDataExT, + ) -MappingT = typing.Dict[str, typing.Any] + +MappingT = dict[str, typing.Any] MaybeParserOrIdOrTypeT = typing.Optional[typing.Union[str, ParserT]] -def try_to_load_schema(**options) -> typing.Optional[InDataT]: +def try_to_load_schema(**options: typing.Any) -> InDataT | None: """Try to load a schema object for validation. :param options: Optional keyword arguments such as @@ -42,22 +48,28 @@ def try_to_load_schema(**options) -> typing.Optional[InDataT]: :return: Mapping object or None means some errors """ - ac_schema = options.get("ac_schema", None) + ac_schema = options.get("ac_schema") if ac_schema is not None: # Try to detect the appropriate parser to load the schema data as it # may be different from the original config file's format, perhaps. options["ac_parser"] = None options["ac_schema"] = None # Avoid infinite loop. - return load(ac_schema, **options) + res = load(ac_schema, **options) + if not res or not is_dict_like(res): + return None + + return res return None -def _single_load(ioi: ioinfo.IOInfo, - ac_parser: MaybeParserOrIdOrTypeT = None, - ac_template: bool = False, - ac_context: typing.Optional[MappingT] = None, - **options) -> InDataExT: +def _single_load( + ioi: ioinfo.IOInfo, *, + ac_parser: MaybeParserOrIdOrTypeT = None, + ac_template: bool = False, + ac_context: MappingT | None = None, + **options: typing.Any, +) -> InDataExT: """Load data from a given ``ioi``. 
:param input_: @@ -87,11 +99,14 @@ def _single_load(ioi: ioinfo.IOInfo, return psr.load(ioi, **options) -def single_load(input_: ioinfo.PathOrIOInfoT, - ac_parser: MaybeParserOrIdOrTypeT = None, - ac_template: bool = False, - ac_context: typing.Optional[MappingT] = None, - **options) -> InDataExT: +def single_load( + input_: ioinfo.PathOrIOInfoT, + ac_parser: MaybeParserOrIdOrTypeT = None, + *, + ac_template: bool = False, + ac_context: MappingT | None = None, + **options: typing.Any, +) -> InDataExT: r"""Load from single input ``input\_``. .. note:: @@ -131,6 +146,9 @@ def single_load(input_: ioinfo.PathOrIOInfoT, - ac_schema: JSON schema file path to validate given config file - ac_query: JMESPath expression to query data + - ac_parse_value: Parse given string as a value in some loaders if + True + - Common backend options: - ac_ignore_missing: @@ -146,20 +164,25 @@ def single_load(input_: ioinfo.PathOrIOInfoT, cnf = _single_load(ioi, ac_parser=ac_parser, ac_template=ac_template, ac_context=ac_context, **options) schema = try_to_load_schema( - ac_template=ac_template, ac_context=ac_context, **options + ac_template=ac_template, ac_context=ac_context, **options, ) if schema and not is_valid(cnf, schema, **options): return None - return try_query(cnf, options.get('ac_query', False), **options) + return try_query(cnf, options.get("ac_query", False), **options) -def multi_load(inputs: typing.Union[typing.Iterable[ioinfo.PathOrIOInfoT], - ioinfo.PathOrIOInfoT], - ac_parser: MaybeParserOrIdOrTypeT = None, - ac_template: bool = False, - ac_context: typing.Optional[MappingT] = None, - **options) -> InDataExT: +def multi_load( + inputs: typing.Union[ # noqa: UP007 + collections.abc.Iterable[ioinfo.PathOrIOInfoT], + ioinfo.PathOrIOInfoT, + ], + ac_parser: MaybeParserOrIdOrTypeT = None, + *, + ac_template: bool = False, + ac_context: MappingT | None = None, + **options: typing.Any, +) -> InDataExT: r"""Load data from multiple inputs ``inputs``. .. 
note:: @@ -213,9 +236,9 @@ def multi_load(inputs: typing.Union[typing.Iterable[ioinfo.PathOrIOInfoT], :raises: ValueError, UnknownProcessorTypeError, UnknownFileTypeError """ schema = try_to_load_schema( - ac_template=ac_template, ac_context=ac_context, **options + ac_template=ac_template, ac_context=ac_context, **options, ) - options['ac_schema'] = None # Avoid to load schema more than twice. + options["ac_schema"] = None # Avoid to load schema more than twice. iois = ioinfo.makes(inputs) if are_same_file_types(iois): @@ -229,25 +252,26 @@ def multi_load(inputs: typing.Union[typing.Iterable[ioinfo.PathOrIOInfoT], for ioi in iois: cups = _single_load( ioi, ac_parser=ac_parser, ac_template=ac_template, - ac_context=ctx, **options + ac_context=ctx, **options, ) if cups: if cnf is None: - cnf = cups # type: ignore + cnf = cups if is_dict_like(cups): dicts_merge( - typing.cast(MappingT, cnf), - typing.cast(MappingT, cups), - **options + typing.cast("MappingT", cnf), + typing.cast("MappingT", cups), + **options, ) - dicts_merge(ctx, typing.cast(MappingT, cups), **options) + dicts_merge(ctx, typing.cast("MappingT", cups), **options) elif len(iois) > 1: - raise ValueError( - f'Object loaded from {ioi!r} is not a mapping object and ' - 'cannot be merged with later ones will be loaded from ' - 'other inputs.' + msg = ( + f"Object loaded from {ioi!r} is not a mapping object and " + "cannot be merged with later ones will be loaded from " + "other inputs." 
) + raise ValueError(msg) if cnf is None: return dicts_convert_to({}, **options) @@ -255,11 +279,21 @@ def multi_load(inputs: typing.Union[typing.Iterable[ioinfo.PathOrIOInfoT], if schema and not is_valid(cnf, schema, **options): return None - return try_query(cnf, options.get('ac_query', False), **options) - - -def load(path_specs, ac_parser=None, ac_dict=None, ac_template=False, - ac_context=None, **options): + return try_query(cnf, options.get("ac_query", False), **options) + + +def load( + path_specs: typing.Union[ # noqa: UP007 + collections.abc.Iterable[ioinfo.PathOrIOInfoT], + ioinfo.PathOrIOInfoT, + ], + ac_parser: str | None = None, + *, + ac_dict: collections.abc.Callable | None = None, + ac_template: bool = False, + ac_context: MappingT | None = None, + **options: typing.Any, +) -> InDataExT: r"""Load from a file or files specified as ``path_specs``. Load single or multiple config files or multiple config files specified in @@ -291,20 +325,32 @@ def load(path_specs, ac_parser=None, ac_dict=None, ac_template=False, """ iois = ioinfo.makes(path_specs) if not iois: - raise ValueError(f'Maybe invalid input: {path_specs!r}') + msg = f"Maybe invalid input: {path_specs!r}" + raise ValueError(msg) if len(iois) == 1: - return single_load(iois[0], ac_parser=ac_parser, ac_dict=ac_dict, - ac_template=ac_template, ac_context=ac_context, - **options) + return single_load( + iois[0], ac_parser=ac_parser, ac_dict=ac_dict, + ac_template=ac_template, ac_context=ac_context, + **options, + ) - return multi_load(iois, ac_parser=ac_parser, ac_dict=ac_dict, - ac_template=ac_template, ac_context=ac_context, - **options) + return multi_load( + iois, ac_parser=ac_parser, ac_dict=ac_dict, + ac_template=ac_template, ac_context=ac_context, + **options, + ) -def loads(content, ac_parser=None, ac_dict=None, ac_template=False, - ac_context=None, **options): +def loads( + content: str, + ac_parser: MaybeParserOrIdOrTypeT = None, + *, + ac_dict: collections.abc.Callable | None = None, + 
ac_template: str | bool = False, + ac_context: MappingT | None = None, + **options: typing.Any, +) -> InDataExT: """Load data from a str, ``content``. :param content: Configuration file's content (a string) @@ -331,14 +377,16 @@ def loads(content, ac_parser=None, ac_dict=None, ac_template=False, stacklevel=2) return None - psr = parsers_find(None, forced_type=ac_parser) + psr: ParserT = parsers_find(None, forced_type=ac_parser) schema = None - ac_schema = options.get('ac_schema', None) + ac_schema = options.get("ac_schema") if ac_schema is not None: - options['ac_schema'] = None - schema = loads(ac_schema, ac_parser=psr, ac_dict=ac_dict, - ac_template=ac_template, ac_context=ac_context, - **options) + options["ac_schema"] = None + schema = loads( + ac_schema, ac_parser=psr, ac_dict=ac_dict, + ac_template=ac_template, ac_context=ac_context, + **options, + ) if ac_template: compiled = try_render(content=content, ctx=ac_context, **options) @@ -349,6 +397,4 @@ def loads(content, ac_parser=None, ac_dict=None, ac_template=False, if not is_valid(cnf, schema, **options): return None - return try_query(cnf, options.get('ac_query', False), **options) - -# vim:sw=4:ts=4:et: + return try_query(cnf, options.get("ac_query", False), **options) diff --git a/src/anyconfig/api/_open.py b/src/anyconfig/api/_open.py index dfb85921..d439df21 100644 --- a/src/anyconfig/api/_open.py +++ b/src/anyconfig/api/_open.py @@ -1,20 +1,26 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """A API to open files by detecting those type automatically.""" +from __future__ import annotations + import typing import warnings from .. 
import ioinfo, parsers -from .datatypes import ParserT + +if typing.TYPE_CHECKING: + from .datatypes import ParserT # pylint: disable=redefined-builtin -def open(path: ioinfo.PathOrIOInfoT, - mode: typing.Optional[str] = None, - ac_parser: parsers.MaybeParserT = None, - **options) -> typing.IO: +def open( + path: ioinfo.PathOrIOInfoT, + mode: str | None = None, + ac_parser: parsers.MaybeParserT = None, + **options: dict[str, typing.Any], +) -> typing.IO: """Open given file ``path`` with appropriate open flag. :param path: Configuration file path @@ -32,18 +38,17 @@ def open(path: ioinfo.PathOrIOInfoT, :raises: ValueError, UnknownProcessorTypeError, UnknownFileTypeError """ if not path: - raise ValueError(f'Invalid argument, path: {path!r}') + msg = f"Invalid argument, path: {path!r}" + raise ValueError(msg) ioi = ioinfo.make(path) if ioinfo.is_stream(ioi): - warnings.warn(f'Looks already opened stream: {ioi!r}', stacklevel=2) - return typing.cast(typing.IO, ioi.src) + warnings.warn(f"Looks already opened stream: {ioi!r}", stacklevel=2) + return typing.cast("typing.IO", ioi.src) psr: ParserT = parsers.find(ioi, forced_type=ac_parser) - if mode is not None and mode.startswith('w'): + if mode is not None and mode.startswith("w"): return psr.wopen(ioi.path, **options) return psr.ropen(ioi.path, **options) - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/api/datatypes.py b/src/anyconfig/api/datatypes.py index 19b5c7a9..f82fe643 100644 --- a/src/anyconfig/api/datatypes.py +++ b/src/anyconfig/api/datatypes.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2012 Satoru SATOH +# Copyright (C) 2012 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=unused-import,import-error,invalid-name @@ -9,8 +9,5 @@ from ..backend import base from ..common import InDataT - MaybeDataT = typing.Optional[InDataT] ParserT = base.Parser - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/api/utils.py b/src/anyconfig/api/utils.py index f3e37bd8..eec72f96 100644 --- 
a/src/anyconfig/api/utils.py +++ b/src/anyconfig/api/utils.py @@ -1,20 +1,20 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH +# Copyright (C) 2012 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # """Utility funtions for anyconfig.api.""" +from __future__ import annotations + import typing if typing.TYPE_CHECKING: from .. import ioinfo -def are_same_file_types(objs: typing.List['ioinfo.IOInfo']) -> bool: +def are_same_file_types(objs: list[ioinfo.IOInfo]) -> bool: """Test if given objects have same types (extensions).""" if not objs: return False ext = objs[0].extension return all(p.extension == ext for p in objs[1:]) - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/backend/__init__.py b/src/anyconfig/backend/__init__.py index 33bb7d64..7dfb9965 100644 --- a/src/anyconfig/backend/__init__.py +++ b/src/anyconfig/backend/__init__.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2021 Satoru SATOH +# Copyright (C) 2011 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # # Suppress import positions after some global variables are defined @@ -16,10 +16,10 @@ sh, toml, yaml, - xml + xml, ) from .base import ( - ParserT, ParsersT, ParserClssT + ParserT, ParsersT, ParserClssT, ) @@ -29,28 +29,28 @@ ] -def warn(name: str, feature: str): +def warn(name: str, feature: str) -> None: """Wraper for warnings.warn.""" warnings.warn( f"'{name}' module is not available. 
Disabled {feature} support.", - category=ImportWarning, stacklevel=2 + category=ImportWarning, stacklevel=2, ) if yaml.PARSERS: PARSERS.extend(yaml.PARSERS) else: - warn('yaml', 'YAML') + warn("yaml", "YAML") if toml.PARSERS: PARSERS.extend(toml.PARSERS) else: - warn('toml', 'TOML') + warn("toml", "TOML") __all__ = [ - 'ParserT', 'ParsersT', 'ParserClssT', - 'PARSERS', + "PARSERS", + "ParserClssT", + "ParserT", + "ParsersT", ] - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/backend/base/__init__.py b/src/anyconfig/backend/base/__init__.py index 7bd44114..d973729c 100644 --- a/src/anyconfig/backend/base/__init__.py +++ b/src/anyconfig/backend/base/__init__.py @@ -1,23 +1,27 @@ # -# Copyright (C) 2021 - 2024 Satoru SATOH +# Copyright (C) 2021 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # """Backend basic classes, functions and constants.""" +from __future__ import annotations + import typing from .compat import BinaryFilesMixin from .datatypes import ( - GenContainerT, OptionsT, InDataExT, OutDataExT, IoiT + GenContainerT, OptionsT, + InDataT, InDataExT, OutDataExT, IoiT, + PathOrStrT, ) from .dumpers import ( - ToStringDumperMixin, ToStreamDumperMixin, BinaryDumperMixin + ToStringDumperMixin, ToStreamDumperMixin, BinaryDumperMixin, ) from .loaders import ( LoaderMixin, FromStringLoaderMixin, FromStreamLoaderMixin, - BinaryLoaderMixin + BinaryLoaderMixin, ) from .utils import ( - ensure_outdir_exists, to_method + ensure_outdir_exists, to_method, ) from .parsers import ( Parser, @@ -25,21 +29,21 @@ ) -ParserT = typing.TypeVar('ParserT', bound=Parser) -ParsersT = typing.List[ParserT] -ParserClssT = typing.List[typing.Type[ParserT]] +ParserT = typing.TypeVar("ParserT", bound=Parser) +ParsersT = list[ParserT] +ParserClssT = list[type[ParserT]] __all__ = [ - 'BinaryFilesMixin', - 'GenContainerT', 'OptionsT', 'InDataExT', 'OutDataExT', 'IoiT', - 'ToStringDumperMixin', 'ToStreamDumperMixin', 'BinaryDumperMixin', - 'LoaderMixin', - 'FromStringLoaderMixin', 
'FromStreamLoaderMixin', 'BinaryLoaderMixin', - 'ensure_outdir_exists', 'to_method', - 'Parser', - 'StringParser', 'StreamParser', 'StringStreamFnParser', - 'ParserT', 'ParsersT', 'ParserClssT', + "BinaryFilesMixin", + "GenContainerT", "OptionsT", + "InDataT", "InDataExT", "OutDataExT", "IoiT", + "PathOrStrT", + "ToStringDumperMixin", "ToStreamDumperMixin", "BinaryDumperMixin", + "LoaderMixin", + "FromStringLoaderMixin", "FromStreamLoaderMixin", "BinaryLoaderMixin", + "ensure_outdir_exists", "to_method", + "Parser", + "StringParser", "StreamParser", "StringStreamFnParser", + "ParserT", "ParsersT", "ParserClssT", ] - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/backend/base/compat.py b/src/anyconfig/backend/base/compat.py index bb153cef..a460a6fa 100644 --- a/src/anyconfig/backend/base/compat.py +++ b/src/anyconfig/backend/base/compat.py @@ -1,31 +1,43 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=consider-using-with """Module to provide backward compatibility for plugins.""" +from __future__ import annotations + +import pathlib import typing +if typing.TYPE_CHECKING: + from .datatypes import PathOrStrT + class BinaryFilesMixin: """Mixin class to open configuration files as a binary data.""" - _open_flags: typing.Tuple[str, str] = ('rb', 'wb') + _open_flags: tuple[str, str] = ("rb", "wb") @classmethod - def ropen(cls, filepath, **kwargs): + def ropen( + cls, filepath: PathOrStrT, **options: typing.Any, + ) -> typing.IO: """Open ``filepath`` with read only mode. :param filepath: Path to file to open to read data """ - return open(filepath, cls._open_flags[0], **kwargs) + return pathlib.Path(filepath).open( + cls._open_flags[0], **options, + ) @classmethod - def wopen(cls, filepath, **kwargs): + def wopen( + cls, filepath: PathOrStrT, **options: typing.Any, + ) -> typing.IO: """Open ``filepath`` with write mode. 
:param filepath: Path to file to open to write data to """ - return open(filepath, cls._open_flags[1], **kwargs) - -# vim:sw=4:ts=4:et: + return pathlib.Path(filepath).open( + cls._open_flags[1], **options, + ) diff --git a/src/anyconfig/backend/base/datatypes.py b/src/anyconfig/backend/base/datatypes.py index c2cc339a..84e8be6e 100644 --- a/src/anyconfig/backend/base/datatypes.py +++ b/src/anyconfig/backend/base/datatypes.py @@ -1,23 +1,28 @@ # -# Copyright (C) 2021 Satoru SATOH +# Copyright (C) 2021 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # """Utility functions in anyconfig.backend.base.""" +from __future__ import annotations + +import collections.abc +import pathlib import typing from ...common import ( - InDataT, InDataExT + InDataT, InDataExT, ) from ...ioinfo import ( - IOInfo, PathOrIOInfoT + IOInfo, PathOrIOInfoT, ) + OutDataExT = InDataExT IoiT = IOInfo MaybeFilePathT = typing.Optional[PathOrIOInfoT] -GenContainerT = typing.Callable[..., InDataT] -OptionsT = typing.Dict[str, typing.Any] +GenContainerT = collections.abc.Callable[..., InDataT] +OptionsT = dict[str, typing.Any] -# vim:sw=4:ts=4:et: +PathOrStrT = typing.Union[str, pathlib.Path] diff --git a/src/anyconfig/backend/base/dumpers.py b/src/anyconfig/backend/base/dumpers.py index 76a22934..afdc5136 100644 --- a/src/anyconfig/backend/base/dumpers.py +++ b/src/anyconfig/backend/base/dumpers.py @@ -1,20 +1,25 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=consider-using-with, unspecified-encoding """Abstract and basic dumpes.""" +from __future__ import annotations + import io +import pathlib import typing from ... 
import ioinfo, utils -from .datatypes import ( - InDataExT, IoiT -) from .utils import ( - ensure_outdir_exists, not_implemented + ensure_outdir_exists, not_implemented, ) +if typing.TYPE_CHECKING: + from .datatypes import ( + InDataExT, IoiT, PathOrStrT, + ) + _ENCODING = ioinfo.get_encoding() @@ -34,62 +39,75 @@ class DumperMixin: - _open_write_mode: Backend option to specify write mode passed to open() """ - _dump_opts: typing.List[str] = [] - _open_write_mode = 'w' + _dump_opts: tuple[str, ...] = () + _open_write_mode: typing.ClassVar[str] = "w" - def wopen(self, filepath: str, **kwargs): + def wopen( + self, filepath: PathOrStrT, **options: typing.Any, + ) -> typing.IO: """Open file ``filepath`` with the write mode ``_open_write_mode``.""" - if 'encoding' not in kwargs and self._open_write_mode == 'w': - kwargs["encoding"] = _ENCODING + if "encoding" not in options and self._open_write_mode == "w": + options["encoding"] = _ENCODING - return open( - filepath, self._open_write_mode, **kwargs + return pathlib.Path(filepath).open( + self._open_write_mode, **options, ) - def dump_to_string(self, cnf: InDataExT, **kwargs) -> str: + def dump_to_string( + self, cnf: InDataExT, **options: typing.Any, + ) -> str: """Dump config 'cnf' to a string. :param cnf: Configuration data to dump - :param kwargs: optional keyword parameters to be sanitized :: dict + :param options: optional keyword parameters to be sanitized :: dict :return: string represents the configuration """ - not_implemented(self, cnf, **kwargs) - return '' + not_implemented(self, cnf, **options) + return "" - def dump_to_path(self, cnf: InDataExT, filepath: str, **kwargs) -> None: + def dump_to_path( + self, cnf: InDataExT, filepath: PathOrStrT, + **options: typing.Any, + ) -> None: """Dump config 'cnf' to a file 'filepath'. 
:param cnf: Configuration data to dump :param filepath: Config file path - :param kwargs: optional keyword parameters to be sanitized :: dict + :param options: optional keyword parameters to be sanitized :: dict """ - not_implemented(self, cnf, filepath, **kwargs) + not_implemented(self, cnf, filepath, **options) - def dump_to_stream(self, cnf: InDataExT, stream: typing.IO, **kwargs - ) -> None: + def dump_to_stream( + self, cnf: InDataExT, stream: typing.IO, + **options: typing.Any, + ) -> None: """Dump config 'cnf' to a file-like object 'stream'. TODO: How to process socket objects same as file objects ? :param cnf: Configuration data to dump :param stream: Config file or file like object - :param kwargs: optional keyword parameters to be sanitized :: dict + :param options: optional keyword parameters to be sanitized :: dict """ - not_implemented(self, cnf, stream, **kwargs) + not_implemented(self, cnf, stream, **options) - def dumps(self, cnf: InDataExT, **kwargs) -> str: + def dumps( + self, cnf: InDataExT, **options: typing.Any, + ) -> str: """Dump config 'cnf' to a string. :param cnf: Configuration data to dump - :param kwargs: optional keyword parameters to be sanitized :: dict + :param options: optional keyword parameters to be sanitized :: dict :return: string represents the configuration """ - kwargs = utils.filter_options(self._dump_opts, kwargs) - return self.dump_to_string(cnf, **kwargs) + options = utils.filter_options(self._dump_opts, options) + return self.dump_to_string(cnf, **options) - def dump(self, cnf: InDataExT, ioi: IoiT, **kwargs): + def dump( + self, cnf: InDataExT, ioi: IoiT, **options: typing.Any, + ) -> None: """Dump config 'cnf' to output object of which 'ioi' referring. 
:param cnf: Configuration data to dump @@ -97,22 +115,24 @@ def dump(self, cnf: InDataExT, ioi: IoiT, **kwargs): an 'anyconfig.cmmon.IOInfo' namedtuple object provides various info of input object to load data from - :param kwargs: optional keyword parameters to be sanitized :: dict + :param options: optional keyword parameters to be sanitized :: dict :raises IOError, OSError, AttributeError: When dump failed. """ - kwargs = utils.filter_options(self._dump_opts, kwargs) + options = utils.filter_options(self._dump_opts, options) if ioinfo.is_stream(ioi): - self.dump_to_stream(cnf, typing.cast(typing.IO, ioi.src), **kwargs) + self.dump_to_stream( + cnf, typing.cast("typing.IO", ioi.src), **options, + ) else: ensure_outdir_exists(ioi.path) - self.dump_to_path(cnf, ioi.path, **kwargs) + self.dump_to_path(cnf, ioi.path, **options) class BinaryDumperMixin(DumperMixin): """Mixin class to dump binary (byte string) configuration data.""" - _open_write_mode: str = 'wb' + _open_write_mode: typing.ClassVar[str] = "wb" class ToStringDumperMixin(DumperMixin): @@ -126,27 +146,32 @@ class ToStringDumperMixin(DumperMixin): :meth:`dump_to_string` at least. """ - def dump_to_path(self, cnf: InDataExT, filepath: str, **kwargs) -> None: + def dump_to_path( + self, cnf: InDataExT, filepath: PathOrStrT, + **options: typing.Any, + ) -> None: """Dump config 'cnf' to a file 'filepath'. :param cnf: Configuration data to dump :param filepath: Config file path - :param kwargs: optional keyword parameters to be sanitized :: dict + :param options: optional keyword parameters to be sanitized :: dict """ with self.wopen(filepath) as out: - out.write(self.dump_to_string(cnf, **kwargs)) + out.write(self.dump_to_string(cnf, **options)) - def dump_to_stream(self, cnf: InDataExT, stream: typing.IO, **kwargs - ) -> None: + def dump_to_stream( + self, cnf: InDataExT, stream: typing.IO, + **options: typing.Any, + ) -> None: """Dump config 'cnf' to a file-like object 'stream'. 
TODO: How to process socket objects same as file objects ? :param cnf: Configuration data to dump :param stream: Config file or file like object - :param kwargs: optional keyword parameters to be sanitized :: dict + :param options: optional keyword parameters to be sanitized :: dict """ - stream.write(self.dump_to_string(cnf, **kwargs)) + stream.write(self.dump_to_string(cnf, **options)) class ToStreamDumperMixin(DumperMixin): @@ -159,26 +184,29 @@ class ToStreamDumperMixin(DumperMixin): :meth:`dump_to_stream` at least. """ - def dump_to_string(self, cnf: InDataExT, **kwargs) -> str: + def dump_to_string( + self, cnf: InDataExT, **options: typing.Any, + ) -> str: """Dump config 'cnf' to a string. :param cnf: Configuration data to dump - :param kwargs: optional keyword parameters to be sanitized :: dict + :param options: optional keyword parameters to be sanitized :: dict :return: Dict-like object holding config parameters """ stream = io.StringIO() - self.dump_to_stream(cnf, stream, **kwargs) + self.dump_to_stream(cnf, stream, **options) return stream.getvalue() - def dump_to_path(self, cnf: InDataExT, filepath: str, **kwargs) -> None: + def dump_to_path( + self, cnf: InDataExT, filepath: PathOrStrT, + **options: typing.Any, + ) -> None: """Dump config 'cnf' to a file 'filepath`. 
:param cnf: Configuration data to dump :param filepath: Config file path - :param kwargs: optional keyword parameters to be sanitized :: dict + :param options: optional keyword parameters to be sanitized :: dict """ with self.wopen(filepath) as out: - self.dump_to_stream(cnf, out, **kwargs) - -# vim:sw=4:ts=4:et: + self.dump_to_stream(cnf, out, **options) diff --git a/src/anyconfig/backend/base/loaders.py b/src/anyconfig/backend/base/loaders.py index 6fb8c5da..a23faafe 100644 --- a/src/anyconfig/backend/base/loaders.py +++ b/src/anyconfig/backend/base/loaders.py @@ -1,20 +1,24 @@ # -# Copyright (C) 2012 - 2023 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=consider-using-with, unspecified-encoding """Abstract and basic loaders.""" +from __future__ import annotations + import collections import io import pathlib import typing from ... import ioinfo, utils -from .datatypes import ( - InDataExT, IoiT, GenContainerT, OptionsT -) from .utils import not_implemented +if typing.TYPE_CHECKING: + from .datatypes import ( + InDataExT, IoiT, GenContainerT, OptionsT, PathOrStrT, + ) + DATA_DEFAULT: InDataExT = {} @@ -40,11 +44,11 @@ class LoaderMixin: - _open_read_mode: Backend option to specify read mode passed to open() """ - _load_opts: typing.List[str] = [] - _ordered: bool = False - _allow_primitives: bool = False - _dict_opts: typing.List[str] = [] - _open_read_mode: str = 'r' + _load_opts: tuple[str, ...] = () + _ordered: typing.ClassVar[bool] = False + _allow_primitives: typing.ClassVar[bool] = False + _dict_opts: tuple[str, ...] 
= () + _open_read_mode: typing.ClassVar[str] = "r" @classmethod def ordered(cls) -> bool: @@ -62,20 +66,24 @@ def allow_primitives(cls) -> bool: return cls._allow_primitives @classmethod - def dict_options(cls) -> typing.List[str]: + def dict_options(cls) -> tuple[str, ...]: """Get the list of dict factory options.""" return cls._dict_opts - def ropen(self, filepath, **kwargs): + def ropen( + self, filepath: PathOrStrT, **options: typing.Any, + ) -> typing.IO: """Open files with read only mode.""" - if 'encoding' not in kwargs and self._open_read_mode == 'r': - kwargs["encoding"] = _ENCODING + if "encoding" not in options and self._open_read_mode == "r": + options["encoding"] = _ENCODING - return open( - filepath, self._open_read_mode, **kwargs + return pathlib.Path(filepath).open( + self._open_read_mode, **options, ) - def _container_factory(self, **options) -> GenContainerT: + def _container_factory( + self, **options: typing.Any, + ) -> GenContainerT: """Get the factory to make container objects. The order of prirorities are ac_dict, backend specific dict class @@ -97,7 +105,9 @@ def _container_factory(self, **options) -> GenContainerT: return dict - def _load_options(self, container: GenContainerT, **options) -> OptionsT: + def _load_options( + self, container: GenContainerT, **options: typing.Any, + ) -> OptionsT: """Select backend specific loading options.""" # Force set dict option if available in backend. For example, # options["object_hook"] will be OrderedDict if 'container' was @@ -107,46 +117,54 @@ def _load_options(self, container: GenContainerT, **options) -> OptionsT: return utils.filter_options(self._load_opts, options) - def load_from_string(self, content: str, container: GenContainerT, - **kwargs) -> InDataExT: + def load_from_string( + self, content: str, container: GenContainerT, + **options: typing.Any, + ) -> InDataExT: """Load config from given string 'content'. 
:param content: Config content string :param container: callble to make a container object later - :param kwargs: optional keyword parameters to be sanitized :: dict + :param options: optional keyword parameters to be sanitized :: dict :return: Dict-like object holding config parameters """ - not_implemented(self, content, container, **kwargs) + not_implemented(self, content, container, **options) return DATA_DEFAULT - def load_from_path(self, filepath: str, container: GenContainerT, - **kwargs) -> InDataExT: + def load_from_path( + self, filepath: PathOrStrT, container: GenContainerT, + **options: typing.Any, + ) -> InDataExT: """Load config from given file path 'filepath`. :param filepath: Config file path :param container: callble to make a container object later - :param kwargs: optional keyword parameters to be sanitized :: dict + :param options: optional keyword parameters to be sanitized :: dict :return: Dict-like object holding config parameters """ - not_implemented(self, filepath, container, **kwargs) + not_implemented(self, filepath, container, **options) return DATA_DEFAULT - def load_from_stream(self, stream: typing.IO, container: GenContainerT, - **kwargs) -> InDataExT: + def load_from_stream( + self, stream: typing.IO, container: GenContainerT, + **options: typing.Any, + ) -> InDataExT: """Load config from given file like object 'stream`. :param stream: Config file or file like object :param container: callble to make a container object later - :param kwargs: optional keyword parameters to be sanitized :: dict + :param options: optional keyword parameters to be sanitized :: dict :return: Dict-like object holding config parameters """ - not_implemented(self, stream, container, **kwargs) + not_implemented(self, stream, container, **options) return DATA_DEFAULT - def loads(self, content: str, **options) -> InDataExT: + def loads( + self, content: str, **options: typing.Any, + ) -> InDataExT: """Load config from given string 'content' after some checks. 
:param content: Config file content @@ -165,8 +183,10 @@ def loads(self, content: str, **options) -> InDataExT: options = self._load_options(container, **options) return self.load_from_string(content, container, **options) - def load(self, ioi: IoiT, ac_ignore_missing: bool = False, - **options) -> InDataExT: + def load( + self, ioi: IoiT, *, ac_ignore_missing: bool = False, + **options: typing.Any, + ) -> InDataExT: """Load config from ``ioi``. :param ioi: @@ -192,7 +212,7 @@ def load(self, ioi: IoiT, ac_ignore_missing: bool = False, if ioinfo.is_stream(ioi): cnf = self.load_from_stream( - typing.cast(typing.IO, ioi.src), container, **options + typing.cast("typing.IO", ioi.src), container, **options, ) else: if ac_ignore_missing and not pathlib.Path(ioi.path).exists(): @@ -206,7 +226,7 @@ def load(self, ioi: IoiT, ac_ignore_missing: bool = False, class BinaryLoaderMixin(LoaderMixin): """Mixin class to load binary (byte string) configuration files.""" - _open_read_mode: str = 'rb' + _open_read_mode = "rb" class FromStringLoaderMixin(LoaderMixin): @@ -219,30 +239,34 @@ class FromStringLoaderMixin(LoaderMixin): :meth:`load_from_string` at least. """ - def load_from_stream(self, stream: typing.IO, container: GenContainerT, - **kwargs) -> InDataExT: + def load_from_stream( + self, stream: typing.IO, container: GenContainerT, + **options: typing.Any, + ) -> InDataExT: """Load config from given stream 'stream'. 
:param stream: Config file or file-like object :param container: callble to make a container object later - :param kwargs: optional keyword parameters to be sanitized :: dict + :param options: optional keyword parameters to be sanitized :: dict :return: Dict-like object holding config parameters """ - return self.load_from_string(stream.read(), container, **kwargs) + return self.load_from_string(stream.read(), container, **options) - def load_from_path(self, filepath: str, container: GenContainerT, - **kwargs) -> InDataExT: + def load_from_path( + self, filepath: PathOrStrT, container: GenContainerT, + **options: typing.Any, + ) -> InDataExT: """Load config from given file path 'filepath'. :param filepath: Config file path :param container: callble to make a container object later - :param kwargs: optional keyword parameters to be sanitized :: dict + :param options: optional keyword parameters to be sanitized :: dict :return: Dict-like object holding config parameters """ with self.ropen(filepath) as inp: - return self.load_from_stream(inp, container, **kwargs) + return self.load_from_stream(inp, container, **options) class FromStreamLoaderMixin(LoaderMixin): @@ -255,30 +279,32 @@ class FromStreamLoaderMixin(LoaderMixin): :meth:`load_from_stream` at least. """ - def load_from_string(self, content: str, container: GenContainerT, - **kwargs) -> InDataExT: + def load_from_string( + self, content: str, container: GenContainerT, + **options: typing.Any, + ) -> InDataExT: """Load config from given string 'cnf_content'. 
:param content: Config content string :param container: callble to make a container object later - :param kwargs: optional keyword parameters to be sanitized :: dict + :param options: optional keyword parameters to be sanitized :: dict :return: Dict-like object holding config parameters """ - return self.load_from_stream(io.StringIO(content), - container, **kwargs) + iof = io.BytesIO if isinstance(content, bytes) else io.StringIO + return self.load_from_stream(iof(content), container, **options) - def load_from_path(self, filepath: str, container: GenContainerT, - **kwargs) -> InDataExT: + def load_from_path( + self, filepath: PathOrStrT, container: GenContainerT, + **options: typing.Any, + ) -> InDataExT: """Load config from given file path 'filepath'. :param filepath: Config file path :param container: callble to make a container object later - :param kwargs: optional keyword parameters to be sanitized :: dict + :param options: optional keyword parameters to be sanitized :: dict :return: Dict-like object holding config parameters """ with self.ropen(filepath) as inp: - return self.load_from_stream(inp, container, **kwargs) - -# vim:sw=4:ts=4:et: + return self.load_from_stream(inp, container, **options) diff --git a/src/anyconfig/backend/base/parsers.py b/src/anyconfig/backend/base/parsers.py index abe32314..9e5aa918 100644 --- a/src/anyconfig/backend/base/parsers.py +++ b/src/anyconfig/backend/base/parsers.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # r"""Abstract implementation of backend modules. 
@@ -15,24 +15,28 @@ - :meth:`dump_to_stream`: Dump config to a file or file-like object - :meth:`dump_to_path`: Dump config to a file of given path """ +from __future__ import annotations + +import collections.abc import typing from ...models import processor from ...utils import is_dict_like from .datatypes import ( - InDataExT, GenContainerT + InDataExT, GenContainerT, ) from .dumpers import ( - DumperMixin, ToStringDumperMixin, ToStreamDumperMixin + DumperMixin, ToStringDumperMixin, ToStreamDumperMixin, ) from .loaders import ( - LoaderMixin, FromStringLoaderMixin, FromStreamLoaderMixin + LoaderMixin, FromStringLoaderMixin, FromStreamLoaderMixin, ) class Parser(LoaderMixin, DumperMixin, processor.Processor): - """ - Abstract parser to provide basic implementation of some methods as below. + """Abstract parser to provide basic implementation. + + The following members will be expected to be overridden. - _type: Parser type indicate which format it supports - _priority: Priority to select it if there are other parsers of same type @@ -42,7 +46,7 @@ class Parser(LoaderMixin, DumperMixin, processor.Processor): .. 
seealso:: the doc of :class:`anyconfig.models.processor.Processor` """ - _cid: str = 'base' + _cid = "base" class StringParser(Parser, FromStringLoaderMixin, ToStringDumperMixin): @@ -65,15 +69,17 @@ class StreamParser(Parser, FromStreamLoaderMixin, ToStreamDumperMixin): """ -LoadFnT = typing.Callable[..., InDataExT] -DumpFnT = typing.Callable[..., typing.Optional[str]] +LoadFnT = collections.abc.Callable[..., InDataExT] +DumpFnT = collections.abc.Callable[..., str] -def load_with_fn(load_fn: typing.Optional[LoadFnT], - content_or_strm: typing.Union[str, typing.IO], - container: GenContainerT, - allow_primitives: bool = False, - **options) -> InDataExT: +def load_with_fn( + load_fn: LoadFnT | None, + content_or_strm: str | bytes | typing.IO, + container: GenContainerT, *, + allow_primitives: bool = False, + **options: dict[str, typing.Any], +) -> InDataExT: """Load data from given string or stream 'content_or_strm'. :param load_fn: Callable to load data @@ -87,7 +93,8 @@ def load_with_fn(load_fn: typing.Optional[LoadFnT], :return: container object holding data """ if load_fn is None: - raise TypeError('The first argument "load_fn" must be a callable!') + msg = "The first argument 'load_fn' must be a callable!" + raise TypeError(msg) ret = load_fn(content_or_strm, **options) if is_dict_like(ret): @@ -96,9 +103,11 @@ def load_with_fn(load_fn: typing.Optional[LoadFnT], return ret if allow_primitives else container(ret) -def dump_with_fn(dump_fn: typing.Optional[DumpFnT], - data: InDataExT, stream: typing.Optional[typing.IO], - **options) -> typing.Optional[str]: +def dump_with_fn( + dump_fn: DumpFnT | None, + data: InDataExT, stream: typing.IO | None, + **options: dict[str, typing.Any], +) -> str: """Dump 'data' to a string. If 'stream' is None, or dump 'data' to a file or file-like object 'stream'. 
@@ -111,7 +120,8 @@ def dump_with_fn(dump_fn: typing.Optional[DumpFnT], :return: String represents data if stream is None or None """ if dump_fn is None: - raise TypeError('The first argument "dump_fn" must be a callable!') + msg = "The first argument 'dump_fn' must be a callable!" + raise TypeError(msg) if stream is None: return dump_fn(data, **options) @@ -139,13 +149,15 @@ class StringStreamFnParser(Parser, FromStreamLoaderMixin, ToStreamDumperMixin): :seealso: :class:`anyconfig.backend.json.Parser` """ - _load_from_string_fn: typing.Optional[LoadFnT] = None - _load_from_stream_fn: typing.Optional[LoadFnT] = None - _dump_to_string_fn: typing.Optional[DumpFnT] = None - _dump_to_stream_fn: typing.Optional[DumpFnT] = None + _load_from_string_fn: LoadFnT | None = None + _load_from_stream_fn: LoadFnT | None = None + _dump_to_string_fn: DumpFnT | None = None + _dump_to_stream_fn: DumpFnT | None = None - def load_from_string(self, content: str, container: GenContainerT, - **options) -> InDataExT: + def load_from_string( + self, content: str | bytes, container: GenContainerT, + **options: dict[str, typing.Any], + ) -> InDataExT: """Load configuration data from given string 'content'. :param content: Configuration string @@ -154,12 +166,16 @@ def load_from_string(self, content: str, container: GenContainerT, :return: container object holding the configuration data """ - return load_with_fn(self._load_from_string_fn, content, container, - allow_primitives=self.allow_primitives(), - **options) - - def load_from_stream(self, stream: typing.IO, container: GenContainerT, - **options) -> InDataExT: + return load_with_fn( + self._load_from_string_fn, content, container, + allow_primitives=self.allow_primitives(), + **options, + ) + + def load_from_stream( + self, stream: typing.IO, container: GenContainerT, + **options: dict[str, typing.Any], + ) -> InDataExT: """Load data from given stream 'stream'. 
:param stream: Stream provides configuration data @@ -168,32 +184,35 @@ def load_from_stream(self, stream: typing.IO, container: GenContainerT, :return: container object holding the configuration data """ - return load_with_fn(self._load_from_stream_fn, stream, container, - allow_primitives=self.allow_primitives(), - **options) - - def dump_to_string(self, cnf: InDataExT, **kwargs) -> str: + return load_with_fn( + self._load_from_stream_fn, stream, container, + allow_primitives=self.allow_primitives(), + **options, + ) + + def dump_to_string( + self, cnf: InDataExT, **options: dict[str, typing.Any], + ) -> str: """Dump config 'cnf' to a string. :param cnf: Configuration data to dump - :param kwargs: optional keyword parameters to be sanitized :: dict + :param options: optional keyword parameters to be sanitized :: dict :return: string represents the configuration """ return dump_with_fn(self._dump_to_string_fn, cnf, None, - **kwargs) # type: ignore + **options) - def dump_to_stream(self, cnf: InDataExT, stream: typing.IO, - **kwargs) -> None: + def dump_to_stream( + self, cnf: InDataExT, stream: typing.IO, + **options: dict[str, typing.Any], + ) -> None: """Dump config 'cnf' to a file-like object 'stream'. TODO: How to process socket objects same as file objects ? 
:param cnf: Configuration data to dump :param stream: Config file or file like object - :param kwargs: optional keyword parameters to be sanitized :: dict + :param options: optional keyword parameters to be sanitized :: dict """ - dump_with_fn(self._dump_to_stream_fn, cnf, stream, - **kwargs) # type: ignore - -# vim:sw=4:ts=4:et: + dump_with_fn(self._dump_to_stream_fn, cnf, stream, **options) diff --git a/src/anyconfig/backend/base/utils.py b/src/anyconfig/backend/base/utils.py index 536916d5..1fb6e1a5 100644 --- a/src/anyconfig/backend/base/utils.py +++ b/src/anyconfig/backend/base/utils.py @@ -1,19 +1,28 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Provides utility functions in anyconfig.backend.base.""" +from __future__ import annotations + import functools import pathlib import typing +if typing.TYPE_CHECKING: + import collections.abc + -def not_implemented(*_args, **_kwargs) -> None: +def not_implemented( + *_args: typing.Any, **_options: typing.Any, +) -> None: """Raise NotImplementedError.""" - raise NotImplementedError() + raise NotImplementedError -def ensure_outdir_exists(filepath: typing.Union[str, pathlib.Path]) -> None: +def ensure_outdir_exists( + filepath: str | pathlib.Path, +) -> None: """Make dir to dump 'filepath' if that dir does not exist. :param filepath: path of file to dump @@ -21,8 +30,9 @@ def ensure_outdir_exists(filepath: typing.Union[str, pathlib.Path]) -> None: pathlib.Path(filepath).parent.mkdir(parents=True, exist_ok=True) -def to_method(func: typing.Callable[..., typing.Any] - ) -> typing.Callable[..., typing.Any]: +def to_method( + func: collections.abc.Callable[..., typing.Any], +) -> collections.abc.Callable[..., typing.Any]: """Lift :func:`func` to a method. It will be called with the first argument 'self' ignored. 
@@ -30,10 +40,10 @@ def to_method(func: typing.Callable[..., typing.Any] :param func: Any callable object """ @functools.wraps(func) - def wrapper(*args, **kwargs): + def wrapper( + *args: typing.Any, **kwargs: typing.Any, + ) -> collections.abc.Callable[..., typing.Any]: """Original function decorated.""" return func(*args[1:], **kwargs) return wrapper - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/backend/ini/__init__.py b/src/anyconfig/backend/ini/__init__.py index de9b51ca..41df4870 100644 --- a/src/anyconfig/backend/ini/__init__.py +++ b/src/anyconfig/backend/ini/__init__.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2024 Satoru SATOH +# Copyright (C) 2011 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # """Backend modules to load and dump INI data. diff --git a/src/anyconfig/backend/ini/configparser.py b/src/anyconfig/backend/ini/configparser.py index 6780cd89..c9224a70 100644 --- a/src/anyconfig/backend/ini/configparser.py +++ b/src/anyconfig/backend/ini/configparser.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2024 Satoru SATOH +# Copyright (C) 2011 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=deprecated-method @@ -32,6 +32,8 @@ - Introduce 'ac_parse_value' keyword option to switch behaviors, same as original configparser and rich backend parsing each parameter values. """ +from __future__ import annotations + import configparser import re import typing @@ -39,25 +41,29 @@ from ... import parser, utils from .. 
import base +if typing.TYPE_CHECKING: + import collections.abc + -_SEP = ',' +_SEP = "," try: DEFAULTSECT: str = configparser.DEFAULTSECT except AttributeError: - DEFAULTSECT: str = 'DEFAULT' # type: ignore + DEFAULTSECT: str = "DEFAULT" # type: ignore[no-redef] -_QUOTED_RE: typing.Pattern = re.compile( - r'^(' +_QUOTED_RE: re.Pattern = re.compile( + r"^(" r'".*"' - r'|' + r"|" r"'.*'" - r')$' + r")$", ) -def parse(val_s: str, sep: str = _SEP, - quoted_re: typing.Pattern = _QUOTED_RE) -> typing.Any: +def parse( + val_s: str, sep: str = _SEP, quoted_re: re.Pattern = _QUOTED_RE, +) -> typing.Any: """Parse expression. FIXME: May be too naive implementation. @@ -70,13 +76,14 @@ def parse(val_s: str, sep: str = _SEP, if sep in val_s: return [ - parser.parse(typing.cast(str, x)) for x in parser.parse_list(val_s) + parser.parse(typing.cast("str", x)) + for x in parser.parse_list(val_s) ] return parser.parse(val_s) -def _to_s(val: typing.Any, sep: str = ', ') -> str: +def _to_s(val: typing.Any, sep: str = ", ") -> str: """Convert any object to string. :param val: An object @@ -88,29 +95,30 @@ def _to_s(val: typing.Any, sep: str = ', ') -> str: return str(val) -def parsed_items(items: typing.Iterable[typing.Tuple[str, typing.Any]], - sep: str = _SEP, **options - ) -> typing.Iterator[typing.Tuple[str, typing.Any]]: +def parsed_items( + items: collections.abc.Iterable[tuple[str, typing.Any]], + sep: str = _SEP, **options: typing.Any, +) -> collections.abc.Iterator[tuple[str, typing.Any]]: """Parse an iterable of items. 
:param items: List of pairs, [(key, value)], or generator yields pairs :param sep: Seprator string :return: Generator to yield (key, value) pair of 'dic' """ - __parse = parse if options.get('ac_parse_value') else utils.noop + __parse = parse if options.get("ac_parse_value") else utils.noop for key, val in items: - yield (key, __parse(val, sep)) # type: ignore + yield (key, __parse(val, sep)) # type: ignore[operator] -def _make_parser(**kwargs - ) -> typing.Tuple[typing.Dict[str, typing.Any], - configparser.ConfigParser]: +def _make_parser( + **kwargs: typing.Any, +) -> tuple[dict[str, typing.Any], configparser.ConfigParser]: """Make an instance of configparser.ConfigParser.""" # Optional arguments for configparser.ConfigParser{,readfp} kwargs_0 = utils.filter_options( - ('defaults', 'dict_type', 'allow_no_value', 'strict'), kwargs + ("defaults", "dict_type", "allow_no_value", "strict"), kwargs, ) - kwargs_1 = utils.filter_options(('filename', ), kwargs) + kwargs_1 = utils.filter_options(("filename", ), kwargs) try: psr = configparser.ConfigParser(**kwargs_0) @@ -118,13 +126,17 @@ def _make_parser(**kwargs # .. note:: # It seems ConfigParser.*ConfigParser in python 2.6 does not support # 'allow_no_value' option parameter, and TypeError will be thrown. - kwargs_0 = utils.filter_options(('defaults', 'dict_type'), kwargs) + kwargs_0 = utils.filter_options(("defaults", "dict_type"), kwargs) psr = configparser.ConfigParser(**kwargs_0) return (kwargs_1, psr) -def _load(stream, container, sep=_SEP, dkey=DEFAULTSECT, **kwargs): +def _load( + stream: typing.IO, container: base.GenContainerT, + sep: str = _SEP, dkey: str = DEFAULTSECT, + **kwargs: typing.Any, +) -> base.InDataT: """Load data from ``stream`` of which file should be in INI format. 
:param stream: File or file-like object provides ini-style conf @@ -138,7 +150,7 @@ def _load(stream, container, sep=_SEP, dkey=DEFAULTSECT, **kwargs): psr.read_file(stream, **kwargs_1) cnf = container() - kwargs['sep'] = sep + kwargs["sep"] = sep defaults = psr.defaults() if defaults: @@ -150,25 +162,28 @@ def _load(stream, container, sep=_SEP, dkey=DEFAULTSECT, **kwargs): return cnf -def _dumps_itr(cnf: typing.Dict[str, typing.Any], - dkey: str = DEFAULTSECT): +def _dumps_itr( + cnf: dict[str, typing.Any], dkey: str = DEFAULTSECT, +) -> collections.abc.Iterator[str]: """Dump data iterably. :param cnf: Configuration data to dump """ for sect, params in cnf.items(): - yield f'[{sect}]' + yield f"[{sect}]" for key, val in params.items(): if sect != dkey and dkey in cnf and cnf[dkey].get(key) == val: continue # It should be in [DEFAULT] section. - yield f'{key!s} = {_to_s(val)}' + yield f"{key!s} = {_to_s(val)}" - yield '' # it will be a separator between each sections. + yield "" # it will be a separator between each sections. -def _dumps(cnf: typing.Dict[str, typing.Any], **_kwargs) -> str: +def _dumps( + cnf: dict[str, typing.Any], **_kwargs: typing.Any, +) -> str: """Dump data as a str. :param cnf: Configuration data to dump @@ -184,16 +199,14 @@ class Parser(base.Parser, base.FromStreamLoaderMixin, base.ToStringDumperMixin): """Ini config files parser.""" - _cid: str = 'ini.configparser' - _type: str = 'ini' - _extensions: typing.List[str] = ['ini'] - _load_opts: typing.List[str] = [ - 'defaults', 'dict_type', 'allow_no_value', 'filename', - 'ac_parse_value', 'strict' - ] - _dict_opts: typing.List[str] = ['dict_type'] + _cid: typing.ClassVar[str] = "ini.configparser" + _type: typing.ClassVar[str] = "ini" + _extensions: tuple[str, ...] = ("ini", ) + _load_opts: tuple[str, ...] = ( + "defaults", "dict_type", "allow_no_value", "filename", + "ac_parse_value", "strict", + ) + _dict_opts: tuple[str, ...] 
= ("dict_type", ) dump_to_string = base.to_method(_dumps) load_from_stream = base.to_method(_load) - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/backend/json/__init__.py b/src/anyconfig/backend/json/__init__.py index 14a75b5c..4b3e1b9e 100644 --- a/src/anyconfig/backend/json/__init__.py +++ b/src/anyconfig/backend/json/__init__.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2024 Satoru SATOH +# Copyright (C) 2011 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # """Backend modules to load and dump JSON data. @@ -25,5 +25,3 @@ PARSERS.append(SimpleJsonParser) except ImportError: pass - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/backend/json/common.py b/src/anyconfig/backend/json/common.py index 1378a14b..36f3af6d 100644 --- a/src/anyconfig/backend/json/common.py +++ b/src/anyconfig/backend/json/common.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2018 - 2024 Satoru SATOH +# Copyright (C) 2018 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # """Globals, functions common in some JSON backend modules. @@ -8,30 +8,36 @@ .. versionadded:: 0.9.8 """ -from .. import base +from __future__ import annotations +import typing -JSON_LOAD_OPTS = ['cls', 'object_hook', 'parse_float', 'parse_int', - 'parse_constant', 'object_pairs_hook'] +from .. import base -JSON_DUMP_OPTS = ['skipkeys', 'ensure_ascii', 'check_circular', 'allow_nan', - 'cls', 'indent', 'separators', 'default', 'sort_keys'] -JSON_DICT_OPTS = ['object_pairs_hook', 'object_hook'] +JSON_LOAD_OPTS: tuple[str, ...] = ( + "cls", "object_hook", "parse_float", "parse_int", + "parse_constant", "object_pairs_hook", +) +JSON_DUMP_OPTS: tuple[str, ...] = ( + "skipkeys", "ensure_ascii", "check_circular", "allow_nan", + "cls", "indent", "separators", "default", "sort_keys", +) +JSON_DICT_OPTS: tuple[str, ...] 
= ( + "object_pairs_hook", "object_hook", +) class Parser(base.StringStreamFnParser): """Parser for JSON files.""" - _cid = 'json.stdlib' - _type = 'json' - _extensions = ['json', 'jsn', 'js'] - _ordered = True - _allow_primitives = True + _cid: typing.ClassVar[str] = "json.stdlib" + _type: typing.ClassVar[str] = "json" + _extensions: tuple[str, ...] = ("json", "jsn", "js") + _ordered: typing.ClassVar[bool] = True + _allow_primitives: typing.ClassVar[bool] = True # .. note:: These may be overwritten. _load_opts = JSON_LOAD_OPTS _dump_opts = JSON_DUMP_OPTS _dict_opts = JSON_DICT_OPTS - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/backend/json/simplejson.py b/src/anyconfig/backend/json/simplejson.py index 3082b559..28b99fa2 100644 --- a/src/anyconfig/backend/json/simplejson.py +++ b/src/anyconfig/backend/json/simplejson.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2024 Satoru SATOH +# Copyright (C) 2011 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=import-error @@ -22,25 +22,32 @@ - Exported from ..json.py """ +from __future__ import annotations + +import typing + import simplejson as json from .. 
import base from .common import ( - JSON_LOAD_OPTS, JSON_DUMP_OPTS, Parser as BaseParser + JSON_LOAD_OPTS, JSON_DUMP_OPTS, Parser as BaseParser, ) -JSON_LOAD_OPTS.append('use_decimal') -JSON_DUMP_OPTS.extend(['use_decimal', 'namedtuple_as_object', 'tuple_as_array', - 'bigint_as_string', 'item_sort_key', 'for_json', - 'ignore_nan', 'int_as_string_bitcount', - 'iterable_as_array']) +JSON_LOAD_OPTS = (*JSON_LOAD_OPTS, "use_decimal") +JSON_DUMP_OPTS = ( + *JSON_DUMP_OPTS, + "use_decimal", "namedtuple_as_object", "tuple_as_array", + "bigint_as_string", "item_sort_key", "for_json", + "ignore_nan", "int_as_string_bitcount", + "iterable_as_array", +) class Parser(BaseParser): """Parser for JSON files using simplejson.""" - _cid = 'json.simplejson' + _cid: typing.ClassVar[str] = "json.simplejson" _load_opts = JSON_LOAD_OPTS _dump_opts = JSON_DUMP_OPTS @@ -48,5 +55,3 @@ class Parser(BaseParser): _load_from_stream_fn = base.to_method(json.load) _dump_to_string_fn = base.to_method(json.dumps) _dump_to_stream_fn = base.to_method(json.dump) - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/backend/json/stdlib.py b/src/anyconfig/backend/json/stdlib.py index 10e97e3f..2d642431 100644 --- a/src/anyconfig/backend/json/stdlib.py +++ b/src/anyconfig/backend/json/stdlib.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2024 Satoru SATOH +# Copyright (C) 2011 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # # Ref. python -c "import json; help(json)" @@ -32,7 +32,10 @@ .. versionadded:: 0.0.1 """ +from __future__ import annotations + import json +import typing from .. import base from .common import Parser as BaseParser @@ -41,12 +44,10 @@ class Parser(BaseParser): """Parser for JSON files.""" - _cid = 'json.stdlib' - _priority = 30 # Higher priority than others. + _cid: typing.ClassVar[str] = "json.stdlib" + _priority: typing.ClassVar[int] = 30 # Higher priority than others. 
_load_from_string_fn = base.to_method(json.loads) _load_from_stream_fn = base.to_method(json.load) _dump_to_string_fn = base.to_method(json.dumps) _dump_to_stream_fn = base.to_method(json.dump) - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/backend/pickle/stdlib.py b/src/anyconfig/backend/pickle/stdlib.py index b9a89b75..ed69df97 100644 --- a/src/anyconfig/backend/pickle/stdlib.py +++ b/src/anyconfig/backend/pickle/stdlib.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2017 - 2024 Satoru SATOH +# Copyright (C) 2017 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # r"""A backend module to load and dump pickle files. @@ -27,29 +27,32 @@ .. versionadded:: 0.8.3 """ +from __future__ import annotations + import pickle +import typing from .. import base -LOAD_OPTS = ['fix_imports', 'encoding', 'errors'] -DUMP_OPTS = ['protocol', 'fix_imports'] +LOAD_OPTS: tuple[str, ...] = ("fix_imports", "encoding", "errors") +DUMP_OPTS: tuple[str, ...] = ("protocol", "fix_imports") -class Parser(base.StringStreamFnParser, - base.BinaryLoaderMixin, base.BinaryDumperMixin): +class Parser(base.StringStreamFnParser): """Parser for Pickle files.""" - _cid = 'pickle.stdlib' - _type = 'pickle' - _extensions = ['pkl', 'pickle'] + _cid: typing.ClassVar[str] = "pickle.stdlib" + _type: typing.ClassVar[str] = "pickle" + _extensions: tuple[str, ...] 
= ("pkl", "pickle") + _open_read_mode: typing.ClassVar[str] = "rb" + _open_write_mode: typing.ClassVar[str] = "wb" + _load_opts = LOAD_OPTS _dump_opts = DUMP_OPTS - _allow_primitives = True + _allow_primitives: typing.ClassVar[bool] = True _load_from_string_fn = base.to_method(pickle.loads) _load_from_stream_fn = base.to_method(pickle.load) _dump_to_string_fn = base.to_method(pickle.dumps) _dump_to_stream_fn = base.to_method(pickle.dump) - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/backend/properties/builtin.py b/src/anyconfig/backend/properties/builtin.py index 6aeaa91d..fa93038b 100644 --- a/src/anyconfig/backend/properties/builtin.py +++ b/src/anyconfig/backend/properties/builtin.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2012 - 2024 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # r"""A backend module to load and dump (Java) properties files. @@ -28,17 +28,21 @@ - Added native Java properties parser instead of a plugin utilizes pyjavaproperties module. """ +from __future__ import annotations + import re import typing import warnings from .. import base +from ... import utils -_COMMENT_MARKERS: typing.Tuple[str, ...] = ('#', '!') +_COMMENT_MARKERS: tuple[str, ...] = ("#", "!") +_MIN_LEN_PAIR: int = 2 -def parseline(line: str) -> typing.Tuple[typing.Optional[str], str]: +def parseline(line: str) -> tuple[str | None, str]: """Parse a line of Java properties file. :param line: @@ -48,52 +52,55 @@ def parseline(line: str) -> typing.Tuple[typing.Optional[str], str]: pair = re.split(r"(?:\s+)?(?:(? str | None: """Preprocess a line in properties; strip comments, etc. :param line: A string not starting w/ any white spaces and ending w/ line breaks. It may be empty. see also: :func:`load`. - :param comment_markers: Comment markers, e.g. '#' (hash) + :param cmarkers: Comment markers, e.g. 
'#' (hash) """ if not line: return None - if any(c in line for c in comment_markers): - if line.startswith(comment_markers): - return None + if any(c in line for c in cmarkers) and line.startswith(cmarkers): + return None return line def unescape(in_s: str) -> str: """Un-escape and take out the content from given str ``in_s``.""" - return re.sub(r'\\(.)', r'\1', in_s) + return re.sub(r"\\(.)", r"\1", in_s) def _escape_char(in_c: str) -> str: """Escape some special characters in java .properties files.""" - return '\\' + in_c if in_c in (':', '=', '\\') else in_c + return "\\" + in_c if in_c in (":", "=", "\\") else in_c def escape(in_s: str) -> str: """Escape special characters in given str.""" - return ''.join(_escape_char(c) for c in in_s) + return "".join(_escape_char(c) for c in in_s) -def load(stream, container=dict, comment_markers=_COMMENT_MARKERS): +def load( + stream: typing.IO, container: base.GenContainerT = dict, + **kwargs: typing.Any, +) -> base.InDataT: """Load data from a java properties files given as ``stream``. :param stream: A file or file like object of Java properties files @@ -104,10 +111,12 @@ def load(stream, container=dict, comment_markers=_COMMENT_MARKERS): """ ret = container() prev = "" + comment_markers = kwargs.get("comment_markers", _COMMENT_MARKERS) - for line in stream: - line = _pre_process_line(prev + line.strip().rstrip(), - comment_markers) + for line_ in stream: + line = _pre_process_line( + prev + line_.strip().rstrip(), comment_markers, + ) # I don't think later case may happen but just in case. 
if line is None or not line: continue @@ -121,8 +130,8 @@ def load(stream, container=dict, comment_markers=_COMMENT_MARKERS): (key, val) = parseline(line) if key is None: warnings.warn( - f'Failed to parse the line: {line}', - category=SyntaxWarning, stacklevel=2 + f"Failed to parse the line: {line}", + category=SyntaxWarning, stacklevel=2, ) continue @@ -134,13 +143,16 @@ def load(stream, container=dict, comment_markers=_COMMENT_MARKERS): class Parser(base.StreamParser): """Parser for Java properties files.""" - _cid = 'properties.builtin' - _type = 'properties' - _extensions = ['properties'] - _ordered = True - _dict_opts = ['ac_dict'] + _cid: typing.ClassVar[str] = "properties.builtin" + _type: typing.ClassVar[str] = "properties" + _extensions: tuple[str, ...] = ("properties", ) + _ordered: typing.ClassVar[bool] = True + _dict_opts: tuple[str, ...] = ("ac_dict", ) - def load_from_stream(self, stream, container, **kwargs): + def load_from_stream( + self, stream: typing.IO, container: base.GenContainerT, + **kwargs: typing.Any, + ) -> base.InDataT: """Load config from given file like object 'stream'. :param stream: A file or file like object of Java properties files @@ -149,16 +161,20 @@ def load_from_stream(self, stream, container, **kwargs): :return: Dict-like object holding config parameters """ - return load(stream, container=container) + return load(stream, container=container, **kwargs) - def dump_to_stream(self, cnf, stream, **kwargs): + def dump_to_stream( + self, cnf: base.InDataExT, stream: typing.IO, + **_kwargs: typing.Any, + ) -> None: """Dump config 'cnf' to a file or file-like object 'stream'. 
:param cnf: Java properties config data to dump :param stream: Java properties file or file like object :param kwargs: backend-specific optional keyword parameters :: dict """ - for key, val in cnf.items(): - stream.write(f"{key} = {escape(val)}\n") - -# vim:sw=4:ts=4:et: + if utils.is_dict_like(cnf): + stream.writelines( + f"{key} = {escape(val)}\n" + for key, val in cnf.items() + ) diff --git a/src/anyconfig/backend/python/builtin.py b/src/anyconfig/backend/python/builtin.py index f6fb97af..38f7d93d 100644 --- a/src/anyconfig/backend/python/builtin.py +++ b/src/anyconfig/backend/python/builtin.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2023, 2024 Satoru SATOH +# Copyright (C) 2023 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # r"""A backend module to load and dump python code conntains data. @@ -29,15 +29,19 @@ - Added builtin data loader from python code """ +from __future__ import annotations + +import typing + from .. import base from . import ( - loader, dumper + loader, dumper, ) class Parser(base.Parser, loader.Loader, dumper.Dumper): """Parser for python code files.""" - _cid = 'python.builtin' - _type = 'python' - _extensions = ['py'] + _cid: typing.ClassVar[str] = "python.builtin" + _type: typing.ClassVar[str] = "python" + _extensions: tuple[str, ...] = ("py", ) diff --git a/src/anyconfig/backend/python/dumper.py b/src/anyconfig/backend/python/dumper.py index 4ad77f34..1372c072 100644 --- a/src/anyconfig/backend/python/dumper.py +++ b/src/anyconfig/backend/python/dumper.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2024 Satoru SATOH +# Copyright (C) 2024 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # r"""A backend module to dump python code conntains data. 
@@ -20,15 +20,21 @@ - Added builtin data dumper from python code """ +from __future__ import annotations + +import typing + from ..base import ( - InDataExT, ToStringDumperMixin + InDataExT, ToStringDumperMixin, ) class Dumper(ToStringDumperMixin): """Dumper for objects as python code.""" - def dump_to_string(self, cnf: InDataExT, **kwargs) -> str: + def dump_to_string( + self, cnf: InDataExT, **_kwargs: typing.Any, + ) -> str: """Dump config 'cnf' to a string. :param cnf: Configuration data to dump diff --git a/src/anyconfig/backend/python/loader.py b/src/anyconfig/backend/python/loader.py index 6fcdacbb..2c757e2b 100644 --- a/src/anyconfig/backend/python/loader.py +++ b/src/anyconfig/backend/python/loader.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2023, 2024 Satoru SATOH +# Copyright (C) 2023 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # r"""A backend module to load python code conntains data. @@ -28,20 +28,22 @@ - Added builtin data loader from python code """ +from __future__ import annotations + import pathlib import tempfile import typing from ... import ioinfo from ..base import ( - IoiT, InDataExT, LoaderMixin + IoiT, InDataExT, LoaderMixin, ) from . import utils def load_from_temp_file( - content: str, **opts + content: str, **opts: typing.Any, ) -> InDataExT: """Dump `content` to tempoary file and load from it. @@ -49,20 +51,22 @@ def load_from_temp_file( """ with tempfile.TemporaryDirectory() as tmpdir: path = pathlib.Path(tmpdir) / "mod.py" - path.write_text(content, encoding='utf-8') + path.write_text(content, encoding="utf-8") return utils.load_from_path( - path, allow_exec=opts.get("allow_exec", False) + path, allow_exec=opts.get("allow_exec", False), ) class Loader(LoaderMixin): """Loader for python code files.""" - _allow_primitives: bool = True - _load_opts = ["allow_exec"] + _allow_primitives: typing.ClassVar[bool] = True + _load_opts: tuple[str, ...] 
= ("allow_exec", ) - def loads(self, content: str, **options) -> InDataExT: + def loads( + self, content: str, **options: typing.Any, + ) -> InDataExT: """Load config from given string 'content' after some checks. :param content: Config file content @@ -80,17 +84,15 @@ def loads(self, content: str, **options) -> InDataExT: return utils.load_literal_data_from_string(content) - def load(self, ioi: IoiT, ac_ignore_missing: bool = False, - **options) -> InDataExT: + def load( + self, ioi: IoiT, **options: typing.Any, + ) -> InDataExT: """Load config from ``ioi``. :param ioi: 'anyconfig.ioinfo.IOInfo' namedtuple object provides various info of input object to load data from - :param ac_ignore_missing: - Ignore and just return empty result if given `ioi` object does not - exist in actual. :param options: options will be passed to backend specific loading functions. please note that options have to be sanitized w/ @@ -106,10 +108,10 @@ def load(self, ioi: IoiT, ac_ignore_missing: bool = False, if ioinfo.is_stream(ioi): return load_from_temp_file( - typing.cast(typing.IO, ioi.src).read(), - allow_exec=allow_exec + typing.cast("typing.IO", ioi.src).read(), + allow_exec=allow_exec, ) return utils.load_from_path( - pathlib.Path(ioi.path), allow_exec=allow_exec + pathlib.Path(ioi.path), allow_exec=allow_exec, ) diff --git a/src/anyconfig/backend/python/utils.py b/src/anyconfig/backend/python/utils.py index 67b7cfb3..e1a6669a 100644 --- a/src/anyconfig/backend/python/utils.py +++ b/src/anyconfig/backend/python/utils.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2023, 2024 Satoru SATOH +# Copyright (C) 2023 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring @@ -15,14 +15,18 @@ - load_data_from_py has vulnerabilities because it execute the code. You must avoid to load .py data from unknown sources with this. 
""" +from __future__ import annotations + import ast import importlib import importlib.util import importlib.abc -import pathlib import typing import warnings +if typing.TYPE_CHECKING: + import pathlib + DATA_VAR_NAME: str = "DATA" @@ -41,9 +45,9 @@ def load_literal_data_from_path(path: pathlib.Path) -> typing.Any: def load_data_from_py( - path: pathlib.Path, - data_name: typing.Optional[str] = None, - fallback: bool = False + path: pathlib.Path, *, + data_name: str | None = None, + fallback: bool = False, ) -> typing.Any: """Load test data from .py files by evaluating it. @@ -52,28 +56,30 @@ def load_data_from_py( if data_name is None: data_name = DATA_VAR_NAME - spec = importlib.util.spec_from_file_location('testmod', str(path)) + spec = importlib.util.spec_from_file_location("testmod", str(path)) if spec and isinstance(spec.loader, importlib.abc.Loader): mod = importlib.util.module_from_spec(spec) spec.loader.exec_module(mod) try: return getattr(mod, data_name) except (TypeError, ValueError, AttributeError): - warnings.warn( # noqa - f'No valid data "{data_name}" was found in {mod!r}.' + warnings.warn( + f"No valid data '{data_name}' was found in {mod!r}.", + stacklevel=2, ) if fallback: return None - raise ValueError(f"Faied to load data from: {path!r}") + msg = f"Faied to load data from: {path!r}" + raise ValueError(msg) def load_from_path( - path: pathlib.Path, + path: pathlib.Path, *, allow_exec: bool = False, - data_name: typing.Optional[str] = None, - fallback: bool = False + data_name: str | None = None, + fallback: bool = False, ) -> typing.Any: """Load data from given path `path`. 
@@ -84,7 +90,7 @@ def load_from_path( """ if allow_exec and (data_name or DATA_VAR_NAME) in path.read_text(): return load_data_from_py( - path, data_name=data_name, fallback=fallback + path, data_name=data_name, fallback=fallback, ) return load_literal_data_from_path(path) diff --git a/src/anyconfig/backend/sh/variables.py b/src/anyconfig/backend/sh/variables.py index 52e8bd4c..bff38558 100644 --- a/src/anyconfig/backend/sh/variables.py +++ b/src/anyconfig/backend/sh/variables.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2016 - 2024 Satoru SATOH +# Copyright (C) 2016 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """A simple backend module to load and dump files contain shell variables. @@ -18,37 +18,47 @@ - Added an experimental parser for simple shelll vars' definitions w/o shell variable expansions nor complex shell statements like conditionals. """ +from __future__ import annotations + import itertools import re +import typing import warnings from .. import base +from ... import utils -def _parseline(line): +def _parseline( + line: str, +) -> tuple[str | None, str | None]: """Parse a line contains shell variable definition. 
:param line: A string to parse, must not start with '#' (comment) :return: A tuple of (key, value), both key and value may be None """ match = re.match( - r'^\s*(export)?\s*(\S+)=(?:(?:' + r"^\s*(export)?\s*(\S+)=(?:(?:" r"(?:\"(.*[^\\])\")|(?:'(.*[^\\])')|" r"(?:([^\"'#\s]+)))?)\s*#*", - line + line, ) if not match: warnings.warn( - f'Invalid line found: {line}', category=SyntaxWarning, stacklevel=2 + f"Invalid line found: {line}", category=SyntaxWarning, + stacklevel=2, ) return (None, None) tpl = match.groups() vals = list(itertools.dropwhile(lambda x: x is None, tpl[2:])) - return (tpl[1], vals[0] if vals else '') + return (tpl[1], vals[0] if vals else "") -def load(stream, container=dict): +def load( + stream: typing.IO, container: base.GenContainerT = dict, + **_kwargs: typing.Any, +) -> base.InDataT: """Load shell variable definitions data from ``stream``. :param stream: A file or file like object @@ -58,16 +68,16 @@ def load(stream, container=dict): """ ret = container() - for line in stream: - line = line.rstrip() + for line_ in stream: + line = line_.rstrip() if line is None or not line: continue (key, val) = _parseline(line) if key is None: warnings.warn( - f'Empty val in the line: {line}', - category=SyntaxWarning, stacklevel=2 + f"Empty val in the line: {line}", + category=SyntaxWarning, stacklevel=2, ) continue @@ -79,13 +89,16 @@ def load(stream, container=dict): class Parser(base.StreamParser): """Parser for Shell variable definition files.""" - _cid = 'sh.variables' - _type = 'shellvars' - _extensions = ['sh'] - _ordered = True - _dict_opts = ['ac_dict'] + _cid: typing.ClassVar[str] = "sh.variables" + _type: typing.ClassVar[str] = "shellvars" + _extensions: tuple[str, ...] = ("sh", ) + _ordered: typing.ClassVar[bool] = True + _dict_opts: tuple[str, ...] 
= ("ac_dict", ) - def load_from_stream(self, stream, container, **kwargs): + def load_from_stream( + self, stream: typing.IO, container: base.GenContainerT, + **kwargs: typing.Any, + ) -> base.InDataT: """Load config from given file like object ``stream``. :param stream: @@ -95,16 +108,20 @@ def load_from_stream(self, stream, container, **kwargs): :return: Dict-like object holding config parameters """ - return load(stream, container=container) + return load(stream, container=container, **kwargs) - def dump_to_stream(self, cnf, stream, **kwargs): + def dump_to_stream( + self, cnf: base.InDataExT, stream: typing.IO, + **_kwargs: typing.Any, + ) -> None: """Dump config dat ``cnf`` to a file or file-like object ``stream``. :param cnf: Shell variables data to dump :param stream: Shell script file or file like object :param kwargs: backend-specific optional keyword parameters :: dict """ - for key, val in cnf.items(): - stream.write(f"{key}='{val}'\n") - -# vim:sw=4:ts=4:et: + if utils.is_dict_like(cnf): + stream.writelines( + f"{key}='{val}'\n" + for key, val in cnf.items() + ) diff --git a/src/anyconfig/backend/toml/__init__.py b/src/anyconfig/backend/toml/__init__.py index f7c0acf9..b5ae17a3 100644 --- a/src/anyconfig/backend/toml/__init__.py +++ b/src/anyconfig/backend/toml/__init__.py @@ -19,7 +19,7 @@ """ from ..base import ParserClssT -PARSERS: ParserClssT = [] # type: ignore +PARSERS: ParserClssT = [] try: from . import tomllib diff --git a/src/anyconfig/backend/toml/toml.py b/src/anyconfig/backend/toml/toml.py index ab8b8ecb..509cb3d5 100644 --- a/src/anyconfig/backend/toml/toml.py +++ b/src/anyconfig/backend/toml/toml.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2015 - 2023 Satoru SATOH +# Copyright (C) 2015 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # # Ref. python -c "import toml; help(toml); ..." @@ -19,6 +19,10 @@ .. versionadded:: 0.1.0 """ +from __future__ import annotations + +import typing + import toml from .. 
import base @@ -27,15 +31,14 @@ class Parser(base.StringStreamFnParser): """TOML parser.""" - _cid = 'toml.toml' - _type = 'toml' - _extensions = ['toml'] - _ordered = True - _load_opts = _dump_opts = _dict_opts = ['_dict'] + _cid: typing.ClassVar[str] = "toml.toml" + _type: typing.ClassVar[str] = "toml" + _extensions: tuple[str, ...] = ("toml", ) + _ordered: typing.ClassVar[bool] = True + _load_opts: tuple[str] = ("_dict", ) + _dump_opts = _dict_opts = _load_opts _load_from_string_fn = base.to_method(toml.loads) _load_from_stream_fn = base.to_method(toml.load) _dump_to_string_fn = base.to_method(toml.dumps) _dump_to_stream_fn = base.to_method(toml.dump) - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/backend/toml/tomlkit.py b/src/anyconfig/backend/toml/tomlkit.py index d5623fd9..43141b20 100644 --- a/src/anyconfig/backend/toml/tomlkit.py +++ b/src/anyconfig/backend/toml/tomlkit.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2023 Satoru SATOH +# Copyright (C) 2023, 2024 Satoru SATOH # SPDX-License-Identifier: MIT # # Ref. python -c "import tomlkit; help(tomlkit); ..." @@ -18,6 +18,10 @@ .. versionadded:: 0.13.1 """ +from __future__ import annotations + +import typing + import tomlkit from .. import base @@ -26,15 +30,13 @@ class Parser(base.StringStreamFnParser): """TOML parser.""" - _cid = 'toml.tomlkit' - _type = 'toml' - _extensions = ['toml'] - _ordered = True - _dump_opts = ['sort_keys'] + _cid: typing.ClassVar[str] = "toml.tomlkit" + _type: typing.ClassVar[str] = "toml" + _extensions: tuple[str, ...] = ("toml", ) + _ordered: typing.ClassVar[bool] = True + _dump_opts: tuple[str, ...] 
= ("sort_keys", ) _load_from_string_fn = base.to_method(tomlkit.loads) _load_from_stream_fn = base.to_method(tomlkit.load) _dump_to_string_fn = base.to_method(tomlkit.dumps) _dump_to_stream_fn = base.to_method(tomlkit.dump) - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/backend/toml/tomllib.py b/src/anyconfig/backend/toml/tomllib.py index fad7f92c..a78f52bd 100644 --- a/src/anyconfig/backend/toml/tomllib.py +++ b/src/anyconfig/backend/toml/tomllib.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2023 Satoru SATOH +# Copyright (C) 2023, 2024 Satoru SATOH # SPDX-License-Identifier: MIT # # Ref. python -c "import toml; help(toml); ..." @@ -20,27 +20,30 @@ .. versionadded:: 0.13.1 """ +from __future__ import annotations + +import typing + try: import tomllib except ModuleNotFoundError: - import tomli as tomllib # type: ignore + import tomli as tomllib # type: ignore[no-redef] import tomli_w from .. import base -class Parser( - base.StringStreamFnParser, - base.BinaryLoaderMixin, base.BinaryDumperMixin -): +class Parser(base.StringStreamFnParser): """TOML parser using tomlib and tomli-w.""" - _cid = 'toml.tomllib' - _type = 'toml' - _extensions = ['toml'] - _ordered = True - _load_opts = ['parse_float'] + _cid: typing.ClassVar[str] = "toml.tomllib" + _type: typing.ClassVar[str] = "toml" + _extensions: tuple[str, ...] = ("toml", ) + _ordered: typing.ClassVar[bool] = True + _load_opts: tuple[str, ...] 
= ("parse_float", ) + _open_read_mode: typing.ClassVar[str] = "rb" + _open_write_mode: typing.ClassVar[str] = "wb" _load_from_string_fn = base.to_method(tomllib.loads) _load_from_stream_fn = base.to_method(tomllib.load) diff --git a/src/anyconfig/backend/xml/etree.py b/src/anyconfig/backend/xml/etree.py index 4211b333..fb6f28cb 100644 --- a/src/anyconfig/backend/xml/etree.py +++ b/src/anyconfig/backend/xml/etree.py @@ -1,11 +1,11 @@ # -# Copyright (C) 2011 - 2024 Satoru SATOH +# Copyright (C) 2011 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # Some XML modules may be missing and Base.{load,dumps}_impl are not # overridden: # pylint: disable=import-error, duplicate-except -# len(elem) is necessary to check that ET.Element object has children. +# len(elem) is necessary to check that ElementTree.Element object has children. # pylint: disable=len-as-condition r"""A backend module to load and dump XML files. @@ -57,56 +57,56 @@ - Added XML dump support. """ -import collections +from __future__ import annotations + import io import itertools import operator import re -import xml.etree.ElementTree as ET +import typing + +from xml.etree import ElementTree from .. import base +from ... import dicts from ...parser import parse_single from ...utils import ( - get_path_from_stream, is_dict_like, is_iterable, noop + filter_options, get_path_from_stream, + is_dict_like, is_iterable, noop, ) +if typing.TYPE_CHECKING: + import collections.abc -_TAGS = {"attrs": '@attrs', "text": '@text', "children": '@children'} -_ET_NS_RE = re.compile(r"^{(\S+)}(\S+)$") - - -def _iterparse(xmlfile): - """Override ET.iterparse to avoid bug in python 3.{2,3}. - .. seealso:: http://bugs.python.org/issue9257. 
+_TAGS = {"attrs": "@attrs", "text": "@text", "children": "@children"} +_ET_NS_RE = re.compile(r"^{(\S+)}(\S+)$") - :param xmlfile: XML file or file-like object - """ - try: - return ET.iterparse(xmlfile, events=('start-ns', )) - except TypeError: - return ET.iterparse(xmlfile, events=(b'start-ns', )) +_ENCODING: str = "utf-8" -def flip(tpl): - """Flip arguments. +if typing.TYPE_CHECKING: + DicType = dict[str, typing.Any] + DicsType = collections.abc.Iterable[DicType] + GenDicType = collections.abc.Callable[..., DicType] - >>> flip((1, 2)) - (2, 1) - """ - return (tpl[1], tpl[0]) - -def _namespaces_from_file(xmlfile): +def _namespaces_from_file( + xmlfile: base.PathOrStrT | typing.IO, +) -> dict[str, tuple[str, str]]: """Get the namespace str from file. :param xmlfile: XML file or file-like object :return: {namespace_uri: namespace_prefix} or {} """ - return dict(flip(t) for _, t in _iterparse(xmlfile)) + return { + typing.cast("str", url): typing.cast("tuple[str, str]", prefix) + for _, (prefix, url) + in ElementTree.iterparse(xmlfile, events=("start-ns", )) + } -def _tweak_ns(tag, **options): +def _tweak_ns(tag: str, **options: dict[str, str]) -> str: """Tweak the namespace. :param tag: XML tag element @@ -122,19 +122,19 @@ def _tweak_ns(tag, **options): ... nspaces={"http://example.com/ns/val/": "val"}) 'val:a' """ - nspaces = options.get('nspaces', None) + nspaces = options.get("nspaces") if nspaces is not None: matched = _ET_NS_RE.match(tag) if matched: (uri, tag) = matched.groups() prefix = nspaces.get(uri, False) if prefix: - return f'{prefix}:{tag}' + return f"{prefix}:{tag}" return tag -def _dicts_have_unique_keys(dics): +def _dicts_have_unique_keys(dics: DicsType) -> bool: """Test if given dicts don't have same keys. 
:param dics: [], must not be [] or [{...}] @@ -147,9 +147,9 @@ def _dicts_have_unique_keys(dics): # >>> _dicts_have_unique_keys([{}, {'a': 1}, {'b': 2, 'c': 0}]) # True - >>> _dicts_have_unique_keys([{}, {'a': 1}, {'a': 2}]) + >>> _dicts_have_unique_keys([{}, {"a": 1}, {"a": 2}]) False - >>> _dicts_have_unique_keys([{}, {'a': 1}, {'b': 2}, {'b': 3, 'c': 0}]) + >>> _dicts_have_unique_keys([{}, {"a": 1}, {"b": 2}, {"b": 3, "c": 0}]) False >>> _dicts_have_unique_keys([{}, {}]) True @@ -158,39 +158,24 @@ def _dicts_have_unique_keys(dics): return len(set(key_itr)) == sum(len(d) for d in dics) -def _merge_dicts(dics, container=dict): - """Merge given dicts. - - :param dics: [] - :param container: callble to make a container object - :return: object - - >>> _merge_dicts(({}, )) - {} - >>> _merge_dicts(({'a': 1}, )) - {'a': 1} - >>> sorted(kv for kv in _merge_dicts(({'a': 1}, {'b': 2})).items()) - [('a', 1), ('b', 2)] - """ - dic_itr = itertools.chain.from_iterable(d.items() for d in dics) - return container(collections.OrderedDict(dic_itr)) - - -def _parse_text(val, **options): +def _parse_text(val: str, **options: typing.Any) -> typing.Any: """Parse ``val`` and interpret its data to some value. :return: Parsed value or value itself depends on 'ac_parse_value' """ - if val and options.get('ac_parse_value', False): + if val and options.get("ac_parse_value", False): return parse_single(val) return val -def _process_elem_text(elem, dic, subdic, text='@text', **options): +def _process_elem_text( + elem: ElementTree.Element, dic: DicType, subdic: DicType, + text: str = "@text", **options: typing.Any, +) -> None: """Process the text in the element ``elem``. 
- :param elem: ET Element object which has elem.text + :param elem: ElementTree.Element object which has elem.text :param dic: (dict[-like]) object converted from elem :param subdic: Sub object converted from elem :param options: @@ -199,7 +184,9 @@ def _process_elem_text(elem, dic, subdic, text='@text', **options): :return: None but updating elem.text, dic and subdic as side effects """ - elem.text = elem.text.strip() + if elem.text: + elem.text = elem.text.strip() + if elem.text: etext = _parse_text(elem.text, **options) if len(elem) or elem.attrib: @@ -208,26 +195,31 @@ def _process_elem_text(elem, dic, subdic, text='@text', **options): dic[elem.tag] = etext # Only text, e.g. text -def _parse_attrs(elem, container=dict, **options): +def _parse_attrs( + elem: ElementTree.Element, container: GenDicType = dict, + **options: typing.Any, +) -> DicType: """Parse the attributes of the element ``elem``. - :param elem: ET Element object has attributes (elem.attrib) + :param elem: ElementTree.Element object has attributes (elem.attrib) :param container: callble to make a container object :return: Parsed value or value itself depends on 'ac_parse_value' """ - adic = dict((_tweak_ns(a, **options), v) for a, v in elem.attrib.items()) - if options.get('ac_parse_value', False): - return container(dict((k, parse_single(v)) - for k, v in adic.items())) + adic = {_tweak_ns(a, **options): v for a, v in elem.attrib.items()} + if options.get("ac_parse_value", False): + return container({k: parse_single(v) for k, v in adic.items()}) return container(adic) -def _process_elem_attrs(elem, dic, subdic, container=dict, attrs='@attrs', - **options): +def _process_elem_attrs( + elem: ElementTree.Element, dic: DicType, subdic: DicType, + container: GenDicType = dict, attrs: str = "@attrs", + **options: typing.Any, +) -> None: """Process attributes in the element ``elem``. 
- :param elem: ET Element object or None + :param elem: ElementTree.Element object or None :param dic: (dict[-like]) object converted from elem :param subdic: Sub object converted from elem :param options: @@ -237,17 +229,20 @@ def _process_elem_attrs(elem, dic, subdic, container=dict, attrs='@attrs', :return: None but updating dic and subdic as side effects """ adic = _parse_attrs(elem, container=container, **options) - if not elem.text and not len(elem) and options.get('merge_attrs'): + if not elem.text and not len(elem) and options.get("merge_attrs"): dic[elem.tag] = adic else: subdic[attrs] = adic -def _process_children_elems(elem, dic, subdic, container=dict, - children='@children', **options): +def _process_children_elems( + elem: ElementTree.Element, dic: DicType, subdic: DicType, + container: GenDicType = dict, children: str = "@children", + **options: typing.Any, +) -> None: """Process children of the element ``elem``. - :param elem: ET Element object or None + :param elem: ElementTree.Element object or None :param dic: (dict[-like]) object converted from elem :param subdic: Sub object converted from elem :param container: callble to make a container object @@ -260,18 +255,30 @@ def _process_children_elems(elem, dic, subdic, container=dict, """ cdics = [elem_to_container(c, container=container, **options) for c in elem] - merge_attrs = options.get('merge_attrs', False) - sdics = [container(elem.attrib) if merge_attrs else subdic] + cdics + merge_attrs = options.get("merge_attrs", False) + if merge_attrs or subdic: + sdics = [container(elem.attrib) if merge_attrs else subdic, *cdics] + else: + sdics = cdics if _dicts_have_unique_keys(sdics): # ex. 1c - dic[elem.tag] = _merge_dicts(sdics, container) + (sdic, udicts) = (sdics[0], sdics[1:]) + for udic in udicts: + dicts.merge(sdic, udic, **options) + + dic[elem.tag] = dicts.convert_to(sdic, ac_dict=container) + elif not subdic: # There are no attrs nor text and only these children. 
dic[elem.tag] = cdics else: subdic[children] = cdics -def elem_to_container(elem, container=dict, **options): +def elem_to_container( + elem: ElementTree.Element | None, + container: GenDicType = dict, + **options: typing.Any, +) -> DicType: """Convert XML ElementTree Element to a collection of container objects. Elements are transformed to a node under special tagged nodes, attrs, text @@ -282,7 +289,7 @@ def elem_to_container(elem, container=dict, **options): - There is only text element - There are only children elements each has unique keys among all - :param elem: ET Element object or None + :param elem: ElementTree.Element object or None :param container: callble to make a container object :param options: Keyword options @@ -297,7 +304,7 @@ def elem_to_container(elem, container=dict, **options): elem.tag = _tweak_ns(elem.tag, **options) # {ns}tag -> ns_prefix:tag subdic = dic[elem.tag] = container() - options['container'] = container + options["container"] = container if elem.text: _process_elem_text(elem, dic, subdic, **options) @@ -313,7 +320,7 @@ def elem_to_container(elem, container=dict, **options): return dic -def _complement_tag_options(options): +def _complement_tag_options(options: DicType) -> DicType: """Complement tag options. :param options: Keyword options :: dict @@ -326,14 +333,19 @@ def _complement_tag_options(options): [('attrs', '@attrs'), ('children', '@children'), ('text', '#text')] """ if not all(nt in options for nt in _TAGS): - tags = options.get('tags', {}) + tags = options.get("tags", {}) for ntype, tag in _TAGS.items(): options[ntype] = tags.get(ntype, tag) return options -def root_to_container(root, container=dict, nspaces=None, **options): +def root_to_container( + root: ElementTree.Element, + container: GenDicType = dict, + nspaces: DicType | None = None, + **options: typing.Any, +) -> DicType: """Convert XML ElementTree Root Element to container objects. 
:param root: etree root object or None @@ -350,28 +362,36 @@ def root_to_container(root, container=dict, nspaces=None, **options): if nspaces is not None: for uri, prefix in nspaces.items(): - root.attrib[f'xmlns:{prefix}' if prefix else 'xmlns'] = uri + root.attrib[f"xmlns:{prefix}" if prefix else "xmlns"] = uri return elem_to_container(root, container=container, nspaces=nspaces, **_complement_tag_options(options)) -def _to_str_fn(**options): +def _to_str_fn( + **options: typing.Any, +) -> collections.abc.Callable[..., str]: """Convert any objects to a str. :param options: Keyword options might have 'ac_parse_value' key :param to_str: Callable to convert value to string """ - return str if options.get('ac_parse_value') else noop + return ( + str if options.get("ac_parse_value") # type: ignore[return-value] + else noop + ) -def _elem_set_attrs(obj, parent, to_str): +def _elem_set_attrs( + obj: DicType, parent: ElementTree.Element, + to_str: collections.abc.Callable[..., str], +) -> None: """Set attributes of the element ``parent``. :param obj: Container instance gives attributes of XML Element :param parent: XML ElementTree parent node object :param to_str: Callable to convert value to string or None - :param options: Keyword options, see :func:`container_to_etree` + :param options: Keyword options, see :func:`container_to_elem` :return: None but parent will be modified """ @@ -379,33 +399,41 @@ def _elem_set_attrs(obj, parent, to_str): parent.set(attr, to_str(val)) -def _elem_from_descendants(children_nodes, **options): +def _elem_from_descendants( + children_nodes: collections.abc.Iterable[DicType], + **options: typing.Any, +) -> collections.abc.Iterator[ElementTree.Element]: """Get the elements from the descendants ``children_nodes``. 
:param children_nodes: A list of child dict objects - :param options: Keyword options, see :func:`container_to_etree` + :param options: Keyword options, see :func:`container_to_elem` """ for child in children_nodes: # child should be a dict-like object. for ckey, cval in child.items(): - celem = ET.Element(ckey) - container_to_etree(cval, parent=celem, **options) + celem = ElementTree.Element(ckey) + container_to_elem(cval, parent=celem, **options) yield celem -def _get_or_update_parent(key, val, to_str, parent=None, **options): +def _get_or_update_parent( + key: str, val: typing.Any, + to_str: collections.abc.Callable[..., str], + parent: ElementTree.Element | None = None, + **options: typing.Any, +) -> ElementTree.Element: """Get or update the parent element ``parent``. :param key: Key of current child (dict{,-like} object) :param val: Value of current child (dict{,-like} object or [dict{,...}]) :param to_str: Callable to convert value to string :param parent: XML ElementTree parent node object or None - :param options: Keyword options, see :func:`container_to_etree` + :param options: Keyword options, see :func:`container_to_elem` """ - elem = ET.Element(key) + elem = ElementTree.Element(key) vals = val if is_iterable(val) else [val] for val_ in vals: - container_to_etree(val_, parent=elem, to_str=to_str, **options) + container_to_elem(val_, parent=elem, to_str=to_str, **options) if parent is None: # 'elem' is the top level etree. 
return elem @@ -414,10 +442,27 @@ def _get_or_update_parent(key, val, to_str, parent=None, **options): return parent -_ATC = ('attrs', 'text', 'children') +_ATC = ("attrs", "text", "children") + +def _assert_if_invalid_node( + obj: typing.Any, + parent: ElementTree.Element | None = None, +) -> None: + """Make sure the ``obj`` or ``parent`` is not invalid.""" + if parent is None and (obj is None or not obj): + raise ValueError -def container_to_etree(obj, parent=None, to_str=None, **options): + if parent is not None and not isinstance(parent, ElementTree.Element): + raise ValueError + + +def container_to_elem( + obj: typing.Any, + parent: ElementTree.Element | None = None, + to_str: collections.abc.Callable[..., str] | None = None, + **options: typing.Any, +) -> ElementTree.Element: """Convert a dict-like object to XML ElementTree. :param obj: Container instance to convert to @@ -428,18 +473,27 @@ def container_to_etree(obj, parent=None, to_str=None, **options): - tags: Dict of tags for special nodes to keep XML info, attributes, text and children nodes, e.g. {"attrs": "@attrs", "text": "#text"} """ + _assert_if_invalid_node(obj, parent=parent) + if to_str is None: to_str = _to_str_fn(**options) if not is_dict_like(obj): if parent is not None and obj: parent.text = to_str(obj) # Parent is a leaf text node. - return parent # All attributes and text should be set already. + # All attributes and text should be set already. 
+ return parent # type: ignore[return-value] options = _complement_tag_options(options) (attrs, text, children) = operator.itemgetter(*_ATC)(options) for key, val in obj.items(): + if parent is None: + parent = _get_or_update_parent( + key, val, to_str, parent=parent, **options, + ) + continue + if key == attrs: _elem_set_attrs(val, parent, to_str) elif key == text: @@ -448,36 +502,61 @@ def container_to_etree(obj, parent=None, to_str=None, **options): for celem in _elem_from_descendants(val, **options): parent.append(celem) else: - parent = _get_or_update_parent(key, val, to_str, parent=parent, - **options) + parent = _get_or_update_parent( + key, val, to_str, parent=parent, **options, + ) - return ET.ElementTree(parent) + return parent # type: ignore[return-value] -def etree_write(tree, stream): +def etree_write( + elem: ElementTree.Element, stream: typing.IO, + **options: typing.Any, +) -> None: """Write XML ElementTree 'root' content into 'stream'. :param tree: XML ElementTree object :param stream: File or file-like object can write to """ - try: - tree.write(stream, encoding='utf-8', xml_declaration=True) - except TypeError: - tree.write(stream, encoding='unicode', xml_declaration=True) - - -class Parser(base.Parser, base.ToStreamDumperMixin, - base.BinaryDumperMixin, base.BinaryLoaderMixin): + opts = {"xml_declaration": True, "encoding": "unicode"} + opts.update( + **filter_options( + ("method", "xml_declaration", "default_namespace", + "short_empty_elements"), + options, + ), + ) + content: bytes = ElementTree.tostring( # type: ignore[call-overload] + elem, **opts, + ).encode("utf-8") + stream.write(content) + + +class Parser(base.Parser, base.ToStreamDumperMixin): """Parser for XML files.""" - _cid = 'xml.etree' - _type = 'xml' - _extensions = ['xml'] - _load_opts = _dump_opts = ['tags', 'merge_attrs', 'ac_parse_value'] - _ordered = True - _dict_opts = ['ac_dict'] - - def load_from_string(self, content, container, **opts): + _cid: typing.ClassVar[str] = 
"xml.etree" + _type: typing.ClassVar[str] = "xml" + _extensions: tuple[str, ...] = ("xml", ) + _load_opts: tuple[str, ...] = ( + "tags", "merge_attrs", "ac_parse_value", + ) + # .. seealso:: xml.etree.ElementTree.tostring + _dump_opts = ( + *_load_opts, + "encoding", "method", "xml_declaration", "default_namespace", + "short_empty_elements", + ) + + _ordered: typing.ClassVar[bool] = True + _dict_opts: tuple[str, ...] = ("ac_dict", ) + _open_read_mode: typing.ClassVar[str] = "rb" + _open_write_mode: typing.ClassVar[str] = "wb" + + def load_from_string( + self, content: str | bytes, container: GenDicType, + **opts: typing.Any, + ) -> DicType: """Load config from XML snippet (a string 'content'). :param content: @@ -487,13 +566,22 @@ def load_from_string(self, content, container, **opts): :return: Dict-like object holding config parameters """ - root = ET.fromstring(content) - stream = io.BytesIO(content) - nspaces = _namespaces_from_file(stream) - return root_to_container(root, container=container, - nspaces=nspaces, **opts) + if isinstance(content, str): + elem = ElementTree.fromstring(content) + stream = io.BytesIO(content.encode(_ENCODING)) + else: + elem = ElementTree.fromstring(content.decode(_ENCODING)) + stream = io.BytesIO(content) - def load_from_path(self, filepath, container, **opts): + nspaces = _namespaces_from_file(stream) + return root_to_container( + elem, container=container, nspaces=nspaces, **opts, + ) + + def load_from_path( + self, filepath: base.PathOrStrT, container: GenDicType, + **opts: typing.Any, + ) -> DicType: """Load data from path ``filepath``. 
:param filepath: XML file path @@ -502,12 +590,16 @@ def load_from_path(self, filepath, container, **opts): :return: Dict-like object holding config parameters """ - root = ET.parse(filepath).getroot() + elem = ElementTree.parse(filepath).getroot() nspaces = _namespaces_from_file(filepath) - return root_to_container(root, container=container, - nspaces=nspaces, **opts) - - def load_from_stream(self, stream, container, **opts): + return root_to_container( + elem, container=container, nspaces=nspaces, **opts, + ) + + def load_from_stream( + self, stream: typing.IO, container: GenDicType, + **opts: typing.Any, + ) -> DicType: """Load data from IO stream ``stream``. :param stream: XML file or file-like object @@ -516,13 +608,16 @@ def load_from_stream(self, stream, container, **opts): :return: Dict-like object holding config parameters """ - root = ET.parse(stream).getroot() + elem = ElementTree.parse(stream).getroot() path = get_path_from_stream(stream) nspaces = _namespaces_from_file(path) - return root_to_container(root, container=container, - nspaces=nspaces, **opts) + return root_to_container( + elem, container=container, nspaces=nspaces, **opts, + ) - def dump_to_string(self, cnf, **opts): + def dump_to_string( # type: ignore[override] + self, cnf: base.InDataExT, **opts: typing.Any, + ) -> bytes: """Dump data ``cnf`` as a str. 
:param cnf: Configuration data to dump @@ -530,19 +625,26 @@ def dump_to_string(self, cnf, **opts): :return: string represents the configuration """ - tree = container_to_etree(cnf, **opts) - buf = io.BytesIO() - etree_write(tree, buf) - return buf.getvalue() - - def dump_to_stream(self, cnf, stream, **opts): + if cnf is None or not cnf or not is_dict_like(cnf): + return b"" + + elem = container_to_elem(cnf, **opts) + bio = io.BytesIO() + etree_write(elem, bio, **opts) + return bio.getvalue() + + def dump_to_stream( + self, cnf: base.InDataExT, stream: typing.IO, + **opts: typing.Any, + ) -> None: """Dump data ``cnf`` to the IO stream ``stream``. :param cnf: Configuration data to dump :param stream: Config file or file like object write to :param opts: optional keyword parameters """ - tree = container_to_etree(cnf, **opts) - etree_write(tree, stream) + if cnf is None or not cnf or not is_dict_like(cnf): + return -# vim:sw=4:ts=4:et: + elem = container_to_elem(cnf, **opts) + etree_write(elem, stream, **opts) diff --git a/src/anyconfig/backend/yaml/__init__.py b/src/anyconfig/backend/yaml/__init__.py index 1441afdf..2b1c44ad 100644 --- a/src/anyconfig/backend/yaml/__init__.py +++ b/src/anyconfig/backend/yaml/__init__.py @@ -21,7 +21,7 @@ from . import pyyaml PARSERS: ParserClssT = [pyyaml.Parser] except ImportError: - PARSERS: ParserClssT = [] # type: ignore + PARSERS: ParserClssT = [] # type: ignore[no-redef] try: from . import ruamel diff --git a/src/anyconfig/backend/yaml/common.py b/src/anyconfig/backend/yaml/common.py index ae75115a..043470cd 100644 --- a/src/anyconfig/backend/yaml/common.py +++ b/src/anyconfig/backend/yaml/common.py @@ -1,13 +1,19 @@ # -# Copyright (C) 2011 - 2021 Satoru SATOH +# Copyright (C) 2011 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # """Common library for YAML backend modules.""" +from __future__ import annotations + +import typing + from ...utils import filter_options from .. 
import base -def filter_from_options(key, options): +def filter_from_options( + key: str, options: dict[str, typing.Any], +) -> dict[str, typing.Any]: """Filter a key ``key`` in ``options. :param key: Key str in options @@ -15,19 +21,17 @@ def filter_from_options(key, options): :return: New mapping object from 'options' in which the item with 'key' filtered - >>> filter_from_options('a', dict(a=1, b=2)) + >>> filter_from_options("a", dict(a=1, b=2)) {'b': 2} """ - return filter_options([k for k in options.keys() if k != key], options) + return filter_options([k for k in options if k != key], options) class Parser(base.StreamParser): """Parser for YAML files.""" - _type = 'yaml' - _extensions = ['yaml', 'yml'] - _ordered = True - _allow_primitives = True - _dict_opts = ['ac_dict'] - -# vim:sw=4:ts=4:et: + _type: typing.ClassVar[str] = "yaml" + _extensions: tuple[str, ...] = ("yaml", "yml") + _ordered: typing.ClassVar[bool] = True + _allow_primitives: typing.ClassVar[bool] = True + _dict_opts: tuple[str, ...] = ("ac_dict", ) diff --git a/src/anyconfig/backend/yaml/pyyaml.py b/src/anyconfig/backend/yaml/pyyaml.py index baabcf0c..cd990a3c 100644 --- a/src/anyconfig/backend/yaml/pyyaml.py +++ b/src/anyconfig/backend/yaml/pyyaml.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2024 Satoru SATOH +# Copyright (C) 2011 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # type() is used to exactly match check instead of isinstance here. @@ -41,22 +41,33 @@ - Changed special keyword option 'ac_safe' from 'safe' to avoid possibility of option conflicts in the future. """ +from __future__ import annotations + +import contextlib +import typing import yaml try: from yaml import CSafeLoader as Loader, CDumper as Dumper except ImportError: - from yaml import SafeLoader as Loader, Dumper # type: ignore + from yaml import SafeLoader as Loader, Dumper # type: ignore[assignment] from ...dicts import convert_to from ...utils import is_dict_like from .. import base from . 
import common +if typing.TYPE_CHECKING: + import collections.abc + _MAPPING_TAG = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG -def _customized_loader(container, loader=Loader, mapping_tag=_MAPPING_TAG): +def _customized_loader( + container: collections.abc.Callable[..., dict[str, typing.Any]], + loader: type[Loader] = Loader, + mapping_tag: str = _MAPPING_TAG, +) -> type[Loader]: """Get the customized loader. Create or update loader with making given callble 'container' to make @@ -65,7 +76,9 @@ def _customized_loader(container, loader=Loader, mapping_tag=_MAPPING_TAG): :param container: Set container used internally """ - def construct_mapping(loader, node, deep=False): + def construct_mapping( + loader: Loader, node: typing.Any, *, deep: bool = False, + ) -> dict[str, typing.Any]: """Construct python object from yaml mapping node. It is based on :meth:`yaml.BaseConstructor.construct_mapping` in PyYAML @@ -74,8 +87,8 @@ def construct_mapping(loader, node, deep=False): loader.flatten_mapping(node) if not isinstance(node, yaml.MappingNode): raise yaml.constructor.ConstructorError( - None, None, f'expected a mapping node, but found {node.id}', - node.start_mark + None, None, f"expected a mapping node, but found {node.id}", + node.start_mark, ) mapping = container() for key_node, value_node in node.value: @@ -83,54 +96,52 @@ def construct_mapping(loader, node, deep=False): try: hash(key) except TypeError as exc: - eargs = ('while constructing a mapping', - node.start_mark, - f'found unacceptable key ({exc!s})', - key_node.start_mark) - raise yaml.constructor.ConstructorError(*eargs) + eargs = ( + "while constructing a mapping", + node.start_mark, + f"found unacceptable key ({exc!s})", + key_node.start_mark, + ) + raise yaml.constructor.ConstructorError(*eargs) from exc value = loader.construct_object(value_node, deep=deep) mapping[key] = value return mapping - tag = 'tag:yaml.org,2002:python/unicode' + tag = "tag:yaml.org,2002:python/unicode" - def 
construct_ustr(loader, node): + def construct_ustr( + loader: Loader, node: typing.Any, + ) -> str | int | float | None: """Unicode string constructor.""" return loader.construct_scalar(node) - try: + with contextlib.suppress(NameError): loader.add_constructor(tag, construct_ustr) - except NameError: - pass if container is not dict: loader.add_constructor(mapping_tag, construct_mapping) return loader -def _customized_dumper(container, dumper=Dumper): +def _customized_dumper( + container: typing.Any, dumper: type[Dumper] = Dumper, +) -> type[Dumper]: """Counterpart of :func:`_customized_loader` for dumpers.""" - def container_representer(dumper, data, mapping_tag=_MAPPING_TAG): + def container_representer( + dumper: Dumper, data: typing.Any, mapping_tag: str = _MAPPING_TAG, + ) -> typing.Any: """Container representer.""" return dumper.represent_mapping(mapping_tag, data.items()) - def ustr_representer(dumper, data): - """Unicode string representer.""" - tag = 'tag:yaml.org,2002:python/unicode' - return dumper.represent_scalar(tag, data) - - try: - dumper.add_representer(unicode, ustr_representer) - except NameError: - pass - if container is not dict: dumper.add_representer(container, container_representer) return dumper -def yml_fnc_by_name(fname, **options): +def yml_fnc_by_name( + fname: str, **options: typing.Any, +) -> collections.abc.Callable[..., typing.Any]: """Get yaml loading/dumping function by name. :param fname: @@ -138,10 +149,12 @@ def yml_fnc_by_name(fname, **options): see also :func:`yml_load` and :func:`yml_dump` :param options: keyword args may contain "ac_safe" to load/dump safely """ - return getattr(yaml, f'safe_{fname}' if options.get('ac_safe') else fname) + return getattr(yaml, f"safe_{fname}" if options.get("ac_safe") else fname) -def yml_fnc_(fname, *args, **options): +def yml_fnc_( + fname: str, *args: typing.Any, **options: typing.Any, +) -> typing.Any: """Call yaml.safe_load, yaml.load, yaml.safe_dump and yaml.dump. 
:param fname: @@ -151,10 +164,14 @@ def yml_fnc_(fname, *args, **options): :param options: keyword args may contain "ac_safe" to load/dump safely """ fnc = yml_fnc_by_name(fname, **options) - return fnc(*args, **common.filter_from_options('ac_safe', options)) + return fnc(*args, **common.filter_from_options("ac_safe", options)) -def yml_load(stream, container, yml_fnc=yml_fnc_, **options): +def yml_load( + stream: typing.IO, container: base.GenContainerT, + yml_fnc: collections.abc.Callable[..., typing.Any] = yml_fnc_, + **options: typing.Any, +) -> dict[str, typing.Any]: """Call yaml.safe_load and yaml.load. :param stream: a file or file-like object to load YAML content @@ -162,26 +179,30 @@ def yml_load(stream, container, yml_fnc=yml_fnc_, **options): :return: Mapping object """ - if options.get('ac_safe', False): + if options.get("ac_safe", False): # .. note:: yaml.safe_load does not support any keyword options. options = {"ac_safe": True} - elif not options.get('Loader', False): - maybe_container = options.get('ac_dict', False) + elif not options.get("Loader", False): + maybe_container = options.get("ac_dict", False) if maybe_container and callable(maybe_container): container = maybe_container - options['Loader'] = _customized_loader(container) + options["Loader"] = _customized_loader(container, Loader) - ret = yml_fnc('load', stream, - **common.filter_from_options('ac_dict', options)) + ret = yml_fnc("load", stream, + **common.filter_from_options("ac_dict", options)) if ret is None: return container() return ret -def yml_dump(data, stream, yml_fnc=yml_fnc_, **options): +def yml_dump( + data: typing.Any, stream: typing.IO, + yml_fnc: collections.abc.Callable[..., typing.Any] = yml_fnc_, + **options: typing.Any, +) -> None: """Call yaml.safe_dump and yaml.dump. 
:param data: Some data to dump @@ -189,34 +210,34 @@ def yml_dump(data, stream, yml_fnc=yml_fnc_, **options): """ _is_dict = is_dict_like(data) - if options.get('ac_safe', False): + if options.get("ac_safe", False): options = {"ac_safe": True} # Same as yml_load. - elif not options.get('Dumper', False) and _is_dict: - # TODO: Any other way to get its constructor? - maybe_container = options.get('ac_dict', type(data)) - options['Dumper'] = _customized_dumper(maybe_container) + elif not options.get("Dumper", False) and _is_dict: + # TODO(ssato): Any other way to get its constructor? + maybe_container = options.get("ac_dict", type(data)) + options["Dumper"] = _customized_dumper(maybe_container) if _is_dict: # Type information and the order of items are lost on dump currently. data = convert_to(data, ac_dict=dict) - options = common.filter_from_options('ac_dict', options) + options = common.filter_from_options("ac_dict", options) - return yml_fnc('dump', data, stream, **options) + return yml_fnc("dump", data, stream, **options) class Parser(common.Parser): """Parser for YAML files.""" - _cid = 'yaml.pyyaml' - _priority = 30 # Higher priority than ruamel.yaml. - _load_opts = ['Loader', 'ac_safe', 'ac_dict'] - _dump_opts = ['stream', 'ac_safe', 'Dumper', 'default_style', - 'default_flow_style', 'canonical', 'indent', 'width', - 'allow_unicode', 'line_break', 'encoding', 'explicit_start', - 'explicit_end', 'version', 'tags'] + _cid: typing.ClassVar[str] = "yaml.pyyaml" + _priority: typing.ClassVar[int] = 30 # Higher priority than ruamel.yaml. + _load_opts: tuple[str, ...] = ("Loader", "ac_safe", "ac_dict") + _dump_opts: tuple[str, ...] 
= ( + "stream", "ac_safe", "Dumper", "default_style", + "default_flow_style", "canonical", "indent", "width", + "allow_unicode", "line_break", "encoding", "explicit_start", + "explicit_end", "version", "tags", + ) load_from_stream = base.to_method(yml_load) dump_to_stream = base.to_method(yml_dump) - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/backend/yaml/ruamel.py b/src/anyconfig/backend/yaml/ruamel.py index af698d4c..44dbd1cb 100644 --- a/src/anyconfig/backend/yaml/ruamel.py +++ b/src/anyconfig/backend/yaml/ruamel.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2024 Satoru SATOH +# Copyright (C) 2011 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """A backend module to load and dump YAML data files using rumael.yaml. @@ -35,6 +35,10 @@ - Split from the common yaml backend and start to support ruamel.yaml specific features. """ +from __future__ import annotations + +import typing + import ruamel.yaml as ryaml from ...utils import filter_options @@ -43,31 +47,37 @@ try: - ryaml.YAML # flake8: noqa + ryaml.YAML # noqa: B018 except AttributeError as exc: - raise ImportError('ruamel.yaml may be too old to use!') from exc - -_YAML_INIT_KWARGS = ['typ', 'pure', 'plug_ins'] # kwargs for ruamel.yaml.YAML -_YAML_INSTANCE_MEMBERS = ['allow_duplicate_keys', 'allow_unicode', - 'block_seq_indent', 'canonical', 'composer', - 'constructor', 'default_flow_style', 'default_style', - 'dump', 'dump_all', 'emitter', 'encoding', - 'explicit_end', 'explicit_start', - 'get_constructor_parser', - 'get_serializer_representer_emitter', 'indent', - 'line_break', 'load', 'load_all', 'map', - 'map_indent', 'official_plug_ins', 'old_indent', - 'parser', 'prefix_colon', 'preserve_quotes', - 'reader', 'register_class', 'representer', - 'resolver', 'scanner', 'seq', 'sequence_dash_offset', - 'sequence_indent', 'serializer', 'stream', 'tags', - 'top_level_block_style_scalar_no_indent_error_1_1', - 'top_level_colon_align', 'version', 'width'] - -_YAML_OPTS = _YAML_INIT_KWARGS + 
_YAML_INSTANCE_MEMBERS - - -def yml_fnc(fname, *args, **options): + msg = "ruamel.yaml may be too old to use!" + raise ImportError(msg) from exc + +_YAML_INIT_KWARGS: tuple[str, ...] = ( # kwargs for ruamel.yaml.YAML + "typ", "pure", "plug_ins", +) +_YAML_INSTANCE_MEMBERS: tuple[str, ...] = ( + "allow_duplicate_keys", "allow_unicode", + "block_seq_indent", "canonical", "composer", + "constructor", "default_flow_style", "default_style", + "dump", "dump_all", "emitter", "encoding", + "explicit_end", "explicit_start", + "get_constructor_parser", + "get_serializer_representer_emitter", "indent", + "line_break", "load", "load_all", "map", + "map_indent", "official_plug_ins", "old_indent", + "parser", "prefix_colon", "preserve_quotes", + "reader", "register_class", "representer", + "resolver", "scanner", "seq", "sequence_dash_offset", + "sequence_indent", "serializer", "stream", "tags", + "top_level_block_style_scalar_no_indent_error_1_1", + "top_level_colon_align", "version", "width", +) +_YAML_OPTS = (*_YAML_INIT_KWARGS, *_YAML_INSTANCE_MEMBERS) + + +def yml_fnc( + fname: str, *args: typing.Any, **options: typing.Any, +) -> base.InDataExT | None: """Call loading functions for yaml data. :param fname: @@ -78,8 +88,8 @@ def yml_fnc(fname, *args, **options): """ options = common.filter_from_options("ac_dict", options) - if 'ac_safe' in options: - options['typ'] = 'safe' # Override it. + if "ac_safe" in options: + options["typ"] = "safe" # Override it. 
iopts = filter_options(_YAML_INIT_KWARGS, options) oopts = filter_options(_YAML_INSTANCE_MEMBERS, options) @@ -91,35 +101,35 @@ def yml_fnc(fname, *args, **options): return getattr(yml, fname)(*args) -def yml_load(stream, container, **options): +def yml_load( + stream: typing.IO, container: base.GenContainerT, + **options: typing.Any, +) -> base.InDataExT: """See :func:`anyconfig.backend.yaml.pyyaml.yml_load`.""" - ret = yml_fnc('load', stream, **options) + ret = yml_fnc("load", stream, **options) if ret is None: return container() return ret -def yml_dump(data, stream, **options): +def yml_dump( + data: base.InDataExT, stream: typing.IO, + **options: typing.Any, +) -> None: """See :func:`anyconfig.backend.yaml.pyyaml.yml_dump`.""" - # .. todo:: Needed? - # if anyconfig.utils.is_dict_like(data): - # if options.get("ac_ordered"): - # factory = collections.OrderedDict - # else: - # factory = dict - # data = anyconfig.dicts.convert_to(data, ac_dict=factory) - return yml_fnc('dump', data, stream, **options) + # .. todo:: + # Maybe it should take care to keep keys' order using + # collections.OrderedDict if ac_ordered is True in ``options``. 
+ yml_fnc("dump", data, stream, **options) class Parser(common.Parser): """Parser for YAML files.""" - _cid = 'yaml.ruamel' + _cid: typing.ClassVar[str] = "yaml.ruamel" _load_opts = _YAML_OPTS _dump_opts = _YAML_OPTS load_from_stream = base.to_method(yml_load) dump_to_stream = base.to_method(yml_dump) - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/cli/__init__.py b/src/anyconfig/cli/__init__.py index 6e229ca1..b5249434 100644 --- a/src/anyconfig/cli/__init__.py +++ b/src/anyconfig/cli/__init__.py @@ -9,10 +9,10 @@ __all__ = [ - 'main', + "main", ] -if __name__ == '__main__': +if __name__ == "__main__": main(sys.argv) # vim:sw=4:ts=4:et: diff --git a/src/anyconfig/cli/_main.py b/src/anyconfig/cli/_main.py index b42e9a5c..d716b4ac 100644 --- a/src/anyconfig/cli/_main.py +++ b/src/anyconfig/cli/_main.py @@ -1,8 +1,10 @@ # -# Copyright (C) 2011 - 2021 Satoru SATOH +# Copyright (C) 2011 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """CLI frontend module for anyconfig.""" +from __future__ import annotations + import os import sys import typing @@ -10,24 +12,17 @@ from .. import api, parser from . 
import ( - actions, constants, detectors, filters, io, parse_args, utils + actions, constants, detectors, filters, parse_args, utils, ) if typing.TYPE_CHECKING: import argparse -OUT_ERR = io.make() -if OUT_ERR: - (sys.stdout, sys.stderr) = OUT_ERR - - -def try_special_command_if_no_inputs(args: 'argparse.Namespace') -> None: +def try_special_command_if_no_inputs(args: argparse.Namespace) -> None: """Run one of some special commands do not require inputs argument.""" - assert not args.inputs - if not args.list and not args.env: - utils.exit_with_output('No inputs were given!', 1) + utils.exit_with_output("No inputs were given!", 1) if not args.output: args.output = sys.stdout @@ -41,8 +36,9 @@ def try_special_command_if_no_inputs(args: 'argparse.Namespace') -> None: sys.exit(0) -def process_args_or_run_command(args: 'argparse.Namespace' - ) -> 'argparse.Namespace': +def process_args_or_run_command( + args: argparse.Namespace, +) -> argparse.Namespace: """Process ``args`` and/or run commands. 
Process ``args``, that is, validate and update it, and raise SystemExit if @@ -50,25 +46,24 @@ def process_args_or_run_command(args: 'argparse.Namespace' """ # Validate args: if args.inputs: - if not args.itype: - if (len(args.inputs) == 1 - and args.inputs[0] == constants.STD_IN_OR_OUT): - utils.exit_with_output( - 'No input type was given but required for the input "-"', - 1 - ) + if (not args.itype and len(args.inputs) == 1 + and args.inputs[0] == constants.STD_IN_OR_OUT): + utils.exit_with_output( + "No input type was given but required for the input '-'", + 1, + ) else: try_special_command_if_no_inputs(args) if args.validate and not args.schema: utils.exit_with_output( - '--validate and --schema options must be used together', - 1 + "--validate and --schema options must be used together", + 1, ) # Update args: if args.loglevel: - warnings.simplefilter('always') + warnings.simplefilter("always") args.otype = detectors.try_detecting_output_type(args) @@ -84,29 +79,36 @@ def process_args_or_run_command(args: 'argparse.Namespace' return args -def try_validate(cnf, args: 'argparse.Namespace') -> None: +def exit_if_not_mergeable(diff: api.InDataExT) -> None: + """Check if ``diff`` is a dict like object can be merged.""" + if not detectors.is_dict_like(diff): + msg_code = (f"Cannot be merged: {diff!r}", 1) + utils.exit_with_output(*msg_code) + + +def try_validate(cnf: api.InDataExT, args: argparse.Namespace) -> None: """Try validate ``cnf`` with the schema loaded from ``args.schema``.""" scm = api.load(args.schema) (res, errors) = api.validate(cnf, scm, ac_schema_errors=True) if res: - msg_code = ('Validation succeeded', 0) + msg_code = ("Validation succeeded", 0) else: msg_code = ( - 'Validation failed:' - f'{(os.linesep + " ").join(errors)}', - 1 + "Validation failed:" + f"{(os.linesep + ' ').join(errors)}", + 1, ) utils.exit_with_output(*msg_code) -def main(argv=None): +def main(argv: list[str] | None = None) -> None: """Provide the entrypoint to run the CLI. 
:param argv: Argument list to parse or None (sys.argv will be set). """ - (_psr, args) = parse_args.parse((argv if argv else sys.argv)[1:]) + (_psr, args) = parse_args.parse((argv or sys.argv)[1:]) args = process_args_or_run_command(args) cnf = os.environ.copy() if args.env else {} @@ -117,22 +119,22 @@ def main(argv=None): diff = utils.load_diff(args, args.extra_opts or {}) if cnf: - api.merge(cnf, diff) + exit_if_not_mergeable(diff) + api.merge(cnf, diff) # type: ignore[arg-type] else: - cnf = diff + cnf = diff # type: ignore[assignment] if args.args: - diff = parser.parse(args.args) - api.merge(cnf, diff) + diff = parser.parse(args.args) # type: ignore[assignment] + exit_if_not_mergeable(diff) + api.merge(cnf, diff) # type: ignore[arg-type] - if args.gen_schema: - cnf = api.gen_schema(cnf) - else: - cnf = filters.do_filter(cnf, args) + cnf = ( + api.gen_schema(cnf) if args.gen_schema # type: ignore[assignment] + else filters.do_filter(cnf, args) + ) if args.validate: try_validate(cnf, args) actions.try_output_result(cnf, args) - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/cli/actions.py b/src/anyconfig/cli/actions.py index ebb55f20..d46d0fd6 100644 --- a/src/anyconfig/cli/actions.py +++ b/src/anyconfig/cli/actions.py @@ -1,22 +1,23 @@ # -# Copyright (C) 2011 - 2021 Satoru SATOH +# Copyright (C) 2011 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Actions for anyconfig.cli.*.""" +import argparse + from .. import api from . 
import utils -def show_parsers_and_exit(): +def show_parsers_and_exit() -> None: """Show list of info of parsers available.""" utils.exit_with_output(utils.make_parsers_txt()) -def try_output_result(cnf, args): +def try_output_result( + cnf: api.InDataExT, args: argparse.Namespace, +) -> None: """Try to output result.""" api.dump( - cnf, args.output, args.otype, - **(args.extra_opts if args.extra_opts else {}) + cnf, args.output, args.otype, **(args.extra_opts or {}), ) - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/cli/constants.py b/src/anyconfig/cli/constants.py index d60c54e3..e3bd088f 100644 --- a/src/anyconfig/cli/constants.py +++ b/src/anyconfig/cli/constants.py @@ -1,9 +1,9 @@ # -# Copyright (C) 2011 - 2021 Satoru SATOH +# Copyright (C) 2011 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # """Provides constants for anyconfig.cli.""" -STD_IN_OR_OUT = '-' +STD_IN_OR_OUT = "-" USAGE = """\ %(prog)s [Options...] CONF_PATH_OR_PATTERN_0 [CONF_PATH_OR_PATTERN_1 ..] @@ -51,5 +51,3 @@ SET_HELP = ("Specify key path to set (update) part of config, for " "example, '--set a.b.c=1' to a config {'a': {'b': {'c': 0, " "'d': 1}}} gives {'a': {'b': {'c': 1, 'd': 1}}}.") - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/cli/detectors.py b/src/anyconfig/cli/detectors.py index 64afbdc1..61f3bba5 100644 --- a/src/anyconfig/cli/detectors.py +++ b/src/anyconfig/cli/detectors.py @@ -1,8 +1,11 @@ # -# Copyright (C) 2011 - 2021 Satoru SATOH +# Copyright (C) 2011 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Detect file type and parser from inputs and/or output.""" +from __future__ import annotations + +import collections import os import pathlib import typing @@ -14,8 +17,13 @@ if typing.TYPE_CHECKING: import argparse + try: + from typing import TypeGuard + except ImportError: + from typing_extensions import TypeGuard + -def are_same_file_types(paths: typing.List[str]) -> bool: +def are_same_file_types(paths: list[str]) -> bool: """Test if all of the types for given 
file paths ``paths`` are same.""" if not paths: return False @@ -27,7 +35,7 @@ def are_same_file_types(paths: typing.List[str]) -> bool: return all(x and exts[0] == x for x in exts[1:]) -def find_by_the_type(io_type: str) -> typing.Optional[str]: +def find_by_the_type(io_type: str) -> str | None: """Check the type given by users.""" default = None @@ -35,68 +43,62 @@ def find_by_the_type(io_type: str) -> typing.Optional[str]: return default try: - return api.find(None, io_type).type() # type: ignore + return api.find(None, io_type).type() # type: ignore[attr-defined] except api.UnknownProcessorTypeError: # Just ignore it should be wrong type. warnings.warn( - 'Ignored the given type because it looks wrong or ' - 'is not supported by installed parser backends: ' - f'{io_type}', stacklevel=2 + "Ignored the given type because it looks wrong or " + "is not supported by installed parser backends: " + f"{io_type}", stacklevel=2, ) return default -def find_by_the_paths(paths: typing.List[str], - ignore_errors: bool = True - ) -> typing.Optional[str]: +def find_by_the_paths( + paths: list[str], *, ignore_errors: bool = True, +) -> str | None: """Try to detect file (parser) type from given file paths ``paths``.""" default = None msg = ( - '*** You have to specify file type[s] with ' - '-I/--itype or -O/--otype options explicitly. ***' + "*** You have to specify file type[s] with " + "-I/--itype or -O/--otype options explicitly. 
***" ) - paths_s = ', '.join(paths) + paths_s = ", ".join(paths) if not are_same_file_types(paths): if ignore_errors: return default utils.exit_with_output( - 'Failed to detect a file type because given file paths ' - 'may contain files with multiple types: ' - f'{paths_s}{os.linesep}{msg}', - 1 + "Failed to detect a file type because given file paths " + "may contain files with multiple types: " + f"{paths_s}{os.linesep}{msg}", + 1, ) if constants.STD_IN_OR_OUT not in paths: try: - return api.find(paths[0]).type() # type: ignore + return api.find(paths[0]).type() # type: ignore[attr-defined] except api.UnknownFileTypeError: if not ignore_errors: utils.exit_with_output( - 'Failed to detect the file type because it is/those are ' - f'unknown file type[s]: {paths_s}{os.linesep}{msg}', - 1 + "Failed to detect the file type because it is/those are " + f"unknown file type[s]: {paths_s}{os.linesep}{msg}", + 1, ) return default -def try_detecting_input_type(args: 'argparse.Namespace', - ignore_errors: bool = True - ) -> typing.Optional[str]: +def try_detecting_input_type( + args: argparse.Namespace, *, ignore_errors: bool = True, +) -> str | None: """Try to resolve a file type and parser of inputs.""" # First, try the type given by users. if args.itype: - # TBD: - # - # if are_same_file_types(args.inputs): - # ... the code blocks below ... - # else: - # (ignore args.itype?) itype = find_by_the_type(args.itype) if itype: return itype @@ -108,8 +110,9 @@ def try_detecting_input_type(args: 'argparse.Namespace', return None -def try_detecting_output_type(args: 'argparse.Namespace' - ) -> typing.Optional[str]: +def try_detecting_output_type( + args: argparse.Namespace, +) -> str | None: """Try to resolve a file type and parser of outputs (``args.output``).""" # First, try the type given by users. 
if args.otype: @@ -127,12 +130,15 @@ def try_detecting_output_type(args: 'argparse.Namespace' itype = try_detecting_input_type(args) if not itype: utils.exit_with_output( - 'Failed to find or detect the file type: ' - f'itype={args.itype}, otype={args.otype}, ' - f'output={args.output}, inputs={", ".join(args.inputs)}', - 1 + "Failed to find or detect the file type: " + f"itype={args.itype}, otype={args.otype}, " + f"output={args.output}, inputs={', '.join(args.inputs)}", + 1, ) return itype -# vim:sw=4:ts=4:et: + +def is_dict_like(obj: typing.Any) -> TypeGuard[dict]: + """Return True if `obj` is a dict.""" + return isinstance(obj, (dict, collections.abc.Mapping)) diff --git a/src/anyconfig/cli/filters.py b/src/anyconfig/cli/filters.py index a628d731..48b8c4b5 100644 --- a/src/anyconfig/cli/filters.py +++ b/src/anyconfig/cli/filters.py @@ -1,9 +1,11 @@ # -# Copyright (C) 2011 - 2021 Satoru SATOH +# Copyright (C) 2011 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=broad-except """Filter functions for anyconfig.cli.*.""" +from __future__ import annotations + import typing from .. 
import api, parser @@ -11,27 +13,28 @@ if typing.TYPE_CHECKING: import argparse + from ..common import InDataExT -def do_filter(cnf: typing.Dict[str, typing.Any], args: 'argparse.Namespace'): +def do_filter( + cnf: dict[str, typing.Any], args: argparse.Namespace, +) -> InDataExT: """Filter ``cnf`` by query/get/set and return filtered result.""" if args.query: try: return api.try_query(cnf, args.query) - except Exception as exc: - utils.exit_with_output(f'Failed to query: exc={exc!s}', 1) + except Exception as exc: # noqa: BLE001 + utils.exit_with_output(f"Failed to query: exc={exc!s}", 1) if args.get: (cnf, err) = api.get(cnf, args.get) if cnf is None: - utils.exit_with_output(f'Failed to get result: err={err!s}', 1) + utils.exit_with_output(f"Failed to get result: err={err!s}", 1) return cnf if args.set: - (key, val) = args.set.split('=') + (key, val) = args.set.split("=") api.set_(cnf, key, parser.parse(val)) return cnf - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/cli/io.py b/src/anyconfig/cli/io.py index e246d986..9345b8ef 100644 --- a/src/anyconfig/cli/io.py +++ b/src/anyconfig/cli/io.py @@ -1,28 +1,29 @@ # -# Copyright (C) 2011 - 2023 Satoru SATOH +# Copyright (C) 2011 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Initialize sys.std{out,err}.""" +from __future__ import annotations + import io import sys +import typing from .. import ioinfo -def make(): +def make() -> tuple[typing.IO, typing.IO] | None: """Initialize sys.std{out,err} and returns them.""" encoding = ioinfo.get_encoding() - # TODO: What should be done for an error, "AttributeError: '_io.StringIO' - # object has no attribute 'buffer'"? + # TODO(ssato): #188 What should be done for an error, "AttributeError: + # '_io.StringIO' object has no attribute 'buffer'"? 
try: return ( io.TextIOWrapper(sys.stdout.buffer, encoding=encoding), - io.TextIOWrapper(sys.stderr.buffer, encoding=encoding) + io.TextIOWrapper(sys.stderr.buffer, encoding=encoding), ) except AttributeError: pass return None - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/cli/parse_args.py b/src/anyconfig/cli/parse_args.py index 30ee8d55..369b6504 100644 --- a/src/anyconfig/cli/parse_args.py +++ b/src/anyconfig/cli/parse_args.py @@ -1,10 +1,11 @@ # -# Copyright (C) 2011 - 2021 Satoru SATOH +# Copyright (C) 2011 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Argument parser.""" +from __future__ import annotations + import argparse -import typing from .. import api from . import constants, utils @@ -14,94 +15,98 @@ "loglevel": 0, "list": False, "output": None, "itype": None, "otype": None, "atype": None, "merge": api.MS_DICTS, "ignore_missing": False, "template": False, "env": False, "schema": None, "validate": False, - "gen_schema": False, "extra_opts": None + "gen_schema": False, "extra_opts": None, } -def gen_type_help_txt(types: str, target: str = 'Input') -> str: +def gen_type_help_txt(types: str, target: str = "Input") -> str: """Generate a type help txt.""" - return (f'Select type of {target} files from {types}' - '[Automatically detected by those extension]') + return ( + f"Select type of {target} files from {types}" # noqa: S608 + "[Automatically detected by those extension]" + ) -def make_parser(defaults: typing.Optional[typing.Dict] = None, - prog: typing.Optional[str] = None - ) -> argparse.ArgumentParser: +def make_parser( + defaults: dict | None = None, + prog: str | None = None, +) -> argparse.ArgumentParser: """Make an instance of argparse.ArgumentParser to parse arguments.""" if defaults is None: defaults = DEFAULTS - ctypes: typing.List[str] = utils.list_parser_types() - ctypes_s: str = ', '.join(ctypes) + ctypes: list[str] = utils.list_parser_types() + ctypes_s: str = ", ".join(ctypes) apsr = argparse.ArgumentParser(prog=prog, 
usage=constants.USAGE) apsr.set_defaults(**defaults) - apsr.add_argument('inputs', type=str, nargs='*', help='Input files') + apsr.add_argument("inputs", type=str, nargs="*", help="Input files") apsr.add_argument( - '--version', action='version', - version=f'%%(prog)s {".".join(api.version())}' + "--version", action="version", + version=f"%%(prog)s {'.'.join(api.version())}", ) - apsr.add_argument('-o', '--output', help='Output file path') - apsr.add_argument('-I', '--itype', choices=ctypes, metavar='ITYPE', + apsr.add_argument("-o", "--output", help="Output file path") + apsr.add_argument("-I", "--itype", choices=ctypes, metavar="ITYPE", help=gen_type_help_txt(ctypes_s)) - apsr.add_argument('-O', '--otype', choices=ctypes, metavar='OTYPE', - help=gen_type_help_txt(ctypes_s, 'Output')) + apsr.add_argument("-O", "--otype", choices=ctypes, metavar="OTYPE", + help=gen_type_help_txt(ctypes_s, "Output")) mss = api.MERGE_STRATEGIES - mss_s = ', '.join(mss) - mt_help = ('Select strategy to merge multiple configs from ' - f'{mss_s} {defaults["merge"]}') - apsr.add_argument('-M', '--merge', choices=mss, metavar='MERGE', + mss_s = ", ".join(mss) + mt_help = ( + "Select strategy to merge multiple configs from " # noqa: S608 + f"{mss_s} {defaults['merge']}" + ) + apsr.add_argument("-M", "--merge", choices=mss, metavar="MERGE", help=mt_help) - apsr.add_argument('-A', '--args', help='Argument configs to override') - apsr.add_argument('--atype', choices=ctypes, metavar='ATYPE', + apsr.add_argument("-A", "--args", help="Argument configs to override") + apsr.add_argument("--atype", choices=ctypes, metavar="ATYPE", help=constants.ATYPE_HELP_FMT % ctypes_s) - lpog = apsr.add_argument_group('List specific options') - lpog.add_argument('-L', '--list', action='store_true', - help='List supported config types') - - spog = apsr.add_argument_group('Schema specific options') - spog.add_argument('--validate', action='store_true', - help='Only validate input files and do not output. 
' - 'You must specify schema file with -S/--schema ' - 'option.') - spog.add_argument('--gen-schema', action='store_true', - help='Generate JSON schema for givne config file[s] ' - 'and output it instead of (merged) configuration.') - - gspog = apsr.add_argument_group('Query/Get/set options') - gspog.add_argument('-Q', '--query', help=constants.QUERY_HELP) - gspog.add_argument('--get', help=constants.GET_HELP) - gspog.add_argument('--set', help=constants.SET_HELP) - - cpog = apsr.add_argument_group('Common options') - cpog.add_argument('-x', '--ignore-missing', action='store_true', - help='Ignore missing input files') - cpog.add_argument('-T', '--template', action='store_true', - help='Enable template config support') - cpog.add_argument('-E', '--env', action='store_true', - help='Load configuration defaults from ' - 'environment values') - cpog.add_argument('-S', '--schema', help='Specify Schema file[s] path') - cpog.add_argument('-e', '--extra-opts', - help='Extra options given to the API call, ' - '--extra-options indent:2 (specify the ' - 'indent for pretty-printing of JSON outputs) ' - 'for example') - cpog.add_argument('-v', '--verbose', action='count', dest='loglevel', - help='Verbose mode; -v or -vv (more verbose)') + lpog = apsr.add_argument_group("List specific options") + lpog.add_argument("-L", "--list", action="store_true", + help="List supported config types") + + spog = apsr.add_argument_group("Schema specific options") + spog.add_argument("--validate", action="store_true", + help="Only validate input files and do not output. 
" + "You must specify schema file with -S/--schema " + "option.") + spog.add_argument("--gen-schema", action="store_true", + help="Generate JSON schema for givne config file[s] " + "and output it instead of (merged) configuration.") + + gspog = apsr.add_argument_group("Query/Get/set options") + gspog.add_argument("-Q", "--query", help=constants.QUERY_HELP) + gspog.add_argument("--get", help=constants.GET_HELP) + gspog.add_argument("--set", help=constants.SET_HELP) + + cpog = apsr.add_argument_group("Common options") + cpog.add_argument("-x", "--ignore-missing", action="store_true", + help="Ignore missing input files") + cpog.add_argument("-T", "--template", action="store_true", + help="Enable template config support") + cpog.add_argument("-E", "--env", action="store_true", + help="Load configuration defaults from " + "environment values") + cpog.add_argument("-S", "--schema", help="Specify Schema file[s] path") + cpog.add_argument("-e", "--extra-opts", + help="Extra options given to the API call, " + "--extra-options indent:2 (specify the " + "indent for pretty-printing of JSON outputs) " + "for example") + cpog.add_argument("-v", "--verbose", action="count", dest="loglevel", + help="Verbose mode; -v or -vv (more verbose)") return apsr -def parse(argv: typing.List[str], - prog: typing.Optional[str] = None - ) -> typing.Tuple[argparse.ArgumentParser, argparse.Namespace]: +def parse( + argv: list[str], + prog: str | None = None, +) -> tuple[argparse.ArgumentParser, argparse.Namespace]: """Parse given arguments ``argv`` and return it with the parser.""" psr = make_parser(prog=prog) return (psr, psr.parse_args(argv)) - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/cli/utils.py b/src/anyconfig/cli/utils.py index 05337743..c2dd9a9a 100644 --- a/src/anyconfig/cli/utils.py +++ b/src/anyconfig/cli/utils.py @@ -1,8 +1,10 @@ # -# Copyright (C) 2011 - 2021 Satoru SATOH +# Copyright (C) 2011 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Utilities for 
anyconfig.cli.*.""" +from __future__ import annotations + import functools import os import sys @@ -10,9 +12,12 @@ from .. import api +if typing.TYPE_CHECKING: + import argparse + @functools.lru_cache(None) -def list_parser_types() -> typing.List[str]: +def list_parser_types() -> list[str]: """Provide an wrapper of api.list_types() to memoize its result.""" return api.list_types() @@ -20,25 +25,25 @@ def list_parser_types() -> typing.List[str]: def make_parsers_txt() -> str: """Make up a text shows list and info of parsers available.""" sep = os.linesep - indent = ' ' + indent = " " - parser_types = ', '.join(list_parser_types()) + parser_types = ", ".join(list_parser_types()) file_ext_vs_parsers = sep.join( - f'{indent}{x}: ' + ', '.join(p.cid() for p in ps) + f"{indent}{x}: " + ", ".join(p.cid() for p in ps) for x, ps in api.list_by_extension() ) return sep.join( [ - 'Supported file types:', - f'{indent}{parser_types}', - 'Supported file extensions [extension: parsers]:', - f'{file_ext_vs_parsers}', - ] + "Supported file types:", + f"{indent}{parser_types}", + "Supported file extensions [extension: parsers]:", + f"{file_ext_vs_parsers}", + ], ) -def exit_with_output(content, exit_code=0): +def exit_with_output(content: str, exit_code: int = 0) -> None: """Exit the program with printing out messages. :param content: content to print out @@ -48,7 +53,7 @@ def exit_with_output(content, exit_code=0): sys.exit(exit_code) -def exit_if_load_failure(cnf, msg): +def exit_if_load_failure(cnf: api.InDataExT, msg: str) -> None: """Exit the program with errors if loading data was failed. :param cnf: Loaded configuration object or None indicates load failure @@ -58,7 +63,9 @@ def exit_if_load_failure(cnf, msg): exit_with_output(msg, 1) -def load_diff(args, extra_opts): +def load_diff( + args: argparse.Namespace, extra_opts: dict[str, typing.Any], +) -> api.InDataExT: """Load update data. 
:param args: :class:`argparse.Namespace` object @@ -75,14 +82,14 @@ def load_diff(args, extra_opts): exit_with_output(f"Wrong input type '{args.itype}'", 1) except api.UnknownFileTypeError: exit_with_output( - 'No appropriate backend was found for given file ' - f"type='{args.itype}', inputs={', '.join(args.inputs)}", - 1 + "No appropriate backend was found for given file " + f"type='{args.itype}', inputs={', '.join(args.inputs)}", + 1, ) - exit_if_load_failure( - diff, f'Failed to load: args={", ".join(args.inputs)}' - ) - return diff + if diff is None: + exit_with_output( + f"Failed to load: args={', '.join(args.inputs)}", 1, + ) -# vim:sw=4:ts=4:et: + return diff diff --git a/src/anyconfig/common/__init__.py b/src/anyconfig/common/__init__.py index 2be1c209..e48189cd 100644 --- a/src/anyconfig/common/__init__.py +++ b/src/anyconfig/common/__init__.py @@ -1,21 +1,19 @@ # -# Copyright (C) 2021 Satoru SATOH +# Copyright (C) 2021 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Misc global constants, variables, classes and so on.""" from .datatypes import ( - InDataT, InDataExT, PrimitiveT + InDataT, InDataExT, PrimitiveT, ) from .errors import ( UnknownParserTypeError, UnknownProcessorTypeError, UnknownFileTypeError, - ValidationError + ValidationError, ) __all__ = [ - 'InDataT', 'InDataExT', 'PrimitiveT', - 'UnknownParserTypeError', 'UnknownProcessorTypeError', - 'UnknownFileTypeError', 'ValidationError', + "InDataT", "InDataExT", "PrimitiveT", + "UnknownParserTypeError", "UnknownProcessorTypeError", + "UnknownFileTypeError", "ValidationError", ] - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/common/datatypes.py b/src/anyconfig/common/datatypes.py index 1805f276..11170d5e 100644 --- a/src/anyconfig/common/datatypes.py +++ b/src/anyconfig/common/datatypes.py @@ -1,15 +1,15 @@ # -# Copyright (C) 2021 Satoru SATOH +# Copyright (C) 2021 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=inherit-non-class,too-few-public-methods 
"""anyconfig basic data types.""" +from __future__ import annotations + import typing -InDataT = typing.Mapping[str, typing.Any] +InDataT = dict[str, typing.Any] PrimitiveT = typing.Union[None, int, float, bool, str, InDataT] InDataExT = typing.Union[PrimitiveT, InDataT] - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/common/errors.py b/src/anyconfig/common/errors.py index 362dcd3d..7acda633 100644 --- a/src/anyconfig/common/errors.py +++ b/src/anyconfig/common/errors.py @@ -1,18 +1,20 @@ # -# Copyright (C) 2021 Satoru SATOH +# Copyright (C) 2021 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=too-few-public-methods """Basic data types for anyconfig.""" +from __future__ import annotations + import typing class BaseError(RuntimeError): """Base Error exception.""" - _msg_fmt: str = 'forced_type: {!s}' + _msg_fmt: str = "forced_type: {!s}" - def __init__(self, arg: typing.Optional[typing.Any] = None): + def __init__(self, arg: typing.Any | None = None) -> None: """Initialize the format.""" super().__init__(self._msg_fmt.format(str(arg))) @@ -20,7 +22,7 @@ def __init__(self, arg: typing.Optional[typing.Any] = None): class UnknownParserTypeError(BaseError): """Raise if no parsers were found for given type.""" - _msg_fmt: str = 'No parser found for type: {!s}' + _msg_fmt: str = "No parser found for type: {!s}" class UnknownProcessorTypeError(UnknownParserTypeError): @@ -30,12 +32,10 @@ class UnknownProcessorTypeError(UnknownParserTypeError): class UnknownFileTypeError(BaseError): """Raise if not parsers were found for given file path.""" - _msg_fmt: str = 'No parser found for file: {!s}' + _msg_fmt: str = "No parser found for file: {!s}" class ValidationError(BaseError): """Raise if validation failed.""" - _msg_fmt: str = 'Validation failed: {!s}' - -# vim:sw=4:ts=4:et: + _msg_fmt: str = "Validation failed: {!s}" diff --git a/src/anyconfig/dicts.py b/src/anyconfig/dicts.py index 57333a44..d8ccab3b 100644 --- a/src/anyconfig/dicts.py +++ 
b/src/anyconfig/dicts.py @@ -2,7 +2,7 @@ # Forked from m9dicts.{api,dicts}. # # Copyright (C) 2011 - 2021 Red Hat, Inc. -# Copyright (C) 2018 - 2021 Satoru SATOH +# Copyright (C) 2018 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # r"""Utility functions to operate on mapping objects such as get, set and merge. @@ -12,6 +12,8 @@ :mod:`m9dicts.dicts` """ +from __future__ import annotations + import collections import functools import operator @@ -22,20 +24,20 @@ # Merge strategies: -MS_REPLACE: str = 'replace' -MS_NO_REPLACE: str = 'noreplace' -MS_DICTS: str = 'merge_dicts' -MS_DICTS_AND_LISTS: str = 'merge_dicts_and_lists' -MERGE_STRATEGIES: typing.Tuple[str, ...] = ( - MS_REPLACE, MS_NO_REPLACE, MS_DICTS, MS_DICTS_AND_LISTS +MS_REPLACE: str = "replace" +MS_NO_REPLACE: str = "noreplace" +MS_DICTS: str = "merge_dicts" +MS_DICTS_AND_LISTS: str = "merge_dicts_and_lists" +MERGE_STRATEGIES: tuple[str, ...] = ( + MS_REPLACE, MS_NO_REPLACE, MS_DICTS, MS_DICTS_AND_LISTS, ) -PATH_SEPS: typing.Tuple[str, ...] = ('/', '.') +PATH_SEPS: tuple[str, ...] = ("/", ".") -_JSNP_GET_ARRAY_IDX_REG: typing.Pattern = re.compile(r'(?:0|[1-9][0-9]*)') +_JSNP_GET_ARRAY_IDX_REG: re.Pattern = re.compile(r"(?:0|[1-9][0-9]*)") -DictT = typing.Dict[str, typing.Any] +DictT = dict[str, typing.Any] def _jsnp_unescape(jsn_s: str) -> str: @@ -45,11 +47,12 @@ def _jsnp_unescape(jsn_s: str) -> str: .. seealso:: JSON Pointer: http://tools.ietf.org/html/rfc6901 """ - return jsn_s.replace('~1', '/').replace('~0', '~') + return jsn_s.replace("~1", "/").replace("~0", "~") -def _split_path(path: str, seps: typing.Tuple[str, ...] = PATH_SEPS - ) -> typing.List[str]: +def _split_path( + path: str, seps: tuple[str, ...] = PATH_SEPS, +) -> list[str]: """Parse a path expression and return a list of path items. :param path: Path expression may contain separator chars. @@ -61,15 +64,16 @@ def _split_path(path: str, seps: typing.Tuple[str, ...] 
= PATH_SEPS for sep in seps: if sep in path: - if path == sep: # Special case, '/' or '.' only. - return [''] + if path == sep: # Special case, "/" or "." only. + return [""] return [x for x in path.split(sep) if x] return [path] -def mk_nested_dic(path: str, val: typing.Any, - seps: typing.Tuple[str, ...] = PATH_SEPS) -> DictT: +def mk_nested_dic( + path: str, val: typing.Any, seps: tuple[str, ...] = PATH_SEPS, +) -> DictT: """Make a nested dict iteratively. :param path: Path expression to make a nested dict @@ -83,9 +87,10 @@ def mk_nested_dic(path: str, val: typing.Any, return ret -def get(dic: DictT, path: str, seps: typing.Tuple[str, ...] = PATH_SEPS, - idx_reg: typing.Pattern = _JSNP_GET_ARRAY_IDX_REG - ) -> typing.Tuple[typing.Any, str]: +def get( + dic: DictT, path: str, seps: tuple[str, ...] = PATH_SEPS, + idx_reg: re.Pattern = _JSNP_GET_ARRAY_IDX_REG, +) -> tuple[typing.Any, str]: """Getter for nested dicts. :param dic: a dict[-like] object @@ -95,23 +100,25 @@ def get(dic: DictT, path: str, seps: typing.Tuple[str, ...] = PATH_SEPS, """ items = [_jsnp_unescape(s) for s in _split_path(path, seps)] # : [str] if not items: - return (dic, '') + return (dic, "") try: if len(items) == 1: - return (dic[items[0]], '') + return (dic[items[0]], "") prnt: typing.Any = functools.reduce(operator.getitem, items[:-1], dic) arr = (idx_reg.match(items[-1]) - if utils.is_list_like(prnt) else False) # type: ignore + if utils.is_list_like(prnt) else False) - return (prnt[int(items[-1])], '') if arr else (prnt[items[-1]], '') + return (prnt[int(items[-1])], "") if arr else (prnt[items[-1]], "") except (TypeError, KeyError, IndexError) as exc: return (None, str(exc)) -def set_(dic: DictT, path: str, val: typing.Any, - seps: typing.Tuple[str, ...] = PATH_SEPS) -> None: +def set_( + dic: DictT, path: str, val: typing.Any, + seps: tuple[str, ...] = PATH_SEPS, +) -> None: """Setter for nested dicts. 
:param dic: a dict[-like] object support recursive merge operations @@ -126,8 +133,10 @@ def _are_list_like(*objs: typing.Any) -> bool: return all(utils.is_list_like(obj) for obj in objs) -def _update_with_replace(self: DictT, other: DictT, key: str, - default: typing.Any = None, **_options) -> None: +def _update_with_replace( + self: DictT, other: DictT, key: str, + default: typing.Any = None, **_options: typing.Any, +) -> None: """Update ``self`` by replacements using ``other``. Replace value of a mapping object 'self' with 'other' has if both have same @@ -147,8 +156,10 @@ def _update_with_replace(self: DictT, other: DictT, key: str, self[key] = default -def _update_wo_replace(self: DictT, other: DictT, key: str, - val: typing.Any = None, **_options) -> None: +def _update_wo_replace( + self: DictT, other: DictT, key: str, + val: typing.Any = None, **_options: typing.Any, +) -> None: """Update ``self`` without any replacements using ``other``. Never update (replace) the value of 'self' with 'other''s, that is, only @@ -165,8 +176,9 @@ def _update_wo_replace(self: DictT, other: DictT, key: str, self[key] = other.get(key, val) -def _merge_list(self: DictT, key: str, - lst: typing.Iterable[typing.Any]) -> None: +def _merge_list( + self: DictT, key: str, lst: collections.abc.Iterable[typing.Any], +) -> None: """Update a dict ``self`` using an iterable ``lst``. :param key: self[key] will be updated @@ -184,9 +196,11 @@ def _merge_other(self: DictT, key: str, val: typing.Any) -> None: self[key] = val # Just overwrite it by default implementation. -def _update_with_merge(self: DictT, other: DictT, key: str, - val: typing.Any = None, - merge_lists: bool = False, **options) -> None: +def _update_with_merge( + self: DictT, other: DictT, key: str, *, + val: typing.Any = None, + merge_lists: bool = False, **options: typing.Any, +) -> None: """Update a dict ``self`` using ``other`` and optional arguments. Merge the value of self with other's recursively. 
Behavior of merge will be @@ -221,8 +235,10 @@ def _update_with_merge(self: DictT, other: DictT, key: str, self[key] = val -def _update_with_merge_lists(self: DictT, other: DictT, key: str, - val: typing.Any = None, **options) -> None: +def _update_with_merge_lists( + self: DictT, other: DictT, key: str, + val: typing.Any = None, **options: typing.Any, +) -> None: """Similar to _update_with_merge but merge lists always. :param self: mapping object to update with 'other' @@ -241,7 +257,7 @@ def _update_with_merge_lists(self: DictT, other: DictT, key: str, MS_DICTS_AND_LISTS: _update_with_merge_lists} -def _get_update_fn(strategy: str) -> typing.Callable[..., None]: +def _get_update_fn(strategy: str) -> collections.abc.Callable[..., None]: """Select dict-like class based on merge strategy and orderness of keys. :param merge: Specify strategy from MERGE_STRATEGIES of how to merge dicts. @@ -250,22 +266,24 @@ def _get_update_fn(strategy: str) -> typing.Callable[..., None]: if strategy is None: strategy = MS_DICTS try: - return typing.cast(typing.Callable[..., None], _MERGE_FNS[strategy]) + return typing.cast( + "collections.abc.Callable[..., None]", + _MERGE_FNS[strategy], + ) except KeyError as exc: if callable(strategy): return strategy - raise ValueError(f'Wrong merge strategy: {strategy!r}') from exc - + msg = f"Wrong merge strategy: {strategy!r}" + raise ValueError(msg) from exc -UpdatesT = typing.Union[ - typing.Iterable[typing.Tuple[str, typing.Any]], - DictT -] - -def merge(self: DictT, other: UpdatesT, ac_merge: str = MS_DICTS, - **options) -> None: +def merge( + self: DictT, + other: collections.abc.Iterable[tuple[str, typing.Any]] | DictT, + ac_merge: str = MS_DICTS, + **options: typing.Any, +) -> None: """Update (merge) a mapping object ``self`` with ``other``. 
``other`` may be a mapping object or an iterable yields (key, value) tuples @@ -278,24 +296,27 @@ def merge(self: DictT, other: UpdatesT, ac_merge: str = MS_DICTS, _update_fn = _get_update_fn(ac_merge) if isinstance(other, dict): - for key in other.keys(): + for key in other: _update_fn(self, other, key, **options) else: try: iother = typing.cast( - typing.Iterable[typing.Tuple[str, typing.Any]], - other + "collections.abc.Iterable[tuple[str, typing.Any]]", + other, ) for key, val in iother: _update_fn(self, dict(other), key, val=val, **options) except (ValueError, TypeError) as exc: # Re-raise w/ info. - raise type(exc)(f'{exc!s} other={other!r}') + msg = f"{exc!s} other={other!r}" + raise type(exc)(msg) from exc -def _make_recur(obj: typing.Any, make_fn: typing.Callable, - ac_ordered: bool = False, - ac_dict: typing.Optional[typing.Callable] = None, - **options) -> DictT: +def _make_recur( + obj: typing.Any, make_fn: collections.abc.Callable, *, + ac_ordered: bool = False, + ac_dict: collections.abc.Callable | None = None, + **options: typing.Any, +) -> DictT: """Apply ``make_fn`` to ``obj`` recursively. :param obj: A mapping objects or other primitive object @@ -313,8 +334,10 @@ def _make_recur(obj: typing.Any, make_fn: typing.Callable, for k, v in obj.items()) -def _make_iter(obj: typing.Any, make_fn: typing.Callable, **options - ) -> DictT: +def _make_iter( + obj: typing.Any, make_fn: collections.abc.Callable, + **options: typing.Any, +) -> DictT: """Apply ``make_fn`` to ``obj`` iteratively. 
:param obj: A mapping objects or other primitive object @@ -326,9 +349,11 @@ def _make_iter(obj: typing.Any, make_fn: typing.Callable, **options return type(obj)(make_fn(v, **options) for v in obj) -def convert_to(obj: typing.Any, ac_ordered: bool = False, - ac_dict: typing.Optional[typing.Callable] = None, - **options) -> DictT: +def convert_to( + obj: typing.Any, *, ac_ordered: bool = False, + ac_dict: collections.abc.Callable | None = None, + **options: typing.Any, +) -> DictT: """Convert a mapping objects to a dict or object of 'to_type' recursively. Borrowed basic idea and implementation from bunch.unbunchify. (bunch is @@ -348,5 +373,3 @@ def convert_to(obj: typing.Any, ac_ordered: bool = False, return _make_iter(obj, convert_to, **options) return obj - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/ioinfo/__init__.py b/src/anyconfig/ioinfo/__init__.py index f23875c0..e0898926 100644 --- a/src/anyconfig/ioinfo/__init__.py +++ b/src/anyconfig/ioinfo/__init__.py @@ -27,10 +27,10 @@ from .utils import get_encoding __all__ = [ - 'IOInfo', 'PathOrIOInfoT', - 'is_stream', - 'make', 'makes', - 'get_encoding', + "IOInfo", "PathOrIOInfoT", + "is_stream", + "make", "makes", + "get_encoding", ] # vim:sw=4:ts=4:et: diff --git a/src/anyconfig/ioinfo/constants.py b/src/anyconfig/ioinfo/constants.py index 5785404f..79298d51 100644 --- a/src/anyconfig/ioinfo/constants.py +++ b/src/anyconfig/ioinfo/constants.py @@ -6,7 +6,7 @@ import os.path -GLOB_MARKER: str = '*' +GLOB_MARKER: str = "*" PATH_SEP: str = os.path.sep # vim:sw=4:ts=4:et: diff --git a/src/anyconfig/ioinfo/datatypes.py b/src/anyconfig/ioinfo/datatypes.py index a6db0c21..66fa9d3e 100644 --- a/src/anyconfig/ioinfo/datatypes.py +++ b/src/anyconfig/ioinfo/datatypes.py @@ -1,29 +1,29 @@ # -# Copyright (C) 2021 Satoru SATOH +# Copyright (C) 2021 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=inherit-non-class,too-few-public-methods """anyconfig basic data types.""" +from __future__ import 
annotations + import pathlib import typing -IOI_PATH_OBJ: str = 'pathlib.Path' -IOI_STREAM: str = 'stream' +IOI_PATH_OBJ: str = "pathlib.Path" +IOI_STREAM: str = "stream" class IOInfo(typing.NamedTuple): """Equivalent to collections.namedtuple.""" - src: typing.Union[pathlib.Path, typing.IO] + src: pathlib.Path | typing.IO type: str path: str extension: str -IOI_KEYS: typing.Tuple[str, ...] = IOInfo._fields +IOI_KEYS: tuple[str, ...] = IOInfo._fields PathOrIOT = typing.Union[str, pathlib.Path, typing.IO] PathOrIOInfoT = typing.Union[PathOrIOT, IOInfo] - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/ioinfo/detectors.py b/src/anyconfig/ioinfo/detectors.py index 877a6653..03d804c3 100644 --- a/src/anyconfig/ioinfo/detectors.py +++ b/src/anyconfig/ioinfo/detectors.py @@ -1,15 +1,17 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH +# Copyright (C) 2012 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # """Provide functions to detect str, pathlib.Path, stream and IOInfo objects.""" +from __future__ import annotations + import pathlib import typing from . 
import datatypes -IOI_KEYS_LIST: typing.List[str] = sorted(datatypes.IOI_KEYS) +IOI_KEYS_LIST: list[str] = sorted(datatypes.IOI_KEYS) def is_path_str(obj: typing.Any) -> bool: @@ -24,13 +26,13 @@ def is_path_obj(obj: typing.Any) -> bool: def is_io_stream(obj: typing.Any) -> bool: """Test if given object ``obj`` is a file stream, file/file-like object.""" - return callable(getattr(obj, 'read', False)) + return callable(getattr(obj, "read", False)) def is_ioinfo(obj: typing.Any) -> bool: """Test if given object ``obj`` is an IOInfo namedtuple objejct.""" if isinstance(obj, tuple): - to_dict = getattr(obj, '_asdict', False) + to_dict = getattr(obj, "_asdict", False) if to_dict and callable(to_dict): keys = sorted(to_dict().keys()) return keys == IOI_KEYS_LIST @@ -40,6 +42,4 @@ def is_ioinfo(obj: typing.Any) -> bool: def is_stream(obj: typing.Any) -> bool: """Test if given object ``obj`` is an IOInfo object with stream type.""" - return getattr(obj, 'type', None) == datatypes.IOI_STREAM - -# vim:sw=4:ts=4:et: + return getattr(obj, "type", None) == datatypes.IOI_STREAM diff --git a/src/anyconfig/ioinfo/factory.py b/src/anyconfig/ioinfo/factory.py index 17577b1c..40669ea5 100644 --- a/src/anyconfig/ioinfo/factory.py +++ b/src/anyconfig/ioinfo/factory.py @@ -1,21 +1,26 @@ # -# Copyright (C) 2018 - 2021 Satoru SATOH +# Copyright (C) 2018 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=invalid-name """ioinfo.main to provide internal APIs used from other modules.""" +from __future__ import annotations + import pathlib import typing from . 
import constants, datatypes, detectors, utils +if typing.TYPE_CHECKING: + import collections.abc + def from_path_object(path: pathlib.Path) -> datatypes.IOInfo: """Get an IOInfo object made from :class:`pathlib.Path` object ``path``.""" (abs_path, file_ext) = utils.get_path_and_ext(path) return datatypes.IOInfo( - abs_path, datatypes.IOI_PATH_OBJ, str(abs_path), file_ext + abs_path, datatypes.IOI_PATH_OBJ, str(abs_path), file_ext, ) @@ -26,15 +31,15 @@ def from_path_str(path: str) -> datatypes.IOInfo: def from_io_stream(strm: typing.IO) -> datatypes.IOInfo: """Get an IOInfo object made from IO stream object ``strm``.""" - path: str = getattr(strm, 'name', '') + path: str = getattr(strm, "name", "") if path: (_path, file_ext) = utils.get_path_and_ext(pathlib.Path(path)) abs_path: str = str(_path) else: - (abs_path, file_ext) = (path, '') + (abs_path, file_ext) = (path, "") return datatypes.IOInfo( - strm, datatypes.IOI_STREAM, abs_path, file_ext + strm, datatypes.IOI_STREAM, abs_path, file_ext, ) @@ -50,14 +55,15 @@ def make(obj: typing.Any) -> datatypes.IOInfo: return from_path_object(obj) # Which is better? 
isinstance(obj, io.IOBase): - if getattr(obj, 'read', False): + if getattr(obj, "read", False): return from_io_stream(obj) raise ValueError(repr(obj)) -def make_itr(obj: typing.Any, marker: str = constants.GLOB_MARKER - ) -> typing.Iterator[datatypes.IOInfo]: +def make_itr( + obj: typing.Any, marker: str = constants.GLOB_MARKER, +) -> collections.abc.Iterator[datatypes.IOInfo]: """Make and yield a series of :class:`datatypes.IOInfo` objects.""" if isinstance(obj, datatypes.IOInfo): yield obj @@ -75,13 +81,11 @@ def make_itr(obj: typing.Any, marker: str = constants.GLOB_MARKER else: for item in obj: - for ioi in make_itr(item, marker=marker): - yield ioi + yield from make_itr(item, marker=marker) -def makes(obj: typing.Any, marker: str = constants.GLOB_MARKER - ) -> typing.List[datatypes.IOInfo]: +def makes( + obj: typing.Any, marker: str = constants.GLOB_MARKER, +) -> list[datatypes.IOInfo]: """Make and return a list of :class:`datatypes.IOInfo` objects.""" return list(make_itr(obj, marker=marker)) - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/ioinfo/utils.py b/src/anyconfig/ioinfo/utils.py index ebba6e27..b1ae8929 100644 --- a/src/anyconfig/ioinfo/utils.py +++ b/src/anyconfig/ioinfo/utils.py @@ -1,8 +1,10 @@ # -# Copyright (C) 2012 - 2023 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Utility funtions for anyconfig.ionfo.""" +from __future__ import annotations + import itertools import locale import pathlib @@ -11,50 +13,53 @@ from .constants import GLOB_MARKER, PATH_SEP +if typing.TYPE_CHECKING: + import collections.abc + def get_encoding() -> str: """Get the (prefered) encoding or 'utf-8'.""" - return (locale.getpreferredencoding() or 'UTF-8').lower() + return (locale.getpreferredencoding() or "UTF-8").lower() -def get_path_and_ext(path: pathlib.Path) -> typing.Tuple[pathlib.Path, str]: +def get_path_and_ext(path: pathlib.Path) -> tuple[pathlib.Path, str]: """Normaliez path objects and retunr it with file 
extension.""" try: abs_path = path.expanduser().resolve() except (RuntimeError, OSError) as exc: - warnings.warn(f'Failed to resolve {path!s}, exc={exc!r}', stacklevel=2) + warnings.warn(f"Failed to resolve {path!s}, exc={exc!r}", stacklevel=2) abs_path = path file_ext = path.suffix return ( abs_path, - file_ext[1:] if file_ext.startswith('.') else '' + file_ext[1:] if file_ext.startswith(".") else "", ) -def expand_from_path(path: pathlib.Path, - marker: str = GLOB_MARKER - ) -> typing.Iterator[pathlib.Path]: +def expand_from_path( + path: pathlib.Path, marker: str = GLOB_MARKER, +) -> collections.abc.Iterator[pathlib.Path]: """Expand ``path`` contains '*' in its path str.""" if not path.is_absolute(): path = path.resolve() idx_part = list( - enumerate(itertools.takewhile(lambda p: marker not in p, path.parts)) + enumerate( + itertools.takewhile(lambda p: marker not in p, path.parts), + ), )[-1] if not idx_part: - raise ValueError(f'It should not happen: {path!r}') + msg = f"It should not happen: {path!r}" + raise ValueError(msg) idx = idx_part[0] + 1 if len(path.parts) > idx: base = pathlib.Path(path.parts[0]).joinpath(*path.parts[:idx]) pattern = PATH_SEP.join(path.parts[idx:]) - for epath in sorted(base.glob(pattern)): - yield epath + yield from sorted(base.glob(pattern)) else: # No marker was found. yield path - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/models/processor.py b/src/anyconfig/models/processor.py index 76d7ead7..f6970483 100644 --- a/src/anyconfig/models/processor.py +++ b/src/anyconfig/models/processor.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2018 - 2021 Satoru SATOH +# Copyright (C) 2018 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Abstract processor module. @@ -8,14 +8,21 @@ - Add to abstract processors such like Parsers (loaders and dumpers). """ +from __future__ import annotations + import typing +if typing.TYPE_CHECKING: + import builtins + class Processor: """Abstract processor class to provide basic implementation. 
- _type: type indicates data types it can process - - _priority: Priority to select it if there are others of same type + - _priority: + Priority to select it if there are others of same type from 0 (lowest) + to 99 (highest) - _extensions: File extensions of data type it can process .. note:: @@ -23,10 +30,10 @@ class Processor: in most cases, I think. """ - _cid: str = '' - _type: str = '' - _priority: int = 0 # 0 (lowest priority) .. 99 (highest priority) - _extensions: typing.List[str] = [] + _cid: typing.ClassVar[str] = "" + _type: typing.ClassVar[str] = "" + _priority: typing.ClassVar[int] = 0 + _extensions: tuple[str, ...] = () @classmethod def cid(cls) -> str: @@ -44,20 +51,25 @@ def priority(cls) -> int: return cls._priority @classmethod - def extensions(cls) -> typing.List[str]: + def extensions(cls) -> tuple[str, ...]: """Get the list of file extensions of files it can process.""" return cls._extensions @classmethod - def __eq__(cls, other) -> bool: + def __eq__( + cls, other: builtins.type[Processor], # type: ignore[override] + ) -> bool: """Test equality.""" return cls.cid() == other.cid() + @classmethod + def __hash__(cls) -> int: + """Test equality.""" + return hash(cls.cid()) + def __str__(self) -> str: """Provide a string representation.""" return ( - f' +# Copyright (C) 2011 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Misc simple parsers.""" +from __future__ import annotations + import re import typing import warnings +if typing.TYPE_CHECKING: + import collections.abc + -INT_PATTERN: typing.Pattern = re.compile(r"^(\d|([1-9]\d+))$") -FLOAT_PATTERN: typing.Pattern = re.compile(r"^\d+[\.]\d+$") -BOOL_TRUE_PATTERN: typing.Pattern = re.compile(r"^true$", re.I) -BOOL_FALSE_PATTERN: typing.Pattern = re.compile(r"^false$", re.I) -STR_PATTERN: typing.Pattern = re.compile(r"^['\"](.*)['\"]$") +INT_PATTERN: re.Pattern = re.compile(r"^(\d|([1-9]\d+))$") +FLOAT_PATTERN: re.Pattern = re.compile(r"^\d+[\.]\d+$") +BOOL_TRUE_PATTERN: re.Pattern = 
re.compile(r"^true$", re.IGNORECASE) +BOOL_FALSE_PATTERN: re.Pattern = re.compile(r"^false$", re.IGNORECASE) +STR_PATTERN: re.Pattern = re.compile(r"^['\"](.*)['\"]$") PrimitiveT = typing.Union[str, int, float, bool] -PrimitivesT = typing.List[PrimitiveT] +PrimitivesT = list[PrimitiveT] -def parse_single(str_: typing.Optional[str]) -> PrimitiveT: +def parse_single( # noqa: PLR0911 + str_: str | None, +) -> PrimitiveT: """Parse an expression gives a primitive value.""" if str_ is None: - return '' + return "" str_ = str_.strip() if not str_: - return '' + return "" if BOOL_TRUE_PATTERN.match(str_) is not None: return True @@ -46,7 +53,7 @@ def parse_single(str_: typing.Optional[str]) -> PrimitiveT: return str_ -def parse_list(str_: str, sep: str = ',') -> PrimitivesT: +def parse_list(str_: str, sep: str = ",") -> PrimitivesT: """Parse an expression gives a list of values. An expression ``str_`` might contain a list of str-es separated with @@ -55,11 +62,12 @@ def parse_list(str_: str, sep: str = ',') -> PrimitivesT: return [parse_single(x) for x in str_.split(sep) if x] -AttrValsT = typing.Tuple[str, typing.Union[PrimitivesT, PrimitiveT]] +AttrValsT = tuple[str, typing.Union[PrimitivesT, PrimitiveT]] -def attr_val_itr(str_: str, avs_sep: str = ':', vs_sep: str = ',', - as_sep: str = ';') -> typing.Iterator[AttrValsT]: +def attr_val_itr( + str_: str, avs_sep: str = ":", vs_sep: str = ",", as_sep: str = ";", +) -> collections.abc.Iterator[AttrValsT]: """Parse a list of atrribute and value pairs. This is a helper function for parse_attrlist_0. 
@@ -70,7 +78,7 @@ def attr_val_itr(str_: str, avs_sep: str = ':', vs_sep: str = ',', :param as_sep: char to separate attributes """ for rel in parse_list(str_, as_sep): - rel = typing.cast(str, rel) + rel = typing.cast("str", rel) if avs_sep not in rel or rel.endswith(avs_sep): continue @@ -78,21 +86,22 @@ def attr_val_itr(str_: str, avs_sep: str = ':', vs_sep: str = ',', if _rest: warnings.warn( - f'Extra strings {_rest!s} in {rel!s}' - f'It should be in the form of attr{avs_sep}value.', - stacklevel=2 + f"Extra strings {_rest!s} in {rel!s}" + f"It should be in the form of attr{avs_sep}value.", + stacklevel=2, ) - _attr = typing.cast(str, _attr) + _attr = typing.cast("str", _attr) if vs_sep in str(_values): - yield (_attr, parse_list(typing.cast(str, _values), vs_sep)) + yield (_attr, parse_list(typing.cast("str", _values), vs_sep)) elif _values: - yield (_attr, typing.cast(PrimitiveT, _values)) + yield (_attr, typing.cast("PrimitiveT", _values)) -def parse_attrlist_0(str_: str, avs_sep: str = ':', vs_sep: str = ',', - as_sep: str = ';') -> typing.List[AttrValsT]: +def parse_attrlist_0( + str_: str, avs_sep: str = ":", vs_sep: str = ",", as_sep: str = ";", +) -> list[AttrValsT]: """Parse a list of atrribute and value pairs. This is a helper function for parse_attrlist. @@ -113,11 +122,11 @@ def parse_attrlist_0(str_: str, avs_sep: str = ':', vs_sep: str = ',', return list(attr_val_itr(str_, avs_sep, vs_sep, as_sep)) -AttrValsDictT = typing.Dict[str, typing.Union[PrimitivesT, PrimitiveT]] +AttrValsDictT = dict[str, typing.Union[PrimitivesT, PrimitiveT]] -def parse_attrlist(str_: str, avs_sep: str = ':', vs_sep: str = ',', - as_sep: str = ';') -> AttrValsDictT: +def parse_attrlist(str_: str, avs_sep: str = ":", vs_sep: str = ",", + as_sep: str = ";") -> AttrValsDictT: """Parse a list of atrribute and value pairs. 
The expressions to parse should be in the form of @@ -131,16 +140,10 @@ def parse_attrlist(str_: str, avs_sep: str = ':', vs_sep: str = ',', return dict(parse_attrlist_0(str_, avs_sep, vs_sep, as_sep)) -ResultsT = typing.Union[ - PrimitiveT, - PrimitivesT, - AttrValsDictT -] - - -def parse(str_: typing.Optional[str], - lsep: str = ',', avsep: str = ':', vssep: str = ',', - avssep: str = ';') -> ResultsT: +def parse( + str_: typing.Optional[str], lsep: str = ",", avsep: str = ":", + vssep: str = ",", avssep: str = ";", +) -> PrimitiveT | PrimitivesT | AttrValsDictT: """Very simple generic parser.""" if str_ is None or not str_: return parse_single(str_) @@ -151,5 +154,3 @@ def parse(str_: typing.Optional[str], return parse_list(str_, lsep) return parse_single(str_) - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/parsers/__init__.py b/src/anyconfig/parsers/__init__.py index c04a120d..70353e6c 100644 --- a/src/anyconfig/parsers/__init__.py +++ b/src/anyconfig/parsers/__init__.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2021 Satoru SATOH +# Copyright (C) 2021 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Utilities to list and find appropriate parser class objects and instances. 
@@ -14,12 +14,10 @@ """ from .utils import ( load_plugins, list_types, list_by_cid, list_by_type, list_by_extension, - findall, find, MaybeParserT + findall, find, MaybeParserT, ) __all__ = [ - 'load_plugins', 'list_types', 'list_by_cid', 'list_by_type', - 'list_by_extension', 'findall', 'find', 'MaybeParserT' + "load_plugins", "list_types", "list_by_cid", "list_by_type", + "list_by_extension", "findall", "find", "MaybeParserT", ] - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/parsers/parsers.py b/src/anyconfig/parsers/parsers.py index 2253417f..7f3f88fc 100644 --- a/src/anyconfig/parsers/parsers.py +++ b/src/anyconfig/parsers/parsers.py @@ -1,11 +1,11 @@ # -# Copyright (C) 2011 - 2021 Satoru SATOH +# Copyright (C) 2011 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # Suppress import positions after some global variables are defined # pylint: disable=wrong-import-position """Provide config parser objects aggregated.""" -import typing +from __future__ import annotations from ..backend import ParserClssT, PARSERS from ..processors import Processors @@ -15,14 +15,11 @@ class Parsers(Processors, Singleton): """Manager class for parsers.""" - _pgroup: str = 'anyconfig_backends' + _pgroup: str = "anyconfig_backends" - def __init__(self, prcs: typing.Optional[ParserClssT] = None - ) -> None: + def __init__(self, prcs: ParserClssT | None = None) -> None: """Initialize with PARSERS.""" if prcs is None: prcs = PARSERS super().__init__(prcs) - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/parsers/utils.py b/src/anyconfig/parsers/utils.py index c63f55d4..7eef19ed 100644 --- a/src/anyconfig/parsers/utils.py +++ b/src/anyconfig/parsers/utils.py @@ -1,10 +1,10 @@ # -# Copyright (C) 2012 - 2023 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # -# FIXME: -# mypy: disable-error-code=type-var """Internal APIs to load, list and find parser class objects.""" +from __future__ import annotations + import typing from ..backend import 
ParserT, ParsersT @@ -15,7 +15,7 @@ MaybeParserT = typing.Optional[ - typing.Union[str, ParserT, typing.Type[ParserT]] + typing.Union[str, ParserT, type[ParserT]] ] @@ -24,29 +24,30 @@ def load_plugins() -> None: Parsers().load_plugins() -def list_types() -> typing.List[str]: +def list_types() -> list[str]: """List supported processor types.""" - return sorted(Parsers().list_x('type')) + return sorted(Parsers().list_x("type")) -def list_by_cid() -> typing.List[typing.Tuple[str, ParsersT]]: +def list_by_cid() -> list[tuple[str, ParsersT]]: """List processors by each cid.""" - return Parsers().list_by_x('cid') + return Parsers().list_by_x("cid") -def list_by_type() -> typing.List[typing.Tuple[str, ParsersT]]: +def list_by_type() -> list[tuple[str, ParsersT]]: """List processor by eacch type.""" - return Parsers().list_by_x('type') + return Parsers().list_by_x("type") -def list_by_extension() -> typing.List[typing.Tuple[str, ParsersT]]: +def list_by_extension() -> list[tuple[str, ParsersT]]: """List processor by file extension supported.""" - return Parsers().list_by_x('extensions') + return Parsers().list_by_x("extensions") -def findall(obj: typing.Optional['ioinfo.PathOrIOInfoT'] = None, - forced_type: typing.Optional[str] = None - ) -> typing.List[ParserT]: +def findall( + obj: ioinfo.PathOrIOInfoT | None = None, + forced_type: str | None = None, +) -> list[ParserT]: """Find out processor objects can process data from given ``obj``. ``obj`` may be a file path, file or file-like object, pathlib.Path object @@ -63,8 +64,10 @@ def findall(obj: typing.Optional['ioinfo.PathOrIOInfoT'] = None, return Parsers().findall(obj, forced_type=forced_type) -def find(obj: typing.Optional['ioinfo.PathOrIOInfoT'] = None, - forced_type: MaybeParserT = None) -> ParserT: +def find( + obj: ioinfo.PathOrIOInfoT | None = None, + forced_type: str | ParserT | type[ParserT] | None = None, +) -> ParserT: """Very similar to the above :func:`findall`. 
However it returns *a processor object* instead of a list of processor @@ -80,5 +83,3 @@ def find(obj: typing.Optional['ioinfo.PathOrIOInfoT'] = None, :raises: ValueError, UnknownProcessorTypeError, UnknownFileTypeError """ return Parsers().find(obj, forced_type=forced_type) - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/processors/__init__.py b/src/anyconfig/processors/__init__.py index 421df67e..1c59ec5d 100644 --- a/src/anyconfig/processors/__init__.py +++ b/src/anyconfig/processors/__init__.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2021 Satoru SATOH +# Copyright (C) 2021 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Provide a list of a :class:`anyconfig.models.processor` and so on. @@ -13,17 +13,15 @@ - Add to abstract processors such like Parsers (loaders and dumpers). """ from .datatypes import ( - ProcT, ProcClsT, ProcClssT, MaybeProcT + ProcT, ProcClsT, ProcClssT, MaybeProcT, ) from .processors import Processors from .utils import ( - list_by_x, load_plugins + list_by_x, load_plugins, ) __all__ = [ - 'ProcT', 'ProcClsT', 'ProcClssT', 'MaybeProcT', - 'Processors', - 'list_by_x', 'load_plugins', + "ProcT", "ProcClsT", "ProcClssT", "MaybeProcT", + "Processors", + "list_by_x", "load_plugins", ] - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/processors/datatypes.py b/src/anyconfig/processors/datatypes.py index ef3264e9..b33af1ca 100644 --- a/src/anyconfig/processors/datatypes.py +++ b/src/anyconfig/processors/datatypes.py @@ -1,18 +1,18 @@ # -# Copyright (C) 2018 - 2023 Satoru SATOH +# Copyright (C) 2018 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # """Common functions and variables.""" +from __future__ import annotations + import typing from ..models import processor -ProcT = typing.TypeVar('ProcT', bound=processor.Processor) -ProcsT = typing.List[ProcT] -ProcClsT = typing.Type[ProcT] -ProcClssT = typing.List[ProcClsT] +ProcT = typing.TypeVar("ProcT", bound=processor.Processor) +ProcsT = list[ProcT] +ProcClsT = type[ProcT] +ProcClssT = 
list[ProcClsT] MaybeProcT = typing.Optional[typing.Union[str, ProcT, ProcClsT]] - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/processors/processors.py b/src/anyconfig/processors/processors.py index e7ca6613..62b23f0e 100644 --- a/src/anyconfig/processors/processors.py +++ b/src/anyconfig/processors/processors.py @@ -1,28 +1,30 @@ # -# Copyright (C) 2018 - 2023 Satoru SATOH +# Copyright (C) 2018 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # -# FIXME: # mypy: disable-error-code=type-var """A collection of models.processor.Processor and children classes.""" +from __future__ import annotations + import operator import typing from . import utils -from .datatypes import ( - ProcT, ProcsT, ProcClsT, ProcClssT, MaybeProcT -) if typing.TYPE_CHECKING: + import builtins + from .datatypes import ( + ProcT, ProcsT, ProcClsT, ProcClssT, MaybeProcT, + ) from .. import ioinfo class Processors: """An abstract class of which instance holding processors.""" - _pgroup: str = '' # processor group name to load plugins + _pgroup: str = "" # processor group name to load plugins - def __init__(self, processors: typing.Optional[ProcClssT] = None) -> None: + def __init__(self, processors: ProcClssT | None = None) -> None: """Initialize with ``processors``. :param processors: @@ -30,7 +32,9 @@ def __init__(self, processors: typing.Optional[ProcClssT] = None) -> None: children class objects to initialize this, or None """ # {: } - self._processors: typing.Dict[str, ProcT] = {} # type: ignore + self._processors: dict[ # type: ignore[valid-type] + str, ProcT, + ] = {} # type: ignore[valid-type] if processors is not None: for pcls in processors: self.register(pcls) @@ -48,7 +52,7 @@ def load_plugins(self) -> None: for pcls in utils.load_plugins(self._pgroup): self.register(pcls) - def list(self, sort: bool = False) -> ProcClssT: + def list(self, *, sort: bool = False) -> ProcClssT: """List processors. 
:param sort: Result will be sorted if it's True @@ -56,11 +60,11 @@ def list(self, sort: bool = False) -> ProcClssT: """ prs = self._processors.values() if sort: - return sorted(prs, key=operator.methodcaller('cid')) + return sorted(prs, key=operator.methodcaller("cid")) return list(prs) - def list_by_cid(self) -> typing.List[typing.Tuple[str, ProcsT]]: + def list_by_cid(self) -> builtins.list[tuple[str, ProcsT]]: """List processors by those IDs. :return: @@ -68,20 +72,23 @@ def list_by_cid(self) -> typing.List[typing.Tuple[str, ProcsT]]: each cid, [(cid, [:class:`Processor`)]] """ prs = self._processors - return sorted(((cid, [prs[cid]]) for cid in sorted(prs.keys())), - key=operator.itemgetter(0)) + return sorted( + ((cid, [prs[cid]]) for cid in sorted(prs.keys())), + key=operator.itemgetter(0), + ) - def list_by_type(self) -> typing.List[typing.Tuple[str, ProcsT]]: + def list_by_type(self) -> builtins.list[tuple[str, ProcsT]]: """List processors by those types. :return: A list of :class:`Processor` or its children classes grouped by each type, [(type, [:class:`Processor`)]] """ - return utils.list_by_x(self.list(), 'type') # type: ignore + return utils.list_by_x(self.list(), "type") - def list_by_x(self, item: typing.Optional[str] = None - ) -> typing.List[typing.Tuple[str, ProcsT]]: + def list_by_x( + self, item: str | None = None, + ) -> builtins.list[tuple[str, ProcsT]]: """List processors by those factor 'x'. :param item: Grouping key, one of 'cid', 'type' and 'extensions' @@ -91,36 +98,46 @@ def list_by_x(self, item: typing.Optional[str] = None """ prs = self._processors - if item is None or item == 'cid': # Default. + if item is None or item == "cid": # Default. 
res = [(cid, [prs[cid]]) for cid in sorted(prs.keys())] - elif item in ('type', 'extensions'): - res = utils.list_by_x(prs.values(), typing.cast(str, item)) + elif item in ("type", "extensions"): + res = utils.list_by_x(prs.values(), typing.cast("str", item)) else: - raise ValueError("keyword argument 'item' must be one of " - "None, 'cid', 'type' and 'extensions' " - f"but it was '{item}'") + msg = ( + "keyword argument 'item' must be one of " + "None, 'cid', 'type' and 'extensions' " + f"but it was '{item}'" + ) + raise ValueError(msg) + return res - def list_x(self, key: typing.Optional[str] = None) -> typing.List[str]: + def list_x(self, key: str | None = None) -> builtins.list[str]: """List the factor 'x' of processors. :param key: Which of key to return from 'cid', 'type', and 'extention' :return: A list of x 'key' """ - if key in ('cid', 'type'): - return sorted(set(operator.methodcaller(key)(p) - for p in self._processors.values())) - if key == 'extension': - return sorted(k for k, _v in self.list_by_x('extensions')) - - raise ValueError("keyword argument 'key' must be one of " - "None, 'cid', 'type' and 'extension' " - f"but it was '{key}'") - - def findall(self, obj: typing.Optional['ioinfo.PathOrIOInfoT'], - forced_type: typing.Optional[str] = None - ) -> typing.List[ProcT]: + if key in ("cid", "type"): + return sorted( + {operator.methodcaller(key)(p) + for p in self._processors.values()}, + ) + if key == "extension": + return sorted(k for k, _v in self.list_by_x("extensions")) + + msg = ( + "keyword argument 'key' must be one of " + "None, 'cid', 'type' and 'extension' " + f"but it was '{key}'" + ) + raise ValueError(msg) + + def findall( + self, obj: ioinfo.PathOrIOInfoT | None, + forced_type: str | None = None, + ) -> builtins.list[ProcT]: """Find all of the processors match with tthe given conditions. 
:param obj: @@ -133,8 +150,10 @@ def findall(self, obj: typing.Optional['ioinfo.PathOrIOInfoT'], """ return utils.findall(obj, self.list(), forced_type=forced_type) - def find(self, obj: typing.Optional['ioinfo.PathOrIOInfoT'], - forced_type: MaybeProcT = None) -> ProcT: + def find( + self, obj: ioinfo.PathOrIOInfoT | None, + forced_type: MaybeProcT = None, + ) -> ProcT: """Find the processor best match with tthe given conditions. :param obj: @@ -148,5 +167,3 @@ def find(self, obj: typing.Optional['ioinfo.PathOrIOInfoT'], :raises: ValueError, UnknownProcessorTypeError, UnknownFileTypeError """ return utils.find(obj, self.list(), forced_type=forced_type) - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/processors/utils.py b/src/anyconfig/processors/utils.py index e606b7dd..8764e344 100644 --- a/src/anyconfig/processors/utils.py +++ b/src/anyconfig/processors/utils.py @@ -1,55 +1,66 @@ # -# Copyright (C) 2018 - 2023 Satoru SATOH +# Copyright (C) 2018 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=unidiomatic-typecheck # -# FIXME: +# TODO(ssato): #189 fix the mypy error, type-var. # mypy: disable-error-code=type-var """Utility functions for anyconfig.processors.""" +from __future__ import annotations + +import contextlib import operator import typing import warnings import importlib.metadata -from .. import common, ioinfo, models, utils -from .datatypes import ( - ProcT, ProcsT, ProcClsT, MaybeProcT +from .. import ( + common, ioinfo, models, utils, ) +if typing.TYPE_CHECKING: + import collections.abc + + from .datatypes import ( + ProcT, ProcsT, ProcClsT, MaybeProcT, + ) + -def sort_by_prio(prs: typing.Iterable[ProcT]) -> ProcsT: +def sort_by_prio(prs: collections.abc.Iterable[ProcT]) -> ProcsT: """Sort an iterable of processor classes by each priority. 
:param prs: A list of :class:`anyconfig.models.processor.Processor` classes :return: Sambe as above but sorted by priority """ - return sorted(prs, key=operator.methodcaller('priority'), reverse=True) + return sorted(prs, key=operator.methodcaller("priority"), reverse=True) def select_by_key( - items: typing.Iterable[ - typing.Tuple[typing.List[str], typing.Any]], - sort_fn: typing.Callable[..., typing.Any] = sorted -) -> typing.List[ - typing.Tuple[str, typing.List[typing.Any]] -]: + items: collections.abc.Iterable[ + tuple[tuple[str, ...], typing.Any] + ], + sort_fn: collections.abc.Callable[..., typing.Any] = sorted, +) -> list[tuple[str, list[typing.Any]]]: """Select items from ``items`` by key. :param items: A list of tuples of keys and values, [([key], val)] :return: A list of tuples of key and values, [(key, [val])] - >>> select_by_key([(['a', 'aaa'], 1), (['b', 'bb'], 2), (['a'], 3)]) + >>> select_by_key([(["a", "aaa"], 1), (["b", "bb"], 2), (["a"], 3)]) [('a', [1, 3]), ('aaa', [1]), ('b', [2]), ('bb', [2])] """ itr = utils.concat(((k, v) for k in ks) for ks, v in items) - return list((k, sort_fn(t[1] for t in g)) - for k, g in utils.groupby(itr, operator.itemgetter(0))) + return [ + (k, sort_fn(t[1] for t in g)) + for k, g in utils.groupby(itr, operator.itemgetter(0)) + ] -def list_by_x(prs: typing.Iterable[ProcT], key: str - ) -> typing.List[typing.Tuple[str, ProcsT]]: +def list_by_x( + prs: collections.abc.Iterable[ProcT], key: str, +) -> list[tuple[str, ProcsT]]: """List items by the factor 'x'. 
:param key: Grouping key, 'type' or 'extensions' @@ -57,27 +68,26 @@ def list_by_x(prs: typing.Iterable[ProcT], key: str A list of :class:`Processor` or its children classes grouped by given 'item', [(cid, [:class:`Processor`)]] by default """ - if key == 'type': + if key == "type": kfn = operator.methodcaller(key) res = sorted(((k, sort_by_prio(g)) for k, g in utils.groupby(prs, kfn)), key=operator.itemgetter(0)) - elif key == 'extensions': - res: typing.List[ # type: ignore - typing.Tuple[str, ProcsT] - ] = select_by_key(((p.extensions(), p) for p in prs), - sort_fn=sort_by_prio) - else: - raise ValueError( - f"Argument 'key' must be 'type' or 'extensions' but it was '{key}'" + elif key == "extensions": + res = select_by_key( + ((p.extensions(), p) for p in prs), sort_fn=sort_by_prio, ) + else: + msg = f"Argument 'key' must be 'type' or 'extensions' [{key}]" + raise ValueError(msg) return res -def findall_with_pred(predicate: typing.Callable[..., bool], - prs: ProcsT) -> ProcsT: +def findall_with_pred( + predicate: collections.abc.Callable[..., bool], prs: ProcsT, +) -> ProcsT: """Find all of the items match with given predicates. :param predicate: any callable to filter results @@ -85,12 +95,13 @@ def findall_with_pred(predicate: typing.Callable[..., bool], :return: A list of appropriate processor classes or [] """ return sorted((p for p in prs if predicate(p)), - key=operator.methodcaller('priority'), reverse=True) + key=operator.methodcaller("priority"), reverse=True) -def maybe_processor(type_or_id: typing.Union[ProcT, ProcClsT], - cls: ProcClsT = models.processor.Processor - ) -> typing.Optional[ProcT]: +def maybe_processor( + type_or_id: ProcT | ProcClsT, + cls: ProcClsT = models.processor.Processor, +) -> ProcT | None: """Try to get the processor. 
:param type_or_id: @@ -103,11 +114,10 @@ def maybe_processor(type_or_id: typing.Union[ProcT, ProcClsT], if isinstance(type_or_id, cls): return type_or_id - try: - if issubclass(typing.cast(ProcClsT, type_or_id), cls): - return type_or_id() # type: ignore - except TypeError: - pass + with contextlib.suppress(TypeError): + maybe_cls = typing.cast("ProcClsT", type_or_id) + if issubclass(maybe_cls, cls): + return maybe_cls() return None @@ -122,7 +132,7 @@ def find_by_type_or_id(type_or_id: str, prs: ProcsT) -> ProcsT: processor 'type_or_id' found by its ID :raises: anyconfig.common.UnknownProcessorTypeError """ - def pred(pcls): + def pred(pcls: ProcT) -> bool: """Provide a predicate.""" return pcls.cid() == type_or_id or pcls.type() == type_or_id @@ -141,13 +151,14 @@ def find_by_fileext(fileext: str, prs: ProcsT) -> ProcsT: :return: A list of processor class to processor files with given extension :raises: common.UnknownFileTypeError """ - def pred(pcls): + def pred(pcls: ProcT) -> bool: """Provide a predicate.""" return fileext in pcls.extensions() pclss = findall_with_pred(pred, prs) if not pclss: - raise common.UnknownFileTypeError(f'file extension={fileext}') + msg = f"file extension={fileext}" + raise common.UnknownFileTypeError(msg) return pclss # :: [Processor], never [] @@ -166,9 +177,10 @@ def find_by_maybe_file(obj: ioinfo.PathOrIOInfoT, prs: ProcsT) -> ProcsT: return find_by_fileext(ioinfo.make(obj).extension, prs) -def findall(obj: typing.Optional[ioinfo.PathOrIOInfoT], prs: ProcsT, - forced_type: typing.Optional[str] = None, - ) -> ProcsT: +def findall( + obj: ioinfo.PathOrIOInfoT | None, prs: ProcsT, + forced_type: str | None = None, +) -> ProcsT: """Find all of the processors match with the conditions. 
:param obj: @@ -185,23 +197,26 @@ class or None common.UnknownFileTypeError """ if (obj is None or not obj) and forced_type is None: - raise ValueError( + msg = ( "The first argument 'obj' or the second argument 'forced_type' " "must be something other than None or False." ) + raise ValueError(msg) if forced_type is None: - pclss = find_by_maybe_file(typing.cast(ioinfo.PathOrIOInfoT, obj), - prs) # :: [Processor], never [] + pclss = find_by_maybe_file( + typing.cast("ioinfo.PathOrIOInfoT", obj), prs, + ) # :: [Processor], never [] else: pclss = find_by_type_or_id(forced_type, prs) # Do. return pclss -def find(obj: typing.Optional[ioinfo.PathOrIOInfoT], prs: ProcsT, - forced_type: MaybeProcT = None, - ) -> ProcT: +def find( + obj: ioinfo.PathOrIOInfoT | None, prs: ProcsT, + forced_type: MaybeProcT = None, +) -> ProcT: """Find the processors best match with the conditions. :param obj: @@ -221,19 +236,22 @@ class or :class:`anyconfig.models.processor.Processor` class object or """ if forced_type is not None and not isinstance(forced_type, str): proc = maybe_processor( - typing.cast(typing.Union[ProcT, ProcClsT], forced_type) + typing.cast("ProcT | ProcClsT", forced_type), ) if proc is None: - raise ValueError('Wrong processor class or instance ' - f'was given: {forced_type!r}') + msg = ( + "Wrong processor class or instance " + f"was given: {forced_type!r}" + ) + raise ValueError(msg) return proc - procs = findall(obj, prs, forced_type=typing.cast(str, forced_type)) + procs = findall(obj, prs, forced_type=typing.cast("str", forced_type)) return procs[0] -def load_plugins(pgroup: str) -> typing.Iterator[ProcClsT]: +def load_plugins(pgroup: str) -> collections.abc.Iterator[ProcClsT]: """Load processor plugins. A generator function to yield a class object of @@ -242,11 +260,10 @@ def load_plugins(pgroup: str) -> typing.Iterator[ProcClsT]: :param pgroup: A string represents plugin type, e.g. 
anyconfig_backends """ eps = importlib.metadata.entry_points() - for res in (eps.get(pgroup, []) if isinstance(eps, dict) + for res in (eps.get(pgroup, []) # type: ignore[attr-defined] + if isinstance(eps, dict) else eps.select(group=pgroup)): try: yield res.load() - except ImportError as exc: - warnings.warn(f'Failed to load plugin, exc={exc!s}', stacklevel=2) - -# vim:sw=4:ts=4:et: + except ImportError as exc: # noqa: PERF203 + warnings.warn(f"Failed to load plugin, exc={exc!s}", stacklevel=2) diff --git a/src/anyconfig/query/__init__.py b/src/anyconfig/query/__init__.py index 33f6fbef..d3ebd3d6 100644 --- a/src/anyconfig/query/__init__.py +++ b/src/anyconfig/query/__init__.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2021 Satoru SATOH +# Copyright (C) 2021 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # """Public API to query data with JMESPath expression.""" @@ -8,11 +8,9 @@ SUPPORTED = True except ImportError: from .default import try_query - SUPPORTED = False # type: ignore + SUPPORTED = False __all__ = [ - 'try_query', + "try_query", ] - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/query/default.py b/src/anyconfig/query/default.py index f738969a..1f9593dd 100644 --- a/src/anyconfig/query/default.py +++ b/src/anyconfig/query/default.py @@ -1,15 +1,20 @@ # -# Copyright (C) 2021 Satoru SATOH +# Copyright (C) 2021 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=unused-argument """Provide dummy implementation of anyconfig.query.*.""" -from ..common import InDataExT -from .datatypes import MaybeJexp +from __future__ import annotations +import typing -def try_query(data: InDataExT, jexp: MaybeJexp = None, **options) -> InDataExT: +if typing.TYPE_CHECKING: + from ..common import InDataExT + from .datatypes import MaybeJexp + + +def try_query( + data: InDataExT, jexp: MaybeJexp = None, **options: typing.Any, +) -> InDataExT: """Provide a dummy implementation of :func:`anyconfig.query.try_query`.""" return data - -# vim:sw=4:ts=4:et: diff --git 
a/src/anyconfig/query/query.py b/src/anyconfig/query/query.py index a13da6c4..db694ffe 100644 --- a/src/anyconfig/query/query.py +++ b/src/anyconfig/query/query.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2017 - 2021 Satoru SATOH +# Copyright (C) 2017 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=bare-except @@ -11,43 +11,50 @@ - Added to query config data with JMESPath expression, http://jmespath.org """ +from __future__ import annotations + import typing import warnings import jmespath -from ..common import ( - InDataExT, InDataT -) from ..utils import is_dict_like -from .datatypes import MaybeJexp +if typing.TYPE_CHECKING: + from .datatypes import MaybeJexp + + from ..common import ( + InDataExT, InDataT, + ) -def try_query(data: InDataExT, jexp: MaybeJexp = None, **options) -> InDataExT: +def try_query( + data: InDataExT, jexp: MaybeJexp = None, + **options: typing.Any, +) -> InDataExT: """Try to query data with JMESPath expression `jexp`.""" if jexp is None or not jexp: return data if not is_dict_like(data): # Some primitive types like int, str. warnings.warn( - 'Could not query because given data is not ' - f'a mapping object (type? {type(data)}', - stacklevel=2 + "Could not query because given data is not " + f"a mapping object (type? {type(data)}", + stacklevel=2, ) return data (odata, exc) = query( - typing.cast(InDataT, data), typing.cast(str, jexp), **options + typing.cast("InDataT", data), typing.cast("str", jexp), **options, ) if exc: raise exc - return odata # type: ignore + return odata -def query(data: InDataT, jexp: str, **_options - ) -> typing.Tuple[typing.Optional[InDataT], - typing.Optional[Exception]]: +def query( + data: InDataT, jexp: str, **_options: typing.Any, +) -> tuple[InDataT | None, Exception | None]: """Filter data with given JMESPath expression. See also: https://github.com/jmespath/jmespath.py and http://jmespath.org. 
@@ -58,15 +65,10 @@ def query(data: InDataT, jexp: str, **_options :return: A tuple of query result and maybe exception if failed """ - exc: typing.Optional[Exception] = None + exc: Exception | None = None try: pexp = jmespath.compile(jexp) return (pexp.search(data), exc) except ValueError as exc: # jmespath.exceptions.*Error inherit from it. return (data, exc) - - except BaseException: # noqa: E722 - return (None, exc) - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/schema/__init__.py b/src/anyconfig/schema/__init__.py index c35d80e3..dca6434b 100644 --- a/src/anyconfig/schema/__init__.py +++ b/src/anyconfig/schema/__init__.py @@ -1,18 +1,30 @@ # -# Copyright (C) 2021 Satoru SATOH +# Copyright (C) 2021 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # -"""Misc global constants, variables, classes and so on.""" +"""Schema generation and validation.""" +from __future__ import annotations + +from .jsonschema.generator import gen_schema + try: - from .jsonschema import validate, is_valid, gen_schema + from .jsonschema.validator import validate, is_valid + + VALIDATORS = { + "jsonschema": validate, + } SUPPORTED: bool = True except ImportError: - from .default import validate, is_valid, gen_schema - SUPPORTED = False # type: ignore + from .default import validate, is_valid # noqa: F401 + VALIDATORS = {} + SUPPORTED = False -__all__ = [ - 'validate', 'is_valid', 'gen_schema', 'SUPPORTED' -] +GENERATORS = { + "jsonschema": gen_schema, +} -# vim:sw=4:ts=4:et: +_all__ = [ + "validate", "is_valid", "gen_schema", + "VALIDATORS", "GENERATORS", "SUPPORTED", +] diff --git a/src/anyconfig/schema/datatypes.py b/src/anyconfig/schema/datatypes.py index 0c448497..c51ab65c 100644 --- a/src/anyconfig/schema/datatypes.py +++ b/src/anyconfig/schema/datatypes.py @@ -1,11 +1,16 @@ # -# Copyright (C) 2021 Satoru SATOH +# Copyright (C) 2021 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # +# pylint: disable=unused-import """Some common constants, utility functions and so on.""" 
+from __future__ import annotations + import typing +from ..common import ( # noqa: F401 + ValidationError, InDataT, InDataExT, +) -ResultT = typing.Tuple[bool, typing.Union[str, typing.List[str]]] -# vim:sw=4:ts=4:et: +ResultT = tuple[bool, typing.Union[str, list[str]]] diff --git a/src/anyconfig/schema/default.py b/src/anyconfig/schema/default.py index 87acf175..10bfffe9 100644 --- a/src/anyconfig/schema/default.py +++ b/src/anyconfig/schema/default.py @@ -1,33 +1,39 @@ # -# Copyright (C) 2015 - 2021 Satoru SATOH +# Copyright (C) 2015 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=unused-argument """Default (dummy) implementation.""" +from __future__ import annotations + import typing -from ..common import ( - InDataT, InDataExT -) -from .datatypes import ResultT +from .jsonschema import generator + +if typing.TYPE_CHECKING: + from .datatypes import ( + InDataT, InDataExT, ResultT, + ) -def validate(data: InDataExT, schema: InDataT, ac_schema_safe: bool = True, - ac_schema_errors: bool = False, **options: typing.Any - ) -> ResultT: +def validate( + data: InDataExT, schema: InDataExT, *, + ac_schema_safe: bool = True, ac_schema_errors: bool = False, + **options: typing.Any, +) -> ResultT: """Provide a dummy function does not validate at all in actual.""" - return (True, 'Validation module (jsonschema) is not available') + return (True, "Validation module (jsonschema) is not available") -def is_valid(data: InDataExT, schema: InDataT, ac_schema_safe: bool = True, - ac_schema_errors: bool = False, **options) -> bool: +def is_valid( + data: InDataExT, schema: InDataExT, *, + ac_schema_safe: bool = True, ac_schema_errors: bool = False, + **options: typing.Any, +) -> bool: """Provide a dummy function never raise exceptions.""" return True -def gen_schema(data: InDataExT, **options) -> InDataT: +def gen_schema(data: InDataExT, **options: typing.Any) -> InDataT: """Provide a dummy function generates an empty dict in actual.""" - return {} - - -# 
vim:sw=4:ts=4:et: + return generator.gen_schema(data, **options) diff --git a/src/anyconfig/schema/jsonschema.py b/src/anyconfig/schema/jsonschema.py deleted file mode 100644 index e3f68527..00000000 --- a/src/anyconfig/schema/jsonschema.py +++ /dev/null @@ -1,217 +0,0 @@ -# -# Copyright (C) 2015 - 2021 Satoru SATOH -# SPDX-License-Identifier: MIT -# -"""Implementation using jsonschema provides the following functions. - -- validate(data: typing.Dict[str, typing.Any], - schema: typing.Dict[str, typing.Any], - ac_schema_safe: bool = True, ac_schema_errors: bool = False, - **options) -> typing.Tuple[bool, str]: - validate with schema - -- gen_schema(data: typing.Dict[str, typing.Any], - **options) -> typing.Dict[str, typing.Any]: - Generate an object represents a schema -""" -import typing -import warnings - -import jsonschema - -from ..common import ( - ValidationError, InDataExT, InDataT -) -from ..utils import ( - filter_options, is_dict_like, is_list_like -) -from .datatypes import ResultT - - -def _validate_all(data: InDataExT, schema: InDataT, **_options) -> ResultT: - """Do all of the validation checks. - - See the description of :func:`validate` for more details of parameters and - return value. - - :seealso: https://python-jsonschema.readthedocs.io/en/latest/validate/, - a section of 'iter_errors' especially - """ - vldtr = jsonschema.Draft7Validator(schema) # :raises: SchemaError, ... - errors = list(vldtr.iter_errors(data)) - - return (not errors, [err.message for err in errors]) - - -def _validate(data: InDataExT, schema: InDataT, ac_schema_safe: bool = True, - **options: typing.Any) -> ResultT: - """Validate ``data`` with ``schema``. - - See the description of :func:`validate` for more details of parameters and - return value. - - Validate target object 'data' with given schema object. 
- """ - try: - jsonschema.validate( - data, schema, format_checker=jsonschema.draft7_format_checker, - **options - ) - except (jsonschema.ValidationError, jsonschema.SchemaError, - Exception) as exc: - if ac_schema_safe: - return (False, str(exc)) # Validation was failed. - raise - - return (True, '') - - -def validate(data: InDataExT, schema: InDataT, ac_schema_safe: bool = True, - ac_schema_errors: bool = False, **options: typing.Any - ) -> ResultT: - """Validate target object with given schema object. - - See also: https://python-jsonschema.readthedocs.org/en/latest/validate/ - - :parae data: Target object (a dict or a dict-like object) to validate - :param schema: Schema object (a dict or a dict-like object) - instantiated from schema JSON file or schema JSON string - :param options: Other keyword options such as: - - - ac_schema_safe: Exception (jsonschema.ValidationError or - jsonschema.SchemaError or others) will be thrown during validation - process due to any validation or related errors. However, these will - be catched by default, and will be re-raised if this value is False. - - - ac_schema_errors: Lazily yield each of the validation errors and - returns all of them if validation fails. 
- - :return: (True if validation succeeded else False, error message[s]) - """ - options = filter_options(('cls', ), options) - if ac_schema_errors: - return _validate_all(data, schema, **options) - - return _validate(data, schema, ac_schema_safe, **options) - - -def is_valid(data: InDataExT, schema: InDataT, ac_schema_safe: bool = True, - ac_schema_errors: bool = False, **options) -> bool: - """Raise ValidationError if ``data`` was invalidated by schema `schema`.""" - if schema is None or not schema: - return True - - (_success, error_or_errors) = validate( - data, schema, ac_schema_safe=True, - ac_schema_errors=ac_schema_errors, **options - ) - if error_or_errors: - msg = f'scm={schema!s}, err={error_or_errors!s}' - if ac_schema_safe: - warnings.warn(msg, stacklevel=2) - return False - - raise ValidationError(msg) - - return True - - -_SIMPLETYPE_MAP: typing.Dict[typing.Any, str] = { - list: 'array', tuple: 'array', bool: 'boolean', int: 'integer', float: - 'number', dict: 'object', str: 'string' -} - - -def _process_options(**options): - """Help to process keyword arguments passed to gen_schema. - - :return: A tuple of (typemap :: dict, strict :: bool) - """ - return (options.get('ac_schema_typemap', _SIMPLETYPE_MAP), - bool(options.get('ac_schema_strict', False))) - - -def array_to_schema(iarr: typing.Iterable[InDataT], **options - ) -> typing.Dict[str, typing.Any]: - """Generate a JSON schema object with type annotation added for ``iaa```. 
- - :param arr: Array of mapping objects like dicts - :param options: Other keyword options such as: - - - ac_schema_strict: True if more strict (precise) schema is needed - - ac_schema_typemap: Type to JSON schema type mappings - - :return: Another mapping objects represents JSON schema of items - """ - (typemap, strict) = _process_options(**options) - - arr: typing.List[InDataT] = list(iarr) - scm = { - 'type': typemap[list], - 'items': gen_schema(arr[0] if arr else 'str', **options) - } - if strict: - nitems = len(arr) - scm['minItems'] = nitems - scm['uniqueItems'] = len(set(arr)) == nitems - - return scm - - -def object_to_schema(obj: InDataT, **options) -> InDataT: - """Generate a node represents JSON schema object for ``obj``. - - Type annotation will be added for given object node at the same time. - - :param obj: mapping object such like a dict - :param options: Other keyword options such as: - - - ac_schema_strict: True if more strict (precise) schema is needed - - ac_schema_typemap: Type to JSON schema type mappings - - :yield: Another mapping objects represents JSON schema of object - """ - (typemap, strict) = _process_options(**options) - - props = dict((k, gen_schema(v, **options)) for k, v in obj.items()) - scm = {'type': typemap[dict], 'properties': props} - if strict: - scm['required'] = sorted(props.keys()) - - return scm - - -_SIMPLE_TYPES = (bool, int, float, str) - - -def gen_schema(data: InDataExT, **options) -> InDataT: - """Generate a JSON schema object validates ``data``. 
- - :param data: Configuration data object (dict[-like] or namedtuple) - :param options: Other keyword options such as: - - - ac_schema_strict: True if more strict (precise) schema is needed - - ac_schema_typemap: Type to JSON schema type mappings - - :return: A dict represents JSON schema of this node - """ - if data is None: - return {'type': 'null'} - - _type = type(data) - - if _type in _SIMPLE_TYPES: - typemap = options.get('ac_schema_typemap', _SIMPLETYPE_MAP) - scm = {'type': typemap[_type]} - - elif is_dict_like(data): - scm = object_to_schema(data, **options) # type: ignore - - elif is_list_like(data): - scm = array_to_schema( - typing.cast(typing.Iterable[InDataT], data), **options - ) - - return scm - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/schema/jsonschema/__init__.py b/src/anyconfig/schema/jsonschema/__init__.py new file mode 100644 index 00000000..f0c3f2c5 --- /dev/null +++ b/src/anyconfig/schema/jsonschema/__init__.py @@ -0,0 +1 @@ +"""jsonschema generator and validator.""" diff --git a/src/anyconfig/schema/jsonschema/generator.py b/src/anyconfig/schema/jsonschema/generator.py new file mode 100644 index 00000000..cafd348d --- /dev/null +++ b/src/anyconfig/schema/jsonschema/generator.py @@ -0,0 +1,140 @@ +# +# Copyright (C) 2015 - 2026 Satoru SATOH +# SPDX-License-Identifier: MIT +# +"""JSON schema generator.""" +from __future__ import annotations + +import typing + +from ... import utils + +if typing.TYPE_CHECKING: + import collections.abc + from ..datatypes import ( + InDataExT, InDataT, + ) + + +_TYPE_MAP: dict[type, str] = { + bool: "boolean", + dict: "object", + float: "number", + int: "integer", + list: "array", + str: "string", + tuple: "array", +} + + +def _process_options( + **options: typing.Any, +) -> tuple[dict[typing.Any, typing.Any], bool]: + """Help to process keyword arguments passed to gen_schema. 
+ + :return: A tuple of (typemap :: dict, strict :: bool) + """ + return ( + options.get("ac_schema_typemap", _TYPE_MAP), + bool(options.get("ac_schema_strict", False)), + ) + + +def array_to_schema( + iarr: collections.abc.Iterable[InDataExT], *, + ac_schema_typemap: dict[type, str] | None = None, + ac_schema_strict: bool = False, + **options: typing.Any, +) -> InDataT: + """Generate a JSON schema object with type annotation added for ``iaa```. + + :param arr: Array of mapping objects like dicts + :param options: Other keyword options such as: + + - ac_schema_strict: True if more strict (precise) schema is needed + - ac_schema_typemap: Type to JSON schema type mappings + + :return: Another mapping objects represents JSON schema of items + """ + arr: list[InDataExT] = list(iarr) + typemap = ac_schema_typemap or _TYPE_MAP + scm: dict[str, typing.Any] = { + "type": typemap[list], + "items": gen_schema( + arr[0] if arr else "str", + ac_schema_strict=ac_schema_strict, + **options, + ), + } + if ac_schema_strict: + nitems = len(arr) + scm["minItems"] = nitems + scm["uniqueItems"] = len(set(arr)) == nitems + + return scm + + +def object_to_schema( + obj: InDataT, *, + ac_schema_typemap: dict[type, str] | None = None, + ac_schema_strict: bool = False, + **options: typing.Any, +) -> InDataT: + """Generate a node represents JSON schema object for ``obj``. + + Type annotation will be added for given object node at the same time. 
+ + :param obj: mapping object such like a dict + :param options: Other keyword options such as: + + - ac_schema_strict: True if more strict (precise) schema is needed + - ac_schema_typemap: Type to JSON schema type mappings + + :yield: Another mapping objects represents JSON schema of object + """ + typemap = ac_schema_typemap or _TYPE_MAP + + props = { + k: gen_schema( + v, + ac_schema_typemap=ac_schema_typemap, + ac_schema_strict=ac_schema_strict, + **options, + ) + for k, v in obj.items() + } + scm = {"type": typemap[dict], "properties": props} + if ac_schema_strict: + scm["required"] = sorted(props.keys()) + + return scm + + +def gen_schema( + data: InDataExT, **options: typing.Any, +) -> InDataT: + """Generate a JSON schema object validates ``data``. + + :param data: Configuration data object (dict[-like] or namedtuple) + :param options: Other keyword options such as: + + - ac_schema_strict: True if more strict (precise) schema is needed + - ac_schema_typemap: Type to JSON schema type mappings + + :return: A dict represents JSON schema of this node + """ + if data is None: + return {"type": "null"} + + typemap = options.get("ac_schema_typemap", False) or _TYPE_MAP + + if utils.is_primitive_type(data): + scm = {"type": typemap[type(data)]} + + elif utils.is_dict_like(data): + scm = object_to_schema(data, **options) + + elif utils.is_list_like(data): + scm = array_to_schema(data, **options) + + return scm diff --git a/src/anyconfig/schema/jsonschema/validator.py b/src/anyconfig/schema/jsonschema/validator.py new file mode 100644 index 00000000..1032b283 --- /dev/null +++ b/src/anyconfig/schema/jsonschema/validator.py @@ -0,0 +1,129 @@ +# +# Copyright (C) 2015 - 2026 Satoru SATOH +# SPDX-License-Identifier: MIT +# +"""JSON schema validator.""" +from __future__ import annotations + +import typing +import warnings + +import jsonschema + +from ... 
import utils +from ..datatypes import ValidationError, InDataT + +if typing.TYPE_CHECKING: + try: + from typing import TypeGuard + except ImportError: + from typing_extensions import TypeGuard + + from .datatypes import ( + InDataExT, ResultT, + ) + + +def is_valid_schema_object(maybe_scm: InDataExT) -> TypeGuard[InDataT]: + """Determine given object ``maybe_scm`` is an expected schema object.""" + return maybe_scm and utils.is_dict_like(maybe_scm) + + +def _validate_all( + data: InDataExT, schema: InDataT, **_options: typing.Any, +) -> ResultT: + """Do all of the validation checks. + + See the description of :func:`validate` for more details of parameters and + return value. + + :seealso: https://python-jsonschema.readthedocs.io/en/latest/validate/, + a section of 'iter_errors' especially + """ + vldtr = jsonschema.Draft7Validator(schema) # :raises: SchemaError, ... + errors = list(vldtr.iter_errors(data)) + + return (not errors, [err.message for err in errors]) + + +def _validate( + data: InDataExT, schema: InDataT, *, + ac_schema_safe: bool = True, + **options: typing.Any, +) -> ResultT: + """Validate ``data`` with ``schema``. + + See the description of :func:`validate` for more details of parameters and + return value. + + Validate target object 'data' with given schema object. + """ + try: + jsonschema.validate(data, schema, **options) + except (jsonschema.ValidationError, jsonschema.SchemaError, + Exception) as exc: + if ac_schema_safe: + return (False, str(exc)) # Validation was failed. + raise + + return (True, "") + + +def validate( + data: InDataExT, schema: InDataExT, *, + ac_schema_safe: bool = True, + ac_schema_errors: bool = False, + **options: typing.Any, +) -> ResultT: + """Validate target object with given schema object. 
+ + See also: https://python-jsonschema.readthedocs.org/en/latest/validate/ + + :parae data: Target object (a dict or a dict-like object) to validate + :param schema: Schema object (a dict or a dict-like object) + instantiated from schema JSON file or schema JSON string + :param options: Other keyword options such as: + + - ac_schema_safe: Exception (jsonschema.ValidationError or + jsonschema.SchemaError or others) will be thrown during validation + process due to any validation or related errors. However, these will + be catched by default, and will be re-raised if this value is False. + + - ac_schema_errors: Lazily yield each of the validation errors and + returns all of them if validation fails. + + :return: (True if validation succeeded else False, error message[s]) + """ + if not is_valid_schema_object(schema): + return (False, f"Invalid schema object: {schema!r}") + + options = utils.filter_options(("cls", ), options) + if ac_schema_errors: + return _validate_all(data, typing.cast("InDataT", schema), **options) + + return _validate(data, schema, ac_schema_safe=ac_schema_safe, **options) + + +def is_valid( + data: InDataExT, schema: InDataExT, *, + ac_schema_safe: bool = True, + ac_schema_errors: bool = False, + **options: typing.Any, +) -> bool: + """Raise ValidationError if ``data`` was invalidated by schema `schema`.""" + if not is_valid_schema_object(schema): + return True + + (_success, error_or_errors) = validate( + data, schema, ac_schema_safe=True, + ac_schema_errors=ac_schema_errors, **options, + ) + if error_or_errors: + msg = f"scm={schema!s}, err={error_or_errors!s}" + if ac_schema_safe: + warnings.warn(msg, stacklevel=2) + return False + + raise ValidationError(msg) + + return True diff --git a/src/anyconfig/singleton.py b/src/anyconfig/singleton.py index 1e2d8f5f..63564008 100644 --- a/src/anyconfig/singleton.py +++ b/src/anyconfig/singleton.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2018 - 2021 Satoru SATOH +# Copyright (C) 2018 - 2024 Satoru SATOH # 
SPDX-License-Identifier: MIT # # pylint: disable=too-few-public-methods @@ -9,7 +9,16 @@ - Add to make a kind of manager instancne later to manage plugins. """ +from __future__ import annotations + import threading +import typing + +if typing.TYPE_CHECKING: + try: + from typing import Self + except ImportError: + from typing_extensions import Self class Singleton: @@ -21,7 +30,7 @@ class Singleton: __instance = None __lock = threading.RLock() - def __new__(cls): + def __new__(cls) -> Self: """Override class constructor to cache class objects.""" if cls.__instance is None: with cls.__lock: @@ -29,5 +38,3 @@ def __new__(cls): cls.__instance = object.__new__(cls) return cls.__instance - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/template/__init__.py b/src/anyconfig/template/__init__.py index e949dbff..cace10f9 100644 --- a/src/anyconfig/template/__init__.py +++ b/src/anyconfig/template/__init__.py @@ -1,21 +1,30 @@ # -# Copyright (C) 2021 Satoru SATOH +# Copyright (C) 2021 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Misc global constants, variables, classes and so on.""" +from __future__ import annotations + +import typing + try: from .jinja2 import try_render SUPPORTED: bool = True except ImportError: # jinja2 may not be available. - SUPPORTED = False # type: ignore + SUPPORTED = False - def try_render(*_args, **_kwargs) -> None: # type: ignore + def try_render( + filepath: str | None = None, + content: str | None = None, + **_options: typing.Any, + ) -> str | None: """Provide a dummy function does nothing but returns None.""" + if filepath and content: + return None + return None __all__ = [ - 'try_render', + "try_render", ] - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/template/jinja2.py b/src/anyconfig/template/jinja2.py index a9a6bb6e..79a7b01b 100644 --- a/src/anyconfig/template/jinja2.py +++ b/src/anyconfig/template/jinja2.py @@ -1,7 +1,7 @@ # # Jinja2 (http://jinja.pocoo.org) based template renderer. 
# -# Copyright (C) 2012 - 2023 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=wrong-import-position,wrong-import-order @@ -9,6 +9,9 @@ Template rendering module for jinja2-based template config files. """ +from __future__ import annotations + +import collections.abc import locale import pathlib import os @@ -22,16 +25,23 @@ # .. seealso:: jinja2.loaders.FileSystemLoader.__init__ -PathsT = typing.List[typing.Union[str, pathlib.Path]] +PathsT = collections.abc.Sequence[typing.Union[str, pathlib.Path]] MaybePathsT = typing.Optional[PathsT] -MaybeContextT = typing.Optional[typing.Dict[str, typing.Any]] -MaybeFiltersT = typing.Optional[typing.Iterable[typing.Callable]] +MaybeContextT = typing.Optional[dict[str, typing.Any]] +MaybeFiltersT = typing.Optional[ + collections.abc.Iterable[collections.abc.Callable] +] -RENDER_S_OPTS: typing.List[str] = ['ctx', 'paths', 'filters'] -RENDER_OPTS = RENDER_S_OPTS + ['ask'] +RENDER_S_OPTS: tuple[str, ...] = ( + "ctx", "paths", "filters", + "autoescape", +) +RENDER_OPTS = (*RENDER_S_OPTS, "ask") -def tmpl_env(paths: MaybePathsT = None) -> jinja2.Environment: +def tmpl_env( + paths: MaybePathsT = None, *, autoescape: bool = True, +) -> jinja2.Environment: """Get the template environment object from given ``paths``. :param paths: A list of template search paths @@ -40,13 +50,14 @@ def tmpl_env(paths: MaybePathsT = None) -> jinja2.Environment: paths = [] return jinja2.Environment( - loader=jinja2.FileSystemLoader([str(p) for p in paths]) + loader=jinja2.FileSystemLoader([str(p) for p in paths]), + autoescape=autoescape, # noqa: S701 ) -def make_template_paths(template_file: pathlib.Path, - paths: MaybePathsT = None - ) -> typing.List[pathlib.Path]: +def make_template_paths( + template_file: pathlib.Path, paths: MaybePathsT = None, +) -> list[pathlib.Path]: """Make a template paths. 
Make up a list of template search paths from given ``template_file`` path @@ -64,10 +75,10 @@ def make_template_paths(template_file: pathlib.Path, return [tmpldir] -def render_s(tmpl_s: str, ctx: MaybeContextT = None, - paths: MaybePathsT = None, - filters: MaybeFiltersT = None - ) -> str: +def render_s( + tmpl_s: str, ctx: MaybeContextT = None, paths: MaybePathsT = None, + filters: MaybeFiltersT = None, *, autoescape: bool = True, +) -> str: """Render a template as a str. Compile and render given template string 'tmpl_s' with context 'context'. @@ -78,9 +89,9 @@ def render_s(tmpl_s: str, ctx: MaybeContextT = None, :param filters: Custom filters to add into template engine :return: Compiled result (str) - >>> render_s('aaa') == 'aaa' + >>> render_s("aaa") == "aaa" True - >>> s = render_s('a = {{ a }}, b = "{{ b }}"', {'a': 1, 'b': 'bbb'}) + >>> s = render_s("a = {{ a }}, b = '{{ b }}'", {"a": 1, "b": "bbb"}) >>> assert s == 'a = 1, b = "bbb"' """ if paths is None: @@ -88,11 +99,11 @@ def render_s(tmpl_s: str, ctx: MaybeContextT = None, # .. 
seealso:: jinja2.environment._environment_sanity_check try: - env = tmpl_env(paths) + env = tmpl_env(paths, autoescape=autoescape) except AssertionError as exc: warnings.warn( - f'Something went wrong with: paths={paths!r}, exc={exc!s}', - stacklevel=2 + f"Something went wrong with: paths={paths!r}, exc={exc!s}", + stacklevel=2, ) return tmpl_s @@ -102,16 +113,21 @@ def render_s(tmpl_s: str, ctx: MaybeContextT = None, if ctx is None: ctx = {} - return typing.cast(jinja2.Environment, tmpl_env(paths) - ).from_string(tmpl_s).render(**ctx) + return typing.cast( + "jinja2.Environment", + tmpl_env(paths, autoescape=autoescape), + ).from_string(tmpl_s).render(**ctx) -_ENCODING: str = (locale.getpreferredencoding() or 'utf-8').lower() +_ENCODING: str = (locale.getpreferredencoding() or "utf-8").lower() -def render_impl(template_file: pathlib.Path, ctx: MaybeContextT = None, - paths: MaybePathsT = None, filters: MaybeFiltersT = None - ) -> str: +def render_impl( + template_file: pathlib.Path, ctx: MaybeContextT = None, + paths: MaybePathsT = None, filters: MaybeFiltersT = None, + *, + autoescape: bool = True, +) -> str: """Render implementation. 
:param template_file: Absolute or relative path to the template file @@ -119,10 +135,14 @@ def render_impl(template_file: pathlib.Path, ctx: MaybeContextT = None, :param filters: Custom filters to add into template engine :return: Compiled result (str) """ - env = tmpl_env(make_template_paths(template_file, paths)) # type: ignore + env = tmpl_env( + make_template_paths(template_file, paths), + autoescape=autoescape, + ) if env is None: - return open(template_file, encoding=_ENCODING).read() + with pathlib.Path(template_file).open(encoding=_ENCODING) as fio: + return fio.read() if filters is not None: env.filters.update(filters) @@ -133,10 +153,12 @@ def render_impl(template_file: pathlib.Path, ctx: MaybeContextT = None, return env.get_template(pathlib.Path(template_file).name).render(**ctx) -def render(filepath: str, ctx: MaybeContextT = None, - paths: MaybePathsT = None, - ask: bool = False, - filters: MaybeFiltersT = None) -> str: +def render( + filepath: str, ctx: MaybeContextT = None, + paths: MaybePathsT = None, *, + ask: bool = False, + filters: MaybeFiltersT = None, +) -> str: """Compile and render template and return the result as a string. :param template_file: Absolute or relative path to the template file @@ -155,17 +177,19 @@ def render(filepath: str, ctx: MaybeContextT = None, usr_tmpl = input( f"{os.linesep}*** Missing template '{mtmpl}'. Please enter " - "absolute or relative path starts from '.' to the template file: " + "absolute or relative path starts from '.' 
to the template file: ", ) usr_tmpl_2 = pathlib.Path(usr_tmpl.strip()).resolve() paths_2 = make_template_paths(usr_tmpl_2, paths) - return render_impl(usr_tmpl_2, ctx, paths_2, filters) # type: ignore + return render_impl(usr_tmpl_2, ctx, paths_2, filters) -def try_render(filepath: typing.Optional[str] = None, - content: typing.Optional[str] = None, - **options) -> typing.Optional[str]: +def try_render( + filepath: str | None = None, + content: str | None = None, + **options: typing.Any, +) -> str | None: """Compile and render template and return the result as a string. :param filepath: Absolute or relative path to the template file @@ -174,28 +198,23 @@ def try_render(filepath: typing.Optional[str] = None, :return: Compiled result (str) or None """ if filepath is None and content is None: - raise ValueError("Either 'path' or 'content' must be some value!") + msg = "Either 'path' or 'content' must be some value!" + raise ValueError(msg) try: if content is None: render_opts = utils.filter_options(RENDER_OPTS, options) - return render(typing.cast(str, filepath), **render_opts) + return render(typing.cast("str", filepath), **render_opts) render_s_opts = utils.filter_options(RENDER_S_OPTS, options) return render_s(content, **render_s_opts) except Exception as exc: # pylint: disable=broad-except - if filepath: - tmpl_s = filepath - else: - tmpl_s = typing.cast(str, content)[:10] + ' ...' - + tmpl_s = filepath or typing.cast("str", content)[:10] + " ..." warnings.warn( f"Failed to compile '{tmpl_s!r}'. 
It may not be " - f'a template.{os.linesep}, exc={exc!s}, ' - f'filepath={filepath}, options={options!r}', - stacklevel=2 + f"a template.{os.linesep}, exc={exc!s}, " + f"filepath={filepath}, options={options!r}", + stacklevel=2, ) return None - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/utils/__init__.py b/src/anyconfig/utils/__init__.py index 16967f04..e1f4caff 100644 --- a/src/anyconfig/utils/__init__.py +++ b/src/anyconfig/utils/__init__.py @@ -1,9 +1,13 @@ # -# Copyright (C) 2021 Satoru SATOH +# Copyright (C) 2021 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Internal utility functions for anyconfig modules. +.. versionchanged:: 0.14.0 + + - Add 'is_primitive_type' to test if given object is primiitive type. + .. versionchanged:: 0.10.2 - Split and re-organize the module and export only some functions. @@ -13,22 +17,20 @@ - Add to abstract processors such like Parsers (loaders and dumpers). """ from .detectors import ( - is_iterable, is_dict_like, is_list_like + is_primitive_type, is_iterable, is_dict_like, is_list_like, ) from .files import get_path_from_stream from .lists import ( - groupby, concat + groupby, concat, ) from .utils import ( - filter_options, noop + filter_options, noop, ) __all__ = [ - 'is_iterable', 'is_dict_like', 'is_list_like', - 'get_path_from_stream', - 'groupby', 'concat', - 'filter_options', 'noop', + "is_primitive_type", "is_iterable", "is_dict_like", "is_list_like", + "get_path_from_stream", + "groupby", "concat", + "filter_options", "noop", ] - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/utils/detectors.py b/src/anyconfig/utils/detectors.py index ff331cb7..8e0d46c1 100644 --- a/src/anyconfig/utils/detectors.py +++ b/src/anyconfig/utils/detectors.py @@ -1,21 +1,38 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH +# Copyright (C) 2012 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # """Functions to detect something.""" +from __future__ import annotations + import collections.abc import types import typing +if 
typing.TYPE_CHECKING: + try: + from typing import TypeGuard + except ImportError: + from typing_extensions import TypeGuard + + +PRIMITIVE_TYPES = (bool, int, float, str, bytes) +PrimitiveType = typing.Union[bool, int, float, str, bytes] + -def is_iterable(obj: typing.Any) -> bool: +def is_primitive_type(obj: typing.Any) -> TypeGuard[PrimitiveType]: + """Test if given object is a primitive type.""" + return type(obj) in PRIMITIVE_TYPES + + +def is_iterable(obj: typing.Any) -> TypeGuard[collections.abc.Iterable]: """Test if given object is an iterable object.""" return (isinstance(obj, (list, tuple, types.GeneratorType)) or (not isinstance(obj, (int, str, dict)) - and bool(getattr(obj, 'next', False)))) + and bool(getattr(obj, "next", False)))) -def is_dict_like(obj: typing.Any) -> bool: +def is_dict_like(obj: typing.Any) -> TypeGuard[dict]: """Test if given object ``obj`` is an dict.""" return isinstance(obj, (dict, collections.abc.Mapping)) # any others? @@ -23,9 +40,9 @@ def is_dict_like(obj: typing.Any) -> bool: _LIST_LIKE_TYPES = (collections.abc.Iterable, collections.abc.Sequence) -def is_list_like(obj: typing.Any) -> bool: +def is_list_like(obj: typing.Any) -> TypeGuard[collections.abc.Iterable]: """Test if given object ``obj`` is a list or -like one.""" - return isinstance(obj, _LIST_LIKE_TYPES) and \ - not (isinstance(obj, str) or is_dict_like(obj)) - -# vim:sw=4:ts=4:et: + return ( + isinstance(obj, _LIST_LIKE_TYPES) + and not (isinstance(obj, str) or is_dict_like(obj)) + ) diff --git a/src/anyconfig/utils/files.py b/src/anyconfig/utils/files.py index f4643219..bedc0454 100644 --- a/src/anyconfig/utils/files.py +++ b/src/anyconfig/utils/files.py @@ -3,16 +3,18 @@ # SPDX-License-Identifier: MIT # """Utility funtions to process file and file paths.""" +from __future__ import annotations + import pathlib import typing def is_io_stream(obj: typing.Any) -> bool: """Test if given object ``obj`` is an IO stream, file or -like object.""" - return 
callable(getattr(obj, 'read', False)) + return callable(getattr(obj, "read", False)) -def get_path_from_stream(strm: typing.IO, safe: bool = False) -> str: +def get_path_from_stream(strm: typing.IO, *, safe: bool = False) -> str: """Try to get file path from given file or file-like object 'strm'. :param strm: A file or file-like object might have its file path info @@ -20,15 +22,16 @@ def get_path_from_stream(strm: typing.IO, safe: bool = False) -> str: :raises: ValueError """ if not is_io_stream(strm) and not safe: - raise ValueError(f'It does not look a file[-like] object: {strm!r}') + msg = f"It does not look a file[-like] object: {strm!r}" + raise ValueError(msg) - path = getattr(strm, 'name', None) + path = getattr(strm, "name", None) if path is not None: try: return str(pathlib.Path(path).resolve()) except (TypeError, ValueError): pass - return '' + return "" # vim:sw=4:ts=4:et: diff --git a/src/anyconfig/utils/lists.py b/src/anyconfig/utils/lists.py index d510e06d..1185ce93 100644 --- a/src/anyconfig/utils/lists.py +++ b/src/anyconfig/utils/lists.py @@ -1,17 +1,22 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Misc utility routines for anyconfig module.""" +from __future__ import annotations + import itertools import typing +if typing.TYPE_CHECKING: + import collections.abc + def groupby( - itr: typing.Iterable[typing.Any], - key_fn: typing.Optional[typing.Callable[..., typing.Any]] = None -) -> typing.Iterable[ - typing.Tuple[typing.Any, typing.Iterable[typing.Any]] + itr: collections.abc.Iterable[typing.Any], + key_fn: collections.abc.Callable[..., typing.Any] | None = None, +) -> collections.abc.Iterable[ + tuple[typing.Any, collections.abc.Iterable[typing.Any]] ]: """Provide an wrapper function of itertools.groupby to sort each results. 
@@ -21,9 +26,8 @@ def groupby( return itertools.groupby(sorted(itr, key=key_fn), key=key_fn) -def concat(xss: typing.Iterable[typing.Iterable[typing.Any]] - ) -> typing.List[typing.Any]: +def concat( + xss: collections.abc.Iterable[collections.abc.Iterable[typing.Any]], +) -> list[typing.Any]: """Concatenates a list of lists.""" return list(itertools.chain.from_iterable(xs for xs in xss)) - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/utils/utils.py b/src/anyconfig/utils/utils.py index 21260707..852c1445 100644 --- a/src/anyconfig/utils/utils.py +++ b/src/anyconfig/utils/utils.py @@ -1,12 +1,19 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Misc utility functions.""" +from __future__ import annotations + import typing +if typing.TYPE_CHECKING: + import collections.abc + -def noop(val: typing.Any, *_args, **_kwargs) -> typing.Any: +def noop( + val: typing.Any, *_args: typing.Any, **_kwargs: typing.Any, +) -> typing.Any: """Do nothing. >>> noop(1) @@ -15,19 +22,18 @@ def noop(val: typing.Any, *_args, **_kwargs) -> typing.Any: return val -def filter_options(keys: typing.Iterable[str], - options: typing.Mapping[str, typing.Any] - ) -> typing.Dict[str, typing.Any]: +def filter_options( + keys: collections.abc.Iterable[str], + options: collections.abc.Mapping[str, typing.Any], +) -> dict[str, typing.Any]: """Filter 'options' with given 'keys'. 
:param keys: key names of optional keyword arguments :param options: optional keyword arguments to filter with 'keys' - >>> filter_options(("aaa", ), dict(aaa=1, bbb=2)) + >>> filter_options(("aaa", ), {"aaa": 1, "bbb": 2}) {'aaa': 1} - >>> filter_options(("aaa", ), dict(bbb=2)) + >>> filter_options(("aaa", ), {"bbb": 2}) {} """ - return dict((k, options[k]) for k in keys if k in options) - -# vim:sw=4:ts=4:et: + return {k: options[k] for k in keys if k in options} diff --git a/tests/api/dump/common.py b/tests/api/dump/common.py deleted file mode 100644 index 0397563b..00000000 --- a/tests/api/dump/common.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Copyright (C) 2021 Satoru SATOH -# License: MIT -# -# pylint: disable=missing-docstring -import unittest - -from ... import base - - -class BaseTestCase(base.TDataCollector, unittest.TestCase): - target = 'dump' - -# vim:sw=4:ts=4:et: diff --git a/tests/api/dump/test_basics.py b/tests/api/dump/test_basics.py index f15e64e8..d6c374fb 100644 --- a/tests/api/dump/test_basics.py +++ b/tests/api/dump/test_basics.py @@ -1,49 +1,63 @@ # -# Copyright (C) 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring -import pathlib -import tempfile +"""Basic test cases for anyconfig.api.dump.""" +from __future__ import annotations + +import typing + +import pytest import anyconfig.api._dump as TT from anyconfig.api import ( - UnknownProcessorTypeError, UnknownFileTypeError + UnknownFileTypeError, UnknownProcessorTypeError, ) -from . 
import common - - -class TestCase(common.BaseTestCase): - - def test_dump(self): - with tempfile.TemporaryDirectory() as tdir: - for data in self.each_data(): - out = pathlib.Path(tdir) / 'out.json' - TT.dump(data.inp, out, **data.opts) - self.assertEqual( - out.read_text().strip(), - data.exp.strip(), - f'{data.datadir!s}, {data.inp_path!s}' - ) - - def test_dump_intentional_failures(self): - with tempfile.TemporaryDirectory() as tdir: - for data in self.each_data(): - out = pathlib.Path(tdir) / 'out.json' - TT.dump(data.inp, out, **data.opts) - with self.assertRaises(AssertionError): - self.assertEqual(out.read_text().strip(), '') - - def test_dump_failure_ac_parser_was_not_given(self): - for data in self.each_data(): - with self.assertRaises(UnknownFileTypeError): - TT.dump(data.inp, 'dummy.txt') - - def test_dump_failure_invalid_ac_parser_was_given(self): - for data in self.each_data(): - with self.assertRaises(UnknownProcessorTypeError): - TT.dump(data.inp, 'dummy.json', ac_parser='invalid_id') - -# vim:sw=4:ts=4:et: +from ... import common + +if typing.TYPE_CHECKING: + import pathlib + + +NAMES: tuple[str, ...] = ("obj", "opts", "exp") + +# .. 
seealso:: tests.common.tdc +DATA_0: list = common.load_data_for_testfile(__file__, load_idata=True) +DATA_IDS: list[str] = common.get_test_ids(DATA_0) +DATA: list[tuple[typing.Any, dict, str]] = [ + (i, o, e.strip()) for _, i, o, e in DATA_0 +] + + +def test_data_is_defined_and_not_empty() -> None: + assert DATA + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_dump( + obj, opts: dict, exp, tmp_path: pathlib.Path, +) -> None: + out = tmp_path / "out.json" + TT.dump(obj, out, **opts) + assert out.read_text() == exp + + +@pytest.mark.parametrize(NAMES, DATA[:1], ids=DATA_IDS[:1]) +def test_dump_without_ac_parser_option( + obj, opts: dict, exp, +) -> None: + assert opts or exp + with pytest.raises(UnknownFileTypeError): + TT.dump(obj, "out.txt") + + +@pytest.mark.parametrize(NAMES, DATA[:1], ids=DATA_IDS[:1]) +def test_dump_with_invalid_ac_parser_option( + obj, opts: dict, exp, +) -> None: + assert opts or exp + with pytest.raises(UnknownProcessorTypeError): + TT.dump(obj, "out.json", ac_parser="invalid_id") diff --git a/tests/api/dumps/common.py b/tests/api/dumps/common.py deleted file mode 100644 index 27849ef7..00000000 --- a/tests/api/dumps/common.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Copyright (C) 2021 Satoru SATOH -# License: MIT -# -# pylint: disable=missing-docstring -import unittest - -from ... 
import base - - -class BaseTestCase(base.TDataCollector, unittest.TestCase): - target = 'dumps' - -# vim:sw=4:ts=4:et: diff --git a/tests/api/dumps/test_basics.py b/tests/api/dumps/test_basics.py index 897cd299..6274594a 100644 --- a/tests/api/dumps/test_basics.py +++ b/tests/api/dumps/test_basics.py @@ -1,38 +1,56 @@ # -# Copyright (C) 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring +"""Basic test cases for anyconfig.api.dumps.""" +from __future__ import annotations + +import typing + +import pytest + import anyconfig.api._dump as TT from anyconfig.api import UnknownProcessorTypeError -from . import common +from ... import common + + +NAMES: tuple[str, ...] = ("obj", "opts", "exp") + +# .. seealso:: tests.common.tdc +DATA_0: list = common.load_data_for_testfile(__file__, load_idata=True) +DATA_IDS: list[str] = common.get_test_ids(DATA_0) +DATA: list[tuple[typing.Any, dict, str]] = [ + (i, o, e.strip()) for _, i, o, e in DATA_0 +] + +def test_data() -> None: + assert DATA -class TestCase(common.BaseTestCase): - def test_dumps(self): - for data in self.each_data(): - self.assertEqual( - TT.dumps(data.inp, **data.opts).strip(), - data.exp.strip(), - f'{data.datadir!s}, {data.inp_path!s}' - ) +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_dumps( + obj, opts: dict, exp, +) -> None: + assert TT.dumps(obj, **opts) == exp - def test_dumps_intentional_failures(self): - for data in self.each_data(): - with self.assertRaises(AssertionError): - self.assertEqual(TT.dumps(data.inp, **data.opts).strip(), {}) - def test_dump_failure_ac_parser_was_not_given(self): - for data in self.each_data(): - with self.assertRaises(ValueError): - TT.dumps(data.inp) +@pytest.mark.parametrize(NAMES, DATA[:1], ids=DATA_IDS[:1]) +def test_dumps_without_ac_parser_option( + obj, opts: dict, exp, +) -> None: + assert opts or exp + with pytest.raises(ValueError, match="The first 
argument"): + TT.dumps(obj) - def test_dump_failure_invalid_ac_parser_was_given(self): - for data in self.each_data(): - with self.assertRaises(UnknownProcessorTypeError): - TT.dumps(data.inp, ac_parser='invalid_id') -# vim:sw=4:ts=4:et: +@pytest.mark.parametrize(NAMES, DATA[:1], ids=DATA_IDS[:1]) +def test_dumps_with_invalid_ac_parser_option( + obj, opts: dict, exp, +) -> None: + assert opts or exp + with pytest.raises(UnknownProcessorTypeError): + TT.dumps(obj, ac_parser="invalid_id") diff --git a/tests/api/load/common.py b/tests/api/load/common.py index 5edbe397..940f1181 100644 --- a/tests/api/load/common.py +++ b/tests/api/load/common.py @@ -1,22 +1,13 @@ # -# Copyright (C) 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring -import anyconfig.api._load as TT - - -class Basa: - @staticmethod - def target_fn(*args, **kwargs): - return TT.load(*args, **kwargs) - - -class MultiBase: - target: str = 'load/multi' - - -class SingleBase: - target: str = 'load/single' - -# vim:sw=4:ts=4:et: +# pylint: disable=unused-import +"""Common module for tests.api.load.""" +from __future__ import annotations + +from ...common import ( # noqa: F401 + get_test_ids, load_data_for_testfile, +) +from ..single_load.constants import LOADER_TYPES # noqa: F401 diff --git a/tests/res/dicts/get/30/s/10.txt b/tests/api/load/multi_load/__init__.py similarity index 100% rename from tests/res/dicts/get/30/s/10.txt rename to tests/api/load/multi_load/__init__.py diff --git a/tests/api/load/multi_load/common.py b/tests/api/load/multi_load/common.py new file mode 100644 index 00000000..5d6ed664 --- /dev/null +++ b/tests/api/load/multi_load/common.py @@ -0,0 +1,13 @@ +# +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +# pylint: disable=missing-docstring +# pylint: disable=unused-import +"""Common module for tests.api.load.""" +from __future__ import annotations + +from 
...multi_load.common import ( # noqa: F401 + NAMES, GLOB_PATTERN, + load_data_for_testfile, get_test_ids, +) diff --git a/tests/api/load/multi_load/test_basics.py b/tests/api/load/multi_load/test_basics.py new file mode 100644 index 00000000..71aabf7e --- /dev/null +++ b/tests/api/load/multi_load/test_basics.py @@ -0,0 +1,105 @@ +# +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +# pylint: disable=missing-docstring +"""Test cases for anyconfig.api.load (multi_load).""" +from __future__ import annotations + +import collections +import typing + +import pytest + +import anyconfig.api._load as TT + +from .common import ( + NAMES, GLOB_PATTERN, load_data_for_testfile, get_test_ids, +) + +if typing.TYPE_CHECKING: + import pathlib + + +DATA = load_data_for_testfile(__file__) +DATA_IDS: list[str] = get_test_ids(DATA) + +DATA_W_GLOB = [ + (inputs[0].parent / GLOB_PATTERN, opts, exp) + for inputs, opts, exp in DATA +] + + +def test_data() -> None: + assert DATA + + +def test_load_with_empty_list() -> None: + with pytest.raises(ValueError, match="Maybe invalid input"): + TT.load([]) + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_load_for_a_list_of_path_objects( + inputs: list[pathlib.Path], opts: dict, exp, +) -> None: + assert TT.load(inputs, **opts) == exp + assert TT.load((i for i in inputs), **opts) == exp + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_load_for_a_list_of_path_strings( + inputs: list[pathlib.Path], opts: dict, exp, +) -> None: + assert TT.load([str(i) for i in inputs], **opts) == exp + assert TT.load((str(i) for i in inputs), **opts) == exp + + +@pytest.mark.parametrize( + NAMES, DATA_W_GLOB, ids=get_test_ids(DATA_W_GLOB), +) +def test_load_for_glob_patterns( + inputs: list[pathlib.Path], opts: dict, exp, +) -> None: + assert TT.load(inputs, **opts) == exp + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_load_for_a_list_of_streams( + inputs: list[pathlib.Path], opts: 
dict, exp, +) -> None: + assert TT.load([i.open() for i in inputs], **opts) == exp + + +class MyDict(collections.OrderedDict): + """Custom dict-like object.""" + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_load_with_ac_dict_option( + inputs: list[pathlib.Path], opts: dict, exp, +) -> None: + res = TT.load(inputs, ac_dict=MyDict, **opts) + assert res == exp + assert isinstance(res, MyDict) + + +@pytest.mark.parametrize(NAMES, DATA[:1], ids=DATA_IDS[:1]) +def test_load_with_wrong_merge_strategy( + inputs: list[pathlib.Path], opts: dict, exp, +) -> None: + assert exp # dummy to avoid an error of unused argument. + with pytest.raises(ValueError, match="Wrong merge strategy"): + TT.load(inputs, ac_merge="wrong_merge_strategy", **opts) + + +def test_load_with_ignore_missing_option() -> None: + paths = [ + "/path/to/file_not_exist_0.json", + "/path/to/file_not_exist_1.json", + "/path/to/file_not_exist_2.json", + ] + with pytest.raises(FileNotFoundError, match="No such file or directory"): + TT.load(paths) + + assert TT.load(paths, ac_ignore_missing=True) == {} diff --git a/tests/api/load/multi_load/test_multi_types.py b/tests/api/load/multi_load/test_multi_types.py new file mode 100644 index 00000000..453e3342 --- /dev/null +++ b/tests/api/load/multi_load/test_multi_types.py @@ -0,0 +1,35 @@ +# +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +# pylint: disable=missing-docstring +"""Test cases for anyconfig.api.load with multi type inputs.""" +from __future__ import annotations + +import typing + +import pytest + +import anyconfig.api._load as TT + +from .common import ( + NAMES, load_data_for_testfile, get_test_ids, +) + +if typing.TYPE_CHECKING: + import pathlib + + +DATA = load_data_for_testfile(__file__) +DATA_IDS: list[str] = get_test_ids(DATA) + + +def test_data() -> None: + assert DATA + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_load( + inputs: list[pathlib.Path], opts: dict, exp, +) -> None: + 
assert TT.load(inputs, **opts) == exp diff --git a/tests/api/load/multi_load/test_query.py b/tests/api/load/multi_load/test_query.py new file mode 100644 index 00000000..fb3cb98d --- /dev/null +++ b/tests/api/load/multi_load/test_query.py @@ -0,0 +1,53 @@ +# +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +# pylint: disable=missing-docstring +"""Test cases for anyconfig.api.load with query options.""" +from __future__ import annotations + +import typing + +import pytest + +import anyconfig.api._load as TT +import anyconfig.query + +from .common import ( + load_data_for_testfile, get_test_ids, +) + +if typing.TYPE_CHECKING: + import pathlib + + +if not anyconfig.query.SUPPORTED: + pytest.skip( + "jmespath lib needed for query is not available.", + allow_module_level=True, + ) + +NAMES: tuple[str, ...] = ("inputs", "query", "exp") +DATA = load_data_for_testfile(__file__, values=(("q", ""), ("e", None))) +DATA_IDS: list[str] = get_test_ids(DATA) + + +def test_data() -> None: + assert DATA + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_load( + inputs: list[pathlib.Path], query: str, exp, +) -> None: + assert TT.load(inputs, ac_query=query) == exp + + +@pytest.mark.parametrize(NAMES, DATA[:1], ids=DATA_IDS[:1]) +def test_load_with_invalid_query( + inputs: list[pathlib.Path], query: str, exp, +) -> None: + assert query or exp # To avoid an error not using them. 
+ assert TT.load( + inputs, ac_query="", + ) == TT.load(inputs) diff --git a/tests/api/load/multi_load/test_schema.py b/tests/api/load/multi_load/test_schema.py new file mode 100644 index 00000000..bcdec417 --- /dev/null +++ b/tests/api/load/multi_load/test_schema.py @@ -0,0 +1,72 @@ +# +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +# pylint: disable=missing-docstring +"""Test cases for anyconfig.api.load with schema validation.""" +from __future__ import annotations + +import typing + +import pytest + +import anyconfig.api._load as TT +import anyconfig.schema + +from anyconfig.api import ValidationError + +from . import common + +if typing.TYPE_CHECKING: + import pathlib + + +if "jsonschema" not in anyconfig.schema.VALIDATORS: + pytest.skip( + "jsonschema lib is not available.", + allow_module_level=True, + ) + + +def scm_path_from_inputs(inputs: list[pathlib.Path]) -> pathlib.Path: + path = inputs[0] + name = path.name[:-len(path.suffix)] + return next((path.parent / "s").glob(f"{name}.*")) + + +NAMES: tuple[str, ...] 
= (*common.NAMES, "scm") +DATA = [ + (inputs, *rest, scm_path_from_inputs(inputs)) + for inputs, *rest in common.load_data_for_testfile(__file__) +] +DATA_IDS: list[str] = common.get_test_ids(DATA) + + +def test_data() -> None: + assert DATA + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_load( + inputs: list[pathlib.Path], opts: dict, exp, scm: pathlib.Path, +) -> None: + assert TT.load(inputs, ac_schema=scm, **opts) == exp + + +SCM_NG_0 = '{"type": "object", "properties": {"a": {"type": "string"}}}' + + +@pytest.mark.parametrize(NAMES, DATA[:1], ids=DATA_IDS[:1]) +def test_load_with_validation_failure( + inputs: list[pathlib.Path], opts: dict, exp, scm: pathlib.Path, + tmp_path: pathlib.Path, +) -> None: + assert exp or scm # dummy + + scm = tmp_path / "scm.json" + scm.write_text(SCM_NG_0) + + with pytest.raises(ValidationError): + TT.load( + inputs, ac_schema=scm, ac_schema_safe=False, **opts, + ) diff --git a/tests/api/load/multi_load/test_template.py b/tests/api/load/multi_load/test_template.py new file mode 100644 index 00000000..2a24aa05 --- /dev/null +++ b/tests/api/load/multi_load/test_template.py @@ -0,0 +1,43 @@ +# +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +# pylint: disable=missing-docstring +"""Test cases for anyconfig.api.load with template options.""" +from __future__ import annotations + +import typing + +import pytest + +import anyconfig.api._load as TT +import anyconfig.template + +from . import common + +if typing.TYPE_CHECKING: + import pathlib + +if not anyconfig.template.SUPPORTED: + pytest.skip( + "jinja2 lib needed for template option is not available", + allow_module_level=True, + ) + + +NAMES: tuple[str, ...] 
= (*common.NAMES, "ctx") +DATA: list = common.load_data_for_testfile( + __file__, values=(("o", {}), ("e", None), ("c", {})), +) +DATA_IDS: list[str] = common.get_test_ids(DATA) + + +def test_data() -> None: + assert DATA + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_load( + inputs: list[pathlib.Path], opts: dict, exp, ctx: dict, +) -> None: + assert TT.load(inputs, ac_context=ctx, **opts) == exp diff --git a/tests/res/parser/attrlist/10/00.txt b/tests/api/load/single_load/__init__.py similarity index 100% rename from tests/res/parser/attrlist/10/00.txt rename to tests/api/load/single_load/__init__.py diff --git a/tests/api/load/single_load/test_ac_parser.py b/tests/api/load/single_load/test_ac_parser.py new file mode 100644 index 00000000..a4c20bf2 --- /dev/null +++ b/tests/api/load/single_load/test_ac_parser.py @@ -0,0 +1,32 @@ +# +# Copyright (C) 2012 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +# pylint: disable=missing-docstring +"""Test cases for anyconfig.api.load with ac_parser argument.""" +from __future__ import annotations + +import typing + +import pytest + +import anyconfig.api._load as TT + +from .. import common + +if typing.TYPE_CHECKING: + import pathlib + + +NAMES: tuple[str, ...] 
= ("ipath", "opts", "exp") +DATA: list = common.load_data_for_testfile(__file__) +DATA_IDS: list[str] = common.get_test_ids(DATA) + + +def test_data_is_non_empty() -> None: + assert DATA + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_load(ipath: pathlib.Path, opts: dict, exp) -> None: + assert TT.load(ipath, **opts) == exp diff --git a/tests/api/load/single_load/test_basics.py b/tests/api/load/single_load/test_basics.py new file mode 100644 index 00000000..f98370ca --- /dev/null +++ b/tests/api/load/single_load/test_basics.py @@ -0,0 +1,108 @@ +# +# Copyright (C) 2012 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +# pylint: disable=missing-docstring +"""Test cases for anyconfig.api.load with basic test data.""" +from __future__ import annotations + +import collections +import pathlib + +import pytest + +import anyconfig.api._load as TT +import anyconfig.parsers + +from anyconfig.api import ( + UnknownFileTypeError, UnknownProcessorTypeError, +) +from .. import common + + +JSON_PARSER = anyconfig.parsers.find(None, "json") + +NAMES: tuple[str, ...] = ("ipath", "opts", "exp") +DATA: list = common.load_data_for_testfile(__file__) +DATA_IDS: list[str] = common.get_test_ids(DATA) + +NAMES_2: tuple[str, ...] 
= ("ipath", "exp") +DATA_2: list = [(ipath, exp) for ipath, _, exp in DATA] + + +def test_data_is_not_empty() -> None: + assert DATA + + +class MyDict(collections.OrderedDict): + """My original dict class keep key orders.""" + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_load_from_stream(ipath, opts: dict, exp) -> None: + assert TT.load(ipath.open(), **opts) == exp + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_load_from_path_str(ipath, opts: dict, exp) -> None: + assert TT.load(str(ipath), **opts) == exp + + +@pytest.mark.parametrize(NAMES_2, DATA_2, ids=DATA_IDS) +def test_load_with_ac_parser_by_instance(ipath, exp) -> None: + assert TT.load(ipath, ac_parser=JSON_PARSER) == exp + + +@pytest.mark.parametrize(NAMES_2, DATA_2, ids=DATA_IDS) +def test_load_with_ac_parser_by_id(ipath, exp) -> None: + assert TT.load( + ipath, ac_parser=JSON_PARSER.cid(), + ) == exp + + +@pytest.mark.parametrize(NAMES_2, DATA_2, ids=DATA_IDS) +def test_load_with_ac_ordered(ipath, exp) -> None: + assert TT.load( + ipath, ac_ordered=True, + ) == collections.OrderedDict(exp) + + +@pytest.mark.parametrize(NAMES_2, DATA_2, ids=DATA_IDS) +def test_load_with_ac_dict(ipath, exp) -> None: + res = TT.load(ipath, ac_dict=MyDict) + assert isinstance(res, MyDict) + assert res == MyDict(**exp) + + +def test_load_missing_file_failures() -> None: + with pytest.raises(FileNotFoundError): + TT.load("not_exist.json") + + +def test_load_unknown_file_type_failures() -> None: + with pytest.raises(UnknownFileTypeError): + TT.load("dummy.txt") + + +def test_load_invalid_parser_object_failures() -> None: + with pytest.raises(ValueError, match="Wrong processor class"): + TT.load("dummy.txt", ac_parser=object()) + + +@pytest.mark.parametrize( + "ipath", + [(ipath, ) for ipath, _, _ in DATA], + ids=DATA_IDS, +) +def test_load_unknown_processor_type_failures(ipath) -> None: + with pytest.raises(UnknownProcessorTypeError): + TT.load(ipath, ac_parser="proc_does_not_exist") + + 
+def test_load_ignore_missing() -> None: + ipath = pathlib.Path() / "conf_file_not_exist.json" + assert not ipath.exists() + + assert TT.load( + ipath, ac_parser="json", ac_ignore_missing=True, + ) == {} diff --git a/tests/api/load/single_load/test_multi_types.py b/tests/api/load/single_load/test_multi_types.py new file mode 100644 index 00000000..8f8869e3 --- /dev/null +++ b/tests/api/load/single_load/test_multi_types.py @@ -0,0 +1,34 @@ +# +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +# pylint: disable=missing-docstring +"""Test cases for anyconfig.api.load with multi-type inputs.""" +from __future__ import annotations + +import typing + +import pytest + +import anyconfig.api._load as TT + +from .. import common + +if typing.TYPE_CHECKING: + import pathlib + + +NAMES: tuple[str, ...] = ("ipath", "opts", "exp") +DATA: list = common.load_data_for_testfile(__file__) +DATA_IDS: list[str] = common.get_test_ids(DATA) + + +def test_data_is_non_empty() -> None: + assert DATA + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_load( + ipath: pathlib.Path, opts: dict, exp, +) -> None: + assert TT.load(ipath, **opts) == exp diff --git a/tests/api/load/single_load/test_primitives.py b/tests/api/load/single_load/test_primitives.py new file mode 100644 index 00000000..dc33dcaa --- /dev/null +++ b/tests/api/load/single_load/test_primitives.py @@ -0,0 +1,32 @@ +# +# Copyright (C) 2012 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +# pylint: disable=missing-docstring +"""Test cases for anyconfig.api.load to load primitive types.""" +from __future__ import annotations + +import typing + +import pytest + +import anyconfig.api._load as TT + +from .. import common + +if typing.TYPE_CHECKING: + import pathlib + + +NAMES: tuple[str, ...] 
= ("ipath", "opts", "exp") +DATA: list = common.load_data_for_testfile(__file__) +DATA_IDS: list[str] = common.get_test_ids(DATA) + + +def test_data_is_non_empty() -> None: + assert DATA + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_load(ipath: pathlib.Path, opts: dict, exp) -> None: + assert TT.load(ipath, **opts) == exp diff --git a/tests/api/load/single_load/test_query.py b/tests/api/load/single_load/test_query.py new file mode 100644 index 00000000..2a622675 --- /dev/null +++ b/tests/api/load/single_load/test_query.py @@ -0,0 +1,53 @@ +# +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +# pylint: disable=missing-docstring, unused-import +"""Test cases for anyconfig.api.load to load primitive types.""" +from __future__ import annotations + +import typing + +import pytest + +import anyconfig.api._load as TT + +try: + import anyconfig.query.query # noqa: F401 +except ImportError: + pytest.skip( + "Required query module is not available", + allow_module_level=True, + ) + +from .. import common + +if typing.TYPE_CHECKING: + import pathlib + + +NAMES: tuple[str, ...] 
= ("ipath", "exp", "query", "opts") +DATA: list = common.load_data_for_testfile( + __file__, (("e", None), ("q", ""), ("o", {})), +) +DATA_IDS: list[str] = common.get_test_ids(DATA) + +DATA_2 = [(i, o) for i, _, _, o in DATA] + + +def test_data_is_non_empty() -> None: + assert DATA + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_load(ipath: pathlib.Path, exp, query, opts) -> None: + assert TT.load(ipath, ac_query=query.strip(), **opts) == exp + + +@pytest.mark.parametrize(("ipath", "opts"), DATA_2, ids=DATA_IDS) +def test_load_with_invalid_query_string( + ipath: pathlib.Path, opts, +) -> None: + assert TT.load( + ipath, ac_query=None, **opts, + ) == TT.load(ipath, **opts) diff --git a/tests/api/load/single_load/test_schema.py b/tests/api/load/single_load/test_schema.py new file mode 100644 index 00000000..fcee40a2 --- /dev/null +++ b/tests/api/load/single_load/test_schema.py @@ -0,0 +1,87 @@ +# +# Copyright (C) 2012 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +# pylint: disable=missing-docstring, unused-import +"""Test cases for anyconfig.api.load with schema options.""" +from __future__ import annotations + +import typing +import warnings + +import pytest + +import anyconfig.api._load as TT + +from anyconfig.api import ValidationError + +from .. import common + +try: + import jsonschema # noqa: F401 +except ImportError: + pytest.skip( + "Required jsonschema lib is not available.", + allow_module_level=True, + ) + +if typing.TYPE_CHECKING: + import pathlib + + +SCM_NG_0 = """{ + "type": "object", + "properties": {"key_never_exist": {"type": "string", "required": true}} +}""" + + +def ipath_to_scm_path(ipath: pathlib.Path) -> pathlib.Path | None: + basename: str = ipath.name.replace(ipath.suffix, "") + candidates = list((ipath.parent / "s").glob(f"{basename}.*")) + if candidates: + return candidates[0] + + return None + + +NAMES: tuple[str, ...] 
= ("ipath", "exp", "opts", "scm") +DATA: list = [ + (i, e, o, ipath_to_scm_path(i)) for i, o, e + in common.load_data_for_testfile(__file__) +] +DATA_IDS: list[str] = common.get_test_ids(DATA) + + +def test_data_is_non_empty() -> None: + assert DATA + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_load(ipath, exp, opts, scm) -> None: + assert scm, f"Not found: {scm!s} [{ipath!s}" + assert TT.load(ipath, ac_schema=scm, **opts) == exp + + +@pytest.mark.parametrize( + ("ipath", "opts"), + [(ipath, opts) for ipath, _, opts, _ in DATA[:1]], + ids=DATA_IDS[:1], +) +def test_load_failures( + ipath, opts, tmp_path: pathlib.Path, +) -> None: + scm = tmp_path / "scm.json" + scm.write_text(SCM_NG_0) + + with warnings.catch_warnings(record=True) as warns: + warnings.simplefilter("always") + res = TT.load( + ipath, ac_schema=scm, ac_schema_safe=True, **opts, + ) + assert res is None + assert len(warns) > 0 + assert issubclass(warns[-1].category, UserWarning) + assert "scm=" in str(warns[-1].message) + + with pytest.raises(ValidationError): + TT.load(ipath, ac_schema=scm, ac_schema_safe=False) diff --git a/tests/api/load/single_load/test_template.py b/tests/api/load/single_load/test_template.py new file mode 100644 index 00000000..09e009ec --- /dev/null +++ b/tests/api/load/single_load/test_template.py @@ -0,0 +1,56 @@ +# +# Copyright (C) 2012 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +# pylint: disable=missing-docstring, unused-import +"""Test cases for anyconfig.api.load with template args.""" +from __future__ import annotations + +import typing +import warnings + +import pytest + +import anyconfig.api._load as TT +try: + import anyconfig.template.jinja2 # noqa: F401 +except ImportError: + pytest.skip( + "Requried jinja2 lib is not available.", + allow_module_level=True, + ) + +from .. import common + +if typing.TYPE_CHECKING: + import pathlib + + +NAMES: tuple[str, ...] 
= ("ipath", "ctx", "exp", "opts") +DATA: list = common.load_data_for_testfile( + __file__, (("c", {}), ("e", None), ("o", {})), +) +DATA_IDS: list[str] = common.get_test_ids(DATA) + + +def test_data_is_non_empty() -> None: + assert DATA + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_load(ipath, ctx, exp, opts) -> None: + assert TT.load(ipath, ac_context=ctx, **opts) == exp + + +def test_load_from_invalid_template(tmp_path: pathlib.Path) -> None: + ipath = tmp_path / "test.json" + ipath.write_text("""{"a": "{{ a"}""") # broken template string. + + with warnings.catch_warnings(record=True) as warns: + warnings.simplefilter("always") + res = TT.load(ipath, ac_template=True, ac_context={"a": 1}) + + assert res == {"a": "{{ a"} + assert len(warns) > 0 + assert issubclass(warns[-1].category, UserWarning) + assert "ailed to compile " in str(warns[-1].message) diff --git a/tests/api/load/single_load/test_toml.py b/tests/api/load/single_load/test_toml.py new file mode 100644 index 00000000..ca960a42 --- /dev/null +++ b/tests/api/load/single_load/test_toml.py @@ -0,0 +1,31 @@ +# +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +# pylint: disable=missing-docstring +"""Test cases for anyconfig.load with input of other file types.""" +from __future__ import annotations + +import pytest + +import anyconfig.api._load as TT + +from .. import common + + +NAMES: tuple[str, ...] 
= ("ipath", "exp") +DATA: list = common.load_data_for_testfile(__file__, (("e", None), )) +DATA_IDS: list[str] = common.get_test_ids(DATA) + + +def test_data_is_non_empty() -> None: + assert DATA + + +@pytest.mark.skipif( + "toml" not in common.LOADER_TYPES, + reason="toml lib is not available.", +) +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_load_for_toml_files(ipath, exp) -> None: + assert TT.load(ipath) == exp diff --git a/tests/api/load/single_load/test_yaml.py b/tests/api/load/single_load/test_yaml.py new file mode 100644 index 00000000..1ff1f664 --- /dev/null +++ b/tests/api/load/single_load/test_yaml.py @@ -0,0 +1,31 @@ +# +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +# pylint: disable=missing-docstring +"""Test cases for anyconfig.load with input of other file types.""" +from __future__ import annotations + +import pytest + +import anyconfig.api._load as TT + +from .. import common + + +NAMES: tuple[str, ...] = ("ipath", "exp") +DATA: list = common.load_data_for_testfile(__file__, (("e", None), )) +DATA_IDS: list[str] = common.get_test_ids(DATA) + + +def test_data_is_non_empty() -> None: + assert DATA + + +@pytest.mark.skipif( + "yaml" not in common.LOADER_TYPES, + reason="yaml loader is not available.", +) +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_load_for_yaml_files(ipath, exp) -> None: + assert TT.load(ipath) == exp diff --git a/tests/api/load/test_basics.py b/tests/api/load/test_basics.py deleted file mode 100644 index 118c3734..00000000 --- a/tests/api/load/test_basics.py +++ /dev/null @@ -1,19 +0,0 @@ -# -# Copyright (C) 2021 Satoru SATOH -# License: MIT -# -# pylint: disable=missing-docstring, relative-beyond-top-level -from ..multi_load import test_basics as multi -from ..single_load import test_basics as single -from . 
import common - - -class SingleTestCase(common.SingleBase, single.TestCase): - pass - - -class MultiTestCase(common.MultiBase, multi.TestCase): - pass - -# vim:sw=4:ts=4:et: diff --git a/tests/api/load/test_multi_types.py b/tests/api/load/test_multi_types.py deleted file mode 100644 index 35599bf4..00000000 --- a/tests/api/load/test_multi_types.py +++ /dev/null @@ -1,19 +0,0 @@ -# -# Copyright (C) 2021 Satoru SATOH -# License: MIT -# -# pylint: disable=missing-docstring, relative-beyond-top-level -from ..multi_load import test_multi_types as multi -from ..single_load import test_multi_types as single -from . import common - - -class SingleTestCase(common.SingleBase, single.TestCase): - pass - - -class MultiTestCase(common.MultiBase, multi.TestCase): - pass - - -# vim:sw=4:ts=4:et: diff --git a/tests/api/load/test_query.py b/tests/api/load/test_query.py deleted file mode 100644 index 3e5fedc4..00000000 --- a/tests/api/load/test_query.py +++ /dev/null @@ -1,19 +0,0 @@ -# -# Copyright (C) 2021 Satoru SATOH -# License: MIT -# -# pylint: disable=missing-docstring, relative-beyond-top-level -from ..multi_load import test_query as multi -from ..single_load import test_query as single -from . import common - - -class SingleTestCase(common.SingleBase, single.TestCase): - pass - - -class MultiTestCase(common.MultiBase, multi.TestCase): - pass - - -# vim:sw=4:ts=4:et: diff --git a/tests/api/load/test_schema.py b/tests/api/load/test_schema.py deleted file mode 100644 index eb5cecfd..00000000 --- a/tests/api/load/test_schema.py +++ /dev/null @@ -1,19 +0,0 @@ -# -# Copyright (C) 2021 Satoru SATOH -# License: MIT -# -# pylint: disable=missing-docstring, relative-beyond-top-level -from ..multi_load import test_schema as multi -from ..single_load import test_schema as single -from . 
import common - - -class SingleTestCase(common.SingleBase, single.TestCase): - pass - - -class MultiTestCase(common.MultiBase, multi.TestCase): - pass - - -# vim:sw=4:ts=4:et: diff --git a/tests/api/load/test_templates.py b/tests/api/load/test_templates.py deleted file mode 100644 index 3cd7c822..00000000 --- a/tests/api/load/test_templates.py +++ /dev/null @@ -1,19 +0,0 @@ -# -# Copyright (C) 2021 Satoru SATOH -# License: MIT -# -# pylint: disable=missing-docstring, relative-beyond-top-level -from ..multi_load import test_template as multi -from ..single_load import test_template as single -from . import common - - -class SingleTestCase(common.SingleBase, single.TestCase): - pass - - -class MultiTestCase(common.MultiBase, multi.TestCase): - pass - - -# vim:sw=4:ts=4:et: diff --git a/tests/api/loads/common.py b/tests/api/loads/common.py deleted file mode 100644 index 0b9e74ca..00000000 --- a/tests/api/loads/common.py +++ /dev/null @@ -1,17 +0,0 @@ -# -# Copyright (C) 2021 Satoru SATOH -# License: MIT -# -# pylint: disable=missing-docstring -import unittest - -from ... import base - - -class TestCase(unittest.TestCase, base.TDataCollector): - pattern = '*.txt' - - def setUp(self): - self.init() - -# vim:sw=4:ts=4:et: diff --git a/tests/api/loads/test_basics.py b/tests/api/loads/test_basics.py index c65e80a7..bbc82cb1 100644 --- a/tests/api/loads/test_basics.py +++ b/tests/api/loads/test_basics.py @@ -1,49 +1,61 @@ # -# Copyright (C) 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring +"""Basic test cases for anyconfig.api.loads.""" +from __future__ import annotations + +import typing import warnings +import pytest + import anyconfig.api._load as TT from anyconfig.api import UnknownProcessorTypeError -from . 
import common - - -class TestCase(common.TestCase): - - def test_loads(self): - for data in self.each_data(): - self.assertEqual( - TT.loads(data.inp, **data.opts), - data.exp, - f'{data.datadir!s}, {data.inp_path!s}' - ) - - def test_loads_intentional_failures(self): - for data in self.each_data(): - with self.assertRaises(AssertionError): - self.assertEqual(TT.loads(data.inp, **data.opts), {}) - - def test_loads_failure_ac_parser_was_not_given(self): - for data in self.each_data(): - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter('always') - self.assertEqual(TT.loads(data.inp), None) - self.assertEqual(len(warns), 1) - self.assertTrue(issubclass(warns[-1].category, UserWarning)) - self.assertTrue( - 'ac_parser was not given but' in str(warns[-1].message) - ) - - def test_loads_failure_invalid_ac_parser_was_given(self): - for data in self.each_data(): - with self.assertRaises(UnknownProcessorTypeError): - self.assertEqual( - TT.loads(data.inp, ac_parser='invalid_id'), - None - ) - -# vim:sw=4:ts=4:et: +from ... import common + +if typing.TYPE_CHECKING: + import pathlib + + +NAMES: tuple[str, ...] = ("content", "opts", "exp") + +# .. 
seealso:: tests.common.tdc +DATA_0: list[ + tuple[pathlib.Path, dict, typing.Any] +] = common.load_data_for_testfile(__file__) + +DATA: list[tuple[str, dict, typing.Any]] = [ + (i.read_text(), o, e) for i, o, e in DATA_0 +] +DATA_IDS: list[str] = common.get_test_ids(DATA_0) + + +def test_data() -> None: + assert DATA + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_loads(content: str, opts: dict, exp) -> None: + assert TT.loads(content, **opts) == exp + + +@pytest.mark.parametrize(NAMES, DATA[:1], ids=DATA_IDS[:1]) +def test_loads_withou_ac_parser_option(content: str, opts: dict, exp): + assert opts or exp + with warnings.catch_warnings(record=True) as warns: + warnings.simplefilter('always') + assert TT.loads(content) is None + assert len(warns) == 1 + assert issubclass(warns[-1].category, UserWarning) + assert "ac_parser was not given but" in str(warns[-1].message) + + +@pytest.mark.parametrize(NAMES, DATA[:1], ids=DATA_IDS[:1]) +def test_loads_with_invalid_ac_parser_option(content: str, opts: dict, exp): + assert opts or exp + with pytest.raises(UnknownProcessorTypeError): + assert TT.loads(content, ac_parser="invalid_parser") is None diff --git a/tests/api/loads/test_query.py b/tests/api/loads/test_query.py index 35f9f1bb..eed6ff89 100644 --- a/tests/api/loads/test_query.py +++ b/tests/api/loads/test_query.py @@ -1,36 +1,54 @@ # # Copyright (C) 2021 Satoru SATOH -# License: MIT +# SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring -import unittest +"""Basic test cases for anyconfig.api.loads.""" +from __future__ import annotations + +import typing + +import pytest import anyconfig.api._load as TT import anyconfig.query -from . import common +from ... import common + + +if not anyconfig.query.SUPPORTED: + pytest.skip( + "Required query module is not available", + allow_module_level=True + ) + + +NAMES: tuple[str, ...] = ("content", "exp", "query", "opts") + +# .. 
seealso:: tests.common.tdc +DATA_0: list = common.load_data_for_testfile( + __file__, (("e", None), ("q", ""), ("o", {})) +) +DATA_IDS: list[str] = common.get_test_ids(DATA_0) +DATA: list[tuple[str, dict, typing.Any]] = [ + (i.read_text(), e, q.strip(), o) for i, e, q, o in DATA_0 +] + +def test_data() -> None: + assert DATA -@unittest.skipIf(not anyconfig.query.SUPPORTED, - 'jmespath lib is not available') -class TestCase(common.TestCase): - kind = 'query' - def test_loads_with_query(self): - for data in self.each_data(): - self.assertEqual( - TT.loads(data.inp, ac_query=data.query, **data.opts), - data.exp, - f'{data.datadir!s}, {data.inp_path!s}' - ) +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_loads(content: str, exp, query: str, opts: dict): + assert TT.loads(content, ac_query=query, **opts) == exp - def test_loads_with_invalid_query(self): - opts = dict(ac_parser='json') - for data in self.each_data(): - self.assertEqual( - TT.loads(data.inp, ac_query=None, **opts), - TT.single_load(data.inp_path, **opts), - f'{data.datadir!s}, {data.inp_path!s}' - ) -# vim:sw=4:ts=4:et: +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_loads_with_invalid_query_option( + content: str, exp, query: str, opts: dict +): + assert exp or query + assert TT.loads( + content, ac_query=None, **opts + ) == TT.loads(content, **opts) diff --git a/tests/api/loads/test_schema.py b/tests/api/loads/test_schema.py index f1a3d7b4..c2deee0a 100644 --- a/tests/api/loads/test_schema.py +++ b/tests/api/loads/test_schema.py @@ -1,50 +1,64 @@ # -# Copyright (C) 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring -import unittest +"""Test cases for anyconfig.api.loads with schema validation option.""" +from __future__ import annotations + +import typing import warnings +import pytest + import anyconfig.api._load as TT import anyconfig.schema from anyconfig.api import 
ValidationError -from . import common +from ... import common + + +if "jsonschema" not in anyconfig.schema.VALIDATORS: + pytest.skip( + "Required schema module 'jsonschema' is not available", + allow_module_level=True + ) + + +NAMES: tuple[str, ...] = ("content", "exp", "scm", "opts") + +# .. seealso:: tests.common.tdc +DATA_0: list = common.load_data_for_testfile( + __file__, (("e", None), ("s", ""), ("o", {})) +) +DATA_IDS: list[str] = common.get_test_ids(DATA_0) +DATA: list[tuple[str, dict, typing.Any]] = [ + (i.read_text(), e, s.strip(), o) for i, e, s, o in DATA_0 +] + + +def test_data() -> None: + assert DATA + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_loads(content: str, exp, scm: str, opts: dict): + assert TT.loads(content, ac_schema=scm, **opts) == exp SCM_NG_0 = '{"type": "object", "properties": {"a": {"type": "string"}}}' -@unittest.skipIf(not anyconfig.schema.SUPPORTED, - 'jsonschema lib is not available') -class TestCase(common.TestCase): - kind = 'schema' - - def test_loads_with_schema_validation(self): - for data in self.each_data(): - scm = data.scm.read_text().strip() - self.assertEqual( - TT.loads(data.inp, ac_schema=scm, **data.opts), - data.exp, - f'{data.datadir!s}, {data.inp_path!s}' - ) - - def test_loads_with_schema_validation_failures(self): - opts = dict(ac_parser='json', ac_schema=SCM_NG_0) - - for data in self.each_data(): - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter('always') - self.assertTrue( - TT.loads(data.inp, **opts) is None, - f'{data.datadir!s}, {data.inp_path!s}' - ) - self.assertTrue(len(warns) > 0) - self.assertTrue(issubclass(warns[-1].category, UserWarning)) - - with self.assertRaises(ValidationError): - TT.loads(data.inp, ac_schema_safe=False, **opts) - -# vim:sw=4:ts=4:et: +@pytest.mark.parametrize(NAMES, DATA[:1], ids=DATA_IDS[:1]) +def test_loads_without_schema(content: str, exp, scm: str, opts: dict): + assert scm or exp + + with pytest.raises(ValidationError): + 
TT.loads(content, ac_schema=SCM_NG_0, ac_schema_safe=False, **opts) + + with warnings.catch_warnings(record=True) as warns: + warnings.simplefilter("always") + assert TT.loads(content, ac_schema=SCM_NG_0, **opts) is None + assert len(warns) > 0 + assert issubclass(warns[-1].category, UserWarning) diff --git a/tests/api/loads/test_template.py b/tests/api/loads/test_template.py index bf298474..e3551bc9 100644 --- a/tests/api/loads/test_template.py +++ b/tests/api/loads/test_template.py @@ -1,37 +1,55 @@ # -# Copyright (C) 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring -import unittest +"""Test cases for anyconfig.api.single_load with schema options.""" +from __future__ import annotations + +import typing import warnings +import pytest + import anyconfig.api._load as TT import anyconfig.template -from . import common +from ... import common + + +if not anyconfig.template.SUPPORTED: + pytest.skip( + "jinja2 template lib is not available", + allow_module_level=True + ) + + +NAMES: tuple[str, ...] = ("content", "exp", "ctx", "opts") + +# .. 
seealso:: tests.common.tdc +DATA_0: list = common.load_data_for_testfile( + __file__, (("e", None), ("c", {}), ("o", {})) +) +DATA_IDS: list[str] = common.get_test_ids(DATA_0) +DATA: list[tuple[str, dict, typing.Any]] = [ + (i.read_text(), *eco) for i, *eco in DATA_0 +] + +def test_data() -> None: + assert DATA -@unittest.skipIf(not anyconfig.template.SUPPORTED, - 'jinja2 template lib is not available') -class TestCase(common.TestCase): - kind = 'template' - def test_loads_template(self): - for data in self.each_data(): - self.assertEqual( - TT.loads(data.inp, ac_context=data.ctx, **data.opts), - data.exp, - f'{data.datadir!s}, {data.inp_path!s}' - ) +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_loads(content: str, exp, ctx: dict, opts: dict): + assert TT.loads(content, ac_context=ctx, **opts) == exp - def test_loads_from_template_failures(self): - inp = '{"a": "{{ a"}' - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter('always') - res = TT.loads(inp, ac_parser='json', ac_template=True) - self.assertEqual(res, dict(a='{{ a')) - # self.assertEqual(len(warns), 1) # Needs to fix plugins - self.assertTrue(issubclass(warns[-1].category, UserWarning)) -# vim:sw=4:ts=4:et: +def test_loads_failures(): + content = '{"a": "{{ a"}' + with warnings.catch_warnings(record=True) as warns: + warnings.simplefilter("always") + res = TT.loads(content, ac_parser="json", ac_template=True) + assert res == {"a": "{{ a"} + # self.assertEqual(len(warns), 1) # Needs to fix plugins + assert issubclass(warns[-1].category, UserWarning) diff --git a/tests/api/multi_load/collector.py b/tests/api/multi_load/collector.py deleted file mode 100644 index 664fe077..00000000 --- a/tests/api/multi_load/collector.py +++ /dev/null @@ -1,42 +0,0 @@ -# -# Copyright (C) 2021 Satoru SATOH -# License: MIT -# -"""Collector to collect file based test data. -""" -import typing - -from ... import base -from . 
import datatypes, utils - - -class DataCollector(base.TDataCollector): - """Data collector for api.multi_load - """ - def load_datasets(self) -> typing.List[datatypes.TData]: - """Load test data from files. - """ - _datasets = sorted( - utils.each_data_from_dir( - self.root, self.pattern, self.should_exist - ) - ) - if not _datasets: - raise ValueError(f'No data: {self.root!s}') - - for tdata in _datasets: - if not tdata.inputs: - raise ValueError(f'No data in subdir: {tdata.subdir!s}') - - return _datasets - - def each_data(self) -> typing.Iterator[datatypes.TData]: - """Yields test data. - """ - if not self.initialized: - self.init() - - for tdata in self.datasets: - yield tdata - -# vim:sw=4:ts=4:et: diff --git a/tests/api/multi_load/common.py b/tests/api/multi_load/common.py index dd519995..5d58692f 100644 --- a/tests/api/multi_load/common.py +++ b/tests/api/multi_load/common.py @@ -1,41 +1,33 @@ # -# Copyright (C) 2021 Satoru SATOH +# Copyright (C) 2021 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring -r"""Common utility functions. +"""Common constants and utility functions for test cases of +anyconfig.api.multi_load. """ -import unittest +from __future__ import annotations -import anyconfig.api._load as TT +import anyconfig.api.utils -from . import collector +from ... import common -class TestCase(unittest.TestCase, collector.DataCollector): +NAMES: tuple[str, ...] 
= ("inputs", "opts", "exp") +GLOB_PATTERN: str = "*.*" - @staticmethod - def target_fn(*args, **kwargs): - return TT.multi_load(*args, **kwargs) - def setUp(self): - self.init() +def load_data_for_testfile(testfile: str, **kwargs): + return [ + (sorted(i.parent.glob(GLOB_PATTERN)), opts, exp, *rest) + for i, opts, exp, *rest + in common.load_data_for_testfile(testfile, **kwargs) + if exp is not None + ] - def test_multi_load(self): - for tdata in self.each_data(): - self.assertEqual( - self.target_fn(tdata.inputs, **tdata.opts), - tdata.exp, - tdata - ) - def test_multi_load_failures(self): - for tdata in self.each_data(): - with self.assertRaises(AssertionError): - self.assertEqual( - self.target_fn(tdata.inputs, **tdata.opts), - None, - tdata - ) - -# vim:sw=4:ts=4:et: +def get_test_ids(data: list) -> list[str]: + return common.get_test_ids( + [(mis[0] if anyconfig.utils.is_iterable(mis) else mis, *rest) + for mis, *rest in data] + ) diff --git a/tests/api/multi_load/datatypes.py b/tests/api/multi_load/datatypes.py deleted file mode 100644 index 675ff952..00000000 --- a/tests/api/multi_load/datatypes.py +++ /dev/null @@ -1,25 +0,0 @@ -# -# Copyright (C) 2021 Satoru SATOH -# License: MIT -# -"""Common data types for api.multi_load test cases. -""" -import pathlib -import typing - - -DictT = typing.Dict[str, typing.Any] - - -class TData(typing.NamedTuple): - """A namedtuple object keeps test data. - """ - datadir: pathlib.Path - inputs: typing.List[pathlib.Path] # Same as the above. 
- exp: DictT - opts: DictT - scm: pathlib.Path - query: str - ctx: DictT - -# vim:sw=4:ts=4:et: diff --git a/tests/api/multi_load/test_basics.py b/tests/api/multi_load/test_basics.py index a71d6d64..aa218e8b 100644 --- a/tests/api/multi_load/test_basics.py +++ b/tests/api/multi_load/test_basics.py @@ -1,60 +1,104 @@ # -# Copyright (C) 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring +"""Test cases for anyconfig.api.multi_load.""" +from __future__ import annotations + import collections +import typing + +import pytest + +import anyconfig.api._load as TT + +from .common import ( + NAMES, GLOB_PATTERN, load_data_for_testfile, get_test_ids +) + +if typing.TYPE_CHECKING: + import pathlib + + +DATA = load_data_for_testfile(__file__) +DATA_IDS: list[str] = get_test_ids(DATA) + +DATA_W_GLOB = [ + (inputs[0].parent / GLOB_PATTERN, opts, exp) + for inputs, opts, exp in DATA +] + + +def test_data() -> None: + assert DATA + + +def test_multi_load_with_empty_list() -> None: + assert TT.multi_load([]) == {} -from ... import base -from . 
import common + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_multi_load_for_a_list_of_path_objects( + inputs: list[pathlib.Path], opts: dict, exp +) -> None: + assert TT.multi_load(inputs, **opts) == exp + assert TT.multi_load((i for i in inputs), **opts) == exp + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_multi_load_for_a_list_of_path_strings( + inputs: list[pathlib.Path], opts: dict, exp +) -> None: + assert TT.multi_load([str(i) for i in inputs], **opts) == exp + assert TT.multi_load((str(i) for i in inputs), **opts) == exp + + +@pytest.mark.parametrize( + NAMES, DATA_W_GLOB, ids=get_test_ids(DATA_W_GLOB) +) +def test_multi_load_for_glob_patterns( + inputs: list[pathlib.Path], opts: dict, exp +) -> None: + assert TT.multi_load(inputs, **opts) == exp + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_multi_load_for_a_list_of_streams( + inputs: list[pathlib.Path], opts: dict, exp +) -> None: + assert TT.multi_load([i.open() for i in inputs], **opts) == exp class MyDict(collections.OrderedDict): pass -class TestCase(common.TestCase): - - def test_multi_load_from_empty_path_list(self): - self.assertEqual(self.target_fn([]), base.NULL_CNTNR) - - def test_multi_load_from_glob_path_str(self): - for tdata in self.each_data(): - self.assertEqual( - self.target_fn((str(i) for i in tdata.inputs), **tdata.opts), - tdata.exp - ) - - def test_multi_load_from_streams(self): - for tdata in self.each_data(): - self.assertEqual( - self.target_fn((i.open() for i in tdata.inputs), **tdata.opts), - tdata.exp - ) - - def test_multi_load_to_ac_dict(self): - for tdata in self.each_data(): - res = self.target_fn(tdata.inputs, ac_dict=MyDict, **tdata.opts) - self.assertEqual(res, tdata.exp, tdata) - self.assertTrue(isinstance(res, MyDict)) - - def test_multi_load_with_wrong_merge_strategy(self): - for tdata in self.each_data(): - with self.assertRaises(ValueError): - self.target_fn(tdata.inputs, ac_merge='wrong_merge_strategy') - - 
def test_multi_load_with_ignore_missing_option(self): - paths = [ - 'file_not_exist_0.json', - 'file_not_exist_1.json', - 'file_not_exist_2.json', - ] - with self.assertRaises(FileNotFoundError): - self.target_fn(paths) - - self.assertEqual( - self.target_fn(paths, ac_ignore_missing=True), - base.NULL_CNTNR - ) - -# vim:sw=4:ts=4:et: +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_multi_load_with_ac_dict_option( + inputs: list[pathlib.Path], opts: dict, exp +) -> None: + res = TT.multi_load(inputs, ac_dict=MyDict, **opts) + assert res == exp + assert isinstance(res, MyDict) + + +@pytest.mark.parametrize(NAMES, DATA[:1], ids=DATA_IDS[:1]) +def test_multi_load_with_wrong_merge_strategy( + inputs: list[pathlib.Path], opts: dict, exp +) -> None: + assert exp # dummy to avoid an error of unused argument. + with pytest.raises(ValueError): + TT.multi_load(inputs, ac_merge="wrong_merge_strategy", **opts) + + +def test_multi_load_with_ignore_missing_option(): + paths = [ + "/path/to/file_not_exist_0.json", + "/path/to/file_not_exist_1.json", + "/path/to/file_not_exist_2.json", + ] + with pytest.raises(FileNotFoundError): + TT.multi_load(paths) + + assert TT.multi_load(paths, ac_ignore_missing=True) == {} diff --git a/tests/api/multi_load/test_collector.py b/tests/api/multi_load/test_collector.py deleted file mode 100644 index 1190463e..00000000 --- a/tests/api/multi_load/test_collector.py +++ /dev/null @@ -1,54 +0,0 @@ -# -# Copyright (C) 2021 Satoru SATOH -# License: MIT -# -# pylint: disable=missing-docstring -import json -import unittest - -from . 
import collector as TT - - -class NoDataCollector(TT.DataCollector): - kind = 'not_exist' - - -class TestCase(unittest.TestCase): - - def test_load_datasets_failures(self): - collector = NoDataCollector() - with self.assertRaises(ValueError): - collector.init() - - def test_load_datasets(self): - collector = TT.DataCollector() - collector.init() - res = collector.datasets - - inp_refs = list(collector.root.glob(f'*/{collector.pattern}')) - assert bool(inp_refs) - - e_refs = list(collector.root.glob('*/e/*.json')) - assert bool(e_refs) - - o_refs = list(collector.root.glob('*/o/*.json')) - assert bool(o_refs) - - s_refs = list(collector.root.glob('*/s/*.json')) - assert bool(s_refs) - - for inp in inp_refs: - self.assertTrue(any(inp in td.inputs for td in res)) - - for e_file in e_refs: - e_ref = json.load(e_file.open()) - self.assertTrue(any(e_ref == td.exp for td in res)) - - for o_file in o_refs: - o_ref = json.load(o_file.open()) - self.assertTrue(any(o_ref == td.opts for td in res)) - - for s_file in s_refs: - self.assertTrue(any(s_file == td.scm for td in res)) - -# vim:sw=4:ts=4:et: diff --git a/tests/api/multi_load/test_common.py b/tests/api/multi_load/test_common.py deleted file mode 100644 index 12937bcd..00000000 --- a/tests/api/multi_load/test_common.py +++ /dev/null @@ -1,26 +0,0 @@ -# -# Copyright (C) 2021 Satoru SATOH -# License: MIT -# -# pylint: disable=invalid-name,missing-docstring -import pathlib - -from . import common -from ... 
import base - - -CUR_DIR = pathlib.Path(__file__).parent - - -class TestCase(common.TestCase): - - def test_members(self): - self.assertNotEqual(self.target, base.TDataCollector.target) - self.assertEqual(self.target, CUR_DIR.name) - self.assertEqual( - self.root, - CUR_DIR.parent.parent / 'res' / self.target / self.kind - ) - self.assertTrue(self.datasets) - -# vim:sw=4:ts=4:et: diff --git a/tests/api/multi_load/test_multi_types.py b/tests/api/multi_load/test_multi_types.py index 72cbb1a0..a1bc525b 100644 --- a/tests/api/multi_load/test_multi_types.py +++ b/tests/api/multi_load/test_multi_types.py @@ -1,13 +1,35 @@ # -# Copyright (C) 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring -from . import common +"""Test cases for anyconfig.api.multi_load with multi type inputs.""" +from __future__ import annotations +import typing -class TestCase(common.TestCase): - kind = 'multi_types' - pattern = '*.*' +import pytest -# vim:sw=4:ts=4:et: +import anyconfig.api._load as TT + +from .common import ( + NAMES, load_data_for_testfile, get_test_ids +) + +if typing.TYPE_CHECKING: + import pathlib + + +DATA = load_data_for_testfile(__file__) +DATA_IDS: list[str] = get_test_ids(DATA) + + +def test_data() -> None: + assert DATA + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_multi_load( + inputs: list[pathlib.Path], opts: dict, exp +) -> None: + assert TT.multi_load(inputs, **opts) == exp diff --git a/tests/api/multi_load/test_query.py b/tests/api/multi_load/test_query.py index c46621ce..402cf651 100644 --- a/tests/api/multi_load/test_query.py +++ b/tests/api/multi_load/test_query.py @@ -1,35 +1,53 @@ # -# Copyright (C) 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring -import unittest +"""Test cases for anyconfig.api.multi_load with query options.""" +from __future__ 
import annotations +import typing + +import pytest + +import anyconfig.api._load as TT import anyconfig.query -from . import common +from .common import ( + load_data_for_testfile, get_test_ids +) + +if typing.TYPE_CHECKING: + import pathlib + + +if not anyconfig.query.SUPPORTED: + pytest.skip( + "jmespath lib to neede for query is not available.", + allow_module_level=True + ) + +NAMES: tuple[str, ...] = ("inputs", "query", "exp") +DATA = load_data_for_testfile(__file__, values=(("q", ""), ("e", None))) +DATA_IDS: list[str] = get_test_ids(DATA) + +def test_data() -> None: + assert DATA -@unittest.skipIf(not anyconfig.query.SUPPORTED, - 'jmespath lib is not available') -class TestCase(common.TestCase): - kind = 'query' - should_exist = ('e', 'q') - def test_multi_load(self): - for tdata in self.each_data(): - self.assertEqual( - self.target_fn( - tdata.inputs, ac_query=tdata.query, **tdata.opts - ), - tdata.exp - ) +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_multi_load( + inputs: list[pathlib.Path], query: str, exp +) -> None: + assert TT.multi_load(inputs, ac_query=query) == exp - def test_multi_load_with_invalid_query(self): - for tdata in self.each_data(): - self.assertEqual( - self.target_fn(tdata.inputs, ac_query='', **tdata.opts), - self.target_fn(tdata.inputs) - ) -# vim:sw=4:ts=4:et: +@pytest.mark.parametrize(NAMES, DATA[:1], ids=DATA_IDS[:1]) +def test_multi_load_with_invalid_query( + inputs: list[pathlib.Path], query: str, exp +) -> None: + assert query or exp # To avoid an error not using them. 
+ assert TT.multi_load( + inputs, ac_query="" + ) == TT.multi_load(inputs) diff --git a/tests/api/multi_load/test_schema.py b/tests/api/multi_load/test_schema.py index 9f6708be..2a302c3f 100644 --- a/tests/api/multi_load/test_schema.py +++ b/tests/api/multi_load/test_schema.py @@ -1,47 +1,71 @@ # -# Copyright (C) 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring -import pathlib -import tempfile -import unittest +from __future__ import annotations +import typing + +import pytest + +import anyconfig.api._load as TT import anyconfig.schema from anyconfig.api import ValidationError from . import common +if typing.TYPE_CHECKING: + import pathlib + + +if "jsonschema" not in anyconfig.schema.VALIDATORS: + pytest.skip( + "jsonschema lib is not available.", + allow_module_level=True + ) + + +def scm_path_from_inputs(inputs: list[pathlib.Path]) -> pathlib.Path: + path = inputs[0] + name = path.name[:-len(path.suffix)] + return list((path.parent / "s").glob(f"{name}.*"))[0] + + +NAMES: tuple[str, ...] 
= (*common.NAMES, "scm") +DATA = [ + (inputs, *rest, scm_path_from_inputs(inputs)) + for inputs, *rest in common.load_data_for_testfile(__file__) +] +DATA_IDS: list[str] = common.get_test_ids(DATA) + + +def test_data() -> None: + assert DATA + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_multi_load( + inputs: list[pathlib.Path], opts: dict, exp, scm: pathlib.Path +) -> None: + assert TT.multi_load(inputs, ac_schema=scm, **opts) == exp + SCM_NG_0 = '{"type": "object", "properties": {"a": {"type": "string"}}}' -@unittest.skipIf(not anyconfig.schema.SUPPORTED, - 'jsonschema lib is not available') -class TestCase(common.TestCase): - kind = 'schema' - - def test_multi_load(self): - for tdata in self.each_data(): - self.assertEqual( - self.target_fn( - tdata.inputs, ac_schema=tdata.scm, **tdata.opts - ), - tdata.exp, - tdata - ) - - def test_multi_load_with_schema_validation_failure(self): - with tempfile.TemporaryDirectory() as tdir: - wdir = pathlib.Path(tdir) - scm = wdir / 'scm.json' - scm.write_text(SCM_NG_0) - - for tdata in self.each_data(): - with self.assertRaises(ValidationError): - self.target_fn( - tdata.inputs, ac_schema=scm, ac_schema_safe=False - ) - -# vim:sw=4:ts=4:et: +@pytest.mark.parametrize(NAMES, DATA[:1], ids=DATA_IDS[:1]) +def test_multi_load_with_validation_failure( + inputs: list[pathlib.Path], opts: dict, exp, scm: pathlib.Path, + tmp_path: pathlib.Path +) -> None: + assert exp or scm # dummy + + scm = tmp_path / "scm.json" + scm.write_text(SCM_NG_0) + + with pytest.raises(ValidationError): + TT.multi_load( + inputs, ac_schema=scm, ac_schema_safe=False, **opts + ) diff --git a/tests/api/multi_load/test_template.py b/tests/api/multi_load/test_template.py index a5ef38a0..f8cc596d 100644 --- a/tests/api/multi_load/test_template.py +++ b/tests/api/multi_load/test_template.py @@ -1,39 +1,43 @@ # -# Copyright (C) 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # # pylint: 
disable=missing-docstring -import unittest +"""Test cases for anyconfig.api.multi_load with template options.""" +from __future__ import annotations +import typing + +import pytest + +import anyconfig.api._load as TT import anyconfig.template from . import common +if typing.TYPE_CHECKING: + import pathlib + +if not anyconfig.template.SUPPORTED: + pytest.skip( + "jinja2 lib neede for template option is not available", + allow_module_level=True + ) + + +NAMES: tuple[str, ...] = (*common.NAMES, "ctx") +DATA: list = common.load_data_for_testfile( + __file__, values=(("o", {}), ("e", None), ("c", {})) +) +DATA_IDS: list[str] = common.get_test_ids(DATA) + + +def test_data() -> None: + assert DATA + -@unittest.skipIf(not anyconfig.template.SUPPORTED, - 'jinja2 template lib is not available') -class TestCase(common.TestCase): - kind = 'template' - - def test_multi_load(self): - for tdata in self.each_data(): - self.assertEqual( - self.target_fn( - tdata.inputs, ac_context=tdata.ctx, **tdata.opts - ), - tdata.exp, - tdata - ) - - def test_multi_load_failures(self): - for tdata in self.each_data(): - with self.assertRaises(AssertionError): - self.assertEqual( - self.target_fn( - tdata.inputs, ac_context=tdata.ctx, **tdata.opts - ), - None, - tdata - ) - -# vim:sw=4:ts=4:et: +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_multi_load( + inputs: list[pathlib.Path], opts: dict, exp, ctx: dict +) -> None: + assert TT.multi_load(inputs, ac_context=ctx, **opts) == exp diff --git a/tests/api/multi_load/test_utils.py b/tests/api/multi_load/test_utils.py deleted file mode 100644 index 8a9900e4..00000000 --- a/tests/api/multi_load/test_utils.py +++ /dev/null @@ -1,94 +0,0 @@ -# -# Copyright (C) 2021 Satoru SATOH -# License: MIT -# -# pylint: disable=missing-docstring -import json -import pathlib -import unittest -import tempfile - -from ... import base -from . 
import utils as TT - - -RES_DIR = base.RES_DIR / 'multi_load' -SELF = pathlib.Path(__file__) - - -class TestCase(unittest.TestCase): - - datadir = RES_DIR / 'basics' - - def test_load_data_or_path_no_data(self): - with tempfile.TemporaryDirectory() as tdir: - tmp_path = pathlib.Path(tdir) - aes = [ - ((tmp_path, ), {}, None), - ((tmp_path, ), dict(default='abc'), 'abc'), - ] - for args, kwargs, exp in aes: - self.assertEqual( - TT.load_data_or_path(*args, **kwargs), - exp - ) - - def test_load_data_or_path(self): - # tests/res/multi_load/basics/00/00.json - ddir = self.datadir / '00' - inp = ddir / '00.json' - aes = [ - # datadir, should_exist, load, default - # .. seealso:: tests/res/multi_load/basics/00/00.json - ((ddir, ), dict(load=False), inp), - ((ddir, ), {}, json.load(inp.open())), - ] - for args, kwargs, exp in aes: - self.assertEqual( - TT.load_data_or_path(*args, **kwargs), - exp - ) - - def test_load_data_or_path_failures(self): - datadir = pathlib.Path().cwd() / 'dir_not_exist' - aes = [ - ((datadir, ), dict(should_exist=(datadir.name, )), OSError), - ] - for args, kwargs, exc in aes: - with self.assertRaises(exc): - TT.load_data_or_path(*args, **kwargs) - - def test_each_data_from_dir(self): - inp_refs = list(self.datadir.glob('*/*.json')) - assert bool(inp_refs) - - e_refs = list(self.datadir.glob('*/e/*.json')) - assert bool(e_refs) - - o_refs = list(self.datadir.glob('*/o/*.json')) - assert bool(o_refs) - - aes = [ - ((self.datadir, ), {}, (1, inp_refs, e_refs, o_refs)) - ] - for args, kwargs, exp in aes: - res = list(TT.each_data_from_dir(*args, **kwargs)) - self.assertTrue(bool(res)) - self.assertTrue(len(res) > exp[0]) - - for inp in exp[1]: - self.assertTrue(any(inp in td.inputs for td in res)) - - for e_file in exp[2]: - e_ref = json.load(e_file.open()) - self.assertTrue(any(e_ref == td.exp for td in res)) - - for o_file in exp[3]: - o_ref = json.load(o_file.open()) - self.assertTrue(any(o_ref == td.opts for td in res)) - - def 
test_each_data_from_dir_failures(self): - with self.assertRaises(ValueError): - _ = list(TT.each_data_from_dir(SELF)) - -# vim:sw=4:ts=4:et: diff --git a/tests/api/multi_load/utils.py b/tests/api/multi_load/utils.py deleted file mode 100644 index 585346d9..00000000 --- a/tests/api/multi_load/utils.py +++ /dev/null @@ -1,72 +0,0 @@ -# -# Copyright (C) 2021 Satoru SATOH -# License: MIT -# -"""File based test data collector - utility functions. -""" -import pathlib -import typing -import warnings - -from ... import base -# load_data, maybe_data_path, - -from .datatypes import TData - - -# .. seealso:: tests.api.multi_load.datatypes -MaybeDataT = typing.Optional[ - typing.Union[str, pathlib.Path, typing.Dict[str, typing.Any]] -] - - -def load_data_or_path(datadir: pathlib.Path, - should_exist: typing.Iterable[str] = (), - load: bool = True, - default: typing.Optional[typing.Any] = None - ) -> MaybeDataT: - """ - Load data from a file in the ``datadir`` of which name matches ``pattern``. - """ - maybe_file = base.maybe_data_path(datadir, '*', should_exist=should_exist) - if maybe_file is None: - return default - - if load: - return base.load_data(maybe_file) - - return maybe_file - - -def each_data_from_dir(datadir: pathlib.Path, - pattern: str = '*.json', - should_exist: typing.Iterable[str] = () - ) -> typing.Iterator[TData]: - """ - Yield a collection of paths of data files under given dir. 
- """ - if not datadir.is_dir(): - raise ValueError(f'Not look a data dir: {datadir!s}') - - for subdir in sorted(datadir.glob('*')): - if not subdir.is_dir(): - warnings.warn(f'Not looks a dir: {subdir!s}', stacklevel=2) - continue - - if not bool(list(subdir.glob('*.*'))): - warnings.warn(f'No data in subdir: {subdir!s}', stacklevel=2) - continue - - yield TData( - subdir, - sorted( - inp for inp in subdir.glob(pattern) if inp.is_file() - ), - load_data_or_path(subdir / 'e', should_exist, default={}), - load_data_or_path(subdir / 'o', should_exist, default={}), - load_data_or_path(subdir / 's', should_exist, load=False), - load_data_or_path(subdir / 'q', should_exist, default=''), - load_data_or_path(subdir / 'c', should_exist, default={}), - ) - -# vim:sw=4:ts=4:et: diff --git a/tests/api/open/common.py b/tests/api/open/common.py deleted file mode 100644 index fd415596..00000000 --- a/tests/api/open/common.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Copyright (C) 2021 Satoru SATOH -# License: MIT -# -# pylint: disable=missing-docstring -import unittest - -from ... import base - - -class BaseTestCase(base.TDataCollector, unittest.TestCase): - target = 'open' - -# vim:sw=4:ts=4:et: diff --git a/tests/api/open/test_basics.py b/tests/api/open/test_basics.py index 421bcf8d..d44d34fe 100644 --- a/tests/api/open/test_basics.py +++ b/tests/api/open/test_basics.py @@ -1,37 +1,49 @@ # -# Copyright (C) 2012 - 2019 Satoru SATOH -# License: MIT +# Copyright (C) 2012 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # -# pylint: disable=missing-docstring, invalid-name, no-member -import pathlib +# pylint: disable=missing-docstring, no-member +"""Test cases for api.open.""" +from __future__ import annotations + import pickle -import tempfile +import typing + +import pytest import anyconfig.api._open as TT import anyconfig.api._load as LD -from . import common +from ... import common + +if typing.TYPE_CHECKING: + import pathlib + + +NAMES: tuple[str, ...] 
= ("ipath", "exp", "opts") +DATA: list[ + tuple[pathlib.Path, typing.Optional[dict], dict] +] = common.load_data_for_testfile(__file__, values=(("e", None), ("o", {}))) + +DATA_IDS: list[str] = common.get_test_ids(DATA) + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_open_text_io(ipath, exp, opts): + with TT.open(ipath, **opts) as inp: + assert LD.load(inp, **opts) == exp -class TestCase(common.BaseTestCase): - def test_open_text_io(self): - for data in self.each_data(): - with TT.open(data.inp_path, **data.opts) as inp: - self.assertEqual(LD.loads(inp.read(), **data.opts), data.inp) +def test_open_byte_io(tmp_path): + cnf = {"a": 1, "b": "b"} - def test_open_byte_io(self): - cnf = dict(a=1, b='b') + path = tmp_path / "test.pickle" + pickle.dump(cnf, path.open(mode="wb")) - with tempfile.TemporaryDirectory() as workdir: - path = pathlib.Path(workdir) / 'test.pkl' - pickle.dump(cnf, path.open(mode='wb')) + opts = {"ac_parser": "pickle"} - with TT.open(path) as fio: - self.assertEqual(fio.mode, 'rb') - self.assertEqual( - LD.loads(fio.read(), ac_parser='pickle'), - LD.load(path) - ) + with TT.open(path, **opts) as fio: + assert fio.mode == "rb" + data: bytes = fio.read() -# vim:sw=4:ts=4:et: + assert LD.loads(data, **opts) == cnf diff --git a/tests/api/single_load/common.py b/tests/api/single_load/common.py deleted file mode 100644 index 96d4fd14..00000000 --- a/tests/api/single_load/common.py +++ /dev/null @@ -1,41 +0,0 @@ -# -# Copyright (C) 2021 Satoru SATOH -# License: MIT -# -# pylint: disable=missing-docstring -import unittest - -import anyconfig.api._load as TT - -from ... 
import base - - -class Collector(base.TDataCollector): - - @staticmethod - def target_fn(*args, **kwargs): - return TT.single_load(*args, **kwargs) - - -class TestCase(unittest.TestCase, Collector): - - def setUp(self): - self.init() - - def test_single_load(self): - for data in self.each_data(): - self.assertEqual( - self.target_fn(data.inp_path, **data.opts), - data.exp, - data - ) - - def test_single_load_intentional_failures(self): - for data in self.each_data(): - with self.assertRaises(AssertionError): - self.assertEqual( - self.target_fn(data.inp_path, **data.opts), - None - ) - -# vim:sw=4:ts=4:et: diff --git a/tests/api/single_load/constants.py b/tests/api/single_load/constants.py new file mode 100644 index 00000000..f9029706 --- /dev/null +++ b/tests/api/single_load/constants.py @@ -0,0 +1,11 @@ +# +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +""""Constants for tests cases of anyconfig.api.single_load.""" +from __future__ import annotations + +import anyconfig.api + + +LOADER_TYPES = frozenset(anyconfig.api.list_types()) diff --git a/tests/api/single_load/test_ac_parser.py b/tests/api/single_load/test_ac_parser.py index ee123afc..d09e84ea 100644 --- a/tests/api/single_load/test_ac_parser.py +++ b/tests/api/single_load/test_ac_parser.py @@ -1,13 +1,34 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2012 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring -from . import common +"""Test cases for anyconfig.api.single_load with ac_parser argument.""" +from __future__ import annotations +import typing -class TestCase(common.TestCase): - kind = 'ac_parser' - pattern = '*.conf' +import pytest -# vim:sw=4:ts=4:et: +import anyconfig.api._load as TT + +from ... import common + +if typing.TYPE_CHECKING: + import pathlib + + +NAMES: tuple[str, ...] 
= ("ipath", "opts", "exp") +DATA: list = common.load_data_for_testfile( + __file__, (("o", {}), ("e", None)) +) +DATA_IDS: list[str] = common.get_test_ids(DATA) + + +def test_data() -> None: + assert DATA + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_single_load(ipath: pathlib.Path, opts: dict, exp) -> None: + assert TT.single_load(ipath, **opts) == exp diff --git a/tests/api/single_load/test_basics.py b/tests/api/single_load/test_basics.py index 2b6853a9..026025e7 100644 --- a/tests/api/single_load/test_basics.py +++ b/tests/api/single_load/test_basics.py @@ -1,95 +1,105 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2012 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring +from __future__ import annotations + import collections import pathlib +import pytest + import anyconfig.api._load as TT import anyconfig.parsers from anyconfig.api import ( UnknownFileTypeError, UnknownProcessorTypeError ) -from ... import base -from . import common +from ... import common JSON_PARSER = anyconfig.parsers.find(None, 'json') +NAMES: tuple[str, ...] = ("ipath", "opts", "exp") +DATA: list = common.load_data_for_testfile( + __file__, (("o", {}), ("e", None)) +) +DATA_IDS: list[str] = common.get_test_ids(DATA) + +NAMES_2: tuple[str, ...] 
= ("ipath", "exp") +DATA_2: list = [(ipath, exp) for ipath, _, exp in DATA] + + +def test_data() -> None: + assert DATA + class MyDict(collections.OrderedDict): """My original dict class keep key orders.""" -class TestCase(common.TestCase): - - def test_single_load_from_stream(self): - for data in self.each_data(): - self.assertEqual( - TT.single_load(data.inp_path.open(), **data.opts), - data.exp - ) - - def test_single_load_from_path_str(self): - for data in self.each_data(): - self.assertEqual( - TT.single_load(str(data.inp_path), **data.opts), - data.exp - ) - - def test_single_load_with_ac_parser_by_instance(self): - for data in self.each_data(): - self.assertEqual( - TT.single_load(data.inp_path, ac_parser=JSON_PARSER), - data.exp - ) - - def test_single_load_with_ac_parser_by_id(self): - for data in self.each_data(): - self.assertEqual( - TT.single_load(data.inp_path, ac_parser=JSON_PARSER.cid()), - data.exp - ) - - def test_single_load_with_ac_ordered(self): - for data in self.each_data(): - self.assertEqual( - TT.single_load(data.inp_path, ac_ordered=True), - collections.OrderedDict(data.exp) - ) - - def test_single_load_with_ac_dict(self): - for data in self.each_data(): - res = TT.single_load(data.inp_path, ac_dict=MyDict) - self.assertTrue(isinstance(res, MyDict)) - self.assertEqual(res, MyDict(**data.exp)) - - def test_single_load_missing_file_failures(self): - with self.assertRaises(FileNotFoundError): - TT.single_load('not_exist.json') - - def test_single_load_unknown_file_type_failures(self): - with self.assertRaises(UnknownFileTypeError): - TT.single_load('dummy.txt') - - def test_single_load_invalid_parser_object_failures(self): - with self.assertRaises(ValueError): - TT.single_load('dummy.txt', ac_parser=object()) - - def test_single_load_unknown_processor_type_failures(self): - for data in self.each_data(): - with self.assertRaises(UnknownProcessorTypeError): - TT.single_load( - data.inp_path, ac_parser='proc_does_not_exist' - ) - - def 
test_single_load_ignore_missing(self): - inp = pathlib.Path() / 'conf_file_not_exist.json' - assert not inp.exists() - - res = TT.single_load(inp, ac_parser='json', ac_ignore_missing=True) - self.assertEqual(res, base.NULL_CNTNR) - -# vim:sw=4:ts=4:et: +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_single_load_from_stream(ipath, opts, exp): + assert TT.single_load(ipath.open(), **opts) == exp + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_single_load_from_path_str(ipath, opts, exp): + assert TT.single_load(str(ipath), **opts) == exp + + +@pytest.mark.parametrize(NAMES_2, DATA_2, ids=DATA_IDS) +def test_single_load_with_ac_parser_by_instance(ipath, exp): + assert TT.single_load(ipath, ac_parser=JSON_PARSER) == exp + + +@pytest.mark.parametrize(NAMES_2, DATA_2, ids=DATA_IDS) +def test_single_load_with_ac_parser_by_id(ipath, exp): + assert TT.single_load(ipath, ac_parser=JSON_PARSER.cid()) == exp + + +@pytest.mark.parametrize(NAMES_2, DATA_2, ids=DATA_IDS) +def test_single_load_with_ac_ordered(ipath, exp): + assert TT.single_load( + ipath, ac_ordered=True + ) == collections.OrderedDict(exp) + + +@pytest.mark.parametrize(NAMES_2, DATA_2, ids=DATA_IDS) +def test_single_load_with_ac_dict(ipath, exp): + res = TT.single_load(ipath, ac_dict=MyDict) + assert isinstance(res, MyDict) + assert res == MyDict(**exp) + + +def test_single_load_missing_file_failures(): + with pytest.raises(FileNotFoundError): + TT.single_load("not_exist.json") + + +def test_single_load_unknown_file_type_failures(): + with pytest.raises(UnknownFileTypeError): + TT.single_load("dummy.txt") + + +def test_single_load_invalid_parser_object_failures(): + with pytest.raises(ValueError): + TT.single_load("dummy.txt", ac_parser=object()) + + +@pytest.mark.parametrize( + ("ipath", ), [(ipath, ) for ipath, _, _ in DATA], ids=DATA_IDS +) +def test_single_load_unknown_processor_type_failures(ipath): + with pytest.raises(UnknownProcessorTypeError): + TT.single_load(ipath, 
ac_parser="proc_does_not_exist") + + +def test_single_load_ignore_missing(): + ipath = pathlib.Path() / 'conf_file_not_exist.json' + assert not ipath.exists() + + assert TT.single_load( + ipath, ac_parser='json', ac_ignore_missing=True + ) == {} diff --git a/tests/api/single_load/test_common.py b/tests/api/single_load/test_common.py deleted file mode 100644 index 4e1fb6cc..00000000 --- a/tests/api/single_load/test_common.py +++ /dev/null @@ -1,33 +0,0 @@ -# -# Copyright (C) 2021 Satoru SATOH -# License: MIT -# -# pylint: disable=invalid-name,missing-docstring -import unittest -import pathlib - -from . import common -from ... import base - - -CUR_DIR = pathlib.Path(__file__).parent - - -class TestCase(common.TestCase): - - def test_members(self): - self.assertNotEqual(self.target, base.TDataCollector.target) - self.assertEqual(self.target, CUR_DIR.name) - self.assertEqual( - self.root, - CUR_DIR.parent.parent / 'res' / self.target / self.kind - ) - self.assertTrue(self.datasets) - - @unittest.skip("I have no idea how to implement this.") - def test_target_fn(self): - self.assertEqual( # This is not satisfied clearly. - self.target_fn, common.TT.single_load - ) - -# vim:sw=4:ts=4:et: diff --git a/tests/api/single_load/test_multi_types.py b/tests/api/single_load/test_multi_types.py index 72cbb1a0..f3c3decc 100644 --- a/tests/api/single_load/test_multi_types.py +++ b/tests/api/single_load/test_multi_types.py @@ -1,13 +1,36 @@ # -# Copyright (C) 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring -from . import common +"""Test cases for anyconfig.api.single_load with multi-type inputs.""" +from __future__ import annotations +import typing -class TestCase(common.TestCase): - kind = 'multi_types' - pattern = '*.*' +import pytest -# vim:sw=4:ts=4:et: +import anyconfig.api._load as TT + +from ... import common + +if typing.TYPE_CHECKING: + import pathlib + + +NAMES: tuple[str, ...] 
= ("ipath", "opts", "exp") +DATA: list = common.load_data_for_testfile( + __file__, (("o", {}), ("e", None)) +) +DATA_IDS: list[str] = common.get_test_ids(DATA) + + +def test_data() -> None: + assert DATA + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_single_load( + ipath: pathlib.Path, opts: dict, exp +): + assert TT.single_load(ipath, **opts) == exp diff --git a/tests/api/single_load/test_optional_types.py b/tests/api/single_load/test_optional_types.py deleted file mode 100644 index 5ed88d31..00000000 --- a/tests/api/single_load/test_optional_types.py +++ /dev/null @@ -1,29 +0,0 @@ -# -# Copyright (C) 2021 Satoru SATOH -# License: MIT -# -# pylint: disable=missing-docstring -import unittest - -import anyconfig.api - -from . import common - - -LOADER_TYPES = frozenset(anyconfig.api.list_types()) - - -@unittest.skipIf('yaml' not in LOADER_TYPES, - 'yaml loader is not available') -class YamlTestCase(common.TestCase): - kind = 'yaml' - pattern = '*.yml' - - -@unittest.skipIf('toml' not in LOADER_TYPES, - 'toml loader is not available') -class TomlTestCase(YamlTestCase): - kind = 'toml' - pattern = '*.toml' - -# vim:sw=4:ts=4:et: diff --git a/tests/api/single_load/test_primitives.py b/tests/api/single_load/test_primitives.py index d2c144f1..1a83acf4 100644 --- a/tests/api/single_load/test_primitives.py +++ b/tests/api/single_load/test_primitives.py @@ -1,12 +1,34 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2012 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring -from . import common +"""Test cases for anyconfig.api.single_load to load primitive types.""" +from __future__ import annotations +import typing -class TestCase(common.TestCase): - kind = 'primitives' +import pytest -# vim:sw=4:ts=4:et: +import anyconfig.api._load as TT + +from ... import common + +if typing.TYPE_CHECKING: + import pathlib + + +NAMES: tuple[str, ...] 
= ("ipath", "opts", "exp") +DATA: list = common.load_data_for_testfile( + __file__, (("o", {}), ("e", None)) +) +DATA_IDS: list[str] = common.get_test_ids(DATA) + + +def test_data() -> None: + assert DATA + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_single_load(ipath: pathlib.Path, opts: dict, exp) -> None: + assert TT.single_load(ipath, **opts) == exp diff --git a/tests/api/single_load/test_query.py b/tests/api/single_load/test_query.py index acdd600c..9d2abd85 100644 --- a/tests/api/single_load/test_query.py +++ b/tests/api/single_load/test_query.py @@ -1,49 +1,53 @@ # -# Copyright (C) 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # -# pylint: disable=missing-docstring -import unittest - -import anyconfig.query - -from . import common - - -@unittest.skipIf(not anyconfig.query.SUPPORTED, - 'jmespath lib is not available') -class TestCase(common.TestCase): - kind = 'query' - - def test_single_load(self): - for data in self.each_data(): - self.assertEqual( - self.target_fn( - data.inp_path, ac_query=data.query.strip(), **data.opts - ), - data.exp, - f'{data.datadir!s}, {data.inp_path!s}' - ) - - def test_single_load_with_invalid_query_string(self): - for data in self.each_data(): - self.assertEqual( - self.target_fn( - data.inp_path, ac_query=None, **data.opts - ), - self.target_fn(data.inp_path, **data.opts), - f'{data.datadir!s}, {data.inp_path!s}' - ) - - def test_single_load_intentional_failures(self): - for data in self.each_data(): - with self.assertRaises(AssertionError): - exp = dict(z=1, zz='zz', zzz=[1, 2, 3], zzzz=dict(z=0)) - self.assertEqual( - self.target_fn( - data.inp_path, ac_query=data.query, **data.opts - ), - exp - ) - -# vim:sw=4:ts=4:et: +# pylint: disable=missing-docstring, unused-import +"""Test cases for anyconfig.api.single_load to load primitive types.""" +from __future__ import annotations + +import typing + +import pytest + +import anyconfig.api._load as 
TT + +try: + import anyconfig.query.query # noqa: F401 +except ImportError: + pytest.skip( + "Required query module is not available", + allow_module_level=True + ) + +from ... import common + +if typing.TYPE_CHECKING: + import pathlib + + +NAMES: tuple[str, ...] = ("ipath", "exp", "query", "opts") +DATA: list = common.load_data_for_testfile( + __file__, (("e", None), ("q", ""), ("o", {})) +) +DATA_IDS: list[str] = common.get_test_ids(DATA) + +DATA_2 = [(i, o) for i, _, _, o in DATA] + + +def test_data() -> None: + assert DATA + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_single_load(ipath: pathlib.Path, exp, query, opts): + assert TT.single_load(ipath, ac_query=query.strip(), **opts) == exp + + +@pytest.mark.parametrize(("ipath", "opts"), DATA_2, ids=DATA_IDS) +def test_single_load_with_invalid_query_string( + ipath: pathlib.Path, opts +): + assert TT.single_load( + ipath, ac_query=None, **opts + ) == TT.single_load(ipath, **opts) diff --git a/tests/api/single_load/test_schema.py b/tests/api/single_load/test_schema.py index ffba02bc..62e85889 100644 --- a/tests/api/single_load/test_schema.py +++ b/tests/api/single_load/test_schema.py @@ -1,53 +1,85 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2012 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # -# pylint: disable=missing-docstring +# pylint: disable=missing-docstring, unused-import +"""Test cases for anyconfig.api.single_load with schema options.""" +from __future__ import annotations + import pathlib -import tempfile -import unittest +import typing import warnings -import anyconfig.schema +import pytest + +import anyconfig.api._load as TT from anyconfig.api import ValidationError -from . 
import common - - -SCM_NG_0 = '{"type": "object", "properties": {"a": {"type": "string"}}}' - - -@unittest.skipIf(not anyconfig.schema.SUPPORTED, - 'jsonschema lib is not available') -class TestCase(common.TestCase): - kind = 'schema' - - def test_single_load_with_validateion_failures(self): - with tempfile.TemporaryDirectory() as tdir: - wdir = pathlib.Path(tdir) - scm = wdir / 'scm.json' - scm.write_text(SCM_NG_0) - - for data in self.each_data(): - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter('always') - self.assertEqual( - self.target_fn( - data.inp_path, ac_schema=scm, ac_schema_safe=True, - **data.opts - ), - None - ) - self.assertTrue(len(warns) > 0) - self.assertTrue( - issubclass(warns[-1].category, UserWarning) - ) - self.assertTrue('scm=' in str(warns[-1].message)) - - with self.assertRaises(ValidationError): - self.target_fn( - data.inp_path, ac_schema=scm, ac_schema_safe=False - ) - -# vim:sw=4:ts=4:et: +from ... import common + +try: + import jsonschema # noqa: F401 +except ImportError: + pytest.skip( + "Required jsonschema lib is not available.", + allow_module_level=True + ) + + +SCM_NG_0 = '''{ + "type": "object", + "properties": {"key_never_exist": {"type": "string", "required": true}} +}''' + + +def ipath_to_scm_path(ipath: pathlib.Path) -> typing.Optional[pathlib.Path]: + basename: str = ipath.name.replace(ipath.suffix, "") + candidates = list((ipath.parent / "s").glob(f"{basename}.*")) + if candidates: + return candidates[0] + + return None + + +NAMES: tuple[str, ...] 
= ("ipath", "exp", "opts", "scm") +DATA: list = [ + (i, e, o, ipath_to_scm_path(i)) for i, o, e + in common.load_data_for_testfile(__file__) +] +DATA_IDS: list[str] = common.get_test_ids(DATA) + + +def test_data() -> None: + assert DATA + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_single_load(ipath, exp, opts, scm): + assert scm, f"Not found: {scm!s} [{ipath!s}" + assert TT.single_load(ipath, ac_schema=scm, **opts) == exp + + +@pytest.mark.parametrize( + ("ipath", "opts"), + [(ipath, opts) for ipath, _, opts, _ in DATA[:1]], + ids=DATA_IDS[:1] +) +def test_single_load_failures( + ipath, opts, tmp_path: pathlib.Path +) -> None: + scm = tmp_path / 'scm.json' + scm.write_text(SCM_NG_0) + + with warnings.catch_warnings(record=True) as warns: + warnings.simplefilter('always') + res = TT.single_load( + ipath, ac_schema=scm, ac_schema_safe=True, **opts + ) + assert res is None + assert len(warns) > 0 + assert issubclass(warns[-1].category, UserWarning) + assert 'scm=' in str(warns[-1].message) + + with pytest.raises(ValidationError): + TT.single_load(ipath, ac_schema=scm, ac_schema_safe=False) diff --git a/tests/api/single_load/test_template.py b/tests/api/single_load/test_template.py index 6d0a742f..b422528a 100644 --- a/tests/api/single_load/test_template.py +++ b/tests/api/single_load/test_template.py @@ -1,59 +1,52 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2012 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # -# pylint: disable=missing-docstring -import pathlib -import tempfile -import unittest +# pylint: disable=missing-docstring, unused-import +"""Test cases for anyconfig.api.single_load with template args.""" +from __future__ import annotations + import warnings -import anyconfig.template - -from . 
import common - - -@unittest.skipIf(not anyconfig.template.SUPPORTED, - 'jinja2 template lib is not available') -class TestCase(common.TestCase): - kind = 'template' - pattern = '*.j2' - - def test_single_load(self): - for data in self.each_data(): - self.assertEqual( - self.target_fn( - data.inp_path, ac_context=data.ctx, **data.opts - ), - data.exp, - f'{data.datadir!s}, {data.inp_path!s}' - ) - - def test_single_load_from_invalid_template(self): - with tempfile.TemporaryDirectory() as tdir: - wdir = pathlib.Path(tdir) - inp = wdir / 'test.json' - inp.write_text('{"a": "{{ a"}') # broken template string. - - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter('always') - res = self.target_fn( - inp, ac_template=True, ac_context=dict(a=1) - ) - self.assertEqual(res, dict(a='{{ a')) - self.assertTrue(len(warns) > 0) - self.assertTrue(issubclass(warns[-1].category, UserWarning)) - self.assertTrue('ailed to compile ' in str(warns[-1].message)) - - def test_single_load_intentional_failures(self): - ng_exp = dict(z=1, zz='zz', zzz=[1, 2, 3], zzzz=dict(z=0)) - for data in self.each_data(): - with self.assertRaises(AssertionError): - self.assertEqual( - self.target_fn( - data.inp_path, ac_context=data.ctx, **data.opts - ), - ng_exp - ) - -# vim:sw=4:ts=4:et: +import pytest + +import anyconfig.api._load as TT +try: + import anyconfig.template.jinja2 # noqa: F401 +except ImportError: + pytest.skip( + "Requried jinja2 lib is not available.", + allow_module_level=True + ) + +from ... import common + + +NAMES: tuple[str, ...] 
= ("ipath", "ctx", "exp", "opts") +DATA: list = common.load_data_for_testfile( + __file__, (("c", {}), ("e", None), ("o", {})) +) +DATA_IDS: list[str] = common.get_test_ids(DATA) + + +def test_data() -> None: + assert DATA + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_single_load(ipath, ctx, exp, opts): + assert TT.single_load(ipath, ac_context=ctx, **opts) == exp + + +def test_single_load_from_invalid_template(tmp_path): + ipath = tmp_path / "test.json" + ipath.write_text('{"a": "{{ a"}') # broken template string. + + with warnings.catch_warnings(record=True) as warns: + warnings.simplefilter('always') + res = TT.single_load(ipath, ac_template=True, ac_context={"a": 1}) + + assert res == {"a": '{{ a'} + assert len(warns) > 0 + assert issubclass(warns[-1].category, UserWarning) + assert 'ailed to compile ' in str(warns[-1].message) diff --git a/tests/api/single_load/test_toml.py b/tests/api/single_load/test_toml.py new file mode 100644 index 00000000..102e6063 --- /dev/null +++ b/tests/api/single_load/test_toml.py @@ -0,0 +1,32 @@ +# +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +# pylint: disable=missing-docstring +"""Test cases for anyconfig.load with input of other file types.""" +from __future__ import annotations + +import pytest + +import anyconfig.api._load as TT + +from ... import common +from .constants import LOADER_TYPES + + +NAMES: tuple[str, ...] = ("ipath", "exp") +DATA: list = common.load_data_for_testfile(__file__, (("e", None), )) +DATA_IDS: list[str] = common.get_test_ids(DATA) + + +def test_data() -> None: + assert DATA + + +@pytest.mark.skipif( + "toml" not in LOADER_TYPES, + reason="toml lib is not availabla." 
+) +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_single_load_for_toml_files(ipath, exp): + assert TT.single_load(ipath) == exp diff --git a/tests/api/single_load/test_yaml.py b/tests/api/single_load/test_yaml.py new file mode 100644 index 00000000..aa2d8c1b --- /dev/null +++ b/tests/api/single_load/test_yaml.py @@ -0,0 +1,32 @@ +# +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +# pylint: disable=missing-docstring +"""Test cases for anyconfig.load with input of other file types.""" +from __future__ import annotations + +import pytest + +import anyconfig.api._load as TT + +from ... import common +from .constants import LOADER_TYPES + + +NAMES: tuple[str, ...] = ("ipath", "exp") +DATA: list = common.load_data_for_testfile(__file__, (("e", None), )) +DATA_IDS: list[str] = common.get_test_ids(DATA) + + +def test_data() -> None: + assert DATA + + +@pytest.mark.skipif( + "yaml" not in LOADER_TYPES, + reason="yaml loader is not availabla." +) +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_single_load_for_yaml_files(ipath, exp): + assert TT.single_load(ipath) == exp diff --git a/tests/api/test_utils.py b/tests/api/test_utils.py index ce1bb5ec..31687e4f 100644 --- a/tests/api/test_utils.py +++ b/tests/api/test_utils.py @@ -1,32 +1,31 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH +# Copyright (C) 2012 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring -r"""Test cases for anyconfig.utils.files. 
-""" +r"""Test cases for anyconfig.utils.files.""" import pathlib -import unittest + +import pytest import anyconfig.api.utils as TT from anyconfig.ioinfo import make as ioinfo_make -class TestCase(unittest.TestCase): - - def test_are_same_file_types(self): - fun = TT.are_same_file_types - this_py = pathlib.Path(__file__) - this = ioinfo_make(this_py) - other = ioinfo_make(this_py.parent / 'setup.cfg') +THIS_PY = pathlib.Path(__file__) +THIS = ioinfo_make(THIS_PY) +OTHER = ioinfo_make(THIS_PY.parent / "pyproject.toml") - for inp, exp in (([], False), - ([this], True), - ([this, this], True), - ([this, other], False), - ([this, other], False), - ): - (self.assertTrue if exp else self.assertFalse)(fun(inp)) -# vim:sw=4:ts=4:et: +@pytest.mark.parametrize( + ("obj", "exp"), + (([], False), + ([THIS], True), + ([THIS, THIS], True), + ([THIS, OTHER], False), + ([THIS, OTHER], False), + ) +) +def test_are_same_file_types(obj, exp): + assert TT.are_same_file_types(obj) == exp diff --git a/tests/backend/base/test_dumpers.py b/tests/backend/base/test_dumpers.py index b8816323..29adcbb6 100644 --- a/tests/backend/base/test_dumpers.py +++ b/tests/backend/base/test_dumpers.py @@ -1,27 +1,19 @@ # -# Copyright (C) 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring, invalid-name -import tempfile -import unittest +import pytest import anyconfig.backend.base.dumpers as TT -class DumperMixinTestCase(unittest.TestCase): - - def test_ropen(self): - with tempfile.TemporaryDirectory() as temp_dir: - with TT.DumperMixin().wopen(temp_dir + '/test.txt') as fio: - self.assertEqual(fio.mode, 'w') - - -class BinaryDumperMixinTestCase(unittest.TestCase): - - def test_ropen(self): - with tempfile.TemporaryDirectory() as temp_dir: - with TT.BinaryDumperMixin().wopen(temp_dir + '/test.txt') as fio: - self.assertEqual(fio.mode, 'wb') - -# vim:sw=4:ts=4:et: +@pytest.mark.parametrize( + ("cls", "mode"), + 
((TT.DumperMixin, "w"), + (TT.BinaryDumperMixin, "wb"), + ), +) +def test_dumper_mixin_wopen(cls, mode, tmp_path): + with cls().wopen(tmp_path / "test.txt") as fio: + assert fio.mode == mode diff --git a/tests/backend/base/test_loaders.py b/tests/backend/base/test_loaders.py index 1ba3fc3c..9cc61119 100644 --- a/tests/backend/base/test_loaders.py +++ b/tests/backend/base/test_loaders.py @@ -1,27 +1,19 @@ # -# Copyright (C) 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring, invalid-name -import unittest +import pytest import anyconfig.backend.base.loaders as TT -FILE_PATH = __file__ - - -class LoaderMixinTestCase(unittest.TestCase): - - def test_ropen(self): - with TT.LoaderMixin().ropen(FILE_PATH) as fio: - self.assertEqual(fio.mode, 'r') - - -class BinaryLoaderMixinTestCase(unittest.TestCase): - - def test_ropen(self): - with TT.BinaryLoaderMixin().ropen(FILE_PATH) as fio: - self.assertEqual(fio.mode, 'rb') - -# vim:sw=4:ts=4:et: +@pytest.mark.parametrize( + ("cls", "mode"), + ((TT.LoaderMixin, "r"), + (TT.BinaryLoaderMixin, "rb"), + ), +) +def test_loader_mixin_ropen(cls, mode): + with cls().ropen(__file__) as fio: + assert fio.mode == mode diff --git a/tests/backend/base/test_parsers.py b/tests/backend/base/test_parsers.py index 3c71c740..b0fc90a5 100644 --- a/tests/backend/base/test_parsers.py +++ b/tests/backend/base/test_parsers.py @@ -1,10 +1,12 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2012 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # -# pylint: disable=missing-docstring, invalid-name +# pylint: disable=missing-docstring,protected-access +"""Test cases for anyconfig.backend.base.parsers.""" +from __future__ import annotations + import pathlib -import unittest import anyconfig.backend.base.parsers as TT import anyconfig.ioinfo @@ -13,26 +15,23 @@ MZERO = TT.Parser()._container_factory()() -class 
TestCase(unittest.TestCase): - - def setUp(self): - self.psr = TT.Parser() +def test_type(): + assert TT.Parser().type() == str(TT.Parser._type) - def test_10_type(self): - self.assertEqual(self.psr.type(), str(TT.Parser._type)) - def test_20_loads__null_content(self): - cnf = self.psr.loads('') - self.assertEqual(cnf, MZERO) - self.assertTrue(isinstance(cnf, type(MZERO))) +def test_loads__null_content(): + psr = TT.Parser() + cnf = psr.loads('') + assert cnf == MZERO + assert isinstance(cnf, type(MZERO)) - def test_30_load__ac_ignore_missing(self): - cpath = pathlib.Path.cwd() / 'conf_file_not_exist.json' - assert not cpath.exists() - ioi = anyconfig.ioinfo.make(str(cpath)) - cnf = self.psr.load(ioi, ac_ignore_missing=True) - self.assertEqual(cnf, MZERO) - self.assertTrue(isinstance(cnf, type(MZERO))) +def test_load__ac_ignore_missing(): + cpath = pathlib.Path.cwd() / 'conf_file_not_exist.json' + assert not cpath.exists() -# vim:sw=4:ts=4:et: + psr = TT.Parser() + ioi = anyconfig.ioinfo.make(str(cpath)) + cnf = psr.load(ioi, ac_ignore_missing=True) + assert cnf == MZERO + assert isinstance(cnf, type(MZERO)) diff --git a/tests/backend/base/test_utils.py b/tests/backend/base/test_utils.py index dc0e1f5d..d523d61e 100644 --- a/tests/backend/base/test_utils.py +++ b/tests/backend/base/test_utils.py @@ -1,48 +1,36 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2012 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring -"""Test cases for anyconfig.backend.base.utils. 
-""" -import pathlib -import tempfile -import unittest - -import anyconfig.backend.base.utils as TT - - -FILENAME = 'file_not_exist.txt' +"""Test cases for anyconfig.backend.base.utils.""" +from __future__ import annotations +import pathlib -class TestCase(unittest.TestCase): +import pytest - def test_not_implemented(self): - with self.assertRaises(NotImplementedError): - TT.not_implemented() +import anyconfig.backend.base.utils as TT - def test_ensure_outdir_exists_for_file_if_it_exists(self): - outdir = pathlib.Path.cwd() - outfile = outdir / FILENAME - TT.ensure_outdir_exists(outfile) - TT.ensure_outdir_exists(str(outfile)) - self.assertTrue(outdir.exists()) +FILENAME: str = "file_not_exist.txt" - def test_ensure_outdir_exists_for_file_if_it_does_not_exist(self): - with tempfile.TemporaryDirectory() as outdir: - outdir = pathlib.Path(outdir) - outpath = outdir / FILENAME - TT.ensure_outdir_exists(outpath) - self.assertTrue(outdir.exists()) +def test_not_implemented(): + with pytest.raises(NotImplementedError): + TT.not_implemented() - def test_ensure_outdir_exists_for_file_if_its_parent_does_not_exist(self): - with tempfile.TemporaryDirectory() as parent: - outdir = pathlib.Path(parent) / 'a' / 'b' / 'c' - outpath = outdir / FILENAME - TT.ensure_outdir_exists(outpath) - self.assertTrue(outdir.exists()) +@pytest.mark.parametrize( + ("rel_path", ), + ((FILENAME, ), + ("a/b/c", ), + ), +) +def test_ensure_outdir_exists( + rel_path: str, tmp_path: pathlib.Path +) -> None: + outpath = tmp_path / rel_path -# vim:sw=4:ts=4:et: + TT.ensure_outdir_exists(outpath) + assert outpath.parent.exists() diff --git a/tests/backend/common.py b/tests/backend/common.py new file mode 100644 index 00000000..1eca5eb0 --- /dev/null +++ b/tests/backend/common.py @@ -0,0 +1,86 @@ +# +# Copyright (C) 2023, 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +# pylint: disable=missing-docstring,too-few-public-methods +r"""Common functions for test cases of loaders and dumpers.""" +from 
__future__ import annotations + +import importlib +import pathlib +import re +import typing + +import anyconfig.ioinfo +import pytest + +from .. import common + + +NAMES: tuple[str, ...] = ("ipath", "opts", "exp") +PATH_PATTERN: re.Pattern = re.compile( + r".+[/\\:\.]test_([^_]+)_([^_]+).py" +) + + +def get_name(testfile: str, pattern: re.Pattern = PATH_PATTERN) -> str: + """Get the name of backend module. + + ex. tests/backend/loaders/json/test_json_stdlib.py + -> "json.stdlib" + """ + match = pattern.match(testfile) + if not match: + raise NameError( + f"Filename does not match expected pattern: {testfile}" + ) + + return ".".join(match.groups()) + + +def get_mod(testfile: str, pattern: re.Pattern = PATH_PATTERN): + """Get the module to test.""" + name = get_name(testfile, pattern=pattern) + mname = f"anyconfig.backend.{name}" + try: + return importlib.import_module(mname) + except ImportError: + pytest.skip( + f"Skip because it failed to import: {mname}", + allow_module_level=True + ) + + return None # To suppress inconsistent-return-statements. + + +def get_test_ids(*args, **opts): + return common.get_test_ids(*args, **opts) + + +def get_test_resdir( + testfile: str, + pattern: re.Pattern = PATH_PATTERN +) -> pathlib.Path: + """Get test resource dir for given test file path. + + ex. 
tests/backend/loaders/json/test_json_stdlib.py + -> tests/res/1/loaders/json.stdlib/ + """ + subdir = pathlib.Path(testfile).parent.parent.name + name = get_name(testfile, pattern=pattern) + + return common.RESOURCE_DIR / subdir / name + + +def load_data_for_testfile( + testfile: str, + **opts +) -> list[tuple[pathlib.Path, dict[str, typing.Any], ...]]: + datadir = get_test_resdir(testfile) + return common.load_data_for_testfile( + testfile, datadir=datadir, **opts + ) + + +def ioinfo_from_path(path: pathlib.Path) -> anyconfig.ioinfo.IOInfo: + return anyconfig.ioinfo.make(path) diff --git a/tests/backend/constants.py b/tests/backend/constants.py new file mode 100644 index 00000000..75cd6677 --- /dev/null +++ b/tests/backend/constants.py @@ -0,0 +1,34 @@ +# +# Copyright (C) 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +# pylint: disable=missing-docstring +"""Constants for tests.backend.*.""" +from __future__ import annotations + +import pathlib + +from . import common as TT + + +CURDIR: pathlib.Path = pathlib.Path(__file__).parent + +TEST_FILES: list[pathlib.Path] = list( + (CURDIR / "loaders").glob("*/test_*.py") +) + +assert TEST_FILES + +MOD_TYPE_DEFAULT: str = "json" + +if any(f for f in TEST_FILES if f.parent.name == MOD_TYPE_DEFAULT): + MOD_TYPE = MOD_TYPE_DEFAULT + MOD_BACKEND: str = "stdlib" +else: + MOD_TYPE: str = TEST_FILES[0].parent.name + MOD_BACKEND: str = TEST_FILES[0].stem.split("_")[-1] + +TEST_FILE = ( + CURDIR / "loaders" / MOD_TYPE / f"test_{MOD_TYPE}_{MOD_BACKEND}.py" +) +TEST_DATADIR = TT.common.RESOURCE_DIR / "loaders" / f"{MOD_TYPE}.{MOD_BACKEND}" diff --git a/tests/backend/dumpers/json/test_json_stdlib.py b/tests/backend/dumpers/json/test_json_stdlib.py index 63018749..50ec8854 100644 --- a/tests/backend/dumpers/json/test_json_stdlib.py +++ b/tests/backend/dumpers/json/test_json_stdlib.py @@ -1,52 +1,58 @@ # -# Copyright (C) 2012 - 2024 Satoru SATOH +# Copyright (C) 2012 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # -# pylint: 
disable=missing-docstring,invalid-name,too-few-public-methods -# pylint: disable=ungrouped-imports -"""Test cases for the loader. -""" -import pathlib +# pylint: disable=missing-docstring +"""Test cases for the dumpers.""" +from __future__ import annotations + import typing import pytest -import tests.common.tdi_base -import tests.common.dumper - - -class TDI(tests.common.tdi_base.TDI): - _cid = tests.common.tdi_base.name_from_path(__file__) - _is_loader = False +from ... import common +if typing.TYPE_CHECKING: + import pathlib -(TT, DATA, DATA_IDS) = TDI().get_all() -if TT is None: +try: + DATA = common.load_data_for_testfile(__file__, load_idata=True) +except FileNotFoundError: pytest.skip( - f"skipping tests: {TDI().cid()} as it's not available.", + f"Not found test data for: {__file__}", allow_module_level=True ) -assert DATA +NAMES: tuple[str, ...] = ("ipath", "idata", "opts", "exp") +DATA_IDS: list[str] = common.get_test_ids(DATA) +Parser = getattr(common.get_mod(__file__), "Parser", None) +assert Parser is not None -class TestCase(tests.common.dumper.TestCase): - psr_cls = TT.Parser - @pytest.mark.parametrize( - ("ipath", "aux"), DATA, ids=DATA_IDS, - ) - def test_dumps( - self, ipath: pathlib.Path, aux: typing.Dict[str, typing.Any], - ): - self._assert_dumps(ipath, aux) +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_dumps(ipath: str, idata, opts: dict, exp: str) -> None: + psr = Parser() + content = psr.dumps(idata, **opts) - @pytest.mark.parametrize( - ("ipath", "aux"), DATA, ids=DATA_IDS, - ) - def test_dump( - self, ipath: pathlib.Path, aux: typing.Dict[str, typing.Any], - tmp_path: pathlib.Path - ): - self._assert_dump(ipath, aux, tmp_path) + assert psr.loads(content, **opts) == idata + assert content == exp + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_dump( + ipath: str, idata, opts: dict, exp: str, tmp_path: pathlib.Path +) -> None: + psr = Parser() + + opath = tmp_path / f"{ipath.stem}.{psr.extensions()[0]}" + 
ioi = common.ioinfo_from_path(opath) + + psr.dump(idata, ioi, **opts) + + assert opath.exists() + assert psr.load(ioi, **opts) == idata + + content = psr.ropen(str(opath)).read() + assert content == exp diff --git a/tests/backend/dumpers/toml/test_toml_tomllib.py b/tests/backend/dumpers/toml/test_toml_tomllib.py index 42d339b3..a72d6801 100644 --- a/tests/backend/dumpers/toml/test_toml_tomllib.py +++ b/tests/backend/dumpers/toml/test_toml_tomllib.py @@ -1,53 +1,62 @@ # -# Copyright (C) 2012 - 2024 Satoru SATOH +# Copyright (C) 2012 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # -# pylint: disable=missing-docstring,invalid-name,too-few-public-methods -# pylint: disable=ungrouped-imports -"""Test cases for the loader. -""" -import pathlib +# pylint: disable=missing-docstring +"""Test cases for the dumper.""" +from __future__ import annotations + import typing import pytest -import tests.common.tdi_base -import tests.common.dumper +from ... import common +if typing.TYPE_CHECKING: + import pathlib -class TDI(tests.common.tdi_base.TDI): - _cid = tests.common.tdi_base.name_from_path(__file__) - _is_loader = False +try: + DATA = common.load_data_for_testfile(__file__, load_idata=True) +except FileNotFoundError: + pytest.skip( + f"Not found test data for: {__file__}", + allow_module_level=True + ) -(TT, DATA, DATA_IDS) = TDI().get_all() +NAMES: tuple[str, ...] 
= ("ipath", "idata", "opts", "exp") +DATA_IDS: list[str] = common.get_test_ids(DATA) +Parser = getattr(common.get_mod(__file__), "Parser", None) -if TT is None: +if Parser is None: pytest.skip( - f"skipping tests: {TDI().cid()} as it's not available.", + f"Skip test cases: {__file__}", allow_module_level=True ) -assert DATA +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_dumps(ipath: str, idata, opts: dict, exp: str) -> None: + psr = Parser() + content = psr.dumps(idata, **opts) -class TestCase(tests.common.dumper.TestCase): - psr_cls = TT.Parser - exact_match = False + assert psr.loads(content, **opts) == idata + # assert content == exp # This may fail. - @pytest.mark.parametrize( - ("ipath", "aux"), DATA, ids=DATA_IDS, - ) - def test_dumps( - self, ipath: pathlib.Path, aux: typing.Dict[str, typing.Any], - ): - self._assert_dumps(ipath, aux) - @pytest.mark.parametrize( - ("ipath", "aux"), DATA, ids=DATA_IDS, - ) - def test_dump( - self, ipath: pathlib.Path, aux: typing.Dict[str, typing.Any], - tmp_path: pathlib.Path - ): - self._assert_dump(ipath, aux, tmp_path) +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_dump( + ipath: str, idata, opts: dict, exp: str, tmp_path: pathlib.Path +) -> None: + psr = Parser() + + opath = tmp_path / f"{ipath.stem}.{psr.extensions()[0]}" + ioi = common.ioinfo_from_path(opath) + + psr.dump(idata, ioi, **opts) + + assert opath.exists() + assert psr.load(ioi, **opts) == idata + + # content = psr.ropen(str(opath)).read().decode("utf-8") + # assert content == exp # This may fail. 
diff --git a/tests/backend/loaders/json/test_json_stdlib.py b/tests/backend/loaders/json/test_json_stdlib.py index a2b70058..e7455235 100644 --- a/tests/backend/loaders/json/test_json_stdlib.py +++ b/tests/backend/loaders/json/test_json_stdlib.py @@ -1,50 +1,41 @@ # -# Copyright (C) 2012 - 2024 Satoru SATOH +# Copyright (C) 2012 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # -# pylint: disable=missing-docstring,invalid-name,too-few-public-methods -# pylint: disable=ungrouped-imports -"""Test cases for the loader. -""" -import pathlib -import typing +# pylint: disable=missing-docstring,invalid-name +"""Test cases for the loader.""" +from __future__ import annotations import pytest -import tests.common.tdi_base -import tests.common.loader +from ... import common -class TDI(tests.common.tdi_base.TDI): - _cid = tests.common.tdi_base.name_from_path(__file__) - - -(TT, DATA, DATA_IDS) = TDI().get_all() - -if TT is None: +try: + DATA = common.load_data_for_testfile(__file__) +except FileNotFoundError: pytest.skip( - f"skipping tests: {TDI().cid()} as it's not available.", + f"Not found test data for: {__file__}", allow_module_level=True ) -assert DATA +DATA_IDS: list[str] = common.get_test_ids(DATA) +Parser = getattr(common.get_mod(__file__), "Parser", None) +assert Parser is not None -class TestCase(tests.common.loader.TestCase): - psr_cls = TT.Parser - @pytest.mark.parametrize( - ("ipath", "aux"), DATA, ids=DATA_IDS, - ) - def test_loads( - self, ipath: pathlib.Path, aux: typing.Dict[str, typing.Any] - ): - self._assert_loads(ipath, aux) +@pytest.mark.parametrize(common.NAMES, DATA, ids=DATA_IDS) +def test_loads(ipath: str, opts: dict, exp) -> None: + psr = Parser() + content = psr.ropen(ipath).read() - @pytest.mark.parametrize( - ("ipath", "aux"), DATA, ids=DATA_IDS, - ) - def test_load( - self, ipath: pathlib.Path, aux: typing.Dict[str, typing.Any] - ): - self._assert_load(ipath, aux) + assert psr.loads(content, **opts) == exp + + 
+@pytest.mark.parametrize(common.NAMES, DATA, ids=DATA_IDS) +def test_load(ipath: str, opts: dict, exp) -> None: + psr = Parser() + ioi = common.ioinfo_from_path(ipath) + + assert psr.load(ioi, **opts) == exp diff --git a/tests/backend/loaders/properties/test_properties_builtin_functions.py b/tests/backend/loaders/properties/test_properties_builtin_functions.py index 946c6a4f..68a859be 100644 --- a/tests/backend/loaders/properties/test_properties_builtin_functions.py +++ b/tests/backend/loaders/properties/test_properties_builtin_functions.py @@ -19,9 +19,9 @@ @pytest.mark.parametrize( - 'inp,exp', - ((' ', (None, '')), - ('aaa', ('aaa', '')), + "inp,exp", + ((" ", (None, "")), + ("aaa", ("aaa", "")), ), ) def test_parseline_warnings(inp, exp): @@ -30,12 +30,12 @@ def test_parseline_warnings(inp, exp): @pytest.mark.parametrize( - 'inp,exp', - (('aaa:', ('aaa', '')), - (' aaa:', ('aaa', '')), - ('url = http://localhost', ('url', 'http://localhost')), - ('calendar.japanese.type: LocalGregorianCalendar', - ('calendar.japanese.type', 'LocalGregorianCalendar')), + "inp,exp", + (("aaa:", ("aaa", "")), + (" aaa:", ("aaa", "")), + ("url = http://localhost", ("url", "http://localhost")), + ("calendar.japanese.type: LocalGregorianCalendar", + ("calendar.japanese.type", "LocalGregorianCalendar")), ), ) def test_parseline(inp, exp): @@ -43,12 +43,12 @@ def test_parseline(inp, exp): @pytest.mark.parametrize( - 'inp,exp', - (('', None), - ('a: A', 'a: A'), - ('# a: A', None), - ('! a: A', None), - ('a: A # comment', 'a: A # comment'), + "inp,exp", + (("", None), + ("a: A", "a: A"), + ("# a: A", None), + ("! 
a: A", None), + ("a: A # comment", "a: A # comment"), ), ) def test_pre_process_line(inp, exp): @@ -56,9 +56,9 @@ def test_pre_process_line(inp, exp): @pytest.mark.parametrize( - 'inp,exp', - ((r'aaa\:bbb', 'aaa:bbb'), - (r'\\a', r'\a'), + "inp,exp", + ((r"aaa\:bbb", "aaa:bbb"), + (r"\\a", r"\a"), ), ) def test_10_unescape(inp, exp): @@ -66,8 +66,8 @@ def test_10_unescape(inp, exp): @pytest.mark.parametrize( - 'inp,exp', - ((r':=\ ', r'\:\=\\ '), + "inp,exp", + ((r":=\ ", r"\:\=\\ "), ), ) def test_escape(inp, exp): @@ -75,34 +75,34 @@ def test_escape(inp, exp): @pytest.mark.parametrize( - 'inp,exp', - ((':', '\\:'), - ('=', '\\='), - ('a', 'a'), + "inp,exp", + ((":", "\\:"), + ("=", "\\="), + ("a", "a"), ), ) def test_escape_char(inp, exp): assert TT._escape_char(inp) == exp -KEY_0 = 'calendar.japanese.type' -VAL_0 = 'LocalGregorianCalendar' -KV_0 = f'{KEY_0}: {VAL_0}' +KEY_0 = "calendar.japanese.type" +VAL_0 = "LocalGregorianCalendar" +KV_0 = f"{KEY_0}: {VAL_0}" KV_1 = """application/postscript: \\ x=Postscript File;y=.eps,.ps """ @pytest.mark.parametrize( - 'inp,exp', - (('', {}), - (f'# {KV_0}', {}), - (f'! {KV_0}', {}), - (f'{KEY_0}:', {KEY_0: ''}), + "inp,exp", + (("", {}), + (f"# {KV_0}", {}), + (f"! 
{KV_0}", {}), + (f"{KEY_0}:", {KEY_0: ""}), (KV_0, {KEY_0: VAL_0}), - (f'{KV_0}# ...', {KEY_0: f'{VAL_0}# ...'}), - ('key=a\\:b', {'key': 'a:b'}), - (KV_1, {'application/postscript': 'x=Postscript File;y=.eps,.ps'}), + (f"{KV_0}# ...", {KEY_0: f"{VAL_0}# ..."}), + ("key=a\\:b", {"key": "a:b"}), + (KV_1, {"application/postscript": "x=Postscript File;y=.eps,.ps"}), ), ) def test_load(inp, exp): diff --git a/tests/backend/loaders/sh/test_sh_variables_functions.py b/tests/backend/loaders/sh/test_sh_variables_functions.py index 8fdc5f35..dd83ec9c 100644 --- a/tests/backend/loaders/sh/test_sh_variables_functions.py +++ b/tests/backend/loaders/sh/test_sh_variables_functions.py @@ -19,7 +19,7 @@ @pytest.mark.parametrize( - 'inp,exp', + ('inp', 'exp'), (('aaa=', ('aaa', '')), ('aaa=bbb', ('aaa', 'bbb')), ('aaa="bb b"', ('aaa', 'bb b')), @@ -32,9 +32,18 @@ def test_parseline(inp, exp): @pytest.mark.parametrize( - 'inp,exp', + ('inp', 'exp', 'warning'), + (('# ', {}, SyntaxWarning), + ), +) +def test_load__with_warns(inp, exp, warning): + with pytest.warns(warning): + assert TT.load(io.StringIO(inp)) == exp + + +@pytest.mark.parametrize( + ('inp', 'exp'), (('', {}), - ('# ', {}), ('aaa=', {'aaa': ''}), ('aaa=bbb', {'aaa': 'bbb'}), ('aaa=bbb # ...', {'aaa': 'bbb'}), diff --git a/tests/backend/loaders/toml/test_toml_tomllib.py b/tests/backend/loaders/toml/test_toml_tomllib.py index e77de9cd..aa8f9657 100644 --- a/tests/backend/loaders/toml/test_toml_tomllib.py +++ b/tests/backend/loaders/toml/test_toml_tomllib.py @@ -1,56 +1,45 @@ # -# Copyright (C) 2012 - 2024 Satoru SATOH +# Copyright (C) 2012 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # -# pylint: disable=missing-docstring,invalid-name,too-few-public-methods -# pylint: disable=ungrouped-imports -"""Test cases for the loader. 
-""" -import pathlib -import typing +# pylint: disable=missing-docstring +"""Test cases for the loader.""" +from __future__ import annotations import pytest -import tests.common.tdi_base -import tests.common.loader +from ... import common -class TDI(tests.common.tdi_base.TDI): - _cid = tests.common.tdi_base.name_from_path(__file__) - +try: + DATA = common.load_data_for_testfile(__file__) +except FileNotFoundError: + pytest.skip( + f"Not found test data for: {__file__}", + allow_module_level=True + ) -(TT, DATA, DATA_IDS) = TDI().get_all() +DATA_IDS: list[str] = common.get_test_ids(DATA) +Parser = getattr(common.get_mod(__file__), "Parser", None) -if TT is None: +if Parser is None: pytest.skip( - f"skipping tests: {TDI().cid()} as it's not available.", + f"Skip test cases: {__file__}", allow_module_level=True ) -assert DATA +@pytest.mark.parametrize(common.NAMES, DATA, ids=DATA_IDS) +def test_loads(ipath: str, opts: dict, exp) -> None: + psr = Parser() + content = psr.ropen(ipath).read().decode("utf-8") -class TestCase(tests.common.loader.TestCase): - psr_cls = TT.Parser + assert psr.loads(content, **opts) == exp - def _assert_loads( - self, ipath: pathlib.Path, aux: typing.Dict[str, typing.Any] - ): - (exp, opts, psr, _ioi) = self._get_all(ipath, aux) - assert psr.loads(ipath.read_text(), **opts) == exp - @pytest.mark.parametrize( - ("ipath", "aux"), DATA, ids=DATA_IDS, - ) - def test_loads( - self, ipath: pathlib.Path, aux: typing.Dict[str, typing.Any] - ): - self._assert_loads(ipath, aux) +@pytest.mark.parametrize(common.NAMES, DATA, ids=DATA_IDS) +def test_load(ipath: str, opts: dict, exp) -> None: + psr = Parser() + ioi = common.ioinfo_from_path(ipath) - @pytest.mark.parametrize( - ("ipath", "aux"), DATA, ids=DATA_IDS, - ) - def test_load( - self, ipath: pathlib.Path, aux: typing.Dict[str, typing.Any] - ): - self._assert_load(ipath, aux) + assert psr.load(ioi, **opts) == exp diff --git a/tests/backend/loaders/xml/test_xml_etree_functions.py 
b/tests/backend/loaders/xml/test_xml_etree_functions.py index 6e7ac0d5..00842163 100644 --- a/tests/backend/loaders/xml/test_xml_etree_functions.py +++ b/tests/backend/loaders/xml/test_xml_etree_functions.py @@ -1,13 +1,14 @@ -# -*- coding: utf-8 -*- # -# Copyright (C) 2012 - 2024 Satoru SATOH +# Copyright (C) 2012 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring,invalid-name,too-few-public-methods # pylint: disable=ungrouped-imports,protected-access +# pylint: disable=too-many-arguments +# +import collections -import io -import unittest +import pytest import anyconfig.backend.xml.etree as TT @@ -38,195 +39,201 @@ def to_bytes(astr): return bytes(astr, 'utf-8') -class Test_00(unittest.TestCase): - - def test_10__namespaces_from_file(self): - ref = {"http://example.com/ns/config": '', - "http://example.com/ns/config/val": "val"} - xmlfile = io.StringIO(XML_WITH_NS_0) - self.assertEqual(TT._namespaces_from_file(xmlfile), ref) - - def test_20__process_elem_text__whitespaces(self): - (elem, dic, subdic) = (TT.ET.XML(" "), {}, {}) - TT._process_elem_text(elem, dic, subdic) - self.assertTrue(not dic) - self.assertTrue(not subdic) - - def test_22__process_elem_text__wo_attrs_and_children(self): - (elem, dic, subdic) = (TT.ET.XML("A"), {}, {}) - TT._process_elem_text(elem, dic, subdic, text="#text") - self.assertEqual(dic, {"a": 'A'}) - self.assertTrue(not subdic) - - def test_22__process_elem_text__wo_attrs_and_children_parse(self): - (elem, dic, subdic) = (TT.ET.XML("A"), {}, {}) - TT._process_elem_text(elem, dic, subdic, text="#text", - ac_parse_value=True) - self.assertEqual(dic, {"a": 'A'}) - self.assertTrue(not subdic) - - (elem, dic, subdic) = (TT.ET.XML("1"), {}, {}) - TT._process_elem_text(elem, dic, subdic, text="#text", - ac_parse_value=True) - self.assertEqual(dic, {"a": 1}) - self.assertTrue(not subdic) - - def test_24__process_elem_text__w_attrs(self): - (elem, dic, subdic) = (TT.ET.XML("A"), {}, {}) - 
TT._process_elem_text(elem, dic, subdic, text="#text") - self.assertTrue(not dic) - self.assertEqual(subdic, {"#text": 'A'}) - - def test_24__process_elem_text__w_children(self): - (elem, dic, subdic) = (TT.ET.XML("A"), {}, {}) - TT._process_elem_text(elem, dic, subdic, text="#text") - self.assertTrue(not dic) - self.assertEqual(subdic, {"#text": 'A'}) - - def test_30__process_elem_attrs__wo_text_and_children(self): - (elem, dic, subdic) = (TT.ET.XML(""), {}, {}) - TT._process_elem_attrs(elem, dic, subdic) - self.assertTrue(not dic) - self.assertEqual(subdic, {"@attrs": {"id": 'A'}}) - - def test_32__process_elem_attrs__w_text(self): - (elem, dic, subdic) = (TT.ET.XML("AAA"), {}, {}) - TT._process_elem_attrs(elem, dic, subdic) - self.assertTrue(not dic) - self.assertEqual(subdic, {"@attrs": {"id": 'A'}}) - - def test_34__process_elem_attrs__merge_attrs(self): - (elem, dic, subdic) = (TT.ET.XML(""), {}, {}) - TT._process_elem_attrs(elem, dic, subdic, merge_attrs=True) - self.assertEqual(dic, {"a": {"id": 'A'}}) - self.assertTrue(not subdic) - - def test_36__process_elem_attrs__wo_text_and_children_parse(self): - (elem, dic, subdic) = (TT.ET.XML(""), {}, {}) - TT._process_elem_attrs(elem, dic, subdic, ac_parse_value=True) - self.assertTrue(not dic) - self.assertEqual(subdic, {"@attrs": {"id": 1}}) - - (elem, dic, subdic) = (TT.ET.XML(""), {}, {}) - TT._process_elem_attrs(elem, dic, subdic, ac_parse_value=True) - self.assertTrue(not dic) - self.assertEqual(subdic, {"@attrs": {"id": 'A'}}) - - (elem, dic, subdic) = (TT.ET.XML(""), {}, {}) - TT._process_elem_attrs(elem, dic, subdic, ac_parse_value=True) - self.assertTrue(not dic) - self.assertEqual(subdic, {"@attrs": {"id": True}}) - - def test_40__process_children_elems__root(self): - (elem, dic, subdic) = (TT.ET.XML("AB"), {}, - {}) - TT._process_children_elems(elem, dic, subdic) - self.assertEqual(dic, {"list": [{"i": "A"}, {"i": "B"}]}) - self.assertTrue(not subdic) - - def 
test_42__process_children_elems__w_attr(self): - (elem, dic) = (TT.ET.XML("AB"), {}) - subdic = {"id": "xyz"} - ref = subdic.copy() - ref.update({"#children": [{"i": "A"}, {"i": "B"}]}) - - TT._process_children_elems(elem, dic, subdic, children="#children") - self.assertTrue(not dic) - self.assertEqual(subdic, ref, subdic) - - def test_44__process_children_elems__w_children_have_unique_keys(self): - (elem, dic, subdic) = (TT.ET.XML("XY"), {}, {}) - TT._process_children_elems(elem, dic, subdic) - self.assertEqual(dic, {"a": {"x": "X", "y": "Y"}}) - self.assertTrue(not subdic) - - def test_46__process_children_elems__w_merge_attrs(self): - elem = TT.ET.XML("XY") - dic = {"a": {"@attrs": {"z": "Z"}}} - subdic = dic["a"]["@attrs"] - TT._process_children_elems(elem, dic, subdic, merge_attrs=True) - self.assertEqual(dic, {"a": {"x": "X", "y": "Y", "z": "Z"}}, dic) - - -class Test_00_1(unittest.TestCase): - - def _assert_eq_dic_from_snippet(self, snippet, ref, **opts): - self.assertEqual(TT.elem_to_container(TT.ET.XML(snippet), **opts), ref) - - def test_10_elem_to_container__None(self): - self.assertEqual(TT.elem_to_container(None), dict()) - - def test_10_root_to_container__None(self): - self.assertEqual(TT.root_to_container(None), dict()) - - def test_12_elem_to_container__empty(self): - self._assert_eq_dic_from_snippet("", dict(a=None)) - - def test_20_elem_to_container__attrs(self): - ref = dict(a={"@attrs": dict(x='1', y='y')}) - self._assert_eq_dic_from_snippet("", ref) - - def test_30_elem_to_container__child(self): - ref = dict(a=dict(b="b")) - self._assert_eq_dic_from_snippet("b", ref) - - def test_32_elem_to_container__children__same_keys(self): - ref = {'a': [{'b': '1'}, {'b': '2'}]} - self._assert_eq_dic_from_snippet("12", ref) - - def test_34_elem_to_container__children(self): - ref = {'a': {'b': 'b', 'c': 'c'}} - self._assert_eq_dic_from_snippet("bc", ref) - - def test_36_elem_to_container__children__same_keys_w_text(self): - ref = {'a': {'@text': 'aaa', 
'@children': [{'b': '1'}, {'b': '2'}]}} - self._assert_eq_dic_from_snippet("aaa12", ref) - - def test_40_elem_to_container__text(self): - self._assert_eq_dic_from_snippet("A", {'a': 'A'}) - - def test_42_elem_to_container__text_attrs(self): - ref = dict(a={"@attrs": {'x': 'X'}, "@text": "A"}) - self._assert_eq_dic_from_snippet("A", ref) - - def test_50_root_to_container__text_attrs_tags(self): - ref = dict(a={"_attrs": {'x': 'X'}, "_text": "A"}) - tags = dict(attrs="_attrs", text="_text") - self.assertEqual(TT.root_to_container(TT.ET.XML("A"), - dict, {}, tags=tags), - ref) - - -def tree_to_string(tree): - return TT.ET.tostring(tree.getroot()) - - -class Test_00_2(unittest.TestCase): - - def test_00_container_to_etree__None(self): - self.assertTrue(TT.container_to_etree(None) is None) - - def test_10_container_to_etree__text_attrs(self): - ref = to_bytes('A') - obj = dict(a={"@attrs": {'x': 'X', 'y': 'Y'}, "@text": "A"}) - res = TT.container_to_etree(obj) - self.assertEqual(tree_to_string(res), ref) - - def test_12_container_to_etree__text_attrs_tags(self): - ref = to_bytes('A') - obj = dict(a={"_attrs": {'x': 'X', 'y': 'Y'}, "_text": "A"}) - tags = dict(attrs="_attrs", text="_text") - res = TT.container_to_etree(obj, tags=tags) - self.assertEqual(tree_to_string(res), ref) - - def test_20_container_to_etree__child(self): - ref = to_bytes("b") - obj = dict(a=dict(b="b")) - res = TT.container_to_etree(obj) - self.assertEqual(tree_to_string(res), ref) - - def test_22_container_to_etree__children(self): - ref = to_bytes("bc") - obj = {'a': {'@children': [{'b': 'b'}, {'c': 'c'}]}} - res = TT.container_to_etree(obj) - self.assertEqual(tree_to_string(res), ref) +def to_xml_elem(astr: str) -> TT.ElementTree.Element: + """Convert a string to XML element object.""" + return TT.ElementTree.fromstring(astr) + + +@pytest.mark.parametrize( + ("path", "exp"), + (("tests/res/1/loaders/xml.etree/10/100.xml", {}), + ("tests/res/1/loaders/xml.etree/10/200.xml", + 
{"http://example.com/ns/config": "", + "http://example.com/ns/config/val": "val"}), + ), +) +def test__namespaces_from_file(path: str, exp): + assert TT._namespaces_from_file(path) == exp + + +@pytest.mark.parametrize( + ("tag", "nspaces", "exp"), + (("a", {}, "a"), + ("a", {"http://example.com/ns/val/": "val"}, "a"), + ("{http://example.com/ns/val/}a", + {"http://example.com/ns/val/": "val"}, + "val:a"), + ), +) +def test__tweak_ns(tag, nspaces, exp): + assert TT._tweak_ns(tag, nspaces=nspaces) == exp + + +@pytest.mark.parametrize( + ("dics", "exp"), + ((({}, {"a": 1}, {"a": 2}), False), + (({"a": 1}, {"b": 2}, {"b": 3, "c": 0}), False), + (({}, {}), True), + (({"(": 1}, {"b": 2}, {"c": 0}), True), + ), +) +def test__dicts_have_unique_keys(dics, exp): + assert TT._dicts_have_unique_keys(dics) == exp + + +@pytest.mark.parametrize( + ("val", "opts", "exp"), + (("1", {}, "1"), + ("1", {"ac_parse_value": True}, 1), + ), +) +def test__parse_text_parse_text(val, opts, exp): + assert TT._parse_text(val, **opts) == exp + + +@pytest.mark.parametrize( + ("elem_s", "opts", "exp_elem_text", "exp_dic", "exp_subdic"), + ((" ", {}, "", {}, {}), + (" ", {"text": "#text"}, "", {}, {}), + (" ", {}, "", {}, {}), + ("1", {}, "1", {"a": "1"}, {}), + ("1", {"ac_parse_value": True}, "1", {"a": 1}, {}), + ("1", {"text": "#text"}, "1", {}, {"#text": "1"}), + ("1", {"text": "#text", "ac_parse_value": True}, + "1", {}, {"#text": 1}), + ("1", {}, "1", {}, {"@text": "1"}), + ("", {}, None, {}, {}), + ), +) +def test__process_elem_text(elem_s, opts, exp_elem_text, exp_dic, exp_subdic): + (elem, dic, subdic) = (to_xml_elem(elem_s), {}, {}) + TT._process_elem_text(elem, dic, subdic, **opts) + + assert elem.text == exp_elem_text + assert dic == exp_dic + assert subdic == exp_subdic + + +@pytest.mark.parametrize( + ("elem_s", "opts", "exp_dic", "exp_subdic"), + (("", {}, {}, {"@attrs": {"id": "A"}}), + ("AAA", {}, {}, {"@attrs": {"id": "A"}}), + ("", {"merge_attrs": True}, {"a": {"id": "A"}}, {}), 
+ ("", {"ac_parse_value": True}, {}, {"@attrs": {"id": 1}}), + ("", {"ac_parse_value": True}, {}, {"@attrs": {"id": "A"}}), + ("", {"ac_parse_value": True}, {}, + {"@attrs": {"id": True}}), + ), +) +def test__process_elem_attrs(elem_s, opts, exp_dic, exp_subdic): + (elem, dic, subdic) = (to_xml_elem(elem_s), {}, {}) + TT._process_elem_attrs(elem, dic, subdic, **opts) + + assert dic == exp_dic + assert subdic == exp_subdic + + +@pytest.mark.parametrize( + ("elem_s", "opts", "exp_dic", "exp_subdic"), + (("XY", {}, {"a": {"x": "X", "y": "Y"}}, {}), + ("AB", {}, + {"list": [{"i": "A"}, {"i": "B"}]}, {}), + ("AB", {"children": "#children"}, + {"list": [{"i": "A"}, {"i": "B"}]}, {}), + ("XY", {"merge_attrs": True}, + {"a": {"x": "X", "y": "Y", "z": "Z"}}, {}), + ), +) +def test_process_children_elems( + elem_s, opts, exp_dic, exp_subdic +): + (elem, dic, subdic) = (to_xml_elem(elem_s), {}, {}) + TT._process_children_elems(elem, dic, subdic, **opts) + + assert dic == exp_dic + assert subdic == exp_subdic + + +def test_elem_to_container__none(): + assert TT.elem_to_container(None) == {} + assert TT.elem_to_container( + None, container=collections.OrderedDict + ) == collections.OrderedDict() + + +_E2C_DATASETS = _R2C_DATASETS = ( + ("", {"a": None}), + ("A", {"a": "A"}), + ("A", + {"a": {"@attrs": {"x": "X"}, "@text": "A"}}), + ("b", {"a": {"b": "b"}}), + ("12", + {"a": [{"b": "1"}, {"b": "2"}]}), + ("bc", + {'a': {'b': 'b', 'c': 'c'}}), + ("", + {"a": {"@attrs": {"x": "1", "y": "y"}}}), + ("aaa12", + {"a": {"@text": "aaa", "@children": [{"b": "1"}, {"b": "2"}]}}), +) + + +@pytest.mark.parametrize(("elem_s", "exp"), _E2C_DATASETS) +def test_elem_to_container(elem_s, exp): + assert TT.elem_to_container( + to_xml_elem(elem_s) + ) == exp + + +def test_root_to_container__none(): + assert TT.root_to_container(None) == {} + assert TT.root_to_container( + None, container=collections.OrderedDict + ) == collections.OrderedDict() + + +@pytest.mark.parametrize(("root_s", "exp"), 
_R2C_DATASETS) +def test_root_to_container(root_s: str, exp): + assert TT.root_to_container( + to_xml_elem(root_s) + ) == exp + + +@pytest.mark.parametrize( + ("obj", "parent"), + ((None, None), + ({}, None), + ), +) +def test_container_to_elem__errors(obj, parent): + with pytest.raises(ValueError): + assert TT.container_to_elem(obj, parent=parent) + + +@pytest.mark.parametrize( + ("obj", "exp_s"), + (({"a": {"@attrs": {'x': 'X', 'y': 'Y'}, "@text": "A"}}, + 'A'), + ({"a": {"b": "b"}}, + "b"), + ({'a': {'@children': [{'b': 'b'}, {'c': 'c'}]}}, + "bc"), + ), +) +def test_container_to_elem(obj, exp_s): + assert TT.ElementTree.tostring( + TT.container_to_elem(obj) + ) == to_bytes(exp_s) + + +@pytest.mark.parametrize( + ("obj", "tags", "exp_s"), + (({"a": {"_attrs": {'x': 'X', 'y': 'Y'}, "_text": "A"}}, + {"attrs": "_attrs", "text": "_text"}, + 'A'), + ), +) +def test_container_to_elem_with_tags(obj, tags, exp_s): + assert TT.ElementTree.tostring( + TT.container_to_elem(obj, tags=tags) + ) == to_bytes(exp_s) diff --git a/tests/backend/test_common.py b/tests/backend/test_common.py new file mode 100644 index 00000000..a638f10e --- /dev/null +++ b/tests/backend/test_common.py @@ -0,0 +1,48 @@ +# +# Copyright (C) 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +# pylint: disable=missing-docstring,too-few-public-methods +r"""Test cases for Test Data Collector.""" +from __future__ import annotations + +import pytest + +import anyconfig.backend.json.stdlib as MOD + +from . 
import common as TT +from .constants import ( + MOD_BACKEND, MOD_TYPE, TEST_FILE, TEST_DATADIR +) + + +@pytest.mark.parametrize( + ("testfile", "exp"), + ((str(TEST_FILE), f"{MOD_TYPE}.{MOD_BACKEND}"), + (__file__, NameError), + ), +) +def test_get_name(testfile, exp): + if isinstance(exp, str): + assert TT.get_name(testfile) == exp + else: + with pytest.raises(exp): + TT.get_name(testfile) + + +@pytest.mark.parametrize( + ("testfile", "exp"), + ((str(TEST_FILE), MOD), + ), +) +def test_get_mod(testfile, exp): + assert TT.get_mod(testfile) == exp + + +@pytest.mark.parametrize( + ("path", "exp"), + ((str(TEST_FILE), TEST_DATADIR), + ), +) +def test_get_test_resdir(path, exp): + assert TT.get_test_resdir(path) == exp diff --git a/tests/base/__init__.py b/tests/base/__init__.py deleted file mode 100644 index 8c2db6d4..00000000 --- a/tests/base/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -# Copyright (C) 2021 Satoru SATOH -# SPDX-License-Identifier: MIT -# -r"""Common test Utility functions, constants and global variables, etc. -""" -from .constants import TESTS_DIR, RES_DIR, NULL_CNTNR -from .collector import TDataCollector -from .datatypes import TData -from .utils import ( - load_data, load_datasets_from_dir, maybe_data_path -) - - -__all__ = [ - 'TESTS_DIR', 'RES_DIR', 'NULL_CNTNR', - 'TDataCollector', - 'TData', - 'load_data', 'load_datasets_from_dir', 'maybe_data_path' -] - -# vim:sw=4:ts=4:et: diff --git a/tests/base/collector.py b/tests/base/collector.py deleted file mode 100644 index 124dd54a..00000000 --- a/tests/base/collector.py +++ /dev/null @@ -1,110 +0,0 @@ -# -# Copyright (C) 2021 Satoru SATOH -# License: MIT -# -"""File based test data collector. -""" -import inspect -import pathlib -import typing - -from . import ( - constants, datatypes, utils -) - - -DICT_0 = dict() - - -class TDataCollector: - """File based test data collector. - """ - target: str = '' # Initial value will be replaced in self.init. 
- kind: str = 'basics' - pattern: str = '*.json' # input file name pattern - - # sub dir names of expected data files should be found always. - should_exist: typing.Iterable[str] = ('e', ) - - # True if you want to keep the order of keys of dicts loaded. - ordered: bool = False - - root: typing.Optional[pathlib.Path] = None - datasets: typing.List[datatypes.TData] = [] - initialized: bool = False - - @classmethod - def resolve_target(cls) -> str: - """ - Resolve target by this file path. - """ - return utils.target_by_parent(inspect.getfile(cls)) - - def init(self) -> None: - """Initialize its members. - """ - if not self.target: - self.target = self.resolve_target() - - if not self.root: - self.root = constants.RES_DIR / self.target / self.kind - - self.datasets = self.load_datasets() - self.initialized = True - - def load_dataset(self, datadir: pathlib.Path, inp: pathlib.Path - ) -> datatypes.TData: - """Load dataset. - """ - name = inp.stem - - exp = utils.maybe_data_path(datadir / 'e', name, self.should_exist) - opts = utils.maybe_data_path(datadir / 'o', name, self.should_exist) - scm = utils.maybe_data_path(datadir / 's', name, self.should_exist) - query = utils.maybe_data_path(datadir / 'q', name, self.should_exist) - ctx = utils.maybe_data_path(datadir / 'c', name, self.should_exist) - - return datatypes.TData( - datadir, - inp, - utils.load_data(inp, ordered=self.ordered), - utils.load_data(exp, ordered=self.ordered), - utils.load_data(opts, default=DICT_0), - scm, - utils.load_data(query, default=DICT_0), - utils.load_data(ctx, default=DICT_0, ordered=self.ordered) - ) - - def load_datasets(self) -> typing.List[datatypes.TData]: - """Load test data from files. 
- """ - _datasets = [ - (datadir, - utils.load_datasets_from_dir(datadir, self.load_dataset, - pattern=self.pattern)) - for datadir in sorted(self.root.glob('*')) - ] - if not _datasets: - raise ValueError(f'No data: {self.root!s}') - - for datadir, data in _datasets: - if not data: - raise ValueError( - f'No data in subdir: {datadir!s}, ' - f'pattern={self.pattern}, ' - f'should_exist={self.should_exist!r}' - ) - - return _datasets - - def each_data(self) -> typing.Iterable[datatypes.TData]: - """Yields test data. - """ - if not self.initialized: - self.init() - - for _datadir, data in self.datasets: - for tdata in data: - yield tdata - -# vim:sw=4:ts=4:et: diff --git a/tests/base/constants.py b/tests/base/constants.py deleted file mode 100644 index 775b6501..00000000 --- a/tests/base/constants.py +++ /dev/null @@ -1,19 +0,0 @@ -# -# Copyright (C) 2021 Satoru SATOH -# SPDX-License-Identifier: MIT -# -# pylint: disable=missing-docstring -r"""Common utility functions. -""" -import pathlib -import typing - - -TESTS_DIR = pathlib.Path(__file__).parent.parent.resolve() -RES_DIR = TESTS_DIR / 'res' - -NULL_CNTNR: typing.Dict[str, typing.Any] = dict() - -DATA = '3.149265' # for test_utils.py - -# vim:sw=4:ts=4:et: diff --git a/tests/base/datatypes.py b/tests/base/datatypes.py deleted file mode 100644 index ce67229a..00000000 --- a/tests/base/datatypes.py +++ /dev/null @@ -1,38 +0,0 @@ -# -# Copyright (C) 2021 Satoru SATOH -# License: MIT -# -"""File based test data collector. -""" -import pathlib -import typing - -from anyconfig.api import InDataExT - - -DictT = typing.Dict[str, typing.Any] -MaybePathT = typing.Optional[pathlib.Path] - - -class TData(typing.NamedTuple): - """A namedtuple object keeps test data. 
- - - datadir: A dir wheere data files exist - - inp_path: A input file path - - inp: An input data loaded from ``inp_path`` - - exp: Data gives an expected result - - opts: Data gives options - - scm: Data gives a path to schema file - - query: A query string - - ctx: Data gives a context object - """ - datadir: pathlib.Path - inp_path: pathlib.Path - inp: InDataExT - exp: DictT - opts: DictT - scm: typing.Union[pathlib.Path, str] - query: str - ctx: DictT - -# vim:sw=4:ts=4:et: diff --git a/tests/base/test_collector.py b/tests/base/test_collector.py deleted file mode 100644 index ae66d6a5..00000000 --- a/tests/base/test_collector.py +++ /dev/null @@ -1,40 +0,0 @@ -# -# Copyright (C) 2021 Satoru SATOH -# License: MIT -# -# pylint: disable=invalid-name,missing-docstring -import unittest -import pathlib - -from . import collector as TT - - -CUR_DIR = pathlib.Path(__file__).parent - - -class Collector(TT.TDataCollector): - # To avoid error because there are no files with '.json' file extension in - # tests/res/base/basics/20/. - pattern = '*.*' - should_exist = () # Likewise. - - -class TestCase(unittest.TestCase, Collector): - - def setUp(self): - self.init() - - def test_members(self): - self.assertTrue(self.target) - self.assertNotEqual(self.target, TT.TDataCollector.target) - self.assertEqual(self.target, CUR_DIR.name) - - self.assertTrue(self.root is not None) - self.assertEqual( - self.root, - CUR_DIR.parent / 'res' / self.target / self.kind - ) - - self.assertTrue(self.datasets) - -# vim:sw=4:ts=4:et: diff --git a/tests/base/test_utils.py b/tests/base/test_utils.py deleted file mode 100644 index 017007ce..00000000 --- a/tests/base/test_utils.py +++ /dev/null @@ -1,112 +0,0 @@ -# -# Copyright (C) 2021 Satoru SATOH -# License: MIT -# -# pylint: disable=missing-docstring -import pathlib -import unittest - -from . 
import constants, utils as TT - - -RES_DIR = constants.RES_DIR / 'base' -SELF = pathlib.Path(__file__) - - -class TestCase(unittest.TestCase): - - def test_target_by_parent(self): - aes = [ - ((), 'base'), - ((__file__, ), 'base'), - ] - for args, exp in aes: - self.assertEqual(TT.target_by_parent(*args), exp) - - def test_load_from_py(self): - constants_py_path = SELF.parent / 'constants.py' - aes = [ - ((constants_py_path, ), constants.DATA), - ((str(constants_py_path), ), constants.DATA), - ((constants_py_path, 'RES_DIR'), constants.RES_DIR), - ] - for args, exp in aes: - self.assertEqual( - TT.load_from_py(*args), exp, - f'args: {args!r}, exp: {exp!r}' - ) - - def test_load_literal_data_from_py(self): - py_path = RES_DIR / 'basics' / '20' / '00.py' - exp = TT.json.load( - (RES_DIR / 'basics' / '10' / '00.json').open() - ) - aes = [ - (py_path, exp), - (str(py_path), exp), - ] - for arg, exp in aes: - self.assertEqual( - TT.load_literal_data_from_py(arg), exp - ) - - def test_maybe_data_path(self): - aes = [ - ((SELF.parent, SELF.stem, ), SELF), - ((pathlib.Path('/not/exist/dir'), 'foo', ), None), - ] - for args, exp in aes: - self.assertEqual(TT.maybe_data_path(*args), exp) - - def test_maybe_data_path_failures(self): - aes = [ - (SELF.parent, SELF.stem, (SELF.parent.name, ), '.xyz'), - ] - for args in aes: - with self.assertRaises(OSError): - TT.maybe_data_path(*args) - - def test_load_data(self): - aes = [ - ((None, ), {}), - ((None, 1), 1), - ((RES_DIR / 'basics' / '10' / '00.json', ), - TT.json.load((RES_DIR / 'basics' / '10' / '00.json').open()) - ), - ((RES_DIR / 'basics' / '20' / '00.py', ), - TT.json.load((RES_DIR / 'basics' / '10' / '00.json').open()) - ), - ((RES_DIR / 'basics' / '30' / '20.txt', ), - (RES_DIR / 'basics' / '10' / '20.json').read_text() - ), - ] - for args, exp in aes: - res = TT.load_data(*args) - self.assertEqual(res, exp, res) - - def test_load_data_failures(self): - aes = [ - (pathlib.Path('not_exist.xyz'), ), - ] - for args in 
aes: - with self.assertRaises(ValueError): - TT.load_data(*args) - - def test_load_datasets_from_dir(self): - aes = [ - ((RES_DIR / 'basics' / '10', '*.json'), 3), - ((RES_DIR / 'basics' / '20', '*.py'), 1), - ((RES_DIR / 'basics' / '30', '*.txt'), 3), - ] - for args, exp in aes: - res = TT.load_datasets_from_dir( - args[0], lambda *xs: xs[1], pattern=args[1] - ) - self.assertTrue(bool(res)) - self.assertEqual(len(res), exp) - - def test_load_datasets_from_dir_failures(self): - with self.assertRaises(ValueError): - _ = TT.load_datasets_from_dir(SELF, list) - -# vim:sw=4:ts=4:et: diff --git a/tests/base/utils.py b/tests/base/utils.py deleted file mode 100644 index 8ffdd00d..00000000 --- a/tests/base/utils.py +++ /dev/null @@ -1,145 +0,0 @@ -# -# Copyright (C) 2021 Satoru SATOH -# License: MIT -# -"""File based test data collector. -""" -import ast -import collections -import importlib.abc -import importlib.util -import json -import pathlib -import typing -import warnings - -from .datatypes import ( - DictT, MaybePathT, TData -) - - -DICT_0 = {} - - -def target_by_parent(self: str = __file__): - """ - >>> target_by_parent() - 'base' - """ - return pathlib.Path(self).parent.name - - -def load_from_py(py_path: typing.Union[str, pathlib.Path], - data_name: str = 'DATA') -> DictT: - """Load from .py files. - - .. note:: It's not safe always. - """ - spec = importlib.util.spec_from_file_location('testmod', py_path) - if spec and isinstance(spec.loader, importlib.abc.Loader): - mod = importlib.util.module_from_spec(spec) - spec.loader.exec_module(mod) - try: - return getattr(mod, data_name, None) - except (TypeError, ValueError, AttributeError): - pass - - return {} - - -def load_literal_data_from_py(py_path: typing.Union[str, pathlib.Path] - ) -> DictT: - """.. note:: It might be safer than the above function. 
- """ - return ast.literal_eval(pathlib.Path(py_path).read_text().strip()) - - -def maybe_data_path(datadir: pathlib.Path, name: str, - should_exist: typing.Iterable[str] = (), - file_ext: str = '*' - ) -> typing.Optional[pathlib.Path]: - """ - Get and return the file path of extra data file. Its filename will be - computed from the filename of the base data file given. - """ - pattern = f'{name}.{file_ext}' - if datadir.exists() and datadir.is_dir(): - paths = sorted(datadir.glob(pattern)) - if paths: - return paths[0] # There should be just one file found. - - if datadir.name in should_exist: - raise OSError(f'{datadir!s}/{pattern} should exists but not') - - return None - - -def load_data(path: MaybePathT, - default: typing.Optional[typing.Any] = None, - should_exist: bool = False, - exec_py: bool = False, - ordered: bool = False - ) -> typing.Union[DictT, str]: - """ - Return data loaded from given path or the default value. - """ - if path is None or not path: - if not should_exist: - return {} if default is None else default - - if path and path.exists(): - if path.suffix == '.json': - if ordered: - return json.load( - path.open(), object_hook=collections.OrderedDict - ) - - return json.load(path.open()) - - if path.suffix == '.py': - return ( - load_from_py if exec_py else load_literal_data_from_py - )(path) # type: ignore - - if path.suffix == '.txt': - return path.read_text() - - return path - - raise ValueError(f'Not exist or an invalid data: {path!s}') - - -def each_data_from_dir(datadir: pathlib.Path, - data_factory: typing.Callable, - pattern: str = '*.json', - ) -> typing.Iterator[TData]: - """ - Yield a collection of paths of data files under given dir. 
- """ - if not datadir.is_dir(): - raise ValueError(f'Not look a data dir: {datadir!s}') - - for inp in sorted(datadir.glob(pattern)): - if not inp.exists(): - warnings.warn(f'Not exists: {inp!s}', stacklevel=2) - continue - - if not inp.is_file(): - warnings.warn(f'Not looks a file: {inp!s}', stacklevel=2) - continue - - yield data_factory(datadir, inp) - - -def load_datasets_from_dir(datadir: pathlib.Path, - data_factory: typing.Callable, - **kwargs - ) -> typing.List[TData]: - """ - Load a collection of datasets from given dir ``datadir``. - """ - return list( - each_data_from_dir(datadir, data_factory, **kwargs) - ) - -# vim:sw=4:ts=4:et: diff --git a/tests/cli/collectors.py b/tests/cli/collectors.py deleted file mode 100644 index 2550d9ef..00000000 --- a/tests/cli/collectors.py +++ /dev/null @@ -1,121 +0,0 @@ -# -# Copyright (C) 2021 Satoru SATOH -# License: MIT -# -"""Provides base class to collect test data for cli test cases. -""" -import pathlib -import typing - -from .. import base -from . import datatypes - - -class Collector(base.TDataCollector): - """Test data collector for test cases with no file outputs. - - .. seealso:: tests.base.collector.TDataCollector - """ - def load_dataset(self, datadir: pathlib.Path, inp: pathlib.Path): - """ - .. 
seealso:: tests.base.collector.TDataCollector.load_dataset - """ - name = inp.stem - opts = base.maybe_data_path(datadir / 'o', name, self.should_exist) - exp_data = base.load_data( - base.maybe_data_path(datadir / 'e', name, self.should_exist) - ) - outname = base.maybe_data_path(datadir / 'on', name, self.should_exist) - ref = base.maybe_data_path(datadir / 'r', name, self.should_exist) - oo_opts = base.maybe_data_path(datadir / 'oo', name, self.should_exist) - scm = base.maybe_data_path(datadir / 's', name, self.should_exist) - - return datatypes.TData( - datadir, - inp, - base.load_data(opts, default=[]), - datatypes.Expected(**exp_data), - base.load_data(outname, default=''), - base.load_data(ref), - base.load_data(oo_opts, default={}), - scm or None - ) - - -class MultiDataCollector(base.TDataCollector): - """Test data collector for test cases with no file outputs. - - .. seealso:: tests.base.collector.TDataCollector - """ - def load_data_from_dir(self, datadir: pathlib.Path): - """ - .. seealso:: tests.base.collector.TDataCollector.load_dataset - .. seealso:: tests.base.utils.each_data_from_dir - """ - if not datadir.is_dir(): - raise ValueError(f'Not look a data dir: {datadir!s}') - - # There should be multiple input files match with self.pattern. - inputs = sorted(datadir.glob(self.pattern)) - if not inputs: - raise ValueError(f'No any inputs in: {datadir!s}') - - for inp in inputs: - if not inp.is_file(): - raise ValueError(f'Not a file: {inp!s} in {datadir!s}') - - name = inputs[0].stem - - # Load a glob pattern or a list of inputs. 
- inp_data = base.load_data( - base.maybe_data_path(datadir / 'i', name, self.should_exist) - ) or '*.json' - - if isinstance(inp_data, list): - inputs = [datadir / i for i in inp_data] - else: - if not isinstance(inp_data, str): - raise ValueError(f'Invalid inputs: {inp_data} in {datadir!s}') - inputs = [datadir / inp_data] - - opts = base.maybe_data_path(datadir / 'o', name, self.should_exist) - exp_data = base.load_data( - base.maybe_data_path(datadir / 'e', name, self.should_exist) - ) - outname = base.maybe_data_path(datadir / 'on', name, self.should_exist) - ref = base.maybe_data_path(datadir / 'r', name, self.should_exist) - oo_opts = base.maybe_data_path(datadir / 'oo', name, self.should_exist) - scm = base.maybe_data_path(datadir / 's', name, self.should_exist) - - return datatypes.TDataSet( - datadir, - inputs, - base.load_data(opts, default=[]), - datatypes.Expected(**exp_data), - base.load_data(outname, default=''), - base.load_data(ref), - base.load_data(oo_opts, default={}), - scm or None - ) - - def load_datasets(self) -> typing.List[datatypes.TData]: - """Load test data from files. 
- """ - _datasets = [ - (datadir, [self.load_data_from_dir(datadir)]) - for datadir in sorted(self.root.glob('*')) - ] - if not _datasets: - raise ValueError(f'No data: {self.root!s}') - - for datadir, data in _datasets: - if not data: - raise ValueError( - f'No data in subdir: {datadir!s}, ' - f'pattern={self.pattern}, ' - f'should_exist={self.should_exist!r}' - ) - - return _datasets - -# vim:sw=4:ts=4:et: diff --git a/tests/cli/common.py b/tests/cli/common.py new file mode 100644 index 00000000..311cce6a --- /dev/null +++ b/tests/cli/common.py @@ -0,0 +1,86 @@ +# +# Copyright (C) 2013 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +# pylint: disable=missing-docstring +"""Common constants and functions for test cases of anyconfig.cli.""" +from __future__ import annotations + +import contextlib +import io +import pathlib +import sys +import typing + +import pytest + +import anyconfig.api +import anyconfig.cli as TT + +from . import datatypes + + +NAMES: list[str] = ("ipath", "opts", "exp") +NAMES_WITH_REF: list[str] = (*NAMES, "oname", "ref") + + +def _run_main( + tdata: datatypes.TData, tmp_path: pathlib.Path +) -> None: + args = ["anyconfig_cli", *tdata.opts, *tdata.ipaths] + + if tdata.outname: # Running cli.main will output files. + assert tdata.ref is not None + opath = tmp_path / tdata.outname + + # Run anyconfig.cli.main with arguments. + TT.main([*args, "-o", str(opath)]) + + if tdata.exp.exit_code_matches and tdata.exp.exit_code == 0: + assert opath.exists() + + try: + odata = anyconfig.api.load(opath, **tdata.oo_opts) + except anyconfig.api.UnknownFileTypeError: + odata = anyconfig.api.load(opath, ac_parser='json') + + assert odata == tdata.ref, f"{odata} vs. {tdata.ref!r}" + else: + # Likewise but without -o option. 
+ TT.main(args) + + sys.exit(0) + + +def run_main( + tdata: datatypes.TData, tmp_path: pathlib.Path, + post_checks: typing.Optional[typing.Callable] = None +) -> None: + """Run anyconfig.cli.main and check if the exit code was expected one. + """ + expected: datatypes.Expected = tdata.exp + + with pytest.raises(expected.exception) as exci: + with contextlib.redirect_stdout(io.StringIO()) as stdout: + with contextlib.redirect_stderr(io.StringIO()) as stderr: + _run_main(tdata, tmp_path) + + assert isinstance(exci.value, expected.exception) + ecode = getattr( + exci.value, "error_code", getattr(exci.value, "code", 1) + ) + + if post_checks is not None: + post_checks(tdata, stdout, stderr) + + log = f"ecode: {ecode!r}, expected: {expected!r}, opts: {tdata.opts}" + if expected.exit_code_matches: + assert ecode == expected.exit_code, log + else: + assert ecode != expected.exit_code, log + + if expected.words_in_stdout: + assert expected.words_in_stdout in stdout.getvalue() + + if expected.words_in_stderr: + assert expected.words_in_stderr in stderr.getvalue() diff --git a/tests/cli/datatypes.py b/tests/cli/datatypes.py index 15db7479..d02ae92d 100644 --- a/tests/cli/datatypes.py +++ b/tests/cli/datatypes.py @@ -1,38 +1,38 @@ # -# Copyright (C) 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # -"""Basic data types for file based test data collectors. -""" +"""Basic data types for file based test data loaders.""" +from __future__ import annotations + import pathlib import typing -DictT = typing.Dict[str, typing.Any] +DictType = dict[str, typing.Any] class Expected(typing.NamedTuple): - """Keeps expected result's info. 
- """ + """Keeps expected result's information.""" exit_code: int = 0 exit_code_matches: bool = True - words_in_stdout: str = '' - words_in_stderr: str = '' + words_in_stdout: str = "" + words_in_stderr: str = "" exception: BaseException = SystemExit class TData(typing.NamedTuple): - """A namedtuple object keeps test data to test cases with no file outputs. + """A namedtuple object keeps test data for test cases with no file outputs. """ - datadir: pathlib.Path - inp_path: pathlib.Path - opts: typing.List[str] = [] + ipath: pathlib.Path + ipaths: list[str] = [] + opts: list[str] = [] exp: Expected = Expected() # Optional extra data. - outname: str = '' - ref: typing.Optional[DictT] = None - oo_opts: DictT = {} + outname: str = "" + ref: typing.Optional[DictType] = None + oo_opts: DictType = {} scm: typing.Optional[pathlib.Path] = None @@ -40,14 +40,12 @@ class TDataSet(typing.NamedTuple): """A namedtuple object keeps test data to test cases with no file outputs. """ datadir: pathlib.Path - inputs: typing.List[pathlib.Path] - opts: typing.List[str] = [] + inputs: list[pathlib.Path] + opts: list[str] = [] exp: Expected = Expected() # Likewise. - outname: str = '' - ref: typing.Optional[DictT] = None - oo_opts: DictT = {} + outname: str = "" + ref: typing.Optional[DictType] = None + oo_opts: DictType = {} scm: typing.Optional[pathlib.Path] = None - -# vim:sw=4:ts=4:et: diff --git a/tests/cli/test_base.py b/tests/cli/test_base.py deleted file mode 100644 index d0c9c7f1..00000000 --- a/tests/cli/test_base.py +++ /dev/null @@ -1,119 +0,0 @@ -# -# Copyright (C) 2013 - 2021 Satoru SATOH -# License: MIT -# -# pylint: disable=missing-docstring -"""test cases for anyconfig.cli module. -""" -import contextlib -import io -import pathlib -import sys -import tempfile -import unittest - -import anyconfig.api -import anyconfig.cli as TT - -from .. import base -from . import collectors, datatypes - - -def make_args(_self, tdata): - """Make arguments to run cli.main. 
- """ - return ['anyconfig_cli'] + tdata.opts + [str(tdata.inp_path)] - - -class BaseTestCase(unittest.TestCase): - """Base Test case. - """ - collector = collectors.Collector() - make_args = make_args - - def setUp(self): - if self.collector: - self.collector.init() - - def post_checks(self, tdata, *args, **kwargs): - """Placeholder to do more post checks. - """ - pass - - def _run_main(self, tdata): - """Wrapper for cli.main.""" - args = self.make_args(tdata) - - if tdata.outname: # Running cli.main will output files. - self.assertTrue( - tdata.ref is not None, - 'No reference data was given, {tdata!r}' - ) - with tempfile.TemporaryDirectory() as tdir: - opath = pathlib.Path(tdir) / tdata.outname - - # Run anyconfig.cli.main with arguments. - TT.main(args + ['-o', str(opath)]) - - if tdata.exp.exit_code_matches and tdata.exp.exit_code == 0: - self.assertTrue(opath.exists(), str(opath)) - - try: - odata = anyconfig.api.load(opath, **tdata.oo_opts) - except anyconfig.api.UnknownFileTypeError: - odata = anyconfig.api.load(opath, ac_parser='json') - self.assertEqual(odata, tdata.ref, repr(tdata)) - - self.post_checks(tdata, opath) - else: - # Likewise but without -o option. - TT.main(args) - self.post_checks(tdata) - - sys.exit(0) - - def run_main(self, tdata) -> None: - """ - Run anyconfig.cli.main and check if the exit code was expected one. 
- """ - expected: datatypes.Expected = tdata.exp - - with self.assertRaises(expected.exception, msg=repr(tdata)) as ctx: - with contextlib.redirect_stdout(io.StringIO()) as stdout: - with contextlib.redirect_stderr(io.StringIO()) as stderr: - self._run_main(tdata) - - exc = ctx.exception - self.assertTrue(isinstance(exc, expected.exception)) - ecode = getattr(exc, 'error_code', getattr(exc, 'code', 1)) - if expected.exit_code_matches: - self.assertEqual(ecode, expected.exit_code, f'{tdata!r}') - else: - self.assertNotEqual(ecode, expected.exit_code, f'{tdata!r}') - - if expected.words_in_stdout: - msg = stdout.getvalue() - self.assertTrue(expected.words_in_stdout in msg, msg) - - if expected.words_in_stderr: - err = stderr.getvalue() - self.assertTrue(expected.words_in_stderr in err, err) - - def test_runs_for_datasets(self) -> None: - if self.collector and self.collector.initialized: - if self.collector.kind == base.TDataCollector.kind: - return - - for tdata in self.collector.each_data(): - self.run_main(tdata) - - -class NoInputTestCase(BaseTestCase): - """Test cases which does not require inputs. - """ - def make_args(self, tdata): # pylint: disable=no-self-use - """Make arguments to run cli.main. - """ - return ['anyconfig_cli'] + tdata.opts - -# vim:sw=4:ts=4:et: diff --git a/tests/cli/test_detectors.py b/tests/cli/test_detectors.py index a9d51e83..7268275f 100644 --- a/tests/cli/test_detectors.py +++ b/tests/cli/test_detectors.py @@ -1,101 +1,113 @@ # -# Copyright (C) 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring -"""test cases of anyconfig.cli.detectors. 
-""" +"""test cases of anyconfig.cli.detectors.""" +from __future__ import annotations + import contextlib import io -import unittest import warnings +import typing + +import pytest import anyconfig.cli.detectors as TT import anyconfig.cli.parse_args -class TestCase(unittest.TestCase): - - def test_are_same_file_types(self): - ies = (([], False), - (['/tmp/a/b/c.conf'], True), - (['/tmp/a/b/c.yml', '/tmp/a/b/d.yml'], True), - ) - for inp, exp in ies: - (self.assertTrue if exp else self.assertFalse)( - TT.are_same_file_types(inp) - ) - - def test_find_by_the_type(self): - ies = (('', None), - (None, None), - ('json', 'json'), - ('type_not_exit', None), - ) - with warnings.catch_warnings(): - warnings.simplefilter('ignore') - for inp, exp in ies: - self.assertEqual( - TT.find_by_the_type(inp), exp - ) - - def test_find_by_the_paths(self): - ies = (([], None), - (['/tmp/a/b/c.yml', '/tmp/a/b/d.json'], None), - (['-'], None), - (['-', '/tmp/a/b/d.json'], None), - (['/tmp/a/b/c.json', '/tmp/a/b/d.json'], 'json'), - ) - for inp, exp in ies: - self.assertEqual( - TT.find_by_the_paths(inp), exp - ) - - def test_try_detecting_input_type(self): - ies = (([], None), - (['-'], None), - (['a.conf'], None), - (['-I', 'json', 'a.conf'], 'json'), - (['a.json'], 'json'), - ) - for inp, exp in ies: - (_psr, args) = anyconfig.cli.parse_args.parse( - inp, prog='anyconfig_cli' - ) - self.assertEqual( - TT.try_detecting_input_type(args), exp, args - ) - - def test_try_detecting_output_type(self): - ies = ((['-I', 'json', 'a.conf'], 'json'), - (['a.json'], 'json'), - (['-I', 'json', 'a.conf', '-o', 'b.conf'], 'json'), - (['a.json', '-o', 'b.conf'], 'json'), - (['a.json', '-O', 'json', '-o', 'b.conf'], 'json'), - ) - for inp, exp in ies: - (_psr, args) = anyconfig.cli.parse_args.parse( - inp, prog='anyconfig_cli' - ) - self.assertEqual( - TT.try_detecting_output_type(args), exp, args - ) - - def test_try_detecting_output_type__failures(self): - ies = (['-'], - ['a.conf'], - ['a.conf', 
'-o', 'b.conf'], - ) - with warnings.catch_warnings(): - warnings.simplefilter('ignore') - - for inp in ies: - (_psr, args) = anyconfig.cli.parse_args.parse( - inp, prog='anyconfig_cli' - ) - with self.assertRaises(SystemExit): - with contextlib.redirect_stdout(io.StringIO()): - with contextlib.redirect_stderr(io.StringIO()): - TT.try_detecting_output_type(args) - -# vim:sw=4:ts=4:et: +@pytest.mark.parametrize( + ("paths", "exp"), + (([], False), + (['/tmp/a/b/c.conf'], True), + (['/tmp/a/b/c.yml', '/tmp/a/b/d.yml'], True), + ) +) +def test_are_same_file_types(paths: list[str], exp: bool) -> None: + assert TT.are_same_file_types(paths) == exp + + +@pytest.mark.parametrize( + ("typ", "exp"), + (('', None), + (None, None), + ('json', 'json'), + ('type_not_exit', None), + ) +) +def test_find_by_the_type(typ: str, exp: typing.Optional[str]): + with warnings.catch_warnings(): + warnings.simplefilter('ignore') + assert TT.find_by_the_type(typ) == exp + + +@pytest.mark.parametrize( + ("paths", "exp"), + (([], None), + (['/tmp/a/b/c.yml', '/tmp/a/b/d.json'], None), + (['-'], None), + (['-', '/tmp/a/b/d.json'], None), + (['/tmp/a/b/c.json', '/tmp/a/b/d.json'], 'json'), + ) +) +def test_find_by_the_paths( + paths: list[str], exp: typing.Optional[str] +) -> None: + assert TT.find_by_the_paths(paths) == exp + + +@pytest.mark.parametrize( + ("argv", "exp"), + (([], None), + (['-'], None), + (['a.conf'], None), + (['-I', 'json', 'a.conf'], 'json'), + (['a.json'], 'json'), + ) +) +def test_try_detecting_input_type( + argv: list[str], exp: typing.Optional[str] +) -> None: + (_psr, args) = anyconfig.cli.parse_args.parse( + argv, prog='anyconfig_cli' + ) + assert TT.try_detecting_input_type(args) == exp + + +@pytest.mark.parametrize( + ("argv", "exp"), + ((['-I', 'json', 'a.conf'], 'json'), + (['a.json'], 'json'), + (['-I', 'json', 'a.conf', '-o', 'b.conf'], 'json'), + (['a.json', '-o', 'b.conf'], 'json'), + (['a.json', '-O', 'json', '-o', 'b.conf'], 'json'), + ) +) +def 
test_try_detecting_output_type( + argv: list[str], exp: typing.Optional[str] +) -> None: + (_psr, args) = anyconfig.cli.parse_args.parse( + argv, prog='anyconfig_cli' + ) + assert TT.try_detecting_output_type(args) == exp + + +@pytest.mark.parametrize( + ("argv", ), + ((['-'], ), + (['a.conf'], ), + (['a.conf', '-o', 'b.conf'], ), + ) +) +def test_try_detecting_output_type__failures(argv: list[str]) -> None: + with warnings.catch_warnings(): + warnings.simplefilter('ignore') + + (_psr, args) = anyconfig.cli.parse_args.parse( + argv, prog='anyconfig_cli' + ) + with pytest.raises(SystemExit): + with contextlib.redirect_stdout(io.StringIO()): + with contextlib.redirect_stderr(io.StringIO()): + TT.try_detecting_output_type(args) diff --git a/tests/cli/test_errors.py b/tests/cli/test_errors.py index 441c89db..6ac45a43 100644 --- a/tests/cli/test_errors.py +++ b/tests/cli/test_errors.py @@ -1,25 +1,40 @@ # -# Copyright (C) 2013 - 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2013 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring -"""test cases of anyconfig.cli.main without arguments and cause errors. -""" -from .. import base -from . import collectors, test_base +"""test cases of anyconfig.cli.main without arguments and cause errors.""" +from __future__ import annotations +import typing -class Collector(collectors.Collector): - kind = 'errors' +import pytest +from .. import common +from . import datatypes +from .common import run_main -class TestCase(test_base.BaseTestCase): - collector = Collector() +if typing.TYPE_CHECKING: + import pathlib - def make_args(self, tdata): # pylint: disable=no-self-use - """Make arguments to run cli.main. 
- """ - args = base.load_data(tdata.inp_path, default=[]) - return ['anyconfig_cli'] + tdata.opts + args -# vim:sw=4:ts=4:et: +NAMES: list[str] = ("ipath", "ipaths", "opts", "exp") +DATA = common.load_data_for_testfile( + __file__, values=(("o", []), ("e", None)), load_idata=True +) +DATA_IDS: list[str] = common.get_test_ids(DATA) + + +def test_data(): + assert DATA + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_cli( + ipath: pathlib.Path, ipaths: list[str], opts: list[str], exp: dict, + tmp_path: pathlib.Path +) -> None: + expected = datatypes.Expected(**exp) + tdata = datatypes.TData(ipath, ipaths, opts, expected) + + run_main(tdata, tmp_path) diff --git a/tests/cli/test_extra_options.py b/tests/cli/test_extra_options.py index 1b669d06..cc8c35f4 100644 --- a/tests/cli/test_extra_options.py +++ b/tests/cli/test_extra_options.py @@ -1,31 +1,38 @@ # -# Copyright (C) 2013 - 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2013 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # -# pylint: disable=missing-docstring -"""test cases of anyconfig.cli.main with schema options. -""" -from .. import base -from . import collectors, test_base +# pylint: disable=missing-docstring, too-many-arguments +"""Test cases of anyconfig.cli.main with extra options.""" +from __future__ import annotations +import typing -class Collector(collectors.Collector): - kind = 'extra_options' +import pytest +from .. import common +from . import datatypes +from .common import run_main, NAMES_WITH_REF as NAMES -class TestCase(test_base.BaseTestCase): - collector = Collector() +if typing.TYPE_CHECKING: + import pathlib - def post_checks(self, tdata, *args, **kwargs): - """Post checks to compare the outputs of ref. and result. - .. 
seealso:: tests.cli.test_base.BaseTestCase._run_main - """ - ref_path = base.maybe_data_path( - tdata.datadir / 'r', tdata.inp_path.stem - ) - ref = ref_path.read_text().strip().rstrip() - out = args[0].read_text().strip().rstrip() - self.assertEqual(out, ref) +DATA = common.load_data_for_testfile( + __file__, values=(("o", []), ("e", None), ("on", ""), ("r", None)) +) +DATA_IDS: list[str] = common.get_test_ids(DATA) -# vim:sw=4:ts=4:et: + +def test_data(): + assert DATA + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_cli( + ipath: pathlib.Path, opts: list[str], exp: dict, oname: str, ref, + tmp_path: pathlib.Path +) -> None: + expected = datatypes.Expected(**exp) + tdata = datatypes.TData(ipath, [str(ipath)], opts, expected, oname, ref) + run_main(tdata, tmp_path) diff --git a/tests/cli/test_ignore_missing.py b/tests/cli/test_ignore_missing.py index 9bdb20a9..98d0ae1a 100644 --- a/tests/cli/test_ignore_missing.py +++ b/tests/cli/test_ignore_missing.py @@ -1,23 +1,40 @@ # -# Copyright (C) 2013 - 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2013 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # -# pylint: disable=missing-docstring -"""test cases of anyconfig.cli.main of which input does not exist but ignored. -""" -from . import collectors, test_base +# pylint: disable=missing-docstring, too-many-arguments +"""Test cases of anyconfig.cli.main with ignore-missing option.""" +from __future__ import annotations +import typing -class Collector(collectors.Collector): - kind = 'ignore_missing' +import pytest +from .. import common +from . import datatypes +from .common import run_main, NAMES_WITH_REF as NAMES -class TestCase(test_base.BaseTestCase): - collector = Collector() +if typing.TYPE_CHECKING: + import pathlib - def make_args(self, tdata): # pylint: disable=no-self-use - """Make arguments to run cli.main. 
- """ - return ['anyconfig_cli'] + tdata.opts + ['file_not_exist.json'] -# vim:sw=4:ts=4:et: +DATA = common.load_data_for_testfile( + __file__, values=(("o", []), ("e", None), ("on", ""), ("r", None)), +) +DATA_IDS: list[str] = common.get_test_ids(DATA) + + +def test_data(): + assert DATA + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_cli( + ipath: pathlib.Path, opts: list[str], exp: dict, oname: str, ref, + tmp_path: pathlib.Path +) -> None: + expected = datatypes.Expected(**exp) + tdata = datatypes.TData( + ipath, [str(ipath), "file_not_exist.json"], opts, expected, oname, ref + ) + run_main(tdata, tmp_path) diff --git a/tests/cli/test_multi_inputs.py b/tests/cli/test_multi_inputs.py index a51567a8..9068329a 100644 --- a/tests/cli/test_multi_inputs.py +++ b/tests/cli/test_multi_inputs.py @@ -1,23 +1,42 @@ # -# Copyright (C) 2013 - 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2013 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # -# pylint: disable=missing-docstring -"""test cases of anyconfig.cli.main with multiple inputs. -""" -from . import collectors, test_base +# pylint: disable=missing-docstring, too-many-arguments +"""Test cases of anyconfig.cli.main with ignore-missing option.""" +from __future__ import annotations +import typing -class Collector(collectors.MultiDataCollector): - kind = 'multi_inputs' +import pytest +from .. import common +from . import datatypes +from .common import run_main, NAMES_WITH_REF as NAMES -class TestCase(test_base.BaseTestCase): - collector = Collector() +if typing.TYPE_CHECKING: + import pathlib - def make_args(self, tdata): # pylint: disable=no-self-use - """Make arguments to run cli.main. 
- """ - return ['anyconfig_cli'] + tdata.opts + [str(p) for p in tdata.inputs] -# vim:sw=4:ts=4:et: +DATA = [ + (i, o, e, on, r) for i, o, e, on, r + in common.load_data_for_testfile( + __file__, values=(("o", []), ("e", {}), ("on", ""), ("r", None)), + ) if r is not None +] +DATA_IDS: list[str] = common.get_test_ids(DATA) + + +def test_data(): + assert DATA + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_cli( + ipath: pathlib.Path, opts: list[str], exp: dict, oname: str, ref, + tmp_path: pathlib.Path +) -> None: + expected = datatypes.Expected(**exp) + ipaths = sorted(str(p) for p in ipath.parent.glob("*.*")) + tdata = datatypes.TData(ipath, ipaths, opts, expected, oname, ref) + run_main(tdata, tmp_path) diff --git a/tests/cli/test_parse_args.py b/tests/cli/test_parse_args.py index 3efbc0d7..ebcfda53 100644 --- a/tests/cli/test_parse_args.py +++ b/tests/cli/test_parse_args.py @@ -1,33 +1,26 @@ # -# Copyright (C) 2013 - 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2013 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # -# pylint: disable=missing-docstring, invalid-name, too-many-public-methods -"""test cases of anyconfig.cli.main without arguments. 
-""" -import unittest +# pylint: disable=missing-docstring +"""Test cases of anyconfig.cli.main without arguments.""" +from __future__ import annotations import anyconfig.cli.parse_args as TT -class TestCase(unittest.TestCase): +def test_make_parser() -> None: + psr = TT.make_parser() + assert isinstance(psr, TT.argparse.ArgumentParser) - def test_make_parser(self): - psr = TT.make_parser() - self.assertTrue( - isinstance(psr, TT.argparse.ArgumentParser) - ) - - # ref = TT.DEFAULTS.copy() - ref = dict( - args=None, atype=None, env=False, extra_opts=None, - gen_schema=False, get=None, ignore_missing=False, inputs=[], - itype=None, list=False, loglevel=0, merge='merge_dicts', - otype=None, output=None, query=None, schema=None, set=None, - template=False, validate=False - ) - self.assertEqual( - vars(psr.parse_args([])), ref - ) - -# vim:sw=4:ts=4:et: + # ref = TT.DEFAULTS.copy() + ref = { + "args": None, "atype": None, "env": False, + "extra_opts": None, "gen_schema": False, + "get": None, "ignore_missing": False, "inputs": [], + "itype": None, "list": False, "loglevel": 0, + "merge": 'merge_dicts', "otype": None, "output": None, + "query": None, "schema": None, "set": None, + "template": False, "validate": False + } + assert vars(psr.parse_args([])) == ref diff --git a/tests/cli/test_query.py b/tests/cli/test_query.py index 63adac3b..d01d3c5a 100644 --- a/tests/cli/test_query.py +++ b/tests/cli/test_query.py @@ -1,24 +1,47 @@ # -# Copyright (C) 2013 - 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2013 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # -# pylint: disable=missing-docstring -"""test cases of anyconfig.cli.main to query using JMESPath expression. -""" -import unittest +# pylint: disable=missing-docstring, too-many-arguments +# pylint: disable=unused-import +"""Test cases of anyconfig.cli.main with query option.""" +from __future__ import annotations -import anyconfig.query +import typing -from . 
import collectors, test_base +import pytest +try: + import anyconfig.query.query # noqa: F401 +except ImportError: + pytest.skip( + "Required query module is not available", + allow_module_level=True + ) -class Collector(collectors.Collector): - kind = 'query' +from .. import common +from . import datatypes +from .common import run_main, NAMES_WITH_REF as NAMES +if typing.TYPE_CHECKING: + import pathlib -@unittest.skipIf(not anyconfig.query.SUPPORTED, - 'Library to query using JMESPath is not available') -class TestCase(test_base.BaseTestCase): - collector = Collector() -# vim:sw=4:ts=4:et: +DATA = common.load_data_for_testfile( + __file__, values=(("o", []), ("e", {}), ("on", ""), ("r", None)) +) +DATA_IDS: list[str] = common.get_test_ids(DATA) + + +def test_data(): + assert DATA + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_cli( + ipath: pathlib.Path, opts: list[str], exp: dict, oname: str, ref, + tmp_path: pathlib.Path +) -> None: + expected = datatypes.Expected(**exp) + tdata = datatypes.TData(ipath, [str(ipath)], opts, expected, oname, ref) + run_main(tdata, tmp_path) diff --git a/tests/cli/test_schema.py b/tests/cli/test_schema.py index 41e6eab0..43b4e1ae 100644 --- a/tests/cli/test_schema.py +++ b/tests/cli/test_schema.py @@ -1,51 +1,52 @@ # -# Copyright (C) 2013 - 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2013 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # -# pylint: disable=missing-docstring -"""test cases of anyconfig.cli.main with schema options. -""" -import unittest -import warnings - -import anyconfig.schema - -from . import collectors, test_base +# pylint: disable=missing-docstring, too-many-arguments +"""Test cases of anyconfig.cli.main with ignore-missing option.""" +from __future__ import annotations +import typing +import warnings -ERR = 'Library for JSON schema validation is not available' +import pytest +import anyconfig.schema -class Collector(collectors.Collector): - kind = 'schema' +from .. 
import common +from . import datatypes +from .common import run_main, NAMES +if typing.TYPE_CHECKING: + import pathlib -@unittest.skipIf(not anyconfig.schema.SUPPORTED, ERR) -class TestCase(test_base.BaseTestCase): - collector = Collector() +if not anyconfig.schema.SUPPORTED: + pytest.skip( + "Library for JSON schema validation is not available", + allow_module_level=True + ) - def make_args(self, tdata): # pylint: disable=no-self-use - """Make arguments to run cli.main. - """ - return [ - 'anyconfig_cli', '--validate', '--schema', str(tdata.scm), - str(tdata.inp_path), *tdata.opts - ] - def _run_main(self, tdata): - """Override it to suppress some warnings. - """ - with warnings.catch_warnings(): - warnings.simplefilter('ignore') - super()._run_main(tdata) +DATA = common.load_data_for_testfile( + __file__, values=(("o", []), ("e", {})) +) +DATA_IDS: list[str] = common.get_test_ids(DATA) -class SchemaErrorsCollector(collectors.Collector): - kind = 'schema_errors' +def test_data(): + assert DATA -@unittest.skipIf(not anyconfig.schema.SUPPORTED, ERR) -class SchemaErrorsTestCase(test_base.BaseTestCase): - collector = SchemaErrorsCollector() +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_cli( + ipath: pathlib.Path, opts: list[str], exp: dict, + tmp_path: pathlib.Path +) -> None: + scm = list((ipath.parent / "s").glob("*.*"))[0] + sopts = ["--schema", str(scm)] -# vim:sw=4:ts=4:et: + expected = datatypes.Expected(**exp) + tdata = datatypes.TData(ipath, [str(ipath)], [*opts, *sopts], expected) + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + run_main(tdata, tmp_path) diff --git a/tests/cli/test_schema_errors.py b/tests/cli/test_schema_errors.py new file mode 100644 index 00000000..d3a47d74 --- /dev/null +++ b/tests/cli/test_schema_errors.py @@ -0,0 +1,49 @@ +# +# Copyright (C) 2013 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +# pylint: disable=missing-docstring, too-many-arguments +"""Test cases of anyconfig.cli.main with 
invalid schema option.""" +from __future__ import annotations + +import typing +import warnings + +import pytest + +import anyconfig.schema + +from .. import common +from . import datatypes +from .common import run_main, NAMES + +if typing.TYPE_CHECKING: + import pathlib + +if not anyconfig.schema.SUPPORTED: + pytest.skip( + "Library for JSON schema validation is not available", + allow_module_level=True + ) + + +DATA = common.load_data_for_testfile( + __file__, values=(("o", []), ("e", {})) +) +DATA_IDS: list[str] = common.get_test_ids(DATA) + + +def test_data(): + assert DATA + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_cli( + ipath: pathlib.Path, opts: list[str], exp: dict, + tmp_path: pathlib.Path +) -> None: + expected = datatypes.Expected(**exp) + tdata = datatypes.TData(ipath, [str(ipath)], opts, expected) + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + run_main(tdata, tmp_path) diff --git a/tests/cli/test_show.py b/tests/cli/test_show.py index 2c841d57..6932f080 100644 --- a/tests/cli/test_show.py +++ b/tests/cli/test_show.py @@ -1,37 +1,45 @@ # -# Copyright (C) 2013 - 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2013 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # -# pylint: disable=missing-docstring -"""test cases of anyconfig.cli.main without arguments to show info. -""" -import anyconfig.api +# pylint: disable=missing-docstring, too-many-arguments +"""Test cases of anyconfig.cli.main without arguments to show info.""" +from __future__ import annotations -from . import collectors, datatypes, test_base +import typing +import pytest -class Collector(collectors.Collector): - kind = 'show' +import anyconfig.schema +from .. import common +from . 
import datatypes +from .common import run_main, NAMES -class TestCase(test_base.NoInputTestCase): - collector = Collector() +if typing.TYPE_CHECKING: + import pathlib +if not anyconfig.schema.SUPPORTED: + pytest.skip( + "Library for JSON schema validation is not available", + allow_module_level=True + ) -class VersionCollector(collectors.Collector): - kind = 'show_version' - def load_dataset(self, datadir, inp): - ver = '.'.join(anyconfig.api.version()) - tdata = super().load_dataset(datadir, inp) +DATA = common.load_data_for_testfile( + __file__, values=(("o", []), ("e", {})) +) +DATA_IDS: list[str] = common.get_test_ids(DATA) - return datatypes.TData( - tdata.datadir, tdata.inp_path, tdata.opts, - datatypes.Expected(words_in_stdout=ver) - ) +def test_data(): + assert DATA -class VersionTestCase(test_base.NoInputTestCase): - collector = VersionCollector() -# vim:sw=4:ts=4:et: +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_cli( + ipath: pathlib.Path, opts: list[str], exp: dict, tmp_path +) -> None: + expected = datatypes.Expected(**exp) + tdata = datatypes.TData(ipath, [], opts, expected) + run_main(tdata, tmp_path) diff --git a/tests/cli/test_single_input.py b/tests/cli/test_single_input.py index c7bd9b1d..f87dd53c 100644 --- a/tests/cli/test_single_input.py +++ b/tests/cli/test_single_input.py @@ -1,18 +1,52 @@ # -# Copyright (C) 2013 - 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2013 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # -# pylint: disable=missing-docstring -"""test cases of anyconfig.cli.main without arguments and cause errors. -""" -from . import collectors, test_base +# pylint: disable=missing-docstring, too-many-arguments +"""Test cases of anyconfig.cli.main with sinngle file innput.""" +from __future__ import annotations +import typing -class Collector(collectors.Collector): - kind = 'single_input' +import pytest +import anyconfig.schema -class TestCase(test_base.BaseTestCase): - collector = Collector() +from .. 
import common +from . import datatypes +from .common import run_main, NAMES_WITH_REF -# vim:sw=4:ts=4:et: +if typing.TYPE_CHECKING: + import pathlib + +if not anyconfig.schema.SUPPORTED: + pytest.skip( + "Library for JSON schema validation is not available", + allow_module_level=True + ) + + +NAMES: list[str] = (*NAMES_WITH_REF, "oopts") +DATA = common.load_data_for_testfile( + __file__, + values=(("o", []), ("e", {}), ("on", ""), ("r", {}), ("oo", {})) +) +DATA_IDS: list[str] = common.get_test_ids(DATA) + + +def test_data(): + assert DATA + + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_cli( + ipath: pathlib.Path, opts: list[str], exp: dict, + oname: str, ref: dict, oopts: dict, + tmp_path: pathlib.Path +) -> None: + expected = datatypes.Expected(**exp) + tdata = datatypes.TData( + ipath, [str(ipath)], opts, expected, + outname=oname, ref=ref, oo_opts=oopts + ) + run_main(tdata, tmp_path) diff --git a/tests/cli/test_single_input_to_yaml_output.py b/tests/cli/test_single_input_to_yaml_output.py index 37f2bea4..efe253c7 100644 --- a/tests/cli/test_single_input_to_yaml_output.py +++ b/tests/cli/test_single_input_to_yaml_output.py @@ -1,25 +1,52 @@ # -# Copyright (C) 2013 - 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2013 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # -# pylint: disable=missing-docstring -""" -Test cases of anyconfig.cli.main to load single input with support -using extra libraries. -""" -import unittest +# pylint: disable=missing-docstring, too-many-arguments +"""Test cases of anyconfig.cli.main with yaml output option.""" +from __future__ import annotations + +import typing + +import pytest import anyconfig.api -from . import collectors, test_base +from .. import common +from . 
import datatypes +from .common import run_main, NAMES_WITH_REF + +if typing.TYPE_CHECKING: + import pathlib + +if "yaml" not in anyconfig.api.list_types(): + pytest.skip( + "YAML support library is not available", + allow_module_level=True + ) + + +NAMES: list[str] = (*NAMES_WITH_REF, "oopts") +DATA = common.load_data_for_testfile( + __file__, + values=(("o", []), ("e", {}), ("on", ""), ("r", {}), ("oo", {})) +) +DATA_IDS: list[str] = common.get_test_ids(DATA) -class Collector(collectors.Collector): - kind = 'single_input_to_yaml_output' +def test_data(): + assert DATA -@unittest.skipIf('yaml' not in anyconfig.api.list_types(), - 'loading and dumping yaml support is not available') -class TestCase(test_base.BaseTestCase): - collector = Collector() -# vim:sw=4:ts=4:et: +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_cli( + ipath: pathlib.Path, opts: list[str], exp: dict, + oname: str, ref: dict, oopts: dict, + tmp_path: pathlib.Path +) -> None: + expected = datatypes.Expected(**exp) + tdata = datatypes.TData( + ipath, [str(ipath)], opts, expected, + outname=oname, ref=ref, oo_opts=oopts + ) + run_main(tdata, tmp_path) diff --git a/tests/cli/test_template.py b/tests/cli/test_template.py index 3354c889..cb768fab 100644 --- a/tests/cli/test_template.py +++ b/tests/cli/test_template.py @@ -1,34 +1,48 @@ # -# Copyright (C) 2013 - 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2013 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # -# pylint: disable=missing-docstring -"""test cases of anyconfig.cli.main with template options. -""" -import unittest +# pylint: disable=missing-docstring, too-many-arguments +"""Test cases for anyconfig.cli.main with template option.""" +from __future__ import annotations -import anyconfig.template +import typing + +import pytest -from . import collectors, test_base +import anyconfig.template +from .. import common +from . 
import datatypes +from .common import run_main, NAMES_WITH_REF as NAMES -class Collector(collectors.Collector): - kind = 'template' +if typing.TYPE_CHECKING: + import pathlib +if not anyconfig.template.SUPPORTED: + pytest.skip( + "Library for template rendering is not available", + allow_module_level=True + ) -@unittest.skipIf(not anyconfig.template.SUPPORTED, - 'Library for template rendering is not available') -class TestCase(test_base.BaseTestCase): - collector = Collector() +DATA = common.load_data_for_testfile( + __file__, values=(("o", []), ("e", {}), ("on", ""), ("r", {})) +) +DATA_IDS: list[str] = common.get_test_ids(DATA) -class NoTemplateCollector(collectors.Collector): - kind = 'no_template' +def test_data(): + assert DATA -@unittest.skipIf(anyconfig.template.SUPPORTED, - 'Library for template rendering is available') -class SchemaErrorsTestCase(test_base.BaseTestCase): - collector = NoTemplateCollector() -# vim:sw=4:ts=4:et: +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_cli( + ipath: pathlib.Path, opts: list[str], exp: dict, oname: str, ref: dict, + tmp_path: pathlib.Path +) -> None: + expected = datatypes.Expected(**exp) + tdata = datatypes.TData( + ipath, [str(ipath)], opts, expected, outname=oname, ref=ref + ) + run_main(tdata, tmp_path) diff --git a/tests/common/__init__.py b/tests/common/__init__.py index e69de29b..ed489ea2 100644 --- a/tests/common/__init__.py +++ b/tests/common/__init__.py @@ -0,0 +1,16 @@ +# +# Copyright (C) 2023, 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +"tests.common - common global variables and functions.""" +from .globals_ import TESTDIR, RESOURCE_DIR +from .paths import load_data +from .tdc import ( + get_test_ids, load_data_for_testfile +) + +__all__ = [ + "TESTDIR", "RESOURCE_DIR", + "load_data", + "get_test_ids", "load_data_for_testfile", +] diff --git a/tests/common/dumper.py b/tests/common/dumper.py deleted file mode 100644 index 72e07361..00000000 --- a/tests/common/dumper.py +++ 
/dev/null @@ -1,66 +0,0 @@ -# -# Copyright (C) 2024 Satoru SATOH -# SPDX-License-Identifier: MIT -# -# pylint: disable=missing-docstring,too-few-public-methods -r"""Dumper test cases. -""" -import pathlib -import typing -import warnings - -import tests.common.load -import anyconfig.ioinfo - - -class TestCase: - """Base class for dumper test cases.""" - psr_cls = None - - exact_match: bool = True - - def _get_all( - self, ipath: pathlib.Path, aux: typing.Dict[str, typing.Any], - ): - if self.psr_cls is None: - warnings.warn( # noqa - f"Failed to initialize test target: {__file__}", - ) - psr = None - else: - psr = self.psr_cls() # pylint: disable=not-callable - - idata = tests.common.load.load_data(ipath) - - return ( - aux["e"], # It should NOT fail. - aux.get("o", {}), - psr, - idata - ) - - def _assert_dumps( - self, ipath: pathlib.Path, aux: typing.Dict[str, typing.Any], - ): - (exp, opts, psr, idata) = self._get_all(ipath, aux) - out_s: str = psr.dumps(idata, **opts) - - assert psr.loads(out_s, **opts) == idata - if self.exact_match: - assert out_s == exp, f"'{out_s}' vs. '{exp}'" - - def _assert_dump( - self, ipath: pathlib.Path, aux: typing.Dict[str, typing.Any], - tmp_path: pathlib.Path - ): - (exp, opts, psr, idata) = self._get_all(ipath, aux) - - opath = tmp_path / f"{ipath.name}.{psr.extensions()[0]}" - ioi = anyconfig.ioinfo.make(opath) - psr.dump(idata, ioi, **opts) - - out_s: str = psr.ropen(str(opath)).read() - - assert psr.load(ioi, **opts) == idata - if self.exact_match: - assert out_s == exp, f"'{out_s}' vs. '{exp}'" diff --git a/tests/common/globals_.py b/tests/common/globals_.py index 792057f5..31eb3c85 100644 --- a/tests/common/globals_.py +++ b/tests/common/globals_.py @@ -5,6 +5,9 @@ # pylint: disable=missing-docstring r"""Some global variables for test cases. 
""" +import pathlib + + DATA_PAIRS = ( ('None', None), ('1', 1), @@ -17,3 +20,8 @@ ) TEST_DATA_FILENAME: str = "test_data.py" + +TEST_DATA_MAJOR_VERSION: int = 1 + +TESTDIR: pathlib.Path = pathlib.Path(__file__).parent.parent.resolve() +RESOURCE_DIR: pathlib.Path = TESTDIR / "res" / str(TEST_DATA_MAJOR_VERSION) diff --git a/tests/common/loader.py b/tests/common/loader.py deleted file mode 100644 index 7d7f6065..00000000 --- a/tests/common/loader.py +++ /dev/null @@ -1,56 +0,0 @@ -# -# Copyright (C) 2023, 2024 Satoru SATOH -# SPDX-License-Identifier: MIT -# -# pylint: disable=missing-docstring,too-few-public-methods -r"""Loader test cases. -""" -import pathlib -import typing -import warnings - -import anyconfig.ioinfo - - -class TestCase: - """Base class for loader test cases. - """ - psr_cls = None - - def _get_all( - self, ipath: pathlib.Path, aux: typing.Dict[str, typing.Any] - ): - if self.psr_cls is None: - warnings.warn( # noqa - f"Failed to ini test target: {__file__}" - ) - psr = None - else: - psr = self.psr_cls() # pylint: disable=not-callable - - ioi = anyconfig.ioinfo.make(ipath) - - return ( - aux["e"], # It should NOT fail. - aux.get("o", {}), - psr, - ioi - ) - - def _assert_loads( - self, ipath: pathlib.Path, aux: typing.Dict[str, typing.Any] - ): - (exp, opts, psr, _ioi) = self._get_all(ipath, aux) - if 'b' in psr._open_read_mode: # pylint: disable=protected-access - res = psr.loads(ipath.read_bytes(), **opts) - else: - res = psr.loads(ipath.read_text(), **opts) - - assert res == exp, f"'{res!r}' vs. '{exp!r}'" - - def _assert_load( - self, ipath: pathlib.Path, aux: typing.Dict[str, typing.Any] - ): - (exp, opts, psr, ioi) = self._get_all(ipath, aux) - res = psr.load(ioi, **opts) - assert res == exp, f"'{res!r}' vs. 
'{exp!r}'" diff --git a/tests/common/paths.py b/tests/common/paths.py index f53ddb27..e598aa2b 100644 --- a/tests/common/paths.py +++ b/tests/common/paths.py @@ -1,20 +1,17 @@ # -# Copyright (C) 2023 Satoru SATOH +# Copyright (C) 2023, 2024 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring r"""Compute paths. """ +from __future__ import annotations + import pathlib import typing from . import load - - -TEST_DATA_MAJOR_VERSION: int = 1 - -TESTDIR: pathlib.Path = pathlib.Path(__file__).parent.parent.resolve() -RESOURCE_DIR: pathlib.Path = TESTDIR / "res" / str(TEST_DATA_MAJOR_VERSION) +from .globals_ import RESOURCE_DIR def get_resource_dir( @@ -30,9 +27,9 @@ def get_resource_dir( def get_aux_data_paths( ipath: pathlib.Path, - skip_file_exts: typing.Tuple[str, ...] = (".pyc", ), + skip_file_exts: tuple[str, ...] = (".pyc", ), **_kwargs -) -> typing.Dict[str, pathlib.Path]: +) -> dict[str, pathlib.Path]: """Get a map of subdirs and paths to auxiliary data for input, `ipath`. It expects that aux data is in `ipath.parent`/*/. @@ -47,9 +44,7 @@ def get_aux_data_paths( def get_data( topdir: typing.Optional[pathlib.Path], -) -> typing.List[ - typing.Tuple[pathlib.Path, typing.Dict[str, pathlib.Path]] -]: +) -> list[tuple[pathlib.Path, dict[str, pathlib.Path]]]: # find the dir holding input data files. 
pattern = "*.*" if not any(x for x in topdir.iterdir() if x.is_file()): @@ -63,11 +58,9 @@ def get_data( def load_data( topdir: typing.Optional[pathlib.Path], - **kwargs -) -> typing.List[ - typing.Tuple[pathlib.Path, typing.Dict[str, typing.Any]] -]: - return [ + load_idata: bool = False, **kwargs +) -> list[tuple[pathlib.Path, dict[str, typing.Any]]]: + res = [ ( ipath, { @@ -77,3 +70,10 @@ def load_data( ) for ipath, adata in get_data(topdir) ] + if load_idata: + return [ + (ipath, load.load_data(ipath), adata) + for ipath, adata in res + ] + + return res diff --git a/tests/common/tdc.py b/tests/common/tdc.py new file mode 100644 index 00000000..0f7d1d45 --- /dev/null +++ b/tests/common/tdc.py @@ -0,0 +1,84 @@ +# +# Copyright (C) 2023, 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +# pylint: disable=missing-docstring +r"""Test Data Collecor.""" +from __future__ import annotations + +import itertools +import os.path +import pathlib +import re +import typing + +from . import paths, globals_ + + +TEST_FILE_RE: re.Pattern = re.compile(r"test_(.+).py") + +VALUES: tuple[tuple[str, typing.Optional[dict], ...], ...] = ( + ("o", {}), ("e", None) +) + +LVL_DEFAULT: int = 3 + + +def get_test_id(path: pathlib.Path, level: int = LVL_DEFAULT) -> str: + return os.path.join(*path.parts[-level:]) + + +def get_test_ids( + data: list[tuple[pathlib.Path, typing.Any, ...]], + level: int = LVL_DEFAULT +) -> list[str]: + return [get_test_id(p, level=level) for p, *_ in data] + + +def get_test_resdir( + testfile: str, + topdir: pathlib.Path = globals_.TESTDIR, + resdir: pathlib.Path = globals_.RESOURCE_DIR, + pattern: re.Pattern = TEST_FILE_RE +) -> pathlib.Path: + """Get test resource dir for given test file path. + + ex. 
tests/api/single_load/test_query.py + -> /path/to/tests/res/1/api/single_load/query/ + """ + path = pathlib.Path(testfile).resolve() + subdir = pattern.match(path.name).groups()[0] + relpath = os.path.join( + *[x for x, y in itertools.zip_longest(path.parent.parts, topdir.parts) + if y is None] + ) + + return resdir / relpath / subdir + + +def load_data_for_testfile( + testfile: str, + values: tuple[tuple[str, typing.Optional[dict], ...], ...] = VALUES, + load_idata: bool = False, + datadir: typing.Optional[pathlib.Path] = None, + **opts +) -> list[tuple[pathlib.Path, dict[str, typing.Any], ...]]: + """Collct test data for test file, ``testfile``. + + :param testfile: a str represents test file path + :param opts: keyword options for `get_test_resdir` + """ + if datadir is None: + datadir = get_test_resdir(testfile, **opts) + + if load_idata: + return [ + (ipath, idata, *[aux.get(k, v) for k, v in values]) + for ipath, idata, aux + in paths.load_data(datadir, load_idata=True) + ] + + return [ + (ipath, *[aux.get(k, v) for k, v in values]) + for ipath, aux in paths.load_data(datadir) + ] diff --git a/tests/common/tdi_base.py b/tests/common/tdi_base.py deleted file mode 100644 index 8a9accb2..00000000 --- a/tests/common/tdi_base.py +++ /dev/null @@ -1,73 +0,0 @@ -# -# Copyright (C) 2023, 2024 Satoru SATOH -# SPDX-License-Identifier: MIT -# -# pylint: disable=missing-docstring,too-few-public-methods -r"""Classes to load data sets. -""" -import importlib -import re - -from . 
import paths - - -def name_from_path(path: str): - """Compute a name from given path `path`.""" - match = re.match(r".+[/\\:\.]test_([^_]+)_([^_]+).py", path) - if not match: - raise NameError(f"Filename does not match expected pattern: {path}") - - return ".".join(match.groups()) - - -class TDI: - """A base class to `inject` datasets for loaders and dumpers to test.""" - _path: str = "" - - # Override it in children: - # _cid: str = name_from_path(__file__) - _cid: str = "" - _is_loader: bool = True - - _data = [] - _mod = None - - @classmethod - def cid(cls) -> str: - return cls._cid - - @classmethod - def is_loader(cls) -> bool: - return cls._is_loader - - def __init__(self): - """Initialize members.""" - self.topdir = paths.get_resource_dir(self.cid(), self.is_loader()) - - def load(self): - self._data = paths.load_data(self.topdir) - - def get_data(self): - if not self._data: - try: - self.load() - except FileNotFoundError: - pass - - return self._data - - def get_data_ids(self): - return [f"{i.parent.name}/{i.name}" for i, _aux in self.get_data()] - - def get_mod(self): - if self._mod is None: - mname = f"anyconfig.backend.{self.cid()}" - try: - self._mod = importlib.import_module(mname) - except ModuleNotFoundError: - pass - - return self._mod - - def get_all(self): - return (self.get_mod(), self.get_data(), self.get_data_ids()) diff --git a/tests/common/test_globals_.py b/tests/common/test_globals_.py new file mode 100644 index 00000000..58f67625 --- /dev/null +++ b/tests/common/test_globals_.py @@ -0,0 +1,16 @@ +# +# Copyright (C) 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +# pylint: disable=missing-docstring +r"""Test cases for tests.common.paths. +""" +import os.path + +from . import globals_ as TT + + +def test_module_globals(): + assert str(TT.TESTDIR) == os.path.abspath( + f"{os.path.dirname(__file__)}/.." 
+ ) diff --git a/tests/common/test_paths.py b/tests/common/test_paths.py index 61359670..2936a580 100644 --- a/tests/common/test_paths.py +++ b/tests/common/test_paths.py @@ -1,12 +1,13 @@ # -# Copyright (C) 2023 Satoru SATOH +# Copyright (C) 2023, 2024 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring r"""Test cases for tests.common.paths. """ +from __future__ import annotations + import json -import os.path import pathlib import typing @@ -15,12 +16,6 @@ from . import paths as TT -def test_module_globals(): - assert str(TT.TESTDIR) == os.path.abspath( - f"{os.path.dirname(__file__)}/.." - ) - - @pytest.mark.parametrize( ("loader_or_dumper", "is_loader", "topdir", "exp"), (("json.json", True, None, TT.RESOURCE_DIR / "loaders" / "json.json"), @@ -51,8 +46,7 @@ def test_get_resource_dir( ) ) def test_get_aux_data_paths( - ipath: str, aux_paths: typing.Tuple[str, ...], - tmp_path: pathlib.Path + ipath: str, aux_paths: tuple[str, ...], tmp_path: pathlib.Path ): (tmp_path / ipath).touch() paths = [tmp_path / a for a in aux_paths] @@ -88,7 +82,7 @@ def test_get_aux_data_paths( ) ) def test_get_data( - ipaths: typing.Tuple[str], aux_paths: typing.Tuple[str, ...], + ipaths: tuple[str], aux_paths: tuple[str, ...], tmp_path: pathlib.Path ): for ipath in ipaths: @@ -138,8 +132,8 @@ def test_get_data( ) ) def test_load_data( - ipaths: typing.Tuple[str], - aux_data: typing.Tuple[typing.Tuple[str, typing.Any], ...], + ipaths: tuple[str], + aux_data: tuple[tuple[str, typing.Any], ...], tmp_path: pathlib.Path ): for ipath in ipaths: diff --git a/tests/common/test_tdc.py b/tests/common/test_tdc.py new file mode 100644 index 00000000..202b404b --- /dev/null +++ b/tests/common/test_tdc.py @@ -0,0 +1,138 @@ +# +# Copyright (C) 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +# pylint: disable=missing-docstring,too-few-public-methods +r"""Test cases for Test Data Collector.""" +from __future__ import annotations + +import json +import pathlib + 
+import pytest + +from . import tdc as TT, globals_ as G + + +SELF = pathlib.Path(__file__) +CUDIR = SELF.parent + + +@pytest.mark.parametrize( + ("path", "level", "exp"), + (("a/b/c/d/e.py", TT.LVL_DEFAULT, "c/d/e.py"), + ("/a/b/c/d/e.py", TT.LVL_DEFAULT, "c/d/e.py"), + ("/a/b/c/d/e.py", 4, "b/c/d/e.py"), + ), +) +def test_get_test_id(path, level, exp) -> None: + exp = str(pathlib.Path(exp)) + assert TT.get_test_id(pathlib.Path(path), level=level) == exp + + +@pytest.mark.parametrize( + ("data", "level", "exp"), + (([], 1, []), + ([(pathlib.Path("a/b/c/d/e.py"), {}, None)], + TT.LVL_DEFAULT, ["c/d/e.py"]), + ), +) +def test_get_test_ids(data, level, exp) -> None: + exp = [str(pathlib.Path(e)) for e in exp] + assert TT.get_test_ids(data, level=level) == exp + + +TEST_FILE_10 = "foobar/baz/test_xyz.py" +TEST_TOP_DIR_10 = pathlib.Path("/home/foo/projects/bar/tests") +TEST_RES_DIR_10 = TEST_TOP_DIR_10 / "resources" +TEST_DATA_PATH_10 = TEST_RES_DIR_10 / "foobar" / "baz" / "xyz" + + +@pytest.mark.parametrize( + ("path", "opts", "exp"), + ((str(SELF), {}, G.RESOURCE_DIR / "common" / "tdc"), + (str(CUDIR / "test_paths.py"), {}, G.RESOURCE_DIR / "common" / "paths"), + (str(TEST_TOP_DIR_10 / TEST_FILE_10), + {"topdir": TEST_TOP_DIR_10, "resdir": TEST_RES_DIR_10}, + TEST_DATA_PATH_10), + ), + ids=(SELF.name, "test_paths.py", TEST_FILE_10), +) +def test_get_test_resdir(path, opts, exp): + assert TT.get_test_resdir(path, **opts) == exp + + +# .. note:: See files under tests/res/1/common/tdc/. 
+TEST_DATA_10 = [ + ("10/00.json", {}, {}), + ("20/10.json", [1, 2], {"a": "aaa"}), + ("30/20.json", {"a": "A"}, {"b": [1, 2], "c": {"baz": "fbz"}}), +] +TEST_DATA_20 = [ + ( + G.RESOURCE_DIR / "common" / "tdc" / "10" / "100_null.json", + None, + {"e": None} + ), + ( + G.RESOURCE_DIR / "common" / "tdc" / "20" / "220_a_list.json", + [1, 2], + {"e": [1, 2], "o": {"ac_ordered": True}} + ), +] + + +@pytest.mark.parametrize( + ("testfile", "kwargs", "exp"), + (pytest.param( + __file__, {}, + [(i, *[a.get(k, v) for k, v in TT.VALUES]) + for i, _, a in TEST_DATA_20], + id=f"{CUDIR.name}/{SELF.name} without loading data from ipath"), + pytest.param( + __file__, {"load_idata": True}, + [(i, d, *[a.get(k, v) for k, v in TT.VALUES]) + for i, d, a in TEST_DATA_20], + id=f"{CUDIR.name}/{SELF.name} with loading data from ipath"), + # ("foo/bar/test_baz.py", {"values": (("b", []), ("c", {}))}, + # TEST_DATA_10), + ), +) +def test_load_data_for_testfile( + testfile, kwargs, exp, tmp_path +): + if pathlib.Path(testfile).exists(): + assert TT.load_data_for_testfile(testfile, **kwargs) == exp + else: + testfile = tmp_path / testfile + testfile.parent.mkdir(parents=True, exist_ok=True) + testfile.touch() + + # kwargs for TT.get_test_resdir + kwargs.update(topdir=tmp_path, resdir=tmp_path) + resdir = tmp_path / TT.get_test_resdir(testfile, tmp_path, tmp_path) + + exp_new = [] + + for ipath, data, opts in exp: + path = resdir / ipath + path.parent.mkdir(parents=True, exist_ok=True) + json.dump(data, path.open("w")) + + opts_new = [] + for subdir, val in opts.items(): + if subdir not in kwargs.get("values", TT.VALUES): + continue + + (path.parent / subdir).mkdir(exist_ok=True) + + aname = path.name.replace(path.suffix, ".py") + (path.parent / subdir / aname).write_text(repr(val)) + opts_new.append({subdir: val}) + + if kwargs.get("load_idata", False): + exp_new.append((path, data, *opts_new)) + else: + exp_new.append((path, *opts_new)) + + assert TT.load_data_for_testfile(str(testfile), 
**kwargs) == exp_new diff --git a/tests/common/test_tdi_base.py b/tests/common/test_tdi_base.py deleted file mode 100644 index c9d94a12..00000000 --- a/tests/common/test_tdi_base.py +++ /dev/null @@ -1,60 +0,0 @@ -# -# Copyright (C) 2024 Satoru SATOH -# SPDX-License-Identifier: MIT -# -# pylint: disable=missing-docstring,invalid-name -r"""Test cases for tests.common.tdi. -""" -import typing - -import pytest - -import tests.common.tdi_base -from . import tdi_base as TT - - -@pytest.mark.parametrize( - ("path", "exp", "exc"), - (("/0/1/2/a/b/c/test_foo_bar.py", "foo.bar", None), - ("c:\\0\\1\\2\\a\\b\\c\\test_foo_bar.py", "foo.bar", None), - ("/0/1/2/foo.py", None, NameError), - ), -) -def test_name_from_path( - path: str, exp: str, exc: typing.Optional[Exception] -): - if exc is None: - assert TT.name_from_path(path) == exp - else: - with pytest.raises(exc): - TT.name_from_path(path) - - -def test_tdi_base_original(): - tdi = TT.TDI() - assert tdi.cid() == TT.TDI.cid() - - -class TDI(TT.TDI): - _cid: str = tests.common.tdi_base.name_from_path(__file__) - - -def test_tdi_base(): - tdi = TDI() - assert tdi.cid() == "tdi.base" - - -class FakeStdJsonLoaderTDI(TT.TDI): - _cid: str = "json.stdlib" # override. - - -def test_tdi_base_children(): - tdi = FakeStdJsonLoaderTDI() - assert tdi.cid() == FakeStdJsonLoaderTDI.cid() - - tdi.load() - (mod, data, data_ids) = tdi.get_all() - - assert data - assert data_ids - assert mod.Parser.cid() == tdi.cid() diff --git a/tests/dicts/common.py b/tests/dicts/common.py deleted file mode 100644 index f8f99d5c..00000000 --- a/tests/dicts/common.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Copyright (C) 2021 Satoru SATOH -# License: MIT -# -# pylint: disable=missing-docstring -import unittest - -from .. 
import base - - -class Collector(base.TDataCollector): - ordered: bool = True - - -class TestCase(unittest.TestCase, Collector): - - def setUp(self): - self.init() - -# vim:sw=4:ts=4:et: diff --git a/tests/dicts/test_functions.py b/tests/dicts/test_functions.py index 637ecd16..d6ed5ae7 100644 --- a/tests/dicts/test_functions.py +++ b/tests/dicts/test_functions.py @@ -1,10 +1,11 @@ # -# Copyright (C) 2021 Satoru SATOH +# Copyright (C) 2021 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring,protected-access -"""Test cases for some functions in anyconfig.parser. -""" +"""Test cases for some functions in anyconfig.parser.""" +from __future__ import annotations + import collections import pytest @@ -13,9 +14,9 @@ @pytest.mark.parametrize( - 'inp,exp', - (('/a~1b', '/a/b'), - ('~1aaa~1~0bbb', '/aaa/~bbb'), + ("inp", "exp"), + (("/a~1b", "/a/b"), + ("~1aaa~1~0bbb", "/aaa/~bbb"), ), ) def test_jsnp_unescape(inp, exp): @@ -23,17 +24,17 @@ def test_jsnp_unescape(inp, exp): @pytest.mark.parametrize( - 'args,exp', - ((('', ), []), - (('/', ), ['']), - (('/a', ), ['a']), - (('.a', ), ['a']), - (('a', ), ['a']), - (('a.', ), ['a']), - (('/a/b/c', ), ['a', 'b', 'c']), - (('a.b.c', ), ['a', 'b', 'c']), - (('abc', ), ['abc']), - (('/a/b/c', ), ['a', 'b', 'c']), + ("args", "exp"), + ((("", ), []), + (("/", ), [""]), + (("/a", ), ["a"]), + ((".a", ), ["a"]), + (("a", ), ["a"]), + (("a.", ), ["a"]), + (("/a/b/c", ), ["a", "b", "c"]), + (("a.b.c", ), ["a", "b", "c"]), + (("abc", ), ["abc"]), + (("/a/b/c", ), ["a", "b", "c"]), ), ) def test_split_path(args, exp): @@ -42,9 +43,9 @@ def test_split_path(args, exp): # FIXME: Add some more test cases @pytest.mark.parametrize( - 'args,exp', - (((dict(a=1, b=dict(c=2, )), 'a.b.d', 3), - dict(a=dict(b=dict(d=3)), b=dict(c=2))), + ("args", "exp"), + ((({"a": 1, "b": {"c": 2}}, "a.b.d", 3), + {"a": {"b": {"d": 3}}, "b": {"c": 2}}), ), ) def test_set_(args, exp): @@ -57,21 +58,24 @@ def test_set_(args, exp): # 
FIXME: Likewise. @pytest.mark.parametrize( - 'args,exp', - (((OD((('a', 1), )), False, dict), dict(a=1)), - ((OD((('a', OD((('b', OD((('c', 1), ))), ))), )), False, dict), - dict(a=dict(b=dict(c=1)))), + ("obj", "opts", "exp"), + ((OD((("a", 1), )), + {"ac_ordered": False, "ac_dict": dict}, + {"a": 1}), + (OD((("a", OD((("b", OD((("c", 1), ))), ))), )), + {"ac_ordered": False, "ac_dict": dict}, + {"a": {"b": {"c": 1}}}), ), ) -def test_convert_to(args, exp): - assert TT.convert_to(*args) == exp +def test_convert_to(obj, opts, exp): + assert TT.convert_to(obj, **opts) == exp @pytest.mark.parametrize( - 'objs,exp', - ((([], (), [x for x in range(10)], (x for x in range(4))), True), + ("objs", "exp"), + ((([], (), list(range(10)), list(range(4))), True), (([], {}), False), - (([], 'aaa'), False), + (([], "aaa"), False), ), ) def test_are_list_like(objs, exp): diff --git a/tests/dicts/test_get.py b/tests/dicts/test_get.py index fb31e5bb..5e442029 100644 --- a/tests/dicts/test_get.py +++ b/tests/dicts/test_get.py @@ -1,28 +1,34 @@ # # Forked from m9dicts.tests.{api,dicts} # -# Copyright (C) 2011 - 2021 Satoru SATOH +# Copyright (C) 2011 - 2024 Satoru SATOH # # pylint: disable=missing-docstring,invalid-name +from __future__ import annotations + +import pytest + import anyconfig.dicts as TT -from .. import base -from . import common +from .. import common + +NAMES: list[str] = ("obj", "query", "exp", "emsg") +DATA_0: list[tuple] = common.load_data_for_testfile( + __file__, (("q", ""), ("e", None), ("s", "")), + load_idata=True +) +DATA: list[tuple] = [(d, *rest) for _, d, *rest in DATA_0] +DATA_IDS: list[str] = common.get_test_ids(DATA_0) -class TestCase(common.TestCase): - kind = 'get' - def test_get(self): - for data in self.each_data(): - emsg = base.load_data(data.scm) # diversion. 
- (res, err) = TT.get(data.inp, data.query) +def test_data(): + assert DATA - if emsg: - self.assertTrue(bool(err), data) - else: # emsg = '' - self.assertEqual(err, '', data) - self.assertEqual(res, data.exp, data) +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_get(obj, query, exp, emsg): + (res, err) = TT.get(obj, query) -# vim:sw=4:ts=4:et: + assert bool(err) if emsg else err == "" + assert res == exp diff --git a/tests/dicts/test_merge.py b/tests/dicts/test_merge.py index 6b11bb37..9ff1c42b 100644 --- a/tests/dicts/test_merge.py +++ b/tests/dicts/test_merge.py @@ -1,38 +1,45 @@ # # Forked from m9dicts.tests.{api,dicts} # -# Copyright (C) 2011 - 2021 Satoru SATOH +# Copyright (C) 2011 - 2024 Satoru SATOH # -# pylint: disable=missing-docstring,invalid-name +# pylint: disable=missing-docstring +from __future__ import annotations + +import pytest + import anyconfig.dicts as TT -from .. import base -from . import common +from .. import common + + +NAMES: list[str] = ("obj", "exp", "upd", "opts") +DATA_0: list[tuple] = common.load_data_for_testfile( + __file__, (("e", None), ("s", {}), ("o", {})), + load_idata=True +) +DATA: list[tuple] = [(d, *rest) for _, d, *rest in DATA_0] +DATA_IDS: list[str] = common.get_test_ids(DATA_0) + +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_merge(obj, exp, upd, opts): + TT.merge(obj, upd, **opts) + assert obj == exp -class TestCase(common.TestCase): - kind = 'merge' - def test_merge(self): - for data in self.each_data(): - upd = base.load_data(data.scm, ordered=True) # diversion. 
- TT.merge(data.inp, upd, **data.opts) - self.assertEqual(data.inp, data.exp, data) +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_merge_with_a_dict(obj, exp, upd, opts): + TT.merge(obj, upd, **opts) + assert obj == exp - def test_merge_with_a_dict(self): - for data in self.each_data(): - upd = base.load_data(data.scm) - TT.merge(data.inp, upd, **data.opts) - self.assertEqual(data.inp, data.exp, data) - def test_merge_with_an_iterable(self): - for data in self.each_data(): - upd = base.load_data(data.scm).items() - TT.merge(data.inp, upd, **data.opts) - self.assertEqual(data.inp, data.exp, data) +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_merge_with_an_iterable(obj, exp, upd, opts): + TT.merge(obj, upd.items(), **opts) + assert obj == exp - def test_merge_with_invalid_data(self): - with self.assertRaises((ValueError, TypeError)): - TT.merge(dict(a=1), 1) -# vim:sw=4:ts=4:et: +def test_merge_with_invalid_data(): + with pytest.raises((ValueError, TypeError)): + TT.merge({"a": 1}, 1) diff --git a/tests/dicts/test_mk_nested_dic.py b/tests/dicts/test_mk_nested_dic.py index 0a6e75a7..201f0439 100644 --- a/tests/dicts/test_mk_nested_dic.py +++ b/tests/dicts/test_mk_nested_dic.py @@ -1,25 +1,27 @@ # # Forked from m9dicts.tests.{api,dicts} # -# Copyright (C) 2011 - 2021 Satoru SATOH +# Copyright (C) 2011 - 2024 Satoru SATOH # -# pylint: disable=missing-docstring,invalid-name +# pylint: disable=missing-docstring +from __future__ import annotations + +import pytest + import anyconfig.dicts as TT -from . import common +from .. 
import common -class TestCase(common.TestCase): - kind = 'mk_nested_dic' - pattern = '*.*' - ordered = False +NAMES: list[str] = ("obj", "val", "exp", "opts") +DATA_0: list[tuple] = common.load_data_for_testfile( + __file__, (("q", ""), ("e", None), ("o", {})), + load_idata=True +) +DATA: list[tuple] = [(d, *rest) for _, d, *rest in DATA_0] +DATA_IDS: list[str] = common.get_test_ids(DATA_0) - def test_mk_nested_dic(self): - for data in self.each_data(): - val = data.query # diversion. - self.assertEqual( - TT.mk_nested_dic(data.inp, val, **data.opts), - data.exp - ) -# vim:sw=4:ts=4:et: +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_mk_nested_dic(obj, val, exp, opts): + assert TT.mk_nested_dic(obj, val, **opts) == exp diff --git a/tests/ioinfo/constants.py b/tests/ioinfo/constants.py index 70fde95f..724f7bd6 100644 --- a/tests/ioinfo/constants.py +++ b/tests/ioinfo/constants.py @@ -1,11 +1,9 @@ # -# Copyright (C) 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2021 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring import pathlib TEST_PY = pathlib.Path(__file__).resolve() - -# vim:sw=4:ts=4:et: diff --git a/tests/ioinfo/test_detectors.py b/tests/ioinfo/test_detectors.py index 2b2f439b..8ee2b586 100644 --- a/tests/ioinfo/test_detectors.py +++ b/tests/ioinfo/test_detectors.py @@ -1,88 +1,77 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH +# Copyright (C) 2012 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # -# pylint: disable=missing-docstring,consider-using-with -r"""test cases for anyconfig.ioinfo.detectors. 
-""" +# pylint: disable=missing-docstring +r"""test cases for anyconfig.ioinfo.detectors.""" +from __future__ import annotations + import pathlib -import unittest -import typing + +import pytest import anyconfig.ioinfo import anyconfig.ioinfo.detectors as TT -class TestCase(unittest.TestCase): - - def _run(self, - target_fn: typing.Callable[..., typing.Any], - ies: typing.Iterable[typing.Tuple[typing.Any, bool]] - ) -> None: - for inp, exp in ies: - meth = self.assertTrue if exp else self.assertFalse - meth(target_fn(inp), f'input: {inp!r}, expected: {exp!r}') - - def test_is_path_str(self): - self._run( - TT.is_path_str, - ((None, False), - ('/tmp/t.xt', True), - (0, False), - (pathlib.Path(__file__), False), - (open(__file__), False), - (anyconfig.ioinfo.make(__file__), False), - ) - ) - - def test_is_path_obj(self): - self._run( - TT.is_path_obj, - ((None, False), - (__file__, False), - (pathlib.Path(__file__), True), - (str(pathlib.Path(__file__).resolve()), False), - (open(__file__), False), - (anyconfig.ioinfo.make(__file__), False), - ) - ) - - def test_is_io_stream(self): - self._run( - TT.is_io_stream, - ((None, False), - (__file__, False), - (pathlib.Path(__file__), False), - (str(pathlib.Path(__file__).resolve()), False), - (open(__file__), True), - (anyconfig.ioinfo.make(__file__), False), - ) - ) +PATH_STR_10 = __file__ +PATH_OBJ_10 = pathlib.Path(PATH_STR_10) +FILE_OBJ_10 = open(__file__, encoding="utf-8") +IOI_OBJ_10 = anyconfig.ioinfo.make(__file__) - def test_is_ioinfo(self): - self._run( - TT.is_ioinfo, - ((None, False), - (__file__, False), - (pathlib.Path(__file__), False), - (str(pathlib.Path(__file__).resolve()), False), - (open(__file__), False), - (anyconfig.ioinfo.make(__file__), True), - (anyconfig.ioinfo.make(open(__file__)), True), - ) - ) +PATH_OBJ_20 = PATH_OBJ_10.resolve() +PATH_STR_20 = str(PATH_OBJ_20) +IOI_OBJ_20 = anyconfig.ioinfo.make(FILE_OBJ_10) - def test_is_stream(self): - self._run( - TT.is_stream, - ((None, False), - 
(__file__, False), - (pathlib.Path(__file__), False), - (str(pathlib.Path(__file__).resolve()), False), - (open(__file__), False), - (anyconfig.ioinfo.make(__file__), False), - (anyconfig.ioinfo.make(open(__file__)), True), - ) - ) -# vim:sw=4:ts=4:et: +@pytest.mark.parametrize( + ("target_fn", "obj", "exp"), + ((TT.is_path_str, None, False), + (TT.is_path_str, 0, False), + (TT.is_path_str, PATH_OBJ_10, False), + (TT.is_path_str, PATH_OBJ_20, False), + (TT.is_path_str, FILE_OBJ_10, False), + (TT.is_path_str, IOI_OBJ_10, False), + (TT.is_path_str, IOI_OBJ_20, False), + (TT.is_path_str, PATH_STR_10, True), + (TT.is_path_str, PATH_STR_20, True), + (TT.is_path_obj, None, False), + (TT.is_path_obj, 0, False), + (TT.is_path_obj, PATH_STR_10, False), + (TT.is_path_obj, PATH_STR_20, False), + (TT.is_path_obj, FILE_OBJ_10, False), + (TT.is_path_obj, IOI_OBJ_10, False), + (TT.is_path_obj, IOI_OBJ_20, False), + (TT.is_path_obj, PATH_OBJ_10, True), + (TT.is_path_obj, PATH_OBJ_20, True), + (TT.is_io_stream, None, False), + (TT.is_io_stream, 0, False), + (TT.is_io_stream, PATH_STR_10, False), + (TT.is_io_stream, PATH_STR_20, False), + (TT.is_io_stream, PATH_OBJ_10, False), + (TT.is_io_stream, PATH_OBJ_20, False), + (TT.is_io_stream, IOI_OBJ_10, False), + (TT.is_io_stream, IOI_OBJ_20, False), + (TT.is_io_stream, FILE_OBJ_10, True), + (TT.is_ioinfo, None, False), + (TT.is_ioinfo, 0, False), + (TT.is_ioinfo, PATH_STR_10, False), + (TT.is_ioinfo, PATH_STR_20, False), + (TT.is_ioinfo, PATH_OBJ_10, False), + (TT.is_ioinfo, PATH_OBJ_20, False), + (TT.is_ioinfo, FILE_OBJ_10, False), + (TT.is_ioinfo, IOI_OBJ_10, True), + (TT.is_ioinfo, IOI_OBJ_20, True), + (TT.is_stream, None, False), + (TT.is_stream, 0, False), + (TT.is_stream, PATH_STR_10, False), + (TT.is_stream, PATH_STR_20, False), + (TT.is_stream, PATH_OBJ_10, False), + (TT.is_stream, PATH_OBJ_20, False), + (TT.is_stream, FILE_OBJ_10, False), + (TT.is_stream, IOI_OBJ_10, False), + (TT.is_stream, IOI_OBJ_20, True), + ), +) +def 
test_is_path_str(target_fn, obj, exp): + assert (target_fn(obj) == exp if exp else not target_fn(obj)) diff --git a/tests/ioinfo/test_factory.py b/tests/ioinfo/test_factory.py index c88db0d4..e4562ffc 100644 --- a/tests/ioinfo/test_factory.py +++ b/tests/ioinfo/test_factory.py @@ -1,11 +1,12 @@ # -# Copyright (C) 2018 - 2021 Satoru SATOH +# Copyright (C) 2018 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # -# pylint: disable=missing-docstring, invalid-name -"""Test cases for anyconfig.ioinfo.factory. -""" -import unittest +# pylint: disable=missing-docstring +"""Test cases for anyconfig.ioinfo.factory.""" +from __future__ import annotations + +import pytest import anyconfig.ioinfo.factory as TT @@ -17,28 +18,30 @@ TEST_IOI_PATH_OBJ = IOInfo( - src=TEST_PY, type=IOI_PATH_OBJ, path=str(TEST_PY), extension='py' + src=TEST_PY, type=IOI_PATH_OBJ, path=str(TEST_PY), extension="py" ) TEST_IOI_STREAM = IOInfo( - src=TEST_PY.open(), type=IOI_STREAM, path=str(TEST_PY), extension='py' + src=TEST_PY.open(), type=IOI_STREAM, path=str(TEST_PY), extension="py" ) -class TestCase(unittest.TestCase): - - def test_make(self): - ies = ( - (TEST_IOI_PATH_OBJ, TEST_IOI_PATH_OBJ), - (TEST_IOI_STREAM, TEST_IOI_STREAM), - (str(TEST_PY), TEST_IOI_PATH_OBJ), - ) - for inp, exp in ies: - self.assertEqual(TT.make(inp), exp) +@pytest.mark.parametrize( + ("obj", "exp"), + (pytest.param(TEST_IOI_PATH_OBJ, TEST_IOI_PATH_OBJ, id="pathlib.Path"), + pytest.param(TEST_IOI_STREAM, TEST_IOI_STREAM, id="IO stream"), + pytest.param(str(TEST_PY), TEST_IOI_PATH_OBJ, id="path (str)"), + ), +) +def test_make(obj, exp): + assert TT.make(obj) == exp - def test_make_failures(self): - inps = (None, ) - for inp in inps: - with self.assertRaises(ValueError): - TT.make(inp) -# vim:sw=4:ts=4:et: +@pytest.mark.filterwarnings("ignore") +@pytest.mark.parametrize( + ("obj", ), + ((None, ), + ), +) +def test_make_failures(obj): + with pytest.raises(ValueError): + TT.make(obj) diff --git a/tests/ioinfo/test_utils.py 
b/tests/ioinfo/test_utils.py index 91b306b6..53b3f0ff 100644 --- a/tests/ioinfo/test_utils.py +++ b/tests/ioinfo/test_utils.py @@ -1,10 +1,11 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH +# Copyright (C) 2012 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring -r"""Test cases for anyconfig.utils.files. -""" +r"""Test cases for anyconfig.utils.files.""" +from __future__ import annotations + import pathlib import pytest @@ -16,18 +17,18 @@ @pytest.mark.parametrize( - ('inp', 'exp'), - ((SELF, (SELF.resolve(), 'py')), + ("obj", "exp"), + ((SELF, (SELF.resolve(), "py")), ) ) -def test_get_path_and_ext(inp, exp): - res = TT.get_path_and_ext(inp) +def test_get_path_and_ext(obj, exp): + res = TT.get_path_and_ext(obj) assert res == exp try: PATH_RESOLVE_SHOULD_WORK: bool = bool( - pathlib.Path('').expanduser().resolve() + pathlib.Path("").expanduser().resolve() ) except (RuntimeError, OSError): PATH_RESOLVE_SHOULD_WORK: bool = False @@ -35,35 +36,33 @@ def test_get_path_and_ext(inp, exp): @pytest.mark.skipif( PATH_RESOLVE_SHOULD_WORK, - reason='pathlib.Path.resolve() should work' + reason="pathlib.Path.resolve() should work" ) def test_get_path_and_ext_failures(): - path = pathlib.Path('') + path = pathlib.Path("") res = TT.get_path_and_ext(path) - assert res == (path, '') + assert res == (path, "") def test_expand_from_path(tmp_path): - tdir = tmp_path / 'a' / 'b' / 'c' + tdir = tmp_path / "a" / "b" / "c" tdir.mkdir(parents=True) - pathlib.Path(tdir / 'd.txt').touch() - pathlib.Path(tdir / 'e.txt').touch() - pathlib.Path(tdir / 'f.json').write_text("{'a': 1}\n") - - path = tdir / 'd.txt' - - for inp, exp in ((path, [path]), - (tdir / '*.txt', - [tdir / 'd.txt', tdir / 'e.txt']), - (tdir.parent / '**' / '*.txt', - [tdir / 'd.txt', tdir / 'e.txt']), - (tdir.parent / '**' / '*.*', - [tdir / 'd.txt', - tdir / 'e.txt', - tdir / 'f.json']), + pathlib.Path(tdir / "d.txt").touch() + pathlib.Path(tdir / "e.txt").touch() + pathlib.Path(tdir / 
"f.json").write_text("{'a': 1}\n") + + path = tdir / "d.txt" + + for obj, exp in ((path, [path]), + (tdir / "*.txt", + [tdir / "d.txt", tdir / "e.txt"]), + (tdir.parent / "**" / "*.txt", + [tdir / "d.txt", tdir / "e.txt"]), + (tdir.parent / "**" / "*.*", + [tdir / "d.txt", + tdir / "e.txt", + tdir / "f.json"]), ): - res = sorted(TT.expand_from_path(inp)) - assert res == sorted(exp), f'{inp!r} vs. {exp!r}' - -# vim:sw=4:ts=4:et: + res = sorted(TT.expand_from_path(obj)) + assert res == sorted(exp), f"{obj!r} vs. {exp!r}" diff --git a/tests/parser/common.py b/tests/parser/common.py deleted file mode 100644 index 89371489..00000000 --- a/tests/parser/common.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Copyright (C) 2021 Satoru SATOH -# License: MIT -# -# pylint: disable=missing-docstring -import unittest - -from .. import base - - -class Collector(base.TDataCollector): - pass - - -class TestCase(unittest.TestCase, Collector): - - def setUp(self): - self.init() - -# vim:sw=4:ts=4:et: diff --git a/tests/parser/test_attrlist.py b/tests/parser/test_attrlist.py index 9b01d28a..4632ec5f 100644 --- a/tests/parser/test_attrlist.py +++ b/tests/parser/test_attrlist.py @@ -1,25 +1,31 @@ # -# Copyright (C) 2021 Satoru SATOH +# Copyright (C) 2021 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring -"""Test cases for anyconfig.parser.parse_attrlist. -""" +"""Test cases for anyconfig.parser.parse_attrlist.""" +from __future__ import annotations + +import pytest + import anyconfig.parser as TT -from . import common +from .. 
import common + + +NAMES: list[str] = ("obj", "exp", "opts") +DATA_0: list[tuple] = common.load_data_for_testfile( + __file__, (("e", None), ("o", {})), + load_idata=True +) +DATA: list[tuple] = [(d, *rest) for _, d, *rest in DATA_0] +DATA_IDS: list[str] = common.get_test_ids(DATA_0) -class TestCase(common.TestCase): - kind = 'attrlist' - pattern = '*.txt' +def test_data(): + assert DATA - def test_parse_attrlist(self): - for data in self.each_data(): - self.assertEqual( - TT.parse_attrlist(data.inp, **data.opts), - data.exp, - data - ) -# vim:sw=4:ts=4:et: +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_parse_attrlist(obj, exp, opts) -> None: + assert TT.parse_attrlist(obj, **opts) == exp diff --git a/tests/parser/test_attrlist_0.py b/tests/parser/test_attrlist_0.py index 86d52bc7..4f49e45b 100644 --- a/tests/parser/test_attrlist_0.py +++ b/tests/parser/test_attrlist_0.py @@ -1,25 +1,31 @@ # -# Copyright (C) 2021 Satoru SATOH +# Copyright (C) 2021 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring -"""Test cases for anyconfig.parser.parse_attrlist_0. -""" +"""Test cases for anyconfig.parser.parse_attrlist_0.""" +from __future__ import annotations + +import pytest + import anyconfig.parser as TT -from . import common +from .. 
import common + + +NAMES: list[str] = ("obj", "exp", "opts") +DATA_0: list[tuple] = common.load_data_for_testfile( + __file__, (("e", None), ("o", {})), + load_idata=True +) +DATA: list[tuple] = [(d, *rest) for _, d, *rest in DATA_0] +DATA_IDS: list[str] = common.get_test_ids(DATA_0) -class TestCase(common.TestCase): - kind = 'attrlist_0' - pattern = '*.txt' +def test_data(): + assert DATA - def test_parse_attrlist_0(self): - for data in self.each_data(): - self.assertEqual( - TT.parse_attrlist_0(data.inp, **data.opts), - data.exp, - data - ) -# vim:sw=4:ts=4:et: +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_parse_attrlist_0(obj, exp, opts) -> None: + assert TT.parse_attrlist_0(obj, **opts) == exp diff --git a/tests/parser/test_list.py b/tests/parser/test_list.py index a9313dce..e0489fa9 100644 --- a/tests/parser/test_list.py +++ b/tests/parser/test_list.py @@ -1,25 +1,31 @@ # -# Copyright (C) 2021 Satoru SATOH +# Copyright (C) 2021 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring -"""Test cases for anyconfig.parser.parse_list. -""" +"""Test cases for anyconfig.parser.parse_list.""" +from __future__ import annotations + +import pytest + import anyconfig.parser as TT -from . import common +from .. 
import common + + +NAMES: list[str] = ("obj", "exp", "opts") +DATA_0: list[tuple] = common.load_data_for_testfile( + __file__, (("e", None), ("o", {})), + load_idata=True +) +DATA: list[tuple] = [(d, *rest) for _, d, *rest in DATA_0] +DATA_IDS: list[str] = common.get_test_ids(DATA_0) -class TestCase(common.TestCase): - kind = 'list' - pattern = '*.*' +def test_data(): + assert DATA - def test_parse_list(self): - for data in self.each_data(): - self.assertEqual( - TT.parse_list(data.inp, **data.opts), - data.exp, - data - ) -# vim:sw=4:ts=4:et: +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_parse_list(obj, exp, opts) -> None: + assert TT.parse_list(obj, **opts) == exp diff --git a/tests/parser/test_parse.py b/tests/parser/test_parse.py index ca7b2184..3266b597 100644 --- a/tests/parser/test_parse.py +++ b/tests/parser/test_parse.py @@ -1,25 +1,31 @@ # -# Copyright (C) 2021 Satoru SATOH +# Copyright (C) 2021 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring -"""Test cases for anyconfig.parser.parse. -""" +"""Test cases for anyconfig.parser.parse.""" +from __future__ import annotations + +import pytest + import anyconfig.parser as TT -from . import common +from .. 
import common + + +NAMES: list[str] = ("obj", "exp", "opts") +DATA_0: list[tuple] = common.load_data_for_testfile( + __file__, (("e", None), ("o", {})), + load_idata=True +) +DATA: list[tuple] = [(d, *rest) for _, d, *rest in DATA_0] +DATA_IDS: list[str] = common.get_test_ids(DATA_0) -class TestCase(common.TestCase): - kind = 'parse' - pattern = '*.*' +def test_data(): + assert DATA - def test_parse(self): - for data in self.each_data(): - self.assertEqual( - TT.parse(data.inp, **data.opts), - data.exp, - data - ) -# vim:sw=4:ts=4:et: +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_parse(obj, exp, opts) -> None: + assert TT.parse(obj, **opts) == exp diff --git a/tests/parser/test_single.py b/tests/parser/test_single.py index e5367dee..853179cc 100644 --- a/tests/parser/test_single.py +++ b/tests/parser/test_single.py @@ -1,21 +1,30 @@ # -# Copyright (C) 2021 Satoru SATOH +# Copyright (C) 2021 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring -"""Test cases for anyconfig.parser.parse_single. -""" +"""Test cases for anyconfig.parser.parse_single.""" +from __future__ import annotations + +import pytest + import anyconfig.parser as TT -from . import common +from .. 
import common + + +NAMES: list[str] = ("obj", "exp") +DATA_0: list[tuple] = common.load_data_for_testfile( + __file__, (("e", None), ), load_idata=True +) +DATA: list[tuple] = [(d, *rest) for _, d, *rest in DATA_0] +DATA_IDS: list[str] = common.get_test_ids(DATA_0) -class TestCase(common.TestCase): - kind = 'single' - pattern = '*.*' +def test_data(): + assert DATA - def test_parse_single(self): - for data in self.each_data(): - self.assertEqual(TT.parse_single(data.inp), data.exp) -# vim:sw=4:ts=4:et: +@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) +def test_parse_single(obj, exp) -> None: + assert TT.parse_single(obj) == exp diff --git a/tests/parsers/test_parsers.py b/tests/parsers/test_parsers.py index cd85fcb9..f0a7b799 100644 --- a/tests/parsers/test_parsers.py +++ b/tests/parsers/test_parsers.py @@ -1,14 +1,18 @@ # -# Copyright (C) 2012 - 2024 Satoru SATOH +# Copyright (C) 2012 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # -# pylint: disable=missing-docstring, invalid-name +# pylint: disable=missing-docstring +"""Test cases for anyconfig.parsers. +""" +from __future__ import annotations import pathlib -import unittest + +import pytest import anyconfig.backend.json -import anyconfig.backend.json.stdlib as JSON +import anyconfig.backend.json.stdlib as JSONStdlib try: import anyconfig.backend.yaml.pyyaml as PYYAML except ImportError: @@ -21,50 +25,59 @@ UnknownProcessorTypeError, UnknownFileTypeError ) -from .. 
import base +from ..common import RESOURCE_DIR + + +CNF_PATH: pathlib.Path = ( + RESOURCE_DIR / "loaders/json.stdlib/10/360_a_nested_map.json" +) + +@pytest.fixture(name="parsers") +def found_parsers(): + return TT.Parsers() -CNF_PATH = base.RES_DIR / 'base/basics/10/10.json' +def test_json_parsers(parsers): + psrs = parsers.findall(None, forced_type="json") + assert psrs + assert JSONStdlib.Parser in psrs + assert psrs[0] == JSONStdlib.Parser -class Test(unittest.TestCase): - def setUp(self): - self.psrs = TT.Parsers() +@pytest.mark.skipif(PYYAML is None, reason="PyYAML is not available.") +def test_yaml_parsers(parsers): + psrs = parsers.findall(None, forced_type="yaml") + assert psrs + assert PYYAML.Parser in psrs + assert psrs[0] == PYYAML.Parser - def test_10_json_parsers(self): - jpsrs = self.psrs.findall(None, forced_type="json") - self.assertTrue(isinstance(jpsrs[0], JSON.Parser)) - def test_12_yaml_parsers(self): - if PYYAML: - ypsrs = self.psrs.findall(None, forced_type="yaml") - self.assertTrue(isinstance(ypsrs[0], PYYAML.Parser)) +@pytest.mark.parametrize( + ("exc", "arg0", "kwargs"), + ((ValueError, None, {}), + (UnknownProcessorTypeError, None, {"forced_type": "_unkonw_type_"}), + (UnknownFileTypeError, "cnf.unknown_ext", {}), + ), +) +def test_find__failures(exc, arg0, kwargs, parsers): + with pytest.raises(exc): + parsers.find(arg0, **kwargs) - def test_30_find__ng_cases(self): - self.assertRaises(ValueError, self.psrs.find, None) - self.assertRaises(UnknownProcessorTypeError, self.psrs.find, None, - forced_type="_unkonw_type_") - self.assertRaises(UnknownFileTypeError, self.psrs.find, - "cnf.unknown_ext") - def test_32_find__ng_cases(self): - pcls = anyconfig.backend.json.Parser - self.assertTrue(isinstance(self.psrs.find("x.conf", - forced_type="json"), - pcls)) - self.assertTrue(isinstance(self.psrs.find("x.json"), pcls)) +def test_find(parsers): + pcls = anyconfig.backend.json.Parser + assert isinstance(parsers.find("x.conf", 
forced_type="json"), pcls) + assert isinstance(parsers.find("x.json"), pcls) - with open(CNF_PATH) as inp: - self.assertTrue(isinstance(self.psrs.find(inp), pcls)) + with open(CNF_PATH, encoding="utf-8") as inp: + assert isinstance(parsers.find(inp), pcls) - if pathlib is not None: - inp = pathlib.Path("x.json") - self.assertTrue(isinstance(self.psrs.find(inp), pcls)) + inp = pathlib.Path("x.json") + assert isinstance(parsers.find(inp), pcls) - def test_34_find__input_object(self): - inp = anyconfig.ioinfo.make(CNF_PATH) - psr = self.psrs.find(inp) - self.assertTrue(isinstance(psr, anyconfig.backend.json.Parser)) -# vim:sw=4:ts=4:et: +def test_find__input_object(parsers): + inp = anyconfig.ioinfo.make(CNF_PATH) + psr = parsers.find(inp) + assert isinstance(psr, anyconfig.backend.json.Parser) diff --git a/tests/parsers/test_utils.py b/tests/parsers/test_utils.py index 21186365..398e1431 100644 --- a/tests/parsers/test_utils.py +++ b/tests/parsers/test_utils.py @@ -1,71 +1,83 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2012 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # -# pylint: disable=missing-docstring, invalid-name +# pylint: disable=missing-docstring +"""Test cases for anyconfig.parsers.utils. 
+""" +from __future__ import annotations + import operator -import unittest +import pytest + +import anyconfig.parsers.parsers import anyconfig.parsers.utils as TT from anyconfig.common import ( UnknownFileTypeError, UnknownProcessorTypeError ) from anyconfig.backend.json import PARSERS as JSON_PSR_CLSS -from anyconfig.parsers.parsers import Parsers -PSRS = Parsers().list() +PSRS = anyconfig.parsers.parsers.Parsers().list() JSON_PSRS = sorted( (p() for p in JSON_PSR_CLSS), - key=operator.methodcaller('priority'), reverse=True + key=operator.methodcaller("priority"), reverse=True +) + + +def test_load_plugins(): + TT.load_plugins() + assert PSRS + + +def test_list_types(): + res = TT.list_types() + assert bool(res) + assert any(x in res for x in ("json", "ini", "xml")) + + +def test_list_by_x(): + for lfn in (TT.list_by_cid, TT.list_by_type, TT.list_by_extension): + psrs = lfn() + assert bool(psrs) + + +@pytest.mark.parametrize( + ("args", "exc"), + (((None, None), ValueError), # w/o path nor type + (("/tmp/x.xyz", None), UnknownFileTypeError), + (("/dev/null", None), UnknownFileTypeError), + ((None, "xyz"), UnknownProcessorTypeError), + ) +) +def test_findall_ng_cases(args, exc): + with pytest.raises(exc): + TT.findall(*args) + + +@pytest.mark.parametrize( + ("obj", "typ"), + (("foo.json", None), + (None, "json"), + ) ) +def test_findall(obj, typ): + psrs = TT.findall(obj=obj, forced_type=typ) + assert bool(psrs) + assert psrs == JSON_PSRS -class TestCase(unittest.TestCase): - - def test_load_plugins(self): - TT.load_plugins() - self.assertTrue(PSRS) - - def test_list_types(self): - res = TT.list_types() - self.assertTrue(bool(res)) - self.assertTrue(any(x in res for x in ('json', 'ini', 'xml'))) - - def test_list_by_x(self): - for lfn in (TT.list_by_cid, TT.list_by_type, TT.list_by_extension): - psrs = lfn() - self.assertTrue(bool(psrs)) - - def test_findall_ng_cases(self): - ies = (((None, None), ValueError), # w/o path nor type - (('/tmp/x.xyz', None), 
UnknownFileTypeError), - (('/dev/null', None), UnknownFileTypeError), - ((None, 'xyz'), UnknownProcessorTypeError), - ) - for inp, exc in ies: - with self.assertRaises(exc): - TT.findall(*inp) - - def test_findall(self): - argss = (('foo.json', None), - (None, 'json'), - ) - for args in argss: - psrs = TT.findall(*args) - - self.assertTrue(bool(psrs)) - self.assertEqual(psrs, JSON_PSRS) - - def test_find(self): - argss = (('foo.json', None), - (None, 'json'), - (None, JSON_PSR_CLSS[0]), - (None, JSON_PSRS[0]), - ) - for args in argss: - psr = TT.find(*args) - self.assertEqual(psr, JSON_PSRS[0]) - -# vim:sw=4:ts=4:et: + +@pytest.mark.parametrize( + ("obj", "typ"), + (("foo.json", None), + (None, "json"), + (None, JSON_PSR_CLSS[0]), + (None, JSON_PSRS[0]), + ) +) +def test_find(obj, typ): + psr = TT.find(obj=obj, forced_type=typ) + assert psr == JSON_PSRS[0] diff --git a/tests/processors/common.py b/tests/processors/common.py index 732127b4..e7a4d7d2 100644 --- a/tests/processors/common.py +++ b/tests/processors/common.py @@ -7,32 +7,32 @@ class A(anyconfig.models.processor.Processor): - _cid = 'A' - _type = 'json' - _extensions = ['json', 'jsn', 'js'] + _cid = "A" + _type = "json" + _extensions = ["json", "jsn", "js"] class A2(A): - _cid = 'A2' + _cid = "A2" _priority = 20 # Higher priority than A. class A3(A): - _cid = 'A3' + _cid = "A3" _priority = 99 # Higher priority than A and A2. class B(anyconfig.models.processor.Processor): - _cid = 'B' - _type = 'yaml' - _extensions = ['yaml', 'yml'] + _cid = "B" + _type = "yaml" + _extensions = ["yaml", "yml"] _priority = 99 # Higher priority than C. 
class C(anyconfig.models.processor.Processor): - _cid = 'dummy' - _type = 'yaml' - _extensions = ['yaml', 'yml'] + _cid = "dummy" + _type = "yaml" + _extensions = ["yaml", "yml"] PRS = [A, A2, A3, B, C] diff --git a/tests/processors/test_processors.py b/tests/processors/test_processors.py index 144aacea..bb3226fd 100644 --- a/tests/processors/test_processors.py +++ b/tests/processors/test_processors.py @@ -1,51 +1,53 @@ # -# Copyright (C) 2018 - 2021 Satoru SATOH +# Copyright (C) 2018 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # -# pylint: disable=missing-docstring, invalid-name +# pylint: disable=missing-docstring import operator -import unittest + +import pytest import anyconfig.processors.processors as TT from .common import A, A2, B, C, PRS -class Test_10_Processor(unittest.TestCase): +def test_processor_compare() -> None: + (a1, a2, a22, b) = (A(), A(), A2(), B()) + assert a1 == a2 + assert a1 != b + assert a1 != a22 + - def test_10_eq(self): - (a1, a2, a22, b) = (A(), A(), A2(), B()) - self.assertEqual(a1, a2) - self.assertNotEqual(a1, b) - self.assertNotEqual(a1, a22) +def test_processor_init(): + prcs = TT.Processors() + assert not prcs.list() -class Test_40_Processors(unittest.TestCase): +def test_processor_init_with_processors(): + prcs = TT.Processors(PRS) + assert prcs.list(sort=True) == sorted( + PRS, + key=operator.methodcaller("cid") + ) - def test_10_init(self): - prcs = TT.Processors() - self.assertFalse(prcs.list()) - def test_12_init_with_processors(self): - prcs = TT.Processors(PRS) - self.assertEqual(prcs.list(sort=True), - sorted(PRS, key=operator.methodcaller('cid'))) +def test_processor_list_by_cid(): + prcs = TT.Processors(PRS) + exp = sorted( + ((p.cid(), [p]) for p in PRS), + key=TT.operator.itemgetter(0) + ) + assert prcs.list_by_cid() == exp - def test_20_list_by_cid(self): - prcs = TT.Processors(PRS) - exp = sorted(((p.cid(), [p]) for p in PRS), - key=TT.operator.itemgetter(0)) - self.assertEqual(prcs.list_by_cid(), exp) - def 
test_20_list_x(self): - prcs = TT.Processors(PRS) - self.assertRaises(ValueError, prcs.list_x) - self.assertEqual(prcs.list_x('cid'), - sorted(set(p.cid() for p in PRS))) - self.assertEqual(prcs.list_x('type'), - sorted(set(p.type() for p in PRS))) +def test_processor_list_x(): + prcs = TT.Processors(PRS) + with pytest.raises(ValueError): + prcs.list_x() - res = sorted(set(A.extensions() + B.extensions() + C.extensions())) - self.assertEqual(prcs.list_x('extension'), res) + assert prcs.list_x("cid") == sorted({p.cid() for p in PRS}) + assert prcs.list_x("type") == sorted({p.type() for p in PRS}) -# vim:sw=4:ts=4:et: + res = sorted(set(A.extensions() + B.extensions() + C.extensions())) + assert prcs.list_x("extension") == res diff --git a/tests/processors/test_utils.py b/tests/processors/test_utils.py index 471d3d77..8b0fa6e9 100644 --- a/tests/processors/test_utils.py +++ b/tests/processors/test_utils.py @@ -1,9 +1,14 @@ # -# Copyright (C) 2018 - 2021 Satoru SATOH +# Copyright (C) 2018 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # -# pylint: disable=missing-docstring, invalid-name -import unittest +# pylint: disable=missing-docstring +"""Test cases for anyconfig.processors.utils.""" +from __future__ import annotations + +import typing + +import pytest import anyconfig.ioinfo import anyconfig.processors.utils as TT @@ -13,210 +18,240 @@ ) from .common import A, A2, A3, B, C, PRS +if typing.TYPE_CHECKING: + import collections.abc + PRS = [p() for p in PRS] # Instantiate all. 
-class TestCase(unittest.TestCase): - - def test_select_by_key(self): - ies = (([], []), - (((['a'], 1), ), [('a', [1])]), - (((['a', 'aaa'], 1), - (['b', 'bb'], 2), - (['a'], 3)), - [('a', [1, 3]), - ('aaa', [1]), - ('b', [2]), - ('bb', [2])])) - - for inp, exp in ies: - self.assertEqual(TT.select_by_key(inp), exp) - - def test_select_by_key_reversed(self): - ies = ((((['a', 'aaa'], 1), - (['a'], 3)), - [('a', [3, 1]), - ('aaa', [1])]), - ) - - def sfn(itr): - return sorted(itr, reverse=True) - - for inp, exp in ies: - self.assertEqual(TT.select_by_key(inp, sfn), exp) - - def test_list_by_x(self): - (a, a2, a3, b, c) = (A(), A2(), A3(), B(), C()) - ies = ((([], 'type'), []), - (([a], 'type'), [(a.type(), [a])]), - (([a], 'extensions'), - [(x, [a]) for x in a.extensions()]), - (((a, a2, a3), 'type'), - [(a.type(), [a3, a2, a])]), - (([a, b, c], 'type'), - [(a.type(), [a]), (b.type(), [b, c])]), - ((PRS, 'type'), - [(a.type(), [a3, a2, a]), (b.type(), [b, c])]), - ((PRS, 'extensions'), - [('js', [a3, a2, a]), ('json', [a3, a2, a]), - ('jsn', [a3, a2, a]), ('yaml', [b, c]), ('yml', [b, c])]), - ) - - for prs_key, exp in ies: - self.assertEqual( - sorted(TT.list_by_x(*prs_key)), sorted(exp) - ) - - def test_list_by_x_ng_cases(self): - with self.assertRaises(ValueError): - TT.list_by_x(PRS, 'undef') - - def test_findall_with_pred__type(self): - def _findall_by_type(typ): - return TT.findall_with_pred(lambda p: p.type() == typ, PRS) - - (a, a2, a3, b, c) = (A(), A2(), A3(), B(), C()) - ies = (('json', [a3, a2, a]), - ('yaml', [b, c]), - ('undefined', []), - ) - - for inp, exp in ies: - self.assertEqual(_findall_by_type(inp), exp) - - def test_findall_with_pred__ext(self): - def _findall_with_pred__ext(ext): - return TT.findall_with_pred(lambda p: ext in p.extensions(), PRS) - - (a, a2, a3, b, c) = (A(), A2(), A3(), B(), C()) - ies = (('js', [a3, a2, a]), - ('yml', [b, c]), - ('xyz', []), - ) - - for inp, exp in ies: - self.assertEqual(_findall_with_pred__ext(inp), exp) 
- - def assertInstance(self, obj, cls): - self.assertTrue(isinstance(obj, cls)) - - def test_maybe_processor(self): - a3 = A3() - ies = (((a3, A3), True), - ((A3, A3), True), - ((B, A3), False), - ) - for inp, exp in ies: - afn = self.assertTrue if exp else self.assertFalse - res = TT.maybe_processor(*inp) - afn(isinstance(res, A3)) - - if not exp: - self.assertTrue(res is None) - - def test_find_by_type_or_id(self): - (a, a2, a3, b, c) = (A(), A2(), A3(), B(), C()) - ies = ((('json', PRS), [a3, a2, a]), - (('yaml', PRS), [b, c]), - (('dummy', PRS), [c]), - ) - for inp, exp in ies: - self.assertEqual(TT.find_by_type_or_id(*inp), exp) - - def test_find_by_type_or_id_ng_cases(self): - with self.assertRaises(UnknownProcessorTypeError): - TT.find_by_type_or_id('xyz', PRS) - - def test_find_by_fileext(self): - ies = ((('js', PRS), [A3(), A2(), A()]), - (('yml', PRS), [B(), C()]), - ) - for inp, exp in ies: - self.assertEqual(TT.find_by_fileext(*inp), exp) - - def test_find_by_fileext_ng_cases(self): - with self.assertRaises(UnknownFileTypeError): - TT.find_by_fileext('xyz', PRS) - - def test_find_by_maybe_file(self): - (a, a2, a3, b, c) = (A(), A2(), A3(), B(), C()) - obj = anyconfig.ioinfo.make('/path/to/a.json') - - ies = ((('/path/to/a.jsn', PRS), [a3, a2, a]), - (('../../path/to/b.yml', PRS), [b, c]), - ((obj, PRS), [a3, a2, a]), - ) - - for inp, exp in ies: - self.assertEqual(TT.find_by_maybe_file(*inp), exp) - - def test_find_by_maybe_file_ng_cases(self): - ies = (('/tmp/x.xyz', PRS), - ('/dev/null', PRS), - ) - for inp in ies: - with self.assertRaises(UnknownFileTypeError): - TT.find_by_maybe_file(*inp) - - def test_findall_ng_cases(self): - ies = (((None, PRS, None), ValueError), # w/o path nor type - (('/tmp/x.xyz', PRS, None), UnknownFileTypeError), - (('/dev/null', PRS, None), UnknownFileTypeError), - ((None, PRS, 'xyz'), UnknownProcessorTypeError), - ) - for inp, exc in ies: - with self.assertRaises(exc): - TT.findall(*inp) - - def 
test_findall_by_maybe_file(self): - (a, a2, a3, b, c) = (A(), A2(), A3(), B(), C()) - obj = anyconfig.ioinfo.make('/path/to/a.json') - - ies = ((('/path/to/a.jsn', PRS), [a3, a2, a]), - (('../../path/to/b.yml', PRS), [b, c]), - ((obj, PRS), [a3, a2, a]), - ) - for inp, exp in ies: - self.assertEqual(TT.findall(*inp), exp) - - def test_findall_by_type_or_id(self): - (a, a2, a3, b, c) = (A(), A2(), A3(), B(), C()) - ies = (((None, PRS, 'json'), [a3, a2, a]), - ((None, PRS, 'yaml'), [b, c]), - ((None, PRS, 'dummy'), [c]), - ) - for inp, exp in ies: - self.assertEqual(TT.findall(*inp), exp) - - def test_find_by_forced_type(self): - a2 = A2() - c = C() - ies = (((None, PRS, A2), a2), - ((None, PRS, A2), a2), - ((None, PRS, c.cid()), c), - ) - - for inp, exp in ies: - self.assertEqual(TT.find(*inp), exp) - - def test_find__maybe_file(self): - (a3, b) = (A3(), B()) - obj = anyconfig.ioinfo.make('/path/to/a.json') - - ies = ((('/path/to/a.jsn', PRS), a3), - (('../../path/to/b.yml', PRS), b), - ((obj, PRS), a3), - ) - for inp, exp in ies: - self.assertEqual(TT.find(*inp), exp) - - def test_find__type_or_id(self): - ies = (((None, PRS, 'json'), A3()), - ((None, PRS, 'yaml'), B()), - ((None, PRS, 'dummy'), C()), - ) - for inp, exp in ies: - self.assertEqual(TT.find(*inp), exp) - -# vim:sw=4:ts=4:et: +@pytest.mark.parametrize( + ("items", "exp"), + (([], []), + (((["a"], 1), ), [("a", [1])]), + (((["a", "aaa"], 1), (["b", "bb"], 2), (["a"], 3)), + [("a", [1, 3]), ("aaa", [1]), ("b", [2]), ("bb", [2])]) + ), +) +def test_select_by_key( + items: collections.abc.Iterable, exp: collections.abc.Iterable +): + assert TT.select_by_key(items) == exp + + +@pytest.mark.parametrize( + ("items", "exp"), + ((((["a", "aaa"], 1), (["a"], 3)), [("a", [3, 1]), ("aaa", [1])]), + ), +) +def test_select_by_key_reversed( + items: collections.abc.Iterable, exp: collections.abc.Iterable +): + def sfn(itr): + return sorted(itr, reverse=True) + + assert TT.select_by_key(items, sfn) == exp + + +PRS10 = 
(AI0, AI2, AI3, BI0, CI0) = (A(), A2(), A3(), B(), C()) +OBJ0 = anyconfig.ioinfo.make("/path/to/a.json") + + +@pytest.mark.parametrize( + ("items", "exp"), + ((([], "type"), []), + (([AI0], "type"), [(AI0.type(), [AI0])]), + (([AI0], "extensions"), [(x, [AI0]) for x in AI0.extensions()]), + (((AI0, AI2, AI3), "type"), [(AI0.type(), [AI3, AI2, AI0])]), + (([AI0, BI0, CI0], "type"), + [(AI0.type(), [AI0]), (BI0.type(), [BI0, CI0])]), + ((PRS, "type"), + [(AI0.type(), [AI3, AI2, AI0]), (BI0.type(), [BI0, CI0])]), + ((PRS, "extensions"), + [("js", [AI3, AI2, AI0]), ("json", [AI3, AI2, AI0]), + ("jsn", [AI3, AI2, AI0]), ("yaml", [BI0, CI0]), ("yml", [BI0, CI0])]), + ), +) +def test_list_by_x( + items: collections.abc.Iterable, exp: collections.abc.Iterable +) -> None: + assert sorted(TT.list_by_x(*items)) == sorted(exp) + + +def test_list_by_x_ng_cases(): + with pytest.raises(ValueError): + TT.list_by_x(PRS, "undef") + + +@pytest.mark.parametrize( + ("typ", "exp"), + (("json", [AI3, AI2, AI0]), + ("yaml", [BI0, CI0]), + ("undefined", []) + ), +) +def test_findall_with_pred__type( + typ: str, exp: collections.abc.Iterable +) -> None: + def _findall_by_type(typ): + return TT.findall_with_pred(lambda p: p.type() == typ, PRS) + + assert _findall_by_type(typ) == exp + + +@pytest.mark.parametrize( + ("typ", "exp"), + (("js", [AI3, AI2, AI0]), + ("yml", [BI0, CI0]), + ("xyz", []), + ), +) +def test_findall_with_pred__ext( + typ: str, exp: collections.abc.Iterable +) -> None: + def _findall_with_pred__ext(ext): + return TT.findall_with_pred(lambda p: ext in p.extensions(), PRS) + + assert _findall_with_pred__ext(typ) == exp + + +@pytest.mark.parametrize( + ("items", "exp"), + (((AI3, A3), True), + ((A3, A3), True), + ((B, A3), False), + ), +) +def test_maybe_processor( + items: collections.abc.Iterable, exp: bool +) -> None: + res = TT.maybe_processor(*items) + if exp: + assert isinstance(res, A3) + else: + assert not isinstance(res, A3) + assert res is None + + 
+@pytest.mark.parametrize( + ("typ_prs", "exp"), + ((("json", PRS), [AI3, AI2, AI0]), + (("yaml", PRS), [BI0, CI0]), + (("dummy", PRS), [CI0]), + ), +) +def test_find_by_type_or_id( + typ_prs: collections.abc.Iterable, exp: collections.abc.Iterable +) -> None: + assert TT.find_by_type_or_id(*typ_prs) == exp + + +def test_find_by_type_or_id_ng_cases() -> None: + with pytest.raises(UnknownProcessorTypeError): + TT.find_by_type_or_id("xyz", PRS) + + +@pytest.mark.parametrize( + ("typ_prs", "exp"), + ((("js", PRS), [A3(), A2(), A()]), + (("yml", PRS), [B(), C()]), + ), +) +def test_find_by_fileext( + typ_prs: collections.abc.Iterable, exp: collections.abc.Iterable +) -> None: + assert TT.find_by_fileext(*typ_prs) == exp + + +def test_find_by_fileext_ng_cases(): + with pytest.raises(UnknownFileTypeError): + TT.find_by_fileext("xyz", PRS) + + +@pytest.mark.parametrize( + ("objs", "exp"), + ((("/path/to/a.jsn", PRS), [AI3, AI2, AI0]), + (("../../path/to/b.yml", PRS), [BI0, CI0]), + ((OBJ0, PRS), [AI3, AI2, AI0]), + ) +) +def test_find_by_maybe_file(objs, exp): + assert TT.find_by_maybe_file(*objs) == exp + + +@pytest.mark.parametrize( + ("obj", ), + (("/tmp/x.xyz", ), + ("/dev/null", ), + ) +) +def test_find_by_maybe_file_ng_cases(obj): + with pytest.raises(UnknownFileTypeError): + TT.find_by_maybe_file(obj, PRS) + + +@pytest.mark.parametrize( + ("obj", "typ", "exc"), + ((None, None, ValueError), # w/o path nor type + ("/tmp/x.xyz", None, UnknownFileTypeError), + ("/dev/null", None, UnknownFileTypeError), + (None, "xyz", UnknownProcessorTypeError), + ) +) +def test_findall_ng_cases(obj, typ, exc): + with pytest.raises(exc): + TT.findall(obj, PRS, forced_type=typ) + + +@pytest.mark.parametrize( + ("obj", "exp"), + (("/path/to/a.jsn", [AI3, AI2, AI0]), + ("../../path/to/b.yml", [BI0, CI0]), + (OBJ0, [AI3, AI2, AI0]), + ) +) +def test_findall_by_maybe_file(obj, exp): + assert TT.findall(obj, PRS) == exp + + +@pytest.mark.parametrize( + ("typ", "exp"), + (("json", [AI3, AI2, 
AI0]), + ("yaml", [BI0, CI0]), + ("dummy", [CI0]), + ) +) +def test_findall_by_type_or_id(typ, exp): + assert TT.findall(None, PRS, forced_type=typ) == exp + + +@pytest.mark.parametrize( + ("typ", "exp"), + ((A2, AI2), + (CI0.cid(), CI0), + ) +) +def test_find_by_forced_type(typ, exp): + assert TT.find(None, PRS, forced_type=typ) == exp + + +@pytest.mark.parametrize( + ("obj", "exp"), + (("/path/to/a.jsn", AI3), + ("../../path/to/b.yml", BI0), + (OBJ0, AI3), + ) +) +def test_find__maybe_file(obj, exp): + assert TT.find(obj, PRS) == exp + + +@pytest.mark.parametrize( + ("typ", "exp"), + (("json", A3()), + ("yaml", B()), + ("dummy", C()), + ) +) +def test_find__type_or_id(typ, exp): + assert TT.find(None, PRS, forced_type=typ) == exp diff --git a/tests/query/test_query.py b/tests/query/test_query.py index 90c801d7..bc2a07a6 100644 --- a/tests/query/test_query.py +++ b/tests/query/test_query.py @@ -1,44 +1,31 @@ # -# Copyright (C) 2017 - 2021 Satoru SATOH +# Copyright (C) 2017 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # -# pylint: disable=missing-docstring, invalid-name -"""test cases for anyconfig.query.query. 
-""" -import os -import unittest +# pylint: disable=missing-docstring +"""test cases for anyconfig.query.query.""" +from __future__ import annotations + +import pytest try: import anyconfig.query.query as TT -except ImportError: - raise unittest.SkipTest('Needed library to query was not found') - - -class Test_00_Functions(unittest.TestCase): - - def _assert_dicts_equal(self, dic, ref): - self.assertEqual(dic, ref, - "%r%s vs.%s%r" % (dic, os.linesep, os.linesep, ref)) - - def _assert_query(self, data_exp_ref_list, dicts=False): - _assert = self._assert_dicts_equal if dicts else self.assertEqual - for data, exp, ref in data_exp_ref_list: - try: - _assert(TT.query(data, exp)[0], ref) - except ValueError: - pass - - def test_10_query(self): - self._assert_query([({"a": 1}, "a", 1), - ({"a": {"b": 2}}, "a.b", 2)]) - - def test_12_invalid_query(self): - data = {"a": 1} - self._assert_query([(data, "b.", data)]) - - def test_14_empty_query(self): - data = {"a": 1} - self._assert_query([(data, None, data), - (data, '', data)]) - -# vim:sw=4:ts=4:et: +except ImportError as exc: + raise pytest.skip( + "Needed library to query was not found", + allow_module_leve=True + ) from exc + + +@pytest.mark.parametrize( + ("data", "query", "exp"), + (({"a": 1}, "a", 1), + ({"a": {"b": 2}}, "a.b", 2), + ({"a": 1}, "b.", {"a": 1}), + ({"a": 1}, None, {"a": 1}), + ({"a": 1}, "", {"a": 1}), + ), +) +def test_query(data, query: str, exp): + (res, _exc) = TT.query(data, query) + assert res == exp diff --git a/tests/requirements.d/base.txt b/tests/requirements.d/base.txt new file mode 100644 index 00000000..20f12d04 --- /dev/null +++ b/tests/requirements.d/base.txt @@ -0,0 +1 @@ +tox-uv diff --git a/tests/requirements.d/full.txt b/tests/requirements.d/full.txt new file mode 100644 index 00000000..b3969611 --- /dev/null +++ b/tests/requirements.d/full.txt @@ -0,0 +1,2 @@ +Jinja2 +tomlkit diff --git a/tests/requirements.d/lint-legacy.txt b/tests/requirements.d/lint-legacy.txt new file mode 
100644 index 00000000..5b02312c --- /dev/null +++ b/tests/requirements.d/lint-legacy.txt @@ -0,0 +1,22 @@ +bandit +# cohesion +flake8 +flake8-bandit +flake8-bugbear +# flake8-builtins +# flake8-comprehensions +flake8-deprecated +flake8-docstrings +# flake8-eradicate +# flake8-expression-complexity +# flake8-functions +flake8-implicit-str-concat +flake8-isort +flake8-length +flake8-pytest-style +# flake8-spellcheck +# flake8-use-fstring +mccabe +pep8-naming +pycodestyle +pylint diff --git a/tests/requirements.d/lint.txt b/tests/requirements.d/lint.txt new file mode 100644 index 00000000..af3ee576 --- /dev/null +++ b/tests/requirements.d/lint.txt @@ -0,0 +1 @@ +ruff diff --git a/tests/requirements_plugins.txt b/tests/requirements.d/plugins.txt similarity index 100% rename from tests/requirements_plugins.txt rename to tests/requirements.d/plugins.txt diff --git a/tests/requirements.d/test.txt b/tests/requirements.d/test.txt new file mode 100644 index 00000000..da0a0ab3 --- /dev/null +++ b/tests/requirements.d/test.txt @@ -0,0 +1,9 @@ +# It's not available in epel. +#coveralls +pytest +# It's not available in fedora. 
+#pytest-clarity +pytest-cov +pytest-randomly +pytest-xdist +pytest-xdist[psutil]; sys_platform == 'linux' diff --git a/tests/requirements.d/type-check.txt b/tests/requirements.d/type-check.txt new file mode 100644 index 00000000..acd70680 --- /dev/null +++ b/tests/requirements.d/type-check.txt @@ -0,0 +1,10 @@ +mypy + +# 3rd party modules +types-Jinja2 +types-PyYAML +types-jmespath +types-jsonschema +types-simplejson +types-toml +typing-extensions; python_version < "3.12" diff --git a/tests/requirements.txt b/tests/requirements.txt index 94f673df..76d4af1b 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -1,30 +1 @@ -bandit -# cohesion -coveralls -flake8 -flake8-bandit -flake8-bugbear -# flake8-builtins -# flake8-comprehensions -flake8-deprecated -flake8-docstrings -# flake8-eradicate -# flake8-expression-complexity -# flake8-functions -flake8-implicit-str-concat -flake8-isort -flake8-length -flake8-pytest-style -# flake8-spellcheck -# flake8-use-fstring -mccabe -pep8-naming -pycodestyle -pylint -pytest -pytest-cov -pytest-randomly -# Disable until 'pip install' failures happen in CI env. 
-# see: https://notes.crmarsh.com/python-tooling-could-be-much-much-faster -# ruff; python_version > '3.6' -ruff +-r requirements.d/test.txt diff --git a/tests/requirements_type-check.txt b/tests/requirements_type-check.txt deleted file mode 100644 index bfb10b7b..00000000 --- a/tests/requirements_type-check.txt +++ /dev/null @@ -1,6 +0,0 @@ -mypy -types-Jinja2 -types-PyYAML -types-pkg_resources -types-simplejson -types-toml diff --git a/tests/res/00-00-cnf.json b/tests/res/00-00-cnf.json deleted file mode 100644 index cb5b2f69..00000000 --- a/tests/res/00-00-cnf.json +++ /dev/null @@ -1 +0,0 @@ -{"a": 1} diff --git a/tests/res/00-00-cnf_indented.json b/tests/res/00-00-cnf_indented.json deleted file mode 100644 index 8d6b85c7..00000000 --- a/tests/res/00-00-cnf_indented.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "a": 1 -} diff --git a/tests/res/00-01-cnf.json b/tests/res/00-01-cnf.json deleted file mode 100644 index d633a103..00000000 --- a/tests/res/00-01-cnf.json +++ /dev/null @@ -1 +0,0 @@ -{"b": {"b": [1, 2], "c": "C"}} diff --git a/tests/res/00-02-cnf.json b/tests/res/00-02-cnf.json deleted file mode 100644 index a8837ec0..00000000 --- a/tests/res/00-02-cnf.json +++ /dev/null @@ -1 +0,0 @@ -{"name": "aaa"} diff --git a/tests/res/00-cnf.json b/tests/res/00-cnf.json deleted file mode 100644 index 7eb37504..00000000 --- a/tests/res/00-cnf.json +++ /dev/null @@ -1 +0,0 @@ -{"a": 1, "b": {"b": [1, 2], "c": "C"}, "name": "aaa"} \ No newline at end of file diff --git a/tests/res/00-cnf.yml b/tests/res/00-cnf.yml deleted file mode 100644 index e1b5be35..00000000 --- a/tests/res/00-cnf.yml +++ /dev/null @@ -1,5 +0,0 @@ -a: 1 -b: - b: [1, 2] - c: C -name: aaa diff --git a/tests/res/00-scm.json b/tests/res/00-scm.json deleted file mode 100644 index e02cfd26..00000000 --- a/tests/res/00-scm.json +++ /dev/null @@ -1 +0,0 @@ -{"type": "object", "properties": {"name": {"type": "string"}, "a": {"type": "integer"}, "b": {"type": "object", "properties": {"b": {"type": "array", 
"items": {"type": "integer"}}}}}} \ No newline at end of file diff --git a/tests/res/00-template-ctx.yml b/tests/res/00-template-ctx.yml deleted file mode 100644 index 4ae62722..00000000 --- a/tests/res/00-template-ctx.yml +++ /dev/null @@ -1,10 +0,0 @@ -{# Template context #} -foo: - bar: "foo_bar_str" - -navigation: - - href: "https://www.google.com" - caption: Google - - href: "https://www.bing.com" - caption: Bing - diff --git a/tests/res/1/api/dump b/tests/res/1/api/dump new file mode 120000 index 00000000..36f48c71 --- /dev/null +++ b/tests/res/1/api/dump @@ -0,0 +1 @@ +dumps \ No newline at end of file diff --git a/tests/res/1/api/dumps/basics/10/00.json b/tests/res/1/api/dumps/basics/10/00.json new file mode 120000 index 00000000..0c5cd86a --- /dev/null +++ b/tests/res/1/api/dumps/basics/10/00.json @@ -0,0 +1 @@ +../../../single_load/basics/10/00.json \ No newline at end of file diff --git a/tests/res/1/api/dumps/basics/10/10.json b/tests/res/1/api/dumps/basics/10/10.json new file mode 120000 index 00000000..5247a50b --- /dev/null +++ b/tests/res/1/api/dumps/basics/10/10.json @@ -0,0 +1 @@ +../../../single_load/basics/10/10.json \ No newline at end of file diff --git a/tests/res/1/api/dumps/basics/10/20.json b/tests/res/1/api/dumps/basics/10/20.json new file mode 120000 index 00000000..661a4269 --- /dev/null +++ b/tests/res/1/api/dumps/basics/10/20.json @@ -0,0 +1 @@ +../../../single_load/basics/10/20.json \ No newline at end of file diff --git a/tests/res/dump/basics/10/e/00.txt b/tests/res/1/api/dumps/basics/10/e/00.txt similarity index 100% rename from tests/res/dump/basics/10/e/00.txt rename to tests/res/1/api/dumps/basics/10/e/00.txt diff --git a/tests/res/dump/basics/10/e/10.txt b/tests/res/1/api/dumps/basics/10/e/10.txt similarity index 100% rename from tests/res/dump/basics/10/e/10.txt rename to tests/res/1/api/dumps/basics/10/e/10.txt diff --git a/tests/res/dump/basics/10/e/20.txt b/tests/res/1/api/dumps/basics/10/e/20.txt similarity index 100% 
rename from tests/res/dump/basics/10/e/20.txt rename to tests/res/1/api/dumps/basics/10/e/20.txt diff --git a/tests/res/dumps/basics/10/o/00.json b/tests/res/1/api/dumps/basics/10/o/00.json similarity index 100% rename from tests/res/dumps/basics/10/o/00.json rename to tests/res/1/api/dumps/basics/10/o/00.json diff --git a/tests/res/dicts/get/20/10.json b/tests/res/1/api/dumps/basics/10/o/10.json similarity index 100% rename from tests/res/dicts/get/20/10.json rename to tests/res/1/api/dumps/basics/10/o/10.json diff --git a/tests/res/dicts/get/20/20.json b/tests/res/1/api/dumps/basics/10/o/20.json similarity index 100% rename from tests/res/dicts/get/20/20.json rename to tests/res/1/api/dumps/basics/10/o/20.json diff --git a/tests/res/1/api/dumps/basics/20/00.json b/tests/res/1/api/dumps/basics/20/00.json new file mode 120000 index 00000000..af40a2e9 --- /dev/null +++ b/tests/res/1/api/dumps/basics/20/00.json @@ -0,0 +1 @@ +../../../single_load/basics/20/00.json \ No newline at end of file diff --git a/tests/res/1/api/dumps/basics/20/10.json b/tests/res/1/api/dumps/basics/20/10.json new file mode 120000 index 00000000..20463277 --- /dev/null +++ b/tests/res/1/api/dumps/basics/20/10.json @@ -0,0 +1 @@ +../../../single_load/basics/20/10.json \ No newline at end of file diff --git a/tests/res/1/api/dumps/basics/20/20.json b/tests/res/1/api/dumps/basics/20/20.json new file mode 120000 index 00000000..b5b93429 --- /dev/null +++ b/tests/res/1/api/dumps/basics/20/20.json @@ -0,0 +1 @@ +../../../single_load/basics/20/20.json \ No newline at end of file diff --git a/tests/res/dump/basics/20/e/00.txt b/tests/res/1/api/dumps/basics/20/e/00.txt similarity index 100% rename from tests/res/dump/basics/20/e/00.txt rename to tests/res/1/api/dumps/basics/20/e/00.txt diff --git a/tests/res/dump/basics/20/e/10.txt b/tests/res/1/api/dumps/basics/20/e/10.txt similarity index 100% rename from tests/res/dump/basics/20/e/10.txt rename to tests/res/1/api/dumps/basics/20/e/10.txt diff --git 
a/tests/res/dump/basics/20/e/20.txt b/tests/res/1/api/dumps/basics/20/e/20.txt similarity index 100% rename from tests/res/dump/basics/20/e/20.txt rename to tests/res/1/api/dumps/basics/20/e/20.txt diff --git a/tests/res/dumps/basics/20/o/00.json b/tests/res/1/api/dumps/basics/20/o/00.json similarity index 100% rename from tests/res/dumps/basics/20/o/00.json rename to tests/res/1/api/dumps/basics/20/o/00.json diff --git a/tests/res/dump/basics/10/o/10.json b/tests/res/1/api/dumps/basics/20/o/10.json similarity index 100% rename from tests/res/dump/basics/10/o/10.json rename to tests/res/1/api/dumps/basics/20/o/10.json diff --git a/tests/res/dump/basics/10/o/20.json b/tests/res/1/api/dumps/basics/20/o/20.json similarity index 100% rename from tests/res/dump/basics/10/o/20.json rename to tests/res/1/api/dumps/basics/20/o/20.json diff --git a/tests/res/load/multi b/tests/res/1/api/load/multi_load similarity index 100% rename from tests/res/load/multi rename to tests/res/1/api/load/multi_load diff --git a/tests/res/load/single b/tests/res/1/api/load/single_load similarity index 100% rename from tests/res/load/single rename to tests/res/1/api/load/single_load diff --git a/tests/res/1/api/loads/basics/10/00.txt b/tests/res/1/api/loads/basics/10/00.txt new file mode 120000 index 00000000..0c5cd86a --- /dev/null +++ b/tests/res/1/api/loads/basics/10/00.txt @@ -0,0 +1 @@ +../../../single_load/basics/10/00.json \ No newline at end of file diff --git a/tests/res/1/api/loads/basics/10/10.txt b/tests/res/1/api/loads/basics/10/10.txt new file mode 120000 index 00000000..5247a50b --- /dev/null +++ b/tests/res/1/api/loads/basics/10/10.txt @@ -0,0 +1 @@ +../../../single_load/basics/10/10.json \ No newline at end of file diff --git a/tests/res/1/api/loads/basics/10/20.txt b/tests/res/1/api/loads/basics/10/20.txt new file mode 120000 index 00000000..661a4269 --- /dev/null +++ b/tests/res/1/api/loads/basics/10/20.txt @@ -0,0 +1 @@ +../../../single_load/basics/10/20.json \ No newline 
at end of file diff --git a/tests/res/loads/basics/10/e/00.json b/tests/res/1/api/loads/basics/10/e/00.json similarity index 100% rename from tests/res/loads/basics/10/e/00.json rename to tests/res/1/api/loads/basics/10/e/00.json diff --git a/tests/res/loads/basics/10/e/10.json b/tests/res/1/api/loads/basics/10/e/10.json similarity index 100% rename from tests/res/loads/basics/10/e/10.json rename to tests/res/1/api/loads/basics/10/e/10.json diff --git a/tests/res/loads/basics/10/e/20.json b/tests/res/1/api/loads/basics/10/e/20.json similarity index 100% rename from tests/res/loads/basics/10/e/20.json rename to tests/res/1/api/loads/basics/10/e/20.json diff --git a/tests/res/loads/basics/10/o/00.json b/tests/res/1/api/loads/basics/10/o/00.json similarity index 100% rename from tests/res/loads/basics/10/o/00.json rename to tests/res/1/api/loads/basics/10/o/00.json diff --git a/tests/res/dump/basics/20/o/10.json b/tests/res/1/api/loads/basics/10/o/10.json similarity index 100% rename from tests/res/dump/basics/20/o/10.json rename to tests/res/1/api/loads/basics/10/o/10.json diff --git a/tests/res/dump/basics/20/o/20.json b/tests/res/1/api/loads/basics/10/o/20.json similarity index 100% rename from tests/res/dump/basics/20/o/20.json rename to tests/res/1/api/loads/basics/10/o/20.json diff --git a/tests/res/1/api/loads/query/10/00_00.txt b/tests/res/1/api/loads/query/10/00_00.txt new file mode 120000 index 00000000..5974c6e1 --- /dev/null +++ b/tests/res/1/api/loads/query/10/00_00.txt @@ -0,0 +1 @@ +../../../single_load/query/10/00_00.json \ No newline at end of file diff --git a/tests/res/1/api/loads/query/10/00_10.txt b/tests/res/1/api/loads/query/10/00_10.txt new file mode 120000 index 00000000..385f168e --- /dev/null +++ b/tests/res/1/api/loads/query/10/00_10.txt @@ -0,0 +1 @@ +../../../single_load/query/10/00_10.json \ No newline at end of file diff --git a/tests/res/1/api/loads/query/10/10_00.txt b/tests/res/1/api/loads/query/10/10_00.txt new file mode 120000 
index 00000000..328240fe --- /dev/null +++ b/tests/res/1/api/loads/query/10/10_00.txt @@ -0,0 +1 @@ +../../../single_load/query/10/10_00.json \ No newline at end of file diff --git a/tests/res/1/api/loads/query/10/10_10.txt b/tests/res/1/api/loads/query/10/10_10.txt new file mode 120000 index 00000000..388a8826 --- /dev/null +++ b/tests/res/1/api/loads/query/10/10_10.txt @@ -0,0 +1 @@ +../../../single_load/query/10/10_10.json \ No newline at end of file diff --git a/tests/res/1/api/loads/query/10/10_20.txt b/tests/res/1/api/loads/query/10/10_20.txt new file mode 120000 index 00000000..8d377b68 --- /dev/null +++ b/tests/res/1/api/loads/query/10/10_20.txt @@ -0,0 +1 @@ +../../../single_load/query/10/10_20.json \ No newline at end of file diff --git a/tests/res/1/api/loads/query/10/10_30.txt b/tests/res/1/api/loads/query/10/10_30.txt new file mode 120000 index 00000000..b13a05b8 --- /dev/null +++ b/tests/res/1/api/loads/query/10/10_30.txt @@ -0,0 +1 @@ +../../../single_load/query/10/10_30.json \ No newline at end of file diff --git a/tests/res/1/api/loads/query/10/10_40.txt b/tests/res/1/api/loads/query/10/10_40.txt new file mode 120000 index 00000000..7d600c64 --- /dev/null +++ b/tests/res/1/api/loads/query/10/10_40.txt @@ -0,0 +1 @@ +../../../single_load/query/10/10_40.json \ No newline at end of file diff --git a/tests/res/1/api/loads/query/10/10_50.txt b/tests/res/1/api/loads/query/10/10_50.txt new file mode 120000 index 00000000..46725333 --- /dev/null +++ b/tests/res/1/api/loads/query/10/10_50.txt @@ -0,0 +1 @@ +../../../single_load/query/10/10_50.json \ No newline at end of file diff --git a/tests/res/1/api/loads/query/10/20_00.txt b/tests/res/1/api/loads/query/10/20_00.txt new file mode 120000 index 00000000..82054765 --- /dev/null +++ b/tests/res/1/api/loads/query/10/20_00.txt @@ -0,0 +1 @@ +../../../single_load/query/10/20_00.json \ No newline at end of file diff --git a/tests/res/1/api/loads/query/10/20_10.txt b/tests/res/1/api/loads/query/10/20_10.txt new 
file mode 120000 index 00000000..f691a000 --- /dev/null +++ b/tests/res/1/api/loads/query/10/20_10.txt @@ -0,0 +1 @@ +../../../single_load/query/10/20_10.json \ No newline at end of file diff --git a/tests/res/1/api/loads/query/10/20_20.txt b/tests/res/1/api/loads/query/10/20_20.txt new file mode 120000 index 00000000..0a5f5e2f --- /dev/null +++ b/tests/res/1/api/loads/query/10/20_20.txt @@ -0,0 +1 @@ +../../../single_load/query/10/20_20.json \ No newline at end of file diff --git a/tests/res/1/api/loads/query/10/e b/tests/res/1/api/loads/query/10/e new file mode 120000 index 00000000..d8d2526b --- /dev/null +++ b/tests/res/1/api/loads/query/10/e @@ -0,0 +1 @@ +../../../single_load/query/10/e \ No newline at end of file diff --git a/tests/res/loads/query/10/o/00_00.json b/tests/res/1/api/loads/query/10/o/00_00.json similarity index 100% rename from tests/res/loads/query/10/o/00_00.json rename to tests/res/1/api/loads/query/10/o/00_00.json diff --git a/tests/res/loads/query/10/o/00_10.json b/tests/res/1/api/loads/query/10/o/00_10.json similarity index 100% rename from tests/res/loads/query/10/o/00_10.json rename to tests/res/1/api/loads/query/10/o/00_10.json diff --git a/tests/res/json/query/e/10_00.json b/tests/res/1/api/loads/query/10/o/10_00.json similarity index 100% rename from tests/res/json/query/e/10_00.json rename to tests/res/1/api/loads/query/10/o/10_00.json diff --git a/tests/res/loads/query/10/o/10_10.json b/tests/res/1/api/loads/query/10/o/10_10.json similarity index 100% rename from tests/res/loads/query/10/o/10_10.json rename to tests/res/1/api/loads/query/10/o/10_10.json diff --git a/tests/res/loads/query/10/o/10_20.json b/tests/res/1/api/loads/query/10/o/10_20.json similarity index 100% rename from tests/res/loads/query/10/o/10_20.json rename to tests/res/1/api/loads/query/10/o/10_20.json diff --git a/tests/res/loads/query/10/o/10_30.json b/tests/res/1/api/loads/query/10/o/10_30.json similarity index 100% rename from 
tests/res/loads/query/10/o/10_30.json rename to tests/res/1/api/loads/query/10/o/10_30.json diff --git a/tests/res/loads/query/10/o/10_40.json b/tests/res/1/api/loads/query/10/o/10_40.json similarity index 100% rename from tests/res/loads/query/10/o/10_40.json rename to tests/res/1/api/loads/query/10/o/10_40.json diff --git a/tests/res/json/query/e/20_00.json b/tests/res/1/api/loads/query/10/o/10_50.json similarity index 100% rename from tests/res/json/query/e/20_00.json rename to tests/res/1/api/loads/query/10/o/10_50.json diff --git a/tests/res/loads/query/10/o/20_00.json b/tests/res/1/api/loads/query/10/o/20_00.json similarity index 100% rename from tests/res/loads/query/10/o/20_00.json rename to tests/res/1/api/loads/query/10/o/20_00.json diff --git a/tests/res/loads/query/10/o/20_10.json b/tests/res/1/api/loads/query/10/o/20_10.json similarity index 100% rename from tests/res/loads/query/10/o/20_10.json rename to tests/res/1/api/loads/query/10/o/20_10.json diff --git a/tests/res/loads/query/10/o/20_20.json b/tests/res/1/api/loads/query/10/o/20_20.json similarity index 100% rename from tests/res/loads/query/10/o/20_20.json rename to tests/res/1/api/loads/query/10/o/20_20.json diff --git a/tests/res/1/api/loads/query/10/q b/tests/res/1/api/loads/query/10/q new file mode 120000 index 00000000..5169604f --- /dev/null +++ b/tests/res/1/api/loads/query/10/q @@ -0,0 +1 @@ +../../../single_load/query/10/q \ No newline at end of file diff --git a/tests/res/1/api/loads/schema/10/00.txt b/tests/res/1/api/loads/schema/10/00.txt new file mode 120000 index 00000000..7feafc4e --- /dev/null +++ b/tests/res/1/api/loads/schema/10/00.txt @@ -0,0 +1 @@ +../../../single_load/schema//10/00.json \ No newline at end of file diff --git a/tests/res/1/api/loads/schema/10/10.txt b/tests/res/1/api/loads/schema/10/10.txt new file mode 120000 index 00000000..0817d222 --- /dev/null +++ b/tests/res/1/api/loads/schema/10/10.txt @@ -0,0 +1 @@ +../../../single_load/schema//10/10.json \ No 
newline at end of file diff --git a/tests/res/1/api/loads/schema/10/20.txt b/tests/res/1/api/loads/schema/10/20.txt new file mode 120000 index 00000000..e14ac3af --- /dev/null +++ b/tests/res/1/api/loads/schema/10/20.txt @@ -0,0 +1 @@ +../../../single_load/schema//10/20.json \ No newline at end of file diff --git a/tests/res/loads/schema/10/e b/tests/res/1/api/loads/schema/10/e similarity index 100% rename from tests/res/loads/schema/10/e rename to tests/res/1/api/loads/schema/10/e diff --git a/tests/res/loads/schema/10/o b/tests/res/1/api/loads/schema/10/o similarity index 100% rename from tests/res/loads/schema/10/o rename to tests/res/1/api/loads/schema/10/o diff --git a/tests/res/1/api/loads/schema/10/s/00.txt b/tests/res/1/api/loads/schema/10/s/00.txt new file mode 120000 index 00000000..888d8dd8 --- /dev/null +++ b/tests/res/1/api/loads/schema/10/s/00.txt @@ -0,0 +1 @@ +../../../../single_load/schema/10/s/00.json \ No newline at end of file diff --git a/tests/res/1/api/loads/schema/10/s/10.txt b/tests/res/1/api/loads/schema/10/s/10.txt new file mode 120000 index 00000000..b64fd712 --- /dev/null +++ b/tests/res/1/api/loads/schema/10/s/10.txt @@ -0,0 +1 @@ +../../../../single_load/schema/10/s/10.json \ No newline at end of file diff --git a/tests/res/1/api/loads/schema/10/s/20.txt b/tests/res/1/api/loads/schema/10/s/20.txt new file mode 120000 index 00000000..c0b8448a --- /dev/null +++ b/tests/res/1/api/loads/schema/10/s/20.txt @@ -0,0 +1 @@ +../../../../single_load/schema/10/s/20.json \ No newline at end of file diff --git a/tests/res/1/api/loads/template/10/00.txt b/tests/res/1/api/loads/template/10/00.txt new file mode 120000 index 00000000..e72201f1 --- /dev/null +++ b/tests/res/1/api/loads/template/10/00.txt @@ -0,0 +1 @@ +../../../single_load/template/10/00.j2 \ No newline at end of file diff --git a/tests/res/1/api/loads/template/10/10.txt b/tests/res/1/api/loads/template/10/10.txt new file mode 120000 index 00000000..ed2a42a2 --- /dev/null +++ 
b/tests/res/1/api/loads/template/10/10.txt @@ -0,0 +1 @@ +../../../single_load/template/10/10.j2 \ No newline at end of file diff --git a/tests/res/1/api/loads/template/10/20.txt b/tests/res/1/api/loads/template/10/20.txt new file mode 120000 index 00000000..b48229f1 --- /dev/null +++ b/tests/res/1/api/loads/template/10/20.txt @@ -0,0 +1 @@ +../../../single_load/template/10/20.j2 \ No newline at end of file diff --git a/tests/res/1/api/loads/template/10/30.txt b/tests/res/1/api/loads/template/10/30.txt new file mode 120000 index 00000000..f0d4fad0 --- /dev/null +++ b/tests/res/1/api/loads/template/10/30.txt @@ -0,0 +1 @@ +../../../single_load/template/10/30.j2 \ No newline at end of file diff --git a/tests/res/1/api/loads/template/10/c b/tests/res/1/api/loads/template/10/c new file mode 120000 index 00000000..7e59bdfe --- /dev/null +++ b/tests/res/1/api/loads/template/10/c @@ -0,0 +1 @@ +../../../single_load/template/10/c \ No newline at end of file diff --git a/tests/res/1/api/loads/template/10/e b/tests/res/1/api/loads/template/10/e new file mode 120000 index 00000000..2ecd3b9a --- /dev/null +++ b/tests/res/1/api/loads/template/10/e @@ -0,0 +1 @@ +../../../single_load/template/10/e \ No newline at end of file diff --git a/tests/res/loads/template/10/o/00.json b/tests/res/1/api/loads/template/10/o/00.json similarity index 100% rename from tests/res/loads/template/10/o/00.json rename to tests/res/1/api/loads/template/10/o/00.json diff --git a/tests/res/dumps/basics/10/o/10.json b/tests/res/1/api/loads/template/10/o/10.json similarity index 100% rename from tests/res/dumps/basics/10/o/10.json rename to tests/res/1/api/loads/template/10/o/10.json diff --git a/tests/res/dumps/basics/10/o/20.json b/tests/res/1/api/loads/template/10/o/20.json similarity index 100% rename from tests/res/dumps/basics/10/o/20.json rename to tests/res/1/api/loads/template/10/o/20.json diff --git a/tests/res/dicts/get/20/30.json b/tests/res/1/api/loads/template/10/o/30.json similarity index 
100% rename from tests/res/dicts/get/20/30.json rename to tests/res/1/api/loads/template/10/o/30.json diff --git a/tests/res/json/basic/00.json b/tests/res/1/api/multi_load/basics/00/00.json similarity index 100% rename from tests/res/json/basic/00.json rename to tests/res/1/api/multi_load/basics/00/00.json diff --git a/tests/res/multi_load/basics/00/10.json b/tests/res/1/api/multi_load/basics/00/10.json similarity index 100% rename from tests/res/multi_load/basics/00/10.json rename to tests/res/1/api/multi_load/basics/00/10.json diff --git a/tests/res/multi_load/basics/00/20.json b/tests/res/1/api/multi_load/basics/00/20.json similarity index 100% rename from tests/res/multi_load/basics/00/20.json rename to tests/res/1/api/multi_load/basics/00/20.json diff --git a/tests/res/multi_load/basics/00/30.json b/tests/res/1/api/multi_load/basics/00/30.json similarity index 100% rename from tests/res/multi_load/basics/00/30.json rename to tests/res/1/api/multi_load/basics/00/30.json diff --git a/tests/res/multi_load/basics/00/40.json b/tests/res/1/api/multi_load/basics/00/40.json similarity index 100% rename from tests/res/multi_load/basics/00/40.json rename to tests/res/1/api/multi_load/basics/00/40.json diff --git a/tests/res/json/basic/20.json b/tests/res/1/api/multi_load/basics/00/e/00.json similarity index 100% rename from tests/res/json/basic/20.json rename to tests/res/1/api/multi_load/basics/00/e/00.json diff --git a/tests/res/dump/basics/10/o/00.json b/tests/res/1/api/multi_load/basics/00/o/00.json similarity index 100% rename from tests/res/dump/basics/10/o/00.json rename to tests/res/1/api/multi_load/basics/00/o/00.json diff --git a/tests/res/multi_load/schema/00/s/00.json b/tests/res/1/api/multi_load/basics/00/s/00.json similarity index 100% rename from tests/res/multi_load/schema/00/s/00.json rename to tests/res/1/api/multi_load/basics/00/s/00.json diff --git a/tests/res/multi_load/basics/10/00.json b/tests/res/1/api/multi_load/basics/10/00.json similarity 
index 100% rename from tests/res/multi_load/basics/10/00.json rename to tests/res/1/api/multi_load/basics/10/00.json diff --git a/tests/res/multi_load/basics/10/10.json b/tests/res/1/api/multi_load/basics/10/10.json similarity index 100% rename from tests/res/multi_load/basics/10/10.json rename to tests/res/1/api/multi_load/basics/10/10.json diff --git a/tests/res/multi_load/basics/10/20.json b/tests/res/1/api/multi_load/basics/10/20.json similarity index 100% rename from tests/res/multi_load/basics/10/20.json rename to tests/res/1/api/multi_load/basics/10/20.json diff --git a/tests/res/multi_load/basics/10/30.json b/tests/res/1/api/multi_load/basics/10/30.json similarity index 100% rename from tests/res/multi_load/basics/10/30.json rename to tests/res/1/api/multi_load/basics/10/30.json diff --git a/tests/res/multi_load/basics/10/40.json b/tests/res/1/api/multi_load/basics/10/40.json similarity index 100% rename from tests/res/multi_load/basics/10/40.json rename to tests/res/1/api/multi_load/basics/10/40.json diff --git a/tests/res/multi_load/basics/10/50.json b/tests/res/1/api/multi_load/basics/10/50.json similarity index 100% rename from tests/res/multi_load/basics/10/50.json rename to tests/res/1/api/multi_load/basics/10/50.json diff --git a/tests/res/multi_load/basics/10/e b/tests/res/1/api/multi_load/basics/10/e similarity index 100% rename from tests/res/multi_load/basics/10/e rename to tests/res/1/api/multi_load/basics/10/e diff --git a/tests/res/multi_load/basics/10/o/00.json b/tests/res/1/api/multi_load/basics/10/o/00.json similarity index 100% rename from tests/res/multi_load/basics/10/o/00.json rename to tests/res/1/api/multi_load/basics/10/o/00.json diff --git a/tests/res/multi_load/basics/20/00.json b/tests/res/1/api/multi_load/basics/20/00.json similarity index 100% rename from tests/res/multi_load/basics/20/00.json rename to tests/res/1/api/multi_load/basics/20/00.json diff --git a/tests/res/multi_load/basics/20/10.json 
b/tests/res/1/api/multi_load/basics/20/10.json similarity index 100% rename from tests/res/multi_load/basics/20/10.json rename to tests/res/1/api/multi_load/basics/20/10.json diff --git a/tests/res/multi_load/basics/20/20.json b/tests/res/1/api/multi_load/basics/20/20.json similarity index 100% rename from tests/res/multi_load/basics/20/20.json rename to tests/res/1/api/multi_load/basics/20/20.json diff --git a/tests/res/multi_load/basics/20/30.json b/tests/res/1/api/multi_load/basics/20/30.json similarity index 100% rename from tests/res/multi_load/basics/20/30.json rename to tests/res/1/api/multi_load/basics/20/30.json diff --git a/tests/res/multi_load/basics/20/40.json b/tests/res/1/api/multi_load/basics/20/40.json similarity index 100% rename from tests/res/multi_load/basics/20/40.json rename to tests/res/1/api/multi_load/basics/20/40.json diff --git a/tests/res/multi_load/basics/20/50.json b/tests/res/1/api/multi_load/basics/20/50.json similarity index 100% rename from tests/res/multi_load/basics/20/50.json rename to tests/res/1/api/multi_load/basics/20/50.json diff --git a/tests/res/multi_load/basics/20/e/exp.json b/tests/res/1/api/multi_load/basics/20/e/00.json similarity index 100% rename from tests/res/multi_load/basics/20/e/exp.json rename to tests/res/1/api/multi_load/basics/20/e/00.json diff --git a/tests/res/multi_load/basics/20/o/00.json b/tests/res/1/api/multi_load/basics/20/o/00.json similarity index 100% rename from tests/res/multi_load/basics/20/o/00.json rename to tests/res/1/api/multi_load/basics/20/o/00.json diff --git a/tests/res/multi_load/basics/30/00.json b/tests/res/1/api/multi_load/basics/30/00.json similarity index 100% rename from tests/res/multi_load/basics/30/00.json rename to tests/res/1/api/multi_load/basics/30/00.json diff --git a/tests/res/multi_load/basics/30/10.json b/tests/res/1/api/multi_load/basics/30/10.json similarity index 100% rename from tests/res/multi_load/basics/30/10.json rename to 
tests/res/1/api/multi_load/basics/30/10.json diff --git a/tests/res/multi_load/basics/30/20.json b/tests/res/1/api/multi_load/basics/30/20.json similarity index 100% rename from tests/res/multi_load/basics/30/20.json rename to tests/res/1/api/multi_load/basics/30/20.json diff --git a/tests/res/multi_load/basics/30/30.json b/tests/res/1/api/multi_load/basics/30/30.json similarity index 100% rename from tests/res/multi_load/basics/30/30.json rename to tests/res/1/api/multi_load/basics/30/30.json diff --git a/tests/res/multi_load/basics/30/40.json b/tests/res/1/api/multi_load/basics/30/40.json similarity index 100% rename from tests/res/multi_load/basics/30/40.json rename to tests/res/1/api/multi_load/basics/30/40.json diff --git a/tests/res/multi_load/basics/30/50.json b/tests/res/1/api/multi_load/basics/30/50.json similarity index 100% rename from tests/res/multi_load/basics/30/50.json rename to tests/res/1/api/multi_load/basics/30/50.json diff --git a/tests/res/multi_load/basics/30/e/exp.json b/tests/res/1/api/multi_load/basics/30/e/00.json similarity index 100% rename from tests/res/multi_load/basics/30/e/exp.json rename to tests/res/1/api/multi_load/basics/30/e/00.json diff --git a/tests/res/multi_load/basics/30/o/00.json b/tests/res/1/api/multi_load/basics/30/o/00.json similarity index 100% rename from tests/res/multi_load/basics/30/o/00.json rename to tests/res/1/api/multi_load/basics/30/o/00.json diff --git a/tests/res/multi_load/multi_types/10/00.json b/tests/res/1/api/multi_load/multi_types/10/00.json similarity index 100% rename from tests/res/multi_load/multi_types/10/00.json rename to tests/res/1/api/multi_load/multi_types/10/00.json diff --git a/tests/res/multi_load/multi_types/10/10.ini b/tests/res/1/api/multi_load/multi_types/10/10.ini similarity index 100% rename from tests/res/multi_load/multi_types/10/10.ini rename to tests/res/1/api/multi_load/multi_types/10/10.ini diff --git a/tests/res/multi_load/multi_types/10/20.json 
b/tests/res/1/api/multi_load/multi_types/10/20.json similarity index 100% rename from tests/res/multi_load/multi_types/10/20.json rename to tests/res/1/api/multi_load/multi_types/10/20.json diff --git a/tests/res/multi_load/multi_types/10/30.json b/tests/res/1/api/multi_load/multi_types/10/30.json similarity index 100% rename from tests/res/multi_load/multi_types/10/30.json rename to tests/res/1/api/multi_load/multi_types/10/30.json diff --git a/tests/res/multi_load/multi_types/10/40.properties b/tests/res/1/api/multi_load/multi_types/10/40.properties similarity index 100% rename from tests/res/multi_load/multi_types/10/40.properties rename to tests/res/1/api/multi_load/multi_types/10/40.properties diff --git a/tests/res/multi_load/multi_types/10/50.sh b/tests/res/1/api/multi_load/multi_types/10/50.sh similarity index 100% rename from tests/res/multi_load/multi_types/10/50.sh rename to tests/res/1/api/multi_load/multi_types/10/50.sh diff --git a/tests/res/multi_load/multi_types/10/e/exp.json b/tests/res/1/api/multi_load/multi_types/10/e/00.json similarity index 100% rename from tests/res/multi_load/multi_types/10/e/exp.json rename to tests/res/1/api/multi_load/multi_types/10/e/00.json diff --git a/tests/res/multi_load/basics/00/o/00.json b/tests/res/1/api/multi_load/multi_types/10/o/00.json similarity index 100% rename from tests/res/multi_load/basics/00/o/00.json rename to tests/res/1/api/multi_load/multi_types/10/o/00.json diff --git a/tests/res/multi_load/query/00_00/00.json b/tests/res/1/api/multi_load/query/00_00/00.json similarity index 100% rename from tests/res/multi_load/query/00_00/00.json rename to tests/res/1/api/multi_load/query/00_00/00.json diff --git a/tests/res/multi_load/query/00_00/10.json b/tests/res/1/api/multi_load/query/00_00/10.json similarity index 100% rename from tests/res/multi_load/query/00_00/10.json rename to tests/res/1/api/multi_load/query/00_00/10.json diff --git a/tests/res/multi_load/query/00_00/20.json 
b/tests/res/1/api/multi_load/query/00_00/20.json similarity index 100% rename from tests/res/multi_load/query/00_00/20.json rename to tests/res/1/api/multi_load/query/00_00/20.json diff --git a/tests/res/multi_load/query/00_00/30.json b/tests/res/1/api/multi_load/query/00_00/30.json similarity index 100% rename from tests/res/multi_load/query/00_00/30.json rename to tests/res/1/api/multi_load/query/00_00/30.json diff --git a/tests/res/multi_load/query/00_00/40.json b/tests/res/1/api/multi_load/query/00_00/40.json similarity index 100% rename from tests/res/multi_load/query/00_00/40.json rename to tests/res/1/api/multi_load/query/00_00/40.json diff --git a/tests/res/dicts/get/10/e/20.json b/tests/res/1/api/multi_load/query/00_00/e/00.json similarity index 100% rename from tests/res/dicts/get/10/e/20.json rename to tests/res/1/api/multi_load/query/00_00/e/00.json diff --git a/tests/res/multi_load/multi_types/10/o/00.json b/tests/res/1/api/multi_load/query/00_00/o/00.json similarity index 100% rename from tests/res/multi_load/multi_types/10/o/00.json rename to tests/res/1/api/multi_load/query/00_00/o/00.json diff --git a/tests/res/json/query/q/00_00.txt b/tests/res/1/api/multi_load/query/00_00/q/00.txt similarity index 100% rename from tests/res/json/query/q/00_00.txt rename to tests/res/1/api/multi_load/query/00_00/q/00.txt diff --git a/tests/res/multi_load/query/00_10/00.json b/tests/res/1/api/multi_load/query/00_10/00.json similarity index 100% rename from tests/res/multi_load/query/00_10/00.json rename to tests/res/1/api/multi_load/query/00_10/00.json diff --git a/tests/res/multi_load/query/00_10/10.json b/tests/res/1/api/multi_load/query/00_10/10.json similarity index 100% rename from tests/res/multi_load/query/00_10/10.json rename to tests/res/1/api/multi_load/query/00_10/10.json diff --git a/tests/res/multi_load/query/00_10/20.json b/tests/res/1/api/multi_load/query/00_10/20.json similarity index 100% rename from tests/res/multi_load/query/00_10/20.json rename 
to tests/res/1/api/multi_load/query/00_10/20.json diff --git a/tests/res/multi_load/query/00_10/30.json b/tests/res/1/api/multi_load/query/00_10/30.json similarity index 100% rename from tests/res/multi_load/query/00_10/30.json rename to tests/res/1/api/multi_load/query/00_10/30.json diff --git a/tests/res/multi_load/query/00_10/40.json b/tests/res/1/api/multi_load/query/00_10/40.json similarity index 100% rename from tests/res/multi_load/query/00_10/40.json rename to tests/res/1/api/multi_load/query/00_10/40.json diff --git a/tests/res/cli/single_input/20/r/10.json b/tests/res/1/api/multi_load/query/00_10/e/00.json similarity index 100% rename from tests/res/cli/single_input/20/r/10.json rename to tests/res/1/api/multi_load/query/00_10/e/00.json diff --git a/tests/res/multi_load/query/00_00/o/00.json b/tests/res/1/api/multi_load/query/00_10/o/00.json similarity index 100% rename from tests/res/multi_load/query/00_00/o/00.json rename to tests/res/1/api/multi_load/query/00_10/o/00.json diff --git a/tests/res/json/primitives/30.json b/tests/res/1/api/multi_load/query/00_10/q/00.txt similarity index 100% rename from tests/res/json/primitives/30.json rename to tests/res/1/api/multi_load/query/00_10/q/00.txt diff --git a/tests/res/multi_load/query/10_10/00.json b/tests/res/1/api/multi_load/query/10_10/00.json similarity index 100% rename from tests/res/multi_load/query/10_10/00.json rename to tests/res/1/api/multi_load/query/10_10/00.json diff --git a/tests/res/multi_load/query/10_10/10.json b/tests/res/1/api/multi_load/query/10_10/10.json similarity index 100% rename from tests/res/multi_load/query/10_10/10.json rename to tests/res/1/api/multi_load/query/10_10/10.json diff --git a/tests/res/multi_load/query/10_10/20.json b/tests/res/1/api/multi_load/query/10_10/20.json similarity index 100% rename from tests/res/multi_load/query/10_10/20.json rename to tests/res/1/api/multi_load/query/10_10/20.json diff --git a/tests/res/multi_load/query/10_10/30.json 
b/tests/res/1/api/multi_load/query/10_10/30.json similarity index 100% rename from tests/res/multi_load/query/10_10/30.json rename to tests/res/1/api/multi_load/query/10_10/30.json diff --git a/tests/res/multi_load/query/10_10/40.json b/tests/res/1/api/multi_load/query/10_10/40.json similarity index 100% rename from tests/res/multi_load/query/10_10/40.json rename to tests/res/1/api/multi_load/query/10_10/40.json diff --git a/tests/res/dicts/get/20/e/40.py b/tests/res/1/api/multi_load/query/10_10/e/00.json similarity index 100% rename from tests/res/dicts/get/20/e/40.py rename to tests/res/1/api/multi_load/query/10_10/e/00.json diff --git a/tests/res/multi_load/query/00_10/o/00.json b/tests/res/1/api/multi_load/query/10_10/o/00.json similarity index 100% rename from tests/res/multi_load/query/00_10/o/00.json rename to tests/res/1/api/multi_load/query/10_10/o/00.json diff --git a/tests/res/json/primitives/e/30.txt b/tests/res/1/api/multi_load/query/10_10/q/00.txt similarity index 100% rename from tests/res/json/primitives/e/30.txt rename to tests/res/1/api/multi_load/query/10_10/q/00.txt diff --git a/tests/res/multi_load/query/10_20/00.json b/tests/res/1/api/multi_load/query/10_20/00.json similarity index 100% rename from tests/res/multi_load/query/10_20/00.json rename to tests/res/1/api/multi_load/query/10_20/00.json diff --git a/tests/res/multi_load/query/10_20/10.json b/tests/res/1/api/multi_load/query/10_20/10.json similarity index 100% rename from tests/res/multi_load/query/10_20/10.json rename to tests/res/1/api/multi_load/query/10_20/10.json diff --git a/tests/res/multi_load/query/10_20/20.json b/tests/res/1/api/multi_load/query/10_20/20.json similarity index 100% rename from tests/res/multi_load/query/10_20/20.json rename to tests/res/1/api/multi_load/query/10_20/20.json diff --git a/tests/res/multi_load/query/10_20/30.json b/tests/res/1/api/multi_load/query/10_20/30.json similarity index 100% rename from tests/res/multi_load/query/10_20/30.json rename to 
tests/res/1/api/multi_load/query/10_20/30.json diff --git a/tests/res/multi_load/query/10_20/40.json b/tests/res/1/api/multi_load/query/10_20/40.json similarity index 100% rename from tests/res/multi_load/query/10_20/40.json rename to tests/res/1/api/multi_load/query/10_20/40.json diff --git a/tests/res/multi_load/query/10_20/e/exp.json b/tests/res/1/api/multi_load/query/10_20/e/00.json similarity index 100% rename from tests/res/multi_load/query/10_20/e/exp.json rename to tests/res/1/api/multi_load/query/10_20/e/00.json diff --git a/tests/res/multi_load/query/10_10/o/00.json b/tests/res/1/api/multi_load/query/10_20/o/00.json similarity index 100% rename from tests/res/multi_load/query/10_10/o/00.json rename to tests/res/1/api/multi_load/query/10_20/o/00.json diff --git a/tests/res/json/query/q/10_20.txt b/tests/res/1/api/multi_load/query/10_20/q/00.txt similarity index 100% rename from tests/res/json/query/q/10_20.txt rename to tests/res/1/api/multi_load/query/10_20/q/00.txt diff --git a/tests/res/multi_load/query/10_30/00.json b/tests/res/1/api/multi_load/query/10_30/00.json similarity index 100% rename from tests/res/multi_load/query/10_30/00.json rename to tests/res/1/api/multi_load/query/10_30/00.json diff --git a/tests/res/multi_load/query/10_30/10.json b/tests/res/1/api/multi_load/query/10_30/10.json similarity index 100% rename from tests/res/multi_load/query/10_30/10.json rename to tests/res/1/api/multi_load/query/10_30/10.json diff --git a/tests/res/multi_load/query/10_30/20.json b/tests/res/1/api/multi_load/query/10_30/20.json similarity index 100% rename from tests/res/multi_load/query/10_30/20.json rename to tests/res/1/api/multi_load/query/10_30/20.json diff --git a/tests/res/multi_load/query/10_30/30.json b/tests/res/1/api/multi_load/query/10_30/30.json similarity index 100% rename from tests/res/multi_load/query/10_30/30.json rename to tests/res/1/api/multi_load/query/10_30/30.json diff --git a/tests/res/multi_load/query/10_30/40.json 
b/tests/res/1/api/multi_load/query/10_30/40.json similarity index 100% rename from tests/res/multi_load/query/10_30/40.json rename to tests/res/1/api/multi_load/query/10_30/40.json diff --git a/tests/res/cli/single_input/20/r/20.json b/tests/res/1/api/multi_load/query/10_30/e/00.json similarity index 100% rename from tests/res/cli/single_input/20/r/20.json rename to tests/res/1/api/multi_load/query/10_30/e/00.json diff --git a/tests/res/multi_load/query/10_20/o/00.json b/tests/res/1/api/multi_load/query/10_30/o/00.json similarity index 100% rename from tests/res/multi_load/query/10_20/o/00.json rename to tests/res/1/api/multi_load/query/10_30/o/00.json diff --git a/tests/res/json/query/q/10_30.txt b/tests/res/1/api/multi_load/query/10_30/q/00.txt similarity index 100% rename from tests/res/json/query/q/10_30.txt rename to tests/res/1/api/multi_load/query/10_30/q/00.txt diff --git a/tests/res/multi_load/query/10_40/00.json b/tests/res/1/api/multi_load/query/10_40/00.json similarity index 100% rename from tests/res/multi_load/query/10_40/00.json rename to tests/res/1/api/multi_load/query/10_40/00.json diff --git a/tests/res/multi_load/query/10_40/10.json b/tests/res/1/api/multi_load/query/10_40/10.json similarity index 100% rename from tests/res/multi_load/query/10_40/10.json rename to tests/res/1/api/multi_load/query/10_40/10.json diff --git a/tests/res/multi_load/query/10_40/20.json b/tests/res/1/api/multi_load/query/10_40/20.json similarity index 100% rename from tests/res/multi_load/query/10_40/20.json rename to tests/res/1/api/multi_load/query/10_40/20.json diff --git a/tests/res/multi_load/query/10_40/30.json b/tests/res/1/api/multi_load/query/10_40/30.json similarity index 100% rename from tests/res/multi_load/query/10_40/30.json rename to tests/res/1/api/multi_load/query/10_40/30.json diff --git a/tests/res/multi_load/query/10_40/40.json b/tests/res/1/api/multi_load/query/10_40/40.json similarity index 100% rename from tests/res/multi_load/query/10_40/40.json 
rename to tests/res/1/api/multi_load/query/10_40/40.json diff --git a/tests/res/json/query/e/10_40.json b/tests/res/1/api/multi_load/query/10_40/e/00.json similarity index 100% rename from tests/res/json/query/e/10_40.json rename to tests/res/1/api/multi_load/query/10_40/e/00.json diff --git a/tests/res/multi_load/query/10_30/o/00.json b/tests/res/1/api/multi_load/query/10_40/o/00.json similarity index 100% rename from tests/res/multi_load/query/10_30/o/00.json rename to tests/res/1/api/multi_load/query/10_40/o/00.json diff --git a/tests/res/json/query/q/10_40.txt b/tests/res/1/api/multi_load/query/10_40/q/00.txt similarity index 100% rename from tests/res/json/query/q/10_40.txt rename to tests/res/1/api/multi_load/query/10_40/q/00.txt diff --git a/tests/res/multi_load/query/10_50/00.json b/tests/res/1/api/multi_load/query/10_50/00.json similarity index 100% rename from tests/res/multi_load/query/10_50/00.json rename to tests/res/1/api/multi_load/query/10_50/00.json diff --git a/tests/res/multi_load/query/10_50/10.json b/tests/res/1/api/multi_load/query/10_50/10.json similarity index 100% rename from tests/res/multi_load/query/10_50/10.json rename to tests/res/1/api/multi_load/query/10_50/10.json diff --git a/tests/res/multi_load/query/10_50/20.json b/tests/res/1/api/multi_load/query/10_50/20.json similarity index 100% rename from tests/res/multi_load/query/10_50/20.json rename to tests/res/1/api/multi_load/query/10_50/20.json diff --git a/tests/res/multi_load/query/10_50/30.json b/tests/res/1/api/multi_load/query/10_50/30.json similarity index 100% rename from tests/res/multi_load/query/10_50/30.json rename to tests/res/1/api/multi_load/query/10_50/30.json diff --git a/tests/res/multi_load/query/10_50/40.json b/tests/res/1/api/multi_load/query/10_50/40.json similarity index 100% rename from tests/res/multi_load/query/10_50/40.json rename to tests/res/1/api/multi_load/query/10_50/40.json diff --git a/tests/res/dicts/mk_nested_dic/10/q/10.py 
b/tests/res/1/api/multi_load/query/10_50/e/00.json similarity index 100% rename from tests/res/dicts/mk_nested_dic/10/q/10.py rename to tests/res/1/api/multi_load/query/10_50/e/00.json diff --git a/tests/res/multi_load/query/10_40/o/00.json b/tests/res/1/api/multi_load/query/10_50/o/00.json similarity index 100% rename from tests/res/multi_load/query/10_40/o/00.json rename to tests/res/1/api/multi_load/query/10_50/o/00.json diff --git a/tests/res/json/query/q/10_50.txt b/tests/res/1/api/multi_load/query/10_50/q/00.txt similarity index 100% rename from tests/res/json/query/q/10_50.txt rename to tests/res/1/api/multi_load/query/10_50/q/00.txt diff --git a/tests/res/multi_load/query/20_00/00.json b/tests/res/1/api/multi_load/query/20_00/00.json similarity index 100% rename from tests/res/multi_load/query/20_00/00.json rename to tests/res/1/api/multi_load/query/20_00/00.json diff --git a/tests/res/multi_load/query/20_00/10.json b/tests/res/1/api/multi_load/query/20_00/10.json similarity index 100% rename from tests/res/multi_load/query/20_00/10.json rename to tests/res/1/api/multi_load/query/20_00/10.json diff --git a/tests/res/multi_load/query/20_00/20.json b/tests/res/1/api/multi_load/query/20_00/20.json similarity index 100% rename from tests/res/multi_load/query/20_00/20.json rename to tests/res/1/api/multi_load/query/20_00/20.json diff --git a/tests/res/multi_load/query/20_00/30.json b/tests/res/1/api/multi_load/query/20_00/30.json similarity index 100% rename from tests/res/multi_load/query/20_00/30.json rename to tests/res/1/api/multi_load/query/20_00/30.json diff --git a/tests/res/multi_load/query/20_00/40.json b/tests/res/1/api/multi_load/query/20_00/40.json similarity index 100% rename from tests/res/multi_load/query/20_00/40.json rename to tests/res/1/api/multi_load/query/20_00/40.json diff --git a/tests/res/dicts/get/30/e/20.json b/tests/res/1/api/multi_load/query/20_00/e/00.json similarity index 100% rename from tests/res/dicts/get/30/e/20.json rename to 
tests/res/1/api/multi_load/query/20_00/e/00.json diff --git a/tests/res/multi_load/query/10_50/o/00.json b/tests/res/1/api/multi_load/query/20_00/o/00.json similarity index 100% rename from tests/res/multi_load/query/10_50/o/00.json rename to tests/res/1/api/multi_load/query/20_00/o/00.json diff --git a/tests/res/single_load/query/10/q/00_00.txt b/tests/res/1/api/multi_load/query/20_00/q/00.txt similarity index 100% rename from tests/res/single_load/query/10/q/00_00.txt rename to tests/res/1/api/multi_load/query/20_00/q/00.txt diff --git a/tests/res/multi_load/query/20_10/00.json b/tests/res/1/api/multi_load/query/20_10/00.json similarity index 100% rename from tests/res/multi_load/query/20_10/00.json rename to tests/res/1/api/multi_load/query/20_10/00.json diff --git a/tests/res/multi_load/query/20_10/10.json b/tests/res/1/api/multi_load/query/20_10/10.json similarity index 100% rename from tests/res/multi_load/query/20_10/10.json rename to tests/res/1/api/multi_load/query/20_10/10.json diff --git a/tests/res/multi_load/query/20_10/20.json b/tests/res/1/api/multi_load/query/20_10/20.json similarity index 100% rename from tests/res/multi_load/query/20_10/20.json rename to tests/res/1/api/multi_load/query/20_10/20.json diff --git a/tests/res/multi_load/query/20_10/30.json b/tests/res/1/api/multi_load/query/20_10/30.json similarity index 100% rename from tests/res/multi_load/query/20_10/30.json rename to tests/res/1/api/multi_load/query/20_10/30.json diff --git a/tests/res/multi_load/query/20_10/40.json b/tests/res/1/api/multi_load/query/20_10/40.json similarity index 100% rename from tests/res/multi_load/query/20_10/40.json rename to tests/res/1/api/multi_load/query/20_10/40.json diff --git a/tests/res/json/primitives/20.json b/tests/res/1/api/multi_load/query/20_10/e/00.json similarity index 100% rename from tests/res/json/primitives/20.json rename to tests/res/1/api/multi_load/query/20_10/e/00.json diff --git a/tests/res/multi_load/query/20_00/o/00.json 
b/tests/res/1/api/multi_load/query/20_10/o/00.json similarity index 100% rename from tests/res/multi_load/query/20_00/o/00.json rename to tests/res/1/api/multi_load/query/20_10/o/00.json diff --git a/tests/res/json/query/q/20_10.txt b/tests/res/1/api/multi_load/query/20_10/q/00.txt similarity index 100% rename from tests/res/json/query/q/20_10.txt rename to tests/res/1/api/multi_load/query/20_10/q/00.txt diff --git a/tests/res/multi_load/query/20_20/00.json b/tests/res/1/api/multi_load/query/20_20/00.json similarity index 100% rename from tests/res/multi_load/query/20_20/00.json rename to tests/res/1/api/multi_load/query/20_20/00.json diff --git a/tests/res/multi_load/query/20_20/10.json b/tests/res/1/api/multi_load/query/20_20/10.json similarity index 100% rename from tests/res/multi_load/query/20_20/10.json rename to tests/res/1/api/multi_load/query/20_20/10.json diff --git a/tests/res/multi_load/query/20_20/20.json b/tests/res/1/api/multi_load/query/20_20/20.json similarity index 100% rename from tests/res/multi_load/query/20_20/20.json rename to tests/res/1/api/multi_load/query/20_20/20.json diff --git a/tests/res/multi_load/query/20_20/30.json b/tests/res/1/api/multi_load/query/20_20/30.json similarity index 100% rename from tests/res/multi_load/query/20_20/30.json rename to tests/res/1/api/multi_load/query/20_20/30.json diff --git a/tests/res/multi_load/query/20_20/40.json b/tests/res/1/api/multi_load/query/20_20/40.json similarity index 100% rename from tests/res/multi_load/query/20_20/40.json rename to tests/res/1/api/multi_load/query/20_20/40.json diff --git a/tests/res/dicts/get/30/e/30.json b/tests/res/1/api/multi_load/query/20_20/e/00.json similarity index 100% rename from tests/res/dicts/get/30/e/30.json rename to tests/res/1/api/multi_load/query/20_20/e/00.json diff --git a/tests/res/multi_load/query/20_10/o/00.json b/tests/res/1/api/multi_load/query/20_20/o/00.json similarity index 100% rename from tests/res/multi_load/query/20_10/o/00.json rename to 
tests/res/1/api/multi_load/query/20_20/o/00.json diff --git a/tests/res/json/query/q/20_20.txt b/tests/res/1/api/multi_load/query/20_20/q/00.txt similarity index 100% rename from tests/res/json/query/q/20_20.txt rename to tests/res/1/api/multi_load/query/20_20/q/00.txt diff --git a/tests/res/multi_load/basics/00/00.json b/tests/res/1/api/multi_load/schema/00/00.json similarity index 100% rename from tests/res/multi_load/basics/00/00.json rename to tests/res/1/api/multi_load/schema/00/00.json diff --git a/tests/res/multi_load/schema/00/10.json b/tests/res/1/api/multi_load/schema/00/10.json similarity index 100% rename from tests/res/multi_load/schema/00/10.json rename to tests/res/1/api/multi_load/schema/00/10.json diff --git a/tests/res/multi_load/schema/00/20.json b/tests/res/1/api/multi_load/schema/00/20.json similarity index 100% rename from tests/res/multi_load/schema/00/20.json rename to tests/res/1/api/multi_load/schema/00/20.json diff --git a/tests/res/multi_load/schema/00/30.json b/tests/res/1/api/multi_load/schema/00/30.json similarity index 100% rename from tests/res/multi_load/schema/00/30.json rename to tests/res/1/api/multi_load/schema/00/30.json diff --git a/tests/res/multi_load/schema/00/40.json b/tests/res/1/api/multi_load/schema/00/40.json similarity index 100% rename from tests/res/multi_load/schema/00/40.json rename to tests/res/1/api/multi_load/schema/00/40.json diff --git a/tests/res/multi_load/basics/00/e/exp.json b/tests/res/1/api/multi_load/schema/00/e/00.json similarity index 100% rename from tests/res/multi_load/basics/00/e/exp.json rename to tests/res/1/api/multi_load/schema/00/e/00.json diff --git a/tests/res/multi_load/query/20_20/o/00.json b/tests/res/1/api/multi_load/schema/00/o/00.json similarity index 100% rename from tests/res/multi_load/query/20_20/o/00.json rename to tests/res/1/api/multi_load/schema/00/o/00.json diff --git a/tests/res/json/basic/schema/20.json b/tests/res/1/api/multi_load/schema/00/s/00.json similarity index 
100% rename from tests/res/json/basic/schema/20.json rename to tests/res/1/api/multi_load/schema/00/s/00.json diff --git a/tests/res/multi_load/template/00/00.json b/tests/res/1/api/multi_load/template/00/00.json similarity index 100% rename from tests/res/multi_load/template/00/00.json rename to tests/res/1/api/multi_load/template/00/00.json diff --git a/tests/res/multi_load/template/00/30.json b/tests/res/1/api/multi_load/template/00/30.json similarity index 100% rename from tests/res/multi_load/template/00/30.json rename to tests/res/1/api/multi_load/template/00/30.json diff --git a/tests/res/multi_load/template/00/c/00.json b/tests/res/1/api/multi_load/template/00/c/00.json similarity index 100% rename from tests/res/multi_load/template/00/c/00.json rename to tests/res/1/api/multi_load/template/00/c/00.json diff --git a/tests/res/multi_load/template/00/e/exp.json b/tests/res/1/api/multi_load/template/00/e/00.json similarity index 100% rename from tests/res/multi_load/template/00/e/exp.json rename to tests/res/1/api/multi_load/template/00/e/00.json diff --git a/tests/res/multi_load/template/00/o/00.json b/tests/res/1/api/multi_load/template/00/o/00.json similarity index 100% rename from tests/res/multi_load/template/00/o/00.json rename to tests/res/1/api/multi_load/template/00/o/00.json diff --git a/tests/res/json/template/00.json b/tests/res/1/api/multi_load/template/10/00.json similarity index 100% rename from tests/res/json/template/00.json rename to tests/res/1/api/multi_load/template/10/00.json diff --git a/tests/res/multi_load/template/10/10.json b/tests/res/1/api/multi_load/template/10/10.json similarity index 100% rename from tests/res/multi_load/template/10/10.json rename to tests/res/1/api/multi_load/template/10/10.json diff --git a/tests/res/multi_load/template/10/20.json b/tests/res/1/api/multi_load/template/10/20.json similarity index 100% rename from tests/res/multi_load/template/10/20.json rename to tests/res/1/api/multi_load/template/10/20.json 
diff --git a/tests/res/multi_load/template/10/30.json b/tests/res/1/api/multi_load/template/10/30.json similarity index 100% rename from tests/res/multi_load/template/10/30.json rename to tests/res/1/api/multi_load/template/10/30.json diff --git a/tests/res/multi_load/template/10/c/00.json b/tests/res/1/api/multi_load/template/10/c/00.json similarity index 100% rename from tests/res/multi_load/template/10/c/00.json rename to tests/res/1/api/multi_load/template/10/c/00.json diff --git a/tests/res/multi_load/schema/00/e/exp.json b/tests/res/1/api/multi_load/template/10/e/00.json similarity index 100% rename from tests/res/multi_load/schema/00/e/exp.json rename to tests/res/1/api/multi_load/template/10/e/00.json diff --git a/tests/res/multi_load/template/10/o/00.json b/tests/res/1/api/multi_load/template/10/o/00.json similarity index 100% rename from tests/res/multi_load/template/10/o/00.json rename to tests/res/1/api/multi_load/template/10/o/00.json diff --git a/tests/res/multi_load/template/20/00.json b/tests/res/1/api/multi_load/template/20/00.json similarity index 100% rename from tests/res/multi_load/template/20/00.json rename to tests/res/1/api/multi_load/template/20/00.json diff --git a/tests/res/multi_load/template/20/10.json b/tests/res/1/api/multi_load/template/20/10.json similarity index 100% rename from tests/res/multi_load/template/20/10.json rename to tests/res/1/api/multi_load/template/20/10.json diff --git a/tests/res/multi_load/template/20/20.json b/tests/res/1/api/multi_load/template/20/20.json similarity index 100% rename from tests/res/multi_load/template/20/20.json rename to tests/res/1/api/multi_load/template/20/20.json diff --git a/tests/res/multi_load/template/20/30.json b/tests/res/1/api/multi_load/template/20/30.json similarity index 100% rename from tests/res/multi_load/template/20/30.json rename to tests/res/1/api/multi_load/template/20/30.json diff --git a/tests/res/multi_load/template/20/e/exp.json 
b/tests/res/1/api/multi_load/template/20/e/00.json similarity index 100% rename from tests/res/multi_load/template/20/e/exp.json rename to tests/res/1/api/multi_load/template/20/e/00.json diff --git a/tests/res/multi_load/template/20/o b/tests/res/1/api/multi_load/template/20/o similarity index 100% rename from tests/res/multi_load/template/20/o rename to tests/res/1/api/multi_load/template/20/o diff --git a/tests/res/1/api/open/basics b/tests/res/1/api/open/basics new file mode 120000 index 00000000..5c0d3053 --- /dev/null +++ b/tests/res/1/api/open/basics @@ -0,0 +1 @@ +../single_load/basics \ No newline at end of file diff --git a/tests/res/single_load/ac_parser/10/00.conf b/tests/res/1/api/single_load/ac_parser/10/00.conf similarity index 100% rename from tests/res/single_load/ac_parser/10/00.conf rename to tests/res/1/api/single_load/ac_parser/10/00.conf diff --git a/tests/res/single_load/ac_parser/10/10.conf b/tests/res/1/api/single_load/ac_parser/10/10.conf similarity index 100% rename from tests/res/single_load/ac_parser/10/10.conf rename to tests/res/1/api/single_load/ac_parser/10/10.conf diff --git a/tests/res/single_load/ac_parser/10/20.conf b/tests/res/1/api/single_load/ac_parser/10/20.conf similarity index 100% rename from tests/res/single_load/ac_parser/10/20.conf rename to tests/res/1/api/single_load/ac_parser/10/20.conf diff --git a/tests/res/single_load/ac_parser/10/e b/tests/res/1/api/single_load/ac_parser/10/e similarity index 100% rename from tests/res/single_load/ac_parser/10/e rename to tests/res/1/api/single_load/ac_parser/10/e diff --git a/tests/res/single_load/ac_parser/10/o/00.json b/tests/res/1/api/single_load/ac_parser/10/o/00.json similarity index 100% rename from tests/res/single_load/ac_parser/10/o/00.json rename to tests/res/1/api/single_load/ac_parser/10/o/00.json diff --git a/tests/res/dumps/basics/20/o/10.json b/tests/res/1/api/single_load/ac_parser/10/o/10.json similarity index 100% rename from tests/res/dumps/basics/20/o/10.json 
rename to tests/res/1/api/single_load/ac_parser/10/o/10.json diff --git a/tests/res/dumps/basics/20/o/20.json b/tests/res/1/api/single_load/ac_parser/10/o/20.json similarity index 100% rename from tests/res/dumps/basics/20/o/20.json rename to tests/res/1/api/single_load/ac_parser/10/o/20.json diff --git a/tests/res/multi_load/schema/00/00.json b/tests/res/1/api/single_load/basics/10/00.json similarity index 100% rename from tests/res/multi_load/schema/00/00.json rename to tests/res/1/api/single_load/basics/10/00.json diff --git a/tests/res/json/basic/10.json b/tests/res/1/api/single_load/basics/10/10.json similarity index 100% rename from tests/res/json/basic/10.json rename to tests/res/1/api/single_load/basics/10/10.json diff --git a/tests/res/single_load/basics/10/20.json b/tests/res/1/api/single_load/basics/10/20.json similarity index 100% rename from tests/res/single_load/basics/10/20.json rename to tests/res/1/api/single_load/basics/10/20.json diff --git a/tests/res/single_load/basics/10/e/00.py b/tests/res/1/api/single_load/basics/10/e/00.py similarity index 100% rename from tests/res/single_load/basics/10/e/00.py rename to tests/res/1/api/single_load/basics/10/e/00.py diff --git a/tests/res/single_load/basics/10/e/10.py b/tests/res/1/api/single_load/basics/10/e/10.py similarity index 100% rename from tests/res/single_load/basics/10/e/10.py rename to tests/res/1/api/single_load/basics/10/e/10.py diff --git a/tests/res/single_load/basics/10/e/20.py b/tests/res/1/api/single_load/basics/10/e/20.py similarity index 100% rename from tests/res/single_load/basics/10/e/20.py rename to tests/res/1/api/single_load/basics/10/e/20.py diff --git a/tests/res/single_load/basics/20/00.json b/tests/res/1/api/single_load/basics/20/00.json similarity index 100% rename from tests/res/single_load/basics/20/00.json rename to tests/res/1/api/single_load/basics/20/00.json diff --git a/tests/res/cli/single_input/20/10.json b/tests/res/1/api/single_load/basics/20/10.json similarity 
index 100% rename from tests/res/cli/single_input/20/10.json rename to tests/res/1/api/single_load/basics/20/10.json diff --git a/tests/res/single_load/basics/20/20.json b/tests/res/1/api/single_load/basics/20/20.json similarity index 100% rename from tests/res/single_load/basics/20/20.json rename to tests/res/1/api/single_load/basics/20/20.json diff --git a/tests/res/dicts/get/20/e/00.json b/tests/res/1/api/single_load/basics/20/e/00.json similarity index 100% rename from tests/res/dicts/get/20/e/00.json rename to tests/res/1/api/single_load/basics/20/e/00.json diff --git a/tests/res/cli/show/10/o/10.json b/tests/res/1/api/single_load/basics/20/e/10.json similarity index 100% rename from tests/res/cli/show/10/o/10.json rename to tests/res/1/api/single_load/basics/20/e/10.json diff --git a/tests/res/cli/show/10/o/20.json b/tests/res/1/api/single_load/basics/20/e/20.json similarity index 100% rename from tests/res/cli/show/10/o/20.json rename to tests/res/1/api/single_load/basics/20/e/20.json diff --git a/tests/res/single_load/basics/30/00.json b/tests/res/1/api/single_load/basics/30/00.json similarity index 100% rename from tests/res/single_load/basics/30/00.json rename to tests/res/1/api/single_load/basics/30/00.json diff --git a/tests/res/cli/single_input/30/10.json b/tests/res/1/api/single_load/basics/30/10.json similarity index 100% rename from tests/res/cli/single_input/30/10.json rename to tests/res/1/api/single_load/basics/30/10.json diff --git a/tests/res/single_load/basics/30/20.json b/tests/res/1/api/single_load/basics/30/20.json similarity index 100% rename from tests/res/single_load/basics/30/20.json rename to tests/res/1/api/single_load/basics/30/20.json diff --git a/tests/res/single_load/basics/20/e/00.json b/tests/res/1/api/single_load/basics/30/e/00.json similarity index 100% rename from tests/res/single_load/basics/20/e/00.json rename to tests/res/1/api/single_load/basics/30/e/00.json diff --git a/tests/res/cli/show/20/o/10.json 
b/tests/res/1/api/single_load/basics/30/e/10.json similarity index 100% rename from tests/res/cli/show/20/o/10.json rename to tests/res/1/api/single_load/basics/30/e/10.json diff --git a/tests/res/cli/show/20/o/20.json b/tests/res/1/api/single_load/basics/30/e/20.json similarity index 100% rename from tests/res/cli/show/20/o/20.json rename to tests/res/1/api/single_load/basics/30/e/20.json diff --git a/tests/res/single_load/basics/30/o/00.json b/tests/res/1/api/single_load/basics/30/o/00.json similarity index 100% rename from tests/res/single_load/basics/30/o/00.json rename to tests/res/1/api/single_load/basics/30/o/00.json diff --git a/tests/res/loads/basics/10/o/10.json b/tests/res/1/api/single_load/basics/30/o/10.json similarity index 100% rename from tests/res/loads/basics/10/o/10.json rename to tests/res/1/api/single_load/basics/30/o/10.json diff --git a/tests/res/loads/basics/10/o/20.json b/tests/res/1/api/single_load/basics/30/o/20.json similarity index 100% rename from tests/res/loads/basics/10/o/20.json rename to tests/res/1/api/single_load/basics/30/o/20.json diff --git a/tests/res/single_load/multi_types/10/10.ini b/tests/res/1/api/single_load/multi_types/10/10.ini similarity index 100% rename from tests/res/single_load/multi_types/10/10.ini rename to tests/res/1/api/single_load/multi_types/10/10.ini diff --git a/tests/res/single_load/multi_types/10/20.properties b/tests/res/1/api/single_load/multi_types/10/20.properties similarity index 100% rename from tests/res/single_load/multi_types/10/20.properties rename to tests/res/1/api/single_load/multi_types/10/20.properties diff --git a/tests/res/single_load/multi_types/10/e/10.json b/tests/res/1/api/single_load/multi_types/10/e/10.json similarity index 100% rename from tests/res/single_load/multi_types/10/e/10.json rename to tests/res/1/api/single_load/multi_types/10/e/10.json diff --git a/tests/res/single_load/multi_types/10/e/20.json b/tests/res/1/api/single_load/multi_types/10/e/20.json similarity index 
100% rename from tests/res/single_load/multi_types/10/e/20.json rename to tests/res/1/api/single_load/multi_types/10/e/20.json diff --git a/tests/res/shellvars/10.sh b/tests/res/1/api/single_load/multi_types/20/10.sh similarity index 100% rename from tests/res/shellvars/10.sh rename to tests/res/1/api/single_load/multi_types/20/10.sh diff --git a/tests/res/shellvars/e/10.json b/tests/res/1/api/single_load/multi_types/20/e/10.json similarity index 100% rename from tests/res/shellvars/e/10.json rename to tests/res/1/api/single_load/multi_types/20/e/10.json diff --git a/tests/res/single_load/multi_types/20/o/10.json b/tests/res/1/api/single_load/multi_types/20/o/10.json similarity index 100% rename from tests/res/single_load/multi_types/20/o/10.json rename to tests/res/1/api/single_load/multi_types/20/o/10.json diff --git a/tests/res/single_load/multi_types/30/00.xml b/tests/res/1/api/single_load/multi_types/30/00.xml similarity index 100% rename from tests/res/single_load/multi_types/30/00.xml rename to tests/res/1/api/single_load/multi_types/30/00.xml diff --git a/tests/res/single_load/multi_types/30/10.xml b/tests/res/1/api/single_load/multi_types/30/10.xml similarity index 100% rename from tests/res/single_load/multi_types/30/10.xml rename to tests/res/1/api/single_load/multi_types/30/10.xml diff --git a/tests/res/single_load/multi_types/30/20.xml b/tests/res/1/api/single_load/multi_types/30/20.xml similarity index 100% rename from tests/res/single_load/multi_types/30/20.xml rename to tests/res/1/api/single_load/multi_types/30/20.xml diff --git a/tests/res/single_load/multi_types/30/e/00.json b/tests/res/1/api/single_load/multi_types/30/e/00.json similarity index 100% rename from tests/res/single_load/multi_types/30/e/00.json rename to tests/res/1/api/single_load/multi_types/30/e/00.json diff --git a/tests/res/single_load/multi_types/30/e/10.json b/tests/res/1/api/single_load/multi_types/30/e/10.json similarity index 100% rename from 
tests/res/single_load/multi_types/30/e/10.json rename to tests/res/1/api/single_load/multi_types/30/e/10.json diff --git a/tests/res/single_load/multi_types/30/e/20.json b/tests/res/1/api/single_load/multi_types/30/e/20.json similarity index 100% rename from tests/res/single_load/multi_types/30/e/20.json rename to tests/res/1/api/single_load/multi_types/30/e/20.json diff --git a/tests/res/json/primitives/10.json b/tests/res/1/api/single_load/primitives/10/10.json similarity index 100% rename from tests/res/json/primitives/10.json rename to tests/res/1/api/single_load/primitives/10/10.json diff --git a/tests/res/single_load/primitives/10/20.json b/tests/res/1/api/single_load/primitives/10/20.json similarity index 100% rename from tests/res/single_load/primitives/10/20.json rename to tests/res/1/api/single_load/primitives/10/20.json diff --git a/tests/res/single_load/primitives/10/30.json b/tests/res/1/api/single_load/primitives/10/30.json similarity index 100% rename from tests/res/single_load/primitives/10/30.json rename to tests/res/1/api/single_load/primitives/10/30.json diff --git a/tests/res/json/primitives/40.json b/tests/res/1/api/single_load/primitives/10/40.json similarity index 100% rename from tests/res/json/primitives/40.json rename to tests/res/1/api/single_load/primitives/10/40.json diff --git a/tests/res/single_load/primitives/10/e/10.py b/tests/res/1/api/single_load/primitives/10/e/10.py similarity index 100% rename from tests/res/single_load/primitives/10/e/10.py rename to tests/res/1/api/single_load/primitives/10/e/10.py diff --git a/tests/res/single_load/primitives/10/e/20.py b/tests/res/1/api/single_load/primitives/10/e/20.py similarity index 100% rename from tests/res/single_load/primitives/10/e/20.py rename to tests/res/1/api/single_load/primitives/10/e/20.py diff --git a/tests/res/single_load/primitives/10/e/30.py b/tests/res/1/api/single_load/primitives/10/e/30.py similarity index 100% rename from tests/res/single_load/primitives/10/e/30.py 
rename to tests/res/1/api/single_load/primitives/10/e/30.py diff --git a/tests/res/single_load/primitives/10/e/40.py b/tests/res/1/api/single_load/primitives/10/e/40.py similarity index 100% rename from tests/res/single_load/primitives/10/e/40.py rename to tests/res/1/api/single_load/primitives/10/e/40.py diff --git a/tests/res/single_load/query/10/00_00.json b/tests/res/1/api/single_load/query/10/00_00.json similarity index 100% rename from tests/res/single_load/query/10/00_00.json rename to tests/res/1/api/single_load/query/10/00_00.json diff --git a/tests/res/single_load/query/10/00_10.json b/tests/res/1/api/single_load/query/10/00_10.json similarity index 100% rename from tests/res/single_load/query/10/00_10.json rename to tests/res/1/api/single_load/query/10/00_10.json diff --git a/tests/res/single_load/query/10/10_00.json b/tests/res/1/api/single_load/query/10/10_00.json similarity index 100% rename from tests/res/single_load/query/10/10_00.json rename to tests/res/1/api/single_load/query/10/10_00.json diff --git a/tests/res/single_load/query/10/10_10.json b/tests/res/1/api/single_load/query/10/10_10.json similarity index 100% rename from tests/res/single_load/query/10/10_10.json rename to tests/res/1/api/single_load/query/10/10_10.json diff --git a/tests/res/single_load/query/10/10_20.json b/tests/res/1/api/single_load/query/10/10_20.json similarity index 100% rename from tests/res/single_load/query/10/10_20.json rename to tests/res/1/api/single_load/query/10/10_20.json diff --git a/tests/res/single_load/query/10/10_30.json b/tests/res/1/api/single_load/query/10/10_30.json similarity index 100% rename from tests/res/single_load/query/10/10_30.json rename to tests/res/1/api/single_load/query/10/10_30.json diff --git a/tests/res/single_load/schema/10/10.json b/tests/res/1/api/single_load/query/10/10_40.json similarity index 100% rename from tests/res/single_load/schema/10/10.json rename to tests/res/1/api/single_load/query/10/10_40.json diff --git 
a/tests/res/1/api/single_load/query/10/10_50.json b/tests/res/1/api/single_load/query/10/10_50.json new file mode 120000 index 00000000..8da49fea --- /dev/null +++ b/tests/res/1/api/single_load/query/10/10_50.json @@ -0,0 +1 @@ +../../basics/10/10.json \ No newline at end of file diff --git a/tests/res/single_load/query/10/20_00.json b/tests/res/1/api/single_load/query/10/20_00.json similarity index 100% rename from tests/res/single_load/query/10/20_00.json rename to tests/res/1/api/single_load/query/10/20_00.json diff --git a/tests/res/single_load/query/10/20_10.json b/tests/res/1/api/single_load/query/10/20_10.json similarity index 100% rename from tests/res/single_load/query/10/20_10.json rename to tests/res/1/api/single_load/query/10/20_10.json diff --git a/tests/res/single_load/query/10/20_20.json b/tests/res/1/api/single_load/query/10/20_20.json similarity index 100% rename from tests/res/single_load/query/10/20_20.json rename to tests/res/1/api/single_load/query/10/20_20.json diff --git a/tests/res/json/query/e/00_00.json b/tests/res/1/api/single_load/query/10/e/00_00.json similarity index 100% rename from tests/res/json/query/e/00_00.json rename to tests/res/1/api/single_load/query/10/e/00_00.json diff --git a/tests/res/json/query/e/00_10.json b/tests/res/1/api/single_load/query/10/e/00_10.json similarity index 100% rename from tests/res/json/query/e/00_10.json rename to tests/res/1/api/single_load/query/10/e/00_10.json diff --git a/tests/res/loads/query/10/o/10_00.json b/tests/res/1/api/single_load/query/10/e/10_00.json similarity index 100% rename from tests/res/loads/query/10/o/10_00.json rename to tests/res/1/api/single_load/query/10/e/10_00.json diff --git a/tests/res/json/query/e/10_10.json b/tests/res/1/api/single_load/query/10/e/10_10.json similarity index 100% rename from tests/res/json/query/e/10_10.json rename to tests/res/1/api/single_load/query/10/e/10_10.json diff --git a/tests/res/json/query/e/10_20.json 
b/tests/res/1/api/single_load/query/10/e/10_20.json similarity index 100% rename from tests/res/json/query/e/10_20.json rename to tests/res/1/api/single_load/query/10/e/10_20.json diff --git a/tests/res/json/query/e/10_30.json b/tests/res/1/api/single_load/query/10/e/10_30.json similarity index 100% rename from tests/res/json/query/e/10_30.json rename to tests/res/1/api/single_load/query/10/e/10_30.json diff --git a/tests/res/single_load/query/10/e/10_40.json b/tests/res/1/api/single_load/query/10/e/10_40.json similarity index 100% rename from tests/res/single_load/query/10/e/10_40.json rename to tests/res/1/api/single_load/query/10/e/10_40.json diff --git a/tests/res/json/query/e/10_50.json b/tests/res/1/api/single_load/query/10/e/10_50.json similarity index 100% rename from tests/res/json/query/e/10_50.json rename to tests/res/1/api/single_load/query/10/e/10_50.json diff --git a/tests/res/single_load/query/10/e/20_00.json b/tests/res/1/api/single_load/query/10/e/20_00.json similarity index 100% rename from tests/res/single_load/query/10/e/20_00.json rename to tests/res/1/api/single_load/query/10/e/20_00.json diff --git a/tests/res/json/query/e/20_10.json b/tests/res/1/api/single_load/query/10/e/20_10.json similarity index 100% rename from tests/res/json/query/e/20_10.json rename to tests/res/1/api/single_load/query/10/e/20_10.json diff --git a/tests/res/json/query/e/20_20.json b/tests/res/1/api/single_load/query/10/e/20_20.json similarity index 100% rename from tests/res/json/query/e/20_20.json rename to tests/res/1/api/single_load/query/10/e/20_20.json diff --git a/tests/res/1/api/single_load/query/10/q/00_00.txt b/tests/res/1/api/single_load/query/10/q/00_00.txt new file mode 100644 index 00000000..c9599b95 --- /dev/null +++ b/tests/res/1/api/single_load/query/10/q/00_00.txt @@ -0,0 +1 @@ +"z" diff --git a/tests/res/json/query/q/00_10.txt b/tests/res/1/api/single_load/query/10/q/00_10.txt similarity index 100% rename from tests/res/json/query/q/00_10.txt rename 
to tests/res/1/api/single_load/query/10/q/00_10.txt diff --git a/tests/res/json/query/q/10_00.txt b/tests/res/1/api/single_load/query/10/q/10_00.txt similarity index 100% rename from tests/res/json/query/q/10_00.txt rename to tests/res/1/api/single_load/query/10/q/10_00.txt diff --git a/tests/res/json/query/q/10_10.txt b/tests/res/1/api/single_load/query/10/q/10_10.txt similarity index 100% rename from tests/res/json/query/q/10_10.txt rename to tests/res/1/api/single_load/query/10/q/10_10.txt diff --git a/tests/res/single_load/query/10/q/10_20.txt b/tests/res/1/api/single_load/query/10/q/10_20.txt similarity index 100% rename from tests/res/single_load/query/10/q/10_20.txt rename to tests/res/1/api/single_load/query/10/q/10_20.txt diff --git a/tests/res/single_load/query/10/q/10_30.txt b/tests/res/1/api/single_load/query/10/q/10_30.txt similarity index 100% rename from tests/res/single_load/query/10/q/10_30.txt rename to tests/res/1/api/single_load/query/10/q/10_30.txt diff --git a/tests/res/single_load/query/10/q/10_40.txt b/tests/res/1/api/single_load/query/10/q/10_40.txt similarity index 100% rename from tests/res/single_load/query/10/q/10_40.txt rename to tests/res/1/api/single_load/query/10/q/10_40.txt diff --git a/tests/res/single_load/query/10/q/10_50.txt b/tests/res/1/api/single_load/query/10/q/10_50.txt similarity index 100% rename from tests/res/single_load/query/10/q/10_50.txt rename to tests/res/1/api/single_load/query/10/q/10_50.txt diff --git a/tests/res/json/query/q/20_00.txt b/tests/res/1/api/single_load/query/10/q/20_00.txt similarity index 100% rename from tests/res/json/query/q/20_00.txt rename to tests/res/1/api/single_load/query/10/q/20_00.txt diff --git a/tests/res/single_load/query/10/q/20_10.txt b/tests/res/1/api/single_load/query/10/q/20_10.txt similarity index 100% rename from tests/res/single_load/query/10/q/20_10.txt rename to tests/res/1/api/single_load/query/10/q/20_10.txt diff --git a/tests/res/single_load/query/10/q/20_20.txt 
b/tests/res/1/api/single_load/query/10/q/20_20.txt similarity index 100% rename from tests/res/single_load/query/10/q/20_20.txt rename to tests/res/1/api/single_load/query/10/q/20_20.txt diff --git a/tests/res/single_load/schema/10/00.json b/tests/res/1/api/single_load/schema/10/00.json similarity index 100% rename from tests/res/single_load/schema/10/00.json rename to tests/res/1/api/single_load/schema/10/00.json diff --git a/tests/res/1/api/single_load/schema/10/10.json b/tests/res/1/api/single_load/schema/10/10.json new file mode 120000 index 00000000..8da49fea --- /dev/null +++ b/tests/res/1/api/single_load/schema/10/10.json @@ -0,0 +1 @@ +../../basics/10/10.json \ No newline at end of file diff --git a/tests/res/single_load/schema/10/20.json b/tests/res/1/api/single_load/schema/10/20.json similarity index 100% rename from tests/res/single_load/schema/10/20.json rename to tests/res/1/api/single_load/schema/10/20.json diff --git a/tests/res/single_load/schema/10/e b/tests/res/1/api/single_load/schema/10/e similarity index 100% rename from tests/res/single_load/schema/10/e rename to tests/res/1/api/single_load/schema/10/e diff --git a/tests/res/json/basic/schema/00.json b/tests/res/1/api/single_load/schema/10/s/00.json similarity index 100% rename from tests/res/json/basic/schema/00.json rename to tests/res/1/api/single_load/schema/10/s/00.json diff --git a/tests/res/json/basic/schema/10.json b/tests/res/1/api/single_load/schema/10/s/10.json similarity index 100% rename from tests/res/json/basic/schema/10.json rename to tests/res/1/api/single_load/schema/10/s/10.json diff --git a/tests/res/single_load/schema/10/s/20.json b/tests/res/1/api/single_load/schema/10/s/20.json similarity index 100% rename from tests/res/single_load/schema/10/s/20.json rename to tests/res/1/api/single_load/schema/10/s/20.json diff --git a/tests/res/single_load/template/00/00.j2 b/tests/res/1/api/single_load/template/00/00.j2 similarity index 100% rename from 
tests/res/single_load/template/00/00.j2 rename to tests/res/1/api/single_load/template/00/00.j2 diff --git a/tests/res/single_load/template/00/10.j2 b/tests/res/1/api/single_load/template/00/10.j2 similarity index 100% rename from tests/res/single_load/template/00/10.j2 rename to tests/res/1/api/single_load/template/00/10.j2 diff --git a/tests/res/single_load/template/00/20.j2 b/tests/res/1/api/single_load/template/00/20.j2 similarity index 100% rename from tests/res/single_load/template/00/20.j2 rename to tests/res/1/api/single_load/template/00/20.j2 diff --git a/tests/res/single_load/template/00/c/00.json b/tests/res/1/api/single_load/template/00/c/00.json similarity index 100% rename from tests/res/single_load/template/00/c/00.json rename to tests/res/1/api/single_load/template/00/c/00.json diff --git a/tests/res/single_load/template/00/c/10.json b/tests/res/1/api/single_load/template/00/c/10.json similarity index 100% rename from tests/res/single_load/template/00/c/10.json rename to tests/res/1/api/single_load/template/00/c/10.json diff --git a/tests/res/single_load/template/00/c/20.json b/tests/res/1/api/single_load/template/00/c/20.json similarity index 100% rename from tests/res/single_load/template/00/c/20.json rename to tests/res/1/api/single_load/template/00/c/20.json diff --git a/tests/res/single_load/template/00/e/00.json b/tests/res/1/api/single_load/template/00/e/00.json similarity index 100% rename from tests/res/single_load/template/00/e/00.json rename to tests/res/1/api/single_load/template/00/e/00.json diff --git a/tests/res/single_load/template/00/e/10.json b/tests/res/1/api/single_load/template/00/e/10.json similarity index 100% rename from tests/res/single_load/template/00/e/10.json rename to tests/res/1/api/single_load/template/00/e/10.json diff --git a/tests/res/single_load/template/00/e/20.json b/tests/res/1/api/single_load/template/00/e/20.json similarity index 100% rename from tests/res/single_load/template/00/e/20.json rename to 
tests/res/1/api/single_load/template/00/e/20.json diff --git a/tests/res/single_load/template/00/o/00.json b/tests/res/1/api/single_load/template/00/o/00.json similarity index 100% rename from tests/res/single_load/template/00/o/00.json rename to tests/res/1/api/single_load/template/00/o/00.json diff --git a/tests/res/loads/template/10/o/10.json b/tests/res/1/api/single_load/template/00/o/10.json similarity index 100% rename from tests/res/loads/template/10/o/10.json rename to tests/res/1/api/single_load/template/00/o/10.json diff --git a/tests/res/loads/template/10/o/20.json b/tests/res/1/api/single_load/template/00/o/20.json similarity index 100% rename from tests/res/loads/template/10/o/20.json rename to tests/res/1/api/single_load/template/00/o/20.json diff --git a/tests/res/single_load/template/10/00.j2 b/tests/res/1/api/single_load/template/10/00.j2 similarity index 100% rename from tests/res/single_load/template/10/00.j2 rename to tests/res/1/api/single_load/template/10/00.j2 diff --git a/tests/res/single_load/template/10/10.j2 b/tests/res/1/api/single_load/template/10/10.j2 similarity index 100% rename from tests/res/single_load/template/10/10.j2 rename to tests/res/1/api/single_load/template/10/10.j2 diff --git a/tests/res/single_load/template/10/20.j2 b/tests/res/1/api/single_load/template/10/20.j2 similarity index 100% rename from tests/res/single_load/template/10/20.j2 rename to tests/res/1/api/single_load/template/10/20.j2 diff --git a/tests/res/single_load/template/10/30.j2 b/tests/res/1/api/single_load/template/10/30.j2 similarity index 100% rename from tests/res/single_load/template/10/30.j2 rename to tests/res/1/api/single_load/template/10/30.j2 diff --git a/tests/res/single_load/template/10/c/00.json b/tests/res/1/api/single_load/template/10/c/00.json similarity index 100% rename from tests/res/single_load/template/10/c/00.json rename to tests/res/1/api/single_load/template/10/c/00.json diff --git a/tests/res/single_load/template/10/c/10.json 
b/tests/res/1/api/single_load/template/10/c/10.json similarity index 100% rename from tests/res/single_load/template/10/c/10.json rename to tests/res/1/api/single_load/template/10/c/10.json diff --git a/tests/res/single_load/template/10/c/20.json b/tests/res/1/api/single_load/template/10/c/20.json similarity index 100% rename from tests/res/single_load/template/10/c/20.json rename to tests/res/1/api/single_load/template/10/c/20.json diff --git a/tests/res/single_load/template/10/c/30.json b/tests/res/1/api/single_load/template/10/c/30.json similarity index 100% rename from tests/res/single_load/template/10/c/30.json rename to tests/res/1/api/single_load/template/10/c/30.json diff --git a/tests/res/single_load/template/10/e/00.json b/tests/res/1/api/single_load/template/10/e/00.json similarity index 100% rename from tests/res/single_load/template/10/e/00.json rename to tests/res/1/api/single_load/template/10/e/00.json diff --git a/tests/res/single_load/template/10/e/10.json b/tests/res/1/api/single_load/template/10/e/10.json similarity index 100% rename from tests/res/single_load/template/10/e/10.json rename to tests/res/1/api/single_load/template/10/e/10.json diff --git a/tests/res/cli/multi_inputs/10/20.json b/tests/res/1/api/single_load/template/10/e/20.json similarity index 100% rename from tests/res/cli/multi_inputs/10/20.json rename to tests/res/1/api/single_load/template/10/e/20.json diff --git a/tests/res/parser/list/10/o/30.json b/tests/res/1/api/single_load/template/10/e/30.json similarity index 100% rename from tests/res/parser/list/10/o/30.json rename to tests/res/1/api/single_load/template/10/e/30.json diff --git a/tests/res/single_load/template/10/o/00.json b/tests/res/1/api/single_load/template/10/o/00.json similarity index 100% rename from tests/res/single_load/template/10/o/00.json rename to tests/res/1/api/single_load/template/10/o/00.json diff --git a/tests/res/parser/list/10/o/10.json b/tests/res/1/api/single_load/template/10/o/10.json similarity 
index 100% rename from tests/res/parser/list/10/o/10.json rename to tests/res/1/api/single_load/template/10/o/10.json diff --git a/tests/res/parser/list/10/o/20.json b/tests/res/1/api/single_load/template/10/o/20.json similarity index 100% rename from tests/res/parser/list/10/o/20.json rename to tests/res/1/api/single_load/template/10/o/20.json diff --git a/tests/res/single_load/template/10/e/30.json b/tests/res/1/api/single_load/template/10/o/30.json similarity index 100% rename from tests/res/single_load/template/10/e/30.json rename to tests/res/1/api/single_load/template/10/o/30.json diff --git a/tests/res/single_load/template/20/00.j2 b/tests/res/1/api/single_load/template/20/00.j2 similarity index 100% rename from tests/res/single_load/template/20/00.j2 rename to tests/res/1/api/single_load/template/20/00.j2 diff --git a/tests/res/single_load/template/20/10.j2 b/tests/res/1/api/single_load/template/20/10.j2 similarity index 100% rename from tests/res/single_load/template/20/10.j2 rename to tests/res/1/api/single_load/template/20/10.j2 diff --git a/tests/res/single_load/template/20/20.j2 b/tests/res/1/api/single_load/template/20/20.j2 similarity index 100% rename from tests/res/single_load/template/20/20.j2 rename to tests/res/1/api/single_load/template/20/20.j2 diff --git a/tests/res/single_load/template/20/c/00.json b/tests/res/1/api/single_load/template/20/c/00.json similarity index 100% rename from tests/res/single_load/template/20/c/00.json rename to tests/res/1/api/single_load/template/20/c/00.json diff --git a/tests/res/single_load/template/20/c/10.json b/tests/res/1/api/single_load/template/20/c/10.json similarity index 100% rename from tests/res/single_load/template/20/c/10.json rename to tests/res/1/api/single_load/template/20/c/10.json diff --git a/tests/res/single_load/template/20/c/20.json b/tests/res/1/api/single_load/template/20/c/20.json similarity index 100% rename from tests/res/single_load/template/20/c/20.json rename to 
tests/res/1/api/single_load/template/20/c/20.json diff --git a/tests/res/single_load/template/20/e b/tests/res/1/api/single_load/template/20/e similarity index 100% rename from tests/res/single_load/template/20/e rename to tests/res/1/api/single_load/template/20/e diff --git a/tests/res/single_load/template/20/o/00.json b/tests/res/1/api/single_load/template/20/o/00.json similarity index 100% rename from tests/res/single_load/template/20/o/00.json rename to tests/res/1/api/single_load/template/20/o/00.json diff --git a/tests/res/single_load/ac_parser/10/o/10.json b/tests/res/1/api/single_load/template/20/o/10.json similarity index 100% rename from tests/res/single_load/ac_parser/10/o/10.json rename to tests/res/1/api/single_load/template/20/o/10.json diff --git a/tests/res/single_load/ac_parser/10/o/20.json b/tests/res/1/api/single_load/template/20/o/20.json similarity index 100% rename from tests/res/single_load/ac_parser/10/o/20.json rename to tests/res/1/api/single_load/template/20/o/20.json diff --git a/tests/res/single_load/toml/10/00.toml b/tests/res/1/api/single_load/toml/10/00.toml similarity index 100% rename from tests/res/single_load/toml/10/00.toml rename to tests/res/1/api/single_load/toml/10/00.toml diff --git a/tests/res/single_load/toml/10/10.toml b/tests/res/1/api/single_load/toml/10/10.toml similarity index 100% rename from tests/res/single_load/toml/10/10.toml rename to tests/res/1/api/single_load/toml/10/10.toml diff --git a/tests/res/single_load/toml/10/20.toml b/tests/res/1/api/single_load/toml/10/20.toml similarity index 100% rename from tests/res/single_load/toml/10/20.toml rename to tests/res/1/api/single_load/toml/10/20.toml diff --git a/tests/res/single_load/basics/10/00.json b/tests/res/1/api/single_load/toml/10/e/00.json similarity index 100% rename from tests/res/single_load/basics/10/00.json rename to tests/res/1/api/single_load/toml/10/e/00.json diff --git a/tests/res/single_load/basics/10/10.json 
b/tests/res/1/api/single_load/toml/10/e/10.json similarity index 100% rename from tests/res/single_load/basics/10/10.json rename to tests/res/1/api/single_load/toml/10/e/10.json diff --git a/tests/res/single_load/toml/10/e/20.json b/tests/res/1/api/single_load/toml/10/e/20.json similarity index 100% rename from tests/res/single_load/toml/10/e/20.json rename to tests/res/1/api/single_load/toml/10/e/20.json diff --git a/tests/res/single_load/yaml/10/00.yml b/tests/res/1/api/single_load/yaml/10/00.yml similarity index 100% rename from tests/res/single_load/yaml/10/00.yml rename to tests/res/1/api/single_load/yaml/10/00.yml diff --git a/tests/res/single_load/yaml/10/10.yml b/tests/res/1/api/single_load/yaml/10/10.yml similarity index 100% rename from tests/res/single_load/yaml/10/10.yml rename to tests/res/1/api/single_load/yaml/10/10.yml diff --git a/tests/res/single_load/yaml/10/20.yml b/tests/res/1/api/single_load/yaml/10/20.yml similarity index 100% rename from tests/res/single_load/yaml/10/20.yml rename to tests/res/1/api/single_load/yaml/10/20.yml diff --git a/tests/res/single_load/toml/10/e/00.json b/tests/res/1/api/single_load/yaml/10/e/00.json similarity index 100% rename from tests/res/single_load/toml/10/e/00.json rename to tests/res/1/api/single_load/yaml/10/e/00.json diff --git a/tests/res/single_load/toml/10/e/10.json b/tests/res/1/api/single_load/yaml/10/e/10.json similarity index 100% rename from tests/res/single_load/toml/10/e/10.json rename to tests/res/1/api/single_load/yaml/10/e/10.json diff --git a/tests/res/single_load/yaml/10/e/20.json b/tests/res/1/api/single_load/yaml/10/e/20.json similarity index 100% rename from tests/res/single_load/yaml/10/e/20.json rename to tests/res/1/api/single_load/yaml/10/e/20.json diff --git a/tests/res/cli/basics/10/10.json b/tests/res/1/cli/basics/10/10.json similarity index 100% rename from tests/res/cli/basics/10/10.json rename to tests/res/1/cli/basics/10/10.json diff --git a/tests/res/cli/basics/10/e/10.json 
b/tests/res/1/cli/basics/10/e/10.json similarity index 100% rename from tests/res/cli/basics/10/e/10.json rename to tests/res/1/cli/basics/10/e/10.json diff --git a/tests/res/cli/basics/10/o/10.json b/tests/res/1/cli/basics/10/o/10.json similarity index 100% rename from tests/res/cli/basics/10/o/10.json rename to tests/res/1/cli/basics/10/o/10.json diff --git a/tests/res/cli/errors/10/10.json b/tests/res/1/cli/errors/10/10.json similarity index 100% rename from tests/res/cli/errors/10/10.json rename to tests/res/1/cli/errors/10/10.json diff --git a/tests/res/cli/errors/10/20.json b/tests/res/1/cli/errors/10/20.json similarity index 100% rename from tests/res/cli/errors/10/20.json rename to tests/res/1/cli/errors/10/20.json diff --git a/tests/res/cli/errors/10/30.json b/tests/res/1/cli/errors/10/30.json similarity index 100% rename from tests/res/cli/errors/10/30.json rename to tests/res/1/cli/errors/10/30.json diff --git a/tests/res/cli/errors/10/40.json b/tests/res/1/cli/errors/10/40.json similarity index 100% rename from tests/res/cli/errors/10/40.json rename to tests/res/1/cli/errors/10/40.json diff --git a/tests/res/cli/errors/10/50.json b/tests/res/1/cli/errors/10/50.json similarity index 100% rename from tests/res/cli/errors/10/50.json rename to tests/res/1/cli/errors/10/50.json diff --git a/tests/res/cli/errors/10/e/10.json b/tests/res/1/cli/errors/10/e/10.json similarity index 100% rename from tests/res/cli/errors/10/e/10.json rename to tests/res/1/cli/errors/10/e/10.json diff --git a/tests/res/cli/errors/10/e/20.json b/tests/res/1/cli/errors/10/e/20.json similarity index 100% rename from tests/res/cli/errors/10/e/20.json rename to tests/res/1/cli/errors/10/e/20.json diff --git a/tests/res/cli/errors/10/e/30.json b/tests/res/1/cli/errors/10/e/30.json similarity index 100% rename from tests/res/cli/errors/10/e/30.json rename to tests/res/1/cli/errors/10/e/30.json diff --git a/tests/res/cli/errors/10/e/40.json b/tests/res/1/cli/errors/10/e/40.json similarity 
index 100% rename from tests/res/cli/errors/10/e/40.json rename to tests/res/1/cli/errors/10/e/40.json diff --git a/tests/res/cli/errors/10/e/50.json b/tests/res/1/cli/errors/10/e/50.json similarity index 100% rename from tests/res/cli/errors/10/e/50.json rename to tests/res/1/cli/errors/10/e/50.json diff --git a/tests/res/cli/errors/10/o/10.json b/tests/res/1/cli/errors/10/o/10.json similarity index 100% rename from tests/res/cli/errors/10/o/10.json rename to tests/res/1/cli/errors/10/o/10.json diff --git a/tests/res/cli/errors/10/o/20.json b/tests/res/1/cli/errors/10/o/20.json similarity index 100% rename from tests/res/cli/errors/10/o/20.json rename to tests/res/1/cli/errors/10/o/20.json diff --git a/tests/res/cli/errors/10/o/30.json b/tests/res/1/cli/errors/10/o/30.json similarity index 100% rename from tests/res/cli/errors/10/o/30.json rename to tests/res/1/cli/errors/10/o/30.json diff --git a/tests/res/cli/errors/10/o/40.json b/tests/res/1/cli/errors/10/o/40.json similarity index 100% rename from tests/res/cli/errors/10/o/40.json rename to tests/res/1/cli/errors/10/o/40.json diff --git a/tests/res/cli/errors/10/o/50.json b/tests/res/1/cli/errors/10/o/50.json similarity index 100% rename from tests/res/cli/errors/10/o/50.json rename to tests/res/1/cli/errors/10/o/50.json diff --git a/tests/res/cli/extra_options/10/10.json b/tests/res/1/cli/extra_options/10/10.json similarity index 100% rename from tests/res/cli/extra_options/10/10.json rename to tests/res/1/cli/extra_options/10/10.json diff --git a/tests/res/cli/extra_options/10/e/10.json b/tests/res/1/cli/extra_options/10/e/10.json similarity index 100% rename from tests/res/cli/extra_options/10/e/10.json rename to tests/res/1/cli/extra_options/10/e/10.json diff --git a/tests/res/cli/extra_options/10/o/10.json b/tests/res/1/cli/extra_options/10/o/10.json similarity index 100% rename from tests/res/cli/extra_options/10/o/10.json rename to tests/res/1/cli/extra_options/10/o/10.json diff --git 
a/tests/res/cli/extra_options/10/on/10.json b/tests/res/1/cli/extra_options/10/on/10.json similarity index 100% rename from tests/res/cli/extra_options/10/on/10.json rename to tests/res/1/cli/extra_options/10/on/10.json diff --git a/tests/res/cli/extra_options/10/r/10.json b/tests/res/1/cli/extra_options/10/r/10.json similarity index 100% rename from tests/res/cli/extra_options/10/r/10.json rename to tests/res/1/cli/extra_options/10/r/10.json diff --git a/tests/res/1/cli/ignore_missing/10/10.json b/tests/res/1/cli/ignore_missing/10/10.json new file mode 100644 index 00000000..9e26dfee --- /dev/null +++ b/tests/res/1/cli/ignore_missing/10/10.json @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/tests/res/1/cli/ignore_missing/10/20.conf b/tests/res/1/cli/ignore_missing/10/20.conf new file mode 120000 index 00000000..5c3b763d --- /dev/null +++ b/tests/res/1/cli/ignore_missing/10/20.conf @@ -0,0 +1 @@ +../../single_input/10/20.conf \ No newline at end of file diff --git a/tests/res/1/cli/ignore_missing/10/e/10.json b/tests/res/1/cli/ignore_missing/10/e/10.json new file mode 100644 index 00000000..d72f0ca2 --- /dev/null +++ b/tests/res/1/cli/ignore_missing/10/e/10.json @@ -0,0 +1 @@ +{"exit_code_matches": true, "exit_code": 0} \ No newline at end of file diff --git a/tests/res/cli/schema/10/20.json b/tests/res/1/cli/ignore_missing/10/e/20.json similarity index 100% rename from tests/res/cli/schema/10/20.json rename to tests/res/1/cli/ignore_missing/10/e/20.json diff --git a/tests/res/cli/ignore_missing/10/o/10.json b/tests/res/1/cli/ignore_missing/10/o/10.json similarity index 100% rename from tests/res/cli/ignore_missing/10/o/10.json rename to tests/res/1/cli/ignore_missing/10/o/10.json diff --git a/tests/res/1/cli/ignore_missing/10/o/20.json b/tests/res/1/cli/ignore_missing/10/o/20.json new file mode 100644 index 00000000..567e420f --- /dev/null +++ b/tests/res/1/cli/ignore_missing/10/o/20.json @@ -0,0 +1 @@ +["-I", "json", "-O", "json", "--ignore-missing"] 
diff --git a/tests/res/cli/ignore_missing/10/on/10.json b/tests/res/1/cli/ignore_missing/10/on/10.json similarity index 100% rename from tests/res/cli/ignore_missing/10/on/10.json rename to tests/res/1/cli/ignore_missing/10/on/10.json diff --git a/tests/res/cli/single_input/10/e/20.json b/tests/res/1/cli/ignore_missing/10/on/20.json similarity index 100% rename from tests/res/cli/single_input/10/e/20.json rename to tests/res/1/cli/ignore_missing/10/on/20.json diff --git a/tests/res/cli/ignore_missing/10/e/10.json b/tests/res/1/cli/ignore_missing/10/r/10.json similarity index 100% rename from tests/res/cli/ignore_missing/10/e/10.json rename to tests/res/1/cli/ignore_missing/10/r/10.json diff --git a/tests/res/1/cli/ignore_missing/10/r/20.json b/tests/res/1/cli/ignore_missing/10/r/20.json new file mode 120000 index 00000000..4c08f7e2 --- /dev/null +++ b/tests/res/1/cli/ignore_missing/10/r/20.json @@ -0,0 +1 @@ +../20.conf \ No newline at end of file diff --git a/tests/res/cli/multi_inputs/10/10.json b/tests/res/1/cli/multi_inputs/10/10.json similarity index 100% rename from tests/res/cli/multi_inputs/10/10.json rename to tests/res/1/cli/multi_inputs/10/10.json diff --git a/tests/res/single_load/template/10/e/20.json b/tests/res/1/cli/multi_inputs/10/20.json similarity index 100% rename from tests/res/single_load/template/10/e/20.json rename to tests/res/1/cli/multi_inputs/10/20.json diff --git a/tests/res/cli/multi_inputs/10/30.json b/tests/res/1/cli/multi_inputs/10/30.json similarity index 100% rename from tests/res/cli/multi_inputs/10/30.json rename to tests/res/1/cli/multi_inputs/10/30.json diff --git a/tests/res/cli/ignore_missing/10/r/10.json b/tests/res/1/cli/multi_inputs/10/e/10.json similarity index 100% rename from tests/res/cli/ignore_missing/10/r/10.json rename to tests/res/1/cli/multi_inputs/10/e/10.json diff --git a/tests/res/cli/ignore_missing/10/10.json b/tests/res/1/cli/multi_inputs/10/o/10.json similarity index 100% rename from 
tests/res/cli/ignore_missing/10/10.json rename to tests/res/1/cli/multi_inputs/10/o/10.json diff --git a/tests/res/cli/multi_inputs/10/on/10.json b/tests/res/1/cli/multi_inputs/10/on/10.json similarity index 100% rename from tests/res/cli/multi_inputs/10/on/10.json rename to tests/res/1/cli/multi_inputs/10/on/10.json diff --git a/tests/res/cli/multi_inputs/10/r/10.json b/tests/res/1/cli/multi_inputs/10/r/10.json similarity index 100% rename from tests/res/cli/multi_inputs/10/r/10.json rename to tests/res/1/cli/multi_inputs/10/r/10.json diff --git a/tests/res/cli/multi_inputs/20/10.json b/tests/res/1/cli/multi_inputs/20/10.json similarity index 100% rename from tests/res/cli/multi_inputs/20/10.json rename to tests/res/1/cli/multi_inputs/20/10.json diff --git a/tests/res/cli/multi_inputs/20/20.xml b/tests/res/1/cli/multi_inputs/20/20.xml similarity index 100% rename from tests/res/cli/multi_inputs/20/20.xml rename to tests/res/1/cli/multi_inputs/20/20.xml diff --git a/tests/res/cli/multi_inputs/20/30.sh b/tests/res/1/cli/multi_inputs/20/30.sh similarity index 100% rename from tests/res/cli/multi_inputs/20/30.sh rename to tests/res/1/cli/multi_inputs/20/30.sh diff --git a/tests/res/cli/multi_inputs/10/e/10.json b/tests/res/1/cli/multi_inputs/20/e/10.json similarity index 100% rename from tests/res/cli/multi_inputs/10/e/10.json rename to tests/res/1/cli/multi_inputs/20/e/10.json diff --git a/tests/res/cli/multi_inputs/10/o/10.json b/tests/res/1/cli/multi_inputs/20/o/10.json similarity index 100% rename from tests/res/cli/multi_inputs/10/o/10.json rename to tests/res/1/cli/multi_inputs/20/o/10.json diff --git a/tests/res/cli/multi_inputs/20/on/10.json b/tests/res/1/cli/multi_inputs/20/on/10.json similarity index 100% rename from tests/res/cli/multi_inputs/20/on/10.json rename to tests/res/1/cli/multi_inputs/20/on/10.json diff --git a/tests/res/cli/multi_inputs/20/r/10.json b/tests/res/1/cli/multi_inputs/20/r/10.json similarity index 100% rename from 
tests/res/cli/multi_inputs/20/r/10.json rename to tests/res/1/cli/multi_inputs/20/r/10.json diff --git a/tests/res/cli/no_template/10/10.json b/tests/res/1/cli/no_template/10/10.json similarity index 100% rename from tests/res/cli/no_template/10/10.json rename to tests/res/1/cli/no_template/10/10.json diff --git a/tests/res/cli/no_template/10/o/10.json b/tests/res/1/cli/no_template/10/o/10.json similarity index 100% rename from tests/res/cli/no_template/10/o/10.json rename to tests/res/1/cli/no_template/10/o/10.json diff --git a/tests/res/cli/no_template/10/on/10.json b/tests/res/1/cli/no_template/10/on/10.json similarity index 100% rename from tests/res/cli/no_template/10/on/10.json rename to tests/res/1/cli/no_template/10/on/10.json diff --git a/tests/res/cli/no_template/10/r/10.json b/tests/res/1/cli/no_template/10/r/10.json similarity index 100% rename from tests/res/cli/no_template/10/r/10.json rename to tests/res/1/cli/no_template/10/r/10.json diff --git a/tests/res/cli/query/10/10.json b/tests/res/1/cli/query/10/10.json similarity index 100% rename from tests/res/cli/query/10/10.json rename to tests/res/1/cli/query/10/10.json diff --git a/tests/res/cli/multi_inputs/20/e/10.json b/tests/res/1/cli/query/10/e/10.json similarity index 100% rename from tests/res/cli/multi_inputs/20/e/10.json rename to tests/res/1/cli/query/10/e/10.json diff --git a/tests/res/cli/query/10/o/10.json b/tests/res/1/cli/query/10/o/10.json similarity index 100% rename from tests/res/cli/query/10/o/10.json rename to tests/res/1/cli/query/10/o/10.json diff --git a/tests/res/cli/query/10/on/10.json b/tests/res/1/cli/query/10/on/10.json similarity index 100% rename from tests/res/cli/query/10/on/10.json rename to tests/res/1/cli/query/10/on/10.json diff --git a/tests/res/cli/query/10/r/10.json b/tests/res/1/cli/query/10/r/10.json similarity index 100% rename from tests/res/cli/query/10/r/10.json rename to tests/res/1/cli/query/10/r/10.json diff --git a/tests/res/1/cli/schema/10/00.json 
b/tests/res/1/cli/schema/10/00.json new file mode 120000 index 00000000..688fedec --- /dev/null +++ b/tests/res/1/cli/schema/10/00.json @@ -0,0 +1 @@ +../../../api/single_load/schema/10/00.json \ No newline at end of file diff --git a/tests/res/1/cli/schema/10/10.json b/tests/res/1/cli/schema/10/10.json new file mode 120000 index 00000000..040d3dfb --- /dev/null +++ b/tests/res/1/cli/schema/10/10.json @@ -0,0 +1 @@ +../../../api/single_load/schema/10/10.json \ No newline at end of file diff --git a/tests/res/1/cli/schema/10/20.json b/tests/res/1/cli/schema/10/20.json new file mode 120000 index 00000000..8417604b --- /dev/null +++ b/tests/res/1/cli/schema/10/20.json @@ -0,0 +1 @@ +../../../api/single_load/schema/10/20.json \ No newline at end of file diff --git a/tests/res/cli/schema/10/e/10.json b/tests/res/1/cli/schema/10/e/00.json similarity index 100% rename from tests/res/cli/schema/10/e/10.json rename to tests/res/1/cli/schema/10/e/00.json diff --git a/tests/res/single_load/basics/30/o/10.json b/tests/res/1/cli/schema/10/e/10.json similarity index 100% rename from tests/res/single_load/basics/30/o/10.json rename to tests/res/1/cli/schema/10/e/10.json diff --git a/tests/res/single_load/basics/30/o/20.json b/tests/res/1/cli/schema/10/e/20.json similarity index 100% rename from tests/res/single_load/basics/30/o/20.json rename to tests/res/1/cli/schema/10/e/20.json diff --git a/tests/res/cli/schema_errors/10/o/10.json b/tests/res/1/cli/schema/10/o/00.json similarity index 100% rename from tests/res/cli/schema_errors/10/o/10.json rename to tests/res/1/cli/schema/10/o/00.json diff --git a/tests/res/single_load/template/00/o/10.json b/tests/res/1/cli/schema/10/o/10.json similarity index 100% rename from tests/res/single_load/template/00/o/10.json rename to tests/res/1/cli/schema/10/o/10.json diff --git a/tests/res/single_load/template/00/o/20.json b/tests/res/1/cli/schema/10/o/20.json similarity index 100% rename from tests/res/single_load/template/00/o/20.json 
rename to tests/res/1/cli/schema/10/o/20.json diff --git a/tests/res/cli/single_input/10/30.json b/tests/res/1/cli/schema/10/o/30.json similarity index 100% rename from tests/res/cli/single_input/10/30.json rename to tests/res/1/cli/schema/10/o/30.json diff --git a/tests/res/1/cli/schema/10/s b/tests/res/1/cli/schema/10/s new file mode 120000 index 00000000..ef553143 --- /dev/null +++ b/tests/res/1/cli/schema/10/s @@ -0,0 +1 @@ +../../../api/single_load/schema/10/s \ No newline at end of file diff --git a/tests/res/cli/multi_inputs/20/o/10.json b/tests/res/1/cli/schema_errors/10/10.json similarity index 100% rename from tests/res/cli/multi_inputs/20/o/10.json rename to tests/res/1/cli/schema_errors/10/10.json diff --git a/tests/res/cli/schema_errors/10/e/10.json b/tests/res/1/cli/schema_errors/10/e/10.json similarity index 100% rename from tests/res/cli/schema_errors/10/e/10.json rename to tests/res/1/cli/schema_errors/10/e/10.json diff --git a/tests/res/1/cli/schema_errors/10/o/10.json b/tests/res/1/cli/schema_errors/10/o/10.json new file mode 100644 index 00000000..99a359a0 --- /dev/null +++ b/tests/res/1/cli/schema_errors/10/o/10.json @@ -0,0 +1 @@ +["--validate"] diff --git a/tests/res/cli/show/10/10.json b/tests/res/1/cli/show/10/10.json similarity index 100% rename from tests/res/cli/show/10/10.json rename to tests/res/1/cli/show/10/10.json diff --git a/tests/res/cli/show/10/20.json b/tests/res/1/cli/show/10/20.json similarity index 100% rename from tests/res/cli/show/10/20.json rename to tests/res/1/cli/show/10/20.json diff --git a/tests/res/cli/show/10/30.json b/tests/res/1/cli/show/10/30.json similarity index 100% rename from tests/res/cli/show/10/30.json rename to tests/res/1/cli/show/10/30.json diff --git a/tests/res/cli/show/10/40.json b/tests/res/1/cli/show/10/40.json similarity index 100% rename from tests/res/cli/show/10/40.json rename to tests/res/1/cli/show/10/40.json diff --git a/tests/res/cli/show/10/e/10.json b/tests/res/1/cli/show/10/e/10.json 
similarity index 100% rename from tests/res/cli/show/10/e/10.json rename to tests/res/1/cli/show/10/e/10.json diff --git a/tests/res/cli/show/10/e/20.json b/tests/res/1/cli/show/10/e/20.json similarity index 100% rename from tests/res/cli/show/10/e/20.json rename to tests/res/1/cli/show/10/e/20.json diff --git a/tests/res/cli/show/10/e/30.json b/tests/res/1/cli/show/10/e/30.json similarity index 100% rename from tests/res/cli/show/10/e/30.json rename to tests/res/1/cli/show/10/e/30.json diff --git a/tests/res/cli/show/10/e/40.json b/tests/res/1/cli/show/10/e/40.json similarity index 100% rename from tests/res/cli/show/10/e/40.json rename to tests/res/1/cli/show/10/e/40.json diff --git a/tests/res/cli/show_version/10/o/10.json b/tests/res/1/cli/show/10/o/10.json similarity index 100% rename from tests/res/cli/show_version/10/o/10.json rename to tests/res/1/cli/show/10/o/10.json diff --git a/tests/res/single_load/basics/20/e/20.json b/tests/res/1/cli/show/10/o/20.json similarity index 100% rename from tests/res/single_load/basics/20/e/20.json rename to tests/res/1/cli/show/10/o/20.json diff --git a/tests/res/cli/show/10/o/30.json b/tests/res/1/cli/show/10/o/30.json similarity index 100% rename from tests/res/cli/show/10/o/30.json rename to tests/res/1/cli/show/10/o/30.json diff --git a/tests/res/cli/show/10/o/40.json b/tests/res/1/cli/show/10/o/40.json similarity index 100% rename from tests/res/cli/show/10/o/40.json rename to tests/res/1/cli/show/10/o/40.json diff --git a/tests/res/cli/show/20/10.json b/tests/res/1/cli/show/20/10.json similarity index 100% rename from tests/res/cli/show/20/10.json rename to tests/res/1/cli/show/20/10.json diff --git a/tests/res/cli/show/20/20.json b/tests/res/1/cli/show/20/20.json similarity index 100% rename from tests/res/cli/show/20/20.json rename to tests/res/1/cli/show/20/20.json diff --git a/tests/res/cli/show/20/e/10.json b/tests/res/1/cli/show/20/e/10.json similarity index 100% rename from tests/res/cli/show/20/e/10.json 
rename to tests/res/1/cli/show/20/e/10.json diff --git a/tests/res/cli/show/20/e/20.json b/tests/res/1/cli/show/20/e/20.json similarity index 100% rename from tests/res/cli/show/20/e/20.json rename to tests/res/1/cli/show/20/e/20.json diff --git a/tests/res/cli/single_input/10/r/10.json b/tests/res/1/cli/show/20/o/10.json similarity index 100% rename from tests/res/cli/single_input/10/r/10.json rename to tests/res/1/cli/show/20/o/10.json diff --git a/tests/res/single_load/basics/30/e/20.json b/tests/res/1/cli/show/20/o/20.json similarity index 100% rename from tests/res/single_load/basics/30/e/20.json rename to tests/res/1/cli/show/20/o/20.json diff --git a/tests/res/cli/show_version/10/10.json b/tests/res/1/cli/show_version/10/10.json similarity index 100% rename from tests/res/cli/show_version/10/10.json rename to tests/res/1/cli/show_version/10/10.json diff --git a/tests/res/cli/query/10/e/10.json b/tests/res/1/cli/show_version/10/e/10.json similarity index 100% rename from tests/res/cli/query/10/e/10.json rename to tests/res/1/cli/show_version/10/e/10.json diff --git a/tests/res/cli/single_input_to_yaml_output/10/r/10.json b/tests/res/1/cli/show_version/10/o/10.json similarity index 100% rename from tests/res/cli/single_input_to_yaml_output/10/r/10.json rename to tests/res/1/cli/show_version/10/o/10.json diff --git a/tests/res/cli/single_input/10/10.json b/tests/res/1/cli/single_input/10/10.json similarity index 100% rename from tests/res/cli/single_input/10/10.json rename to tests/res/1/cli/single_input/10/10.json diff --git a/tests/res/cli/single_input/10/20.conf b/tests/res/1/cli/single_input/10/20.conf similarity index 100% rename from tests/res/cli/single_input/10/20.conf rename to tests/res/1/cli/single_input/10/20.conf diff --git a/tests/res/cli/single_input/10/e/30.json b/tests/res/1/cli/single_input/10/30.json similarity index 100% rename from tests/res/cli/single_input/10/e/30.json rename to tests/res/1/cli/single_input/10/30.json diff --git 
a/tests/res/cli/show_version/10/e/10.json b/tests/res/1/cli/single_input/10/e/10.json similarity index 100% rename from tests/res/cli/show_version/10/e/10.json rename to tests/res/1/cli/single_input/10/e/10.json diff --git a/tests/res/cli/single_input/10/on/20.json b/tests/res/1/cli/single_input/10/e/20.json similarity index 100% rename from tests/res/cli/single_input/10/on/20.json rename to tests/res/1/cli/single_input/10/e/20.json diff --git a/tests/res/cli/single_input/10/r/30.json b/tests/res/1/cli/single_input/10/e/30.json similarity index 100% rename from tests/res/cli/single_input/10/r/30.json rename to tests/res/1/cli/single_input/10/e/30.json diff --git a/tests/res/cli/schema_errors/10/10.json b/tests/res/1/cli/single_input/10/o/10.json similarity index 100% rename from tests/res/cli/schema_errors/10/10.json rename to tests/res/1/cli/single_input/10/o/10.json diff --git a/tests/res/cli/single_input/10/o/20.json b/tests/res/1/cli/single_input/10/o/20.json similarity index 100% rename from tests/res/cli/single_input/10/o/20.json rename to tests/res/1/cli/single_input/10/o/20.json diff --git a/tests/res/cli/single_input/10/o/30.json b/tests/res/1/cli/single_input/10/o/30.json similarity index 100% rename from tests/res/cli/single_input/10/o/30.json rename to tests/res/1/cli/single_input/10/o/30.json diff --git a/tests/res/cli/single_input/10/on/10.json b/tests/res/1/cli/single_input/10/on/10.json similarity index 100% rename from tests/res/cli/single_input/10/on/10.json rename to tests/res/1/cli/single_input/10/on/10.json diff --git a/tests/res/cli/single_input/10/oo/20.json b/tests/res/1/cli/single_input/10/on/20.json similarity index 100% rename from tests/res/cli/single_input/10/oo/20.json rename to tests/res/1/cli/single_input/10/on/20.json diff --git a/tests/res/cli/single_input/10/on/30.json b/tests/res/1/cli/single_input/10/on/30.json similarity index 100% rename from tests/res/cli/single_input/10/on/30.json rename to 
tests/res/1/cli/single_input/10/on/30.json diff --git a/tests/res/cli/single_input/10/e/10.json b/tests/res/1/cli/single_input/10/oo/10.json similarity index 100% rename from tests/res/cli/single_input/10/e/10.json rename to tests/res/1/cli/single_input/10/oo/10.json diff --git a/tests/res/cli/single_input/10/r/20.json b/tests/res/1/cli/single_input/10/oo/20.json similarity index 100% rename from tests/res/cli/single_input/10/r/20.json rename to tests/res/1/cli/single_input/10/oo/20.json diff --git a/tests/res/cli/single_input/10/oo/30.json b/tests/res/1/cli/single_input/10/oo/30.json similarity index 100% rename from tests/res/cli/single_input/10/oo/30.json rename to tests/res/1/cli/single_input/10/oo/30.json diff --git a/tests/res/dicts/get/10/e/10.json b/tests/res/1/cli/single_input/10/r/10.json similarity index 100% rename from tests/res/dicts/get/10/e/10.json rename to tests/res/1/cli/single_input/10/r/10.json diff --git a/tests/res/cli/single_input/20/20.json b/tests/res/1/cli/single_input/10/r/20.json similarity index 100% rename from tests/res/cli/single_input/20/20.json rename to tests/res/1/cli/single_input/10/r/20.json diff --git a/tests/res/cli/single_input/20/30.json b/tests/res/1/cli/single_input/10/r/30.json similarity index 100% rename from tests/res/cli/single_input/20/30.json rename to tests/res/1/cli/single_input/10/r/30.json diff --git a/tests/res/cli/single_input/40/10.json b/tests/res/1/cli/single_input/20/10.json similarity index 100% rename from tests/res/cli/single_input/40/10.json rename to tests/res/1/cli/single_input/20/10.json diff --git a/tests/res/cli/single_input/20/e/20.json b/tests/res/1/cli/single_input/20/20.json similarity index 100% rename from tests/res/cli/single_input/20/e/20.json rename to tests/res/1/cli/single_input/20/20.json diff --git a/tests/res/cli/single_input/20/on/30.json b/tests/res/1/cli/single_input/20/30.json similarity index 100% rename from tests/res/cli/single_input/20/on/30.json rename to 
tests/res/1/cli/single_input/20/30.json diff --git a/tests/res/cli/single_input/10/oo/10.json b/tests/res/1/cli/single_input/20/e/10.json similarity index 100% rename from tests/res/cli/single_input/10/oo/10.json rename to tests/res/1/cli/single_input/20/e/10.json diff --git a/tests/res/cli/single_input/20/on/20.json b/tests/res/1/cli/single_input/20/e/20.json similarity index 100% rename from tests/res/cli/single_input/20/on/20.json rename to tests/res/1/cli/single_input/20/e/20.json diff --git a/tests/res/cli/single_input/20/e/30.json b/tests/res/1/cli/single_input/20/e/30.json similarity index 100% rename from tests/res/cli/single_input/20/e/30.json rename to tests/res/1/cli/single_input/20/e/30.json diff --git a/tests/res/cli/single_input/20/o/10.json b/tests/res/1/cli/single_input/20/o/10.json similarity index 100% rename from tests/res/cli/single_input/20/o/10.json rename to tests/res/1/cli/single_input/20/o/10.json diff --git a/tests/res/cli/single_input/20/o/20.json b/tests/res/1/cli/single_input/20/o/20.json similarity index 100% rename from tests/res/cli/single_input/20/o/20.json rename to tests/res/1/cli/single_input/20/o/20.json diff --git a/tests/res/cli/single_input/20/o/30.json b/tests/res/1/cli/single_input/20/o/30.json similarity index 100% rename from tests/res/cli/single_input/20/o/30.json rename to tests/res/1/cli/single_input/20/o/30.json diff --git a/tests/res/cli/single_input/20/on/10.json b/tests/res/1/cli/single_input/20/on/10.json similarity index 100% rename from tests/res/cli/single_input/20/on/10.json rename to tests/res/1/cli/single_input/20/on/10.json diff --git a/tests/res/cli/single_input/30/20.json b/tests/res/1/cli/single_input/20/on/20.json similarity index 100% rename from tests/res/cli/single_input/30/20.json rename to tests/res/1/cli/single_input/20/on/20.json diff --git a/tests/res/cli/single_input_to_yaml_output/10/e/30.json b/tests/res/1/cli/single_input/20/on/30.json similarity index 100% rename from 
tests/res/cli/single_input_to_yaml_output/10/e/30.json rename to tests/res/1/cli/single_input/20/on/30.json diff --git a/tests/res/single_load/primitives/10/10.json b/tests/res/1/cli/single_input/20/r/10.json similarity index 100% rename from tests/res/single_load/primitives/10/10.json rename to tests/res/1/cli/single_input/20/r/10.json diff --git a/tests/res/json/primitives/e/40.txt b/tests/res/1/cli/single_input/20/r/20.json similarity index 100% rename from tests/res/json/primitives/e/40.txt rename to tests/res/1/cli/single_input/20/r/20.json diff --git a/tests/res/dicts/merge/20/10.json b/tests/res/1/cli/single_input/30/10.json similarity index 100% rename from tests/res/dicts/merge/20/10.json rename to tests/res/1/cli/single_input/30/10.json diff --git a/tests/res/cli/single_input/30/e/20.json b/tests/res/1/cli/single_input/30/20.json similarity index 100% rename from tests/res/cli/single_input/30/e/20.json rename to tests/res/1/cli/single_input/30/20.json diff --git a/tests/res/cli/single_input/20/e/10.json b/tests/res/1/cli/single_input/30/e/10.json similarity index 100% rename from tests/res/cli/single_input/20/e/10.json rename to tests/res/1/cli/single_input/30/e/10.json diff --git a/tests/res/cli/single_input/30/on/20.json b/tests/res/1/cli/single_input/30/e/20.json similarity index 100% rename from tests/res/cli/single_input/30/on/20.json rename to tests/res/1/cli/single_input/30/e/20.json diff --git a/tests/res/cli/single_input/30/o/10.json b/tests/res/1/cli/single_input/30/o/10.json similarity index 100% rename from tests/res/cli/single_input/30/o/10.json rename to tests/res/1/cli/single_input/30/o/10.json diff --git a/tests/res/cli/single_input/30/o/20.json b/tests/res/1/cli/single_input/30/o/20.json similarity index 100% rename from tests/res/cli/single_input/30/o/20.json rename to tests/res/1/cli/single_input/30/o/20.json diff --git a/tests/res/cli/single_input/30/on/10.json b/tests/res/1/cli/single_input/30/on/10.json similarity index 100% rename 
from tests/res/cli/single_input/30/on/10.json rename to tests/res/1/cli/single_input/30/on/10.json diff --git a/tests/res/cli/single_input_to_yaml_output/10/20.json b/tests/res/1/cli/single_input/30/on/20.json similarity index 100% rename from tests/res/cli/single_input_to_yaml_output/10/20.json rename to tests/res/1/cli/single_input/30/on/20.json diff --git a/tests/res/cli/single_input/30/r/10.json b/tests/res/1/cli/single_input/30/r/10.json similarity index 100% rename from tests/res/cli/single_input/30/r/10.json rename to tests/res/1/cli/single_input/30/r/10.json diff --git a/tests/res/cli/single_input/30/r/20.json b/tests/res/1/cli/single_input/30/r/20.json similarity index 100% rename from tests/res/cli/single_input/30/r/20.json rename to tests/res/1/cli/single_input/30/r/20.json diff --git a/tests/res/dicts/merge/30/10.json b/tests/res/1/cli/single_input/40/10.json similarity index 100% rename from tests/res/dicts/merge/30/10.json rename to tests/res/1/cli/single_input/40/10.json diff --git a/tests/res/cli/single_input/30/e/10.json b/tests/res/1/cli/single_input/40/e/10.json similarity index 100% rename from tests/res/cli/single_input/30/e/10.json rename to tests/res/1/cli/single_input/40/e/10.json diff --git a/tests/res/cli/single_input/40/o/10.json b/tests/res/1/cli/single_input/40/o/10.json similarity index 100% rename from tests/res/cli/single_input/40/o/10.json rename to tests/res/1/cli/single_input/40/o/10.json diff --git a/tests/res/cli/single_input/40/on/10.json b/tests/res/1/cli/single_input/40/on/10.json similarity index 100% rename from tests/res/cli/single_input/40/on/10.json rename to tests/res/1/cli/single_input/40/on/10.json diff --git a/tests/res/cli/single_input/40/r/10.json b/tests/res/1/cli/single_input/40/r/10.json similarity index 100% rename from tests/res/cli/single_input/40/r/10.json rename to tests/res/1/cli/single_input/40/r/10.json diff --git a/tests/res/cli/schema/10/10.json b/tests/res/1/cli/single_input/50/10.json similarity 
index 100% rename from tests/res/cli/schema/10/10.json rename to tests/res/1/cli/single_input/50/10.json diff --git a/tests/res/cli/single_input/40/e/10.json b/tests/res/1/cli/single_input/50/e/10.json similarity index 100% rename from tests/res/cli/single_input/40/e/10.json rename to tests/res/1/cli/single_input/50/e/10.json diff --git a/tests/res/cli/single_input/50/o/10.json b/tests/res/1/cli/single_input/50/o/10.json similarity index 100% rename from tests/res/cli/single_input/50/o/10.json rename to tests/res/1/cli/single_input/50/o/10.json diff --git a/tests/res/cli/single_input/50/on/10.json b/tests/res/1/cli/single_input/50/on/10.json similarity index 100% rename from tests/res/cli/single_input/50/on/10.json rename to tests/res/1/cli/single_input/50/on/10.json diff --git a/tests/res/cli/schema/10/s/10.json b/tests/res/1/cli/single_input/50/r/10.json similarity index 100% rename from tests/res/cli/schema/10/s/10.json rename to tests/res/1/cli/single_input/50/r/10.json diff --git a/tests/res/cli/single_input/50/10.json b/tests/res/1/cli/single_input_to_yaml_output/10/10.json similarity index 100% rename from tests/res/cli/single_input/50/10.json rename to tests/res/1/cli/single_input_to_yaml_output/10/10.json diff --git a/tests/res/cli/single_input_to_yaml_output/10/e/20.json b/tests/res/1/cli/single_input_to_yaml_output/10/20.json similarity index 100% rename from tests/res/cli/single_input_to_yaml_output/10/e/20.json rename to tests/res/1/cli/single_input_to_yaml_output/10/20.json diff --git a/tests/res/cli/single_input_to_yaml_output/10/30.yml b/tests/res/1/cli/single_input_to_yaml_output/10/30.yml similarity index 100% rename from tests/res/cli/single_input_to_yaml_output/10/30.yml rename to tests/res/1/cli/single_input_to_yaml_output/10/30.yml diff --git a/tests/res/cli/single_input_to_yaml_output/10/40.yml b/tests/res/1/cli/single_input_to_yaml_output/10/40.yml similarity index 100% rename from tests/res/cli/single_input_to_yaml_output/10/40.yml rename 
to tests/res/1/cli/single_input_to_yaml_output/10/40.yml diff --git a/tests/res/cli/single_input/50/e/10.json b/tests/res/1/cli/single_input_to_yaml_output/10/e/10.json similarity index 100% rename from tests/res/cli/single_input/50/e/10.json rename to tests/res/1/cli/single_input_to_yaml_output/10/e/10.json diff --git a/tests/res/cli/single_input_to_yaml_output/10/on/20.json b/tests/res/1/cli/single_input_to_yaml_output/10/e/20.json similarity index 100% rename from tests/res/cli/single_input_to_yaml_output/10/on/20.json rename to tests/res/1/cli/single_input_to_yaml_output/10/e/20.json diff --git a/tests/res/cli/single_input_to_yaml_output/10/o/30.json b/tests/res/1/cli/single_input_to_yaml_output/10/e/30.json similarity index 100% rename from tests/res/cli/single_input_to_yaml_output/10/o/30.json rename to tests/res/1/cli/single_input_to_yaml_output/10/e/30.json diff --git a/tests/res/cli/single_input_to_yaml_output/10/e/40.json b/tests/res/1/cli/single_input_to_yaml_output/10/e/40.json similarity index 100% rename from tests/res/cli/single_input_to_yaml_output/10/e/40.json rename to tests/res/1/cli/single_input_to_yaml_output/10/e/40.json diff --git a/tests/res/cli/single_input/10/o/10.json b/tests/res/1/cli/single_input_to_yaml_output/10/o/10.json similarity index 100% rename from tests/res/cli/single_input/10/o/10.json rename to tests/res/1/cli/single_input_to_yaml_output/10/o/10.json diff --git a/tests/res/cli/single_input_to_yaml_output/10/o/20.json b/tests/res/1/cli/single_input_to_yaml_output/10/o/20.json similarity index 100% rename from tests/res/cli/single_input_to_yaml_output/10/o/20.json rename to tests/res/1/cli/single_input_to_yaml_output/10/o/20.json diff --git a/tests/res/dicts/get/30/30.json b/tests/res/1/cli/single_input_to_yaml_output/10/o/30.json similarity index 100% rename from tests/res/dicts/get/30/30.json rename to tests/res/1/cli/single_input_to_yaml_output/10/o/30.json diff --git a/tests/res/cli/single_input_to_yaml_output/10/o/40.json 
b/tests/res/1/cli/single_input_to_yaml_output/10/o/40.json similarity index 100% rename from tests/res/cli/single_input_to_yaml_output/10/o/40.json rename to tests/res/1/cli/single_input_to_yaml_output/10/o/40.json diff --git a/tests/res/cli/single_input_to_yaml_output/10/on/10.json b/tests/res/1/cli/single_input_to_yaml_output/10/on/10.json similarity index 100% rename from tests/res/cli/single_input_to_yaml_output/10/on/10.json rename to tests/res/1/cli/single_input_to_yaml_output/10/on/10.json diff --git a/tests/res/cli/single_input_to_yaml_output/10/oo/20.json b/tests/res/1/cli/single_input_to_yaml_output/10/on/20.json similarity index 100% rename from tests/res/cli/single_input_to_yaml_output/10/oo/20.json rename to tests/res/1/cli/single_input_to_yaml_output/10/on/20.json diff --git a/tests/res/cli/single_input_to_yaml_output/10/on/30.json b/tests/res/1/cli/single_input_to_yaml_output/10/on/30.json similarity index 100% rename from tests/res/cli/single_input_to_yaml_output/10/on/30.json rename to tests/res/1/cli/single_input_to_yaml_output/10/on/30.json diff --git a/tests/res/cli/single_input_to_yaml_output/10/on/40.json b/tests/res/1/cli/single_input_to_yaml_output/10/on/40.json similarity index 100% rename from tests/res/cli/single_input_to_yaml_output/10/on/40.json rename to tests/res/1/cli/single_input_to_yaml_output/10/on/40.json diff --git a/tests/res/cli/single_input_to_yaml_output/10/oo/10.json b/tests/res/1/cli/single_input_to_yaml_output/10/oo/10.json similarity index 100% rename from tests/res/cli/single_input_to_yaml_output/10/oo/10.json rename to tests/res/1/cli/single_input_to_yaml_output/10/oo/10.json diff --git a/tests/res/cli/single_input_to_yaml_output/10/r/20.json b/tests/res/1/cli/single_input_to_yaml_output/10/oo/20.json similarity index 100% rename from tests/res/cli/single_input_to_yaml_output/10/r/20.json rename to tests/res/1/cli/single_input_to_yaml_output/10/oo/20.json diff --git 
a/tests/res/cli/single_input_to_yaml_output/10/oo/30.json b/tests/res/1/cli/single_input_to_yaml_output/10/oo/30.json similarity index 100% rename from tests/res/cli/single_input_to_yaml_output/10/oo/30.json rename to tests/res/1/cli/single_input_to_yaml_output/10/oo/30.json diff --git a/tests/res/cli/single_input_to_yaml_output/10/oo/40.json b/tests/res/1/cli/single_input_to_yaml_output/10/oo/40.json similarity index 100% rename from tests/res/cli/single_input_to_yaml_output/10/oo/40.json rename to tests/res/1/cli/single_input_to_yaml_output/10/oo/40.json diff --git a/tests/res/single_load/basics/20/e/10.json b/tests/res/1/cli/single_input_to_yaml_output/10/r/10.json similarity index 100% rename from tests/res/single_load/basics/20/e/10.json rename to tests/res/1/cli/single_input_to_yaml_output/10/r/10.json diff --git a/tests/res/cli/template/10/e/20.json b/tests/res/1/cli/single_input_to_yaml_output/10/r/20.json similarity index 100% rename from tests/res/cli/template/10/e/20.json rename to tests/res/1/cli/single_input_to_yaml_output/10/r/20.json diff --git a/tests/res/cli/single_input_to_yaml_output/10/r/30.json b/tests/res/1/cli/single_input_to_yaml_output/10/r/30.json similarity index 100% rename from tests/res/cli/single_input_to_yaml_output/10/r/30.json rename to tests/res/1/cli/single_input_to_yaml_output/10/r/30.json diff --git a/tests/res/cli/single_input_to_yaml_output/10/r/40.json b/tests/res/1/cli/single_input_to_yaml_output/10/r/40.json similarity index 100% rename from tests/res/cli/single_input_to_yaml_output/10/r/40.json rename to tests/res/1/cli/single_input_to_yaml_output/10/r/40.json diff --git a/tests/res/cli/single_input_to_yaml_output/10/10.json b/tests/res/1/cli/template/10/10.json similarity index 100% rename from tests/res/cli/single_input_to_yaml_output/10/10.json rename to tests/res/1/cli/template/10/10.json diff --git a/tests/res/cli/template/10/20.json b/tests/res/1/cli/template/10/20.json similarity index 100% rename from 
tests/res/cli/template/10/20.json rename to tests/res/1/cli/template/10/20.json diff --git a/tests/res/cli/single_input_to_yaml_output/10/e/10.json b/tests/res/1/cli/template/10/e/10.json similarity index 100% rename from tests/res/cli/single_input_to_yaml_output/10/e/10.json rename to tests/res/1/cli/template/10/e/10.json diff --git a/tests/res/cli/template/10/o/20.json b/tests/res/1/cli/template/10/e/20.json similarity index 100% rename from tests/res/cli/template/10/o/20.json rename to tests/res/1/cli/template/10/e/20.json diff --git a/tests/res/cli/template/10/o/10.json b/tests/res/1/cli/template/10/o/10.json similarity index 100% rename from tests/res/cli/template/10/o/10.json rename to tests/res/1/cli/template/10/o/10.json diff --git a/tests/res/cli/template/10/on/20.json b/tests/res/1/cli/template/10/o/20.json similarity index 100% rename from tests/res/cli/template/10/on/20.json rename to tests/res/1/cli/template/10/o/20.json diff --git a/tests/res/cli/template/10/on/10.json b/tests/res/1/cli/template/10/on/10.json similarity index 100% rename from tests/res/cli/template/10/on/10.json rename to tests/res/1/cli/template/10/on/10.json diff --git a/tests/res/dicts/get/10/20.json b/tests/res/1/cli/template/10/on/20.json similarity index 100% rename from tests/res/dicts/get/10/20.json rename to tests/res/1/cli/template/10/on/20.json diff --git a/tests/res/cli/template/10/r/10.json b/tests/res/1/cli/template/10/r/10.json similarity index 100% rename from tests/res/cli/template/10/r/10.json rename to tests/res/1/cli/template/10/r/10.json diff --git a/tests/res/cli/template/10/r/20.json b/tests/res/1/cli/template/10/r/20.json similarity index 100% rename from tests/res/cli/template/10/r/20.json rename to tests/res/1/cli/template/10/r/20.json diff --git a/tests/res/1/common/tdc/10/100_null.json b/tests/res/1/common/tdc/10/100_null.json new file mode 120000 index 00000000..acf38797 --- /dev/null +++ b/tests/res/1/common/tdc/10/100_null.json @@ -0,0 +1 @@ 
+../../../loaders/json.stdlib/10/100_null.json \ No newline at end of file diff --git a/tests/res/1/common/tdc/10/e/100_null.json.py b/tests/res/1/common/tdc/10/e/100_null.json.py new file mode 120000 index 00000000..953fe6df --- /dev/null +++ b/tests/res/1/common/tdc/10/e/100_null.json.py @@ -0,0 +1 @@ +../../../../loaders/json.stdlib/10/e/100_null.json.py \ No newline at end of file diff --git a/tests/res/1/common/tdc/20/220_a_list.json b/tests/res/1/common/tdc/20/220_a_list.json new file mode 120000 index 00000000..5c977d1a --- /dev/null +++ b/tests/res/1/common/tdc/20/220_a_list.json @@ -0,0 +1 @@ +../../../loaders/json.stdlib/20/220_a_list.json \ No newline at end of file diff --git a/tests/res/1/common/tdc/20/e/220_a_list.json.py b/tests/res/1/common/tdc/20/e/220_a_list.json.py new file mode 120000 index 00000000..f1ec22b1 --- /dev/null +++ b/tests/res/1/common/tdc/20/e/220_a_list.json.py @@ -0,0 +1 @@ +../../../../loaders/json.stdlib/20/e/220_a_list.json.py \ No newline at end of file diff --git a/tests/res/1/common/tdc/20/o/220_a_list.json.json b/tests/res/1/common/tdc/20/o/220_a_list.json.json new file mode 120000 index 00000000..3a329717 --- /dev/null +++ b/tests/res/1/common/tdc/20/o/220_a_list.json.json @@ -0,0 +1 @@ +../../../../loaders/json.stdlib/20/o/220_a_list.json.json \ No newline at end of file diff --git a/tests/res/dicts/get/10/10.json b/tests/res/1/dicts/get/10/10.json similarity index 100% rename from tests/res/dicts/get/10/10.json rename to tests/res/1/dicts/get/10/10.json diff --git a/tests/res/dicts/get/30/20.json b/tests/res/1/dicts/get/10/20.json similarity index 100% rename from tests/res/dicts/get/30/20.json rename to tests/res/1/dicts/get/10/20.json diff --git a/tests/res/single_load/basics/30/e/10.json b/tests/res/1/dicts/get/10/e/10.json similarity index 100% rename from tests/res/single_load/basics/30/e/10.json rename to tests/res/1/dicts/get/10/e/10.json diff --git a/tests/res/single_load/query/10/e/00_00.json 
b/tests/res/1/dicts/get/10/e/20.json similarity index 100% rename from tests/res/single_load/query/10/e/00_00.json rename to tests/res/1/dicts/get/10/e/20.json diff --git a/tests/res/dicts/get/10/q/10.py b/tests/res/1/dicts/get/10/q/10.py similarity index 100% rename from tests/res/dicts/get/10/q/10.py rename to tests/res/1/dicts/get/10/q/10.py diff --git a/tests/res/dicts/get/10/q/20.py b/tests/res/1/dicts/get/10/q/20.py similarity index 100% rename from tests/res/dicts/get/10/q/20.py rename to tests/res/1/dicts/get/10/q/20.py diff --git a/tests/res/dicts/get/10/s/10.py b/tests/res/1/dicts/get/10/s/10.py similarity index 100% rename from tests/res/dicts/get/10/s/10.py rename to tests/res/1/dicts/get/10/s/10.py diff --git a/tests/res/dicts/get/10/s/20.py b/tests/res/1/dicts/get/10/s/20.py similarity index 100% rename from tests/res/dicts/get/10/s/20.py rename to tests/res/1/dicts/get/10/s/20.py diff --git a/tests/res/dicts/get/20/00.json b/tests/res/1/dicts/get/20/00.json similarity index 100% rename from tests/res/dicts/get/20/00.json rename to tests/res/1/dicts/get/20/00.json diff --git a/tests/res/single_load/template/10/o/10.json b/tests/res/1/dicts/get/20/10.json similarity index 100% rename from tests/res/single_load/template/10/o/10.json rename to tests/res/1/dicts/get/20/10.json diff --git a/tests/res/dicts/get/20/100.json b/tests/res/1/dicts/get/20/100.json similarity index 100% rename from tests/res/dicts/get/20/100.json rename to tests/res/1/dicts/get/20/100.json diff --git a/tests/res/dicts/get/20/110.json b/tests/res/1/dicts/get/20/110.json similarity index 100% rename from tests/res/dicts/get/20/110.json rename to tests/res/1/dicts/get/20/110.json diff --git a/tests/res/single_load/template/10/o/20.json b/tests/res/1/dicts/get/20/20.json similarity index 100% rename from tests/res/single_load/template/10/o/20.json rename to tests/res/1/dicts/get/20/20.json diff --git a/tests/res/single_load/template/10/o/30.json b/tests/res/1/dicts/get/20/30.json 
similarity index 100% rename from tests/res/single_load/template/10/o/30.json rename to tests/res/1/dicts/get/20/30.json diff --git a/tests/res/dicts/get/20/40.json b/tests/res/1/dicts/get/20/40.json similarity index 100% rename from tests/res/dicts/get/20/40.json rename to tests/res/1/dicts/get/20/40.json diff --git a/tests/res/dicts/get/20/50.json b/tests/res/1/dicts/get/20/50.json similarity index 100% rename from tests/res/dicts/get/20/50.json rename to tests/res/1/dicts/get/20/50.json diff --git a/tests/res/dicts/get/20/60.json b/tests/res/1/dicts/get/20/60.json similarity index 100% rename from tests/res/dicts/get/20/60.json rename to tests/res/1/dicts/get/20/60.json diff --git a/tests/res/dicts/get/20/70.json b/tests/res/1/dicts/get/20/70.json similarity index 100% rename from tests/res/dicts/get/20/70.json rename to tests/res/1/dicts/get/20/70.json diff --git a/tests/res/dicts/get/20/80.json b/tests/res/1/dicts/get/20/80.json similarity index 100% rename from tests/res/dicts/get/20/80.json rename to tests/res/1/dicts/get/20/80.json diff --git a/tests/res/dicts/get/20/90.json b/tests/res/1/dicts/get/20/90.json similarity index 100% rename from tests/res/dicts/get/20/90.json rename to tests/res/1/dicts/get/20/90.json diff --git a/tests/res/single_load/basics/30/e/00.json b/tests/res/1/dicts/get/20/e/00.json similarity index 100% rename from tests/res/single_load/basics/30/e/00.json rename to tests/res/1/dicts/get/20/e/00.json diff --git a/tests/res/dicts/get/20/e/10.json b/tests/res/1/dicts/get/20/e/10.json similarity index 100% rename from tests/res/dicts/get/20/e/10.json rename to tests/res/1/dicts/get/20/e/10.json diff --git a/tests/res/dicts/get/20/e/100.py b/tests/res/1/dicts/get/20/e/100.py similarity index 100% rename from tests/res/dicts/get/20/e/100.py rename to tests/res/1/dicts/get/20/e/100.py diff --git a/tests/res/dicts/get/20/e/110.py b/tests/res/1/dicts/get/20/e/110.py similarity index 100% rename from tests/res/dicts/get/20/e/110.py rename to 
tests/res/1/dicts/get/20/e/110.py diff --git a/tests/res/dicts/get/20/e/20.py b/tests/res/1/dicts/get/20/e/20.py similarity index 100% rename from tests/res/dicts/get/20/e/20.py rename to tests/res/1/dicts/get/20/e/20.py diff --git a/tests/res/dicts/get/20/e/30.py b/tests/res/1/dicts/get/20/e/30.py similarity index 100% rename from tests/res/dicts/get/20/e/30.py rename to tests/res/1/dicts/get/20/e/30.py diff --git a/tests/res/json/primitives/e/10.txt b/tests/res/1/dicts/get/20/e/40.py similarity index 100% rename from tests/res/json/primitives/e/10.txt rename to tests/res/1/dicts/get/20/e/40.py diff --git a/tests/res/dicts/get/20/e/50.py b/tests/res/1/dicts/get/20/e/50.py similarity index 100% rename from tests/res/dicts/get/20/e/50.py rename to tests/res/1/dicts/get/20/e/50.py diff --git a/tests/res/dicts/get/20/e/60.py b/tests/res/1/dicts/get/20/e/60.py similarity index 100% rename from tests/res/dicts/get/20/e/60.py rename to tests/res/1/dicts/get/20/e/60.py diff --git a/tests/res/dicts/get/20/e/70.py b/tests/res/1/dicts/get/20/e/70.py similarity index 100% rename from tests/res/dicts/get/20/e/70.py rename to tests/res/1/dicts/get/20/e/70.py diff --git a/tests/res/dicts/get/20/e/80.py b/tests/res/1/dicts/get/20/e/80.py similarity index 100% rename from tests/res/dicts/get/20/e/80.py rename to tests/res/1/dicts/get/20/e/80.py diff --git a/tests/res/dicts/get/20/e/90.py b/tests/res/1/dicts/get/20/e/90.py similarity index 100% rename from tests/res/dicts/get/20/e/90.py rename to tests/res/1/dicts/get/20/e/90.py diff --git a/tests/res/dicts/get/20/q/00.py b/tests/res/1/dicts/get/20/q/00.py similarity index 100% rename from tests/res/dicts/get/20/q/00.py rename to tests/res/1/dicts/get/20/q/00.py diff --git a/tests/res/dicts/get/20/q/10.py b/tests/res/1/dicts/get/20/q/10.py similarity index 100% rename from tests/res/dicts/get/20/q/10.py rename to tests/res/1/dicts/get/20/q/10.py diff --git a/tests/res/dicts/get/20/q/100.py b/tests/res/1/dicts/get/20/q/100.py 
similarity index 100% rename from tests/res/dicts/get/20/q/100.py rename to tests/res/1/dicts/get/20/q/100.py diff --git a/tests/res/dicts/get/20/q/110.py b/tests/res/1/dicts/get/20/q/110.py similarity index 100% rename from tests/res/dicts/get/20/q/110.py rename to tests/res/1/dicts/get/20/q/110.py diff --git a/tests/res/dicts/get/20/q/20.py b/tests/res/1/dicts/get/20/q/20.py similarity index 100% rename from tests/res/dicts/get/20/q/20.py rename to tests/res/1/dicts/get/20/q/20.py diff --git a/tests/res/dicts/get/20/q/30.py b/tests/res/1/dicts/get/20/q/30.py similarity index 100% rename from tests/res/dicts/get/20/q/30.py rename to tests/res/1/dicts/get/20/q/30.py diff --git a/tests/res/dicts/get/20/q/40.py b/tests/res/1/dicts/get/20/q/40.py similarity index 100% rename from tests/res/dicts/get/20/q/40.py rename to tests/res/1/dicts/get/20/q/40.py diff --git a/tests/res/dicts/get/20/q/50.py b/tests/res/1/dicts/get/20/q/50.py similarity index 100% rename from tests/res/dicts/get/20/q/50.py rename to tests/res/1/dicts/get/20/q/50.py diff --git a/tests/res/dicts/get/20/q/60.py b/tests/res/1/dicts/get/20/q/60.py similarity index 100% rename from tests/res/dicts/get/20/q/60.py rename to tests/res/1/dicts/get/20/q/60.py diff --git a/tests/res/dicts/get/20/q/70.py b/tests/res/1/dicts/get/20/q/70.py similarity index 100% rename from tests/res/dicts/get/20/q/70.py rename to tests/res/1/dicts/get/20/q/70.py diff --git a/tests/res/1/dicts/get/20/q/80.py b/tests/res/1/dicts/get/20/q/80.py new file mode 100644 index 00000000..7f25ba4a --- /dev/null +++ b/tests/res/1/dicts/get/20/q/80.py @@ -0,0 +1 @@ +r'/i\j' diff --git a/tests/res/dicts/get/20/q/90.py b/tests/res/1/dicts/get/20/q/90.py similarity index 100% rename from tests/res/dicts/get/20/q/90.py rename to tests/res/1/dicts/get/20/q/90.py diff --git a/tests/res/dicts/get/20/s/00.py b/tests/res/1/dicts/get/20/s/00.py similarity index 100% rename from tests/res/dicts/get/20/s/00.py rename to tests/res/1/dicts/get/20/s/00.py 
diff --git a/tests/res/dicts/get/20/s/10.py b/tests/res/1/dicts/get/20/s/10.py similarity index 100% rename from tests/res/dicts/get/20/s/10.py rename to tests/res/1/dicts/get/20/s/10.py diff --git a/tests/res/dicts/get/20/s/100.py b/tests/res/1/dicts/get/20/s/100.py similarity index 100% rename from tests/res/dicts/get/20/s/100.py rename to tests/res/1/dicts/get/20/s/100.py diff --git a/tests/res/dicts/get/20/s/110.py b/tests/res/1/dicts/get/20/s/110.py similarity index 100% rename from tests/res/dicts/get/20/s/110.py rename to tests/res/1/dicts/get/20/s/110.py diff --git a/tests/res/dicts/get/20/s/20.py b/tests/res/1/dicts/get/20/s/20.py similarity index 100% rename from tests/res/dicts/get/20/s/20.py rename to tests/res/1/dicts/get/20/s/20.py diff --git a/tests/res/dicts/get/20/s/30.py b/tests/res/1/dicts/get/20/s/30.py similarity index 100% rename from tests/res/dicts/get/20/s/30.py rename to tests/res/1/dicts/get/20/s/30.py diff --git a/tests/res/dicts/get/20/s/40.py b/tests/res/1/dicts/get/20/s/40.py similarity index 100% rename from tests/res/dicts/get/20/s/40.py rename to tests/res/1/dicts/get/20/s/40.py diff --git a/tests/res/dicts/get/20/s/50.py b/tests/res/1/dicts/get/20/s/50.py similarity index 100% rename from tests/res/dicts/get/20/s/50.py rename to tests/res/1/dicts/get/20/s/50.py diff --git a/tests/res/dicts/get/20/s/60.py b/tests/res/1/dicts/get/20/s/60.py similarity index 100% rename from tests/res/dicts/get/20/s/60.py rename to tests/res/1/dicts/get/20/s/60.py diff --git a/tests/res/dicts/get/20/s/70.py b/tests/res/1/dicts/get/20/s/70.py similarity index 100% rename from tests/res/dicts/get/20/s/70.py rename to tests/res/1/dicts/get/20/s/70.py diff --git a/tests/res/dicts/get/20/s/80.py b/tests/res/1/dicts/get/20/s/80.py similarity index 100% rename from tests/res/dicts/get/20/s/80.py rename to tests/res/1/dicts/get/20/s/80.py diff --git a/tests/res/dicts/get/20/s/90.py b/tests/res/1/dicts/get/20/s/90.py similarity index 100% rename from 
tests/res/dicts/get/20/s/90.py rename to tests/res/1/dicts/get/20/s/90.py diff --git a/tests/res/dicts/get/30/10.json b/tests/res/1/dicts/get/30/10.json similarity index 100% rename from tests/res/dicts/get/30/10.json rename to tests/res/1/dicts/get/30/10.json diff --git a/tests/res/dicts/mk_nested_dic/10/e/20.json b/tests/res/1/dicts/get/30/20.json similarity index 100% rename from tests/res/dicts/mk_nested_dic/10/e/20.json rename to tests/res/1/dicts/get/30/20.json diff --git a/tests/res/dicts/mk_nested_dic/10/o/20.json b/tests/res/1/dicts/get/30/30.json similarity index 100% rename from tests/res/dicts/mk_nested_dic/10/o/20.json rename to tests/res/1/dicts/get/30/30.json diff --git a/tests/res/dicts/get/30/e/10.py b/tests/res/1/dicts/get/30/e/10.py similarity index 100% rename from tests/res/dicts/get/30/e/10.py rename to tests/res/1/dicts/get/30/e/10.py diff --git a/tests/res/single_load/query/10/e/20_20.json b/tests/res/1/dicts/get/30/e/20.json similarity index 100% rename from tests/res/single_load/query/10/e/20_20.json rename to tests/res/1/dicts/get/30/e/20.json diff --git a/tests/res/1/dicts/get/30/e/30.json b/tests/res/1/dicts/get/30/e/30.json new file mode 100644 index 00000000..19765bd5 --- /dev/null +++ b/tests/res/1/dicts/get/30/e/30.json @@ -0,0 +1 @@ +null diff --git a/tests/res/dicts/get/30/q/10.txt b/tests/res/1/dicts/get/30/q/10.txt similarity index 100% rename from tests/res/dicts/get/30/q/10.txt rename to tests/res/1/dicts/get/30/q/10.txt diff --git a/tests/res/dicts/get/30/q/20.txt b/tests/res/1/dicts/get/30/q/20.txt similarity index 100% rename from tests/res/dicts/get/30/q/20.txt rename to tests/res/1/dicts/get/30/q/20.txt diff --git a/tests/res/dicts/get/30/q/30.txt b/tests/res/1/dicts/get/30/q/30.txt similarity index 100% rename from tests/res/dicts/get/30/q/30.txt rename to tests/res/1/dicts/get/30/q/30.txt diff --git a/tests/res/parser/list/10/00.txt b/tests/res/1/dicts/get/30/s/10.txt similarity index 100% rename from 
tests/res/parser/list/10/00.txt rename to tests/res/1/dicts/get/30/s/10.txt diff --git a/tests/res/dicts/get/30/s/20.txt b/tests/res/1/dicts/get/30/s/20.txt similarity index 100% rename from tests/res/dicts/get/30/s/20.txt rename to tests/res/1/dicts/get/30/s/20.txt diff --git a/tests/res/dicts/get/30/s/30.txt b/tests/res/1/dicts/get/30/s/30.txt similarity index 100% rename from tests/res/dicts/get/30/s/30.txt rename to tests/res/1/dicts/get/30/s/30.txt diff --git a/tests/res/dicts/merge/10/10.json b/tests/res/1/dicts/merge/10/10.json similarity index 100% rename from tests/res/dicts/merge/10/10.json rename to tests/res/1/dicts/merge/10/10.json diff --git a/tests/res/dicts/merge/10/e/10.json b/tests/res/1/dicts/merge/10/e/10.json similarity index 100% rename from tests/res/dicts/merge/10/e/10.json rename to tests/res/1/dicts/merge/10/e/10.json diff --git a/tests/res/dicts/merge/10/o/10.json b/tests/res/1/dicts/merge/10/o/10.json similarity index 100% rename from tests/res/dicts/merge/10/o/10.json rename to tests/res/1/dicts/merge/10/o/10.json diff --git a/tests/res/dicts/merge/10/s/10.json b/tests/res/1/dicts/merge/10/s/10.json similarity index 100% rename from tests/res/dicts/merge/10/s/10.json rename to tests/res/1/dicts/merge/10/s/10.json diff --git a/tests/res/dicts/merge/40/10.json b/tests/res/1/dicts/merge/20/10.json similarity index 100% rename from tests/res/dicts/merge/40/10.json rename to tests/res/1/dicts/merge/20/10.json diff --git a/tests/res/dicts/merge/20/e/10.json b/tests/res/1/dicts/merge/20/e/10.json similarity index 100% rename from tests/res/dicts/merge/20/e/10.json rename to tests/res/1/dicts/merge/20/e/10.json diff --git a/tests/res/dicts/merge/20/o/10.json b/tests/res/1/dicts/merge/20/o/10.json similarity index 100% rename from tests/res/dicts/merge/20/o/10.json rename to tests/res/1/dicts/merge/20/o/10.json diff --git a/tests/res/dicts/merge/20/s/10.json b/tests/res/1/dicts/merge/20/s/10.json similarity index 100% rename from 
tests/res/dicts/merge/20/s/10.json rename to tests/res/1/dicts/merge/20/s/10.json diff --git a/tests/res/single_load/basics/20/10.json b/tests/res/1/dicts/merge/30/10.json similarity index 100% rename from tests/res/single_load/basics/20/10.json rename to tests/res/1/dicts/merge/30/10.json diff --git a/tests/res/dicts/merge/30/e/10.json b/tests/res/1/dicts/merge/30/e/10.json similarity index 100% rename from tests/res/dicts/merge/30/e/10.json rename to tests/res/1/dicts/merge/30/e/10.json diff --git a/tests/res/cli/template/10/e/10.json b/tests/res/1/dicts/merge/30/o/10.json similarity index 100% rename from tests/res/cli/template/10/e/10.json rename to tests/res/1/dicts/merge/30/o/10.json diff --git a/tests/res/dicts/merge/30/s/10.json b/tests/res/1/dicts/merge/30/s/10.json similarity index 100% rename from tests/res/dicts/merge/30/s/10.json rename to tests/res/1/dicts/merge/30/s/10.json diff --git a/tests/res/single_load/basics/30/10.json b/tests/res/1/dicts/merge/40/10.json similarity index 100% rename from tests/res/single_load/basics/30/10.json rename to tests/res/1/dicts/merge/40/10.json diff --git a/tests/res/dicts/merge/40/e/10.json b/tests/res/1/dicts/merge/40/e/10.json similarity index 100% rename from tests/res/dicts/merge/40/e/10.json rename to tests/res/1/dicts/merge/40/e/10.json diff --git a/tests/res/dicts/merge/40/o/10.json b/tests/res/1/dicts/merge/40/o/10.json similarity index 100% rename from tests/res/dicts/merge/40/o/10.json rename to tests/res/1/dicts/merge/40/o/10.json diff --git a/tests/res/dicts/merge/40/s/10.json b/tests/res/1/dicts/merge/40/s/10.json similarity index 100% rename from tests/res/dicts/merge/40/s/10.json rename to tests/res/1/dicts/merge/40/s/10.json diff --git a/tests/res/dicts/mk_nested_dic/10/10.py b/tests/res/1/dicts/mk_nested_dic/10/10.py similarity index 100% rename from tests/res/dicts/mk_nested_dic/10/10.py rename to tests/res/1/dicts/mk_nested_dic/10/10.py diff --git a/tests/res/dicts/mk_nested_dic/10/20.py 
b/tests/res/1/dicts/mk_nested_dic/10/20.py similarity index 100% rename from tests/res/dicts/mk_nested_dic/10/20.py rename to tests/res/1/dicts/mk_nested_dic/10/20.py diff --git a/tests/res/dicts/mk_nested_dic/10/e/10.json b/tests/res/1/dicts/mk_nested_dic/10/e/10.json similarity index 100% rename from tests/res/dicts/mk_nested_dic/10/e/10.json rename to tests/res/1/dicts/mk_nested_dic/10/e/10.json diff --git a/tests/res/1/dicts/mk_nested_dic/10/e/20.json b/tests/res/1/dicts/mk_nested_dic/10/e/20.json new file mode 120000 index 00000000..0159d68f --- /dev/null +++ b/tests/res/1/dicts/mk_nested_dic/10/e/20.json @@ -0,0 +1 @@ +10.json \ No newline at end of file diff --git a/tests/res/dicts/mk_nested_dic/10/o/10.json b/tests/res/1/dicts/mk_nested_dic/10/o/10.json similarity index 100% rename from tests/res/dicts/mk_nested_dic/10/o/10.json rename to tests/res/1/dicts/mk_nested_dic/10/o/10.json diff --git a/tests/res/1/dicts/mk_nested_dic/10/o/20.json b/tests/res/1/dicts/mk_nested_dic/10/o/20.json new file mode 120000 index 00000000..0159d68f --- /dev/null +++ b/tests/res/1/dicts/mk_nested_dic/10/o/20.json @@ -0,0 +1 @@ +10.json \ No newline at end of file diff --git a/tests/res/single_load/query/10/e/00_10.json b/tests/res/1/dicts/mk_nested_dic/10/q/10.py similarity index 100% rename from tests/res/single_load/query/10/e/00_10.json rename to tests/res/1/dicts/mk_nested_dic/10/q/10.py diff --git a/tests/res/dicts/mk_nested_dic/10/q/20.py b/tests/res/1/dicts/mk_nested_dic/10/q/20.py similarity index 100% rename from tests/res/dicts/mk_nested_dic/10/q/20.py rename to tests/res/1/dicts/mk_nested_dic/10/q/20.py diff --git a/tests/res/1/dumpers/json.stdlib/10/350_a_diordered_map.py b/tests/res/1/dumpers/json.stdlib/10/350_a_diordered_map.py new file mode 120000 index 00000000..8aa379ea --- /dev/null +++ b/tests/res/1/dumpers/json.stdlib/10/350_a_diordered_map.py @@ -0,0 +1 @@ +../../../loaders/json.stdlib/10/350_a_diordered_map.json \ No newline at end of file diff --git 
a/tests/res/1/dumpers/json.stdlib/10/e/350_a_diordered_map.py.py b/tests/res/1/dumpers/json.stdlib/10/e/350_a_diordered_map.py.py new file mode 100644 index 00000000..41b7a877 --- /dev/null +++ b/tests/res/1/dumpers/json.stdlib/10/e/350_a_diordered_map.py.py @@ -0,0 +1 @@ +DATA = '{"z": 0, "b": "c", "a": 1}' diff --git a/tests/res/1/dumpers/json.stdlib/20/350_a_diordered_map.py b/tests/res/1/dumpers/json.stdlib/20/350_a_diordered_map.py new file mode 120000 index 00000000..5bdc3ee6 --- /dev/null +++ b/tests/res/1/dumpers/json.stdlib/20/350_a_diordered_map.py @@ -0,0 +1 @@ +../10/350_a_diordered_map.py \ No newline at end of file diff --git a/tests/res/1/dumpers/json.stdlib/20/352_a_diordered_map.py b/tests/res/1/dumpers/json.stdlib/20/352_a_diordered_map.py new file mode 120000 index 00000000..54b95ec6 --- /dev/null +++ b/tests/res/1/dumpers/json.stdlib/20/352_a_diordered_map.py @@ -0,0 +1 @@ +350_a_diordered_map.py \ No newline at end of file diff --git a/tests/res/1/dumpers/json.stdlib/20/e/350_a_diordered_map.py.py b/tests/res/1/dumpers/json.stdlib/20/e/350_a_diordered_map.py.py new file mode 100644 index 00000000..ee01830c --- /dev/null +++ b/tests/res/1/dumpers/json.stdlib/20/e/350_a_diordered_map.py.py @@ -0,0 +1 @@ +DATA = '''{"z": 0, "b": "c", "a": 1}''' diff --git a/tests/res/1/dumpers/json.stdlib/20/e/352_a_diordered_map.py.py b/tests/res/1/dumpers/json.stdlib/20/e/352_a_diordered_map.py.py new file mode 100644 index 00000000..767895a6 --- /dev/null +++ b/tests/res/1/dumpers/json.stdlib/20/e/352_a_diordered_map.py.py @@ -0,0 +1 @@ +DATA = '''{"a": 1, "b": "c", "z": 0}''' diff --git a/tests/res/1/dumpers/json.stdlib/20/o/350_a_diordered_map.py.json b/tests/res/1/dumpers/json.stdlib/20/o/350_a_diordered_map.py.json new file mode 100644 index 00000000..297980d5 --- /dev/null +++ b/tests/res/1/dumpers/json.stdlib/20/o/350_a_diordered_map.py.json @@ -0,0 +1 @@ +{"sort_keys": false} diff --git a/tests/res/1/dumpers/json.stdlib/20/o/352_a_diordered_map.py.json 
b/tests/res/1/dumpers/json.stdlib/20/o/352_a_diordered_map.py.json new file mode 100644 index 00000000..5d65bfec --- /dev/null +++ b/tests/res/1/dumpers/json.stdlib/20/o/352_a_diordered_map.py.json @@ -0,0 +1 @@ +{"sort_keys": true} diff --git a/tests/res/1/dumpers/properties.builtin/10/e/100.json.py b/tests/res/1/dumpers/properties.builtin/10/e/100.json.py index 794bcaf7..546654b7 100644 --- a/tests/res/1/dumpers/properties.builtin/10/e/100.json.py +++ b/tests/res/1/dumpers/properties.builtin/10/e/100.json.py @@ -1,6 +1,5 @@ # flake8: noqa: W291 -DATA = """\ -a = 0 +DATA = """a = 0 b = bbb c = sect0.c = x;y;z diff --git a/tests/res/1/dumpers/sh.variables/10/e/100_basics.json.py b/tests/res/1/dumpers/sh.variables/10/e/100_basics.json.py index 935bd3e2..93dbacf5 100644 --- a/tests/res/1/dumpers/sh.variables/10/e/100_basics.json.py +++ b/tests/res/1/dumpers/sh.variables/10/e/100_basics.json.py @@ -1,5 +1,4 @@ -DATA = """\ -a='0' +DATA = """a='0' b='bbb' c='ccc' d='ddd' diff --git a/tests/res/1/dumpers/toml.tomllib/10/200_a_simple_map_with_basic_values.py b/tests/res/1/dumpers/toml.tomllib/10/200_a_simple_map_with_basic_values.py deleted file mode 120000 index e98b795e..00000000 --- a/tests/res/1/dumpers/toml.tomllib/10/200_a_simple_map_with_basic_values.py +++ /dev/null @@ -1 +0,0 @@ -../../../loaders/toml.tomllib/10/e/200_a_simple_map_with_basic_values.toml.py \ No newline at end of file diff --git a/tests/res/1/dumpers/toml.tomllib/10/e/200_a_simple_map_with_basic_values.py.py b/tests/res/1/dumpers/toml.tomllib/10/e/200_a_simple_map_with_basic_values.py.py index 761d34c1..a214f689 100644 --- a/tests/res/1/dumpers/toml.tomllib/10/e/200_a_simple_map_with_basic_values.py.py +++ b/tests/res/1/dumpers/toml.tomllib/10/e/200_a_simple_map_with_basic_values.py.py @@ -1,4 +1,4 @@ -DATA = """[x] +DATA = r"""[x] a0 = 0 a1 = 1 a2 = 42 diff --git a/tests/res/1/dumpers/toml.tomllib/10/e/310_a_map_with_arrays.json.py 
b/tests/res/1/dumpers/toml.tomllib/10/e/310_a_map_with_arrays.json.py index 4825338f..843d2ace 100644 --- a/tests/res/1/dumpers/toml.tomllib/10/e/310_a_map_with_arrays.json.py +++ b/tests/res/1/dumpers/toml.tomllib/10/e/310_a_map_with_arrays.json.py @@ -1,5 +1,4 @@ -DATA = """\ -[x] +DATA = """[x] integers = [ 1, 2, diff --git a/tests/res/1/dumpers/toml.tomllib/10/e/410_complex_maps.py.py b/tests/res/1/dumpers/toml.tomllib/10/e/410_complex_maps.py.py index 742e220d..2ca6289d 100644 --- a/tests/res/1/dumpers/toml.tomllib/10/e/410_complex_maps.py.py +++ b/tests/res/1/dumpers/toml.tomllib/10/e/410_complex_maps.py.py @@ -1,5 +1,4 @@ -DATA = """\ -title = "Table examples" +DATA = """title = "Table examples" name = { first = "Tom", last = "Preston-Werner" } point = { x = 1, y = 2 } diff --git a/tests/res/1/dumpers/toml.tomllib/10/e/420_array_of_tables.json.py b/tests/res/1/dumpers/toml.tomllib/10/e/420_array_of_tables.json.py index 85c071de..7aebaaad 100644 --- a/tests/res/1/dumpers/toml.tomllib/10/e/420_array_of_tables.json.py +++ b/tests/res/1/dumpers/toml.tomllib/10/e/420_array_of_tables.json.py @@ -1,5 +1,4 @@ -DATA = """\ -products = [ +DATA = """products = [ { name = "Hammer", sku = 738594937 }, {}, { name = "Nail", sku = 284758393, color = "gray" }, diff --git a/tests/res/1/dumpers/xml.etree/10/e/100.json.py b/tests/res/1/dumpers/xml.etree/10/e/100.json.py index 62eb8b1a..ccf87ebc 100644 --- a/tests/res/1/dumpers/xml.etree/10/e/100.json.py +++ b/tests/res/1/dumpers/xml.etree/10/e/100.json.py @@ -1,4 +1,4 @@ DATA = ( - b"\n" - b"A" + b""" +A""" ) diff --git a/tests/res/1/dumpers/xml.etree/20/100.json b/tests/res/1/dumpers/xml.etree/20/100.json new file mode 120000 index 00000000..894c9ba6 --- /dev/null +++ b/tests/res/1/dumpers/xml.etree/20/100.json @@ -0,0 +1 @@ +../10/100.json \ No newline at end of file diff --git a/tests/res/1/dumpers/xml.etree/20/200.json b/tests/res/1/dumpers/xml.etree/20/200.json new file mode 120000 index 00000000..a7628a62 --- /dev/null 
+++ b/tests/res/1/dumpers/xml.etree/20/200.json @@ -0,0 +1 @@ +../10/200.json \ No newline at end of file diff --git a/tests/res/1/dumpers/xml.etree/20/300.json b/tests/res/1/dumpers/xml.etree/20/300.json new file mode 120000 index 00000000..9e12f6b6 --- /dev/null +++ b/tests/res/1/dumpers/xml.etree/20/300.json @@ -0,0 +1 @@ +../10/300.json \ No newline at end of file diff --git a/tests/res/1/dumpers/xml.etree/20/e/100.json.py b/tests/res/1/dumpers/xml.etree/20/e/100.json.py new file mode 100644 index 00000000..efb57419 --- /dev/null +++ b/tests/res/1/dumpers/xml.etree/20/e/100.json.py @@ -0,0 +1,3 @@ +DATA = ( + b"A" +) diff --git a/tests/res/1/dumpers/xml.etree/20/e/200.json.py b/tests/res/1/dumpers/xml.etree/20/e/200.json.py new file mode 100644 index 00000000..8e26613d --- /dev/null +++ b/tests/res/1/dumpers/xml.etree/20/e/200.json.py @@ -0,0 +1,7 @@ +DATA = ( + b'' + b'1' + b'C' + b'' +) diff --git a/tests/res/1/dumpers/xml.etree/20/e/300.json.py b/tests/res/1/dumpers/xml.etree/20/e/300.json.py new file mode 100644 index 00000000..d2941e4e --- /dev/null +++ b/tests/res/1/dumpers/xml.etree/20/e/300.json.py @@ -0,0 +1,11 @@ +DATA = ( + b'' + b'0' + b'bbb' + b'' + b'x, y, z' + b'012' + b'ij' + b'' +) diff --git a/tests/res/1/dumpers/xml.etree/20/o/100.json b/tests/res/1/dumpers/xml.etree/20/o/100.json new file mode 100644 index 00000000..36894ae9 --- /dev/null +++ b/tests/res/1/dumpers/xml.etree/20/o/100.json @@ -0,0 +1 @@ +{"xml_declaration": false} diff --git a/tests/res/1/dumpers/xml.etree/20/o/200.json b/tests/res/1/dumpers/xml.etree/20/o/200.json new file mode 120000 index 00000000..8099b0bd --- /dev/null +++ b/tests/res/1/dumpers/xml.etree/20/o/200.json @@ -0,0 +1 @@ +100.json \ No newline at end of file diff --git a/tests/res/1/dumpers/xml.etree/20/o/300.json b/tests/res/1/dumpers/xml.etree/20/o/300.json new file mode 120000 index 00000000..8099b0bd --- /dev/null +++ b/tests/res/1/dumpers/xml.etree/20/o/300.json @@ -0,0 +1 @@ +100.json \ No newline at end 
of file diff --git a/tests/res/1/loaders/json.stdlib/10/350_a_diordered_map.json b/tests/res/1/loaders/json.stdlib/10/350_a_diordered_map.json new file mode 100644 index 00000000..a4b2f5db --- /dev/null +++ b/tests/res/1/loaders/json.stdlib/10/350_a_diordered_map.json @@ -0,0 +1 @@ +{"z": 0, "b": "c", "a": 1} diff --git a/tests/res/1/loaders/json.stdlib/10/e/350_a_diordered_map.json.py b/tests/res/1/loaders/json.stdlib/10/e/350_a_diordered_map.json.py new file mode 120000 index 00000000..8649ed3d --- /dev/null +++ b/tests/res/1/loaders/json.stdlib/10/e/350_a_diordered_map.json.py @@ -0,0 +1 @@ +../350_a_diordered_map.json \ No newline at end of file diff --git a/tests/res/single_load/basics/20/test_cases_with_schema_validation.txt b/tests/res/1/parser/attrlist/10/00.txt similarity index 100% rename from tests/res/single_load/basics/20/test_cases_with_schema_validation.txt rename to tests/res/1/parser/attrlist/10/00.txt diff --git a/tests/res/parser/attrlist/10/10.txt b/tests/res/1/parser/attrlist/10/10.txt similarity index 100% rename from tests/res/parser/attrlist/10/10.txt rename to tests/res/1/parser/attrlist/10/10.txt diff --git a/tests/res/parser/attrlist/10/20.txt b/tests/res/1/parser/attrlist/10/20.txt similarity index 100% rename from tests/res/parser/attrlist/10/20.txt rename to tests/res/1/parser/attrlist/10/20.txt diff --git a/tests/res/parser/attrlist/10/30.txt b/tests/res/1/parser/attrlist/10/30.txt similarity index 100% rename from tests/res/parser/attrlist/10/30.txt rename to tests/res/1/parser/attrlist/10/30.txt diff --git a/tests/res/parser/attrlist/10/40.txt b/tests/res/1/parser/attrlist/10/40.txt similarity index 100% rename from tests/res/parser/attrlist/10/40.txt rename to tests/res/1/parser/attrlist/10/40.txt diff --git a/tests/res/multi_load/schema/00/o/00.json b/tests/res/1/parser/attrlist/10/e/00.json similarity index 100% rename from tests/res/multi_load/schema/00/o/00.json rename to tests/res/1/parser/attrlist/10/e/00.json diff --git 
a/tests/res/parser/attrlist/10/e/10.json b/tests/res/1/parser/attrlist/10/e/10.json similarity index 100% rename from tests/res/parser/attrlist/10/e/10.json rename to tests/res/1/parser/attrlist/10/e/10.json diff --git a/tests/res/parser/attrlist/10/e/20.json b/tests/res/1/parser/attrlist/10/e/20.json similarity index 100% rename from tests/res/parser/attrlist/10/e/20.json rename to tests/res/1/parser/attrlist/10/e/20.json diff --git a/tests/res/parser/attrlist/10/e/30.json b/tests/res/1/parser/attrlist/10/e/30.json similarity index 100% rename from tests/res/parser/attrlist/10/e/30.json rename to tests/res/1/parser/attrlist/10/e/30.json diff --git a/tests/res/parser/attrlist/10/e/40.json b/tests/res/1/parser/attrlist/10/e/40.json similarity index 100% rename from tests/res/parser/attrlist/10/e/40.json rename to tests/res/1/parser/attrlist/10/e/40.json diff --git a/tests/res/parser/attrlist_0/10/00.txt b/tests/res/1/parser/attrlist_0/10/00.txt similarity index 100% rename from tests/res/parser/attrlist_0/10/00.txt rename to tests/res/1/parser/attrlist_0/10/00.txt diff --git a/tests/res/parser/attrlist_0/10/10.txt b/tests/res/1/parser/attrlist_0/10/10.txt similarity index 100% rename from tests/res/parser/attrlist_0/10/10.txt rename to tests/res/1/parser/attrlist_0/10/10.txt diff --git a/tests/res/parser/attrlist_0/10/20.txt b/tests/res/1/parser/attrlist_0/10/20.txt similarity index 100% rename from tests/res/parser/attrlist_0/10/20.txt rename to tests/res/1/parser/attrlist_0/10/20.txt diff --git a/tests/res/parser/attrlist_0/10/30.txt b/tests/res/1/parser/attrlist_0/10/30.txt similarity index 100% rename from tests/res/parser/attrlist_0/10/30.txt rename to tests/res/1/parser/attrlist_0/10/30.txt diff --git a/tests/res/parser/attrlist_0/10/40.txt b/tests/res/1/parser/attrlist_0/10/40.txt similarity index 100% rename from tests/res/parser/attrlist_0/10/40.txt rename to tests/res/1/parser/attrlist_0/10/40.txt diff --git a/tests/res/parser/attrlist_0/10/e/00.json 
b/tests/res/1/parser/attrlist_0/10/e/00.json similarity index 100% rename from tests/res/parser/attrlist_0/10/e/00.json rename to tests/res/1/parser/attrlist_0/10/e/00.json diff --git a/tests/res/parser/attrlist_0/10/e/10.py b/tests/res/1/parser/attrlist_0/10/e/10.py similarity index 100% rename from tests/res/parser/attrlist_0/10/e/10.py rename to tests/res/1/parser/attrlist_0/10/e/10.py diff --git a/tests/res/parser/attrlist_0/10/e/20.py b/tests/res/1/parser/attrlist_0/10/e/20.py similarity index 100% rename from tests/res/parser/attrlist_0/10/e/20.py rename to tests/res/1/parser/attrlist_0/10/e/20.py diff --git a/tests/res/parser/attrlist_0/10/e/30.py b/tests/res/1/parser/attrlist_0/10/e/30.py similarity index 100% rename from tests/res/parser/attrlist_0/10/e/30.py rename to tests/res/1/parser/attrlist_0/10/e/30.py diff --git a/tests/res/parser/attrlist_0/10/e/40.py b/tests/res/1/parser/attrlist_0/10/e/40.py similarity index 100% rename from tests/res/parser/attrlist_0/10/e/40.py rename to tests/res/1/parser/attrlist_0/10/e/40.py diff --git a/tests/res/single_load/basics/30/test_cases_with_explicit_parser_type_option.txt b/tests/res/1/parser/list/10/00.txt similarity index 100% rename from tests/res/single_load/basics/30/test_cases_with_explicit_parser_type_option.txt rename to tests/res/1/parser/list/10/00.txt diff --git a/tests/res/parser/list/10/10.txt b/tests/res/1/parser/list/10/10.txt similarity index 100% rename from tests/res/parser/list/10/10.txt rename to tests/res/1/parser/list/10/10.txt diff --git a/tests/res/parser/list/10/20.txt b/tests/res/1/parser/list/10/20.txt similarity index 100% rename from tests/res/parser/list/10/20.txt rename to tests/res/1/parser/list/10/20.txt diff --git a/tests/res/parser/list/10/30.txt b/tests/res/1/parser/list/10/30.txt similarity index 100% rename from tests/res/parser/list/10/30.txt rename to tests/res/1/parser/list/10/30.txt diff --git a/tests/res/parser/list/10/40.txt b/tests/res/1/parser/list/10/40.txt 
similarity index 100% rename from tests/res/parser/list/10/40.txt rename to tests/res/1/parser/list/10/40.txt diff --git a/tests/res/parser/list/10/50.txt b/tests/res/1/parser/list/10/50.txt similarity index 100% rename from tests/res/parser/list/10/50.txt rename to tests/res/1/parser/list/10/50.txt diff --git a/tests/res/parser/list/10/e/00.json b/tests/res/1/parser/list/10/e/00.json similarity index 100% rename from tests/res/parser/list/10/e/00.json rename to tests/res/1/parser/list/10/e/00.json diff --git a/tests/res/parser/list/10/e/10.json b/tests/res/1/parser/list/10/e/10.json similarity index 100% rename from tests/res/parser/list/10/e/10.json rename to tests/res/1/parser/list/10/e/10.json diff --git a/tests/res/parser/list/10/e/20.json b/tests/res/1/parser/list/10/e/20.json similarity index 100% rename from tests/res/parser/list/10/e/20.json rename to tests/res/1/parser/list/10/e/20.json diff --git a/tests/res/parser/list/10/e/30.json b/tests/res/1/parser/list/10/e/30.json similarity index 100% rename from tests/res/parser/list/10/e/30.json rename to tests/res/1/parser/list/10/e/30.json diff --git a/tests/res/parser/list/10/e/40.json b/tests/res/1/parser/list/10/e/40.json similarity index 100% rename from tests/res/parser/list/10/e/40.json rename to tests/res/1/parser/list/10/e/40.json diff --git a/tests/res/parser/list/10/e/50.json b/tests/res/1/parser/list/10/e/50.json similarity index 100% rename from tests/res/parser/list/10/e/50.json rename to tests/res/1/parser/list/10/e/50.json diff --git a/tests/res/parser/attrlist/10/e/00.json b/tests/res/1/parser/list/10/o/00.json similarity index 100% rename from tests/res/parser/attrlist/10/e/00.json rename to tests/res/1/parser/list/10/o/00.json diff --git a/tests/res/single_load/template/20/o/10.json b/tests/res/1/parser/list/10/o/10.json similarity index 100% rename from tests/res/single_load/template/20/o/10.json rename to tests/res/1/parser/list/10/o/10.json diff --git 
a/tests/res/single_load/template/20/o/20.json b/tests/res/1/parser/list/10/o/20.json similarity index 100% rename from tests/res/single_load/template/20/o/20.json rename to tests/res/1/parser/list/10/o/20.json diff --git a/tests/res/parser/list/10/o/40.json b/tests/res/1/parser/list/10/o/30.json similarity index 100% rename from tests/res/parser/list/10/o/40.json rename to tests/res/1/parser/list/10/o/30.json diff --git a/tests/res/1/parser/list/10/o/40.json b/tests/res/1/parser/list/10/o/40.json new file mode 120000 index 00000000..a9005a1b --- /dev/null +++ b/tests/res/1/parser/list/10/o/40.json @@ -0,0 +1 @@ +00.json \ No newline at end of file diff --git a/tests/res/parser/list/10/o/50.json b/tests/res/1/parser/list/10/o/50.json similarity index 100% rename from tests/res/parser/list/10/o/50.json rename to tests/res/1/parser/list/10/o/50.json diff --git a/tests/res/parser/parse/10/00.py b/tests/res/1/parser/parse/10/00.py similarity index 100% rename from tests/res/parser/parse/10/00.py rename to tests/res/1/parser/parse/10/00.py diff --git a/tests/res/parser/parse/10/10.txt b/tests/res/1/parser/parse/10/10.txt similarity index 100% rename from tests/res/parser/parse/10/10.txt rename to tests/res/1/parser/parse/10/10.txt diff --git a/tests/res/parser/parse/10/20.txt b/tests/res/1/parser/parse/10/20.txt similarity index 100% rename from tests/res/parser/parse/10/20.txt rename to tests/res/1/parser/parse/10/20.txt diff --git a/tests/res/parser/parse/10/30.txt b/tests/res/1/parser/parse/10/30.txt similarity index 100% rename from tests/res/parser/parse/10/30.txt rename to tests/res/1/parser/parse/10/30.txt diff --git a/tests/res/parser/parse/10/40.py b/tests/res/1/parser/parse/10/40.py similarity index 100% rename from tests/res/parser/parse/10/40.py rename to tests/res/1/parser/parse/10/40.py diff --git a/tests/res/parser/parse/10/50.py b/tests/res/1/parser/parse/10/50.py similarity index 100% rename from tests/res/parser/parse/10/50.py rename to 
tests/res/1/parser/parse/10/50.py diff --git a/tests/res/parser/parse/10/60.txt b/tests/res/1/parser/parse/10/60.txt similarity index 100% rename from tests/res/parser/parse/10/60.txt rename to tests/res/1/parser/parse/10/60.txt diff --git a/tests/res/parser/parse/10/70.txt b/tests/res/1/parser/parse/10/70.txt similarity index 100% rename from tests/res/parser/parse/10/70.txt rename to tests/res/1/parser/parse/10/70.txt diff --git a/tests/res/parser/parse/10/e b/tests/res/1/parser/parse/10/e similarity index 100% rename from tests/res/parser/parse/10/e rename to tests/res/1/parser/parse/10/e diff --git a/tests/res/parser/parse/20/20.txt b/tests/res/1/parser/parse/20/20.txt similarity index 100% rename from tests/res/parser/parse/20/20.txt rename to tests/res/1/parser/parse/20/20.txt diff --git a/tests/res/parser/parse/20/30.txt b/tests/res/1/parser/parse/20/30.txt similarity index 100% rename from tests/res/parser/parse/20/30.txt rename to tests/res/1/parser/parse/20/30.txt diff --git a/tests/res/parser/parse/20/40.txt b/tests/res/1/parser/parse/20/40.txt similarity index 100% rename from tests/res/parser/parse/20/40.txt rename to tests/res/1/parser/parse/20/40.txt diff --git a/tests/res/parser/parse/20/e b/tests/res/1/parser/parse/20/e similarity index 100% rename from tests/res/parser/parse/20/e rename to tests/res/1/parser/parse/20/e diff --git a/tests/res/parser/parse/30/10.txt b/tests/res/1/parser/parse/30/10.txt similarity index 100% rename from tests/res/parser/parse/30/10.txt rename to tests/res/1/parser/parse/30/10.txt diff --git a/tests/res/parser/parse/30/20.txt b/tests/res/1/parser/parse/30/20.txt similarity index 100% rename from tests/res/parser/parse/30/20.txt rename to tests/res/1/parser/parse/30/20.txt diff --git a/tests/res/parser/parse/30/30.txt b/tests/res/1/parser/parse/30/30.txt similarity index 100% rename from tests/res/parser/parse/30/30.txt rename to tests/res/1/parser/parse/30/30.txt diff --git a/tests/res/parser/parse/30/40.txt 
b/tests/res/1/parser/parse/30/40.txt similarity index 100% rename from tests/res/parser/parse/30/40.txt rename to tests/res/1/parser/parse/30/40.txt diff --git a/tests/res/parser/parse/30/e b/tests/res/1/parser/parse/30/e similarity index 100% rename from tests/res/parser/parse/30/e rename to tests/res/1/parser/parse/30/e diff --git a/tests/res/parser/single/10/00.py b/tests/res/1/parser/single/10/00.py similarity index 100% rename from tests/res/parser/single/10/00.py rename to tests/res/1/parser/single/10/00.py diff --git a/tests/res/parser/single/10/10.txt b/tests/res/1/parser/single/10/10.txt similarity index 100% rename from tests/res/parser/single/10/10.txt rename to tests/res/1/parser/single/10/10.txt diff --git a/tests/res/parser/single/10/20.txt b/tests/res/1/parser/single/10/20.txt similarity index 100% rename from tests/res/parser/single/10/20.txt rename to tests/res/1/parser/single/10/20.txt diff --git a/tests/res/parser/single/10/30.txt b/tests/res/1/parser/single/10/30.txt similarity index 100% rename from tests/res/parser/single/10/30.txt rename to tests/res/1/parser/single/10/30.txt diff --git a/tests/res/parser/single/10/40.py b/tests/res/1/parser/single/10/40.py similarity index 100% rename from tests/res/parser/single/10/40.py rename to tests/res/1/parser/single/10/40.py diff --git a/tests/res/parser/single/10/50.py b/tests/res/1/parser/single/10/50.py similarity index 100% rename from tests/res/parser/single/10/50.py rename to tests/res/1/parser/single/10/50.py diff --git a/tests/res/parser/single/10/60.txt b/tests/res/1/parser/single/10/60.txt similarity index 100% rename from tests/res/parser/single/10/60.txt rename to tests/res/1/parser/single/10/60.txt diff --git a/tests/res/parser/single/10/70.txt b/tests/res/1/parser/single/10/70.txt similarity index 100% rename from tests/res/parser/single/10/70.txt rename to tests/res/1/parser/single/10/70.txt diff --git a/tests/res/parser/single/10/e/00.py b/tests/res/1/parser/single/10/e/00.py 
similarity index 100% rename from tests/res/parser/single/10/e/00.py rename to tests/res/1/parser/single/10/e/00.py diff --git a/tests/res/parser/single/10/e/10.py b/tests/res/1/parser/single/10/e/10.py similarity index 100% rename from tests/res/parser/single/10/e/10.py rename to tests/res/1/parser/single/10/e/10.py diff --git a/tests/res/parser/single/10/e/20.py b/tests/res/1/parser/single/10/e/20.py similarity index 100% rename from tests/res/parser/single/10/e/20.py rename to tests/res/1/parser/single/10/e/20.py diff --git a/tests/res/parser/single/10/e/30.py b/tests/res/1/parser/single/10/e/30.py similarity index 100% rename from tests/res/parser/single/10/e/30.py rename to tests/res/1/parser/single/10/e/30.py diff --git a/tests/res/parser/single/10/e/40.py b/tests/res/1/parser/single/10/e/40.py similarity index 100% rename from tests/res/parser/single/10/e/40.py rename to tests/res/1/parser/single/10/e/40.py diff --git a/tests/res/parser/single/10/e/50.py b/tests/res/1/parser/single/10/e/50.py similarity index 100% rename from tests/res/parser/single/10/e/50.py rename to tests/res/1/parser/single/10/e/50.py diff --git a/tests/res/parser/single/10/e/60.py b/tests/res/1/parser/single/10/e/60.py similarity index 100% rename from tests/res/parser/single/10/e/60.py rename to tests/res/1/parser/single/10/e/60.py diff --git a/tests/res/parser/single/10/e/70.py b/tests/res/1/parser/single/10/e/70.py similarity index 100% rename from tests/res/parser/single/10/e/70.py rename to tests/res/1/parser/single/10/e/70.py diff --git a/tests/res/template/jinja2/10/10.j2 b/tests/res/1/templates/jinja2/10/10.j2 similarity index 100% rename from tests/res/template/jinja2/10/10.j2 rename to tests/res/1/templates/jinja2/10/10.j2 diff --git a/tests/res/template/jinja2/10/20.j2 b/tests/res/1/templates/jinja2/10/20.j2 similarity index 100% rename from tests/res/template/jinja2/10/20.j2 rename to tests/res/1/templates/jinja2/10/20.j2 diff --git a/tests/res/template/jinja2/10/r/10.txt 
b/tests/res/1/templates/jinja2/10/r/10.txt similarity index 100% rename from tests/res/template/jinja2/10/r/10.txt rename to tests/res/1/templates/jinja2/10/r/10.txt diff --git a/tests/res/template/jinja2/20/10.j2 b/tests/res/1/templates/jinja2/20/10.j2 similarity index 100% rename from tests/res/template/jinja2/20/10.j2 rename to tests/res/1/templates/jinja2/20/10.j2 diff --git a/tests/res/template/jinja2/20/r/10.txt b/tests/res/1/templates/jinja2/20/r/10.txt similarity index 100% rename from tests/res/template/jinja2/20/r/10.txt rename to tests/res/1/templates/jinja2/20/r/10.txt diff --git a/tests/res/20-00-cnf.ini b/tests/res/20-00-cnf.ini deleted file mode 100644 index 7811dab2..00000000 --- a/tests/res/20-00-cnf.ini +++ /dev/null @@ -1,8 +0,0 @@ -# :seealso: :class:`tests.backend.common.CNF_0` -[DEFAULT] -a: 0 -b: bbb -c: 5 - -[sect0] -d: x,y,z diff --git a/tests/res/20-00-cnf.json b/tests/res/20-00-cnf.json deleted file mode 100644 index 9f42c1c8..00000000 --- a/tests/res/20-00-cnf.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "a": 0, - "b": "bbb", - "c": 5, - "sect0": { - "d": ["x", "y", "z"] - } -} diff --git a/tests/res/20-00-cnf.properties b/tests/res/20-00-cnf.properties deleted file mode 100644 index 6e868996..00000000 --- a/tests/res/20-00-cnf.properties +++ /dev/null @@ -1,12 +0,0 @@ -a = 0 - b = bbb -c: - -sect0.c = x;y;z -sect1.d = \ - 1,2,3 - -d=\ -val1,\ -val2,\ -val3 diff --git a/tests/res/20-00-cnf.sh b/tests/res/20-00-cnf.sh deleted file mode 100644 index 7fb3d32e..00000000 --- a/tests/res/20-00-cnf.sh +++ /dev/null @@ -1,5 +0,0 @@ -a=0 -b='bbb' # a comment -c="ccc" # an another comment -export d='ddd' ## double comment - export e="eee" ### tripple comment diff --git a/tests/res/20-00-cnf.toml b/tests/res/20-00-cnf.toml deleted file mode 100644 index 27035dc1..00000000 --- a/tests/res/20-00-cnf.toml +++ /dev/null @@ -1,30 +0,0 @@ -# Taken from https://github.com/toml-lang/toml: -title = "TOML Example" - -[owner] -name = "Tom Preston-Werner" -dob = 
1979-05-27T07:32:00Z # First class dates - -[database] -server = "192.168.1.1" -ports = [ 8001, 8001, 8002 ] -connection_max = 5000 -enabled = true - -[servers] - - [servers.alpha] - ip = "10.0.0.1" - dc = "eqdc10" - - [servers.beta] - ip = "10.0.0.2" - dc = "eqdc10" - -[clients] -data = [ ["gamma", "delta"], [1, 2] ] - -hosts = [ - "alpha", - "omega" -] diff --git a/tests/res/20-00-cnf.xml b/tests/res/20-00-cnf.xml deleted file mode 100644 index 311e1b8b..00000000 --- a/tests/res/20-00-cnf.xml +++ /dev/null @@ -1,5 +0,0 @@ - - 1 - C - diff --git a/tests/res/20-00-cnf.yml b/tests/res/20-00-cnf.yml deleted file mode 100644 index d1272b8f..00000000 --- a/tests/res/20-00-cnf.yml +++ /dev/null @@ -1,12 +0,0 @@ -a: 0 -b: bbb -c: - - 1 - - 2 - - 3 - -sect0: §0 - d: ["x", "y", "z"] -sect1: - <<: *sect0 - e: true diff --git a/tests/res/20-10-cnf.xml b/tests/res/20-10-cnf.xml deleted file mode 100644 index 65c8b0f4..00000000 --- a/tests/res/20-10-cnf.xml +++ /dev/null @@ -1,20 +0,0 @@ - - - 0 - bbb - - - x, y, z - - - 0 - 1 - 2 - - - i - j - - diff --git a/tests/res/30-00-cnf.json b/tests/res/30-00-cnf.json deleted file mode 100644 index dedf618c..00000000 --- a/tests/res/30-00-cnf.json +++ /dev/null @@ -1 +0,0 @@ -{% include '30-00-template-cnf.json' %} diff --git a/tests/res/30-00-template-cnf-ng-scm.json b/tests/res/30-00-template-cnf-ng-scm.json deleted file mode 100644 index e0b87281..00000000 --- a/tests/res/30-00-template-cnf-ng-scm.json +++ /dev/null @@ -1 +0,0 @@ -{"type": "integer"} diff --git a/tests/res/30-00-template-cnf.json b/tests/res/30-00-template-cnf.json deleted file mode 100644 index 667b3b1b..00000000 --- a/tests/res/30-00-template-cnf.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "name": "{{ name|default('a') }}", - "a": {{ a|default(1) }}, - "b": { - "b": [ - {% if b is defined and b and b.b is defined and b.b -%} - {% for x in b.b %}{{ x }}{% if not loop.last %},{% endif %}{% endfor %} - {% else %}1, 2{% endif %} - ], - "c": "{% if b is defined and b 
%}{{ b.c|default('C') }}{% else %}'C'{% endif %}" - } -} diff --git a/tests/res/30-10-template-cnf.json b/tests/res/30-10-template-cnf.json deleted file mode 100644 index 76303c35..00000000 --- a/tests/res/30-10-template-cnf.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "a": {{ a|default(1) }}, - "b": { - "b": [ - {% if b is defined and b and b.b is defined and b.b -%} - {% for x in b.b %}{{ x }}{% if not loop.last %},{% endif %}{% endfor %} - {% else %}1, 2{% endif %} - ], - "c": "{% if b is defined and b %}{{ b.c|default('C') }}{% else %}'C'{% endif %}" - }, - "e": 0 -} diff --git a/tests/res/cli/errors/10/README.md b/tests/res/cli/errors/10/README.md deleted file mode 100644 index 8450131c..00000000 --- a/tests/res/cli/errors/10/README.md +++ /dev/null @@ -1,7 +0,0 @@ -# Test cases for anyconfig\_cli - -- 10.json + o/10.json: no args, no options -- 20.json + o/20.json: no args, an wrong option -- 30.json + o/30.json: an input with unknown file type, no options -- 40.json + o/40.json: an input with unknown file type, an -I (input type) option gives an unknown file type -- 50.json + o/50.json: an input with known file type, an -o option gives file with known file type and an -O (output type) option gives an unknown file type diff --git a/tests/res/cli/extra_options/10/README.md b/tests/res/cli/extra_options/10/README.md deleted file mode 100644 index 9a5f458e..00000000 --- a/tests/res/cli/extra_options/10/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Test cases for anyconfig\_cli - -- 10.json + o/10.json: a JSON input with '--extra-args ...' 
option to load and dump to a JSON output diff --git a/tests/res/cli/ignore_missing/10/README.md b/tests/res/cli/ignore_missing/10/README.md deleted file mode 100644 index 2fdffd29..00000000 --- a/tests/res/cli/ignore_missing/10/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Test cases for anyconfig\_cli - -- 10.json + o/10.json: an input (this input file will be replaced with a file doees not exist) with '--ignore-missing' and '-O json' options to load and dump a JSON data as a string diff --git a/tests/res/cli/multi_inputs/10/README.md b/tests/res/cli/multi_inputs/10/README.md deleted file mode 100644 index 9d2b8d6c..00000000 --- a/tests/res/cli/multi_inputs/10/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Test cases for anyconfig\_cli - -- {1,2,3}0.json + o/10.json: multi JSON inputs without any options to load, and dump to a JSON file without any options diff --git a/tests/res/cli/multi_inputs/10/i/10.json b/tests/res/cli/multi_inputs/10/i/10.json deleted file mode 100644 index 0fe9a9f8..00000000 --- a/tests/res/cli/multi_inputs/10/i/10.json +++ /dev/null @@ -1 +0,0 @@ -"*.json" diff --git a/tests/res/cli/multi_inputs/20/README.md b/tests/res/cli/multi_inputs/20/README.md deleted file mode 100644 index cb7fae73..00000000 --- a/tests/res/cli/multi_inputs/20/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Test cases for anyconfig\_cli - -- 10.json, 20.xml, 30.sh + o/10.json: multi type inputs without any options to load, and dump to a JSON file without any options diff --git a/tests/res/cli/multi_inputs/20/i/10.json b/tests/res/cli/multi_inputs/20/i/10.json deleted file mode 100644 index ebdfde7c..00000000 --- a/tests/res/cli/multi_inputs/20/i/10.json +++ /dev/null @@ -1 +0,0 @@ -"*0.*" diff --git a/tests/res/cli/schema/10/e/20.json b/tests/res/cli/schema/10/e/20.json deleted file mode 100644 index a5a80882..00000000 --- a/tests/res/cli/schema/10/e/20.json +++ /dev/null @@ -1 +0,0 @@ -{"exit_code_matches": false} diff --git a/tests/res/cli/schema/10/s/20.json 
b/tests/res/cli/schema/10/s/20.json deleted file mode 100644 index a1634127..00000000 --- a/tests/res/cli/schema/10/s/20.json +++ /dev/null @@ -1 +0,0 @@ -{"type": "object", "properties": {"name": {"type": "integer"}, "a": {"type": "string"}, "b": {"type": "object", "properties": {"b": {"type": "array", "items": {"type": "integer"}}, "c": {"type": "string"}}}}} diff --git a/tests/res/cli/schema_errors/10/README.md b/tests/res/cli/schema_errors/10/README.md deleted file mode 100644 index 47d3d0af..00000000 --- a/tests/res/cli/schema_errors/10/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Test cases for anyconfig\_cli - -- 10.json + o/10.json: An JSON input to load with "--validate" option without "--schema" option should cause an error. diff --git a/tests/res/cli/show/10/README.md b/tests/res/cli/show/10/README.md deleted file mode 100644 index 52549389..00000000 --- a/tests/res/cli/show/10/README.md +++ /dev/null @@ -1,7 +0,0 @@ -# Test cases for anyconfig\_cli - -- 10.json + o/10.json: no args, a '-h' option, help messages should be printed (see e/10.json) -- 20.json + o/20.json: no args, a '--help' option, help messages should be printed (see e/20.json) -- 30.json + o/30.json: no args, a '-L' option, parsers should be printed (see e/30.json) -- 40.json + o/40.json: no args, a '--list' option, parsers should be printed (see e/40.json) - diff --git a/tests/res/cli/show/20/README.md b/tests/res/cli/show/20/README.md deleted file mode 100644 index 486338b0..00000000 --- a/tests/res/cli/show/20/README.md +++ /dev/null @@ -1,4 +0,0 @@ -# Test cases for anyconfig\_cli - -- 10.json + o/10.json: no args with '-E' (load config from environment variables) option and a '-O josn' (JSON output type) option; it should print environment variables as a dict in JSON format -- 20.json + o/20.json: no args with '--env' (load config from environment variables) option and a '-O josn' (JSON output type) option; it should print environment variables as a dict in JSON format diff --git 
a/tests/res/cli/show_version/10/README.md b/tests/res/cli/show_version/10/README.md deleted file mode 100644 index f19e67da..00000000 --- a/tests/res/cli/show_version/10/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Test cases for anyconfig\_cli - -- 10.json + o/10.json: no args, a '--version' option diff --git a/tests/res/cli/single_input/10/README.md b/tests/res/cli/single_input/10/README.md deleted file mode 100644 index 79687fb6..00000000 --- a/tests/res/cli/single_input/10/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# Test cases for anyconfig\_cli - -- 10.json + o/10.json: an input with known file type, without any options to load, and dump to a JSON file without any output options -- 20.conf + o/20.json: an input with unknown file type and an "-I json" option to load, and dump to a JSON file without any output options -- 30.json + o/30.json: an input with unknown file type and an "-I json" option to load, and dump to a JSON file without '.json' file extension with "-O json" option diff --git a/tests/res/cli/single_input/30/README.md b/tests/res/cli/single_input/30/README.md deleted file mode 100644 index 921e64d0..00000000 --- a/tests/res/cli/single_input/30/README.md +++ /dev/null @@ -1,4 +0,0 @@ -# Test cases for anyconfig\_cli - -- 10.json + o/10.json: an input with known file type with "--set a=2" option to load, and dump to the modified JSON file without any output options -- 20.conf + o/20.json: an input with known file type with "--set b.c=ccc" option to load, and dump to the modified JSON file without any output options diff --git a/tests/res/cli/single_input/40/README.md b/tests/res/cli/single_input/40/README.md deleted file mode 100644 index f7306bf0..00000000 --- a/tests/res/cli/single_input/40/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Test cases for anyconfig\_cli - -- 10.json + o/10.json: a JSON input with "--args ..." 
option to load, and dump to the modified JSON file without any output options diff --git a/tests/res/cli/single_input/50/README.md b/tests/res/cli/single_input/50/README.md deleted file mode 100644 index ee3052b9..00000000 --- a/tests/res/cli/single_input/50/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Test cases for anyconfig\_cli - -- 10.json + o/10.json: a JSON input with "--gen-schema" option to load, and dump to a JSON schema file without any output options diff --git a/tests/res/cli/single_input/50/r/10.json b/tests/res/cli/single_input/50/r/10.json deleted file mode 100644 index afd319ce..00000000 --- a/tests/res/cli/single_input/50/r/10.json +++ /dev/null @@ -1 +0,0 @@ -{"type": "object", "properties": {"name": {"type": "string"}, "a": {"type": "integer"}, "b": {"type": "object", "properties": {"b": {"type": "array", "items": {"type": "integer"}}, "c": {"type": "string"}}}}} \ No newline at end of file diff --git a/tests/res/cli/single_input_to_yaml_output/10/README.md b/tests/res/cli/single_input_to_yaml_output/10/README.md deleted file mode 100644 index 80fe4e8b..00000000 --- a/tests/res/cli/single_input_to_yaml_output/10/README.md +++ /dev/null @@ -1,6 +0,0 @@ -# Test cases for anyconfig\_cli - -- 10.json + o/10.json: a JSON input without any options to load, and dump to a YAML file without any options -- 20.json + o/20.json: a JSON input without any options to load, and dump to a YAML file with '-O yaml' option -- 30.yml + o/30.json: a YAML input without any options to load, and dump to a JSON file without any options -- 40.yml + o/40.json: a YAML input with '-I yaml' option to load, and dump to a JSON file without any options diff --git a/tests/res/cli/single_input_to_yaml_output/10/o/10.json b/tests/res/cli/single_input_to_yaml_output/10/o/10.json deleted file mode 100644 index fe51488c..00000000 --- a/tests/res/cli/single_input_to_yaml_output/10/o/10.json +++ /dev/null @@ -1 +0,0 @@ -[] diff --git a/tests/res/cli/template/10/10.json 
b/tests/res/cli/template/10/10.json deleted file mode 120000 index fba3b9e4..00000000 --- a/tests/res/cli/template/10/10.json +++ /dev/null @@ -1 +0,0 @@ -../../single_input/10/10.json \ No newline at end of file diff --git a/tests/res/common/00_primitives.json b/tests/res/common/00_primitives.json deleted file mode 100644 index 573541ac..00000000 --- a/tests/res/common/00_primitives.json +++ /dev/null @@ -1 +0,0 @@ -0 diff --git a/tests/res/common/10.json b/tests/res/common/10.json deleted file mode 100644 index 93c1826f..00000000 --- a/tests/res/common/10.json +++ /dev/null @@ -1 +0,0 @@ -{a: 1} diff --git a/tests/res/common/10_primitives.json b/tests/res/common/10_primitives.json deleted file mode 100644 index 563094e9..00000000 --- a/tests/res/common/10_primitives.json +++ /dev/null @@ -1,2 +0,0 @@ -0.1 -foo diff --git a/tests/res/dicts/get/20/q/80.py b/tests/res/dicts/get/20/q/80.py deleted file mode 100644 index aee900e9..00000000 --- a/tests/res/dicts/get/20/q/80.py +++ /dev/null @@ -1 +0,0 @@ -'/i\j' diff --git a/tests/res/dicts/merge/30/o/10.json b/tests/res/dicts/merge/30/o/10.json deleted file mode 100644 index 0967ef42..00000000 --- a/tests/res/dicts/merge/30/o/10.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tests/res/dump/basics/10/00.json b/tests/res/dump/basics/10/00.json deleted file mode 120000 index b8e5136c..00000000 --- a/tests/res/dump/basics/10/00.json +++ /dev/null @@ -1 +0,0 @@ -../../../base/basics/10/00.json \ No newline at end of file diff --git a/tests/res/dump/basics/10/10.json b/tests/res/dump/basics/10/10.json deleted file mode 120000 index b8c4d11c..00000000 --- a/tests/res/dump/basics/10/10.json +++ /dev/null @@ -1 +0,0 @@ -../../../base/basics/10/10.json \ No newline at end of file diff --git a/tests/res/dump/basics/10/20.json b/tests/res/dump/basics/10/20.json deleted file mode 120000 index 374a2983..00000000 --- a/tests/res/dump/basics/10/20.json +++ /dev/null @@ -1 +0,0 @@ -../../../base/basics/10/20.json \ No newline at 
end of file diff --git a/tests/res/dump/basics/20/00.json b/tests/res/dump/basics/20/00.json deleted file mode 120000 index b8e5136c..00000000 --- a/tests/res/dump/basics/20/00.json +++ /dev/null @@ -1 +0,0 @@ -../../../base/basics/10/00.json \ No newline at end of file diff --git a/tests/res/dump/basics/20/10.json b/tests/res/dump/basics/20/10.json deleted file mode 120000 index b8c4d11c..00000000 --- a/tests/res/dump/basics/20/10.json +++ /dev/null @@ -1 +0,0 @@ -../../../base/basics/10/10.json \ No newline at end of file diff --git a/tests/res/dump/basics/20/20.json b/tests/res/dump/basics/20/20.json deleted file mode 120000 index 374a2983..00000000 --- a/tests/res/dump/basics/20/20.json +++ /dev/null @@ -1 +0,0 @@ -../../../base/basics/10/20.json \ No newline at end of file diff --git a/tests/res/dump/basics/20/o/00.json b/tests/res/dump/basics/20/o/00.json deleted file mode 100644 index 3726331a..00000000 --- a/tests/res/dump/basics/20/o/00.json +++ /dev/null @@ -1 +0,0 @@ -{"indent": 2} diff --git a/tests/res/dumps/basics/10/00.json b/tests/res/dumps/basics/10/00.json deleted file mode 120000 index b8e5136c..00000000 --- a/tests/res/dumps/basics/10/00.json +++ /dev/null @@ -1 +0,0 @@ -../../../base/basics/10/00.json \ No newline at end of file diff --git a/tests/res/dumps/basics/10/10.json b/tests/res/dumps/basics/10/10.json deleted file mode 120000 index b8c4d11c..00000000 --- a/tests/res/dumps/basics/10/10.json +++ /dev/null @@ -1 +0,0 @@ -../../../base/basics/10/10.json \ No newline at end of file diff --git a/tests/res/dumps/basics/10/20.json b/tests/res/dumps/basics/10/20.json deleted file mode 120000 index 374a2983..00000000 --- a/tests/res/dumps/basics/10/20.json +++ /dev/null @@ -1 +0,0 @@ -../../../base/basics/10/20.json \ No newline at end of file diff --git a/tests/res/dumps/basics/10/e/00.txt b/tests/res/dumps/basics/10/e/00.txt deleted file mode 120000 index a5f954a8..00000000 --- a/tests/res/dumps/basics/10/e/00.txt +++ /dev/null @@ -1 +0,0 @@ 
-../00.json \ No newline at end of file diff --git a/tests/res/dumps/basics/10/e/10.txt b/tests/res/dumps/basics/10/e/10.txt deleted file mode 120000 index 0cdec646..00000000 --- a/tests/res/dumps/basics/10/e/10.txt +++ /dev/null @@ -1 +0,0 @@ -../10.json \ No newline at end of file diff --git a/tests/res/dumps/basics/10/e/20.txt b/tests/res/dumps/basics/10/e/20.txt deleted file mode 120000 index b1858408..00000000 --- a/tests/res/dumps/basics/10/e/20.txt +++ /dev/null @@ -1 +0,0 @@ -../20.json \ No newline at end of file diff --git a/tests/res/dumps/basics/20/00.json b/tests/res/dumps/basics/20/00.json deleted file mode 120000 index b8e5136c..00000000 --- a/tests/res/dumps/basics/20/00.json +++ /dev/null @@ -1 +0,0 @@ -../../../base/basics/10/00.json \ No newline at end of file diff --git a/tests/res/dumps/basics/20/10.json b/tests/res/dumps/basics/20/10.json deleted file mode 120000 index b8c4d11c..00000000 --- a/tests/res/dumps/basics/20/10.json +++ /dev/null @@ -1 +0,0 @@ -../../../base/basics/10/10.json \ No newline at end of file diff --git a/tests/res/dumps/basics/20/20.json b/tests/res/dumps/basics/20/20.json deleted file mode 120000 index 374a2983..00000000 --- a/tests/res/dumps/basics/20/20.json +++ /dev/null @@ -1 +0,0 @@ -../../../base/basics/10/20.json \ No newline at end of file diff --git a/tests/res/dumps/basics/20/e/00.txt b/tests/res/dumps/basics/20/e/00.txt deleted file mode 120000 index 26187011..00000000 --- a/tests/res/dumps/basics/20/e/00.txt +++ /dev/null @@ -1 +0,0 @@ -../../../../dump/basics/20/e/00.txt \ No newline at end of file diff --git a/tests/res/dumps/basics/20/e/10.txt b/tests/res/dumps/basics/20/e/10.txt deleted file mode 120000 index d6475418..00000000 --- a/tests/res/dumps/basics/20/e/10.txt +++ /dev/null @@ -1 +0,0 @@ -../../../../dump/basics/20/e/10.txt \ No newline at end of file diff --git a/tests/res/dumps/basics/20/e/20.txt b/tests/res/dumps/basics/20/e/20.txt deleted file mode 120000 index 3337beb2..00000000 --- 
a/tests/res/dumps/basics/20/e/20.txt +++ /dev/null @@ -1 +0,0 @@ -../../../../dump/basics/20/e/20.txt \ No newline at end of file diff --git a/tests/res/json/primitives/e/20.txt b/tests/res/json/primitives/e/20.txt deleted file mode 100644 index 0ca95142..00000000 --- a/tests/res/json/primitives/e/20.txt +++ /dev/null @@ -1 +0,0 @@ -True diff --git a/tests/res/json/query/00_00.json b/tests/res/json/query/00_00.json deleted file mode 120000 index d7f8c47d..00000000 --- a/tests/res/json/query/00_00.json +++ /dev/null @@ -1 +0,0 @@ -../basic/00.json \ No newline at end of file diff --git a/tests/res/json/query/00_10.json b/tests/res/json/query/00_10.json deleted file mode 120000 index d7f8c47d..00000000 --- a/tests/res/json/query/00_10.json +++ /dev/null @@ -1 +0,0 @@ -../basic/00.json \ No newline at end of file diff --git a/tests/res/json/query/10_00.json b/tests/res/json/query/10_00.json deleted file mode 120000 index 4ff165d3..00000000 --- a/tests/res/json/query/10_00.json +++ /dev/null @@ -1 +0,0 @@ -../basic/10.json \ No newline at end of file diff --git a/tests/res/json/query/10_10.json b/tests/res/json/query/10_10.json deleted file mode 120000 index 4ff165d3..00000000 --- a/tests/res/json/query/10_10.json +++ /dev/null @@ -1 +0,0 @@ -../basic/10.json \ No newline at end of file diff --git a/tests/res/json/query/10_20.json b/tests/res/json/query/10_20.json deleted file mode 120000 index 4ff165d3..00000000 --- a/tests/res/json/query/10_20.json +++ /dev/null @@ -1 +0,0 @@ -../basic/10.json \ No newline at end of file diff --git a/tests/res/json/query/10_30.json b/tests/res/json/query/10_30.json deleted file mode 120000 index 4ff165d3..00000000 --- a/tests/res/json/query/10_30.json +++ /dev/null @@ -1 +0,0 @@ -../basic/10.json \ No newline at end of file diff --git a/tests/res/json/query/10_40.json b/tests/res/json/query/10_40.json deleted file mode 120000 index 4ff165d3..00000000 --- a/tests/res/json/query/10_40.json +++ /dev/null @@ -1 +0,0 @@ -../basic/10.json 
\ No newline at end of file diff --git a/tests/res/json/query/20_00.json b/tests/res/json/query/20_00.json deleted file mode 120000 index e7ad7c92..00000000 --- a/tests/res/json/query/20_00.json +++ /dev/null @@ -1 +0,0 @@ -../basic/20.json \ No newline at end of file diff --git a/tests/res/json/query/20_10.json b/tests/res/json/query/20_10.json deleted file mode 120000 index e7ad7c92..00000000 --- a/tests/res/json/query/20_10.json +++ /dev/null @@ -1 +0,0 @@ -../basic/20.json \ No newline at end of file diff --git a/tests/res/json/query/20_20.json b/tests/res/json/query/20_20.json deleted file mode 120000 index e7ad7c92..00000000 --- a/tests/res/json/query/20_20.json +++ /dev/null @@ -1 +0,0 @@ -../basic/20.json \ No newline at end of file diff --git a/tests/res/json/template/10.json b/tests/res/json/template/10.json deleted file mode 100644 index 727d14d4..00000000 --- a/tests/res/json/template/10.json +++ /dev/null @@ -1 +0,0 @@ -{"a": 1, "b": {"b": [{{ b.b | join(',') }}], "c": "{{ b.c }}"}} diff --git a/tests/res/json/template/20.json b/tests/res/json/template/20.json deleted file mode 100644 index e417e5eb..00000000 --- a/tests/res/json/template/20.json +++ /dev/null @@ -1 +0,0 @@ -{"a": 1, "b": {"b": [{{ b.b | join(',') }}], "c": "{{ b.c }}", "d": true}, "e": null, "name": "aaa"} diff --git a/tests/res/json/template/ctx/00.json b/tests/res/json/template/ctx/00.json deleted file mode 120000 index d76dca3d..00000000 --- a/tests/res/json/template/ctx/00.json +++ /dev/null @@ -1 +0,0 @@ -../../basic/00.json \ No newline at end of file diff --git a/tests/res/json/template/ctx/10.json b/tests/res/json/template/ctx/10.json deleted file mode 120000 index 6b6103f0..00000000 --- a/tests/res/json/template/ctx/10.json +++ /dev/null @@ -1 +0,0 @@ -../../basic/10.json \ No newline at end of file diff --git a/tests/res/json/template/ctx/20.json b/tests/res/json/template/ctx/20.json deleted file mode 120000 index 3cebc88d..00000000 --- a/tests/res/json/template/ctx/20.json 
+++ /dev/null @@ -1 +0,0 @@ -../../basic/20.json \ No newline at end of file diff --git a/tests/res/json/template/e/00.json b/tests/res/json/template/e/00.json deleted file mode 120000 index c7bc0ee3..00000000 --- a/tests/res/json/template/e/00.json +++ /dev/null @@ -1 +0,0 @@ -../ctx/00.json \ No newline at end of file diff --git a/tests/res/json/template/e/10.json b/tests/res/json/template/e/10.json deleted file mode 100644 index 7e44a95c..00000000 --- a/tests/res/json/template/e/10.json +++ /dev/null @@ -1 +0,0 @@ -{"a": 1, "b": {"b": [1, 2], "c": "C"}} diff --git a/tests/res/json/template/e/20.json b/tests/res/json/template/e/20.json deleted file mode 120000 index 5bbb0070..00000000 --- a/tests/res/json/template/e/20.json +++ /dev/null @@ -1 +0,0 @@ -../ctx/20.json \ No newline at end of file diff --git a/tests/res/loads/basics/10/00.txt b/tests/res/loads/basics/10/00.txt deleted file mode 120000 index b8e5136c..00000000 --- a/tests/res/loads/basics/10/00.txt +++ /dev/null @@ -1 +0,0 @@ -../../../base/basics/10/00.json \ No newline at end of file diff --git a/tests/res/loads/basics/10/10.txt b/tests/res/loads/basics/10/10.txt deleted file mode 120000 index b8c4d11c..00000000 --- a/tests/res/loads/basics/10/10.txt +++ /dev/null @@ -1 +0,0 @@ -../../../base/basics/10/10.json \ No newline at end of file diff --git a/tests/res/loads/basics/10/20.txt b/tests/res/loads/basics/10/20.txt deleted file mode 120000 index 374a2983..00000000 --- a/tests/res/loads/basics/10/20.txt +++ /dev/null @@ -1 +0,0 @@ -../../../base/basics/10/20.json \ No newline at end of file diff --git a/tests/res/loads/query/10/00_00.txt b/tests/res/loads/query/10/00_00.txt deleted file mode 120000 index 50a2bc6c..00000000 --- a/tests/res/loads/query/10/00_00.txt +++ /dev/null @@ -1 +0,0 @@ -../../../json/query/00_00.json \ No newline at end of file diff --git a/tests/res/loads/query/10/00_10.txt b/tests/res/loads/query/10/00_10.txt deleted file mode 120000 index 55252a92..00000000 --- 
a/tests/res/loads/query/10/00_10.txt +++ /dev/null @@ -1 +0,0 @@ -../../../json/query/00_10.json \ No newline at end of file diff --git a/tests/res/loads/query/10/10_00.txt b/tests/res/loads/query/10/10_00.txt deleted file mode 120000 index 4daf3acc..00000000 --- a/tests/res/loads/query/10/10_00.txt +++ /dev/null @@ -1 +0,0 @@ -../../../json/query/10_00.json \ No newline at end of file diff --git a/tests/res/loads/query/10/10_10.txt b/tests/res/loads/query/10/10_10.txt deleted file mode 120000 index 34f9433b..00000000 --- a/tests/res/loads/query/10/10_10.txt +++ /dev/null @@ -1 +0,0 @@ -../../../json/query/10_10.json \ No newline at end of file diff --git a/tests/res/loads/query/10/10_20.txt b/tests/res/loads/query/10/10_20.txt deleted file mode 120000 index f142b7cd..00000000 --- a/tests/res/loads/query/10/10_20.txt +++ /dev/null @@ -1 +0,0 @@ -../../../json/query/10_20.json \ No newline at end of file diff --git a/tests/res/loads/query/10/10_30.txt b/tests/res/loads/query/10/10_30.txt deleted file mode 120000 index bbbaf3de..00000000 --- a/tests/res/loads/query/10/10_30.txt +++ /dev/null @@ -1 +0,0 @@ -../../../json/query/10_30.json \ No newline at end of file diff --git a/tests/res/loads/query/10/10_40.txt b/tests/res/loads/query/10/10_40.txt deleted file mode 120000 index e9800dff..00000000 --- a/tests/res/loads/query/10/10_40.txt +++ /dev/null @@ -1 +0,0 @@ -../../../json/query/10_40.json \ No newline at end of file diff --git a/tests/res/loads/query/10/20_00.txt b/tests/res/loads/query/10/20_00.txt deleted file mode 120000 index 3bc60a8d..00000000 --- a/tests/res/loads/query/10/20_00.txt +++ /dev/null @@ -1 +0,0 @@ -../../../json/query/20_00.json \ No newline at end of file diff --git a/tests/res/loads/query/10/20_10.txt b/tests/res/loads/query/10/20_10.txt deleted file mode 120000 index d38cc55b..00000000 --- a/tests/res/loads/query/10/20_10.txt +++ /dev/null @@ -1 +0,0 @@ -../../../json/query/20_10.json \ No newline at end of file diff --git 
a/tests/res/loads/query/10/20_20.txt b/tests/res/loads/query/10/20_20.txt deleted file mode 120000 index 45716ff5..00000000 --- a/tests/res/loads/query/10/20_20.txt +++ /dev/null @@ -1 +0,0 @@ -../../../json/query/20_20.json \ No newline at end of file diff --git a/tests/res/loads/query/10/e b/tests/res/loads/query/10/e deleted file mode 120000 index d1af22eb..00000000 --- a/tests/res/loads/query/10/e +++ /dev/null @@ -1 +0,0 @@ -../../../json/query/e \ No newline at end of file diff --git a/tests/res/loads/query/10/q b/tests/res/loads/query/10/q deleted file mode 120000 index 97115cee..00000000 --- a/tests/res/loads/query/10/q +++ /dev/null @@ -1 +0,0 @@ -../../../json/query/q \ No newline at end of file diff --git a/tests/res/loads/schema/10/00.txt b/tests/res/loads/schema/10/00.txt deleted file mode 120000 index b8e5136c..00000000 --- a/tests/res/loads/schema/10/00.txt +++ /dev/null @@ -1 +0,0 @@ -../../../base/basics/10/00.json \ No newline at end of file diff --git a/tests/res/loads/schema/10/10.txt b/tests/res/loads/schema/10/10.txt deleted file mode 120000 index b8c4d11c..00000000 --- a/tests/res/loads/schema/10/10.txt +++ /dev/null @@ -1 +0,0 @@ -../../../base/basics/10/10.json \ No newline at end of file diff --git a/tests/res/loads/schema/10/20.txt b/tests/res/loads/schema/10/20.txt deleted file mode 120000 index 374a2983..00000000 --- a/tests/res/loads/schema/10/20.txt +++ /dev/null @@ -1 +0,0 @@ -../../../base/basics/10/20.json \ No newline at end of file diff --git a/tests/res/loads/schema/10/s/00.txt b/tests/res/loads/schema/10/s/00.txt deleted file mode 120000 index a5f74b0a..00000000 --- a/tests/res/loads/schema/10/s/00.txt +++ /dev/null @@ -1 +0,0 @@ -../../../../base/basics/10/s/00.json \ No newline at end of file diff --git a/tests/res/loads/schema/10/s/10.txt b/tests/res/loads/schema/10/s/10.txt deleted file mode 120000 index 22496a9f..00000000 --- a/tests/res/loads/schema/10/s/10.txt +++ /dev/null @@ -1 +0,0 @@ 
-../../../../base/basics/10/s/10.json \ No newline at end of file diff --git a/tests/res/loads/schema/10/s/20.txt b/tests/res/loads/schema/10/s/20.txt deleted file mode 120000 index 74c954b1..00000000 --- a/tests/res/loads/schema/10/s/20.txt +++ /dev/null @@ -1 +0,0 @@ -../../../../base/basics/10/s/20.json \ No newline at end of file diff --git a/tests/res/loads/template/10/00.txt b/tests/res/loads/template/10/00.txt deleted file mode 120000 index ca42c775..00000000 --- a/tests/res/loads/template/10/00.txt +++ /dev/null @@ -1 +0,0 @@ -../../../json/template/00.json \ No newline at end of file diff --git a/tests/res/loads/template/10/10.txt b/tests/res/loads/template/10/10.txt deleted file mode 120000 index 9ea6be1d..00000000 --- a/tests/res/loads/template/10/10.txt +++ /dev/null @@ -1 +0,0 @@ -../../../json/template/10.json \ No newline at end of file diff --git a/tests/res/loads/template/10/20.txt b/tests/res/loads/template/10/20.txt deleted file mode 120000 index c14c6cc1..00000000 --- a/tests/res/loads/template/10/20.txt +++ /dev/null @@ -1 +0,0 @@ -../../../json/template/20.json \ No newline at end of file diff --git a/tests/res/loads/template/10/c b/tests/res/loads/template/10/c deleted file mode 120000 index cb440095..00000000 --- a/tests/res/loads/template/10/c +++ /dev/null @@ -1 +0,0 @@ -../../../base/basics/10/c \ No newline at end of file diff --git a/tests/res/loads/template/10/e b/tests/res/loads/template/10/e deleted file mode 120000 index cba86d3d..00000000 --- a/tests/res/loads/template/10/e +++ /dev/null @@ -1 +0,0 @@ -../../../json/template/e \ No newline at end of file diff --git a/tests/res/multi_load/basics/00/s/00.json b/tests/res/multi_load/basics/00/s/00.json deleted file mode 120000 index 74c954b1..00000000 --- a/tests/res/multi_load/basics/00/s/00.json +++ /dev/null @@ -1 +0,0 @@ -../../../../base/basics/10/s/20.json \ No newline at end of file diff --git a/tests/res/multi_load/query/00_00/e/exp.json 
b/tests/res/multi_load/query/00_00/e/exp.json deleted file mode 120000 index 9cbbedfa..00000000 --- a/tests/res/multi_load/query/00_00/e/exp.json +++ /dev/null @@ -1 +0,0 @@ -../../../../json/query/e/00_00.json \ No newline at end of file diff --git a/tests/res/multi_load/query/00_00/q/q.txt b/tests/res/multi_load/query/00_00/q/q.txt deleted file mode 120000 index b19e4dc2..00000000 --- a/tests/res/multi_load/query/00_00/q/q.txt +++ /dev/null @@ -1 +0,0 @@ -../../../../json/query/q/00_00.txt \ No newline at end of file diff --git a/tests/res/multi_load/query/00_10/e/exp.json b/tests/res/multi_load/query/00_10/e/exp.json deleted file mode 120000 index ddd4fe1a..00000000 --- a/tests/res/multi_load/query/00_10/e/exp.json +++ /dev/null @@ -1 +0,0 @@ -../../../../json/query/e/00_10.json \ No newline at end of file diff --git a/tests/res/multi_load/query/00_10/q/q.txt b/tests/res/multi_load/query/00_10/q/q.txt deleted file mode 120000 index d6b83fb2..00000000 --- a/tests/res/multi_load/query/00_10/q/q.txt +++ /dev/null @@ -1 +0,0 @@ -../../../../json/query/q/00_10.txt \ No newline at end of file diff --git a/tests/res/multi_load/query/10_10/e/exp.json b/tests/res/multi_load/query/10_10/e/exp.json deleted file mode 120000 index 7fe51ec1..00000000 --- a/tests/res/multi_load/query/10_10/e/exp.json +++ /dev/null @@ -1 +0,0 @@ -../../../../json/query/e/10_10.json \ No newline at end of file diff --git a/tests/res/multi_load/query/10_10/q/q.txt b/tests/res/multi_load/query/10_10/q/q.txt deleted file mode 120000 index f4e6f0c8..00000000 --- a/tests/res/multi_load/query/10_10/q/q.txt +++ /dev/null @@ -1 +0,0 @@ -../../../../json/query/q/10_10.txt \ No newline at end of file diff --git a/tests/res/multi_load/query/10_20/q/q.txt b/tests/res/multi_load/query/10_20/q/q.txt deleted file mode 120000 index 90b4cabd..00000000 --- a/tests/res/multi_load/query/10_20/q/q.txt +++ /dev/null @@ -1 +0,0 @@ -../../../../json/query/q/10_20.txt \ No newline at end of file diff --git 
a/tests/res/multi_load/query/10_30/e/exp.json b/tests/res/multi_load/query/10_30/e/exp.json deleted file mode 120000 index 343f4da9..00000000 --- a/tests/res/multi_load/query/10_30/e/exp.json +++ /dev/null @@ -1 +0,0 @@ -../../../../json/query/e/10_30.json \ No newline at end of file diff --git a/tests/res/multi_load/query/10_30/q/q.txt b/tests/res/multi_load/query/10_30/q/q.txt deleted file mode 120000 index f8fb052f..00000000 --- a/tests/res/multi_load/query/10_30/q/q.txt +++ /dev/null @@ -1 +0,0 @@ -../../../../json/query/q/10_30.txt \ No newline at end of file diff --git a/tests/res/multi_load/query/10_40/e/exp.json b/tests/res/multi_load/query/10_40/e/exp.json deleted file mode 120000 index 3ba11484..00000000 --- a/tests/res/multi_load/query/10_40/e/exp.json +++ /dev/null @@ -1 +0,0 @@ -../../../../json/query/e/10_40.json \ No newline at end of file diff --git a/tests/res/multi_load/query/10_40/q/q.txt b/tests/res/multi_load/query/10_40/q/q.txt deleted file mode 120000 index ecce312a..00000000 --- a/tests/res/multi_load/query/10_40/q/q.txt +++ /dev/null @@ -1 +0,0 @@ -../../../../json/query/q/10_40.txt \ No newline at end of file diff --git a/tests/res/multi_load/query/10_50/e/exp.json b/tests/res/multi_load/query/10_50/e/exp.json deleted file mode 120000 index 7cbabf97..00000000 --- a/tests/res/multi_load/query/10_50/e/exp.json +++ /dev/null @@ -1 +0,0 @@ -../../../../json/query/e/10_50.json \ No newline at end of file diff --git a/tests/res/multi_load/query/10_50/q/q.txt b/tests/res/multi_load/query/10_50/q/q.txt deleted file mode 120000 index 63dc4377..00000000 --- a/tests/res/multi_load/query/10_50/q/q.txt +++ /dev/null @@ -1 +0,0 @@ -../../../../json/query/q/10_50.txt \ No newline at end of file diff --git a/tests/res/multi_load/query/20_00/e/exp.json b/tests/res/multi_load/query/20_00/e/exp.json deleted file mode 120000 index d83df4e7..00000000 --- a/tests/res/multi_load/query/20_00/e/exp.json +++ /dev/null @@ -1 +0,0 @@ 
-../../../../json/query/e/20_00.json \ No newline at end of file diff --git a/tests/res/multi_load/query/20_00/q/q.txt b/tests/res/multi_load/query/20_00/q/q.txt deleted file mode 120000 index 57a2facd..00000000 --- a/tests/res/multi_load/query/20_00/q/q.txt +++ /dev/null @@ -1 +0,0 @@ -../../../../json/query/q/20_00.txt \ No newline at end of file diff --git a/tests/res/multi_load/query/20_10/e/exp.json b/tests/res/multi_load/query/20_10/e/exp.json deleted file mode 120000 index ae7149d0..00000000 --- a/tests/res/multi_load/query/20_10/e/exp.json +++ /dev/null @@ -1 +0,0 @@ -../../../../json/query/e/20_10.json \ No newline at end of file diff --git a/tests/res/multi_load/query/20_10/q/q.txt b/tests/res/multi_load/query/20_10/q/q.txt deleted file mode 120000 index b413e52e..00000000 --- a/tests/res/multi_load/query/20_10/q/q.txt +++ /dev/null @@ -1 +0,0 @@ -../../../../json/query/q/20_10.txt \ No newline at end of file diff --git a/tests/res/multi_load/query/20_20/e/exp.json b/tests/res/multi_load/query/20_20/e/exp.json deleted file mode 120000 index 0abb6ad0..00000000 --- a/tests/res/multi_load/query/20_20/e/exp.json +++ /dev/null @@ -1 +0,0 @@ -../../../../json/query/e/20_20.json \ No newline at end of file diff --git a/tests/res/multi_load/query/20_20/q/q.txt b/tests/res/multi_load/query/20_20/q/q.txt deleted file mode 120000 index 30d01047..00000000 --- a/tests/res/multi_load/query/20_20/q/q.txt +++ /dev/null @@ -1 +0,0 @@ -../../../../json/query/q/20_20.txt \ No newline at end of file diff --git a/tests/res/multi_load/template/10/00.json b/tests/res/multi_load/template/10/00.json deleted file mode 100644 index c0088f0f..00000000 --- a/tests/res/multi_load/template/10/00.json +++ /dev/null @@ -1 +0,0 @@ -{"a": {{ a }}} diff --git a/tests/res/multi_load/template/10/e/exp.json b/tests/res/multi_load/template/10/e/exp.json deleted file mode 100644 index 8bf358af..00000000 --- a/tests/res/multi_load/template/10/e/exp.json +++ /dev/null @@ -1 +0,0 @@ -{"a": 1, "b": 
{"b": [1, 2], "c": "C", "d": true}, "e": null, "name": "aaa"} diff --git a/tests/res/open/basics/10 b/tests/res/open/basics/10 deleted file mode 120000 index 9aabe51d..00000000 --- a/tests/res/open/basics/10 +++ /dev/null @@ -1 +0,0 @@ -../../base/basics/10 \ No newline at end of file diff --git a/tests/res/parser/list/10/o/00.json b/tests/res/parser/list/10/o/00.json deleted file mode 100644 index 0967ef42..00000000 --- a/tests/res/parser/list/10/o/00.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tests/res/single_load/multi_types/20/10.sh b/tests/res/single_load/multi_types/20/10.sh deleted file mode 100644 index 7fb3d32e..00000000 --- a/tests/res/single_load/multi_types/20/10.sh +++ /dev/null @@ -1,5 +0,0 @@ -a=0 -b='bbb' # a comment -c="ccc" # an another comment -export d='ddd' ## double comment - export e="eee" ### tripple comment diff --git a/tests/res/single_load/multi_types/20/e/10.json b/tests/res/single_load/multi_types/20/e/10.json deleted file mode 100644 index bc2aea07..00000000 --- a/tests/res/single_load/multi_types/20/e/10.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "a": "0", - "b": "bbb", - "c": "ccc", - "d": "ddd", - "e": "eee" -} - diff --git a/tests/res/single_load/primitives/10/40.json b/tests/res/single_load/primitives/10/40.json deleted file mode 100644 index 44e2ace7..00000000 --- a/tests/res/single_load/primitives/10/40.json +++ /dev/null @@ -1 +0,0 @@ -[1, 2] diff --git a/tests/res/single_load/query/10/e/10_00.json b/tests/res/single_load/query/10/e/10_00.json deleted file mode 120000 index 4060f79c..00000000 --- a/tests/res/single_load/query/10/e/10_00.json +++ /dev/null @@ -1 +0,0 @@ -00_00.json \ No newline at end of file diff --git a/tests/res/single_load/query/10/e/10_10.json b/tests/res/single_load/query/10/e/10_10.json deleted file mode 120000 index 5d400961..00000000 --- a/tests/res/single_load/query/10/e/10_10.json +++ /dev/null @@ -1 +0,0 @@ -00_10.json \ No newline at end of file diff --git 
a/tests/res/single_load/query/10/e/10_20.json b/tests/res/single_load/query/10/e/10_20.json deleted file mode 100644 index 5d5dd706..00000000 --- a/tests/res/single_load/query/10/e/10_20.json +++ /dev/null @@ -1 +0,0 @@ - {"b": [1, 2], "c": "C"} diff --git a/tests/res/single_load/query/10/e/10_30.json b/tests/res/single_load/query/10/e/10_30.json deleted file mode 100644 index 44e2ace7..00000000 --- a/tests/res/single_load/query/10/e/10_30.json +++ /dev/null @@ -1 +0,0 @@ -[1, 2] diff --git a/tests/res/single_load/query/10/e/10_50.json b/tests/res/single_load/query/10/e/10_50.json deleted file mode 100644 index d00491fd..00000000 --- a/tests/res/single_load/query/10/e/10_50.json +++ /dev/null @@ -1 +0,0 @@ -1 diff --git a/tests/res/single_load/query/10/e/20_10.json b/tests/res/single_load/query/10/e/20_10.json deleted file mode 100644 index 27ba77dd..00000000 --- a/tests/res/single_load/query/10/e/20_10.json +++ /dev/null @@ -1 +0,0 @@ -true diff --git a/tests/res/single_load/query/10/q/00_10.txt b/tests/res/single_load/query/10/q/00_10.txt deleted file mode 100644 index 231f150c..00000000 --- a/tests/res/single_load/query/10/q/00_10.txt +++ /dev/null @@ -1 +0,0 @@ -"a" diff --git a/tests/res/single_load/query/10/q/10_00.txt b/tests/res/single_load/query/10/q/10_00.txt deleted file mode 120000 index de123e01..00000000 --- a/tests/res/single_load/query/10/q/10_00.txt +++ /dev/null @@ -1 +0,0 @@ -00_00.txt \ No newline at end of file diff --git a/tests/res/single_load/query/10/q/10_10.txt b/tests/res/single_load/query/10/q/10_10.txt deleted file mode 120000 index 5d848c8d..00000000 --- a/tests/res/single_load/query/10/q/10_10.txt +++ /dev/null @@ -1 +0,0 @@ -00_10.txt \ No newline at end of file diff --git a/tests/res/single_load/query/10/q/20_00.txt b/tests/res/single_load/query/10/q/20_00.txt deleted file mode 120000 index de123e01..00000000 --- a/tests/res/single_load/query/10/q/20_00.txt +++ /dev/null @@ -1 +0,0 @@ -00_00.txt \ No newline at end of file diff 
--git a/tests/res/single_load/schema/10/s/00.json b/tests/res/single_load/schema/10/s/00.json deleted file mode 100644 index 2f14ca5e..00000000 --- a/tests/res/single_load/schema/10/s/00.json +++ /dev/null @@ -1 +0,0 @@ -{"type": "object", "properties": {"a": {"type": "integer"}}} \ No newline at end of file diff --git a/tests/res/single_load/schema/10/s/10.json b/tests/res/single_load/schema/10/s/10.json deleted file mode 100644 index 39e56c62..00000000 --- a/tests/res/single_load/schema/10/s/10.json +++ /dev/null @@ -1 +0,0 @@ -{"type": "object", "properties": {"a": {"type": "integer"}, "b": {"type": "object", "properties": {"b": {"type": "array", "items": {"type": "integer"}}, "c": {"type": "string"}}}, "name": {"type": "string"}}} \ No newline at end of file diff --git a/tests/res/single_load/yaml/10/e/00.json b/tests/res/single_load/yaml/10/e/00.json deleted file mode 100644 index cb5b2f69..00000000 --- a/tests/res/single_load/yaml/10/e/00.json +++ /dev/null @@ -1 +0,0 @@ -{"a": 1} diff --git a/tests/res/single_load/yaml/10/e/10.json b/tests/res/single_load/yaml/10/e/10.json deleted file mode 100644 index 8c08088a..00000000 --- a/tests/res/single_load/yaml/10/e/10.json +++ /dev/null @@ -1 +0,0 @@ -{"a": 1, "b": {"b": [1, 2], "c": "C"}, "name": "aaa"} diff --git a/tests/schema/jsonschema/__init__.py b/tests/schema/jsonschema/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/schema/jsonschema/constants.py b/tests/schema/jsonschema/constants.py new file mode 100644 index 00000000..24da9dbc --- /dev/null +++ b/tests/schema/jsonschema/constants.py @@ -0,0 +1,38 @@ +# +# Copyright (C) 2015 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +# pylint: disable=missing-docstring, invalid-name, protected-access +# pylint: disable=bare-except +from __future__ import annotations + +import copy + + +OBJ_10: dict = {"a": 1} +SCM_10: dict = { + "properties": {"a": {"type": "integer"}}, + "type": "object" +} +STRICT_SCM_10 = 
copy.deepcopy(SCM_10) +STRICT_SCM_10["required"] = ["a"] + +OBJ_20: dict = {"a": 1, "b": [1, 2], "c": {"d": "aaa", "e": 0.1}} +SCM_20: dict = { + "properties": { + "a": {"type": "integer"}, + "b": {"items": {"type": "integer"}, "type": "array"}, + "c": { + "properties": {"d": {"type": "string"}, "e": {"type": "number"}}, + "type": "object" + } + }, + "type": "object" +} +STRICT_SCM_20 = copy.deepcopy(SCM_20) +STRICT_SCM_20["properties"]["b"]["minItems"] = 2 +STRICT_SCM_20["properties"]["b"]["uniqueItems"] = True +STRICT_SCM_20["properties"]["c"]["required"] = ["d", "e"] +STRICT_SCM_20["required"] = ["a", "b", "c"] + +NG_OBJ_10: dict = {"a": "aaa"} diff --git a/tests/schema/jsonschema/test_generator.py b/tests/schema/jsonschema/test_generator.py new file mode 100644 index 00000000..a7103b11 --- /dev/null +++ b/tests/schema/jsonschema/test_generator.py @@ -0,0 +1,70 @@ +# +# Copyright (C) 2015 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +# pylint: disable=missing-docstring, invalid-name, protected-access +# pylint: disable=bare-except +from __future__ import annotations + +import pytest + +import anyconfig.schema.jsonschema.generator as TT + +from .constants import ( + OBJ_10, OBJ_20, + SCM_10, SCM_20, + STRICT_SCM_10, STRICT_SCM_20, +) + + +@pytest.mark.parametrize( + ("arr", "ops", "exp"), + (([], {}, {"items": {"type": "string"}, "type": "array"}), + ([1], {}, {"items": {"type": "integer"}, "type": "array"}), + ), +) +def test_array_to_schema(arr, ops, exp): + assert TT.array_to_schema(arr, **ops) == exp + + +@pytest.mark.parametrize( + ("obj", "ops", "exp"), + (({"a": 1}, {}, + {"type": "object", "properties": {"a": {"type": "integer"}}}), + ), +) +def test_object_to_schema(obj, ops, exp): + assert TT.object_to_schema(obj, **ops) == exp + + +@pytest.mark.parametrize( + ("obj", "exp_scm"), + ((None, {"type": "null"}), + (0, {"type": "integer"}), + ("aaa", {"type": "string"}), + ([1], {"items": {"type": "integer"}, "type": "array"}), + (OBJ_10, SCM_10), 
+ (OBJ_20, SCM_20), + ), +) +def test_gen_schema_validate(obj, exp_scm): + assert TT.gen_schema(obj) == exp_scm + + +@pytest.mark.parametrize( + ("obj", "exp_scm"), + ((None, {"type": "null"}), + (0, {"type": "integer"}), + ("aaa", {"type": "string"}), + ([1], + {"items": {"type": "integer"}, "type": "array", + "minItems": 1, "uniqueItems": True}), + (["aaa", "bbb", "aaa"], + {"items": {"type": "string"}, "type": "array", + "minItems": 3, "uniqueItems": False}), + (OBJ_10, STRICT_SCM_10), + (OBJ_20, STRICT_SCM_20), + ), +) +def test_gen_strict_schema_validate(obj, exp_scm): + assert TT.gen_schema(obj, ac_schema_strict=True) == exp_scm diff --git a/tests/schema/jsonschema/test_validator.py b/tests/schema/jsonschema/test_validator.py new file mode 100644 index 00000000..78f7cd44 --- /dev/null +++ b/tests/schema/jsonschema/test_validator.py @@ -0,0 +1,80 @@ +# +# Copyright (C) 2015 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT +# +# pylint: disable=missing-docstring, invalid-name, protected-access +# pylint: disable=bare-except +from __future__ import annotations + +import pytest + +from .constants import ( + OBJ_10, OBJ_20, + SCM_10, SCM_20, + STRICT_SCM_10, STRICT_SCM_20, + NG_OBJ_10 +) +try: + import anyconfig.schema.jsonschema.validator as TT +except ImportError: + pytest.skip( + "Required jsonschema lib is not available.", + allow_module_level=True + ) + + +@pytest.mark.parametrize( + ("obj", "scm"), + ((OBJ_10, SCM_10), + (OBJ_20, SCM_20), + (OBJ_10, STRICT_SCM_10), + (OBJ_20, STRICT_SCM_20), + ), +) +def test_validate(obj, scm): + (ret, msg) = TT.validate(obj, scm) + assert not msg + assert ret + + +@pytest.mark.parametrize( + ("obj", "scm"), + ((NG_OBJ_10, SCM_10), + (NG_OBJ_10, SCM_20), + ), +) +def test_validate__an_error(obj, scm): + (ret, msg) = TT.validate(obj, scm, ac_schema_safe=True) + assert msg + assert not ret + + with pytest.raises(Exception): # noqa: B017 + TT.validate(obj, scm, ac_schema_safe=False) + + +def test_validate__errors(): + obj: 
dict = {"a": 1, "b": 2.0} + scm: dict = { + "type": "object", + "properties": {"a": {"type": "integer"}, "b": {"type": "string"}} + } + + (ret, msg) = TT.validate(obj, scm, ac_schema_errors=True) + assert msg # ["'a' is not of type ...", "'b' is not ..."] + assert not ret + + +@pytest.mark.filterwarnings("ignore") +@pytest.mark.parametrize( + ("obj", "scm", "success"), + ((OBJ_10, SCM_10, True), + (NG_OBJ_10, SCM_10, False), + (NG_OBJ_10, SCM_20, False), + ), +) +def test_is_valid(obj, scm, success): + assert TT.is_valid(obj, scm) == success + + if not success: + with pytest.raises(TT.ValidationError): + TT.is_valid(obj, scm, ac_schema_safe=False) diff --git a/tests/schema/test_jsonschema.py b/tests/schema/test_jsonschema.py deleted file mode 100644 index 1eaadade..00000000 --- a/tests/schema/test_jsonschema.py +++ /dev/null @@ -1,179 +0,0 @@ -# -# Copyright (C) 2015 - 2019 Satoru SATOH -# License: MIT -# -# pylint: disable=missing-docstring, invalid-name, protected-access -# pylint: disable=bare-except -import unittest - -try: - import anyconfig.schema.jsonschema as TT - SUPPORTED: bool = True -except ImportError: - SUPPORTED: bool = False # type: ignore - - -class Test_00_Base(unittest.TestCase): - - obj = {'a': 1} - schema = {"type": "object", - "properties": {"a": {"type": "integer"}}} - - obj2 = dict(a=1, b=[1, 2], c=dict(d="aaa", e=0.1)) - ref_scm = {'properties': {'a': {'type': 'integer'}, - 'b': {'items': {'type': 'integer'}, - 'type': 'array'}, - 'c': {'properties': {'d': {'type': 'string'}, - 'e': {'type': - 'number'}}, - 'type': 'object'}}, - 'type': 'object'} - - opts = dict(ac_schema_typemap=SUPPORTED) - - -class Test_00_Functions(Test_00_Base): - - def test_20_array_to_schema(self): - scm = TT.array_to_schema([1]) - ref = dict(items=dict(type="integer"), type="array") - self.assertEqual(scm, ref, scm) - - def test_22_array_to_schema__empty_array(self): - scm = TT.array_to_schema([]) - ref = dict(items=dict(type="string"), type="array") - 
self.assertEqual(scm, ref, scm) - - def test_30_object_to_schema_nodes_iter(self): - scm = TT.object_to_schema({'a': 1}) - ref = dict(type="object", properties=dict(a=dict(type="integer"))) - self.assertEqual(scm, ref, scm) - - -@unittest.skipIf(not SUPPORTED, "json schema lib is not available") -class Test_10_Validation(Test_00_Base): - obj_ng = dict(a='aaa') - - def test_10_validate(self): - (ret, msg) = TT.validate(self.obj, self.schema) - self.assertFalse(msg) - self.assertTrue(ret) - - def test_12_validate__ng(self): - (ret, msg) = TT.validate(self.obj_ng, self.schema) - self.assertTrue(msg) - self.assertFalse(ret) - - def test_14_validate__ng_no_safe(self): - self.assertRaises(Exception, TT.validate, self.obj_ng, - self.schema, ac_schema_safe=False) - - def test_20_is_valid_ok(self): - self.assertTrue(TT.is_valid(self.obj, self.schema)) - - def test_22_is_valid_ng(self): - self.assertFalse( - TT.is_valid(self.obj_ng, self.schema, ac_schema_safe=True) - ) - - def test_24_is_valid_or_fail_ng_1(self): - with self.assertRaises(TT.ValidationError): - TT.is_valid(self.obj_ng, self.schema, ac_schema_safe=False) - - -@unittest.skipIf(not SUPPORTED, "json schema lib is not available") -class Test_12_Validation_Errors(Test_00_Base): - - obj = dict(a=1, b=2.0) - scm = {"type": "object", "properties": {"a": {"type": "integer"}, - "b": {"type": "string"}}} - - def test_10_validate__ng(self): - (ret, msg) = TT.validate(self.obj, self.scm, ac_schema_errors=True) - self.assertTrue(msg) # ["'a' is not of type ...", "'b' is not ..."] - self.assertFalse(ret) - - -class Test_20_GenSchema(Test_00_Base): - - def test_40_gen_schema__primitive_types(self): - self.assertEqual(TT.gen_schema(None), {'type': 'null'}) - self.assertEqual(TT.gen_schema(0), {'type': 'integer'}) - self.assertEqual(TT.gen_schema("aaa"), {'type': 'string'}) - - scm = TT.gen_schema([1]) - ref_scm = {'items': {'type': 'integer'}, 'type': 'array'} - self.assertEqual(scm, ref_scm) - - scm = TT.gen_schema({'a': 1}) 
- ref_scm = {'properties': {'a': {'type': 'integer'}}, 'type': 'object'} - self.assertEqual(scm, ref_scm) - - def test_42_gen_schema_and_validate(self): - scm = TT.gen_schema(self.obj) - self.assertTrue(TT.validate(self.obj, scm)) - - def test_44_gen_schema__complex_types(self): - scm = TT.gen_schema(self.obj2) - self.assertEqual(scm, self.ref_scm) - - def test_46_gen_schema_and_validate__complex_types(self): - scm = TT.gen_schema(self.obj2) - self.assertTrue(TT.validate(self.obj2, scm)) - - -def _gen_scm(val): - return TT.gen_schema(val, ac_schema_strict=True) - - -class Test_30_GenStrictSchema(Test_00_Base): - - schema = {"type": "object", - "properties": {"a": {"type": "integer"}}, - "required": ["a"]} - - ref_scm = {'properties': {'a': {'type': 'integer'}, - 'b': {'items': {'type': 'integer'}, - 'type': 'array', - 'minItems': 2, 'uniqueItems': True}, - 'c': {'properties': {'d': {'type': 'string'}, - 'e': {'type': - 'number'}}, - 'type': 'object', - 'required': ['d', 'e']}}, - 'type': 'object', - 'required': ['a', 'b', 'c']} - - def test_40_gen_schema__primitive_types(self): - self.assertEqual(_gen_scm(None), {'type': 'null'}) - self.assertEqual(_gen_scm(0), {'type': 'integer'}) - self.assertEqual(_gen_scm("aaa"), {'type': 'string'}) - - scm = _gen_scm([1]) - ref_scm = {'items': {'type': 'integer'}, 'type': 'array', - 'minItems': 1, 'uniqueItems': True} - self.assertEqual(scm, ref_scm) - - scm = _gen_scm(["aaa", "bbb", "aaa"]) - ref_scm = {'items': {'type': 'string'}, 'type': 'array', - 'minItems': 3, 'uniqueItems': False} - self.assertEqual(scm, ref_scm) - - scm = _gen_scm({'a': 1}) - ref_scm = {'properties': {'a': {'type': 'integer'}}, - 'type': 'object', 'required': ['a']} - self.assertEqual(scm, ref_scm) - - def test_42_gen_schema_and_validate(self): - scm = _gen_scm(self.obj) - self.assertTrue(TT.validate(self.obj, scm)) - - def test_44_gen_schema__complex_types(self): - scm = _gen_scm(self.obj2) - self.assertEqual(scm, self.ref_scm) - - def 
test_46_gen_schema_and_validate__complex_types(self): - scm = _gen_scm(self.obj2) - self.assertTrue(TT.validate(self.obj2, scm)) - -# vim:sw=4:ts=4:et: diff --git a/tests/template/test_jinja2.py b/tests/template/test_jinja2.py index 8cbadb4b..6127eb19 100644 --- a/tests/template/test_jinja2.py +++ b/tests/template/test_jinja2.py @@ -1,33 +1,46 @@ # -# Copyright (C) 2015 - 2021 Satoru SATOH -# License: MIT +# Copyright (C) 2015 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # -# pylint: disable=missing-docstring, unused-variable, invalid-name +# pylint: disable=missing-docstring +from __future__ import annotations + import os import pathlib -import tempfile -import unittest import unittest.mock +import pytest + try: import anyconfig.template.jinja2 as TT except ImportError: - raise unittest.SkipTest('jinja2 does not look available.') + pytest.skip( + "jinja2 does not look available.", + allow_module_level=True + ) -from .. import base +from .. import common -TDATA_DIR = base.RES_DIR / 'template/jinja2/' +TDATA_DIR = common.RESOURCE_DIR / "templates" / "jinja2" TEMPLATES = [ - (path, (TDATA_DIR / '10/r/10.txt').read_text()) - for path in (TDATA_DIR / '10').glob('*.j2') + (p, r.read_text()) for p, r in ( + (path, (TDATA_DIR / "10" / "r" / f"{path.stem}.txt")) + for path in (TDATA_DIR / "10").glob("*.j2") + ) if r.exists() ] + TEMPLATES_WITH_FILTERS = [ - (path, (TDATA_DIR / f'20/r/{path.stem}.txt').read_text()) - for path in (TDATA_DIR / '20').glob('*.j2') + (p, r.read_text()) for p, r in ( + (path, (TDATA_DIR / "20" / "r" / f"{path.stem}.txt")) + for path in (TDATA_DIR / "20").glob("*.j2") + ) if r.exists() ] +assert TEMPLATES +assert TEMPLATES_WITH_FILTERS + def normalize(txt: str): """Strip white spaces and line break at the end of the content ``txt``. 
@@ -39,89 +52,79 @@ def negate(value): return - value -class FunctionsTestCase(unittest.TestCase): - - def test_make_template_paths(self): - tpath0 = pathlib.Path('/path/to/a/').resolve() - path0 = tpath0 / 'tmpl.j2' - tmp0 = pathlib.Path('/tmp').resolve() - ies = (((path0, ), [tpath0]), - ((path0, [tmp0]), [tpath0, tmp0]), - ) - for inp, exp in ies: - self.assertEqual( - TT.make_template_paths(*inp), exp - ) - - def test_make_template_paths_after_chdir(self): - tmp0 = pathlib.Path('/tmp').resolve() - saved = pathlib.Path().cwd().resolve() - try: - os.chdir(str(tmp0)) - tpath1 = pathlib.Path('.') - path1 = tpath1 / 'tmpl.j2' - ies = (((path1, ), [tmp0]), - ((path1, [tmp0]), [tmp0]), - ) - - for inp, exp in ies: - self.assertEqual( - TT.make_template_paths(*inp), exp - ) - except FileNotFoundError: - pass # ``tmp0`` does not exist on windows. - finally: - os.chdir(str(saved)) - - -class TestCase(unittest.TestCase): - - def assertAlmostEqual(self, inp, exp, **_kwargs): - """Override to allow to compare texts. 
- """ - self.assertEqual(normalize(inp), normalize(exp)) - - def test_render_impl_without_paths(self): - for inp, exp in TEMPLATES: - self.assertAlmostEqual(TT.render_impl(inp), exp) - - def test_render_impl_with_paths(self): - for inp, exp in TEMPLATES: - self.assertAlmostEqual( - TT.render_impl(inp, paths=[inp.parent]), exp - ) - - def test_render_without_paths(self): - for inp, exp in TEMPLATES: - self.assertAlmostEqual(TT.render(inp), exp) - - def test_render_with_wrong_path(self): - with tempfile.TemporaryDirectory() as tdir: - workdir = pathlib.Path(tdir) - - ng_t = workdir / 'ng.j2' - ok_t = workdir / 'ok.j2' - ok_t_content = 'a: {{ a }}' - ok_content = 'a: aaa' - ctx = dict(a='aaa', ) - - ok_t.write_text(ok_t_content) - - with unittest.mock.patch('builtins.input') as mock_input: - mock_input.return_value = str(ok_t) - c_r = TT.render(str(ng_t), ctx, ask=True) - self.assertEqual(c_r, ok_content) - - with self.assertRaises(TT.jinja2.TemplateNotFound): - TT.render(str(ng_t), ctx, ask=False) - - def test_try_render_with_empty_filepath_and_content(self): - self.assertRaises(ValueError, TT.try_render) - - def test_render_with_filter(self): - for inp, exp in TEMPLATES_WITH_FILTERS: - self.assertAlmostEqual( - TT.render(inp, filters={'negate': negate}), exp - ) - -# vim:sw=4:ts=4:et: +TMPL_DIR_10 = pathlib.Path("/path/to/a/").resolve() +TMPL_PATH_10 = TMPL_DIR_10 / "tmpl.j2" +TMP_DIR = pathlib.Path("/tmp").resolve() + + +@pytest.mark.parametrize( + ("args", "exp"), + (((TMPL_PATH_10, ), [TMPL_DIR_10]), + ((TMPL_PATH_10, [TMP_DIR]), [TMPL_DIR_10, TMP_DIR]), + ), +) +def test_make_template_paths(args, exp): + assert TT.make_template_paths(*args) == exp + + +def test_make_template_paths_after_chdir(tmp_path): + old_pwd = pathlib.Path().cwd().resolve() + path_1 = tmp_path / "t.j2" + + try: + os.chdir(str(tmp_path)) + + assert TT.make_template_paths(path_1) == [tmp_path] + assert TT.make_template_paths(path_1, [tmp_path]) == [tmp_path] + except FileNotFoundError: + pass 
# ``tmp0`` does not exist on windows. + finally: + os.chdir(str(old_pwd)) + + +def __assert_almost_eq(lhs, rhs): + assert normalize(lhs) == normalize(rhs) + + +@pytest.mark.parametrize(("tmpl", "exp"), TEMPLATES) +def test_render_impl_without_paths(tmpl, exp): + __assert_almost_eq(TT.render_impl(tmpl), exp) + + +@pytest.mark.parametrize(("tmpl", "exp"), TEMPLATES) +def test_render_impl_with_paths(tmpl, exp): + __assert_almost_eq(TT.render_impl(tmpl, paths=[tmpl.parent]), exp) + + +@pytest.mark.parametrize(("tmpl", "exp"), TEMPLATES) +def test_render_without_paths(tmpl, exp): + __assert_almost_eq(TT.render(tmpl), exp) + + +def test_try_render_with_empty_filepath_and_content(): + with pytest.raises(ValueError): + TT.try_render() + + +@pytest.mark.parametrize(("tmpl", "exp"), TEMPLATES_WITH_FILTERS) +def test_render_with_filter(tmpl, exp): + __assert_almost_eq(TT.render(tmpl, filters={"negate": negate}), exp) + + +def test_render_with_wrong_path(tmp_path): + workdir = tmp_path + + ng_t = workdir / "ng.j2" + ok_t = workdir / "ok.j2" + ok_t_content = "a: {{ a }}" + ok_content = "a: aaa" + ctx = {"a": "aaa"} + + ok_t.write_text(ok_t_content) + + with unittest.mock.patch("builtins.input") as mock_input: + mock_input.return_value = str(ok_t) + assert TT.render(str(ng_t), ctx, ask=True) == ok_content + + with pytest.raises(TT.jinja2.TemplateNotFound): + TT.render(str(ng_t), ctx, ask=False) diff --git a/tests/test_lib.py b/tests/test_lib.py index 2f8a2590..2d6c8eab 100644 --- a/tests/test_lib.py +++ b/tests/test_lib.py @@ -1,41 +1,34 @@ # -# Copyright (C) 2015 - 2021 Satoru SATOH +# Copyright (C) 2015 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring import pathlib import subprocess -import tempfile -import unittest -SCRIPT_TO_USE_ANYCONFIG = """\ -#! /usr/bin/env python +SCRIPT_TO_USE_ANYCONFIG = """#! 
/usr/bin/env python import anyconfig c = anyconfig.load("/") or {} anyconfig.dump(c, "/dev/null", "yaml") """ -NULL_DEV = '/dev/null' +NULL_DEV = "/dev/null" if not pathlib.Path(NULL_DEV).exists(): - NULL_DEV = 'NUL' + NULL_DEV = "NUL" def check_output(cmd): - devnull = open(NULL_DEV, 'w') - proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=devnull) - return proc.communicate()[0] + with open(NULL_DEV, mode="w", encoding="utf-8") as devnull: + with subprocess.Popen(cmd, stdout=subprocess.PIPE, + stderr=devnull) as proc: + return proc.communicate()[0] -class TestCase(unittest.TestCase): +def test_run_script(tmp_path: pathlib.Path): + script = tmp_path / "a.py" + script.write_text(SCRIPT_TO_USE_ANYCONFIG) + out = check_output(["python", str(script)]) - def test_00_run_script(self): - with tempfile.TemporaryDirectory(prefix='anyconfig-tests-') as tmpdir: - script = pathlib.Path(tmpdir) / "a.py" - script.write_text(SCRIPT_TO_USE_ANYCONFIG) - - out = check_output(["python", str(script)]) - self.assertTrue(out in (b'', '')) - -# vim:sw=4:ts=4:et: + assert out in (b"", "") diff --git a/tests/test_singleton.py b/tests/test_singleton.py index b8b868a1..2088a5be 100644 --- a/tests/test_singleton.py +++ b/tests/test_singleton.py @@ -1,45 +1,44 @@ # -# Copyright (C) 2018 Satoru SATOH -# License: MIT +# Copyright (C) 2018 - 2024 Satoru SATOH +# SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring, invalid-name, too-few-public-methods -import unittest +from __future__ import annotations + import anyconfig.singleton as TT -class TestSingleton(unittest.TestCase): +def test_basic_singletons() -> None: + class A(TT.Singleton): + pass - def test_10_basic(self): - class A(TT.Singleton): - pass + class B(TT.Singleton): + pass - class B(TT.Singleton): - pass + (a1, a2) = (A(), A()) + (b1, b2) = (B(), B()) + assert a1 is a2 + assert b1 is b2 + assert a1 is not b1 - (a1, a2) = (A(), A()) - (b1, b2) = (B(), B()) - self.assertTrue(a1 is a2) - self.assertTrue(b1 is b2) 
- self.assertTrue(a1 is not b1) - def test_20_descendant(self): - class A(TT.Singleton): - pass +def test_descendants() -> None: + class A(TT.Singleton): + pass - class A2(A): - pass + class A2(A): + pass - (a1, a2) = (A(), A2()) - self.assertTrue(a1 is a2) + (a1, a2) = (A(), A2()) + assert a1 is a2 - def test_30_mixin(self): - class Base: - pass - class A(Base, TT.Singleton): - pass +def test_mixins() -> None: + class Base: + pass - (a1, a2) = (A(), A()) - self.assertTrue(a1 is a2) + class A(Base, TT.Singleton): + pass -# vim:sw=4:ts=4:et: + (a1, a2) = (A(), A()) + assert a1 is a2 diff --git a/tests/utils/test_detectors.py b/tests/utils/test_detectors.py index 3ea449f5..3b864117 100644 --- a/tests/utils/test_detectors.py +++ b/tests/utils/test_detectors.py @@ -1,9 +1,11 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH +# Copyright (C) 2012 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring,invalid-name """test cases for anyconfig.utils.""" +from __future__ import annotations + import collections import pytest @@ -12,12 +14,30 @@ @pytest.mark.parametrize( - 'inp,exp', + ("inp", "exp"), + ((None, False), + ([], False), + ({}, False), + (object(), False), + ((1, ), False), + (True, True), + (2, True), + (3.14, True), + ("a string", True), + (b"a string", True), + ), +) +def test_is_primitive_type(inp, exp): + assert TT.is_primitive_type(inp) == exp + + +@pytest.mark.parametrize( + ("inp", "exp"), ((None, False), ([], True), ((), True), ((str(x) for x in range(10)), True), ([str(x) for x in range(10)], True), - ('abc', False), (0, False), ({}, False), + ("abc", False), (0, False), ({}, False), ), ) def test_is_iterable(inp, exp): @@ -25,10 +45,10 @@ def test_is_iterable(inp, exp): @pytest.mark.parametrize( - 'inp,exp', + ("inp", "exp"), ((None, False), (0, False), - ('aaa', False), + ("aaa", False), ({}, False), ([], True), ((), True), ((str(x) for x in range(10)), True), @@ -40,18 +60,16 @@ def test_is_list_like(inp, exp): 
@pytest.mark.parametrize( - 'inp,exp', + ("inp", "exp"), ((None, False), (0, False), - ('aaa', False), + ("aaa", False), ([], False), ((1, ), False), - (collections.namedtuple('Point', ('x', 'y'))(1, 2), False), + (collections.namedtuple("Point", ("x", "y"))(1, 2), False), ({}, True), - (collections.OrderedDict((('a', 1), ('b', 2))), True), + (collections.OrderedDict((("a", 1), ("b", 2))), True), ), ) def test_is_dict_like(inp, exp): assert TT.is_dict_like(inp) == exp - -# vim:sw=4:ts=4:et: diff --git a/tests/utils/test_files.py b/tests/utils/test_files.py index 8307f6de..747cbb4d 100644 --- a/tests/utils/test_files.py +++ b/tests/utils/test_files.py @@ -1,39 +1,40 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH +# Copyright (C) 2012 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring -r"""Test cases for anyconfig.utils.files. -""" +r"""Test cases for anyconfig.utils.files.""" +from __future__ import annotations + import pathlib -import unittest +import typing -import anyconfig.utils.files as TT +import pytest +import anyconfig.utils.files as TT -class TestCase(unittest.TestCase): - def test_is_io_stream(self): - ies = ( - (open(__file__), True), - (__file__, False), - ([__file__], False), - (pathlib.Path(__file__), False), - ([pathlib.Path(__file__)], False), - ) - for inp, exp in ies: - res = TT.is_io_stream(inp) - (self.assertTrue if exp else self.assertFalse)(res) +@pytest.mark.parametrize( + ("obj", "exp"), + ((open(__file__, encoding="utf-8"), True), + (__file__, False), + ([__file__], False), + (pathlib.Path(__file__), False), + ([pathlib.Path(__file__)], False), + ), +) +def test_is_io_stream(obj: typing.Any, exp: bool) -> None: + res = TT.is_io_stream(obj) + assert res if exp else not res - def test_get_path_from_stream(self): - this = __file__ - with pathlib.Path(this).open() as strm: - self.assertEqual(TT.get_path_from_stream(strm), this) +def test_get_path_from_stream() -> None: + this = __file__ - with 
self.assertRaises(ValueError): - TT.get_path_from_stream(this) + with pathlib.Path(this).open(encoding="utf-8") as strm: + assert TT.get_path_from_stream(strm) == this - self.assertEqual(TT.get_path_from_stream(this, safe=True), '') + with pytest.raises(ValueError): + TT.get_path_from_stream(this) -# vim:sw=4:ts=4:et: + assert TT.get_path_from_stream(this, safe=True) == "" diff --git a/tests/utils/test_lists.py b/tests/utils/test_lists.py index ad987855..3ba5641d 100644 --- a/tests/utils/test_lists.py +++ b/tests/utils/test_lists.py @@ -1,36 +1,35 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH +# Copyright (C) 2012 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # -# pylint: disable=missing-docstring,invalid-name -r"""test cases for anyconfig.utils.lists. -""" +# pylint: disable=missing-docstring +r"""test cases for anyconfig.utils.lists.""" +from __future__ import annotations + import operator -import unittest -import anyconfig.utils.lists as TT +import pytest +import anyconfig.utils.lists as TT -class TestCase(unittest.TestCase): - def test_groupby(self): - items = (('a', 1), ('b', -1), ('c', 1)) - res = TT.groupby(items, operator.itemgetter(1)) - self.assertEqual( - [(key, tuple(grp)) for key, grp in res], - [(-1, (('b', -1),)), (1, (('a', 1), ('c', 1)))] - ) +def test_groupby() -> None: + items = (("a", 1), ("b", -1), ("c", 1)) + res = TT.groupby(items, operator.itemgetter(1)) + assert [ + (key, tuple(grp)) for key, grp in res + ] == [(-1, (("b", -1),)), (1, (("a", 1), ("c", 1)))] - def test_concat(self): - ies = ( - ([[]], []), - ((()), []), - ([[1, 2, 3], [4, 5]], [1, 2, 3, 4, 5]), - ([[1, 2, 3], [4, 5, [6, 7]]], [1, 2, 3, 4, 5, [6, 7]]), - (((1, 2, 3), (4, 5, (6, 7))), [1, 2, 3, 4, 5, (6, 7)]), - (((i, i * 2) for i in range(3)), [0, 0, 1, 2, 2, 4]), - ) - for inp, exp in ies: - self.assertEqual(TT.concat(inp), exp) -# vim:sw=4:ts=4:et: +@pytest.mark.parametrize( + ("xss", "exp"), + (([[]], []), + ((()), []), + ([[1, 2, 3], [4, 5]], [1, 2, 3, 4, 5]), + 
([[1, 2, 3], [4, 5, [6, 7]]], [1, 2, 3, 4, 5, [6, 7]]), + (((1, 2, 3), (4, 5, (6, 7))), [1, 2, 3, 4, 5, (6, 7)]), + (((i, i * 2) for i in range(3)), [0, 0, 1, 2, 2, 4]) + ), +) +def test_concat(xss, exp): + assert TT.concat(xss) == exp diff --git a/tests/utils/test_utils.py b/tests/utils/test_utils.py index 34742de6..a4314403 100644 --- a/tests/utils/test_utils.py +++ b/tests/utils/test_utils.py @@ -1,23 +1,21 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH +# Copyright (C) 2012 - 2024 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring -r"""test cases for anyconfig.utils.lists. -""" -import unittest - -import anyconfig.utils.utils as TT +r"""test cases for anyconfig.utils.lists.""" +from __future__ import annotations +import pytest -class TestCase(unittest.TestCase): +import anyconfig.utils.utils as TT - def test_filter_options(self): - data = ( - (('aaa', ), dict(aaa=1, bbb=2), dict(aaa=1)), - (('aaa', ), dict(bbb=2), dict()), - ) - for keys, inp, exp in data: - self.assertEqual(TT.filter_options(keys, inp), exp) -# vim:sw=4:ts=4:et: +@pytest.mark.parametrize( + ("keys", "opts", "exp"), + ((('aaa', ), {"aaa": 1, "bbb": 2}, {"aaa": 1}), + (('aaa', ), {"bbb": 2}, {}), + ) +) +def test_filter_options(keys, opts, exp) -> None: + assert TT.filter_options(keys, opts) == exp diff --git a/tox.ini b/tox.ini index 1a1fd454..ffd9cf69 100644 --- a/tox.ini +++ b/tox.ini @@ -1,14 +1,26 @@ [tox] -envlist = py38, py39, py310, py311, py312, plugins, doc, min, dists +envlist = + py39 + py310 + py311 + py312 + py313 + lint + type-check + min + full + plugins + doc + dists skip_missing_interpreters = true [gh-actions] python = - 3.8: py38 3.9: py39 3.10: py310 - 3.11: py311, type-check, lint, plugins, min + 3.11: py311 3.12: py312 + 3.13: py313, type-check, lint, plugins, min [flake8] exclude = .git,.tox,dist,*egg,setup.py @@ -17,33 +29,47 @@ exclude = .git,.tox,dist,*egg,setup.py deps = -r{toxinidir}/requirements.txt -r{toxinidir}/tests/requirements.txt 
- lint: ruff + -r {toxinidir}/tests/requirements.d/base.txt commands = pytest setenv = PYTHONPATH = {toxinidir}/src [testenv:lint] +deps = + -r {toxinidir}/tests/requirements.d/lint.txt +commands = + ruff check src --output-format pylint + ruff check src --statistics + +[testenv:lint-legacy] +deps = + {[testenv:lint]deps} + -r {toxinidir}/tests/requirements.d/lint-legacy.txt commands = + {[testenv:lint]commands} flake8 --doctests src tests - - pylint --disable=invalid-name,locally-disabled --init-hook 'import os,sys; sys.path.insert(0, os.curdir)' src - ruff src tests + - pylint --init-hook 'import os,sys; sys.path.insert(0, os.curdir)' src [testenv:type-check] deps = - -r {toxinidir}/tests/requirements_type-check.txt + -r {toxinidir}/tests/requirements.d/type-check.txt commands = mypy src +[testenv:full] +deps = + {[testenv]deps} + -r{toxinidir}/tests/requirements.d/full.txt + [testenv:plugins] deps = {[testenv]deps} - -r{toxinidir}/tests/requirements_plugins.txt + -r{toxinidir}/tests/requirements.d/plugins.txt [testenv:doc] allowlist_externals = make - deps = {[testenv]deps} -r{toxinidir}/docs/requirements.txt @@ -60,23 +86,54 @@ commands = pytest [testenv:sdist] +deps = + setuptools commands = python setup.py sdist [testenv:dists] -passenv = - _SNAPSHOT_BUILD deps = + {[testenv:sdist]deps} wheel + twine commands = {[testenv:sdist]commands} python setup.py bdist_wheel + twine check dist/*-*tar.gz + +# Depends: ~/.pypirc +[testenv:upload] +deps = + {[testenv:dists]deps} + twine +commands = + {[testenv:dists]commands} + twine upload --repository anyconfig dist/* [testenv:srpm] -passenv = - _SNAPSHOT_BUILD +allowlist_externals = + ./pkg/buildsrpm.sh +deps = + {[testenv:sdist]deps} commands = {[testenv:sdist]commands} - python setup.py bdist_rpm --source-only --dist-dir {toxinidir}/dist + ./pkg/buildsrpm.sh + +[testenv:upload-copr] +allowlist_externals = + {[testenv:srpm]allowlist_externals} + ./pkg/copr-build.sh +deps = + {[testenv:srpm]deps} +commands = + 
{[testenv:srpm]commands} + ./pkg/copr-build.sh -# vim:sw=4:ts=4:et: +[testenv:rpms] +allowlist_externals = + {[testenv:upload-copr]allowlist_externals} +deps = + {[testenv:upload-copr]deps} +commands = + {[testenv:srpm]commands} + ./pkg/copr-build.sh check diff --git a/uv.lock b/uv.lock new file mode 100644 index 00000000..9ce84688 --- /dev/null +++ b/uv.lock @@ -0,0 +1,602 @@ +version = 1 +revision = 2 +requires-python = ">=3.9" + +[[package]] +name = "anyconfig" +source = { editable = "." } +dependencies = [ + { name = "tox-uv" }, +] + +[package.optional-dependencies] +query = [ + { name = "jmespath" }, +] +schema = [ + { name = "jsonschema" }, +] +template = [ + { name = "jinja2" }, +] +toml = [ + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "tomli-w" }, +] +yaml = [ + { name = "pyyaml" }, +] + +[package.metadata] +requires-dist = [ + { name = "jinja2", marker = "extra == 'template'" }, + { name = "jmespath", marker = "extra == 'query'" }, + { name = "jsonschema", marker = "extra == 'schema'" }, + { name = "pyyaml", marker = "extra == 'yaml'" }, + { name = "tomli", marker = "python_full_version < '3.11' and extra == 'toml'" }, + { name = "tomli-w", marker = "extra == 'toml'" }, + { name = "tox-uv" }, +] +provides-extras = ["yaml", "toml", "query", "schema", "template"] + +[[package]] +name = "attrs" +version = "25.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = 
"2025-03-13T11:10:21.14Z" }, +] + +[[package]] +name = "cachetools" +version = "6.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8a/89/817ad5d0411f136c484d535952aef74af9b25e0d99e90cdffbe121e6d628/cachetools-6.1.0.tar.gz", hash = "sha256:b4c4f404392848db3ce7aac34950d17be4d864da4b8b66911008e430bc544587", size = 30714, upload-time = "2025-06-16T18:51:03.07Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/f0/2ef431fe4141f5e334759d73e81120492b23b2824336883a91ac04ba710b/cachetools-6.1.0-py3-none-any.whl", hash = "sha256:1c7bb3cf9193deaf3508b7c5f2a79986c13ea38965c5adcff1f84519cf39163e", size = 11189, upload-time = "2025-06-16T18:51:01.514Z" }, +] + +[[package]] +name = "chardet" +version = "5.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/0d/f7b6ab21ec75897ed80c17d79b15951a719226b9fababf1e40ea74d69079/chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7", size = 2069618, upload-time = "2023-08-01T19:23:02.662Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/6f/f5fbc992a329ee4e0f288c1fe0e2ad9485ed064cac731ed2fe47dcc38cbf/chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970", size = 199385, upload-time = "2023-08-01T19:23:00.661Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash 
= "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "distlib" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d", size = 614605, upload-time = "2025-07-17T16:52:00.465Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" }, +] + +[[package]] +name = "filelock" +version = "3.18.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0a/10/c23352565a6544bdc5353e0b15fc1c563352101f30e24bf500207a54df9a/filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2", size = 18075, upload-time = "2025-03-14T07:11:40.47Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215, upload-time = "2025-03-14T07:11:39.145Z" }, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, +] + +[[package]] +name = "jmespath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843, upload-time = "2022-06-17T18:00:12.224Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256, upload-time = "2022-06-17T18:00:10.251Z" }, +] + +[[package]] +name = "jsonschema" +version = "4.25.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "jsonschema-specifications" }, + { name = "referencing" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d5/00/a297a868e9d0784450faa7365c2172a7d6110c763e30ba861867c32ae6a9/jsonschema-4.25.0.tar.gz", hash = "sha256:e63acf5c11762c0e6672ffb61482bdf57f0876684d8d249c0fe2d730d48bc55f", size = 356830, upload-time = "2025-07-18T15:39:45.11Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/54/c86cd8e011fe98803d7e382fd67c0df5ceab8d2b7ad8c5a81524f791551c/jsonschema-4.25.0-py3-none-any.whl", hash = "sha256:24c2e8da302de79c8b9382fee3e76b355e44d2a4364bb207159ce10b517bd716", size = 89184, upload-time = "2025-07-18T15:39:42.956Z" }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2025.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "referencing" }, 
+] +sdist = { url = "https://files.pythonhosted.org/packages/bf/ce/46fbd9c8119cfc3581ee5643ea49464d168028cfb5caff5fc0596d0cf914/jsonschema_specifications-2025.4.1.tar.gz", hash = "sha256:630159c9f4dbea161a6a2205c3011cc4f18ff381b189fff48bb39b9bf26ae608", size = 15513, upload-time = "2025-04-23T12:34:07.418Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/01/0e/b27cdbaccf30b890c40ed1da9fd4a3593a5cf94dae54fb34f8a4b74fcd3f/jsonschema_specifications-2025.4.1-py3-none-any.whl", hash = "sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af", size = 18437, upload-time = "2025-04-23T12:34:05.422Z" }, +] + +[[package]] +name = "markupsafe" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", size = 14357, upload-time = "2024-10-18T15:20:51.44Z" }, + { url = "https://files.pythonhosted.org/packages/04/e1/6e2194baeae0bca1fae6629dc0cbbb968d4d941469cbab11a3872edff374/MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158", size = 12393, upload-time = "2024-10-18T15:20:52.426Z" }, + { url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", size = 21732, upload-time = 
"2024-10-18T15:20:53.578Z" }, + { url = "https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", size = 20866, upload-time = "2024-10-18T15:20:55.06Z" }, + { url = "https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", size = 20964, upload-time = "2024-10-18T15:20:55.906Z" }, + { url = "https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", size = 21977, upload-time = "2024-10-18T15:20:57.189Z" }, + { url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", size = 21366, upload-time = "2024-10-18T15:20:58.235Z" }, + { url = "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", size = 21091, upload-time = "2024-10-18T15:20:59.235Z" }, + { url = "https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50", size = 15065, upload-time = "2024-10-18T15:21:00.307Z" }, + { url = 
"https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a", size = 15514, upload-time = "2024-10-18T15:21:01.122Z" }, + { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353, upload-time = "2024-10-18T15:21:02.187Z" }, + { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392, upload-time = "2024-10-18T15:21:02.941Z" }, + { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984, upload-time = "2024-10-18T15:21:03.953Z" }, + { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120, upload-time = "2024-10-18T15:21:06.495Z" }, + { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032, upload-time = "2024-10-18T15:21:07.295Z" }, + { url = 
"https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057, upload-time = "2024-10-18T15:21:08.073Z" }, + { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359, upload-time = "2024-10-18T15:21:09.318Z" }, + { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306, upload-time = "2024-10-18T15:21:10.185Z" }, + { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094, upload-time = "2024-10-18T15:21:11.005Z" }, + { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521, upload-time = "2024-10-18T15:21:12.911Z" }, + { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" }, + { url = 
"https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" }, + { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" }, + { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" }, + { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" }, + { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" }, + { url = 
"https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" }, + { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" }, + { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" }, + { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" }, + { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" }, + { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" }, + { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" }, + { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" }, + { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" }, + { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload-time = "2024-10-18T15:21:30.366Z" }, + { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload-time = "2024-10-18T15:21:31.207Z" }, + { url = 
"https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload-time = "2024-10-18T15:21:32.032Z" }, + { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload-time = "2024-10-18T15:21:33.625Z" }, + { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload-time = "2024-10-18T15:21:34.611Z" }, + { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" }, + { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" }, + { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" }, + { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" }, + { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload-time = "2024-10-18T15:21:41.814Z" }, + { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" }, + { url = "https://files.pythonhosted.org/packages/a7/ea/9b1530c3fdeeca613faeb0fb5cbcf2389d816072fab72a71b45749ef6062/MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a", size = 14344, upload-time = "2024-10-18T15:21:43.721Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/c2/fbdbfe48848e7112ab05e627e718e854d20192b674952d9042ebd8c9e5de/MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff", size = 12389, upload-time = "2024-10-18T15:21:44.666Z" }, + { url = "https://files.pythonhosted.org/packages/f0/25/7a7c6e4dbd4f867d95d94ca15449e91e52856f6ed1905d58ef1de5e211d0/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13", size = 21607, upload-time = "2024-10-18T15:21:45.452Z" }, + { url = "https://files.pythonhosted.org/packages/53/8f/f339c98a178f3c1e545622206b40986a4c3307fe39f70ccd3d9df9a9e425/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144", size = 20728, upload-time = "2024-10-18T15:21:46.295Z" }, + { url = "https://files.pythonhosted.org/packages/1a/03/8496a1a78308456dbd50b23a385c69b41f2e9661c67ea1329849a598a8f9/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29", size = 20826, upload-time = "2024-10-18T15:21:47.134Z" }, + { url = "https://files.pythonhosted.org/packages/e6/cf/0a490a4bd363048c3022f2f475c8c05582179bb179defcee4766fb3dcc18/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0", size = 21843, upload-time = "2024-10-18T15:21:48.334Z" }, + { url = "https://files.pythonhosted.org/packages/19/a3/34187a78613920dfd3cdf68ef6ce5e99c4f3417f035694074beb8848cd77/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0", size = 21219, upload-time = "2024-10-18T15:21:49.587Z" }, + { url = 
"https://files.pythonhosted.org/packages/17/d8/5811082f85bb88410ad7e452263af048d685669bbbfb7b595e8689152498/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178", size = 20946, upload-time = "2024-10-18T15:21:50.441Z" }, + { url = "https://files.pythonhosted.org/packages/7c/31/bd635fb5989440d9365c5e3c47556cfea121c7803f5034ac843e8f37c2f2/MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f", size = 15063, upload-time = "2024-10-18T15:21:51.385Z" }, + { url = "https://files.pythonhosted.org/packages/b3/73/085399401383ce949f727afec55ec3abd76648d04b9f22e1c0e99cb4bec3/MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a", size = 15506, upload-time = "2024-10-18T15:21:52.974Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "platformdirs" +version = "4.3.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/8b/3c73abc9c759ecd3f1f7ceff6685840859e8070c4d947c93fae71f6a0bf2/platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc", size = 21362, upload-time = 
"2025-05-07T22:47:42.121Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/39/979e8e21520d4e47a0bbe349e2713c0aac6f3d853d0e5b34d76206c439aa/platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4", size = 18567, upload-time = "2025-05-07T22:47:40.376Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pyproject-api" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/19/fd/437901c891f58a7b9096511750247535e891d2d5a5a6eefbc9386a2b41d5/pyproject_api-1.9.1.tar.gz", hash = "sha256:43c9918f49daab37e302038fc1aed54a8c7a91a9fa935d00b9a485f37e0f5335", size = 22710, upload-time = "2025-05-12T14:41:58.025Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/e6/c293c06695d4a3ab0260ef124a74ebadba5f4c511ce3a4259e976902c00b/pyproject_api-1.9.1-py3-none-any.whl", hash = "sha256:7d6238d92f8962773dd75b5f0c4a6a27cce092a14b623b811dba656f3b628948", size = 13158, upload-time = "2025-05-12T14:41:56.217Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199, upload-time = "2024-08-06T20:31:40.178Z" }, + { url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758, upload-time = "2024-08-06T20:31:42.173Z" }, + { url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463, upload-time = "2024-08-06T20:31:44.263Z" }, + { url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280, upload-time = "2024-08-06T20:31:50.199Z" }, + { url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239, upload-time = "2024-08-06T20:31:52.292Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802, upload-time = "2024-08-06T20:31:53.836Z" }, + { url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527, upload-time = "2024-08-06T20:31:55.565Z" }, + { url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052, upload-time = "2024-08-06T20:31:56.914Z" }, + { url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774, upload-time = "2024-08-06T20:31:58.304Z" }, + { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612, upload-time = "2024-08-06T20:32:03.408Z" }, + { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040, upload-time = "2024-08-06T20:32:04.926Z" }, + { url = 
"https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829, upload-time = "2024-08-06T20:32:06.459Z" }, + { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167, upload-time = "2024-08-06T20:32:08.338Z" }, + { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952, upload-time = "2024-08-06T20:32:14.124Z" }, + { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301, upload-time = "2024-08-06T20:32:16.17Z" }, + { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638, upload-time = "2024-08-06T20:32:18.555Z" }, + { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850, upload-time = "2024-08-06T20:32:19.889Z" }, + { url = 
"https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980, upload-time = "2024-08-06T20:32:21.273Z" }, + { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" }, + { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" }, + { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" }, + { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" }, + { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" }, + { url = 
"https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" }, + { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" }, + { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" }, + { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" }, + { url = 
"https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" }, + { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload-time = "2024-08-06T20:32:51.188Z" }, + { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload-time = "2024-08-06T20:32:53.019Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload-time = "2024-08-06T20:32:54.708Z" }, + { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" }, + { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, + { url = "https://files.pythonhosted.org/packages/65/d8/b7a1db13636d7fb7d4ff431593c510c8b8fca920ade06ca8ef20015493c5/PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d", size = 184777, upload-time = "2024-08-06T20:33:25.896Z" }, + { url = "https://files.pythonhosted.org/packages/0a/02/6ec546cd45143fdf9840b2c6be8d875116a64076218b61d68e12548e5839/PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f", size = 172318, upload-time = "2024-08-06T20:33:27.212Z" }, + { url = "https://files.pythonhosted.org/packages/0e/9a/8cc68be846c972bda34f6c2a93abb644fb2476f4dcc924d52175786932c9/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290", size = 720891, upload-time = "2024-08-06T20:33:28.974Z" }, + { url = "https://files.pythonhosted.org/packages/e9/6c/6e1b7f40181bc4805e2e07f4abc10a88ce4648e7e95ff1abe4ae4014a9b2/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12", size = 722614, upload-time = "2024-08-06T20:33:34.157Z" }, + { url = "https://files.pythonhosted.org/packages/3d/32/e7bd8535d22ea2874cef6a81021ba019474ace0d13a4819c2a4bce79bd6a/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19", size = 737360, upload-time = "2024-08-06T20:33:35.84Z" }, + { url = 
"https://files.pythonhosted.org/packages/d7/12/7322c1e30b9be969670b672573d45479edef72c9a0deac3bb2868f5d7469/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e", size = 699006, upload-time = "2024-08-06T20:33:37.501Z" }, + { url = "https://files.pythonhosted.org/packages/82/72/04fcad41ca56491995076630c3ec1e834be241664c0c09a64c9a2589b507/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725", size = 723577, upload-time = "2024-08-06T20:33:39.389Z" }, + { url = "https://files.pythonhosted.org/packages/ed/5e/46168b1f2757f1fcd442bc3029cd8767d88a98c9c05770d8b420948743bb/PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631", size = 144593, upload-time = "2024-08-06T20:33:46.63Z" }, + { url = "https://files.pythonhosted.org/packages/19/87/5124b1c1f2412bb95c59ec481eaf936cd32f0fe2a7b16b97b81c4c017a6a/PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8", size = 162312, upload-time = "2024-08-06T20:33:49.073Z" }, +] + +[[package]] +name = "referencing" +version = "0.36.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "rpds-py" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744, upload-time = "2025-01-25T08:48:16.138Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size 
= 26775, upload-time = "2025-01-25T08:48:14.241Z" }, +] + +[[package]] +name = "rpds-py" +version = "0.26.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a5/aa/4456d84bbb54adc6a916fb10c9b374f78ac840337644e4a5eda229c81275/rpds_py-0.26.0.tar.gz", hash = "sha256:20dae58a859b0906f0685642e591056f1e787f3a8b39c8e8749a45dc7d26bdb0", size = 27385, upload-time = "2025-07-01T15:57:13.958Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/31/1459645f036c3dfeacef89e8e5825e430c77dde8489f3b99eaafcd4a60f5/rpds_py-0.26.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:4c70c70f9169692b36307a95f3d8c0a9fcd79f7b4a383aad5eaa0e9718b79b37", size = 372466, upload-time = "2025-07-01T15:53:40.55Z" }, + { url = "https://files.pythonhosted.org/packages/dd/ff/3d0727f35836cc8773d3eeb9a46c40cc405854e36a8d2e951f3a8391c976/rpds_py-0.26.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:777c62479d12395bfb932944e61e915741e364c843afc3196b694db3d669fcd0", size = 357825, upload-time = "2025-07-01T15:53:42.247Z" }, + { url = "https://files.pythonhosted.org/packages/bf/ce/badc5e06120a54099ae287fa96d82cbb650a5f85cf247ffe19c7b157fd1f/rpds_py-0.26.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec671691e72dff75817386aa02d81e708b5a7ec0dec6669ec05213ff6b77e1bd", size = 381530, upload-time = "2025-07-01T15:53:43.585Z" }, + { url = "https://files.pythonhosted.org/packages/1e/a5/fa5d96a66c95d06c62d7a30707b6a4cfec696ab8ae280ee7be14e961e118/rpds_py-0.26.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6a1cb5d6ce81379401bbb7f6dbe3d56de537fb8235979843f0d53bc2e9815a79", size = 396933, upload-time = "2025-07-01T15:53:45.78Z" }, + { url = "https://files.pythonhosted.org/packages/00/a7/7049d66750f18605c591a9db47d4a059e112a0c9ff8de8daf8fa0f446bba/rpds_py-0.26.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:4f789e32fa1fb6a7bf890e0124e7b42d1e60d28ebff57fe806719abb75f0e9a3", size = 513973, upload-time = "2025-07-01T15:53:47.085Z" }, + { url = "https://files.pythonhosted.org/packages/0e/f1/528d02c7d6b29d29fac8fd784b354d3571cc2153f33f842599ef0cf20dd2/rpds_py-0.26.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c55b0a669976cf258afd718de3d9ad1b7d1fe0a91cd1ab36f38b03d4d4aeaaf", size = 402293, upload-time = "2025-07-01T15:53:48.117Z" }, + { url = "https://files.pythonhosted.org/packages/15/93/fde36cd6e4685df2cd08508f6c45a841e82f5bb98c8d5ecf05649522acb5/rpds_py-0.26.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c70d9ec912802ecfd6cd390dadb34a9578b04f9bcb8e863d0a7598ba5e9e7ccc", size = 383787, upload-time = "2025-07-01T15:53:50.874Z" }, + { url = "https://files.pythonhosted.org/packages/69/f2/5007553aaba1dcae5d663143683c3dfd03d9395289f495f0aebc93e90f24/rpds_py-0.26.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3021933c2cb7def39d927b9862292e0f4c75a13d7de70eb0ab06efed4c508c19", size = 416312, upload-time = "2025-07-01T15:53:52.046Z" }, + { url = "https://files.pythonhosted.org/packages/8f/a7/ce52c75c1e624a79e48a69e611f1c08844564e44c85db2b6f711d76d10ce/rpds_py-0.26.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8a7898b6ca3b7d6659e55cdac825a2e58c638cbf335cde41f4619e290dd0ad11", size = 558403, upload-time = "2025-07-01T15:53:53.192Z" }, + { url = "https://files.pythonhosted.org/packages/79/d5/e119db99341cc75b538bf4cb80504129fa22ce216672fb2c28e4a101f4d9/rpds_py-0.26.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:12bff2ad9447188377f1b2794772f91fe68bb4bbfa5a39d7941fbebdbf8c500f", size = 588323, upload-time = "2025-07-01T15:53:54.336Z" }, + { url = "https://files.pythonhosted.org/packages/93/94/d28272a0b02f5fe24c78c20e13bbcb95f03dc1451b68e7830ca040c60bd6/rpds_py-0.26.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:191aa858f7d4902e975d4cf2f2d9243816c91e9605070aeb09c0a800d187e323", size = 554541, upload-time = "2025-07-01T15:53:55.469Z" }, + { url = "https://files.pythonhosted.org/packages/93/e0/8c41166602f1b791da892d976057eba30685486d2e2c061ce234679c922b/rpds_py-0.26.0-cp310-cp310-win32.whl", hash = "sha256:b37a04d9f52cb76b6b78f35109b513f6519efb481d8ca4c321f6a3b9580b3f45", size = 220442, upload-time = "2025-07-01T15:53:56.524Z" }, + { url = "https://files.pythonhosted.org/packages/87/f0/509736bb752a7ab50fb0270c2a4134d671a7b3038030837e5536c3de0e0b/rpds_py-0.26.0-cp310-cp310-win_amd64.whl", hash = "sha256:38721d4c9edd3eb6670437d8d5e2070063f305bfa2d5aa4278c51cedcd508a84", size = 231314, upload-time = "2025-07-01T15:53:57.842Z" }, + { url = "https://files.pythonhosted.org/packages/09/4c/4ee8f7e512030ff79fda1df3243c88d70fc874634e2dbe5df13ba4210078/rpds_py-0.26.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9e8cb77286025bdb21be2941d64ac6ca016130bfdcd228739e8ab137eb4406ed", size = 372610, upload-time = "2025-07-01T15:53:58.844Z" }, + { url = "https://files.pythonhosted.org/packages/fa/9d/3dc16be00f14fc1f03c71b1d67c8df98263ab2710a2fbd65a6193214a527/rpds_py-0.26.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e09330b21d98adc8ccb2dbb9fc6cb434e8908d4c119aeaa772cb1caab5440a0", size = 358032, upload-time = "2025-07-01T15:53:59.985Z" }, + { url = "https://files.pythonhosted.org/packages/e7/5a/7f1bf8f045da2866324a08ae80af63e64e7bfaf83bd31f865a7b91a58601/rpds_py-0.26.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c9c1b92b774b2e68d11193dc39620d62fd8ab33f0a3c77ecdabe19c179cdbc1", size = 381525, upload-time = "2025-07-01T15:54:01.162Z" }, + { url = "https://files.pythonhosted.org/packages/45/8a/04479398c755a066ace10e3d158866beb600867cacae194c50ffa783abd0/rpds_py-0.26.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:824e6d3503ab990d7090768e4dfd9e840837bae057f212ff9f4f05ec6d1975e7", size = 397089, upload-time = 
"2025-07-01T15:54:02.319Z" }, + { url = "https://files.pythonhosted.org/packages/72/88/9203f47268db488a1b6d469d69c12201ede776bb728b9d9f29dbfd7df406/rpds_py-0.26.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ad7fd2258228bf288f2331f0a6148ad0186b2e3643055ed0db30990e59817a6", size = 514255, upload-time = "2025-07-01T15:54:03.38Z" }, + { url = "https://files.pythonhosted.org/packages/f5/b4/01ce5d1e853ddf81fbbd4311ab1eff0b3cf162d559288d10fd127e2588b5/rpds_py-0.26.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0dc23bbb3e06ec1ea72d515fb572c1fea59695aefbffb106501138762e1e915e", size = 402283, upload-time = "2025-07-01T15:54:04.923Z" }, + { url = "https://files.pythonhosted.org/packages/34/a2/004c99936997bfc644d590a9defd9e9c93f8286568f9c16cdaf3e14429a7/rpds_py-0.26.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d80bf832ac7b1920ee29a426cdca335f96a2b5caa839811803e999b41ba9030d", size = 383881, upload-time = "2025-07-01T15:54:06.482Z" }, + { url = "https://files.pythonhosted.org/packages/05/1b/ef5fba4a8f81ce04c427bfd96223f92f05e6cd72291ce9d7523db3b03a6c/rpds_py-0.26.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0919f38f5542c0a87e7b4afcafab6fd2c15386632d249e9a087498571250abe3", size = 415822, upload-time = "2025-07-01T15:54:07.605Z" }, + { url = "https://files.pythonhosted.org/packages/16/80/5c54195aec456b292f7bd8aa61741c8232964063fd8a75fdde9c1e982328/rpds_py-0.26.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d422b945683e409000c888e384546dbab9009bb92f7c0b456e217988cf316107", size = 558347, upload-time = "2025-07-01T15:54:08.591Z" }, + { url = "https://files.pythonhosted.org/packages/f2/1c/1845c1b1fd6d827187c43afe1841d91678d7241cbdb5420a4c6de180a538/rpds_py-0.26.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:77a7711fa562ba2da1aa757e11024ad6d93bad6ad7ede5afb9af144623e5f76a", size = 587956, upload-time = "2025-07-01T15:54:09.963Z" }, + { url = 
"https://files.pythonhosted.org/packages/2e/ff/9e979329dd131aa73a438c077252ddabd7df6d1a7ad7b9aacf6261f10faa/rpds_py-0.26.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:238e8c8610cb7c29460e37184f6799547f7e09e6a9bdbdab4e8edb90986a2318", size = 554363, upload-time = "2025-07-01T15:54:11.073Z" }, + { url = "https://files.pythonhosted.org/packages/00/8b/d78cfe034b71ffbe72873a136e71acc7a831a03e37771cfe59f33f6de8a2/rpds_py-0.26.0-cp311-cp311-win32.whl", hash = "sha256:893b022bfbdf26d7bedb083efeea624e8550ca6eb98bf7fea30211ce95b9201a", size = 220123, upload-time = "2025-07-01T15:54:12.382Z" }, + { url = "https://files.pythonhosted.org/packages/94/c1/3c8c94c7dd3905dbfde768381ce98778500a80db9924731d87ddcdb117e9/rpds_py-0.26.0-cp311-cp311-win_amd64.whl", hash = "sha256:87a5531de9f71aceb8af041d72fc4cab4943648d91875ed56d2e629bef6d4c03", size = 231732, upload-time = "2025-07-01T15:54:13.434Z" }, + { url = "https://files.pythonhosted.org/packages/67/93/e936fbed1b734eabf36ccb5d93c6a2e9246fbb13c1da011624b7286fae3e/rpds_py-0.26.0-cp311-cp311-win_arm64.whl", hash = "sha256:de2713f48c1ad57f89ac25b3cb7daed2156d8e822cf0eca9b96a6f990718cc41", size = 221917, upload-time = "2025-07-01T15:54:14.559Z" }, + { url = "https://files.pythonhosted.org/packages/ea/86/90eb87c6f87085868bd077c7a9938006eb1ce19ed4d06944a90d3560fce2/rpds_py-0.26.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:894514d47e012e794f1350f076c427d2347ebf82f9b958d554d12819849a369d", size = 363933, upload-time = "2025-07-01T15:54:15.734Z" }, + { url = "https://files.pythonhosted.org/packages/63/78/4469f24d34636242c924626082b9586f064ada0b5dbb1e9d096ee7a8e0c6/rpds_py-0.26.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc921b96fa95a097add244da36a1d9e4f3039160d1d30f1b35837bf108c21136", size = 350447, upload-time = "2025-07-01T15:54:16.922Z" }, + { url = 
"https://files.pythonhosted.org/packages/ad/91/c448ed45efdfdade82348d5e7995e15612754826ea640afc20915119734f/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e1157659470aa42a75448b6e943c895be8c70531c43cb78b9ba990778955582", size = 384711, upload-time = "2025-07-01T15:54:18.101Z" }, + { url = "https://files.pythonhosted.org/packages/ec/43/e5c86fef4be7f49828bdd4ecc8931f0287b1152c0bb0163049b3218740e7/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:521ccf56f45bb3a791182dc6b88ae5f8fa079dd705ee42138c76deb1238e554e", size = 400865, upload-time = "2025-07-01T15:54:19.295Z" }, + { url = "https://files.pythonhosted.org/packages/55/34/e00f726a4d44f22d5c5fe2e5ddd3ac3d7fd3f74a175607781fbdd06fe375/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9def736773fd56b305c0eef698be5192c77bfa30d55a0e5885f80126c4831a15", size = 517763, upload-time = "2025-07-01T15:54:20.858Z" }, + { url = "https://files.pythonhosted.org/packages/52/1c/52dc20c31b147af724b16104500fba13e60123ea0334beba7b40e33354b4/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cdad4ea3b4513b475e027be79e5a0ceac8ee1c113a1a11e5edc3c30c29f964d8", size = 406651, upload-time = "2025-07-01T15:54:22.508Z" }, + { url = "https://files.pythonhosted.org/packages/2e/77/87d7bfabfc4e821caa35481a2ff6ae0b73e6a391bb6b343db2c91c2b9844/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82b165b07f416bdccf5c84546a484cc8f15137ca38325403864bfdf2b5b72f6a", size = 386079, upload-time = "2025-07-01T15:54:23.987Z" }, + { url = "https://files.pythonhosted.org/packages/e3/d4/7f2200c2d3ee145b65b3cddc4310d51f7da6a26634f3ac87125fd789152a/rpds_py-0.26.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d04cab0a54b9dba4d278fe955a1390da3cf71f57feb78ddc7cb67cbe0bd30323", size = 421379, upload-time = "2025-07-01T15:54:25.073Z" }, + { 
url = "https://files.pythonhosted.org/packages/ae/13/9fdd428b9c820869924ab62236b8688b122baa22d23efdd1c566938a39ba/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:79061ba1a11b6a12743a2b0f72a46aa2758613d454aa6ba4f5a265cc48850158", size = 562033, upload-time = "2025-07-01T15:54:26.225Z" }, + { url = "https://files.pythonhosted.org/packages/f3/e1/b69686c3bcbe775abac3a4c1c30a164a2076d28df7926041f6c0eb5e8d28/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f405c93675d8d4c5ac87364bb38d06c988e11028a64b52a47158a355079661f3", size = 591639, upload-time = "2025-07-01T15:54:27.424Z" }, + { url = "https://files.pythonhosted.org/packages/5c/c9/1e3d8c8863c84a90197ac577bbc3d796a92502124c27092413426f670990/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dafd4c44b74aa4bed4b250f1aed165b8ef5de743bcca3b88fc9619b6087093d2", size = 557105, upload-time = "2025-07-01T15:54:29.93Z" }, + { url = "https://files.pythonhosted.org/packages/9f/c5/90c569649057622959f6dcc40f7b516539608a414dfd54b8d77e3b201ac0/rpds_py-0.26.0-cp312-cp312-win32.whl", hash = "sha256:3da5852aad63fa0c6f836f3359647870e21ea96cf433eb393ffa45263a170d44", size = 223272, upload-time = "2025-07-01T15:54:31.128Z" }, + { url = "https://files.pythonhosted.org/packages/7d/16/19f5d9f2a556cfed454eebe4d354c38d51c20f3db69e7b4ce6cff904905d/rpds_py-0.26.0-cp312-cp312-win_amd64.whl", hash = "sha256:cf47cfdabc2194a669dcf7a8dbba62e37a04c5041d2125fae0233b720da6f05c", size = 234995, upload-time = "2025-07-01T15:54:32.195Z" }, + { url = "https://files.pythonhosted.org/packages/83/f0/7935e40b529c0e752dfaa7880224771b51175fce08b41ab4a92eb2fbdc7f/rpds_py-0.26.0-cp312-cp312-win_arm64.whl", hash = "sha256:20ab1ae4fa534f73647aad289003f1104092890849e0266271351922ed5574f8", size = 223198, upload-time = "2025-07-01T15:54:33.271Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/67/bb62d0109493b12b1c6ab00de7a5566aa84c0e44217c2d94bee1bd370da9/rpds_py-0.26.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:696764a5be111b036256c0b18cd29783fab22154690fc698062fc1b0084b511d", size = 363917, upload-time = "2025-07-01T15:54:34.755Z" }, + { url = "https://files.pythonhosted.org/packages/4b/f3/34e6ae1925a5706c0f002a8d2d7f172373b855768149796af87bd65dcdb9/rpds_py-0.26.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1e6c15d2080a63aaed876e228efe4f814bc7889c63b1e112ad46fdc8b368b9e1", size = 350073, upload-time = "2025-07-01T15:54:36.292Z" }, + { url = "https://files.pythonhosted.org/packages/75/83/1953a9d4f4e4de7fd0533733e041c28135f3c21485faaef56a8aadbd96b5/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:390e3170babf42462739a93321e657444f0862c6d722a291accc46f9d21ed04e", size = 384214, upload-time = "2025-07-01T15:54:37.469Z" }, + { url = "https://files.pythonhosted.org/packages/48/0e/983ed1b792b3322ea1d065e67f4b230f3b96025f5ce3878cc40af09b7533/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7da84c2c74c0f5bc97d853d9e17bb83e2dcafcff0dc48286916001cc114379a1", size = 400113, upload-time = "2025-07-01T15:54:38.954Z" }, + { url = "https://files.pythonhosted.org/packages/69/7f/36c0925fff6f660a80be259c5b4f5e53a16851f946eb080351d057698528/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c5fe114a6dd480a510b6d3661d09d67d1622c4bf20660a474507aaee7eeeee9", size = 515189, upload-time = "2025-07-01T15:54:40.57Z" }, + { url = "https://files.pythonhosted.org/packages/13/45/cbf07fc03ba7a9b54662c9badb58294ecfb24f828b9732970bd1a431ed5c/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3100b3090269f3a7ea727b06a6080d4eb7439dca4c0e91a07c5d133bb1727ea7", size = 406998, upload-time = "2025-07-01T15:54:43.025Z" }, + { url = 
"https://files.pythonhosted.org/packages/6c/b0/8fa5e36e58657997873fd6a1cf621285ca822ca75b4b3434ead047daa307/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c03c9b0c64afd0320ae57de4c982801271c0c211aa2d37f3003ff5feb75bb04", size = 385903, upload-time = "2025-07-01T15:54:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/4b/f7/b25437772f9f57d7a9fbd73ed86d0dcd76b4c7c6998348c070d90f23e315/rpds_py-0.26.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5963b72ccd199ade6ee493723d18a3f21ba7d5b957017607f815788cef50eaf1", size = 419785, upload-time = "2025-07-01T15:54:46.043Z" }, + { url = "https://files.pythonhosted.org/packages/a7/6b/63ffa55743dfcb4baf2e9e77a0b11f7f97ed96a54558fcb5717a4b2cd732/rpds_py-0.26.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9da4e873860ad5bab3291438525cae80169daecbfafe5657f7f5fb4d6b3f96b9", size = 561329, upload-time = "2025-07-01T15:54:47.64Z" }, + { url = "https://files.pythonhosted.org/packages/2f/07/1f4f5e2886c480a2346b1e6759c00278b8a69e697ae952d82ae2e6ee5db0/rpds_py-0.26.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5afaddaa8e8c7f1f7b4c5c725c0070b6eed0228f705b90a1732a48e84350f4e9", size = 590875, upload-time = "2025-07-01T15:54:48.9Z" }, + { url = "https://files.pythonhosted.org/packages/cc/bc/e6639f1b91c3a55f8c41b47d73e6307051b6e246254a827ede730624c0f8/rpds_py-0.26.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4916dc96489616a6f9667e7526af8fa693c0fdb4f3acb0e5d9f4400eb06a47ba", size = 556636, upload-time = "2025-07-01T15:54:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/05/4c/b3917c45566f9f9a209d38d9b54a1833f2bb1032a3e04c66f75726f28876/rpds_py-0.26.0-cp313-cp313-win32.whl", hash = "sha256:2a343f91b17097c546b93f7999976fd6c9d5900617aa848c81d794e062ab302b", size = 222663, upload-time = "2025-07-01T15:54:52.023Z" }, + { url = 
"https://files.pythonhosted.org/packages/e0/0b/0851bdd6025775aaa2365bb8de0697ee2558184c800bfef8d7aef5ccde58/rpds_py-0.26.0-cp313-cp313-win_amd64.whl", hash = "sha256:0a0b60701f2300c81b2ac88a5fb893ccfa408e1c4a555a77f908a2596eb875a5", size = 234428, upload-time = "2025-07-01T15:54:53.692Z" }, + { url = "https://files.pythonhosted.org/packages/ed/e8/a47c64ed53149c75fb581e14a237b7b7cd18217e969c30d474d335105622/rpds_py-0.26.0-cp313-cp313-win_arm64.whl", hash = "sha256:257d011919f133a4746958257f2c75238e3ff54255acd5e3e11f3ff41fd14256", size = 222571, upload-time = "2025-07-01T15:54:54.822Z" }, + { url = "https://files.pythonhosted.org/packages/89/bf/3d970ba2e2bcd17d2912cb42874107390f72873e38e79267224110de5e61/rpds_py-0.26.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:529c8156d7506fba5740e05da8795688f87119cce330c244519cf706a4a3d618", size = 360475, upload-time = "2025-07-01T15:54:56.228Z" }, + { url = "https://files.pythonhosted.org/packages/82/9f/283e7e2979fc4ec2d8ecee506d5a3675fce5ed9b4b7cb387ea5d37c2f18d/rpds_py-0.26.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f53ec51f9d24e9638a40cabb95078ade8c99251945dad8d57bf4aabe86ecee35", size = 346692, upload-time = "2025-07-01T15:54:58.561Z" }, + { url = "https://files.pythonhosted.org/packages/e3/03/7e50423c04d78daf391da3cc4330bdb97042fc192a58b186f2d5deb7befd/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab504c4d654e4a29558eaa5bb8cea5fdc1703ea60a8099ffd9c758472cf913f", size = 379415, upload-time = "2025-07-01T15:54:59.751Z" }, + { url = "https://files.pythonhosted.org/packages/57/00/d11ee60d4d3b16808432417951c63df803afb0e0fc672b5e8d07e9edaaae/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd0641abca296bc1a00183fe44f7fced8807ed49d501f188faa642d0e4975b83", size = 391783, upload-time = "2025-07-01T15:55:00.898Z" }, + { url = 
"https://files.pythonhosted.org/packages/08/b3/1069c394d9c0d6d23c5b522e1f6546b65793a22950f6e0210adcc6f97c3e/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:69b312fecc1d017b5327afa81d4da1480f51c68810963a7336d92203dbb3d4f1", size = 512844, upload-time = "2025-07-01T15:55:02.201Z" }, + { url = "https://files.pythonhosted.org/packages/08/3b/c4fbf0926800ed70b2c245ceca99c49f066456755f5d6eb8863c2c51e6d0/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c741107203954f6fc34d3066d213d0a0c40f7bb5aafd698fb39888af277c70d8", size = 402105, upload-time = "2025-07-01T15:55:03.698Z" }, + { url = "https://files.pythonhosted.org/packages/1c/b0/db69b52ca07413e568dae9dc674627a22297abb144c4d6022c6d78f1e5cc/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc3e55a7db08dc9a6ed5fb7103019d2c1a38a349ac41901f9f66d7f95750942f", size = 383440, upload-time = "2025-07-01T15:55:05.398Z" }, + { url = "https://files.pythonhosted.org/packages/4c/e1/c65255ad5b63903e56b3bb3ff9dcc3f4f5c3badde5d08c741ee03903e951/rpds_py-0.26.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e851920caab2dbcae311fd28f4313c6953993893eb5c1bb367ec69d9a39e7ed", size = 412759, upload-time = "2025-07-01T15:55:08.316Z" }, + { url = "https://files.pythonhosted.org/packages/e4/22/bb731077872377a93c6e93b8a9487d0406c70208985831034ccdeed39c8e/rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:dfbf280da5f876d0b00c81f26bedce274e72a678c28845453885a9b3c22ae632", size = 556032, upload-time = "2025-07-01T15:55:09.52Z" }, + { url = "https://files.pythonhosted.org/packages/e0/8b/393322ce7bac5c4530fb96fc79cc9ea2f83e968ff5f6e873f905c493e1c4/rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1cc81d14ddfa53d7f3906694d35d54d9d3f850ef8e4e99ee68bc0d1e5fed9a9c", size = 585416, upload-time = "2025-07-01T15:55:11.216Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/ae/769dc372211835bf759319a7aae70525c6eb523e3371842c65b7ef41c9c6/rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dca83c498b4650a91efcf7b88d669b170256bf8017a5db6f3e06c2bf031f57e0", size = 554049, upload-time = "2025-07-01T15:55:13.004Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f9/4c43f9cc203d6ba44ce3146246cdc38619d92c7bd7bad4946a3491bd5b70/rpds_py-0.26.0-cp313-cp313t-win32.whl", hash = "sha256:4d11382bcaf12f80b51d790dee295c56a159633a8e81e6323b16e55d81ae37e9", size = 218428, upload-time = "2025-07-01T15:55:14.486Z" }, + { url = "https://files.pythonhosted.org/packages/7e/8b/9286b7e822036a4a977f2f1e851c7345c20528dbd56b687bb67ed68a8ede/rpds_py-0.26.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff110acded3c22c033e637dd8896e411c7d3a11289b2edf041f86663dbc791e9", size = 231524, upload-time = "2025-07-01T15:55:15.745Z" }, + { url = "https://files.pythonhosted.org/packages/55/07/029b7c45db910c74e182de626dfdae0ad489a949d84a468465cd0ca36355/rpds_py-0.26.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:da619979df60a940cd434084355c514c25cf8eb4cf9a508510682f6c851a4f7a", size = 364292, upload-time = "2025-07-01T15:55:17.001Z" }, + { url = "https://files.pythonhosted.org/packages/13/d1/9b3d3f986216b4d1f584878dca15ce4797aaf5d372d738974ba737bf68d6/rpds_py-0.26.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ea89a2458a1a75f87caabefe789c87539ea4e43b40f18cff526052e35bbb4fdf", size = 350334, upload-time = "2025-07-01T15:55:18.922Z" }, + { url = "https://files.pythonhosted.org/packages/18/98/16d5e7bc9ec715fa9668731d0cf97f6b032724e61696e2db3d47aeb89214/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feac1045b3327a45944e7dcbeb57530339f6b17baff154df51ef8b0da34c8c12", size = 384875, upload-time = "2025-07-01T15:55:20.399Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/13/aa5e2b1ec5ab0e86a5c464d53514c0467bec6ba2507027d35fc81818358e/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b818a592bd69bfe437ee8368603d4a2d928c34cffcdf77c2e761a759ffd17d20", size = 399993, upload-time = "2025-07-01T15:55:21.729Z" }, + { url = "https://files.pythonhosted.org/packages/17/03/8021810b0e97923abdbab6474c8b77c69bcb4b2c58330777df9ff69dc559/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a8b0dd8648709b62d9372fc00a57466f5fdeefed666afe3fea5a6c9539a0331", size = 516683, upload-time = "2025-07-01T15:55:22.918Z" }, + { url = "https://files.pythonhosted.org/packages/dc/b1/da8e61c87c2f3d836954239fdbbfb477bb7b54d74974d8f6fcb34342d166/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6d3498ad0df07d81112aa6ec6c95a7e7b1ae00929fb73e7ebee0f3faaeabad2f", size = 408825, upload-time = "2025-07-01T15:55:24.207Z" }, + { url = "https://files.pythonhosted.org/packages/38/bc/1fc173edaaa0e52c94b02a655db20697cb5fa954ad5a8e15a2c784c5cbdd/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24a4146ccb15be237fdef10f331c568e1b0e505f8c8c9ed5d67759dac58ac246", size = 387292, upload-time = "2025-07-01T15:55:25.554Z" }, + { url = "https://files.pythonhosted.org/packages/7c/eb/3a9bb4bd90867d21916f253caf4f0d0be7098671b6715ad1cead9fe7bab9/rpds_py-0.26.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a9a63785467b2d73635957d32a4f6e73d5e4df497a16a6392fa066b753e87387", size = 420435, upload-time = "2025-07-01T15:55:27.798Z" }, + { url = "https://files.pythonhosted.org/packages/cd/16/e066dcdb56f5632713445271a3f8d3d0b426d51ae9c0cca387799df58b02/rpds_py-0.26.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:de4ed93a8c91debfd5a047be327b7cc8b0cc6afe32a716bbbc4aedca9e2a83af", size = 562410, upload-time = "2025-07-01T15:55:29.057Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/22/ddbdec7eb82a0dc2e455be44c97c71c232983e21349836ce9f272e8a3c29/rpds_py-0.26.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:caf51943715b12af827696ec395bfa68f090a4c1a1d2509eb4e2cb69abbbdb33", size = 590724, upload-time = "2025-07-01T15:55:30.719Z" }, + { url = "https://files.pythonhosted.org/packages/2c/b4/95744085e65b7187d83f2fcb0bef70716a1ea0a9e5d8f7f39a86e5d83424/rpds_py-0.26.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4a59e5bc386de021f56337f757301b337d7ab58baa40174fb150accd480bc953", size = 558285, upload-time = "2025-07-01T15:55:31.981Z" }, + { url = "https://files.pythonhosted.org/packages/37/37/6309a75e464d1da2559446f9c811aa4d16343cebe3dbb73701e63f760caa/rpds_py-0.26.0-cp314-cp314-win32.whl", hash = "sha256:92c8db839367ef16a662478f0a2fe13e15f2227da3c1430a782ad0f6ee009ec9", size = 223459, upload-time = "2025-07-01T15:55:33.312Z" }, + { url = "https://files.pythonhosted.org/packages/d9/6f/8e9c11214c46098b1d1391b7e02b70bb689ab963db3b19540cba17315291/rpds_py-0.26.0-cp314-cp314-win_amd64.whl", hash = "sha256:b0afb8cdd034150d4d9f53926226ed27ad15b7f465e93d7468caaf5eafae0d37", size = 236083, upload-time = "2025-07-01T15:55:34.933Z" }, + { url = "https://files.pythonhosted.org/packages/47/af/9c4638994dd623d51c39892edd9d08e8be8220a4b7e874fa02c2d6e91955/rpds_py-0.26.0-cp314-cp314-win_arm64.whl", hash = "sha256:ca3f059f4ba485d90c8dc75cb5ca897e15325e4e609812ce57f896607c1c0867", size = 223291, upload-time = "2025-07-01T15:55:36.202Z" }, + { url = "https://files.pythonhosted.org/packages/4d/db/669a241144460474aab03e254326b32c42def83eb23458a10d163cb9b5ce/rpds_py-0.26.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:5afea17ab3a126006dc2f293b14ffc7ef3c85336cf451564a0515ed7648033da", size = 361445, upload-time = "2025-07-01T15:55:37.483Z" }, + { url = "https://files.pythonhosted.org/packages/3b/2d/133f61cc5807c6c2fd086a46df0eb8f63a23f5df8306ff9f6d0fd168fecc/rpds_py-0.26.0-cp314-cp314t-macosx_11_0_arm64.whl", 
hash = "sha256:69f0c0a3df7fd3a7eec50a00396104bb9a843ea6d45fcc31c2d5243446ffd7a7", size = 347206, upload-time = "2025-07-01T15:55:38.828Z" }, + { url = "https://files.pythonhosted.org/packages/05/bf/0e8fb4c05f70273469eecf82f6ccf37248558526a45321644826555db31b/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:801a71f70f9813e82d2513c9a96532551fce1e278ec0c64610992c49c04c2dad", size = 380330, upload-time = "2025-07-01T15:55:40.175Z" }, + { url = "https://files.pythonhosted.org/packages/d4/a8/060d24185d8b24d3923322f8d0ede16df4ade226a74e747b8c7c978e3dd3/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:df52098cde6d5e02fa75c1f6244f07971773adb4a26625edd5c18fee906fa84d", size = 392254, upload-time = "2025-07-01T15:55:42.015Z" }, + { url = "https://files.pythonhosted.org/packages/b9/7b/7c2e8a9ee3e6bc0bae26bf29f5219955ca2fbb761dca996a83f5d2f773fe/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bc596b30f86dc6f0929499c9e574601679d0341a0108c25b9b358a042f51bca", size = 516094, upload-time = "2025-07-01T15:55:43.603Z" }, + { url = "https://files.pythonhosted.org/packages/75/d6/f61cafbed8ba1499b9af9f1777a2a199cd888f74a96133d8833ce5eaa9c5/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9dfbe56b299cf5875b68eb6f0ebaadc9cac520a1989cac0db0765abfb3709c19", size = 402889, upload-time = "2025-07-01T15:55:45.275Z" }, + { url = "https://files.pythonhosted.org/packages/92/19/c8ac0a8a8df2dd30cdec27f69298a5c13e9029500d6d76718130f5e5be10/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac64f4b2bdb4ea622175c9ab7cf09444e412e22c0e02e906978b3b488af5fde8", size = 384301, upload-time = "2025-07-01T15:55:47.098Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/e1/6b1859898bc292a9ce5776016c7312b672da00e25cec74d7beced1027286/rpds_py-0.26.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:181ef9b6bbf9845a264f9aa45c31836e9f3c1f13be565d0d010e964c661d1e2b", size = 412891, upload-time = "2025-07-01T15:55:48.412Z" }, + { url = "https://files.pythonhosted.org/packages/ef/b9/ceb39af29913c07966a61367b3c08b4f71fad841e32c6b59a129d5974698/rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:49028aa684c144ea502a8e847d23aed5e4c2ef7cadfa7d5eaafcb40864844b7a", size = 557044, upload-time = "2025-07-01T15:55:49.816Z" }, + { url = "https://files.pythonhosted.org/packages/2f/27/35637b98380731a521f8ec4f3fd94e477964f04f6b2f8f7af8a2d889a4af/rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:e5d524d68a474a9688336045bbf76cb0def88549c1b2ad9dbfec1fb7cfbe9170", size = 585774, upload-time = "2025-07-01T15:55:51.192Z" }, + { url = "https://files.pythonhosted.org/packages/52/d9/3f0f105420fecd18551b678c9a6ce60bd23986098b252a56d35781b3e7e9/rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c1851f429b822831bd2edcbe0cfd12ee9ea77868f8d3daf267b189371671c80e", size = 554886, upload-time = "2025-07-01T15:55:52.541Z" }, + { url = "https://files.pythonhosted.org/packages/6b/c5/347c056a90dc8dd9bc240a08c527315008e1b5042e7a4cf4ac027be9d38a/rpds_py-0.26.0-cp314-cp314t-win32.whl", hash = "sha256:7bdb17009696214c3b66bb3590c6d62e14ac5935e53e929bcdbc5a495987a84f", size = 219027, upload-time = "2025-07-01T15:55:53.874Z" }, + { url = "https://files.pythonhosted.org/packages/75/04/5302cea1aa26d886d34cadbf2dc77d90d7737e576c0065f357b96dc7a1a6/rpds_py-0.26.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f14440b9573a6f76b4ee4770c13f0b5921f71dde3b6fcb8dabbefd13b7fe05d7", size = 232821, upload-time = "2025-07-01T15:55:55.167Z" }, + { url = 
"https://files.pythonhosted.org/packages/fb/74/846ab687119c9d31fc21ab1346ef9233c31035ce53c0e2d43a130a0c5a5e/rpds_py-0.26.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:7a48af25d9b3c15684059d0d1fc0bc30e8eee5ca521030e2bffddcab5be40226", size = 372786, upload-time = "2025-07-01T15:55:56.512Z" }, + { url = "https://files.pythonhosted.org/packages/33/02/1f9e465cb1a6032d02b17cd117c7bd9fb6156bc5b40ffeb8053d8a2aa89c/rpds_py-0.26.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0c71c2f6bf36e61ee5c47b2b9b5d47e4d1baad6426bfed9eea3e858fc6ee8806", size = 358062, upload-time = "2025-07-01T15:55:58.084Z" }, + { url = "https://files.pythonhosted.org/packages/2a/49/81a38e3c67ac943907a9711882da3d87758c82cf26b2120b8128e45d80df/rpds_py-0.26.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d815d48b1804ed7867b539236b6dd62997850ca1c91cad187f2ddb1b7bbef19", size = 381576, upload-time = "2025-07-01T15:55:59.422Z" }, + { url = "https://files.pythonhosted.org/packages/14/37/418f030a76ef59f41e55f9dc916af8afafa3c9e3be38df744b2014851474/rpds_py-0.26.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:84cfbd4d4d2cdeb2be61a057a258d26b22877266dd905809e94172dff01a42ae", size = 397062, upload-time = "2025-07-01T15:56:00.868Z" }, + { url = "https://files.pythonhosted.org/packages/47/e3/9090817a8f4388bfe58e28136e9682fa7872a06daff2b8a2f8c78786a6e1/rpds_py-0.26.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fbaa70553ca116c77717f513e08815aec458e6b69a028d4028d403b3bc84ff37", size = 516277, upload-time = "2025-07-01T15:56:02.672Z" }, + { url = "https://files.pythonhosted.org/packages/3f/3a/1ec3dd93250fb8023f27d49b3f92e13f679141f2e59a61563f88922c2821/rpds_py-0.26.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39bfea47c375f379d8e87ab4bb9eb2c836e4f2069f0f65731d85e55d74666387", size = 402604, upload-time = "2025-07-01T15:56:04.453Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/98/9133c06e42ec3ce637936263c50ac647f879b40a35cfad2f5d4ad418a439/rpds_py-0.26.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1533b7eb683fb5f38c1d68a3c78f5fdd8f1412fa6b9bf03b40f450785a0ab915", size = 383664, upload-time = "2025-07-01T15:56:05.823Z" }, + { url = "https://files.pythonhosted.org/packages/a9/10/a59ce64099cc77c81adb51f06909ac0159c19a3e2c9d9613bab171f4730f/rpds_py-0.26.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c5ab0ee51f560d179b057555b4f601b7df909ed31312d301b99f8b9fc6028284", size = 415944, upload-time = "2025-07-01T15:56:07.132Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f1/ae0c60b3be9df9d5bef3527d83b8eb4b939e3619f6dd8382840e220a27df/rpds_py-0.26.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e5162afc9e0d1f9cae3b577d9c29ddbab3505ab39012cb794d94a005825bde21", size = 558311, upload-time = "2025-07-01T15:56:08.484Z" }, + { url = "https://files.pythonhosted.org/packages/fb/2b/bf1498ebb3ddc5eff2fe3439da88963d1fc6e73d1277fa7ca0c72620d167/rpds_py-0.26.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:43f10b007033f359bc3fa9cd5e6c1e76723f056ffa9a6b5c117cc35720a80292", size = 587928, upload-time = "2025-07-01T15:56:09.946Z" }, + { url = "https://files.pythonhosted.org/packages/b6/eb/e6b949edf7af5629848c06d6e544a36c9f2781e2d8d03b906de61ada04d0/rpds_py-0.26.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e3730a48e5622e598293eee0762b09cff34dd3f271530f47b0894891281f051d", size = 554554, upload-time = "2025-07-01T15:56:11.775Z" }, + { url = "https://files.pythonhosted.org/packages/0a/1c/aa0298372ea898620d4706ad26b5b9e975550a4dd30bd042b0fe9ae72cce/rpds_py-0.26.0-cp39-cp39-win32.whl", hash = "sha256:4b1f66eb81eab2e0ff5775a3a312e5e2e16bf758f7b06be82fb0d04078c7ac51", size = 220273, upload-time = "2025-07-01T15:56:13.273Z" }, + { url = 
"https://files.pythonhosted.org/packages/b8/b0/8b3bef6ad0b35c172d1c87e2e5c2bb027d99e2a7bc7a16f744e66cf318f3/rpds_py-0.26.0-cp39-cp39-win_amd64.whl", hash = "sha256:519067e29f67b5c90e64fb1a6b6e9d2ec0ba28705c51956637bac23a2f4ddae1", size = 231627, upload-time = "2025-07-01T15:56:14.853Z" }, + { url = "https://files.pythonhosted.org/packages/ef/9a/1f033b0b31253d03d785b0cd905bc127e555ab496ea6b4c7c2e1f951f2fd/rpds_py-0.26.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3c0909c5234543ada2515c05dc08595b08d621ba919629e94427e8e03539c958", size = 373226, upload-time = "2025-07-01T15:56:16.578Z" }, + { url = "https://files.pythonhosted.org/packages/58/29/5f88023fd6aaaa8ca3c4a6357ebb23f6f07da6079093ccf27c99efce87db/rpds_py-0.26.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:c1fb0cda2abcc0ac62f64e2ea4b4e64c57dfd6b885e693095460c61bde7bb18e", size = 359230, upload-time = "2025-07-01T15:56:17.978Z" }, + { url = "https://files.pythonhosted.org/packages/6c/6c/13eaebd28b439da6964dde22712b52e53fe2824af0223b8e403249d10405/rpds_py-0.26.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84d142d2d6cf9b31c12aa4878d82ed3b2324226270b89b676ac62ccd7df52d08", size = 382363, upload-time = "2025-07-01T15:56:19.977Z" }, + { url = "https://files.pythonhosted.org/packages/55/fc/3bb9c486b06da19448646f96147796de23c5811ef77cbfc26f17307b6a9d/rpds_py-0.26.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a547e21c5610b7e9093d870be50682a6a6cf180d6da0f42c47c306073bfdbbf6", size = 397146, upload-time = "2025-07-01T15:56:21.39Z" }, + { url = "https://files.pythonhosted.org/packages/15/18/9d1b79eb4d18e64ba8bba9e7dec6f9d6920b639f22f07ee9368ca35d4673/rpds_py-0.26.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35e9a70a0f335371275cdcd08bc5b8051ac494dd58bff3bbfb421038220dc871", size = 514804, upload-time = "2025-07-01T15:56:22.78Z" }, + { url = 
"https://files.pythonhosted.org/packages/4f/5a/175ad7191bdbcd28785204621b225ad70e85cdfd1e09cc414cb554633b21/rpds_py-0.26.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0dfa6115c6def37905344d56fb54c03afc49104e2ca473d5dedec0f6606913b4", size = 402820, upload-time = "2025-07-01T15:56:24.584Z" }, + { url = "https://files.pythonhosted.org/packages/11/45/6a67ecf6d61c4d4aff4bc056e864eec4b2447787e11d1c2c9a0242c6e92a/rpds_py-0.26.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:313cfcd6af1a55a286a3c9a25f64af6d0e46cf60bc5798f1db152d97a216ff6f", size = 384567, upload-time = "2025-07-01T15:56:26.064Z" }, + { url = "https://files.pythonhosted.org/packages/a1/ba/16589da828732b46454c61858950a78fe4c931ea4bf95f17432ffe64b241/rpds_py-0.26.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f7bf2496fa563c046d05e4d232d7b7fd61346e2402052064b773e5c378bf6f73", size = 416520, upload-time = "2025-07-01T15:56:27.608Z" }, + { url = "https://files.pythonhosted.org/packages/81/4b/00092999fc7c0c266045e984d56b7314734cc400a6c6dc4d61a35f135a9d/rpds_py-0.26.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:aa81873e2c8c5aa616ab8e017a481a96742fdf9313c40f14338ca7dbf50cb55f", size = 559362, upload-time = "2025-07-01T15:56:29.078Z" }, + { url = "https://files.pythonhosted.org/packages/96/0c/43737053cde1f93ac4945157f7be1428724ab943e2132a0d235a7e161d4e/rpds_py-0.26.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:68ffcf982715f5b5b7686bdd349ff75d422e8f22551000c24b30eaa1b7f7ae84", size = 588113, upload-time = "2025-07-01T15:56:30.485Z" }, + { url = "https://files.pythonhosted.org/packages/46/46/8e38f6161466e60a997ed7e9951ae5de131dedc3cf778ad35994b4af823d/rpds_py-0.26.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6188de70e190847bb6db3dc3981cbadff87d27d6fe9b4f0e18726d55795cee9b", size = 555429, upload-time = "2025-07-01T15:56:31.956Z" }, + { url = 
"https://files.pythonhosted.org/packages/2c/ac/65da605e9f1dd643ebe615d5bbd11b6efa1d69644fc4bf623ea5ae385a82/rpds_py-0.26.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1c962145c7473723df9722ba4c058de12eb5ebedcb4e27e7d902920aa3831ee8", size = 231950, upload-time = "2025-07-01T15:56:33.337Z" }, + { url = "https://files.pythonhosted.org/packages/51/f2/b5c85b758a00c513bb0389f8fc8e61eb5423050c91c958cdd21843faa3e6/rpds_py-0.26.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f61a9326f80ca59214d1cceb0a09bb2ece5b2563d4e0cd37bfd5515c28510674", size = 373505, upload-time = "2025-07-01T15:56:34.716Z" }, + { url = "https://files.pythonhosted.org/packages/23/e0/25db45e391251118e915e541995bb5f5ac5691a3b98fb233020ba53afc9b/rpds_py-0.26.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:183f857a53bcf4b1b42ef0f57ca553ab56bdd170e49d8091e96c51c3d69ca696", size = 359468, upload-time = "2025-07-01T15:56:36.219Z" }, + { url = "https://files.pythonhosted.org/packages/0b/73/dd5ee6075bb6491be3a646b301dfd814f9486d924137a5098e61f0487e16/rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:941c1cfdf4799d623cf3aa1d326a6b4fdb7a5799ee2687f3516738216d2262fb", size = 382680, upload-time = "2025-07-01T15:56:37.644Z" }, + { url = "https://files.pythonhosted.org/packages/2f/10/84b522ff58763a5c443f5bcedc1820240e454ce4e620e88520f04589e2ea/rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72a8d9564a717ee291f554eeb4bfeafe2309d5ec0aa6c475170bdab0f9ee8e88", size = 397035, upload-time = "2025-07-01T15:56:39.241Z" }, + { url = "https://files.pythonhosted.org/packages/06/ea/8667604229a10a520fcbf78b30ccc278977dcc0627beb7ea2c96b3becef0/rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:511d15193cbe013619dd05414c35a7dedf2088fcee93c6bbb7c77859765bd4e8", size = 514922, upload-time = "2025-07-01T15:56:40.645Z" }, + { url = 
"https://files.pythonhosted.org/packages/24/e6/9ed5b625c0661c4882fc8cdf302bf8e96c73c40de99c31e0b95ed37d508c/rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aea1f9741b603a8d8fedb0ed5502c2bc0accbc51f43e2ad1337fe7259c2b77a5", size = 402822, upload-time = "2025-07-01T15:56:42.137Z" }, + { url = "https://files.pythonhosted.org/packages/8a/58/212c7b6fd51946047fb45d3733da27e2fa8f7384a13457c874186af691b1/rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4019a9d473c708cf2f16415688ef0b4639e07abaa569d72f74745bbeffafa2c7", size = 384336, upload-time = "2025-07-01T15:56:44.239Z" }, + { url = "https://files.pythonhosted.org/packages/aa/f5/a40ba78748ae8ebf4934d4b88e77b98497378bc2c24ba55ebe87a4e87057/rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:093d63b4b0f52d98ebae33b8c50900d3d67e0666094b1be7a12fffd7f65de74b", size = 416871, upload-time = "2025-07-01T15:56:46.284Z" }, + { url = "https://files.pythonhosted.org/packages/d5/a6/33b1fc0c9f7dcfcfc4a4353daa6308b3ece22496ceece348b3e7a7559a09/rpds_py-0.26.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2abe21d8ba64cded53a2a677e149ceb76dcf44284202d737178afe7ba540c1eb", size = 559439, upload-time = "2025-07-01T15:56:48.549Z" }, + { url = "https://files.pythonhosted.org/packages/71/2d/ceb3f9c12f8cfa56d34995097f6cd99da1325642c60d1b6680dd9df03ed8/rpds_py-0.26.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:4feb7511c29f8442cbbc28149a92093d32e815a28aa2c50d333826ad2a20fdf0", size = 588380, upload-time = "2025-07-01T15:56:50.086Z" }, + { url = "https://files.pythonhosted.org/packages/c8/ed/9de62c2150ca8e2e5858acf3f4f4d0d180a38feef9fdab4078bea63d8dba/rpds_py-0.26.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e99685fc95d386da368013e7fb4269dd39c30d99f812a8372d62f244f662709c", size = 555334, upload-time = "2025-07-01T15:56:51.703Z" }, + { url = 
"https://files.pythonhosted.org/packages/7e/78/a08e2f28e91c7e45db1150813c6d760a0fb114d5652b1373897073369e0d/rpds_py-0.26.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a90a13408a7a856b87be8a9f008fff53c5080eea4e4180f6c2e546e4a972fb5d", size = 373157, upload-time = "2025-07-01T15:56:53.291Z" }, + { url = "https://files.pythonhosted.org/packages/52/01/ddf51517497c8224fb0287e9842b820ed93748bc28ea74cab56a71e3dba4/rpds_py-0.26.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:3ac51b65e8dc76cf4949419c54c5528adb24fc721df722fd452e5fbc236f5c40", size = 358827, upload-time = "2025-07-01T15:56:54.963Z" }, + { url = "https://files.pythonhosted.org/packages/4d/f4/acaefa44b83705a4fcadd68054280127c07cdb236a44a1c08b7c5adad40b/rpds_py-0.26.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59b2093224a18c6508d95cfdeba8db9cbfd6f3494e94793b58972933fcee4c6d", size = 382182, upload-time = "2025-07-01T15:56:56.474Z" }, + { url = "https://files.pythonhosted.org/packages/e9/a2/d72ac03d37d33f6ff4713ca4c704da0c3b1b3a959f0bf5eb738c0ad94ea2/rpds_py-0.26.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4f01a5d6444a3258b00dc07b6ea4733e26f8072b788bef750baa37b370266137", size = 397123, upload-time = "2025-07-01T15:56:58.272Z" }, + { url = "https://files.pythonhosted.org/packages/74/58/c053e9d1da1d3724434dd7a5f506623913e6404d396ff3cf636a910c0789/rpds_py-0.26.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b6e2c12160c72aeda9d1283e612f68804621f448145a210f1bf1d79151c47090", size = 516285, upload-time = "2025-07-01T15:57:00.283Z" }, + { url = "https://files.pythonhosted.org/packages/94/41/c81e97ee88b38b6d1847c75f2274dee8d67cb8d5ed7ca8c6b80442dead75/rpds_py-0.26.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cb28c1f569f8d33b2b5dcd05d0e6ef7005d8639c54c2f0be824f05aedf715255", size = 402182, upload-time = "2025-07-01T15:57:02.587Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/74/38a176b34ce5197b4223e295f36350dd90713db13cf3c3b533e8e8f7484e/rpds_py-0.26.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1766b5724c3f779317d5321664a343c07773c8c5fd1532e4039e6cc7d1a815be", size = 384436, upload-time = "2025-07-01T15:57:04.125Z" }, + { url = "https://files.pythonhosted.org/packages/e4/21/f40b9a5709d7078372c87fd11335469dc4405245528b60007cd4078ed57a/rpds_py-0.26.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b6d9e5a2ed9c4988c8f9b28b3bc0e3e5b1aaa10c28d210a594ff3a8c02742daf", size = 417039, upload-time = "2025-07-01T15:57:05.608Z" }, + { url = "https://files.pythonhosted.org/packages/02/ee/ed835925731c7e87306faa80a3a5e17b4d0f532083155e7e00fe1cd4e242/rpds_py-0.26.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:b5f7a446ddaf6ca0fad9a5535b56fbfc29998bf0e0b450d174bbec0d600e1d72", size = 559111, upload-time = "2025-07-01T15:57:07.371Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/d6e9e686b8ffb6139b82eb1c319ef32ae99aeb21f7e4bf45bba44a760d09/rpds_py-0.26.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:eed5ac260dd545fbc20da5f4f15e7efe36a55e0e7cf706e4ec005b491a9546a0", size = 588609, upload-time = "2025-07-01T15:57:09.319Z" }, + { url = "https://files.pythonhosted.org/packages/e5/96/09bcab08fa12a69672716b7f86c672ee7f79c5319f1890c5a79dcb8e0df2/rpds_py-0.26.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:582462833ba7cee52e968b0341b85e392ae53d44c0f9af6a5927c80e539a8b67", size = 555212, upload-time = "2025-07-01T15:57:10.905Z" }, + { url = "https://files.pythonhosted.org/packages/2c/07/c554b6ed0064b6e0350a622714298e930b3cf5a3d445a2e25c412268abcf/rpds_py-0.26.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:69a607203441e07e9a8a529cff1d5b73f6a160f22db1097211e6212a68567d11", size = 232048, upload-time = "2025-07-01T15:57:12.473Z" }, +] + +[[package]] +name = "tomli" +version = "2.2.1" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" }, + { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" }, + { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" }, + { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" }, + { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" }, + { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" }, + { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" }, + { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" }, + { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" }, + { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" }, + { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" }, + { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" }, + { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" }, + { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" }, + { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" }, + { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" }, + { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, + { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" }, + { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" }, + { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload-time = "2024-11-27T22:38:21.659Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload-time = "2024-11-27T22:38:22.693Z" }, + { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" }, + { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = 
"2024-11-27T22:38:26.081Z" }, + { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" }, + { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload-time = "2024-11-27T22:38:29.591Z" }, + { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" }, + { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724, upload-time = "2024-11-27T22:38:32.837Z" }, + { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383, upload-time = "2024-11-27T22:38:34.455Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, +] + +[[package]] +name = "tomli-w" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/19/75/241269d1da26b624c0d5e110e8149093c759b7a286138f4efd61a60e75fe/tomli_w-1.2.0.tar.gz", hash = "sha256:2dd14fac5a47c27be9cd4c976af5a12d87fb1f0b4512f81d69cce3b35ae25021", size = 7184, upload-time = "2025-01-15T12:07:24.262Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/18/c86eb8e0202e32dd3df50d43d7ff9854f8e0603945ff398974c1d91ac1ef/tomli_w-1.2.0-py3-none-any.whl", hash = "sha256:188306098d013b691fcadc011abd66727d3c414c571bb01b1a174ba8c983cf90", size = 6675, upload-time = "2025-01-15T12:07:22.074Z" }, +] + +[[package]] +name = "tox" +version = "4.28.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cachetools" }, + { name = "chardet" }, + { name = "colorama" }, + { name = "filelock" }, + { name = "packaging" }, + { name = "platformdirs" }, + { name = "pluggy" }, + { name = "pyproject-api" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, + { name = "virtualenv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/22/ca/9114e959d8b18f891327b58346a7b6ed6fc71815504fb2f3773dd5950b14/tox-4.28.3.tar.gz", hash = "sha256:b91db7219e5242002cf4040a299c8852026d6af35fcd21274d456fb62dafee7b", size = 199617, upload-time = "2025-07-26T00:45:59.34Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/14/bc4935af126b676334ace4c44f451fa4f4a8b2dcfec75b11929d0aff8b3f/tox-4.28.3-py3-none-any.whl", hash = "sha256:1debe9daf0b7e64d425ef99a17292b0792385686b1d541df34c7298211e99269", 
size = 174011, upload-time = "2025-07-26T00:45:57.474Z" }, +] + +[[package]] +name = "tox-uv" +version = "1.26.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, + { name = "tox" }, + { name = "typing-extensions", marker = "python_full_version < '3.10'" }, + { name = "uv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7e/31/2c02a8b4d85d3538d6e9a7aa55dfaf3ea372b2007496b9235047e18c0953/tox_uv-1.26.2.tar.gz", hash = "sha256:5270d5d49e26c1303d902b90d6143a593b43ae148ccc5107251b79bf5bd4fefd", size = 21895, upload-time = "2025-07-21T17:03:39.196Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/73/c9/354b2a28112ce9619616f09a3e8363dae01f9a4c5a2716fa92bcfcf6ccc5/tox_uv-1.26.2-py3-none-any.whl", hash = "sha256:f95c8635b6e046534faf4de88f46c46ac0d644f2dbe0104fc6adac637e0d44b6", size = 16666, upload-time = "2025-07-21T17:03:38.037Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.14.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", size = 107673, upload-time = "2025-07-04T13:28:34.16Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906, upload-time = "2025-07-04T13:28:32.743Z" }, +] + +[[package]] +name = "uv" +version = "0.8.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/eb/e3/2c3cb3e992fa1bf9af590bb37983f13e3ae67155820a09a98945664f71f3/uv-0.8.3.tar.gz", hash = "sha256:2ccaae4c749126c99f6404d67a0ae1eae29cbafb05603d09094a775061fdf4e5", size = 3415565, upload-time 
= "2025-07-24T21:14:34.417Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/ab/7b881bb236b9c5f6d99a98adf0c4d1e7c4f0cf4b49051d6d24eb82f19c10/uv-0.8.3-py3-none-linux_armv6l.whl", hash = "sha256:ae7efe91dcfc24126fa91e0fb69a1daf6c0e494a781ba192bb0cc62d7ab623ee", size = 17912668, upload-time = "2025-07-24T21:13:50.682Z" }, + { url = "https://files.pythonhosted.org/packages/fa/9b/64d2ed7388ce88971ffb93d45e74465c95bb885bff40c93f5037b7250930/uv-0.8.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:966ec7d7f57521fef0fee685d71e183c9cafb358ddcfe27519dfeaf40550f247", size = 17947557, upload-time = "2025-07-24T21:13:54.59Z" }, + { url = "https://files.pythonhosted.org/packages/9c/ba/8ceec5d6a1adf6b827db557077d8059e573a84c3708a70433d22a0470fab/uv-0.8.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:3f904f574dc2d7aa1d96ddf2483480ecd121dc9d060108cadd8bff100b754b64", size = 16638472, upload-time = "2025-07-24T21:13:57.57Z" }, + { url = "https://files.pythonhosted.org/packages/a3/76/6d2eb90936603756c4a71f9cf5de8d9214fa4d11dcb5a89117389acecd5e/uv-0.8.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:8b16f1bddfdf8f7470924ab34a7b55e4c372d5340c7c1e47e7fc84a743dc541f", size = 17221472, upload-time = "2025-07-24T21:14:00.158Z" }, + { url = "https://files.pythonhosted.org/packages/5b/bf/c3e1cc9604b114dfb49a3a40a230b5410fc97776c149ca73bb524990f9ba/uv-0.8.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:526f2c3bd6f311ce31f6f7b6b7d818b191f41e76bed3aaab671b716220c02d8f", size = 17607299, upload-time = "2025-07-24T21:14:02.226Z" }, + { url = "https://files.pythonhosted.org/packages/53/16/819f876f5ca2f8989c19d9b65b7d794d60e6cca0d13187bbc8c8b5532b52/uv-0.8.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76de331a07e5ae9b6490e70a9439a072b91b3167a5684510af10c2752c4ece9a", size = 18218124, upload-time = "2025-07-24T21:14:04.809Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/a8/1df852a9153fec0c713358a50cfd7a21a4e17b5ed5704a390c0f3da448ab/uv-0.8.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:989898caeb6e972979543b57547d1c28ab8af81ff8fc15921fd354c17d432749", size = 19638846, upload-time = "2025-07-24T21:14:07.074Z" }, + { url = "https://files.pythonhosted.org/packages/ac/31/adeedaa009d8d919107c52afb58689d5e9db578b07f8dea5e15e4c738d52/uv-0.8.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ce7981f4fbeecf93dc5cf0a5a7915e84956fd99ad3ac977c048fe0cfdb1a17e", size = 19384261, upload-time = "2025-07-24T21:14:09.425Z" }, + { url = "https://files.pythonhosted.org/packages/8d/87/b3981f499e2b13c5ef0022fd7809f0fccbecd41282ae4f6a0e3fd5fa1430/uv-0.8.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8486f7576d15cc73509f93f47b3190f44701ea36839906369301b58c8604d5db", size = 18673722, upload-time = "2025-07-24T21:14:11.656Z" }, + { url = "https://files.pythonhosted.org/packages/5e/62/0d1ba1c666c5492d3716d8d3fba425f65ed2acc6707544c3cbbd381f6cbe/uv-0.8.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1eb7c896fc0d80ed534748aaf46697b6ebc8ce401f1c51666ce0b9923c3db9a", size = 18658829, upload-time = "2025-07-24T21:14:13.798Z" }, + { url = "https://files.pythonhosted.org/packages/cc/ae/11d09be3c74ca4896d55701ebbca7fe7a32db0502cf9f4c57e20bf77bfc4/uv-0.8.3-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:1121ad1c9389b865d029385031d3fd7d90d343c92a2149a4d4aa20bf469cb27f", size = 17460029, upload-time = "2025-07-24T21:14:15.993Z" }, + { url = "https://files.pythonhosted.org/packages/22/47/b67296c62381b8369f082a33d9fdcb7c579ad9922bcce7b09cd4af935dfa/uv-0.8.3-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:5313ee776ad65731ffa8ac585246f987d3a2bf72e6153c12add1fff22ad6e500", size = 18398665, upload-time = "2025-07-24T21:14:18.399Z" }, + { url = 
"https://files.pythonhosted.org/packages/01/5f/23990de5487085ca86e12f99d0a8f8410419442ffd35c42838675df5549b/uv-0.8.3-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:daa6e0d657a94f20e962d4a03d833ef7af5c8e51b7c8a2d92ba6cf64a4c07ac1", size = 17560408, upload-time = "2025-07-24T21:14:20.609Z" }, + { url = "https://files.pythonhosted.org/packages/89/42/1a8ce79d2ce7268e52690cd0f1b6c3e6c8d748a68d42de206e37219e9627/uv-0.8.3-py3-none-musllinux_1_1_i686.whl", hash = "sha256:ad13453ab0a1dfa64a221aac8f52199efdcaa52c97134fffd7bcebed794a6f4b", size = 17758504, upload-time = "2025-07-24T21:14:23.086Z" }, + { url = "https://files.pythonhosted.org/packages/6b/39/ae94e06ac00cb5002e636af0e48c5180fab5b50a463dc96386875ea511ea/uv-0.8.3-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:5843cc43bafad05cc710d8e31bd347ee37202462a63d32c30746e9df48cfbda2", size = 18741736, upload-time = "2025-07-24T21:14:25.329Z" }, + { url = "https://files.pythonhosted.org/packages/18/e0/a2fe9cc5f7b8815cbf97cb1bf64abb71fcb65f25ca7a5a8cdd4c2e23af97/uv-0.8.3-py3-none-win32.whl", hash = "sha256:17bcdb0615e37cc5f985f7d7546f755ac6343c1dc8bbe876c892437f14f8f904", size = 17723422, upload-time = "2025-07-24T21:14:28.02Z" }, + { url = "https://files.pythonhosted.org/packages/cf/c3/da508ec0f6883f1c269a0a477bb6447c81d5383fe3ad5d5ea3d45469fd30/uv-0.8.3-py3-none-win_amd64.whl", hash = "sha256:2e311c029bff2ca07c6ddf877ccc5935cabb78e09b94b53a849542665b6a6fa1", size = 19531666, upload-time = "2025-07-24T21:14:30.192Z" }, + { url = "https://files.pythonhosted.org/packages/b2/8d/c0354e416697b4baa7ceaad0e423639b6683d1f8299355e390a64809f7bf/uv-0.8.3-py3-none-win_arm64.whl", hash = "sha256:391c97577048a40fd8c85b370055df6420f26e81df7fa906f0e0ce1aa2af3527", size = 18161557, upload-time = "2025-07-24T21:14:32.482Z" }, +] + +[[package]] +name = "virtualenv" +version = "20.32.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "distlib" }, + { name = "filelock" }, + { name = "platformdirs" }, +] 
+sdist = { url = "https://files.pythonhosted.org/packages/a9/96/0834f30fa08dca3738614e6a9d42752b6420ee94e58971d702118f7cfd30/virtualenv-20.32.0.tar.gz", hash = "sha256:886bf75cadfdc964674e6e33eb74d787dff31ca314ceace03ca5810620f4ecf0", size = 6076970, upload-time = "2025-07-21T04:09:50.985Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5c/c6/f8f28009920a736d0df434b52e9feebfb4d702ba942f15338cb4a83eafc1/virtualenv-20.32.0-py3-none-any.whl", hash = "sha256:2c310aecb62e5aa1b06103ed7c2977b81e042695de2697d01017ff0f1034af56", size = 6057761, upload-time = "2025-07-21T04:09:48.059Z" }, +]