diff --git a/.coveragerc b/.coveragerc index 524e15a..5e83ea1 100644 --- a/.coveragerc +++ b/.coveragerc @@ -21,3 +21,9 @@ exclude_lines = raise NotImplementedError if 0: if __name__ == .__main__.: + if typing.TYPE_CHECKING: + if types.TYPE_CHECKING: + @overload + @types.overload + @typing.overload + types.Protocol diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml new file mode 100644 index 0000000..da17bb3 --- /dev/null +++ b/.github/workflows/codeql.yml @@ -0,0 +1,41 @@ +name: "CodeQL" + +on: + push: + branches: [ "develop" ] + pull_request: + branches: [ "develop" ] + schedule: + - cron: "46 1 * * 3" + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: [ python ] + + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Initialize CodeQL + uses: github/codeql-action/init@v2 + with: + languages: ${{ matrix.language }} + queries: +security-and-quality + + - name: Autobuild + uses: github/codeql-action/autobuild@v2 + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v2 + with: + category: "/language:${{ matrix.language }}" diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 437e61a..0e1f36c 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -1,4 +1,4 @@ -name: tox +name: pytest on: push: @@ -8,20 +8,51 @@ on: jobs: build: runs-on: ubuntu-latest - timeout-minutes: 2 + timeout-minutes: 4 strategy: matrix: - python-version: [2.7, 3.6, 3.7, 3.8, 3.9, '3.10'] + python-version: ['pypy3.9', 'pypy3.10', '3.9', '3.10', '3.11', '3.12'] # Maybe soon?, '3.13'] + + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 1 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip setuptools ruff + pip install -e '.[tests]' + - name: Get versions + run: | + python -V + pip freeze + - name: ruff + run: ruff check --output-format=github + - name: pytest + run: py.test + docs_and_lint: + runs-on: ubuntu-latest + timeout-minutes: 2 steps: - - uses: actions/checkout@v2 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v5 with: - python-version: ${{ matrix.python-version }} + python-version: '3.10' - name: Install dependencies run: | - python -m pip install --upgrade pip - pip install tox tox-gh-actions - - name: Test with tox - run: tox + python -m pip install --upgrade pip setuptools + pip install -e '.[docs,tests]' pyright ruff mypy + - name: build docs + run: make html + working-directory: docs/ + - name: ruff + run: ruff check --output-format=github + - name: mypy + run: mypy python_utils setup.py + - name: pyright + run: pyright diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml new file mode 100644 index 0000000..7101b3f --- /dev/null +++ b/.github/workflows/stale.yml @@ -0,0 +1,17 @@ +name: Close stale issues and pull requests + +on: + workflow_dispatch: + schedule: + - cron: '0 0 * * *' # Run every day at midnight + +jobs: + stale: + runs-on: ubuntu-latest + steps: + - uses: actions/stale@v8 + with: + days-before-stale: 30 + exempt-issue-labels: in-progress,help-wanted,pinned,security,enhancement + exempt-all-pr-assignees: true + diff --git a/.gitignore b/.gitignore 
index a04bcd0..46105bf 100644 --- a/.gitignore +++ b/.gitignore @@ -4,3 +4,4 @@ /docs/_build /cover /.eggs +/.* \ No newline at end of file diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 0000000..bee434d --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,35 @@ +# Read the Docs configuration file for Sphinx projects +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Set the OS, Python version and other tools you might need +build: + os: ubuntu-22.04 + tools: + python: "3.12" + # You can also specify other tool versions: + # nodejs: "20" + # rust: "1.70" + # golang: "1.20" + +# Build documentation in the "docs/" directory with Sphinx +sphinx: + configuration: docs/conf.py + # You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs + # builder: "dirhtml" + # Fail on all warnings to avoid broken references + # fail_on_warning: true + +# Optionally build your docs in additional formats such as PDF and ePub +formats: + - pdf + - epub + +# Optional but recommended, declare the Python requirements required +# to build your documentation +# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html +python: + install: + - requirements: docs/requirements.txt diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 88380c2..0000000 --- a/.travis.yml +++ /dev/null @@ -1,79 +0,0 @@ -dist: xenial -sudo: false -language: python - -env: - global: - - PIP_WHEEL_DIR=$HOME/.wheels - - PIP_FIND_LINKS=file://$PIP_WHEEL_DIR - -matrix: - include: - - python: '3.6' - env: TOXENV=docs - - python: '3.6' - env: TOXENV=flake8 - - python: '2.7' - env: TOXENV=py27 - - python: '3.5' - env: TOXENV=py35 - - python: '3.6' - env: TOXENV=py36 - - python: '3.7' - env: TOXENV=py37 - - python: '3.8' - env: TOXENV=py38 - - python: '3.9-dev' - env: TOXENV=py39 - - python: 'pypy' - env: TOXENV=pypy - # Added power support architecture - - arch: ppc64le - python: '3.6' - env: TOXENV=docs - - arch: ppc64le - python: '3.6' - env: TOXENV=flake8 - - arch: ppc64le - python: '2.7' - env: TOXENV=py27 - - arch: ppc64le - python: '3.5' - env: TOXENV=py35 - - arch: ppc64le - python: '3.6' - env: TOXENV=py36 - - arch: ppc64le - python: '3.7' - env: TOXENV=py37 - - arch: ppc64le - python: '3.8' - env: TOXENV=py38 - - arch: ppc64le - python: '3.9-dev' - env: TOXENV=py39 - -cache: - directories: - - $HOME/.wheels - -# command to install dependencies, e.g. pip install -r requirements.txt -install: - - mkdir -p $PIP_WHEEL_DIR - - pip wheel -r _python_utils_tests/requirements.txt - - pip install -e . - - pip install tox - -script: - - tox - -after_success: - - pip install codecov coveralls - - coveralls - - codecov - -notifications: - email: - on_success: never - on_failure: change - diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..d13f7da --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,87 @@ +# Contributing to python-utils + +Bug reports, code and documentation contributions are welcome. You can also help this +project by using the development version and by reporting any bugs you might encounter. + +## 1. Reporting bugs +It's important to provide the following details when submitting a bug: +- Python version +- python-utils version +- OS details + +If possible, also provide a minimal reproducible code example. +## 2.
Contributing Code and Docs + +Before working on a new feature or a bug, please browse [existing issues](https://github.com/WoLpH/python-utils/issues) +to see whether it has previously been discussed. + +If your change alters python-utils' behaviour or interface, it's a good idea to +discuss it before you start working on it. + +If you are fixing an issue, the first step should be to create a test case that +reproduces the incorrect behaviour. That will also help you to build an +understanding of the issue at hand. + +Make sure to add relevant tests and update documentation in order to get +your PRs merged. We strictly adhere to 100% code coverage. + +### Development Environment + +#### Getting the code + +Go to https://github.com/WoLpH/python-utils and fork the project repository. + +```bash +# Clone your fork +$ git clone git@github.com:<your username>/python-utils.git + +# Enter the project directory +$ cd python-utils + +# Create a branch for your changes +$ git checkout -b my_awesome_branch +``` + +#### Testing +Before submitting any PR, make sure your code passes all the tests. + +To run the full test-suite, make sure you have `tox` installed and run the following command: + +```bash +$ tox +``` + +Or to speed it up (replace 8 with your number of cores), run: + +```bash +$ tox -p8 +``` + +During development, I recommend using pytest directly and installing the package in development mode. + +Create a virtual environment and activate it: +```bash +$ python3 -m venv venv +$ source venv/bin/activate +``` +Install the test requirements: +```bash +$ cd python-utils +$ pip install -e ".[tests]" +``` +Run the tests: +```bash +$ py.test +``` + +Note that this won't run `ruff` yet, so once all the tests succeed you can run `ruff check` to check for code style errors. + +```bash +$ ruff check +``` + +Lastly, we test the types using `pyright`: + +```bash +$ pyright +``` diff --git a/MANIFEST.in b/MANIFEST.in index fc8acef..0680580 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -7,5 +7,6 @@ include requirements.txt include setup.cfg include setup.py include tox.ini +include python_utils/py.typed recursive-include _python_utils_tests *.py *.txt recursive-exclude __pycache__ * diff --git a/README.rst b/README.rst index f59d94c..9573a7e 100644 --- a/README.rst +++ b/README.rst @@ -1,8 +1,8 @@ Useful Python Utils ============================================================================== -.. image:: https://travis-ci.org/WoLpH/python-utils.svg?branch=master - :target: https://travis-ci.org/WoLpH/python-utils +.. image:: https://github.com/WoLpH/python-utils/actions/workflows/main.yml/badge.svg?branch=master + :target: https://github.com/WoLpH/python-utils/actions/workflows/main.yml .. image:: https://coveralls.io/repos/WoLpH/python-utils/badge.svg?branch=master :target: https://coveralls.io/r/WoLpH/python-utils?branch=master @@ -25,6 +25,13 @@ Links - Documentation: https://python-utils.readthedocs.io/en/latest/ - My blog: https://wol.ph/ +Security contact information +------------------------------------------------------------------------------ + +To report a security vulnerability, please use the +`Tidelift security contact <https://tidelift.com/security>`_. +Tidelift will coordinate the fix and disclosure. + Requirements for installing: ------------------------------------------------------------------------------ @@ -36,16 +43,22 @@ Installation: The package can be installed through `pip` (this is the recommended method): +.. code-block:: bash + pip install python-utils Or if `pip` is not available, `easy_install` should work as well: +..
code-block:: bash + easy_install python-utils Or download the latest release from Pypi (https://pypi.python.org/pypi/python-utils) or Github. Note that the releases on Pypi are signed with my GPG key (https://pgp.mit.edu/pks/lookup?op=vindex&search=0xE81444E9CE1F695D) and can be checked using GPG: +.. code-block:: bash + gpg --verify python-utils-.tar.gz.asc python-utils-.tar.gz Quickstart @@ -58,6 +71,31 @@ format. Examples ------------------------------------------------------------------------------ +Automatically converting a generator to a list, dict or other collections +using a decorator: + +.. code-block:: pycon + + >>> @decorators.listify() + ... def generate_list(): + ... yield 1 + ... yield 2 + ... yield 3 + ... + >>> generate_list() + [1, 2, 3] + + >>> @listify(collection=dict) + ... def dict_generator(): + ... yield 'a', 1 + ... yield 'b', 2 + + >>> dict_generator() + {'a': 1, 'b': 2} + +Retrying until timeout +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + To easily retry a block of code with a configurable timeout, you can use the `time.timeout_generator`: @@ -69,6 +107,9 @@ To easily retry a block of code with a configurable timeout, you can use the ... except Exception as e: ... # Handle the exception +Formatting of timestamps, dates and times +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Easy formatting of timestamps and calculating the time since: .. code-block:: pycon @@ -98,12 +139,15 @@ Easy formatting of timestamps and calculating the time since: '1 minute ago' Converting your test from camel-case to underscores: +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. code-block:: pycon >>> camel_to_underscore('SpamEggsAndBacon') 'spam_eggs_and_bacon' +Attribute setting decorator. Very useful for the Django admin +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ A convenient decorator to set function attributes using a decorator: .. code-block:: pycon @@ -119,7 +163,11 @@ A convenient decorator to set function attributes using a decorator: >>> upper_case_name.short_description = 'Name' -Or to scale numbers: +This can be very useful for the Django admin as it allows you to have all +metadata in one place. + +Scaling numbers between ranges +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. code-block:: pycon @@ -130,7 +178,8 @@ Or to scale numbers: >>> remap(decimal.Decimal('250.0'), 0.0, 1000.0, 0.0, 100.0) Decimal('25.0') -To get the screen/window/terminal size in characters: +Get the screen/window/terminal size in characters: +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. code-block:: pycon @@ -140,7 +189,8 @@ To get the screen/window/terminal size in characters: That method supports IPython and Jupyter as well as regular shells, using `blessings` and other modules depending on what is available. -To extract a number from nearly every string: +Extracting numbers from nearly every string: +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. code-block:: pycon @@ -151,6 +201,9 @@ To extract a number from nearly every string: >>> number = converters.to_int('spam', default=1) 1 +Doing a global import of all the modules in a package programmatically: +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + To do a global import programmatically you can use the `import_global` function. This effectively emulates a `from ... 
import *` @@ -161,6 +214,9 @@ function. This effectively emulates a `from ... import *` # The following is the equivalent of `from some_module import *` import_global('some_module') +Automatically named logger for classes: +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Or add a correctly named logger to your classes which can be easily accessed: .. code-block:: python @@ -183,3 +239,39 @@ Or add a correctly named logger to your classes which can be easily accessed: import logging my_class.log(logging.ERROR, 'log') +Alternatively, loguru is also supported. It is largely a drop-in replacement for the logging module that is a bit more convenient to configure: + +First, install the extra loguru package: + +.. code-block:: bash + + pip install 'python-utils[loguru]' + +.. code-block:: python + + class MyClass(Logurud): + ... + +Now you can use the `Logurud` class to make functions such as `self.info()` +available. The benefit of this approach is that you can add extra context or +options to your specific loguru instance (i.e. `self.logger`). + +Convenient type aliases and some commonly used types: + +.. code-block:: python + + # For type hinting scopes such as locals/globals/vars + Scope = Dict[str, Any] + OptionalScope = O[Scope] + + # Note that Number is only useful for extra clarity since float + # will work for both int and float in practice. + Number = U[int, float] + DecimalNumber = U[Number, decimal.Decimal] + + # To accept an exception or list of exceptions + ExceptionType = Type[Exception] + ExceptionsType = U[Tuple[ExceptionType, ...], ExceptionType] + + # Matching string/bytes types: + StringTypes = U[str, bytes] diff --git a/_python_utils_tests/test_aio.py b/_python_utils_tests/test_aio.py new file mode 100644 index 0000000..9096f10 --- /dev/null +++ b/_python_utils_tests/test_aio.py @@ -0,0 +1,68 @@ +import asyncio + +import pytest + +from python_utils import types +from python_utils.aio import acontainer, acount, adict + + +@pytest.mark.asyncio +async def test_acount(monkeypatch: pytest.MonkeyPatch) -> None: + sleeps: types.List[float] = [] + + async def mock_sleep(delay: float) -> None: + sleeps.append(delay) + + monkeypatch.setattr(asyncio, 'sleep', mock_sleep) + + async for _i in acount(delay=1, stop=3.5): + pass + + assert len(sleeps) == 4 + assert sum(sleeps) == 4 + + +@pytest.mark.asyncio +async def test_acontainer() -> None: + async def async_gen() -> types.AsyncIterable[int]: + yield 1 + yield 2 + yield 3 + + async def empty_gen() -> types.AsyncIterable[int]: + if False: + yield 1 + + assert await acontainer(async_gen) == [1, 2, 3] + assert await acontainer(async_gen()) == [1, 2, 3] + assert await acontainer(async_gen, set) == {1, 2, 3} + assert await acontainer(async_gen(), set) == {1, 2, 3} + assert await acontainer(async_gen, list) == [1, 2, 3] + assert await acontainer(async_gen(), list) == [1, 2, 3] + assert await acontainer(async_gen, tuple) == (1, 2, 3) + assert await acontainer(async_gen(), tuple) == (1, 2, 3) + assert await acontainer(empty_gen) == [] + assert await acontainer(empty_gen()) == [] + assert await acontainer(empty_gen, set) == set() + assert await acontainer(empty_gen(), set) == set() + assert await acontainer(empty_gen, list) == list() + assert await acontainer(empty_gen(), list) == list() + assert await acontainer(empty_gen, tuple) == tuple() + assert await acontainer(empty_gen(), tuple) == tuple() + + +@pytest.mark.asyncio +async def test_adict() -> None: + async def async_gen() ->
types.AsyncIterable[types.Tuple[int, int]]: + yield 1, 2 + yield 3, 4 + yield 5, 6 + + async def empty_gen() -> types.AsyncIterable[types.Tuple[int, int]]: + if False: + yield 1, 2 + + assert await adict(async_gen) == {1: 2, 3: 4, 5: 6} + assert await adict(async_gen()) == {1: 2, 3: 4, 5: 6} + assert await adict(empty_gen) == {} + assert await adict(empty_gen()) == {} diff --git a/_python_utils_tests/test_containers.py b/_python_utils_tests/test_containers.py new file mode 100644 index 0000000..a38609d --- /dev/null +++ b/_python_utils_tests/test_containers.py @@ -0,0 +1,73 @@ +import pytest + +from python_utils import containers + + +def test_unique_list_ignore() -> None: + a: containers.UniqueList[int] = containers.UniqueList() + a.append(1) + a.append(1) + assert a == [1] + + a = containers.UniqueList(*range(20)) + with pytest.raises(RuntimeError): + a[10:20:2] = [1, 2, 3, 4, 5] + + a[3] = 5 + + +def test_unique_list_raise() -> None: + a: containers.UniqueList[int] = containers.UniqueList( + *range(20), on_duplicate='raise' + ) + with pytest.raises(ValueError): + a[10:20:2] = [1, 2, 3, 4, 5] + + a[10:20:2] = [21, 22, 23, 24, 25] + with pytest.raises(ValueError): + a[3] = 5 + + del a[10] + del a[5:15] + + +def test_sliceable_deque() -> None: + d: containers.SliceableDeque[int] = containers.SliceableDeque(range(10)) + assert d[0] == 0 + assert d[-1] == 9 + assert d[1:3] == [1, 2] + assert d[1:3:2] == [1] + assert d[1:3:-1] == [] + assert d[3:1] == [] + assert d[3:1:-1] == [3, 2] + assert d[3:1:-2] == [3] + with pytest.raises(ValueError): + assert d[1:3:0] + assert d[1:3:1] == [1, 2] + assert d[1:3:2] == [1] + assert d[1:3:-1] == [] + + +def test_sliceable_deque_pop() -> None: + d: containers.SliceableDeque[int] = containers.SliceableDeque(range(10)) + + assert d.pop() == 9 == 9 + assert d.pop(0) == 0 + + with pytest.raises(IndexError): + assert d.pop(100) + + with pytest.raises(IndexError): + assert d.pop(2) + + with pytest.raises(IndexError): + assert d.pop(-2) + + +def test_sliceable_deque_eq() -> None: + d: containers.SliceableDeque[int] = containers.SliceableDeque([1, 2, 3]) + assert d == [1, 2, 3] + assert d == (1, 2, 3) + assert d == {1, 2, 3} + assert d == d + assert d == containers.SliceableDeque([1, 2, 3]) diff --git a/_python_utils_tests/test_decorators.py b/_python_utils_tests/test_decorators.py new file mode 100644 index 0000000..281698b --- /dev/null +++ b/_python_utils_tests/test_decorators.py @@ -0,0 +1,71 @@ +import typing +from unittest.mock import MagicMock + +import pytest + +from python_utils.decorators import sample, wraps_classmethod + +T = typing.TypeVar('T') + + +@pytest.fixture +def random(monkeypatch: pytest.MonkeyPatch) -> MagicMock: + mock = MagicMock() + monkeypatch.setattr( + 'python_utils.decorators.random.random', mock, raising=True + ) + return mock + + +def test_sample_called(random: MagicMock) -> None: + demo_function = MagicMock() + decorated = sample(0.5)(demo_function) + random.return_value = 0.4 + decorated() + random.return_value = 0.0 + decorated() + args = [1, 2] + kwargs = {'1': 1, '2': 2} + decorated(*args, **kwargs) + demo_function.assert_called_with(*args, **kwargs) + assert demo_function.call_count == 3 + + +def test_sample_not_called(random: MagicMock) -> None: + demo_function = MagicMock() + decorated = sample(0.5)(demo_function) + random.return_value = 0.5 + decorated() + random.return_value = 1.0 + decorated() + assert demo_function.call_count == 0 + + +class SomeClass: + @classmethod + def some_classmethod(cls, arg: T) -> T: + return arg 
+ + @classmethod + def some_annotated_classmethod(cls, arg: int) -> int: + return arg + + +def test_wraps_classmethod() -> None: + some_class = SomeClass() + some_class.some_classmethod = MagicMock() # type: ignore[method-assign] + wrapped_method = wraps_classmethod(SomeClass.some_classmethod)( + some_class.some_classmethod + ) + wrapped_method(123) + some_class.some_classmethod.assert_called_with(123) + + +def test_wraps_annotated_classmethod() -> None: + some_class = SomeClass() + some_class.some_annotated_classmethod = MagicMock() # type: ignore[method-assign] + wrapped_method = wraps_classmethod(SomeClass.some_annotated_classmethod)( + some_class.some_annotated_classmethod + ) + wrapped_method(123) + some_class.some_annotated_classmethod.assert_called_with(123) diff --git a/_python_utils_tests/test_generators.py b/_python_utils_tests/test_generators.py new file mode 100644 index 0000000..39498e0 --- /dev/null +++ b/_python_utils_tests/test_generators.py @@ -0,0 +1,68 @@ +import asyncio + +import pytest + +import python_utils +from python_utils import types + + +@pytest.mark.asyncio +async def test_abatcher() -> None: + async for batch in python_utils.abatcher(python_utils.acount(stop=9), 3): + assert len(batch) == 3 + + async for batch in python_utils.abatcher(python_utils.acount(stop=2), 3): + assert len(batch) == 2 + + +@pytest.mark.asyncio +async def test_abatcher_timed() -> None: + batches: types.List[types.List[int]] = [] + async for batch in python_utils.abatcher( + python_utils.acount(stop=10, delay=0.08), interval=0.1 + ): + batches.append(batch) + + assert batches == [[0, 1, 2], [3, 4], [5, 6], [7, 8], [9]] + assert len(batches) == 5 + + +@pytest.mark.asyncio +async def test_abatcher_timed_with_timeout() -> None: + async def generator() -> types.AsyncIterator[int]: + # Test if the timeout is respected + yield 0 + yield 1 + await asyncio.sleep(0.11) + + # Test if the timeout is respected + yield 2 + yield 3 + await asyncio.sleep(0.11) + + # Test if exceptions are handled correctly + await asyncio.wait_for(asyncio.sleep(1), timeout=0.05) + + # Test if StopAsyncIteration is handled correctly + yield 4 + + batcher = python_utils.abatcher(generator(), interval=0.1) + assert await batcher.__anext__() == [0, 1] + assert await batcher.__anext__() == [2, 3] + + with pytest.raises(asyncio.TimeoutError): + await batcher.__anext__() + + with pytest.raises(StopAsyncIteration): + await batcher.__anext__() + + +def test_batcher() -> None: + batch = [] + for batch in python_utils.batcher(range(9), 3): + assert len(batch) == 3 + + for batch in python_utils.batcher(range(4), 3): + assert batch is not None + + assert len(batch) == 1 diff --git a/_python_utils_tests/test_import.py b/_python_utils_tests/test_import.py index e9f9255..31be2be 100644 --- a/_python_utils_tests/test_import.py +++ b/_python_utils_tests/test_import.py @@ -1,50 +1,53 @@ -from python_utils import import_ +from python_utils import import_, types -def test_import_globals_relative_import(): +def test_import_globals_relative_import() -> None: for i in range(-1, 5): relative_import(i) -def relative_import(level): - locals_ = {} +def relative_import(level: int) -> None: + locals_: types.Dict[str, types.Any] = {} globals_ = {'__name__': 'python_utils.import_'} import_.import_global('.formatters', locals_=locals_, globals_=globals_) - import pprint - pprint.pprint(globals_) assert 'camel_to_underscore' in globals_ -def test_import_globals_without_inspection(): - locals_ = {} - globals_ = {'__name__': __name__} +def 
test_import_globals_without_inspection() -> None: + locals_: types.Dict[str, types.Any] = {} + globals_: types.Dict[str, types.Any] = {'__name__': __name__} import_.import_global( - 'python_utils.formatters', locals_=locals_, globals_=globals_) + 'python_utils.formatters', locals_=locals_, globals_=globals_ + ) assert 'camel_to_underscore' in globals_ -def test_import_globals_single_method(): - locals_ = {} - globals_ = {'__name__': __name__} +def test_import_globals_single_method() -> None: + locals_: types.Dict[str, types.Any] = {} + globals_: types.Dict[str, types.Any] = {'__name__': __name__} import_.import_global( - 'python_utils.formatters', ['camel_to_underscore'], locals_=locals_, - globals_=globals_) + 'python_utils.formatters', + ['camel_to_underscore'], + locals_=locals_, + globals_=globals_, + ) assert 'camel_to_underscore' in globals_ -def test_import_globals_with_inspection(): +def test_import_globals_with_inspection() -> None: import_.import_global('python_utils.formatters') assert 'camel_to_underscore' in globals() -def test_import_globals_missing_module(): +def test_import_globals_missing_module() -> None: import_.import_global( - 'python_utils.spam', exceptions=ImportError, locals_=locals()) + 'python_utils.spam', exceptions=ImportError, locals_=locals() + ) assert 'camel_to_underscore' in globals() -def test_import_locals_missing_module(): +def test_import_locals_missing_module() -> None: import_.import_global( - 'python_utils.spam', exceptions=ImportError, globals_=globals()) + 'python_utils.spam', exceptions=ImportError, globals_=globals() + ) assert 'camel_to_underscore' in globals() - diff --git a/_python_utils_tests/test_logger.py b/_python_utils_tests/test_logger.py new file mode 100644 index 0000000..2d26696 --- /dev/null +++ b/_python_utils_tests/test_logger.py @@ -0,0 +1,20 @@ +# mypy: disable-error-code=misc +import pytest + +from python_utils.loguru import Logurud + +loguru = pytest.importorskip('loguru') + + +def test_logurud() -> None: + class MyClass(Logurud): + pass + + my_class = MyClass() + my_class.debug('debug') + my_class.info('info') + my_class.warning('warning') + my_class.error('error') + my_class.critical('critical') + my_class.exception('exception') + my_class.log(0, 'log') diff --git a/_python_utils_tests/test_python_utils.py b/_python_utils_tests/test_python_utils.py index d6af258..5a41d4b 100644 --- a/_python_utils_tests/test_python_utils.py +++ b/_python_utils_tests/test_python_utils.py @@ -1,10 +1,9 @@ from python_utils import __about__ -def test_definitions(): +def test_definitions() -> None: # The setup.py requires this so we better make sure they exist :) assert __about__.__version__ assert __about__.__author__ assert __about__.__author_email__ assert __about__.__description__ - diff --git a/_python_utils_tests/test_time.py b/_python_utils_tests/test_time.py new file mode 100644 index 0000000..d4c4658 --- /dev/null +++ b/_python_utils_tests/test_time.py @@ -0,0 +1,192 @@ +import asyncio +import itertools +from datetime import timedelta + +import pytest + +import python_utils +from python_utils import types + + +@pytest.mark.parametrize( + 'timeout,interval,interval_multiplier,maximum_interval,iterable,result', + [ + (0.2, 0.1, 0.4, 0.2, python_utils.acount, 2), + (0.3, 0.1, 0.4, 0.2, python_utils.acount(), 3), + (0.3, 0.06, 1.0, None, python_utils.acount, 5), + ( + timedelta(seconds=0.1), + timedelta(seconds=0.06), + 2.0, + timedelta(seconds=0.1), + python_utils.acount, + 2, + ), + ], +) +@pytest.mark.asyncio +async def 
test_aio_timeout_generator( + timeout: float, + interval: float, + interval_multiplier: float, + maximum_interval: float, + iterable: types.AsyncIterable[types.Any], + result: int, +) -> None: + i = None + async for i in python_utils.aio_timeout_generator( + timeout, interval, iterable, maximum_interval=maximum_interval + ): + pass + + assert i == result + + +@pytest.mark.parametrize( + 'timeout,interval,interval_multiplier,maximum_interval,iterable,result', + [ + (0.1, 0.06, 0.5, 0.1, 'abc', 'c'), + (0.1, 0.07, 0.5, 0.1, itertools.count, 2), + (0.1, 0.07, 0.5, 0.1, itertools.count(), 2), + (0.1, 0.06, 1.0, None, 'abc', 'c'), + ( + timedelta(seconds=0.1), + timedelta(seconds=0.06), + 2.0, + timedelta(seconds=0.1), + itertools.count, + 2, + ), + ], +) +def test_timeout_generator( + timeout: float, + interval: float, + interval_multiplier: float, + maximum_interval: float, + iterable: types.Union[ + str, + types.Iterable[types.Any], + types.Callable[..., types.Iterable[types.Any]], + ], + result: int, +) -> None: + i = None + for i in python_utils.timeout_generator( + timeout=timeout, + interval=interval, + interval_multiplier=interval_multiplier, + iterable=iterable, + maximum_interval=maximum_interval, + ): + assert i is not None + + assert i == result + + +@pytest.mark.asyncio +async def test_aio_generator_timeout_detector() -> None: + # Make pyright happy + i = None + + async def generator() -> types.AsyncGenerator[int, None]: + for i in range(10): + await asyncio.sleep(i / 20.0) + yield i + + detector = python_utils.aio_generator_timeout_detector + # Test regular timeout with reraise + with pytest.raises(asyncio.TimeoutError): + async for i in detector(generator(), 0.25): + pass + + # Test regular timeout with clean exit + async for i in detector(generator(), 0.25, on_timeout=None): + pass + + assert i == 4 + + # Test total timeout with reraise + with pytest.raises(asyncio.TimeoutError): + async for i in detector(generator(), total_timeout=0.5): + pass + + # Test total timeout with clean exit + async for i in detector(generator(), total_timeout=0.5, on_timeout=None): + pass + + assert i == 4 + + # Test stop iteration + async for i in detector(generator(), on_timeout=None): + pass + + +@pytest.mark.asyncio +async def test_aio_generator_timeout_detector_decorator_reraise() -> None: + # Test regular timeout with reraise + @python_utils.aio_generator_timeout_detector_decorator(timeout=0.05) + async def generator_timeout() -> types.AsyncGenerator[int, None]: + for i in range(10): + await asyncio.sleep(i / 100.0) + yield i + + with pytest.raises(asyncio.TimeoutError): + async for _ in generator_timeout(): + pass + + +@pytest.mark.asyncio +async def test_aio_generator_timeout_detector_decorator_clean_exit() -> None: + # Make pyright happy + i = None + + # Test regular timeout with clean exit + @python_utils.aio_generator_timeout_detector_decorator( + timeout=0.05, on_timeout=None + ) + async def generator_clean() -> types.AsyncGenerator[int, None]: + for i in range(10): + await asyncio.sleep(i / 100.0) + yield i + + async for i in generator_clean(): + pass + + assert i == 4 + + +@pytest.mark.asyncio +async def test_aio_generator_timeout_detector_decorator_reraise_total() -> ( + None +): + # Test total timeout with reraise + @python_utils.aio_generator_timeout_detector_decorator(total_timeout=0.1) + async def generator_reraise() -> types.AsyncGenerator[int, None]: + for i in range(10): + await asyncio.sleep(i / 100.0) + yield i + + with pytest.raises(asyncio.TimeoutError): + async for _ in 
generator_reraise(): + pass + + +@pytest.mark.asyncio +async def test_aio_generator_timeout_detector_decorator_clean_total() -> None: + # Make pyright happy + i = None + + # Test total timeout with clean exit + @python_utils.aio_generator_timeout_detector_decorator( + total_timeout=0.1, on_timeout=None + ) + async def generator_clean_total() -> types.AsyncGenerator[int, None]: + for i in range(10): + await asyncio.sleep(i / 100.0) + yield i + + async for i in generator_clean_total(): + pass + + assert i == 4 diff --git a/docs/Makefile b/docs/Makefile index 7cab5b5..d4bb2cb 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -1,153 +1,20 @@ -# Makefile for Sphinx documentation +# Minimal makefile for Sphinx documentation # -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = sphinx-build -PAPER = +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . BUILDDIR = _build -# Internal variables. -PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . -# the i18n builder cannot share the environment and doctrees with the others -I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . - -.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext - +# Put it first so that "make" without argument is like "make help". help: - @echo "Please use \`make ' where is one of" - @echo " html to make standalone HTML files" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " texinfo to make Texinfo files" - @echo " info to make Texinfo files and run them through makeinfo" - @echo " gettext to make PO message catalogs" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" - -clean: - -rm -rf $(BUILDDIR)/* - -html: - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." - -dirhtml: - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." - -singlehtml: - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." - -pickle: - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files." - -json: - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files." 
- -htmlhelp: - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp." - -qthelp: - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/PythonUtils.qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/PythonUtils.qhc" - -devhelp: - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." - @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/PythonUtils" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/PythonUtils" - @echo "# devhelp" - -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." - -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." - -latexpdf: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." - -texinfo: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo - @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." - @echo "Run \`make' in that directory to run these through makeinfo" \ - "(use \`make info' here to do that automatically)." - -info: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo "Running Texinfo files through makeinfo..." - make -C $(BUILDDIR)/texinfo info - @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." - -gettext: - $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale - @echo - @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." - -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -linkcheck: - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." +.PHONY: help Makefile -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
+%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/_theme/LICENSE b/docs/_theme/LICENSE deleted file mode 100644 index f258ba0..0000000 --- a/docs/_theme/LICENSE +++ /dev/null @@ -1,46 +0,0 @@ -Modifications: - -Copyright (c) 2012 Rick van Hattem. - - -Original Projects: - -Copyright (c) 2010 Kenneth Reitz. -Copyright (c) 2010 by Armin Ronacher. - - -Some rights reserved. - -Redistribution and use in source and binary forms of the theme, with or -without modification, are permitted provided that the following conditions -are met: - -* Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided - with the distribution. - -* The names of the contributors may not be used to endorse or - promote products derived from this software without specific - prior written permission. - -We kindly ask you to only use these themes in an unmodified manner just -for Flask and Flask-related products, not for unrelated projects. If you -like the visual style and want to use it for your own projects, please -consider making some larger changes to the themes (such as changing -font faces, sizes, colors or margins). - -THIS THEME IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE -LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -ARISING IN ANY WAY OUT OF THE USE OF THIS THEME, EVEN IF ADVISED OF THE -POSSIBILITY OF SUCH DAMAGE. diff --git a/docs/_theme/flask_theme_support.py b/docs/_theme/flask_theme_support.py deleted file mode 100644 index 33f4744..0000000 --- a/docs/_theme/flask_theme_support.py +++ /dev/null @@ -1,86 +0,0 @@ -# flasky extensions. 
flasky pygments style based on tango style -from pygments.style import Style -from pygments.token import Keyword, Name, Comment, String, Error, \ - Number, Operator, Generic, Whitespace, Punctuation, Other, Literal - - -class FlaskyStyle(Style): - background_color = "#f8f8f8" - default_style = "" - - styles = { - # No corresponding class for the following: - #Text: "", # class: '' - Whitespace: "underline #f8f8f8", # class: 'w' - Error: "#a40000 border:#ef2929", # class: 'err' - Other: "#000000", # class 'x' - - Comment: "italic #8f5902", # class: 'c' - Comment.Preproc: "noitalic", # class: 'cp' - - Keyword: "bold #004461", # class: 'k' - Keyword.Constant: "bold #004461", # class: 'kc' - Keyword.Declaration: "bold #004461", # class: 'kd' - Keyword.Namespace: "bold #004461", # class: 'kn' - Keyword.Pseudo: "bold #004461", # class: 'kp' - Keyword.Reserved: "bold #004461", # class: 'kr' - Keyword.Type: "bold #004461", # class: 'kt' - - Operator: "#582800", # class: 'o' - Operator.Word: "bold #004461", # class: 'ow' - like keywords - - Punctuation: "bold #000000", # class: 'p' - - # because special names such as Name.Class, Name.Function, etc. - # are not recognized as such later in the parsing, we choose them - # to look the same as ordinary variables. - Name: "#000000", # class: 'n' - Name.Attribute: "#c4a000", # class: 'na' - to be revised - Name.Builtin: "#004461", # class: 'nb' - Name.Builtin.Pseudo: "#3465a4", # class: 'bp' - Name.Class: "#000000", # class: 'nc' - to be revised - Name.Constant: "#000000", # class: 'no' - to be revised - Name.Decorator: "#888", # class: 'nd' - to be revised - Name.Entity: "#ce5c00", # class: 'ni' - Name.Exception: "bold #cc0000", # class: 'ne' - Name.Function: "#000000", # class: 'nf' - Name.Property: "#000000", # class: 'py' - Name.Label: "#f57900", # class: 'nl' - Name.Namespace: "#000000", # class: 'nn' - to be revised - Name.Other: "#000000", # class: 'nx' - Name.Tag: "bold #004461", # class: 'nt' - like a keyword - Name.Variable: "#000000", # class: 'nv' - to be revised - Name.Variable.Class: "#000000", # class: 'vc' - to be revised - Name.Variable.Global: "#000000", # class: 'vg' - to be revised - Name.Variable.Instance: "#000000", # class: 'vi' - to be revised - - Number: "#990000", # class: 'm' - - Literal: "#000000", # class: 'l' - Literal.Date: "#000000", # class: 'ld' - - String: "#4e9a06", # class: 's' - String.Backtick: "#4e9a06", # class: 'sb' - String.Char: "#4e9a06", # class: 'sc' - String.Doc: "italic #8f5902", # class: 'sd' - like a comment - String.Double: "#4e9a06", # class: 's2' - String.Escape: "#4e9a06", # class: 'se' - String.Heredoc: "#4e9a06", # class: 'sh' - String.Interpol: "#4e9a06", # class: 'si' - String.Other: "#4e9a06", # class: 'sx' - String.Regex: "#4e9a06", # class: 'sr' - String.Single: "#4e9a06", # class: 's1' - String.Symbol: "#4e9a06", # class: 'ss' - - Generic: "#000000", # class: 'g' - Generic.Deleted: "#a40000", # class: 'gd' - Generic.Emph: "italic #000000", # class: 'ge' - Generic.Error: "#ef2929", # class: 'gr' - Generic.Heading: "bold #000080", # class: 'gh' - Generic.Inserted: "#00A000", # class: 'gi' - Generic.Output: "#888", # class: 'go' - Generic.Prompt: "#745334", # class: 'gp' - Generic.Strong: "bold #000000", # class: 'gs' - Generic.Subheading: "bold #800080", # class: 'gu' - Generic.Traceback: "bold #a40000", # class: 'gt' - } diff --git a/docs/_theme/wolph/layout.html b/docs/_theme/wolph/layout.html deleted file mode 100644 index a39162f..0000000 --- a/docs/_theme/wolph/layout.html +++ /dev/null @@ 
-1,16 +0,0 @@ -{%- extends "basic/layout.html" %} -{%- block extrahead %} - {{ super() }} - {% if theme_touch_icon %} - - {% endif %} - -{% endblock %} -{%- block relbar2 %}{% endblock %} -{%- block footer %} - -{%- endblock %} diff --git a/docs/_theme/wolph/relations.html b/docs/_theme/wolph/relations.html deleted file mode 100644 index 3bbcde8..0000000 --- a/docs/_theme/wolph/relations.html +++ /dev/null @@ -1,19 +0,0 @@ -

Related Topics

- diff --git a/docs/_theme/wolph/static/flasky.css_t b/docs/_theme/wolph/static/flasky.css_t deleted file mode 100644 index 71aae28..0000000 --- a/docs/_theme/wolph/static/flasky.css_t +++ /dev/null @@ -1,431 +0,0 @@ -/* - * flasky.css_t - * ~~~~~~~~~~~~ - * - * :copyright: Copyright 2010 by Armin Ronacher. Modifications by Kenneth Reitz. - * :license: Flask Design License, see LICENSE for details. - */ - -{% set page_width = '940px' %} -{% set sidebar_width = '220px' %} - -@import url("basic.css"); - -/* -- page layout ----------------------------------------------------------- */ - -body { - font-family: 'goudy old style', 'minion pro', 'bell mt', Georgia, 'Hiragino Mincho Pro'; - font-size: 17px; - background-color: white; - color: #000; - margin: 0; - padding: 0; -} - -div.document { - width: {{ page_width }}; - margin: 30px auto 0 auto; -} - -div.documentwrapper { - float: left; - width: 100%; -} - -div.bodywrapper { - margin: 0 0 0 {{ sidebar_width }}; -} - -div.sphinxsidebar { - width: {{ sidebar_width }}; -} - -hr { - border: 1px solid #B1B4B6; -} - -div.body { - background-color: #ffffff; - color: #3E4349; - padding: 0 30px 0 30px; -} - -img.floatingflask { - padding: 0 0 10px 10px; - float: right; -} - -div.footer { - width: {{ page_width }}; - margin: 20px auto 30px auto; - font-size: 14px; - color: #888; - text-align: right; -} - -div.footer a { - color: #888; -} - -div.related { - display: none; -} - -div.sphinxsidebar a { - color: #444; - text-decoration: none; - border-bottom: 1px dotted #999; -} - -div.sphinxsidebar a:hover { - border-bottom: 1px solid #999; -} - -div.sphinxsidebar { - font-size: 14px; - line-height: 1.5; -} - -div.sphinxsidebarwrapper { - padding: 0px 10px; -} - -div.sphinxsidebarwrapper p.logo { - padding: 0 0 20px 0; - margin: 0; - text-align: center; -} - -div.sphinxsidebar h3, -div.sphinxsidebar h4 { - font-family: 'Garamond', 'Georgia', serif; - color: #555; - font-size: 24px; - font-weight: normal; - margin: 0 0 5px 0; - padding: 0; -} - -div.sphinxsidebar h4 { - font-size: 20px; -} - -div.sphinxsidebar h3 a { - color: #444; -} - -div.sphinxsidebar p.logo a, -div.sphinxsidebar h3 a, -div.sphinxsidebar p.logo a:hover, -div.sphinxsidebar h3 a:hover { - border: none; -} - -div.sphinxsidebar p { - color: #555; - margin: 10px 0; -} - -div.sphinxsidebar ul { - margin: 10px 0; - padding: 0; - color: #000; -} - -div.sphinxsidebar input[type="text"] { - width: 160px!important; -} -div.sphinxsidebar input { - border: 1px solid #ccc; - font-family: 'Georgia', serif; - font-size: 1em; -} - -/* -- body styles ----------------------------------------------------------- */ - -a { - color: #004B6B; - text-decoration: underline; -} - -a:hover { - color: #6D4100; - text-decoration: underline; -} - -div.body h1, -div.body h2, -div.body h3, -div.body h4, -div.body h5, -div.body h6 { - font-family: 'Garamond', 'Georgia', serif; - font-weight: normal; - margin: 30px 0px 10px 0px; - padding: 0; -} - -div.body h1 { margin-top: 0; padding-top: 0; font-size: 240%; } -div.body h2 { font-size: 180%; } -div.body h3 { font-size: 150%; } -div.body h4 { font-size: 130%; } -div.body h5 { font-size: 100%; } -div.body h6 { font-size: 100%; } - -a.headerlink { - color: #ddd; - padding: 0 4px; - text-decoration: none; -} - -a.headerlink:hover { - color: #444; - background: #eaeaea; -} - -div.body p, div.body dd, div.body li { - line-height: 1.4em; -} - -div.admonition { - background: #fafafa; - margin: 20px -30px; - padding: 10px 30px; - border-top: 1px solid #ccc; - border-bottom: 1px 
solid #ccc; -} - -div.admonition tt.xref, div.admonition a tt { - border-bottom: 1px solid #fafafa; -} - -dd div.admonition { - margin-left: -60px; - padding-left: 60px; -} - -div.admonition p.admonition-title { - font-family: 'Garamond', 'Georgia', serif; - font-weight: normal; - font-size: 24px; - margin: 0 0 10px 0; - padding: 0; - line-height: 1; -} - -div.admonition p.last { - margin-bottom: 0; -} - -div.highlight { - background-color: white; -} - -dt:target, .highlight { - background: #FAF3E8; -} - -div.note { - background-color: #eee; - border: 1px solid #ccc; -} - -div.seealso { - background-color: #ffc; - border: 1px solid #ff6; -} - -div.topic { - background-color: #eee; -} - -p.admonition-title { - display: inline; -} - -p.admonition-title:after { - content: ":"; -} - -pre, tt { - font-family: 'Consolas', 'Menlo', 'Deja Vu Sans Mono', 'Bitstream Vera Sans Mono', monospace; - font-size: 0.9em; -} - -img.screenshot { -} - -tt.descname, tt.descclassname { - font-size: 0.95em; -} - -tt.descname { - padding-right: 0.08em; -} - -img.screenshot { - -moz-box-shadow: 2px 2px 4px #eee; - -webkit-box-shadow: 2px 2px 4px #eee; - box-shadow: 2px 2px 4px #eee; -} - -table.docutils { - border: 1px solid #888; - -moz-box-shadow: 2px 2px 4px #eee; - -webkit-box-shadow: 2px 2px 4px #eee; - box-shadow: 2px 2px 4px #eee; -} - -table.docutils td, table.docutils th { - border: 1px solid #888; - padding: 0.25em 0.7em; -} - -table.field-list, table.footnote { - border: none; - -moz-box-shadow: none; - -webkit-box-shadow: none; - box-shadow: none; -} - -table.footnote { - margin: 15px 0; - width: 100%; - border: 1px solid #eee; - background: #fdfdfd; - font-size: 0.9em; -} - -table.footnote + table.footnote { - margin-top: -15px; - border-top: none; -} - -table.field-list th { - padding: 0 0.8em 0 0; -} - -table.field-list td { - padding: 0; -} - -table.footnote td.label { - width: 0px; - padding: 0.3em 0 0.3em 0.5em; -} - -table.footnote td { - padding: 0.3em 0.5em; -} - -dl { - margin: 0; - padding: 0; -} - -dl dd { - margin-left: 30px; -} - -blockquote { - margin: 0 0 0 30px; - padding: 0; -} - -ul, ol { - margin: 10px 0 10px 30px; - padding: 0; -} - -pre { - background: #eee; - padding: 7px 30px; - margin: 15px -30px; - line-height: 1.3em; -} - -dl pre, blockquote pre, li pre { - margin-left: -60px; - padding-left: 60px; -} - -dl dl pre { - margin-left: -90px; - padding-left: 90px; -} - -tt { - background-color: #ecf0f3; - color: #222; - /* padding: 1px 2px; */ -} - -tt.xref, a tt { - background-color: #FBFBFB; - border-bottom: 1px solid white; -} - -a.reference { - text-decoration: none; - border-bottom: 1px dotted #004B6B; -} - -a.reference:hover { - border-bottom: 1px solid #6D4100; -} - -a.footnote-reference { - text-decoration: none; - font-size: 0.7em; - vertical-align: top; - border-bottom: 1px dotted #004B6B; -} - -a.footnote-reference:hover { - border-bottom: 1px solid #6D4100; -} - -a:hover tt { - background: #EEE; -} - - -/* scrollbars */ - -::-webkit-scrollbar { - width: 6px; - height: 6px; -} - -::-webkit-scrollbar-button:start:decrement, -::-webkit-scrollbar-button:end:increment { - display: block; - height: 10px; -} - -::-webkit-scrollbar-button:vertical:increment { - background-color: #fff; -} - -::-webkit-scrollbar-track-piece { - background-color: #eee; - -webkit-border-radius: 3px; -} - -::-webkit-scrollbar-thumb:vertical { - height: 50px; - background-color: #ccc; - -webkit-border-radius: 3px; -} - -::-webkit-scrollbar-thumb:horizontal { - width: 50px; - background-color: #ccc; - 
-webkit-border-radius: 3px; -} - -/* misc. */ - -.revsys-inline { - display: none!important; -} diff --git a/docs/_theme/wolph/static/small_flask.css b/docs/_theme/wolph/static/small_flask.css deleted file mode 100644 index 1c6df30..0000000 --- a/docs/_theme/wolph/static/small_flask.css +++ /dev/null @@ -1,70 +0,0 @@ -/* - * small_flask.css_t - * ~~~~~~~~~~~~~~~~~ - * - * :copyright: Copyright 2010 by Armin Ronacher. - * :license: Flask Design License, see LICENSE for details. - */ - -body { - margin: 0; - padding: 20px 30px; -} - -div.documentwrapper { - float: none; - background: white; -} - -div.sphinxsidebar { - display: block; - float: none; - width: 102.5%; - margin: 50px -30px -20px -30px; - padding: 10px 20px; - background: #333; - color: white; -} - -div.sphinxsidebar h3, div.sphinxsidebar h4, div.sphinxsidebar p, -div.sphinxsidebar h3 a { - color: white; -} - -div.sphinxsidebar a { - color: #aaa; -} - -div.sphinxsidebar p.logo { - display: none; -} - -div.document { - width: 100%; - margin: 0; -} - -div.related { - display: block; - margin: 0; - padding: 10px 0 20px 0; -} - -div.related ul, -div.related ul li { - margin: 0; - padding: 0; -} - -div.footer { - display: none; -} - -div.bodywrapper { - margin: 0; -} - -div.body { - min-height: 0; - padding: 0; -} diff --git a/docs/_theme/wolph/theme.conf b/docs/_theme/wolph/theme.conf deleted file mode 100644 index 307a1f0..0000000 --- a/docs/_theme/wolph/theme.conf +++ /dev/null @@ -1,7 +0,0 @@ -[theme] -inherit = basic -stylesheet = flasky.css -pygments_style = flask_theme_support.FlaskyStyle - -[options] -touch_icon = diff --git a/docs/conf.py b/docs/conf.py index 2b2536d..44fac03 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,33 +1,40 @@ -# -*- coding: utf-8 -*- +""" +Configuration file for the Sphinx documentation builder. # -# Python Utils documentation build configuration file, created by -# sphinx-quickstart on Wed May 9 16:57:31 2012. -# -# This file is execfile()d with the current directory set to its containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. +This file only contains a selection of the most common options. For a full +list see the documentation: +https://www.sphinx-doc.org/en/master/usage/configuration.html + +-- Path setup -------------------------------------------------------------- + +If extensions (or modules to document with autodoc) are in another directory, +add these directories to sys.path here. If the directory is relative to the +documentation root, use os.path.abspath to make it absolute, like shown here. # -# All configuration values have a default; values that are commented out -# serve to show the default. +""" import os import sys -import datetime +from datetime import date -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath('..')) + from python_utils import __about__ -# -- General configuration ----------------------------------------------------- +# -- Project information ----------------------------------------------------- -# If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' +project = 'Python Utils' +author = __about__.__author__ +copyright = f'{date.today().year}, {author}' -# Add any Sphinx extension module names here, as strings. 
They can be extensions -# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +# The full version, including alpha/beta/rc tags +release = __about__.__version__ + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.doctest', @@ -40,269 +47,21 @@ # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] -# The suffix of source filenames. -source_suffix = '.rst' - -# The encoding of source files. -#source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -project = u'Python Utils' -copyright = u'%s, %s' % ( - datetime.date.today().year, - __about__.__author__, -) - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = __about__.__version__ -# The full version, including alpha/beta/rc tags. -release = __about__.__version__ - -suppress_warnings = [ - 'image.nonlocal_uri', -] - -needs_sphinx = '1.4' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -#language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' - # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ['_build'] - -# The reST default role (used for this markup: `text`) to use for all documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -#show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] - - -# -- Options for HTML output --------------------------------------------------- +# -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = 'wolph' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -#html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -html_theme_path = ['_theme'] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -#html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. 
-#html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -#html_favicon = None +# +html_theme = 'alabaster' # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -#html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - -# If false, no module index is generated. -#html_domain_indices = True - -# If false, no index is generated. -#html_use_index = True - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None - -# Output file base name for HTML help builder. -htmlhelp_basename = 'PythonUtilsdoc' - - -# -- Options for LaTeX output -------------------------------------------------- - -latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -#'preamble': '', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, documentclass [howto/manual]). -latex_documents = [ - ('index', 'PythonUtils.tex', u'Python Utils Documentation', - __about__.__author__, 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# If true, show page references after internal links. -#latex_show_pagerefs = False - -# If true, show URL addresses after external links. -#latex_show_urls = False - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -#latex_domain_indices = True - - -# -- Options for manual page output -------------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ('index', 'pythonutils', u'Python Utils Documentation', - [__about__.__author__], 1) -] - -# If true, show URL addresses after external links. 
-#man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------------ - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ('index', 'PythonUtils', u'Python Utils Documentation', - __about__.__author__, 'PythonUtils', __about__.__description__, - 'Miscellaneous'), -] - -# Documents to append as an appendix to all manuals. -#texinfo_appendices = [] - -# If false, no module index is generated. -#texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' - - -# -- Options for Epub output --------------------------------------------------- - -# Bibliographic Dublin Core info. -epub_title = u'Python Utils' -epub_author = __about__.__author__ -epub_publisher = __about__.__author__ -epub_copyright = copyright - -# The language of the text. It defaults to the language option -# or en if the language is not set. -#epub_language = '' - -# The scheme of the identifier. Typical schemes are ISBN or URL. -#epub_scheme = '' - -# The unique identifier of the text. This can be a ISBN number -# or the project homepage. -#epub_identifier = '' - -# A unique identification for the text. -#epub_uid = '' - -# A tuple containing the cover image and cover page html template filenames. -#epub_cover = () - -# HTML files that should be inserted before the pages created by sphinx. -# The format is a list of tuples containing the path and title. -#epub_pre_files = [] - -# HTML files shat should be inserted after the pages created by sphinx. -# The format is a list of tuples containing the path and title. -#epub_post_files = [] - -# A list of files that should not be packed into the epub file. -#epub_exclude_files = [] - -# The depth of the table of contents in toc.ncx. -#epub_tocdepth = 3 - -# Allow duplicate toc entries. -#epub_tocdup = True - +# html_static_path = ['_static'] -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = {'http://docs.python.org/': None} +intersphinx_mapping = {'python': ('https://docs.python.org/3', None)} diff --git a/docs/index.rst b/docs/index.rst index 3715735..df4d31a 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,6 +1,12 @@ Welcome to Python Utils's documentation! ======================================== +.. image:: https://github.com/WoLpH/python-utils/actions/workflows/main.yml/badge.svg?branch=master + :target: https://github.com/WoLpH/python-utils/actions/workflows/main.yml/badge.svg?branch=master + +.. image:: https://coveralls.io/repos/WoLpH/python-utils/badge.svg?branch=master + :target: https://coveralls.io/r/WoLpH/python-utils?branch=master + Contents: .. toctree:: @@ -9,16 +15,6 @@ Contents: usage python_utils -Travis status: - -.. image:: https://travis-ci.org/WoLpH/python-utils.png?branch=master - :target: https://travis-ci.org/WoLpH/python-utils - -Coverage: - -.. image:: https://coveralls.io/repos/WoLpH/python-utils/badge.png?branch=master - :target: https://coveralls.io/r/WoLpH/python-utils?branch=master - Indices and tables ================== diff --git a/docs/make.bat b/docs/make.bat index 3342faf..8084272 100644 --- a/docs/make.bat +++ b/docs/make.bat @@ -1,190 +1,35 @@ @ECHO OFF +pushd %~dp0 + REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) +set SOURCEDIR=. 
set BUILDDIR=_build -set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . -set I18NSPHINXOPTS=%SPHINXOPTS% . -if NOT "%PAPER%" == "" ( - set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% - set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% -) if "%1" == "" goto help -if "%1" == "help" ( - :help - echo.Please use `make ^` where ^ is one of - echo. html to make standalone HTML files - echo. dirhtml to make HTML files named index.html in directories - echo. singlehtml to make a single large HTML file - echo. pickle to make pickle files - echo. json to make JSON files - echo. htmlhelp to make HTML files and a HTML help project - echo. qthelp to make HTML files and a qthelp project - echo. devhelp to make HTML files and a Devhelp project - echo. epub to make an epub - echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter - echo. text to make text files - echo. man to make manual pages - echo. texinfo to make Texinfo files - echo. gettext to make PO message catalogs - echo. changes to make an overview over all changed/added/deprecated items - echo. linkcheck to check all external links for integrity - echo. doctest to run all doctests embedded in the documentation if enabled - goto end -) - -if "%1" == "clean" ( - for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i - del /q /s %BUILDDIR%\* - goto end -) - -if "%1" == "html" ( - %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/html. - goto end -) - -if "%1" == "dirhtml" ( - %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. - goto end -) - -if "%1" == "singlehtml" ( - %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. - goto end -) - -if "%1" == "pickle" ( - %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle - if errorlevel 1 exit /b 1 +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( echo. - echo.Build finished; now you can process the pickle files. - goto end -) - -if "%1" == "json" ( - %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json - if errorlevel 1 exit /b 1 + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. echo. - echo.Build finished; now you can process the JSON files. - goto end + echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 ) -if "%1" == "htmlhelp" ( - %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can run HTML Help Workshop with the ^ -.hhp project file in %BUILDDIR%/htmlhelp. - goto end -) +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end -if "%1" == "qthelp" ( - %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp - if errorlevel 1 exit /b 1 - echo. 
- echo.Build finished; now you can run "qcollectiongenerator" with the ^ -.qhcp project file in %BUILDDIR%/qthelp, like this: - echo.^> qcollectiongenerator %BUILDDIR%\qthelp\PythonUtils.qhcp - echo.To view the help file: - echo.^> assistant -collectionFile %BUILDDIR%\qthelp\PythonUtils.ghc - goto end -) - -if "%1" == "devhelp" ( - %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. - goto end -) - -if "%1" == "epub" ( - %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The epub file is in %BUILDDIR%/epub. - goto end -) - -if "%1" == "latex" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "text" ( - %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The text files are in %BUILDDIR%/text. - goto end -) - -if "%1" == "man" ( - %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The manual pages are in %BUILDDIR%/man. - goto end -) - -if "%1" == "texinfo" ( - %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. - goto end -) - -if "%1" == "gettext" ( - %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The message catalogs are in %BUILDDIR%/locale. - goto end -) - -if "%1" == "changes" ( - %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes - if errorlevel 1 exit /b 1 - echo. - echo.The overview file is in %BUILDDIR%/changes. - goto end -) - -if "%1" == "linkcheck" ( - %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck - if errorlevel 1 exit /b 1 - echo. - echo.Link check complete; look for any errors in the above output ^ -or in %BUILDDIR%/linkcheck/output.txt. - goto end -) - -if "%1" == "doctest" ( - %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest - if errorlevel 1 exit /b 1 - echo. - echo.Testing of doctests in the sources finished, look at the ^ -results in %BUILDDIR%/doctest/output.txt. 
- goto end -) +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% :end +popd diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..0bf3c24 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,20 @@ +[tool.black] +line-length = 79 +target-version = ['py37', 'py38', 'py39', 'py310', 'py311'] +skip-string-normalization = true + +[tool.pyright] +# include = ['python_utils'] +include = ['python_utils', '_python_utils_tests', 'setup.py'] +strict = ['python_utils', '_python_utils_tests', 'setup.py'] +# The terminal file is very OS specific and dependent on imports so we're skipping it from type checking +ignore = ['python_utils/terminal.py'] +pythonVersion = '3.9' + +[tool.mypy] +strict = true +check_untyped_defs = true +files = ['python_utils', '_python_utils_tests', 'setup.py'] + +[[tool.mypy.overrides]] +module = '_python_utils_tests.*' diff --git a/pytest.ini b/pytest.ini index 2de1634..a8e632a 100644 --- a/pytest.ini +++ b/pytest.ini @@ -6,13 +6,11 @@ python_files = addopts = --doctest-modules --cov python_utils - --cov-report html --cov-report term-missing +; --mypy -flake8-ignore = - *.py W391 - docs/*.py ALL - -doctest_optionflags = +doctest_optionflags = ALLOW_UNICODE ALLOW_BYTES + +asyncio_mode = strict diff --git a/python_utils/__about__.py b/python_utils/__about__.py index 24c2cb7..a96a6fa 100644 --- a/python_utils/__about__.py +++ b/python_utils/__about__.py @@ -1,9 +1,22 @@ -__package_name__ = 'python-utils' -__version__ = '2.7.1' -__author__ = 'Rick van Hattem' -__author_email__ = 'Wolph@wol.ph' -__description__ = ( - 'Python Utils is a module with some convenient utilities not included ' - 'with the standard Python install') -__url__ = 'https://github.com/WoLpH/python-utils' +""" +This module contains metadata about the `python-utils` package. + +Attributes: + __package_name__ (str): The name of the package. + __author__ (str): The author of the package. + __author_email__ (str): The email of the author. + __description__ (str): A brief description of the package. + __url__ (str): The URL of the package's repository. + __version__ (str): The current version of the package. +""" +__package_name__: str = 'python-utils' +__author__: str = 'Rick van Hattem' +__author_email__: str = 'Wolph@wol.ph' +__description__: str = ( + 'Python Utils is a module with some convenient utilities not included ' + 'with the standard Python install' +) +__url__: str = 'https://github.com/WoLpH/python-utils' +# Omit type info due to automatic versioning script +__version__ = '3.9.1' diff --git a/python_utils/__init__.py b/python_utils/__init__.py index e69de29..7c4242c 100644 --- a/python_utils/__init__.py +++ b/python_utils/__init__.py @@ -0,0 +1,126 @@ +""" +This module initializes the `python_utils` package by importing various +submodules and functions. + +Submodules: + aio + converters + decorators + formatters + generators + import_ + logger + terminal + time + types + +Functions: + acount + remap + scale_1024 + to_float + to_int + to_str + to_unicode + listify + set_attributes + raise_exception + reraise + camel_to_underscore + timesince + abatcher + batcher + import_global + get_terminal_size + aio_generator_timeout_detector + aio_generator_timeout_detector_decorator + aio_timeout_generator + delta_to_seconds + delta_to_seconds_or_none + format_time + timedelta_to_seconds + timeout_generator + +Classes: + CastedDict + LazyCastedDict + UniqueList + Logged + LoggerBase +""" + +from . 
import ( + aio, + converters, + decorators, + formatters, + generators, + import_, + logger, + terminal, + time, + types, +) +from .aio import acount +from .containers import CastedDict, LazyCastedDict, UniqueList +from .converters import remap, scale_1024, to_float, to_int, to_str, to_unicode +from .decorators import listify, set_attributes +from .exceptions import raise_exception, reraise +from .formatters import camel_to_underscore, timesince +from .generators import abatcher, batcher +from .import_ import import_global +from .logger import Logged, LoggerBase +from .terminal import get_terminal_size +from .time import ( + aio_generator_timeout_detector, + aio_generator_timeout_detector_decorator, + aio_timeout_generator, + delta_to_seconds, + delta_to_seconds_or_none, + format_time, + timedelta_to_seconds, + timeout_generator, +) + +__all__ = [ + 'CastedDict', + 'LazyCastedDict', + 'Logged', + 'LoggerBase', + 'UniqueList', + 'abatcher', + 'acount', + 'aio', + 'aio_generator_timeout_detector', + 'aio_generator_timeout_detector_decorator', + 'aio_timeout_generator', + 'batcher', + 'camel_to_underscore', + 'converters', + 'decorators', + 'delta_to_seconds', + 'delta_to_seconds_or_none', + 'format_time', + 'formatters', + 'generators', + 'get_terminal_size', + 'import_', + 'import_global', + 'listify', + 'logger', + 'raise_exception', + 'remap', + 'reraise', + 'scale_1024', + 'set_attributes', + 'terminal', + 'time', + 'timedelta_to_seconds', + 'timeout_generator', + 'timesince', + 'to_float', + 'to_int', + 'to_str', + 'to_unicode', + 'types', +] diff --git a/python_utils/aio.py b/python_utils/aio.py new file mode 100644 index 0000000..7a7b3b3 --- /dev/null +++ b/python_utils/aio.py @@ -0,0 +1,117 @@ +"""Asyncio equivalents to regular Python functions.""" + +import asyncio +import itertools +import typing + +from . import types + +_N = types.TypeVar('_N', int, float) +_T = types.TypeVar('_T') +_K = types.TypeVar('_K') +_V = types.TypeVar('_V') + + +async def acount( + start: _N = 0, + step: _N = 1, + delay: float = 0, + stop: types.Optional[_N] = None, +) -> types.AsyncIterator[_N]: + """Asyncio version of itertools.count().""" + for item in itertools.count(start, step): # pragma: no branch + if stop is not None and item >= stop: + break + + yield item + await asyncio.sleep(delay) + + +@typing.overload +async def acontainer( + iterable: types.Union[ + types.AsyncIterable[_T], + types.Callable[..., types.AsyncIterable[_T]], + ], + container: types.Type[types.Tuple[_T, ...]], +) -> types.Tuple[_T, ...]: ... + + +@typing.overload +async def acontainer( + iterable: types.Union[ + types.AsyncIterable[_T], + types.Callable[..., types.AsyncIterable[_T]], + ], + container: types.Type[types.List[_T]] = list, +) -> types.List[_T]: ... + + +@typing.overload +async def acontainer( + iterable: types.Union[ + types.AsyncIterable[_T], + types.Callable[..., types.AsyncIterable[_T]], + ], + container: types.Type[types.Set[_T]], +) -> types.Set[_T]: ... + + +async def acontainer( + iterable: types.Union[ + types.AsyncIterable[_T], + types.Callable[..., types.AsyncIterable[_T]], + ], + container: types.Callable[ + [types.Iterable[_T]], types.Collection[_T] + ] = list, +) -> types.Collection[_T]: + """ + Asyncio version of list()/set()/tuple()/etc() using an async for loop. + + So instead of doing `[item async for item in iterable]` you can do + `await acontainer(iterable)`. 
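+
+    A rough usage sketch (the async generator `agen` below is purely
+    illustrative and not part of this module):
+
+    >>> import asyncio
+    >>> async def agen():
+    ...     yield 1
+    ...     yield 2
+    >>> asyncio.run(acontainer(agen))
+    [1, 2]
+    >>> asyncio.run(acontainer(agen, set))
+    {1, 2}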
+ + """ + iterable_: types.AsyncIterable[_T] + if callable(iterable): + iterable_ = iterable() + else: + iterable_ = iterable + + item: _T + items: types.List[_T] = [] + async for item in iterable_: # pragma: no branch + items.append(item) + + return container(items) + + +async def adict( + iterable: types.Union[ + types.AsyncIterable[types.Tuple[_K, _V]], + types.Callable[..., types.AsyncIterable[types.Tuple[_K, _V]]], + ], + container: types.Callable[ + [types.Iterable[types.Tuple[_K, _V]]], types.Mapping[_K, _V] + ] = dict, +) -> types.Mapping[_K, _V]: + """ + Asyncio version of dict() using an async for loop. + + So instead of doing `{key: value async for key, value in iterable}` you + can do `await adict(iterable)`. + + """ + iterable_: types.AsyncIterable[types.Tuple[_K, _V]] + if callable(iterable): + iterable_ = iterable() + else: + iterable_ = iterable + + item: types.Tuple[_K, _V] + items: types.List[types.Tuple[_K, _V]] = [] + async for item in iterable_: # pragma: no branch + items.append(item) + + return container(items) diff --git a/python_utils/containers.py b/python_utils/containers.py new file mode 100644 index 0000000..e7e7c4a --- /dev/null +++ b/python_utils/containers.py @@ -0,0 +1,623 @@ +""" +This module provides custom container classes with enhanced functionality. + +Classes: + CastedDictBase: Abstract base class for dictionaries that cast keys and + values. + CastedDict: Dictionary that casts keys and values to specified types. + LazyCastedDict: Dictionary that lazily casts values to specified types upon + access. + UniqueList: List that only allows unique values, with configurable behavior + on duplicates. + SliceableDeque: Deque that supports slicing and enhanced equality checks. + +Type Aliases: + KT: Type variable for dictionary keys. + VT: Type variable for dictionary values. + DT: Type alias for a dictionary with keys of type KT and values of type VT. + KT_cast: Type alias for a callable that casts dictionary keys. + VT_cast: Type alias for a callable that casts dictionary values. + HT: Type variable for hashable values in UniqueList. + T: Type variable for generic types. + DictUpdateArgs: Union type for arguments that can be used to update a + dictionary. + OnDuplicate: Literal type for handling duplicate values in UniqueList. + +Usage: + - CastedDict and LazyCastedDict can be used to create dictionaries with + automatic type casting. + - UniqueList ensures all elements are unique and can raise an error on + duplicates. + - SliceableDeque extends deque with slicing support and enhanced equality + checks. + +Examples: + >>> d = CastedDict(int, int) + >>> d[1] = 2 + >>> d['3'] = '4' + >>> d.update({'5': '6'}) + >>> d.update([('7', '8')]) + >>> d + {1: 2, 3: 4, 5: 6, 7: 8} + + >>> l = UniqueList(1, 2, 3) + >>> l.append(4) + >>> l.append(4) + >>> l.insert(0, 4) + >>> l.insert(0, 5) + >>> l[1] = 10 + >>> l + [5, 10, 2, 3, 4] + + >>> d = SliceableDeque([1, 2, 3, 4, 5]) + >>> d[1:4] + SliceableDeque([2, 3, 4]) +""" + +# pyright: reportIncompatibleMethodOverride=false +import abc +import collections +import typing + +from . import types + +if typing.TYPE_CHECKING: + import _typeshed # noqa: F401 + +#: A type alias for a type that can be used as a key in a dictionary. +KT = types.TypeVar('KT') +#: A type alias for a type that can be used as a value in a dictionary. +VT = types.TypeVar('VT') +#: A type alias for a dictionary with keys of type KT and values of type VT. +DT = types.Dict[KT, VT] +#: A type alias for the casted type of a dictionary key. 
+KT_cast = types.Optional[types.Callable[..., KT]] +#: A type alias for the casted type of a dictionary value. +VT_cast = types.Optional[types.Callable[..., VT]] +#: A type alias for the hashable values of the `UniqueList` +HT = types.TypeVar('HT', bound=types.Hashable) +#: A type alias for a regular generic type +T = types.TypeVar('T') + +# Using types.Union instead of | since Python 3.7 doesn't fully support it +DictUpdateArgs = types.Union[ + types.Mapping[KT, VT], + types.Iterable[types.Tuple[KT, VT]], + types.Iterable[types.Mapping[KT, VT]], + '_typeshed.SupportsKeysAndGetItem[KT, VT]', +] + +OnDuplicate = types.Literal['ignore', 'raise'] + + +class CastedDictBase(types.Dict[KT, VT], abc.ABC): + """ + Abstract base class for dictionaries that cast keys and values. + + Attributes: + _key_cast (KT_cast[KT]): Callable to cast dictionary keys. + _value_cast (VT_cast[VT]): Callable to cast dictionary values. + + Methods: + __init__(key_cast: KT_cast[KT] = None, value_cast: VT_cast[VT] = None, + *args: DictUpdateArgs[KT, VT], **kwargs: VT) -> None: + Initializes the dictionary with optional key and value casting + callables. + update(*args: DictUpdateArgs[types.Any, types.Any], + **kwargs: types.Any) -> None: + Updates the dictionary with the given arguments. + __setitem__(key: types.Any, value: types.Any) -> None: + Sets the item in the dictionary, casting the key if a key cast + callable is provided. + """ + + _key_cast: KT_cast[KT] + _value_cast: VT_cast[VT] + + def __init__( + self, + key_cast: KT_cast[KT] = None, + value_cast: VT_cast[VT] = None, + *args: DictUpdateArgs[KT, VT], + **kwargs: VT, + ) -> None: + """ + Initializes the CastedDictBase with optional key and value + casting callables. + + Args: + key_cast (KT_cast[KT], optional): Callable to cast + dictionary keys. Defaults to None. + value_cast (VT_cast[VT], optional): Callable to cast + dictionary values. Defaults to None. + *args (DictUpdateArgs[KT, VT]): Arguments to initialize + the dictionary. + **kwargs (VT): Keyword arguments to initialize the + dictionary. + """ + self._value_cast = value_cast + self._key_cast = key_cast + self.update(*args, **kwargs) + + def update( + self, *args: DictUpdateArgs[types.Any, types.Any], **kwargs: types.Any + ) -> None: + """ + Updates the dictionary with the given arguments. + + Args: + *args (DictUpdateArgs[types.Any, types.Any]): Arguments to update + the dictionary. + **kwargs (types.Any): Keyword arguments to update the dictionary. + """ + if args: + kwargs.update(*args) + + if kwargs: + for key, value in kwargs.items(): + self[key] = value + + def __setitem__(self, key: types.Any, value: types.Any) -> None: + """ + Sets the item in the dictionary, casting the key if a key cast + callable is provided. + + Args: + key (types.Any): The key to set in the dictionary. + value (types.Any): The value to set in the dictionary. + """ + if self._key_cast is not None: + key = self._key_cast(key) + + return super().__setitem__(key, value) + + +class CastedDict(CastedDictBase[KT, VT]): + """ + Custom dictionary that casts keys and values to the specified typing. 
+ + Note that you can specify the types for mypy and type hinting with: + CastedDict[int, int](int, int) + + >>> d: CastedDict[int, int] = CastedDict(int, int) + >>> d[1] = 2 + >>> d['3'] = '4' + >>> d.update({'5': '6'}) + >>> d.update([('7', '8')]) + >>> d + {1: 2, 3: 4, 5: 6, 7: 8} + >>> list(d.keys()) + [1, 3, 5, 7] + >>> list(d) + [1, 3, 5, 7] + >>> list(d.values()) + [2, 4, 6, 8] + >>> list(d.items()) + [(1, 2), (3, 4), (5, 6), (7, 8)] + >>> d[3] + 4 + + # Casts are optional and can be disabled by passing None as the cast + >>> d = CastedDict() + >>> d[1] = 2 + >>> d['3'] = '4' + >>> d.update({'5': '6'}) + >>> d.update([('7', '8')]) + >>> d + {1: 2, '3': '4', '5': '6', '7': '8'} + """ + + def __setitem__(self, key: typing.Any, value: typing.Any) -> None: + """Sets `key` to `cast(value)` in the dictionary.""" + if self._value_cast is not None: + value = self._value_cast(value) + + super().__setitem__(key, value) + + +class LazyCastedDict(CastedDictBase[KT, VT]): + """ + Custom dictionary that casts keys and lazily casts values to the specified + typing. Note that the values are cast only when they are accessed and + are not cached between executions. + + Note that you can specify the types for mypy and type hinting with: + LazyCastedDict[int, int](int, int) + + >>> d: LazyCastedDict[int, int] = LazyCastedDict(int, int) + >>> d[1] = 2 + >>> d['3'] = '4' + >>> d.update({'5': '6'}) + >>> d.update([('7', '8')]) + >>> d + {1: 2, 3: '4', 5: '6', 7: '8'} + >>> list(d.keys()) + [1, 3, 5, 7] + >>> list(d) + [1, 3, 5, 7] + >>> list(d.values()) + [2, 4, 6, 8] + >>> list(d.items()) + [(1, 2), (3, 4), (5, 6), (7, 8)] + >>> d[3] + 4 + + # Casts are optional and can be disabled by passing None as the cast + >>> d = LazyCastedDict() + >>> d[1] = 2 + >>> d['3'] = '4' + >>> d.update({'5': '6'}) + >>> d.update([('7', '8')]) + >>> d + {1: 2, '3': '4', '5': '6', '7': '8'} + >>> list(d.keys()) + [1, '3', '5', '7'] + >>> list(d.values()) + [2, '4', '6', '8'] + + >>> list(d.items()) + [(1, 2), ('3', '4'), ('5', '6'), ('7', '8')] + >>> d['3'] + '4' + """ + + def __setitem__(self, key: types.Any, value: types.Any) -> None: + """ + Sets the item in the dictionary, casting the key if a key cast + callable is provided. + + Args: + key (types.Any): The key to set in the dictionary. + value (types.Any): The value to set in the dictionary. + """ + if self._key_cast is not None: + key = self._key_cast(key) + + super().__setitem__(key, value) + + def __getitem__(self, key: types.Any) -> VT: + """ + Gets the item from the dictionary, casting the value if a value cast + callable is provided. + + Args: + key (types.Any): The key to get from the dictionary. + + Returns: + VT: The value from the dictionary. + """ + if self._key_cast is not None: + key = self._key_cast(key) + + value = super().__getitem__(key) + + if self._value_cast is not None: + value = self._value_cast(value) + + return value + + def items( # type: ignore[override] + self, + ) -> types.Generator[types.Tuple[KT, VT], None, None]: + """ + Returns a generator of the dictionary's items, casting the values if a + value cast callable is provided. + + Yields: + types.Generator[types.Tuple[KT, VT], None, None]: A generator of + the dictionary's items. 
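+
+        A small sketch of the lazy casting (the `int` cast is chosen
+        arbitrarily for illustration):
+
+        >>> d = LazyCastedDict(None, int)
+        >>> d['a'] = '1'
+        >>> list(d.items())
+        [('a', 1)]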
+ """ + if self._value_cast is None: + yield from super().items() + else: + for key, value in super().items(): + yield key, self._value_cast(value) + + def values(self) -> types.Generator[VT, None, None]: # type: ignore[override] + """ + Returns a generator of the dictionary's values, casting the values if a + value cast callable is provided. + + Yields: + types.Generator[VT, None, None]: A generator of the dictionary's + values. + """ + if self._value_cast is None: + yield from super().values() + else: + for value in super().values(): + yield self._value_cast(value) + + +class UniqueList(types.List[HT]): + """ + A list that only allows unique values. Duplicate values are ignored by + default, but can be configured to raise an exception instead. + + >>> l = UniqueList(1, 2, 3) + >>> l.append(4) + >>> l.append(4) + >>> l.insert(0, 4) + >>> l.insert(0, 5) + >>> l[1] = 10 + >>> l + [5, 10, 2, 3, 4] + + >>> l = UniqueList(1, 2, 3, on_duplicate='raise') + >>> l.append(4) + >>> l.append(4) + Traceback (most recent call last): + ... + ValueError: Duplicate value: 4 + >>> l.insert(0, 4) + Traceback (most recent call last): + ... + ValueError: Duplicate value: 4 + >>> 4 in l + True + >>> l[0] + 1 + >>> l[1] = 4 + Traceback (most recent call last): + ... + ValueError: Duplicate value: 4 + """ + + _set: types.Set[HT] + + def __init__( + self, + *args: HT, + on_duplicate: OnDuplicate = 'ignore', + ): + """ + Initializes the UniqueList with optional duplicate handling behavior. + + Args: + *args (HT): Initial values for the list. + on_duplicate (OnDuplicate, optional): Behavior on duplicates. + Defaults to 'ignore'. + """ + self.on_duplicate = on_duplicate + self._set = set() + super().__init__() + for arg in args: + self.append(arg) + + def insert(self, index: types.SupportsIndex, value: HT) -> None: + """ + Inserts a value at the specified index, ensuring uniqueness. + + Args: + index (types.SupportsIndex): The index to insert the value at. + value (HT): The value to insert. + + Raises: + ValueError: If the value is a duplicate and `on_duplicate` is set + to 'raise'. + """ + if value in self._set: + if self.on_duplicate == 'raise': + raise ValueError(f'Duplicate value: {value}') + else: + return + + self._set.add(value) + super().insert(index, value) + + def append(self, value: HT) -> None: + """ + Appends a value to the list, ensuring uniqueness. + + Args: + value (HT): The value to append. + + Raises: + ValueError: If the value is a duplicate and `on_duplicate` is set + to 'raise'. + """ + if value in self._set: + if self.on_duplicate == 'raise': + raise ValueError(f'Duplicate value: {value}') + else: + return + + self._set.add(value) + super().append(value) + + def __contains__(self, item: HT) -> bool: # type: ignore[override] + """ + Checks if the list contains the specified item. + + Args: + item (HT): The item to check for. + + Returns: + bool: True if the item is in the list, False otherwise. + """ + return item in self._set + + @typing.overload + def __setitem__( + self, indices: types.SupportsIndex, values: HT + ) -> None: ... + + @typing.overload + def __setitem__( + self, indices: slice, values: types.Iterable[HT] + ) -> None: ... + + def __setitem__( + self, + indices: types.Union[slice, types.SupportsIndex], + values: types.Union[types.Iterable[HT], HT], + ) -> None: + """ + Sets the item(s) at the specified index/indices, ensuring uniqueness. + + Args: + indices (types.Union[slice, types.SupportsIndex]): The index or + slice to set the value(s) at. 
+ values (types.Union[types.Iterable[HT], HT]): The value(s) to set. + + Raises: + RuntimeError: If `on_duplicate` is 'ignore' and setting slices. + ValueError: If the value(s) are duplicates and `on_duplicate` is + set to 'raise'. + """ + if isinstance(indices, slice): + values = types.cast(types.Iterable[HT], values) + if self.on_duplicate == 'ignore': + raise RuntimeError( + 'ignore mode while setting slices introduces ambiguous ' + 'behaviour and is therefore not supported' + ) + + duplicates: types.Set[HT] = set(values) & self._set + if duplicates and values != list(self[indices]): + raise ValueError(f'Duplicate values: {duplicates}') + + self._set.update(values) + else: + values = types.cast(HT, values) + if values in self._set and values != self[indices]: + if self.on_duplicate == 'raise': + raise ValueError(f'Duplicate value: {values}') + else: + return + + self._set.add(values) + + super().__setitem__( + types.cast(slice, indices), types.cast(types.List[HT], values) + ) + + def __delitem__( + self, index: types.Union[types.SupportsIndex, slice] + ) -> None: + """ + Deletes the item(s) at the specified index/indices. + + Args: + index (types.Union[types.SupportsIndex, slice]): The index or slice + to delete the item(s) at. + """ + if isinstance(index, slice): + for value in self[index]: + self._set.remove(value) + else: + self._set.remove(self[index]) + + super().__delitem__(index) + + +# Type hinting `collections.deque` does not work consistently between Python +# runtime, mypy and pyright currently so we have to ignore the errors +class SliceableDeque(types.Generic[T], collections.deque[T]): + """ + A deque that supports slicing and enhanced equality checks. + + Methods: + __getitem__(index: types.Union[types.SupportsIndex, slice]) -> + types.Union[T, 'SliceableDeque[T]']: + Returns the item or slice at the given index. + __eq__(other: types.Any) -> bool: + Checks equality with another object, allowing for comparison with + lists, tuples, and sets. + pop(index: int = -1) -> T: + Removes and returns the item at the given index. Only supports + index 0 and the last index. + """ + + @typing.overload + def __getitem__(self, index: types.SupportsIndex) -> T: ... + + @typing.overload + def __getitem__(self, index: slice) -> 'SliceableDeque[T]': ... + + def __getitem__( + self, index: types.Union[types.SupportsIndex, slice] + ) -> types.Union[T, 'SliceableDeque[T]']: + """ + Return the item or slice at the given index. + + Args: + index (types.Union[types.SupportsIndex, slice]): The index or + slice to retrieve. + + Returns: + types.Union[T, 'SliceableDeque[T]']: The item or slice at the + given index. + + Examples: + >>> d = SliceableDeque[int]([1, 2, 3, 4, 5]) + >>> d[1:4] + SliceableDeque([2, 3, 4]) + + >>> d = SliceableDeque[str](['a', 'b', 'c']) + >>> d[-2:] + SliceableDeque(['b', 'c']) + """ + if isinstance(index, slice): + start, stop, step = index.indices(len(self)) + return self.__class__(self[i] for i in range(start, stop, step)) + else: + return super().__getitem__(index) + + def __eq__(self, other: types.Any) -> bool: + """ + Checks equality with another object, allowing for comparison with + lists, tuples, and sets. + + Args: + other (types.Any): The object to compare with. + + Returns: + bool: True if the objects are equal, False otherwise. 
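+
+        A few illustrative comparisons (values chosen arbitrarily):
+
+        >>> SliceableDeque([1, 2, 3]) == [1, 2, 3]
+        True
+        >>> SliceableDeque([1, 2, 3]) == (1, 2, 3)
+        True
+        >>> SliceableDeque([1, 2, 3]) == {1, 2, 3}
+        True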
+ """ + if isinstance(other, list): + return list(self) == other + elif isinstance(other, tuple): + return tuple(self) == other + elif isinstance(other, set): + return set(self) == other + else: + return super().__eq__(other) + + def pop(self, index: int = -1) -> T: + """ + Removes and returns the item at the given index. Only supports index 0 + and the last index. + + Args: + index (int, optional): The index of the item to remove. Defaults to + -1. + + Returns: + T: The removed item. + + Raises: + IndexError: If the index is not 0 or the last index. + + Examples: + >>> d = SliceableDeque([1, 2, 3]) + >>> d.pop(0) + 1 + >>> d.pop() + 3 + """ + if index == 0: + return super().popleft() + elif index in {-1, len(self) - 1}: + return super().pop() + else: + raise IndexError( + 'Only index 0 and the last index (`N-1` or `-1`) ' + 'are supported' + ) + + +if __name__ == '__main__': + import doctest + + doctest.testmod() diff --git a/python_utils/converters.py b/python_utils/converters.py index 2796f2e..c4240f3 100644 --- a/python_utils/converters.py +++ b/python_utils/converters.py @@ -1,18 +1,45 @@ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals +""" +This module provides utility functions for type conversion. + +Functions: + - to_int: Convert a string to an integer with optional regular expression + matching. + - to_float: Convert a string to a float with optional regular expression + matching. + - to_unicode: Convert objects to Unicode strings. + - to_str: Convert objects to byte strings. + - scale_1024: Scale a number down to a suitable size based on powers of + 1024. + - remap: Remap a value from one range to another. +""" + +# Ignoring all mypy errors because mypy doesn't understand many modern typing +# constructs... please, use pyright instead if you can. +from __future__ import annotations import decimal import math import re +import typing +from typing import Union -import six +from . import types +_TN = types.TypeVar('_TN', bound=types.DecimalNumber) -def to_int(input_, default=0, exception=(ValueError, TypeError), regexp=None): - r''' - Convert the given input to an integer or return default +_RegexpType: types.TypeAlias = Union[ + types.Pattern[str], str, types.Literal[True], None +] + + +def to_int( + input_: str | None = None, + default: int = 0, + exception: types.ExceptionsType = (ValueError, TypeError), + regexp: _RegexpType = None, +) -> int: + r""" + Convert the given input to an integer or return default. When trying to convert the exceptions given in the exception parameter are automatically catched and the default will be returned. @@ -29,6 +56,10 @@ def to_int(input_, default=0, exception=(ValueError, TypeError), regexp=None): 0 >>> to_int('1') 1 + >>> to_int('') + 0 + >>> to_int() + 0 >>> to_int('abc123') 0 >>> to_int('123abc') @@ -67,31 +98,36 @@ def to_int(input_, default=0, exception=(ValueError, TypeError), regexp=None): Traceback (most recent call last): ... 
TypeError: unknown argument for regexp parameter: 123 - ''' - + """ if regexp is True: regexp = re.compile(r'(\d+)') - elif isinstance(regexp, six.string_types): + elif isinstance(regexp, str): regexp = re.compile(regexp) elif hasattr(regexp, 'search'): pass elif regexp is not None: - raise TypeError('unknown argument for regexp parameter: %r' % regexp) + raise TypeError(f'unknown argument for regexp parameter: {regexp!r}') try: - if regexp: - match = regexp.search(input_) - if match: - input_ = match.groups()[-1] - return int(input_) + if regexp and input_ and (match := regexp.search(input_)): + input_ = match.groups()[-1] + + if input_ is None: + return default + else: + return int(input_) except exception: return default -def to_float(input_, default=0, exception=(ValueError, TypeError), - regexp=None): - r''' - Convert the given `input_` to an integer or return default +def to_float( + input_: str, + default: int = 0, + exception: types.ExceptionsType = (ValueError, TypeError), + regexp: _RegexpType = None, +) -> types.Number: + r""" + Convert the given `input_` to an integer or return default. When trying to convert the exceptions given in the exception parameter are automatically catched and the default will be returned. @@ -100,7 +136,7 @@ def to_float(input_, default=0, exception=(ValueError, TypeError), in a string. When True it will automatically match any digit in the string. When a (regexp) object (has a search method) is given, that will be used. - WHen a string is given, re.compile will be run over it first + When a string is given, re.compile will be run over it first The last group of the regexp will be used as value @@ -140,11 +176,10 @@ def to_float(input_, default=0, exception=(ValueError, TypeError), Traceback (most recent call last): ... TypeError: unknown argument for regexp parameter - ''' - + """ if regexp is True: regexp = re.compile(r'(\d+(\.\d+|))') - elif isinstance(regexp, six.string_types): + elif isinstance(regexp, str): regexp = re.compile(regexp) elif hasattr(regexp, 'search'): pass @@ -152,69 +187,78 @@ def to_float(input_, default=0, exception=(ValueError, TypeError), raise TypeError('unknown argument for regexp parameter') try: - if regexp: - match = regexp.search(input_) - if match: - input_ = match.group(1) + if regexp and (match := regexp.search(input_)): + input_ = match.group(1) return float(input_) except exception: return default -def to_unicode(input_, encoding='utf-8', errors='replace'): - '''Convert objects to unicode, if needed decodes string with the given +def to_unicode( + input_: types.StringTypes, + encoding: str = 'utf-8', + errors: str = 'replace', +) -> str: + """Convert objects to unicode, if needed decodes string with the given encoding and errors settings. - :rtype: unicode + :rtype: str >>> to_unicode(b'a') 'a' >>> to_unicode('a') 'a' - >>> to_unicode(u'a') + >>> to_unicode('a') 'a' - >>> class Foo(object): __str__ = lambda s: u'a' + >>> class Foo(object): + ... __str__ = lambda s: 'a' >>> to_unicode(Foo()) 'a' >>> to_unicode(Foo) "" - ''' - if isinstance(input_, six.binary_type): + """ + if isinstance(input_, bytes): input_ = input_.decode(encoding, errors) else: - input_ = six.text_type(input_) + input_ = str(input_) return input_ -def to_str(input_, encoding='utf-8', errors='replace'): - '''Convert objects to string, encodes to the given encoding +def to_str( + input_: types.StringTypes, + encoding: str = 'utf-8', + errors: str = 'replace', +) -> bytes: + """Convert objects to string, encodes to the given encoding. 
:rtype: str >>> to_str('a') b'a' - >>> to_str(u'a') + >>> to_str('a') b'a' >>> to_str(b'a') b'a' - >>> class Foo(object): __str__ = lambda s: u'a' + >>> class Foo(object): + ... __str__ = lambda s: 'a' >>> to_str(Foo()) 'a' >>> to_str(Foo) "" - ''' - if isinstance(input_, six.binary_type): - pass - else: + """ + if not isinstance(input_, bytes): if not hasattr(input_, 'encode'): - input_ = six.text_type(input_) + input_ = str(input_) input_ = input_.encode(encoding, errors) return input_ -def scale_1024(x, n_prefixes): - '''Scale a number down to a suitable size, based on powers of 1024. +def scale_1024( + x: types.Number, + n_prefixes: int, +) -> types.Tuple[types.Number, types.Number]: + """Scale a number down to a suitable size, based on powers of 1024. Returns the scaled number and the power of 1024 used. @@ -230,7 +274,7 @@ def scale_1024(x, n_prefixes): (0.5, 0) >>> scale_1024(1, 2) (1.0, 0) - ''' + """ if x <= 0: power = 0 else: @@ -239,8 +283,76 @@ def scale_1024(x, n_prefixes): return scaled, power -def remap(value, old_min, old_max, new_min, new_max): - ''' +@typing.overload +def remap( + value: decimal.Decimal, + old_min: decimal.Decimal | float, + old_max: decimal.Decimal | float, + new_min: decimal.Decimal | float, + new_max: decimal.Decimal | float, +) -> decimal.Decimal: ... + + +@typing.overload +def remap( + value: decimal.Decimal | float, + old_min: decimal.Decimal, + old_max: decimal.Decimal | float, + new_min: decimal.Decimal | float, + new_max: decimal.Decimal | float, +) -> decimal.Decimal: ... + + +@typing.overload +def remap( + value: decimal.Decimal | float, + old_min: decimal.Decimal | float, + old_max: decimal.Decimal, + new_min: decimal.Decimal | float, + new_max: decimal.Decimal | float, +) -> decimal.Decimal: ... + + +@typing.overload +def remap( + value: decimal.Decimal | float, + old_min: decimal.Decimal | float, + old_max: decimal.Decimal | float, + new_min: decimal.Decimal, + new_max: decimal.Decimal | float, +) -> decimal.Decimal: ... + + +@typing.overload +def remap( + value: decimal.Decimal | float, + old_min: decimal.Decimal | float, + old_max: decimal.Decimal | float, + new_min: decimal.Decimal | float, + new_max: decimal.Decimal, +) -> decimal.Decimal: ... + + +# Note that float captures both int and float types so we don't need to +# specify them separately +@typing.overload +def remap( + value: float, + old_min: float, + old_max: float, + new_min: float, + new_max: float, +) -> float: ... + + +def remap( # pyright: ignore[reportInconsistentOverload] + value: _TN, + old_min: _TN, + old_max: _TN, + new_min: _TN, + new_max: _TN, +) -> _TN: + """ remap a value from one range into another. >>> remap(500, 0, 1000, 0, 100) @@ -285,78 +397,73 @@ def remap(value, old_min, old_max, new_min, new_max): ... ValueError: Output range (0-0) is empty - :param value: value to be converted - :type value: int, float, decimal.Decimal - - :param old_min: minimum of the range for the value that has been passed - :type old_min: int, float, decimal.Decimal - - :param old_max: maximum of the range for the value that has been passed - :type old_max: int, float, decimal.Decimal - - :param new_min: the minimum of the new range - :type new_min: int, float, decimal.Decimal - - :param new_max: the maximum of the new range - :type new_max: int, float, decimal.Decimal - - :return: value that has been re ranged. if any of the parameters passed is - a `decimal.Decimal` all of the parameters will be converted to - `decimal.Decimal`. 
The same thing also happens if one of the - parameters is a `float`. otherwise all parameters will get converted - into an `int`. technically you can pass a `str` of an integer and it - will get converted. The returned value type will be `decimal.Decimal` - of any of the passed parameters ar `decimal.Decimal`, the return type - will be `float` if any of the passed parameters are a `float` otherwise - the returned type will be `int`. - - :rtype: int, float, decimal.Decimal - ''' - + Args: + value (int, float, decimal.Decimal): Value to be converted. + old_min (int, float, decimal.Decimal): Minimum of the range for the + value that has been passed. + old_max (int, float, decimal.Decimal): Maximum of the range for the + value that has been passed. + new_min (int, float, decimal.Decimal): The minimum of the new range. + new_max (int, float, decimal.Decimal): The maximum of the new range. + + Returns: int, float, decimal.Decimal: Value that has been re-ranged. If + any of the parameters passed is a `decimal.Decimal`, all of the + parameters will be converted to `decimal.Decimal`. The same thing also + happens if one of the parameters is a `float`. Otherwise, all + parameters will get converted into an `int`. Technically, you can pass + a `str` of an integer and it will get converted. The returned value + type will be `decimal.Decimal` if any of the passed parameters are + `decimal.Decimal`, the return type will be `float` if any of the + passed parameters are a `float`, otherwise the returned type will be + `int`. + """ + type_: types.Type[types.DecimalNumber] if ( - isinstance(value, decimal.Decimal) or - isinstance(old_min, decimal.Decimal) or - isinstance(old_max, decimal.Decimal) or - isinstance(new_min, decimal.Decimal) or - isinstance(new_max, decimal.Decimal) + isinstance(value, decimal.Decimal) + or isinstance(old_min, decimal.Decimal) + or isinstance(old_max, decimal.Decimal) + or isinstance(new_min, decimal.Decimal) + or isinstance(new_max, decimal.Decimal) ): type_ = decimal.Decimal elif ( - isinstance(value, float) or - isinstance(old_min, float) or - isinstance(old_max, float) or - isinstance(new_min, float) or - isinstance(new_max, float) + isinstance(value, float) + or isinstance(old_min, float) + or isinstance(old_max, float) + or isinstance(new_min, float) + or isinstance(new_max, float) ): type_ = float - else: type_ = int - value = type_(value) - old_min = type_(old_min) - old_max = type_(old_max) - new_max = type_(new_max) - new_min = type_(new_min) + value = types.cast(_TN, type_(value)) + old_min = types.cast(_TN, type_(old_min)) + old_max = types.cast(_TN, type_(old_max)) + new_max = types.cast(_TN, type_(new_max)) + new_min = types.cast(_TN, type_(new_min)) - old_range = old_max - old_min - new_range = new_max - new_min + # These might not be floats but the Python type system doesn't understand + # the generic type system in this case + old_range = types.cast(float, old_max) - types.cast(float, old_min) + new_range = types.cast(float, new_max) - types.cast(float, new_min) if old_range == 0: - raise ValueError('Input range ({}-{}) is empty'.format( - old_min, old_max)) + raise ValueError(f'Input range ({old_min}-{old_max}) is empty') if new_range == 0: - raise ValueError('Output range ({}-{}) is empty'.format( - new_min, new_max)) + raise ValueError(f'Output range ({new_min}-{new_max}) is empty') - new_value = (value - old_min) * new_range + # The current state of Python typing makes it impossible to use the + # generic type system in this case. 
Or so extremely verbose that it's not + # worth it. + new_value = (value - old_min) * new_range # type: ignore[operator] # pyright: ignore[reportOperatorIssue, reportUnknownVariableType] - if type_ == int: - new_value //= old_range + if type_ is int: + new_value //= old_range # pyright: ignore[reportUnknownVariableType] else: - new_value /= old_range + new_value /= old_range # pyright: ignore[reportUnknownVariableType] - new_value += new_min + new_value += new_min # type: ignore[operator] # pyright: ignore[reportOperatorIssue, reportUnknownVariableType] - return new_value + return types.cast(_TN, new_value) diff --git a/python_utils/decorators.py b/python_utils/decorators.py index fc3e83a..8799581 100644 --- a/python_utils/decorators.py +++ b/python_utils/decorators.py @@ -1,7 +1,33 @@ +""" +This module provides various utility decorators for Python functions +and methods. +The decorators include: -def set_attributes(**kwargs): - '''Decorator to set attributes on functions and classes +1. `set_attributes`: Sets attributes on functions and classes. +2. `listify`: Converts any generator to a list or other collection. +3. `sample`: Limits calls to a function based on a sample rate. +4. `wraps_classmethod`: Wraps classmethods with type info from a + regular method. + +Each decorator is designed to enhance the functionality of Python +functions and methods in a simple and reusable manner. +""" + +import contextlib +import functools +import logging +import random + +from . import types + +_T = types.TypeVar('_T') +_P = types.ParamSpec('_P') +_S = types.TypeVar('_S', covariant=True) + + +def set_attributes(**kwargs: types.Any) -> types.Callable[..., types.Any]: + """Decorator to set attributes on functions and classes. A common usage for this pattern is the Django Admin where functions can get an optional short_description. To illustrate: @@ -13,18 +39,182 @@ def set_attributes(**kwargs): >>> @set_attributes(short_description='Name') ... def upper_case_name(self, obj): - ... return ("%s %s" % (obj.first_name, obj.last_name)).upper() + ... return ('%s %s' % (obj.first_name, obj.last_name)).upper() The standard Django version: >>> def upper_case_name(obj): - ... return ("%s %s" % (obj.first_name, obj.last_name)).upper() + ... return ('%s %s' % (obj.first_name, obj.last_name)).upper() >>> upper_case_name.short_description = 'Name' - ''' - def _set_attributes(function): + """ + + def _set_attributes( + function: types.Callable[_P, _T], + ) -> types.Callable[_P, _T]: for key, value in kwargs.items(): setattr(function, key, value) return function + return _set_attributes + + +def listify( + collection: types.Callable[ + [types.Iterable[_T]], types.Collection[_T] + ] = list, + allow_empty: bool = True, +) -> types.Callable[ + [types.Callable[..., types.Optional[types.Iterable[_T]]]], + types.Callable[..., types.Collection[_T]], +]: + """ + Convert any generator to a list or other type of collection. + + >>> @listify() + ... def generator(): + ... yield 1 + ... yield 2 + ... yield 3 + + >>> generator() + [1, 2, 3] + + >>> @listify() + ... def empty_generator(): + ... pass + + >>> empty_generator() + [] + + >>> @listify(allow_empty=False) + ... def empty_generator_not_allowed(): + ... pass + + >>> empty_generator_not_allowed() # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + TypeError: ... `allow_empty` is `False` + + >>> @listify(collection=set) + ... def set_generator(): + ... yield 1 + ... yield 1 + ... yield 2 + + >>> set_generator() + {1, 2} + + >>> @listify(collection=dict) + ... 
def dict_generator(): + ... yield 'a', 1 + ... yield 'b', 2 + + >>> dict_generator() + {'a': 1, 'b': 2} + """ + + def _listify( + function: types.Callable[..., types.Optional[types.Iterable[_T]]], + ) -> types.Callable[..., types.Collection[_T]]: + def __listify( + *args: types.Any, **kwargs: types.Any + ) -> types.Collection[_T]: + result: types.Optional[types.Iterable[_T]] = function( + *args, **kwargs + ) + if result is None: + if allow_empty: + return collection(iter(())) + else: + raise TypeError( + f'{function} returned `None` and `allow_empty` ' + 'is `False`' + ) + else: + return collection(result) + + return __listify + + return _listify + + +def sample( + sample_rate: float, +) -> types.Callable[ + [types.Callable[_P, _T]], + types.Callable[_P, types.Optional[_T]], +]: + """ + Limit calls to a function based on given sample rate. + Number of calls to the function will be roughly equal to + sample_rate percentage. + + Usage: + + >>> @sample(0.5) + ... def demo_function(*args, **kwargs): + ... return 1 + + Calls to *demo_function* will be limited to 50% approximatly. + """ + + def _sample( + function: types.Callable[_P, _T], + ) -> types.Callable[_P, types.Optional[_T]]: + @functools.wraps(function) + def __sample( + *args: _P.args, **kwargs: _P.kwargs + ) -> types.Optional[_T]: + if random.random() < sample_rate: + return function(*args, **kwargs) + else: + logging.debug( + 'Skipped execution of %r(%r, %r) due to sampling', + function, + args, + kwargs, + ) + return None + + return __sample + + return _sample + + +def wraps_classmethod( + wrapped: types.Callable[types.Concatenate[_S, _P], _T], +) -> types.Callable[ + [ + types.Callable[types.Concatenate[types.Any, _P], _T], + ], + types.Callable[types.Concatenate[_S, _P], _T], +]: + """ + Like `functools.wraps`, but for wrapping classmethods with the type info + from a regular method. + """ + + def _wraps_classmethod( + wrapper: types.Callable[types.Concatenate[types.Any, _P], _T], + ) -> types.Callable[types.Concatenate[_S, _P], _T]: + # For some reason `functools.update_wrapper` fails on some test + # runs but not while running actual code + with contextlib.suppress(AttributeError): + wrapper = functools.update_wrapper( + wrapper, + wrapped, + assigned=tuple( + a + for a in functools.WRAPPER_ASSIGNMENTS + if a != '__annotations__' + ), + ) + if annotations := getattr(wrapped, '__annotations__', {}): + annotations.pop('self', None) + wrapper.__annotations__ = annotations + + return wrapper + + return _wraps_classmethod diff --git a/python_utils/exceptions.py b/python_utils/exceptions.py new file mode 100644 index 0000000..ee7c195 --- /dev/null +++ b/python_utils/exceptions.py @@ -0,0 +1,44 @@ +""" +This module provides utility functions for raising and reraising exceptions. + +Functions: + raise_exception(exception_class, *args, **kwargs): + Returns a function that raises an exception of the given type with + the given arguments. + + reraise(*args, **kwargs): + Reraises the current exception. +""" + +from . import types + + +def raise_exception( + exception_class: types.Type[Exception], + *args: types.Any, + **kwargs: types.Any, +) -> types.Callable[..., None]: + """ + Returns a function that raises an exception of the given type with the + given arguments. + + >>> raise_exception(ValueError, 'spam')('eggs') + Traceback (most recent call last): + ... 
+ ValueError: spam + """ + + def raise_(*args_: types.Any, **kwargs_: types.Any) -> types.Any: + raise exception_class(*args, **kwargs) + + return raise_ + + +def reraise(*args: types.Any, **kwargs: types.Any) -> types.Any: + """ + Reraises the current exception. + + This function seems useless, but it can be useful when you need to pass + a callable to another function that raises an exception. + """ + raise diff --git a/python_utils/formatters.py b/python_utils/formatters.py index 06dccac..44ec873 100644 --- a/python_utils/formatters.py +++ b/python_utils/formatters.py @@ -1,8 +1,27 @@ +""" +This module provides utility functions for formatting strings and dates. + +Functions: + camel_to_underscore(name: str) -> str: + Convert camel case style naming to underscore/snake case style naming. + + apply_recursive(function: Callable[[str], str], data: OptionalScope = None, + **kwargs: Any) -> OptionalScope: + Apply a function to all keys in a scope recursively. + + timesince(dt: Union[datetime.datetime, datetime.timedelta], + default: str = 'just now') -> str: + Returns string representing 'time since' e.g. 3 days ago, 5 hours ago. +""" + +# pyright: reportUnnecessaryIsInstance=false import datetime +from python_utils import types -def camel_to_underscore(name): - '''Convert camel case style naming to underscore style naming + +def camel_to_underscore(name: str) -> str: + """Convert camel case style naming to underscore/snake case style naming. If there are existing underscores they will be collapsed with the to-be-added underscores. Multiple consecutive capital letters will not be @@ -18,8 +37,8 @@ def camel_to_underscore(name): '__spam_and_bacon__' >>> camel_to_underscore('__SpamANDBacon__') '__spam_and_bacon__' - ''' - output = [] + """ + output: types.List[str] = [] for i, c in enumerate(name): if i > 0: pc = name[i - 1] @@ -30,7 +49,7 @@ def camel_to_underscore(name): elif i > 3 and not c.isupper(): # Will return the last 3 letters to check if we are changing # case - previous = name[i - 3:i] + previous = name[i - 3 : i] if previous.isalpha() and previous.isupper(): output.insert(len(output) - 1, '_') @@ -39,8 +58,50 @@ def camel_to_underscore(name): return ''.join(output) -def timesince(dt, default='just now'): - ''' +def apply_recursive( + function: types.Callable[[str], str], + data: types.OptionalScope = None, + **kwargs: types.Any, +) -> types.OptionalScope: + """ + Apply a function to all keys in a scope recursively. + + >>> apply_recursive(camel_to_underscore, {'SpamEggsAndBacon': 'spam'}) + {'spam_eggs_and_bacon': 'spam'} + >>> apply_recursive( + ... camel_to_underscore, + ... { + ... 'SpamEggsAndBacon': { + ... 'SpamEggsAndBacon': 'spam', + ... } + ... }, + ... ) + {'spam_eggs_and_bacon': {'spam_eggs_and_bacon': 'spam'}} + + >>> a = {'a_b_c': 123, 'def': {'DeF': 456}} + >>> b = apply_recursive(camel_to_underscore, a) + >>> b + {'a_b_c': 123, 'def': {'de_f': 456}} + + >>> apply_recursive(camel_to_underscore, None) + """ + if data is None: + return None + + elif isinstance(data, dict): + return { + function(key): apply_recursive(function, value, **kwargs) + for key, value in data.items() + } + else: + return data + + +def timesince( + dt: types.Union[datetime.datetime, datetime.timedelta], + default: str = 'just now', +) -> str: + """ Returns string representing 'time since' e.g. 3 days ago, 5 hours ago etc. 
@@ -81,7 +142,7 @@ def timesince(dt, default='just now'): '1 hour and 2 minutes ago' >>> timesince(datetime.timedelta(seconds=3721)) '1 hour and 2 minutes ago' - ''' + """ if isinstance(dt, datetime.timedelta): diff = dt else: @@ -98,16 +159,15 @@ def timesince(dt, default='just now'): (diff.seconds % 60, 'second', 'seconds'), ) - output = [] + output: types.List[str] = [] for period, singular, plural in periods: - if int(period): - if int(period) == 1: - output.append('%d %s' % (period, singular)) - else: - output.append('%d %s' % (period, plural)) + int_period = int(period) + if int_period == 1: + output.append(f'{int_period} {singular}') + elif int_period: + output.append(f'{int_period} {plural}') if output: - return '%s ago' % ' and '.join(output[:2]) + return f'{" and ".join(output[:2])} ago' return default - diff --git a/python_utils/generators.py b/python_utils/generators.py new file mode 100644 index 0000000..0c4a437 --- /dev/null +++ b/python_utils/generators.py @@ -0,0 +1,126 @@ +""" +This module provides generator utilities for batching items from +iterables and async iterables. + +Functions: + abatcher(generator, batch_size=None, interval=None): + Asyncio generator wrapper that returns items with a given batch + size or interval (whichever is reached first). + + batcher(iterable, batch_size=10): + Generator wrapper that returns items with a given batch size. +""" + +import asyncio +import time + +import python_utils +from python_utils import types + +_T = types.TypeVar('_T') + + +async def abatcher( + generator: types.Union[ + types.AsyncGenerator[_T, None], + types.AsyncIterator[_T], + ], + batch_size: types.Optional[int] = None, + interval: types.Optional[types.delta_type] = None, +) -> types.AsyncGenerator[types.List[_T], None]: + """ + Asyncio generator wrapper that returns items with a given batch size or + interval (whichever is reached first). + + Args: + generator: The async generator or iterator to batch. + batch_size (types.Optional[int], optional): The number of items per + batch. Defaults to None. + interval (types.Optional[types.delta_type], optional): The time + interval to wait before yielding a batch. Defaults to None. + + Yields: + types.AsyncGenerator[types.List[_T], None]: A generator that yields + batches of items. + """ + batch: types.List[_T] = [] + + assert batch_size or interval, 'Must specify either batch_size or interval' + + # If interval is specified, use it to determine when to yield the batch + # Alternatively set a really long timeout to keep the code simpler + if interval: + interval_s = python_utils.delta_to_seconds(interval) + else: + # Set the timeout to 10 years + interval_s = 60 * 60 * 24 * 365 * 10.0 + + next_yield: float = time.perf_counter() + interval_s + + done: types.Set[asyncio.Task[_T]] + pending: types.Set[asyncio.Task[_T]] = set() + + while True: + try: + done, pending = await asyncio.wait( + pending + or [ + asyncio.create_task( + types.cast( + types.Coroutine[None, None, _T], + generator.__anext__(), + ) + ), + ], + timeout=interval_s, + return_when=asyncio.FIRST_COMPLETED, + ) + + if done: + for result in done: + batch.append(result.result()) + + except StopAsyncIteration: + if batch: + yield batch + + break + + if batch_size is not None and len(batch) == batch_size: + yield batch + batch = [] + + if interval and batch and time.perf_counter() > next_yield: + yield batch + batch = [] + # Always set the next yield time to the current time. 
If the + # loop is running slow due to blocking functions we do not + # want to burst too much + next_yield = time.perf_counter() + interval_s + + +def batcher( + iterable: types.Iterable[_T], + batch_size: int = 10, +) -> types.Generator[types.List[_T], None, None]: + """ + Generator wrapper that returns items with a given batch size. + + Args: + iterable (types.Iterable[_T]): The iterable to batch. + batch_size (int, optional): The number of items per batch. Defaults + to 10. + + Yields: + types.Generator[types.List[_T], None, None]: A generator that yields + batches of items. + """ + batch: types.List[_T] = [] + for item in iterable: + batch.append(item) + if len(batch) == batch_size: + yield batch + batch = [] + + if batch: + yield batch diff --git a/python_utils/import_.py b/python_utils/import_.py index 61091d8..38336eb 100644 --- a/python_utils/import_.py +++ b/python_utils/import_.py @@ -1,32 +1,60 @@ +""" +This module provides utilities for importing modules and handling exceptions. -class DummyException(Exception): - pass +Classes: + DummyError(Exception): + A custom exception class used as a default for exception handling. - -def import_global( - name, modules=None, exceptions=DummyException, locals_=None, +Functions: + import_global(name, modules=None, exceptions=DummyError, locals_=None, globals_=None, level=-1): - '''Import the requested items into the global scope + Imports the requested items into the global scope, with support for + relative imports and custom exception handling. +""" + +from . import types + + +class DummyError(Exception): + """A custom exception class used as a default for exception handling.""" + + +# Legacy alias for DummyError +DummyException = DummyError + + +def import_global( # noqa: C901 + name: str, + modules: types.Optional[types.List[str]] = None, + exceptions: types.ExceptionsType = DummyError, + locals_: types.OptionalScope = None, + globals_: types.OptionalScope = None, + level: int = -1, +) -> types.Any: # sourcery skip: hoist-if-from-if + """Import the requested items into the global scope. WARNING! this method _will_ overwrite your global scope - If you have a variable named "path" and you call import_global('sys') - it will be overwritten with sys.path + If you have a variable named `path` and you call `import_global('sys')` + it will be overwritten with `sys.path` Args: name (str): the name of the module to import, e.g. sys modules (str): the modules to import, use None for everything - exception (Exception): the exception to catch, e.g. ImportError - `locals_`: the `locals()` method (in case you need a different scope) - `globals_`: the `globals()` method (in case you need a different scope) + exceptions (Exception): the exception to catch, e.g. 
ImportError + locals_: the `locals()` method (in case you need a different scope) + globals_: the `globals()` method (in case you need a different scope) level (int): the level to import from, this can be used for relative imports - ''' + """ frame = None + name_parts: types.List[str] = name.split('.') + modules_set: types.Set[str] = set() try: # If locals_ or globals_ are not given, autodetect them by inspecting # the current stack if locals_ is None or globals_ is None: import inspect + frame = inspect.stack()[1][0] if locals_ is None: @@ -36,44 +64,52 @@ def import_global( globals_ = frame.f_globals try: - name = name.split('.') - # Relative imports are supported (from .spam import eggs) - if not name[0]: - name = name[1:] + if not name_parts[0]: + name_parts = name_parts[1:] level = 1 # raise IOError((name, level)) module = __import__( - name=name[0] or '.', + name=name_parts[0] or '.', globals=globals_, locals=locals_, - fromlist=name[1:], + fromlist=name_parts[1:], level=max(level, 0), ) # Make sure we get the right part of a dotted import (i.e. # spam.eggs should return eggs, not spam) try: - for attr in name[1:]: + for attr in name_parts[1:]: module = getattr(module, attr) - except AttributeError: - raise ImportError('No module named ' + '.'.join(name)) + except AttributeError as e: + raise ImportError( + 'No module named ' + '.'.join(name_parts) + ) from e # If no list of modules is given, autodetect from either __all__ # or a dir() of the module if not modules: - modules = getattr(module, '__all__', dir(module)) + modules_set = set(getattr(module, '__all__', dir(module))) else: - modules = set(modules).intersection(dir(module)) + modules_set = set(modules).intersection(dir(module)) # Add all items in modules to the global scope - for k in set(dir(module)).intersection(modules): + for k in set(dir(module)).intersection(modules_set): if k and k[0] != '_': globals_[k] = getattr(module, k) except exceptions as e: return e finally: # Clean up, just to be sure - del name, modules, exceptions, locals_, globals_, frame - + del ( + name, + name_parts, + modules, + modules_set, + exceptions, + locals_, + globals_, + frame, + ) diff --git a/python_utils/logger.py b/python_utils/logger.py index a5b6526..bd988c5 100644 --- a/python_utils/logger.py +++ b/python_utils/logger.py @@ -1,18 +1,141 @@ +""" +This module provides a base class `LoggerBase` and a derived class `Logged` +for adding logging capabilities to classes. The `LoggerBase` class expects +a `logger` attribute to be a `logging.Logger` or compatible instance and +provides methods for logging at various levels. The `Logged` class +automatically adds a named logger to the class. + +Classes: + LoggerBase: + A base class that adds logging utilities to a class. + Logged: + A derived class that automatically adds a named logger to a class. + +Example: + >>> class MyClass(Logged): + ... def __init__(self): + ... Logged.__init__(self) + + >>> my_class = MyClass() + >>> my_class.debug('debug') + >>> my_class.info('info') + >>> my_class.warning('warning') + >>> my_class.error('error') + >>> my_class.exception('exception') + >>> my_class.log(0, 'log') +""" + +import abc import logging -import functools + +from . import decorators __all__ = ['Logged'] +from . 
import types + +# From the logging typeshed, converted to be compatible with Python 3.8 +# https://github.com/python/typeshed/blob/main/stdlib/logging/__init__.pyi +_ExcInfoType: types.TypeAlias = types.Union[ + bool, + types.Tuple[ + types.Type[BaseException], + BaseException, + types.Union[types.TracebackType, None], + ], + types.Tuple[None, None, None], + BaseException, + None, +] +_P = types.ParamSpec('_P') +_T = types.TypeVar('_T', covariant=True) + + +class LoggerProtocol(types.Protocol): + def debug( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: types.Union[types.Mapping[str, object], None] = None, + ) -> None: ... + + def info( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: types.Union[types.Mapping[str, object], None] = None, + ) -> None: ... + + def warning( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: types.Union[types.Mapping[str, object], None] = None, + ) -> None: ... + + def error( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: types.Union[types.Mapping[str, object], None] = None, + ) -> None: ... -class Logged(object): - '''Class which automatically adds a named logger to your class when - interiting + def critical( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: types.Union[types.Mapping[str, object], None] = None, + ) -> None: ... + + def exception( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: types.Union[types.Mapping[str, object], None] = None, + ) -> None: ... + + def log( + self, + level: int, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: types.Union[types.Mapping[str, object], None] = None, + ) -> None: ... + + +class LoggerBase(abc.ABC): + """Class which automatically adds logging utilities to your class when + interiting. Expects `logger` to be a logging.Logger or compatible instance. Adds easy access to debug, info, warning, error, exception and log methods - >>> class MyClass(Logged): + >>> class MyClass(LoggerBase): + ... logger = logging.getLogger(__name__) + ... ... def __init__(self): ... Logged.__init__(self) + >>> my_class = MyClass() >>> my_class.debug('debug') >>> my_class.info('info') @@ -20,43 +143,208 @@ class Logged(object): >>> my_class.error('error') >>> my_class.exception('exception') >>> my_class.log(0, 'log') - ''' - def __new__(cls, *args, **kwargs): - cls.logger = logging.getLogger( - cls.__get_name(cls.__module__, cls.__name__)) - return super(Logged, cls).__new__(cls) + """ + + # I've tried using a protocol to properly type the logger but it gave all + # sorts of issues with mypy so we're using the lazy solution for now. 
The + # actual classes define the correct type anyway + logger: types.Any + # logger: LoggerProtocol @classmethod - def __get_name(cls, *name_parts): + def __get_name( # pyright: ignore[reportUnusedFunction] + cls, *name_parts: str + ) -> str: return '.'.join(n.strip() for n in name_parts if n.strip()) + @decorators.wraps_classmethod(logging.Logger.debug) + @classmethod + def debug( + cls, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: types.Union[types.Mapping[str, object], None] = None, + ) -> None: + return cls.logger.debug( # type: ignore[no-any-return] + msg, + *args, + exc_info=exc_info, + stack_info=stack_info, + stacklevel=stacklevel, + extra=extra, + ) + + @decorators.wraps_classmethod(logging.Logger.info) @classmethod - @functools.wraps(logging.debug) - def debug(cls, msg, *args, **kwargs): - cls.logger.debug(msg, *args, **kwargs) + def info( + cls, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: types.Union[types.Mapping[str, object], None] = None, + ) -> None: + return cls.logger.info( # type: ignore[no-any-return] + msg, + *args, + exc_info=exc_info, + stack_info=stack_info, + stacklevel=stacklevel, + extra=extra, + ) + @decorators.wraps_classmethod(logging.Logger.warning) @classmethod - @functools.wraps(logging.info) - def info(cls, msg, *args, **kwargs): - cls.logger.info(msg, *args, **kwargs) + def warning( + cls, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: types.Union[types.Mapping[str, object], None] = None, + ) -> None: + return cls.logger.warning( # type: ignore[no-any-return] + msg, + *args, + exc_info=exc_info, + stack_info=stack_info, + stacklevel=stacklevel, + extra=extra, + ) + @decorators.wraps_classmethod(logging.Logger.error) @classmethod - @functools.wraps(logging.warning) - def warning(cls, msg, *args, **kwargs): - cls.logger.warning(msg, *args, **kwargs) + def error( + cls, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: types.Union[types.Mapping[str, object], None] = None, + ) -> None: + return cls.logger.error( # type: ignore[no-any-return] + msg, + *args, + exc_info=exc_info, + stack_info=stack_info, + stacklevel=stacklevel, + extra=extra, + ) + @decorators.wraps_classmethod(logging.Logger.critical) @classmethod - @functools.wraps(logging.error) - def error(cls, msg, *args, **kwargs): - cls.logger.error(msg, *args, **kwargs) + def critical( + cls, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: types.Union[types.Mapping[str, object], None] = None, + ) -> None: + return cls.logger.critical( # type: ignore[no-any-return] + msg, + *args, + exc_info=exc_info, + stack_info=stack_info, + stacklevel=stacklevel, + extra=extra, + ) + @decorators.wraps_classmethod(logging.Logger.exception) @classmethod - @functools.wraps(logging.exception) - def exception(cls, msg, *args, **kwargs): - cls.logger.exception(msg, *args, **kwargs) + def exception( + cls, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: types.Union[types.Mapping[str, object], None] = None, + ) -> None: + return cls.logger.exception( # type: ignore[no-any-return] + msg, + *args, + exc_info=exc_info, + stack_info=stack_info, + stacklevel=stacklevel, 
+ extra=extra, + ) + @decorators.wraps_classmethod(logging.Logger.log) @classmethod - @functools.wraps(logging.log) - def log(cls, lvl, msg, *args, **kwargs): - cls.logger.log(lvl, msg, *args, **kwargs) + def log( + cls, + level: int, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: types.Union[types.Mapping[str, object], None] = None, + ) -> None: + return cls.logger.log( # type: ignore[no-any-return] + level, + msg, + *args, + exc_info=exc_info, + stack_info=stack_info, + stacklevel=stacklevel, + extra=extra, + ) + + +class Logged(LoggerBase): + """Class which automatically adds a named logger to your class when + interiting. + + Adds easy access to debug, info, warning, error, exception and log methods + >>> class MyClass(Logged): + ... def __init__(self): + ... Logged.__init__(self) + + >>> my_class = MyClass() + >>> my_class.debug('debug') + >>> my_class.info('info') + >>> my_class.warning('warning') + >>> my_class.error('error') + >>> my_class.exception('exception') + >>> my_class.log(0, 'log') + + >>> my_class._Logged__get_name('spam') + 'spam' + """ + + logger: logging.Logger # pragma: no cover + + @classmethod + def __get_name(cls, *name_parts: str) -> str: + return types.cast( + str, + LoggerBase._LoggerBase__get_name(*name_parts), # type: ignore[attr-defined] # pyright: ignore[reportUnknownMemberType, reportUnknownVariableType, reportAttributeAccessIssue] + ) + + def __new__(cls, *args: types.Any, **kwargs: types.Any) -> 'Logged': + """ + Create a new instance of the class and initialize the logger. + + The logger is named using the module and class name. + + Args: + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. + + Returns: + An instance of the class. + """ + cls.logger = logging.getLogger( + cls.__get_name(cls.__module__, cls.__name__) + ) + return super().__new__(cls) diff --git a/python_utils/loguru.py b/python_utils/loguru.py new file mode 100644 index 0000000..c1cd8ab --- /dev/null +++ b/python_utils/loguru.py @@ -0,0 +1,51 @@ +""" +This module provides a `Logurud` class that integrates the `loguru` logger +with the base logging functionality defined in `logger_module.LoggerBase`. + +Classes: + Logurud: A class that extends `LoggerBase` and uses `loguru` for logging. + +Usage example: + >>> from python_utils.loguru import Logurud + >>> class MyClass(Logurud): + ... def __init__(self): + ... Logurud.__init__(self) + >>> my_class = MyClass() + >>> my_class.logger.info('This is an info message') +""" + +from __future__ import annotations + +import typing + +import loguru + +from . import logger as logger_module + +__all__ = ['Logurud'] + + +class Logurud(logger_module.LoggerBase): + """ + A class that extends `LoggerBase` and uses `loguru` for logging. + + Attributes: + logger (loguru.Logger): The `loguru` logger instance. + """ + + logger: loguru.Logger + + def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Logurud: + """ + Creates a new instance of `Logurud` and initializes the `loguru` + logger. + + Args: + *args (typing.Any): Variable length argument list. + **kwargs (typing.Any): Arbitrary keyword arguments. + + Returns: + Logurud: A new instance of `Logurud`. 
+ """ + cls.logger: loguru.Logger = loguru.logger.opt(depth=1) + return super().__new__(cls) diff --git a/python_utils/compat.py b/python_utils/py.typed similarity index 100% rename from python_utils/compat.py rename to python_utils/py.typed diff --git a/python_utils/terminal.py b/python_utils/terminal.py index d51e8ed..c87a6ca 100644 --- a/python_utils/terminal.py +++ b/python_utils/terminal.py @@ -1,8 +1,33 @@ +""" +This module provides functions to get the terminal size across different +platforms. + +Functions: + get_terminal_size: Get the current size of the terminal. + _get_terminal_size_windows: Get terminal size on Windows. + _get_terminal_size_tput: Get terminal size using `tput`. + _get_terminal_size_linux: Get terminal size on Linux. + +Usage example: + >>> width, height = get_terminal_size() +""" + +from __future__ import annotations + +import contextlib import os +import typing + +from . import converters +Dimensions = tuple[int, int] +OptionalDimensions = typing.Optional[Dimensions] +_StrDimensions = tuple[str, str] +_OptionalStrDimensions = typing.Optional[_StrDimensions] -def get_terminal_size(): # pragma: no cover - '''Get the current size of your terminal + +def get_terminal_size() -> Dimensions: # pragma: no cover + """Get the current size of your terminal. Multiple returns are not always a good idea, but in this case it greatly simplifies the code so I believe it's justified. It's not the prettiest @@ -10,78 +35,70 @@ def get_terminal_size(): # pragma: no cover Returns: width, height: Two integers containing width and height - ''' + """ + w: int | None + h: int | None - try: + with contextlib.suppress(Exception): # Default to 79 characters for IPython notebooks - from IPython import get_ipython - ipython = get_ipython() - from ipykernel import zmqshell + from IPython import get_ipython # type: ignore[attr-defined] + + ipython = get_ipython() # type: ignore[no-untyped-call] + from ipykernel import zmqshell # type: ignore[import-not-found] + if isinstance(ipython, zmqshell.ZMQInteractiveShell): return 79, 24 - except Exception: # pragma: no cover - pass - - try: + with contextlib.suppress(Exception): # This works for Python 3, but not Pypy3. Probably the best method if # it's supported so let's always try import shutil + w, h = shutil.get_terminal_size() if w and h: # The off by one is needed due to progressbars in some cases, for # safety we'll always substract it. 
return w - 1, h - except Exception: # pragma: no cover - pass - - try: - w = int(os.environ.get('COLUMNS')) - h = int(os.environ.get('LINES')) + with contextlib.suppress(Exception): + w = converters.to_int(os.environ.get('COLUMNS')) + h = converters.to_int(os.environ.get('LINES')) if w and h: return w, h - except Exception: # pragma: no cover - pass + with contextlib.suppress(Exception): + import blessings # type: ignore[import-untyped] - try: - import blessings terminal = blessings.Terminal() w = terminal.width h = terminal.height if w and h: return w, h - except Exception: # pragma: no cover - pass + with contextlib.suppress(Exception): + # The method can return None so we don't unpack it + wh = _get_terminal_size_linux() + if wh is not None and all(wh): + return wh - try: - w, h = _get_terminal_size_linux() - if w and h: - return w, h - except Exception: # pragma: no cover - pass - - try: + with contextlib.suppress(Exception): # Windows detection doesn't always work, let's try anyhow - w, h = _get_terminal_size_windows() - if w and h: - return w, h - except Exception: # pragma: no cover - pass + wh = _get_terminal_size_windows() + if wh is not None and all(wh): + return wh - try: + with contextlib.suppress(Exception): # needed for window's python in cygwin's xterm! - w, h = _get_terminal_size_tput() - if w and h: - return w, h - except Exception: # pragma: no cover - pass + wh = _get_terminal_size_tput() + if wh is not None and all(wh): + return wh return 79, 24 -def _get_terminal_size_windows(): # pragma: no cover +def _get_terminal_size_windows() -> OptionalDimensions: # pragma: no cover res = None try: - from ctypes import windll, create_string_buffer + from ctypes import ( # type: ignore[attr-defined] + create_string_buffer, + windll, + ) # stdin handle is -10 # stdout handle is -11 @@ -95,8 +112,10 @@ def _get_terminal_size_windows(): # pragma: no cover if res: import struct - (_, _, _, _, _, left, top, right, bottom, _, _) = \ - struct.unpack("hhhhHhhhhhh", csbi.raw) + + (_, _, _, _, _, left, top, right, bottom, _, _) = struct.unpack( + 'hhhhHhhhhhh', csbi.raw + ) w = right - left h = bottom - top return w, h @@ -104,46 +123,58 @@ def _get_terminal_size_windows(): # pragma: no cover return None -def _get_terminal_size_tput(): # pragma: no cover +def _get_terminal_size_tput() -> OptionalDimensions: # pragma: no cover # get terminal width src: http://stackoverflow.com/questions/263890/ try: import subprocess + proc = subprocess.Popen( - ['tput', 'cols'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, - stderr=subprocess.PIPE) + ['tput', 'cols'], + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) output = proc.communicate(input=None) w = int(output[0]) proc = subprocess.Popen( - ['tput', 'lines'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, - stderr=subprocess.PIPE) + ['tput', 'lines'], + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) output = proc.communicate(input=None) h = int(output[0]) - return w, h except Exception: return None + else: + return w, h -def _get_terminal_size_linux(): # pragma: no cover - def ioctl_GWINSZ(fd): +def _get_terminal_size_linux() -> OptionalDimensions: # pragma: no cover + def ioctl_gwinsz(fd: int) -> tuple[str, str] | None: try: import fcntl - import termios import struct - size = struct.unpack( - 'hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234')) + import termios + + return typing.cast( + _OptionalStrDimensions, + struct.unpack( + 'hh', + fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'), # 
type: ignore[call-overload] + ), + ) except Exception: return None - return size - size = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2) + size: _OptionalStrDimensions + size = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2) if not size: - try: + with contextlib.suppress(Exception): fd = os.open(os.ctermid(), os.O_RDONLY) - size = ioctl_GWINSZ(fd) + size = ioctl_gwinsz(fd) os.close(fd) - except Exception: - pass if not size: try: size = os.environ['LINES'], os.environ['COLUMNS'] diff --git a/python_utils/time.py b/python_utils/time.py index 1d4b510..224d8e1 100644 --- a/python_utils/time.py +++ b/python_utils/time.py @@ -1,8 +1,35 @@ -from __future__ import absolute_import -import six -import time +""" +This module provides utility functions for handling time-related operations. + +Functions: +- timedelta_to_seconds: Convert a timedelta to seconds with microseconds as + fraction. +- delta_to_seconds: Convert a timedelta or numeric interval to seconds. +- delta_to_seconds_or_none: Convert a timedelta to seconds or return None. +- format_time: Format a timestamp (timedelta, datetime, or seconds) to a + string. +- timeout_generator: Generate items from an iterable until a timeout is + reached. +- aio_timeout_generator: Asynchronously generate items from an iterable until a + timeout is reached. +- aio_generator_timeout_detector: Detect if an async generator has not yielded + an element for a set amount of time. +- aio_generator_timeout_detector_decorator: Decorator for + aio_generator_timeout_detector. +""" + +# pyright: reportUnnecessaryIsInstance=false +import asyncio import datetime +import functools import itertools +import time + +import python_utils +from python_utils import aio, exceptions, types + +_T = types.TypeVar('_T') +_P = types.ParamSpec('_P') # There might be a better way to get the epoch with tzinfo, please create @@ -10,8 +37,8 @@ epoch = datetime.datetime(year=1970, month=1, day=1) -def timedelta_to_seconds(delta): - '''Convert a timedelta to seconds with the microseconds as fraction +def timedelta_to_seconds(delta: datetime.timedelta) -> types.Number: + """Convert a timedelta to seconds with the microseconds as fraction. Note that this method has become largely obsolete with the `timedelta.total_seconds()` method introduced in Python 2.7. @@ -25,7 +52,7 @@ def timedelta_to_seconds(delta): '1.000001' >>> '%.6f' % timedelta_to_seconds(timedelta(microseconds=1)) '0.000001' - ''' + """ # Only convert to float if needed if delta.microseconds: total = delta.microseconds * 1e-6 @@ -36,8 +63,44 @@ def timedelta_to_seconds(delta): return total -def format_time(timestamp, precision=datetime.timedelta(seconds=1)): - '''Formats timedelta/datetime/seconds +def delta_to_seconds(interval: types.delta_type) -> types.Number: + """ + Convert a timedelta to seconds. + + >>> delta_to_seconds(datetime.timedelta(seconds=1)) + 1 + >>> delta_to_seconds(datetime.timedelta(seconds=1, microseconds=1)) + 1.000001 + >>> delta_to_seconds(1) + 1 + >>> delta_to_seconds('whatever') # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + TypeError: Unknown type ... 
+ """ + if isinstance(interval, datetime.timedelta): + return timedelta_to_seconds(interval) + elif isinstance(interval, (int, float)): + return interval + else: + raise TypeError(f'Unknown type {type(interval)}: {interval!r}') + + +def delta_to_seconds_or_none( + interval: types.Optional[types.delta_type], +) -> types.Optional[types.Number]: + """Convert a timedelta to seconds or return None.""" + if interval is None: + return None + else: + return delta_to_seconds(interval) + + +def format_time( + timestamp: types.timestamp_type, + precision: datetime.timedelta = datetime.timedelta(seconds=1), +) -> str: + """Formats timedelta/datetime/seconds. >>> format_time('1') '0:00:01' @@ -58,13 +121,15 @@ def format_time(timestamp, precision=datetime.timedelta(seconds=1)): ... TypeError: Unknown type ... - ''' + """ precision_seconds = precision.total_seconds() - if isinstance(timestamp, six.string_types + six.integer_types + (float, )): + if isinstance(timestamp, str): + timestamp = float(timestamp) + + if isinstance(timestamp, (int, float)): try: - castfunc = six.integer_types[-1] - timestamp = datetime.timedelta(seconds=castfunc(timestamp)) + timestamp = datetime.timedelta(seconds=timestamp) except OverflowError: # pragma: no cover timestamp = None @@ -85,11 +150,8 @@ def format_time(timestamp, precision=datetime.timedelta(seconds=1)): seconds = seconds - (seconds % precision_seconds) try: # pragma: no cover - if six.PY3: - dt = datetime.datetime.fromtimestamp(seconds) - else: - dt = datetime.datetime.utcfromtimestamp(seconds) - except ValueError: # pragma: no cover + dt = datetime.datetime.fromtimestamp(seconds) + except (ValueError, OSError): # pragma: no cover dt = datetime.datetime.max return str(dt) elif isinstance(timestamp, datetime.date): @@ -97,20 +159,60 @@ def format_time(timestamp, precision=datetime.timedelta(seconds=1)): elif timestamp is None: return '--:--:--' else: - raise TypeError('Unknown type %s: %r' % (type(timestamp), timestamp)) + raise TypeError(f'Unknown type {type(timestamp)}: {timestamp!r}') + + +@types.overload +def _to_iterable( + iterable: types.Union[ + types.Callable[[], types.AsyncIterable[_T]], + types.AsyncIterable[_T], + ], +) -> types.AsyncIterable[_T]: ... + + +@types.overload +def _to_iterable( + iterable: types.Union[ + types.Callable[[], types.Iterable[_T]], types.Iterable[_T] + ], +) -> types.Iterable[_T]: ... + + +def _to_iterable( + iterable: types.Union[ + types.Iterable[_T], + types.Callable[[], types.Iterable[_T]], + types.AsyncIterable[_T], + types.Callable[[], types.AsyncIterable[_T]], + ], +) -> types.Union[types.Iterable[_T], types.AsyncIterable[_T]]: + if callable(iterable): + return iterable() + else: + return iterable def timeout_generator( - timeout, - interval=datetime.timedelta(seconds=1), - iterable=itertools.count, - interval_multiplier=1.0, -): - ''' + timeout: types.delta_type, + interval: types.delta_type = datetime.timedelta(seconds=1), + iterable: types.Union[ + types.Iterable[_T], types.Callable[[], types.Iterable[_T]] + ] = itertools.count, # type: ignore[assignment] + interval_multiplier: float = 1.0, + maximum_interval: types.Optional[types.delta_type] = None, +) -> types.Iterable[_T]: + """ Generator that walks through the given iterable (a counter by default) - until the timeout is reached with a configurable interval between items + until the float_timeout is reached with a configurable float_interval + between items. + + This can be used to limit the time spent on a slow operation. 
This can be + useful for testing slow APIs so you get a small sample of the data in a + reasonable amount of time. >>> for i in timeout_generator(0.1, 0.06): + ... # Put your slow code here ... print(i) 0 1 @@ -133,31 +235,174 @@ def timeout_generator( ... print(i) 0 1 - ''' + 2 + """ + float_interval: float = delta_to_seconds(interval) + float_maximum_interval: types.Optional[float] = delta_to_seconds_or_none( + maximum_interval + ) + iterable_ = _to_iterable(iterable) - if isinstance(interval, datetime.timedelta): - interval = timedelta_to_seconds(interval) + end = delta_to_seconds(timeout) + time.perf_counter() + for item in iterable_: + yield item - if isinstance(timeout, datetime.timedelta): - timeout = timedelta_to_seconds(timeout) + if time.perf_counter() >= end: + break + + time.sleep(float_interval) + + float_interval *= interval_multiplier + if float_maximum_interval: + float_interval = min(float_interval, float_maximum_interval) - if callable(iterable): - iterable = iterable() - interval *= interval_multiplier - time.sleep(interval) +async def aio_timeout_generator( + timeout: types.delta_type, # noqa: ASYNC109 + interval: types.delta_type = datetime.timedelta(seconds=1), + iterable: types.Union[ + types.AsyncIterable[_T], types.Callable[..., types.AsyncIterable[_T]] + ] = aio.acount, + interval_multiplier: float = 1.0, + maximum_interval: types.Optional[types.delta_type] = None, +) -> types.AsyncGenerator[_T, None]: + """ + Async generator that walks through the given async iterable (a counter by + default) until the float_timeout is reached with a configurable + float_interval between items. - if six.PY3: # pragma: no cover - timer = time.perf_counter - else: # pragma: no cover - timer = time.time + The interval_exponent automatically increases the float_timeout with each + run. Note that if the float_interval is less than 1, 1/interval_exponent + will be used so the float_interval is always growing. To double the + float_interval with each run, specify 2. - end = timeout + timer() - for item in iterable: + Doctests and asyncio are not friends, so no examples. But this function is + effectively the same as the `timeout_generator` but it uses `async for` + instead. + """ + float_interval: float = delta_to_seconds(interval) + float_maximum_interval: types.Optional[float] = delta_to_seconds_or_none( + maximum_interval + ) + iterable_ = _to_iterable(iterable) + + end = delta_to_seconds(timeout) + time.perf_counter() + async for item in iterable_: # pragma: no branch yield item - if timer() >= end: + if time.perf_counter() >= end: break - interval *= interval_multiplier - time.sleep(interval) + await asyncio.sleep(float_interval) + + float_interval *= interval_multiplier + if float_maximum_interval: # pragma: no branch + float_interval = min(float_interval, float_maximum_interval) + + +async def aio_generator_timeout_detector( + generator: types.AsyncGenerator[_T, None], + timeout: types.Optional[types.delta_type] = None, # noqa: ASYNC109 + total_timeout: types.Optional[types.delta_type] = None, + on_timeout: types.Optional[ + types.Callable[ + [ + types.AsyncGenerator[_T, None], + types.Optional[types.delta_type], + types.Optional[types.delta_type], + BaseException, + ], + types.Any, + ] + ] = exceptions.reraise, + **on_timeout_kwargs: types.Mapping[types.Text, types.Any], +) -> types.AsyncGenerator[_T, None]: + """ + This function is used to detect if an asyncio generator has not yielded + an element for a set amount of time. 
+ + The `on_timeout` argument is called with the `generator`, `timeout`, + `total_timeout`, `exception` and the extra `**kwargs` to this function as + arguments. + If `on_timeout` is not specified, the exception is reraised. + If `on_timeout` is `None`, the exception is silently ignored and the + generator will finish as normal. + """ + if total_timeout is None: + total_timeout_end = None + else: + total_timeout_end = time.perf_counter() + delta_to_seconds( + total_timeout + ) + + timeout_s = python_utils.delta_to_seconds_or_none(timeout) + + while True: + try: + if total_timeout_end and time.perf_counter() >= total_timeout_end: + raise asyncio.TimeoutError( # noqa: TRY301 + 'Total timeout reached' + ) + + if timeout_s: + yield await asyncio.wait_for(generator.__anext__(), timeout_s) + else: + yield await generator.__anext__() + + except asyncio.TimeoutError as exception: # noqa: PERF203 + if on_timeout is not None: + await on_timeout( + generator, + timeout, + total_timeout, + exception, + **on_timeout_kwargs, + ) + break + + except StopAsyncIteration: + break + + +def aio_generator_timeout_detector_decorator( + timeout: types.Optional[types.delta_type] = None, + total_timeout: types.Optional[types.delta_type] = None, + on_timeout: types.Optional[ + types.Callable[ + [ + types.AsyncGenerator[types.Any, None], + types.Optional[types.delta_type], + types.Optional[types.delta_type], + BaseException, + ], + types.Any, + ] + ] = exceptions.reraise, + **on_timeout_kwargs: types.Mapping[types.Text, types.Any], +) -> types.Callable[ + [types.Callable[_P, types.AsyncGenerator[_T, None]]], + types.Callable[_P, types.AsyncGenerator[_T, None]], +]: + """A decorator wrapper for aio_generator_timeout_detector.""" + + def _timeout_detector_decorator( + generator: types.Callable[_P, types.AsyncGenerator[_T, None]], + ) -> types.Callable[_P, types.AsyncGenerator[_T, None]]: + """The decorator itself.""" + + @functools.wraps(generator) + def wrapper( + *args: _P.args, + **kwargs: _P.kwargs, + ) -> types.AsyncGenerator[_T, None]: + return aio_generator_timeout_detector( + generator(*args, **kwargs), + timeout, + total_timeout, + on_timeout, + **on_timeout_kwargs, + ) + + return wrapper + + return _timeout_detector_decorator diff --git a/python_utils/types.py b/python_utils/types.py new file mode 100644 index 0000000..ab89c43 --- /dev/null +++ b/python_utils/types.py @@ -0,0 +1,181 @@ +""" +This module provides type definitions and utility functions for type hinting. + +It includes: +- Shorthand for commonly used types such as Optional and Union. +- Type aliases for various data structures and common types. +- Importing all types from the `typing` and `typing_extensions` modules. +- Importing specific types from the `types` module. + +The module also configures Pyright to ignore wildcard import warnings. +""" +# pyright: reportWildcardImportFromLibrary=false +# ruff: noqa: F405 + +import datetime +import decimal +from re import Match, Pattern +from types import * # pragma: no cover # noqa: F403 +from typing import * # pragma: no cover # noqa: F403 + +# import * does not import these in all Python versions +# Quickhand for optional because it gets so much use. If only Python had +# support for an optional type shorthand such as `SomeType?` instead of +# `Optional[SomeType]`. +# Since the Union operator is only supported for Python 3.10, we'll create a +# shorthand for it. 
+from typing import ( + IO, + BinaryIO, + Optional as O, # noqa: N817 + TextIO, + Union as U, # noqa: N817 +) + +from typing_extensions import * # type: ignore[no-redef,assignment] # noqa: F403 + +Scope = Dict[str, Any] +OptionalScope = O[Scope] +Number = U[int, float] +DecimalNumber = U[Number, decimal.Decimal] +ExceptionType = Type[Exception] +ExceptionsType = U[Tuple[ExceptionType, ...], ExceptionType] +StringTypes = U[str, bytes] + +delta_type = U[datetime.timedelta, int, float] +timestamp_type = U[ + datetime.timedelta, + datetime.date, + datetime.datetime, + str, + int, + float, + None, +] + +__all__ = [ + 'IO', + 'TYPE_CHECKING', + # ABCs (from collections.abc). + 'AbstractSet', + # The types from the typing module. + # Super-special typing primitives. + 'Annotated', + 'Any', + # One-off things. + 'AnyStr', + 'AsyncContextManager', + 'AsyncGenerator', + 'AsyncGeneratorType', + 'AsyncIterable', + 'AsyncIterator', + 'Awaitable', + # Other concrete types. + 'BinaryIO', + 'BuiltinFunctionType', + 'BuiltinMethodType', + 'ByteString', + 'Callable', + # Concrete collection types. + 'ChainMap', + 'ClassMethodDescriptorType', + 'ClassVar', + 'CodeType', + 'Collection', + 'Concatenate', + 'Container', + 'ContextManager', + 'Coroutine', + 'CoroutineType', + 'Counter', + 'DecimalNumber', + 'DefaultDict', + 'Deque', + 'Dict', + 'DynamicClassAttribute', + 'Final', + 'ForwardRef', + 'FrameType', + 'FrozenSet', + # Types from the `types` module. + 'FunctionType', + 'Generator', + 'GeneratorType', + 'Generic', + 'GetSetDescriptorType', + 'Hashable', + 'ItemsView', + 'Iterable', + 'Iterator', + 'KeysView', + 'LambdaType', + 'List', + 'Literal', + 'Mapping', + 'MappingProxyType', + 'MappingView', + 'Match', + 'MemberDescriptorType', + 'MethodDescriptorType', + 'MethodType', + 'MethodWrapperType', + 'ModuleType', + 'MutableMapping', + 'MutableSequence', + 'MutableSet', + 'NamedTuple', # Not really a type. + 'NewType', + 'NoReturn', + 'Number', + 'Optional', + 'OptionalScope', + 'OrderedDict', + 'ParamSpec', + 'ParamSpecArgs', + 'ParamSpecKwargs', + 'Pattern', + 'Protocol', + # Structural checks, a.k.a. protocols. + 'Reversible', + 'Sequence', + 'Set', + 'SimpleNamespace', + 'Sized', + 'SupportsAbs', + 'SupportsBytes', + 'SupportsComplex', + 'SupportsFloat', + 'SupportsIndex', + 'SupportsIndex', + 'SupportsInt', + 'SupportsRound', + 'Text', + 'TextIO', + 'TracebackType', + 'TracebackType', + 'Tuple', + 'Type', + 'TypeAlias', + 'TypeGuard', + 'TypeVar', + 'TypedDict', # Not really a type. 
+ 'Union', + 'ValuesView', + 'WrapperDescriptorType', + 'cast', + 'coroutine', + 'delta_type', + 'final', + 'get_args', + 'get_origin', + 'get_type_hints', + 'is_typeddict', + 'new_class', + 'no_type_check', + 'no_type_check_decorator', + 'overload', + 'prepare_class', + 'resolve_bases', + 'runtime_checkable', + 'timestamp_type', +] diff --git a/ruff.toml b/ruff.toml new file mode 100644 index 0000000..294cb3c --- /dev/null +++ b/ruff.toml @@ -0,0 +1,114 @@ +# We keep the ruff configuration separate so it can easily be shared across +# all projects + +target-version = 'py39' + +exclude = [ + '.venv', + '.tox', + # Ignore local test files/directories/old-stuff + 'test.py', + '*_old.py', +] + +line-length = 79 + +[lint] +ignore = [ + 'A001', # Variable {name} is shadowing a Python builtin + 'A002', # Argument {name} is shadowing a Python builtin + 'A003', # Class attribute {name} is shadowing a Python builtin + 'B023', # function-uses-loop-variable + 'B024', # `FormatWidgetMixin` is an abstract base class, but it has no abstract methods + 'D205', # blank-line-after-summary + 'D212', # multi-line-summary-first-line + 'RET505', # Unnecessary `else` after `return` statement + 'TRY003', # Avoid specifying long messages outside the exception class + 'RET507', # Unnecessary `elif` after `continue` statement + 'C405', # Unnecessary {obj_type} literal (rewrite as a set literal) + 'C406', # Unnecessary {obj_type} literal (rewrite as a dict literal) + 'C408', # Unnecessary {obj_type} call (rewrite as a literal) + 'SIM114', # Combine `if` branches using logical `or` operator + 'RET506', # Unnecessary `else` after `raise` statement + 'Q001', # Remove bad quotes + 'Q002', # Remove bad quotes + 'FA100', # Missing `from __future__ import annotations`, but uses `typing.Optional` + 'COM812', # Missing trailing comma in a list + 'ISC001', # String concatenation with implicit str conversion + 'SIM108', # Ternary operators are not always more readable + 'RUF100', # Unused noqa directives. 
Due to multiple Python versions, we need to keep them +] + +select = [ + 'A', # flake8-builtins + 'ASYNC', # flake8 async checker + 'B', # flake8-bugbear + 'C4', # flake8-comprehensions + 'C90', # mccabe + 'COM', # flake8-commas + + ## Require docstrings for all public methods, would be good to enable at some point + 'D', # pydocstyle + + 'E', # pycodestyle error ('W' for warning) + 'F', # pyflakes + 'FA', # flake8-future-annotations + 'I', # isort + 'ICN', # flake8-import-conventions + 'INP', # flake8-no-pep420 + 'ISC', # flake8-implicit-str-concat + 'N', # pep8-naming + 'NPY', # NumPy-specific rules + 'PERF', # perflint, + 'PIE', # flake8-pie + 'Q', # flake8-quotes + + 'RET', # flake8-return + 'RUF', # Ruff-specific rules + 'SIM', # flake8-simplify + 'T20', # flake8-print + 'TD', # flake8-todos + 'TRY', # tryceratops + 'UP', # pyupgrade +] + +[lint.per-file-ignores] +'*tests/*' = ['INP001', 'T201', 'T203', 'ASYNC109', 'B007'] +'examples.py' = ['T201', 'N806'] +'docs/conf.py' = ['E501', 'INP001'] +'docs/_theme/flask_theme_support.py' = ['RUF012', 'INP001'] +'*/types.py' = ['F405'] + +[lint.pydocstyle] +convention = 'google' +ignore-decorators = [ + 'typing.overload', + 'typing.override', +] + +[lint.isort] +case-sensitive = true +combine-as-imports = true +force-wrap-aliases = true + +[lint.flake8-quotes] +docstring-quotes = 'single' +inline-quotes = 'single' +multiline-quotes = 'single' + +[format] +line-ending = 'lf' +indent-style = 'space' +quote-style = 'single' +docstring-code-format = true +skip-magic-trailing-comma = false +exclude = [ + '__init__.py', +] + +[lint.pycodestyle] +max-line-length = 79 + +[lint.flake8-pytest-style] +mark-parentheses = true + diff --git a/setup.cfg b/setup.cfg index fb5b3dd..ca2401e 100644 --- a/setup.cfg +++ b/setup.cfg @@ -29,3 +29,14 @@ universal = 1 [upload] sign = 1 +[flake8] +per-file-ignores = + python_utils/types.py: F403,F405 +ignore = W391, W503, E741, E203, F811 +exclude = + docs + +[mypy] +files = + python_utils, + _python_utils_tests diff --git a/setup.py b/setup.py index b07cf4d..8a66f9d 100644 --- a/setup.py +++ b/setup.py @@ -1,26 +1,34 @@ -import os -import sys +""" +Setup script for the python-utils package. + +This script uses setuptools to package the python-utils library. It reads +metadata from the `python_utils/__about__.py` file and the `README.rst` file to +populate the package information. The script also defines the package +requirements and optional dependencies for different use cases such as logging, +documentation, and testing. 
+""" + +import pathlib + import setuptools +# pyright: reportUnknownMemberType=false + # To prevent importing about and thereby breaking the coverage info we use this # exec hack -about = {} +about: dict[str, str] = {} with open('python_utils/__about__.py') as fp: exec(fp.read(), about) - -if os.path.isfile('README.rst'): - long_description = open('README.rst').read() +_readme_path = pathlib.Path(__file__).parent / 'README.rst' +if _readme_path.exists() and _readme_path.is_file(): + long_description = _readme_path.read_text() else: long_description = 'See http://pypi.python.org/pypi/python-utils/' - -needs_pytest = set(['ptr', 'pytest', 'test']).intersection(sys.argv) -pytest_runner = ['pytest-runner'] if needs_pytest else [] - - if __name__ == '__main__': setuptools.setup( + python_requires='>=3.9.0', name='python-utils', version=about['__version__'], author=about['__author__'], @@ -28,27 +36,35 @@ description=about['__description__'], url=about['__url__'], license='BSD', - packages=setuptools.find_packages(exclude=[ - '_python_utils_tests', '*.__pycache__']), + packages=setuptools.find_packages( + exclude=['_python_utils_tests', '*.__pycache__'], + ), + package_data={'python_utils': ['py.typed']}, long_description=long_description, - install_requires=['six'], - tests_require=['pytest'], + install_requires=['typing_extensions>3.10.0.2'], extras_require={ + 'loguru': [ + 'loguru', + ], 'docs': [ - 'six', 'mock', 'sphinx', 'python-utils', ], 'tests': [ - 'flake8', + 'ruff', + 'pyright', 'pytest', 'pytest-cov', - 'pytest-flake8', + 'pytest-mypy', + 'pytest-asyncio', 'sphinx', + 'types-setuptools', + 'loguru', + 'loguru-mypy', + 'mypy-ipython', + 'blessings', ], }, - setup_requires=[] + pytest_runner, classifiers=['License :: OSI Approved :: BSD License'], ) - diff --git a/tox.ini b/tox.ini index 1b2b296..03f2717 100644 --- a/tox.ini +++ b/tox.ini @@ -1,29 +1,55 @@ [tox] -envlist = py27, py35, py36, py37, py38, py39, py310, pypy, flake8, docs +envlist = ruff, black, pypy3, py39, py310, py311, py312, py313, docs, mypy, pyright skip_missing_interpreters = True [testenv] basepython = - py27: python2.7 - py35: python3.5 - py36: python3.6 - py37: python3.7 - py38: python3.8 py39: python3.9 py310: python3.10 - pypy: pypy + py311: python3.11 + py312: python3.12 + py313: python3.13 + pypy3: pypy3 setenv = PY_IGNORE_IMPORTMISMATCH=1 -deps = -r{toxinidir}/_python_utils_tests/requirements.txt -commands = py.test --basetemp="{envtmpdir}" --confcutdir=.. {posargs} python_utils _python_utils_tests +deps = + mypy + pyright + -r{toxinidir}/_python_utils_tests/requirements.txt +commands = + mypy + pyright + py.test --basetemp="{envtmpdir}" --confcutdir=.. 
{posargs} python_utils _python_utils_tests + +[testenv:ruff] +basepython = python3 +deps = ruff +commands = ruff check {toxinidir}/setup.py {toxinidir}/_python_utils_tests {toxinidir}/python_utils + +[testenv:black] +basepython = python3 +deps = black +commands = black --skip-string-normalization --line-length 79 {toxinidir}/setup.py {toxinidir}/_python_utils_tests {toxinidir}/python_utils + +[testenv:pyright] +basepython = python3 +deps = + pyright + -r{toxinidir}/_python_utils_tests/requirements.txt +commands = pyright {posargs} -[testenv:flake8] +[testenv:mypy] basepython = python3 -deps = flake8 -commands = flake8 python_utils {posargs} +deps = -r{toxinidir}/_python_utils_tests/requirements.txt +commands = mypy {posargs} [testenv:docs] +changedir = basepython = python3 +deps = -r{toxinidir}/docs/requirements.txt +allowlist_externals = + rm + mkdir whitelist_externals = rm cd @@ -35,10 +61,4 @@ commands = sphinx-apidoc -o docs/ python_utils rm -f docs/modules.rst sphinx-build -W -b html -d docs/_build/doctrees docs docs/_build/html {posargs} -deps = -r{toxinidir}/docs/requirements.txt - -[flake8] -ignore = W391, W504, E741 -exclude = - docs
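
The docstrings for `abatcher` and `aio_timeout_generator` in the patch above note that doctests and asyncio do not mix, so no runnable examples ship with them. The following is a rough usage sketch, not part of the patch: the demo function names (`sync_demo`, `async_demo`, `numbers`) are made up, and the behaviour (batch sizes, default async counter, timing) is assumed from the docstrings above rather than verified against the released package.

# Illustrative sketch only -- assumes the helpers behave as documented above.
import asyncio
import datetime

from python_utils.generators import abatcher, batcher
from python_utils.time import aio_timeout_generator


def sync_demo() -> None:
    # batcher: group a plain iterable into fixed-size lists.
    assert list(batcher(range(5), batch_size=2)) == [[0, 1], [2, 3], [4]]


async def async_demo() -> None:
    # aio_timeout_generator: iterate the default async counter until roughly
    # 0.1 seconds have passed, sleeping ~0.02s between items. The exact number
    # of yielded items depends on timing, so we only print them.
    async for i in aio_timeout_generator(
        timeout=datetime.timedelta(seconds=0.1),
        interval=datetime.timedelta(seconds=0.02),
    ):
        print('counter item:', i)

    # abatcher: collect items from an async generator into batches of three.
    async def numbers():
        for i in range(7):
            yield i

    async for batch in abatcher(numbers(), batch_size=3):
        print('batch:', batch)  # expected: [0, 1, 2], [3, 4, 5], [6]


if __name__ == '__main__':
    sync_demo()
    asyncio.run(async_demo())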