diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..241e4cc --- /dev/null +++ b/.coveragerc @@ -0,0 +1,2 @@ +[run] +omit = stream/tests/* \ No newline at end of file diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000..5e8b594 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1 @@ +* @JimmyPettersson85 @xernobyl diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..2545a09 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,41 @@ +name: build +on: + push: + branches: + - 'main' + pull_request: + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref }} + cancel-in-progress: true + +jobs: + build: + name: ๐Ÿงช Test & lint + runs-on: ubuntu-latest + strategy: + max-parallel: 1 + matrix: + python: ['3.8', '3.9', '3.10', '3.11', '3.12'] + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 # gives the commit linter access to previous commits + + - uses: actions/setup-python@v3 + with: + python-version: ${{ matrix.python }} + + - name: Install deps with ${{ matrix.python }} + run: pip install -q ".[test, ci]" + + - name: Lint with ${{ matrix.python }} + if: ${{ matrix.python == '3.8' }} + run: make lint + + - name: Install, test and code coverage with ${{ matrix.python }} + env: + STREAM_KEY: ${{ secrets.STREAM_KEY }} + STREAM_SECRET: ${{ secrets.STREAM_SECRET }} + PYTHONPATH: ${{ github.workspace }} + run: make test diff --git a/.github/workflows/initiate_release.yml b/.github/workflows/initiate_release.yml new file mode 100644 index 0000000..0af41fd --- /dev/null +++ b/.github/workflows/initiate_release.yml @@ -0,0 +1,47 @@ +name: Create release PR + +on: + workflow_dispatch: + inputs: + version: + description: "The new version number with 'v' prefix. 
Example: v1.40.1" + required: true + +jobs: + init_release: + name: ๐Ÿš€ Create release PR + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 # gives the changelog generator access to all previous commits + + - name: Update CHANGELOG.md, __pkg__.py and push release branch + env: + VERSION: ${{ github.event.inputs.version }} + run: | + npx --yes standard-version@9.3.2 --release-as "$VERSION" --skip.tag --skip.commit --tag-prefix=v + git config --global user.name 'github-actions' + git config --global user.email 'release@getstream.io' + git checkout -q -b "release-$VERSION" + git commit -am "chore(release): $VERSION" + git push -q -u origin "release-$VERSION" + + - name: Get changelog diff + uses: actions/github-script@v5 + with: + script: | + const get_change_log_diff = require('./scripts/get_changelog_diff.js') + core.exportVariable('CHANGELOG', get_change_log_diff()) + + - name: Open pull request + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + gh pr create \ + -t "chore(release): ${{ github.event.inputs.version }}" \ + -b "# :rocket: ${{ github.event.inputs.version }} + Make sure to use squash & merge when merging! + Once this is merged, another job will kick off automatically and publish the package. 
+ # :memo: Changelog + ${{ env.CHANGELOG }}" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..5edd544 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,48 @@ +name: Release + +on: + pull_request: + types: [closed] + branches: + - main + +jobs: + Release: + name: ๐Ÿš€ Release + if: github.event.pull_request.merged && startsWith(github.head_ref, 'release-') + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - uses: actions/github-script@v5 + with: + script: | + const get_change_log_diff = require('./scripts/get_changelog_diff.js') + core.exportVariable('CHANGELOG', get_change_log_diff()) + + // Getting the release version from the PR source branch + // Source branch looks like this: release-1.0.0 + const version = context.payload.pull_request.head.ref.split('-')[1] + core.exportVariable('VERSION', version) + + - uses: actions/setup-python@v3 + with: + python-version: "3.10" + + - name: Publish to PyPi + env: + TWINE_USERNAME: "__token__" + TWINE_PASSWORD: "${{ secrets.PYPI_TOKEN }}" + run: | + pip install -q twine==3.7.1 wheel==0.37.1 + python setup.py sdist bdist_wheel + twine upload --non-interactive dist/* + + - name: Create release on GitHub + uses: ncipollo/release-action@v1 + with: + body: ${{ env.CHANGELOG }} + tag: ${{ env.VERSION }} + token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/reviewdog.yml b/.github/workflows/reviewdog.yml new file mode 100644 index 0000000..fc88763 --- /dev/null +++ b/.github/workflows/reviewdog.yml @@ -0,0 +1,30 @@ +name: reviewdog +on: + pull_request: + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref }} + cancel-in-progress: true + +jobs: + reviewdog: + name: ๐Ÿถ Reviewdog + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - uses: reviewdog/action-setup@v1 + with: + reviewdog_version: latest + + - uses: actions/setup-python@v3 + with: + python-version: "3.10" + + - 
name: Install deps + run: pip install ".[ci]" + + - name: Reviewdog + env: + REVIEWDOG_GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: make reviewdog diff --git a/.gitignore b/.gitignore index 6cfbe24..615fdd5 100644 --- a/.gitignore +++ b/.gitignore @@ -21,6 +21,7 @@ var/ *.egg-info/ .installed.cfg *.egg +.eggs/ # Installer logs pip-log.txt @@ -52,7 +53,17 @@ coverage.xml # Sphinx documentation docs/_build/ + +.python-version secrets.*sh .idea +.vscode/ +.python-version .venv +.venv3.7 +.venv3.8 +.venv3.9 +.venv3.10 +.venv3.11 +.envrc diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 5b5d8ab..0000000 --- a/.travis.yml +++ /dev/null @@ -1,26 +0,0 @@ -language: python -python: - - 2.7 - - 3.4 - - 3.5 - - 3.6 - - 3.7 - - 3.8 - -matrix: - fast_finish: true - include: - - python: 3.7 - dist: xenial - -cache: pip - -install: - - pip install -r dev_requirements.txt -script: - - echo $STREAM_KEY - - py.test -lv --cov=./ -after_script: - - "pep8 --exclude=migrations --ignore=E501,E225,W293 stream" - - "python setup.py install" - - "codecov" diff --git a/.versionrc.js b/.versionrc.js new file mode 100644 index 0000000..6131ae6 --- /dev/null +++ b/.versionrc.js @@ -0,0 +1,16 @@ +const pkgUpdater = { + VERSION_REGEX: /__version__ = "(.+)"/, + + readVersion: function (contents) { + const version = this.VERSION_REGEX.exec(contents)[1]; + return version; + }, + + writeVersion: function (contents, version) { + return contents.replace(this.VERSION_REGEX.exec(contents)[0], `__version__ = "${version}"`); + } +} + +module.exports = { + bumpFiles: [{ filename: './stream/__init__.py', updater: pkgUpdater }], +} diff --git a/CHANGELOG b/CHANGELOG deleted file mode 100644 index fbc7a93..0000000 --- a/CHANGELOG +++ /dev/null @@ -1,275 +0,0 @@ -================ - Change history -================ - -===== -3.1.1 -===== -:release-date: 2019-11-07 -:by: Tommaso Barbuli - -Bump crypto deps - -===== -3.1.0 -===== -:release-date: 2018-05-24 -:by: Jelte Fennema - -Batch 
partial update - -===== -3.0.2 -===== -:release-date: 2018-05-24 -:by: Jelte Fennema - -Fixes for filtering by reactions by kind - - -====== -3.0.1 -====== -:release-date: 2018-12-04 -:by: Tommaso Barbugli - -Add short-hand version for collections.create_reference() - -====== -3.0.0 -====== -:release-date: 2018-12-03 -:by: Tommaso Barbugli - -Add support for reactions -Add support for users -Removed HTTP Signatures based auth -Use JWT auth for everything -Add feed.get enrichment params - -====== -2.12.0 -====== -:release-date: 2018-10-08 -:by: Peter van Kampen - -Add user-session-token support - -====== -2.11.0 -====== -:release-date: 2017-08-23 -:by: Tommaso Barbugli - -Add collection helpers to create refs - -====== -2.10.0 -====== -:release-date: 2017-07-30 -:by: Tommaso Barbugli - -Partial activity API endpoint - -====== -2.9.3 -====== -:release-date: 2017-07-20 -:by: Tommaso Barbugli - -Use Readme.md content as package long description - -====== -2.9.2 -====== -:release-date: 2017-07-20 -:by: Tommaso Barbugli - -Fixed deserialization problem with datetime objects with zeroed microseconds -Support newer versions of the pyJWT lib - - -====== -2.9.1 -====== -:release-date: 2017-07-18 -:by: Tommaso Barbugli - -Renamed client.get_activities' foreign_id_time param to foreign_id_times - - -====== -2.9.0 -====== -:release-date: 2017-07-05 -:by: Tommaso Barbugli - -Add support for get activity API endpoint - -====== -2.8.1 -====== -:release-date: 2017-12-21 -:by: Tommaso Barbugli - -Fixes a regression with embedded httpsig and Python 3 - -====== -2.8.0 -====== -:release-date: 2017-12-21 -:by: Tommaso Barbugli - -Fixes install issues on Windows - -* Bundle http-sig library -* Use pycryptodomex instead of the discontinued pycrypto library - -====== -2.7.0 -====== -:release-date: 2017-12-14 -:by: Aaron McMillin - -* All client methods that make requests will return the response - -2.6.2 -===== -:release-date 2017-12-08 -:by: Balazs - -Consolidate API URL generation across 
API, Collections and Personalization services - -2.6.0 -===== -:release-date 2017-12-08 -:by: Balazs - -Support the new collections endpoint and flexible get requests for personalization - -2.5.0 -====== -:release-date: 2017-10-19 -:by: Tommaso Barbugli - -* Use new .com domain for API and Analytics - -2.4.0 -====== -:release-date: 2017-08-31 -:by: Tommaso Barbugli - -* Added support for To target update endpoint - -2.3.11 -====== -:release-date: 2017-05-22 -:by: Ian Douglas - -* Added support for Python 2.6.9 and downgrade to requests 2.2.1 - - -2.3.9 -========== -:release-date: 2016-12-20 -:by: Jelte Fennema - -* Fix errors_from_fields function so it displays the extra data returned by the - server about InputException errors. - - -2.3.8 -===== -:release-date: 2016-06-09 -:by: Tommaso Barbugli - -* Add support for keep_history on unfollow - -2.3.7 -===== -:release-date: 2016-06-02 -:by: Tommaso Barbugli - -* Add HTTP Signature auth method (for application auth resources) -* Add support for follow_many batch operation -* Add support for add_to_many batch operation -* Decode JWT from bytes to UTF-8 -* Skip add_activities API call if activity_list is empty -* Fix feed group and id validation, dashes are now allowed - -2.3.5 -===== -:release-date: 2015-10-07 -:by: Thierry Schellenbach - -* Added support for activity update - - -2.3.3 -===== -:release-date: 2015-10-07 -:by: Thierry Schellenbach - -* Added support for creating redirect urls - - -2.3.0 -===== -:release-date: 2015-06-11 -:by: Tommaso Barbugli - -* Added support for read-only tokens - -2.1.4 -===== -:release-date: 2015-01-14 -:by: Tommaso Barbugli - -* Added support for extra data for follow actions - -2.1.3 -===== -:release-date: 2015-01-05 -:by: Thierry Schellenbach - -* Bugfix, mark_seen and mark_read now work - -2.1.0 -===== -:release-date: 2014-12-19 -:by: Thierry Schellenbach - -* Added location support to reduce latency - -2.0.1 -===== -:release-date: 2014-11-18 -:by: Thierry Schellenbach - -* 
Additional validation on feed_slug and user_id - -2.0.0 -===== -:release-date: 2014-11-10 -:by: Thierry Schellenbach - -* Breaking change: New style feed syntax, client.feed('user', '1') instead of client.feed('user:3') -* Breaking change: New style follow syntax, feed.follow('user', 3) -* API versioning support -* Configurable timeouts -* Python 3 support - - -1.1.1 -===== -:release-date: 2014-09-20 08:00 A.M GMT -:by: Tommaso Barbugli - -* Add HTTP client retries - -1.1.0 -===== -:release-date: 2014-09-08 08:00 A.M GMT -:by: Tommaso Barbugli - -* Add support for mark read (notifications feeds) diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..ca69d7c --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,227 @@ +# Changelog + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + +## [5.4.0](https://github.com/GetStream/stream-python/compare/v5.3.1...v5.4.0) (2025-09-30) + +### [5.3.1](https://github.com/GetStream/stream-python/compare/v5.2.1...v5.3.1) (2023-10-25) + +### [5.2.1](https://github.com/GetStream/stream-python/compare/v5.2.0...v5.2.1) (2023-02-27) + +## [5.2.0](https://github.com/GetStream/stream-python/compare/v5.1.1...v5.2.0) (2023-02-16) + + +### Features + +* add support for 3.11 ([2eae7d7](https://github.com/GetStream/stream-python/commit/2eae7d7958f3b869982701188fc0d04a5b8ab021)) +* added async support ([b4515d3](https://github.com/GetStream/stream-python/commit/b4515d337be88ff50ba1cbad8645b1fbc8862ce0)) + + +### Bug Fixes + +* tests and linting ([cfacbbc](https://github.com/GetStream/stream-python/commit/cfacbbcadf45ca91d3e6c2a310dfd6fea1a03146)) +* redirect, uniqueness and deprecations ([aefdcd3](https://github.com/GetStream/stream-python/commit/aefdcd39ff8a41a443455f1a41cc819039015cdb)) + +## 5.1.1 - 2022-01-18 + +* Handle backward compatible pyjwt 1.x support for token generation + +## 5.1.0 - 2021-04-16 
+ +* Add analytics support for `track_engagements` and `track_impressions` +* Update license to BSD-3 canonical description + +## 5.0.1 - 2021-01-22 + +* Bump pyjwt to 2.x + +## 5.0.0 - 2020-09-17 + +* Drop python 3.5 and add 3.9 +* Improve install and CI + +## 4.0.0 - 2020-09-02 + +* Drop old create_user_session_token in favor create_user_token +* Drop python support before 3.4 +* Allow custom data in client.create_jwt_token +* Add kind filter for reactions in enrichment +* Add follow stat support +* Move to github actions from travis and improve static analysis +* Update readme for old docs +* Update some crypto dependencies + +## 3.5.1 - 2020-06-08 + +* Handle warning in JWT decode regarding missing algorithm + +## 3.5.0 - 2020-06-08 + +* Add enrichment support to direct activity get + +## 3.4.0 - 2020-05-11 + +* Expose target_feeds_extra_data to add extra data to activities from reactions + +## 3.3.0 - 2020-05-04 + +* Add batch unfollow support + +## 3.2.1 - 2020-03-17 + +* Set timezone as utc in serialization hooks + +## 3.2.0 - 2020-03-17 + +* Add open graph scrape support +* Update python support (drop 2.6, add 3.8) +* Fixes in docs for collections and personalization + +## 3.1.1 - 2019-11-07 + +* Bump crypto deps + +## 3.1.0 - 2018-05-24 + +* Batch partial update + +## 3.0.2 - 2018-05-24 + +* Fixes for filtering by reactions by kind + +## 3.0.1 - 2018-12-04 + +* Add short-hand version for collections.create_reference() + +## 3.0.0 - 2018-12-03 + +* Add support for reactions +* Add support for users +* Removed HTTP Signatures based auth +* Use JWT auth for everything +* Add feed.get enrichment params + +## 2.12.0 - 2018-10-08 + +* Add user-session-token support + +## 2.11.0 - 2017-08-23 + +* Add collection helpers to create refs + +## 2.10.0 - 2017-07-30 + +* Partial activity API endpoint + +## 2.9.3 - 2017-07-20 + +* Use Readme.md content as package long description + +## 2.9.2 - 2017-07-20 + +* Fixed deserialization problem with datetime objects with 
zeroed microseconds +* Support newer versions of the pyJWT lib + +## 2.9.1 - 2017-07-18 + +Renamed client.get_activities' foreign_id_time param to foreign_id_times + +## 2.9.0 - 2017-07-05 + +* Add support for get activity API endpoint + +## 2.8.1 - 2017-12-21 + +* Fixes a regression with embedded httpsig and Python 3 + +## 2.8.0 - 2017-12-21 + +* Fixes install issues on Windows +* Bundle http-sig library +* Use pycryptodomex instead of the discontinued pycrypto library + +## 2.7.0 - 2017-12-14 + +* All client methods that make requests will return the response + +## 2.6.2 - 2017-12-08 + +* Consolidate API URL generation across API, Collections and Personalization services + +## 2.6.0 - 2017-12-08 + +Support the new collections endpoint and flexible get requests for personalization + +## 2.5.0 - 2017-10-19 + +* Use new .com domain for API and Analytics + +## 2.4.0 - 2017-08-31 + +* Added support for To target update endpoint + +## 2.3.11 - 2017-05-22 + +* Added support for Python 2.6.9 and downgrade to requests 2.2.1 + +## 2.3.9 - 2016-12-20 + +* Fix errors_from_fields function so it displays the extra data returned by the + server about InputException errors. 
+ +## 2.3.8 - 2016-06-09 + +* Add support for keep_history on unfollow + +## 2.3.7 - 2016-06-02 + +* Add HTTP Signature auth method (for application auth resources) +* Add support for follow_many batch operation +* Add support for add_to_many batch operation +* Decode JWT from bytes to UTF-8 +* Skip add_activities API call if activity_list is empty +* Fix feed group and id validation, dashes are now allowed + +## 2.3.5 - 2015-10-07 + +* Added support for activity update + +## 2.3.3 - 2015-10-07 + +* Added support for creating redirect urls + +## 2.3.0 - 2015-06-11 + +* Added support for read-only tokens + +## 2.1.4 - 2015-01-14 + +* Added support for extra data for follow actions + +## 2.1.3 - 2015-01-05 + +* Bugfix, mark_seen and mark_read now work + +## 2.1.0 - 2014-12-19 + +* Added location support to reduce latency + +## 2.0.1 - 2014-11-18 + +* Additional validation on feed_slug and user_id + +## 2.0.0 - 2014-11-10 + +* Breaking change: New style feed syntax, client.feed('user', '1') instead of client.feed('user:3') +* Breaking change: New style follow syntax, feed.follow('user', 3) +* API versioning support +* Configurable timeouts +* Python 3 support + +## 1.1.1 - 2014-09-20 + +* Add HTTP client retries + +## 1.1.0 -2014-09-08 + +* Add support for mark read (notifications feeds) diff --git a/LICENSE b/LICENSE index 73bc810..e97bd1f 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,4 @@ -Copyright (c) 2014-2017 Stream.io Inc, and individual contributors. +Copyright (c) 2014-2021, Stream.io Inc, and individual contributors. All rights reserved. @@ -25,24 +25,3 @@ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSE THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - - httpsig - - https://github.com/ahknight/httpsig - - Copyright (c) 2014 Adam Knight - Copyright (c) 2012 Adam T. 
Lindsay (original author) - - Permission is hereby granted, free of charge, to any person obtaining a copy of this - software and associated documentation files (the "Software"), to deal in the Software without - restriction, including without limitation the rights to use, copy, modify, merge, publish, - distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the - Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or - substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING - BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND - NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, - DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..276b1f0 --- /dev/null +++ b/Makefile @@ -0,0 +1,26 @@ +STREAM_KEY ?= NOT_EXIST +STREAM_SECRET ?= NOT_EXIST + +# These targets are not files +.PHONY: help check test lint lint-fix + +help: ## Display this help message + @echo "Please use \`make \` where is one of" + @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; \ + {printf "\033[36m%-40s\033[0m %s\n", $$1, $$2}' + +lint: ## Run linters + black --check stream + flake8 --ignore=E501,E225,W293,W503,F401 stream + +lint-fix: + black stream + +test: ## Run tests + STREAM_KEY=$(STREAM_KEY) STREAM_SECRET=$(STREAM_SECRET) pytest stream/tests + +check: lint test ## Run linters + tests + +reviewdog: + black --check --diff --quiet stream | reviewdog -f=diff -f.diff.strip=0 -filter-mode="diff_context" -name=black -reporter=github-pr-review + flake8 --ignore=E501,W503,E225,W293,F401 stream | reviewdog -f=flake8 -name=flake8 -reporter=github-pr-review diff --git a/README.md b/README.md index 52d69bb..2b986e1 100644 --- a/README.md +++ b/README.md @@ -1,44 +1,60 @@ -stream-python -============= +# Official Python SDK for [Stream Feeds](https://getstream.io/activity-feeds/) -[![Build Status](https://travis-ci.org/GetStream/stream-python.svg?branch=master)](https://travis-ci.org/GetStream/stream-python) [![codecov](https://codecov.io/gh/GetStream/stream-python/branch/master/graph/badge.svg)](https://codecov.io/gh/GetStream/stream-python) [![PyPI version](https://badge.fury.io/py/stream-python.svg)](http://badge.fury.io/py/stream-python) +[![build](https://github.com/GetStream/stream-python/workflows/build/badge.svg)](https://github.com/GetStream/stream-python/actions) [![PyPI version](https://badge.fury.io/py/stream-python.svg)](http://badge.fury.io/py/stream-python) ![PyPI - Python Version](https://img.shields.io/pypi/pyversions/stream-python.svg) -[stream-python](https://github.com/GetStream/stream-python) is the 
official Python client for [Stream](https://getstream.io/), a web service for building scalable newsfeeds and activity streams. +

+ +

+

+ Official Python API client for Stream Feeds, a web service for building scalable newsfeeds and activity streams. +
+ Explore the docs » +
+
+ Django Code Sample + · + Report Bug + · + Request Feature +

-Note there is also a higher level [Django - Stream integration](https://github.com/getstream/stream-django) library which hooks into the Django ORM. +## ๐Ÿ“ About Stream -You can sign up for a Stream account at https://getstream.io/get_started. +> ๐Ÿ’ก Note: this is a library for the **Feeds** product. The Chat SDKs can be found [here](https://getstream.io/chat/docs/). -### Installation +You can sign up for a Stream account at our [Get Started](https://getstream.io/get_started/) page. -stream-python supports: +You can use this library to access feeds API endpoints server-side. -- Python (2.6, 2.7, 3.4, 3.5, 3.6, 3.7) +For the client-side integrations (web and mobile) have a look at the JavaScript, iOS and Android SDK libraries ([docs](https://getstream.io/activity-feeds/)). + +> ๐Ÿ’ก We have a Django integration available [here](https://github.com/GetStream/stream-django). + +## โš™๏ธ Installation -#### Install from Pypi ```bash -pip install stream-python +$ pip install stream-python ``` -### Full documentation +## ๐Ÿ“š Full documentation -Documentation for this Python client are available at the [Stream website](https://getstream.io/docs/?language=python) or on [Read the Docs](http://stream-python.readthedocs.org/en/latest/). +Documentation for this Python client are available at the [Stream website](https://getstream.io/docs/?language=python). 
-### Usage +## โœจ Getting started ```python import datetime -# Instantiate a new client +# Create a new client import stream client = stream.connect('YOUR_API_KEY', 'API_KEY_SECRET') -# INstantiate a new client specifying datacenter location +# Create a new client specifying data center location client = stream.connect('YOUR_API_KEY', 'API_KEY_SECRET', location='us-east') # Find your API keys here https://getstream.io/dashboard/ -# Instantiate a feed object +# Create a feed object user_feed_1 = client.feed('user', '1') # Get activities from 5 to 10 (slow pagination) @@ -104,6 +120,9 @@ client.get_activities(foreign_id_times=[ (foreign_id, activity_time), ]) +# Enrich while getting activities +client.get_activities(ids=[activity_id], enrich=True, reactions={"counts": True}) + # Update some parts of an activity with activity_partial_update set = { 'product.name': 'boots', @@ -148,44 +167,141 @@ events = [impression, engagement] redirect_url = client.create_redirect_url('http://google.com/', 'user_id', events) ``` -[JS client](http://github.com/getstream/stream-js). +### Async code usage +```python +import datetime +import stream +client = stream.connect('YOUR_API_KEY', 'API_KEY_SECRET', use_async=True) -### Contributing -First, make sure you can run the test suite. 
Tests are run via py.test +# Create a new client specifying data center location +client = stream.connect('YOUR_API_KEY', 'API_KEY_SECRET', location='us-east', use_async=True) +# Find your API keys here https://getstream.io/dashboard/ -```bash -py.test -# with coverage -py.test --cov stream --cov-report html -# against a local API backend -LOCAL=true py.test -``` +# Create a feed object +user_feed_1 = client.feed('user', '1') + +# Get activities from 5 to 10 (slow pagination) +result = await user_feed_1.get(limit=5, offset=5) +# (Recommended & faster) Filter on an id less than the given UUID +result = await user_feed_1.get(limit=5, id_lt="e561de8f-00f1-11e4-b400-0cc47a024be0") -Install black and flake8 +# Create a new activity +activity_data = {'actor': 1, 'verb': 'tweet', 'object': 1, 'foreign_id': 'tweet:1'} +activity_response = await user_feed_1.add_activity(activity_data) +# Create a bit more complex activity +activity_data = {'actor': 1, 'verb': 'run', 'object': 1, 'foreign_id': 'run:1', + 'course': {'name': 'Golden Gate park', 'distance': 10}, + 'participants': ['Thierry', 'Tommaso'], + 'started_at': datetime.datetime.now() +} +await user_feed_1.add_activity(activity_data) -``` -pip install black -pip install flake8 -``` +# Remove an activity by its id +await user_feed_1.remove_activity("e561de8f-00f1-11e4-b400-0cc47a024be0") +# or by foreign id +await user_feed_1.remove_activity(foreign_id='tweet:1') + +# Follow another feed +await user_feed_1.follow('flat', '42') + +# Stop following another feed +await user_feed_1.unfollow('flat', '42') + +# List followers/following +following = await user_feed_1.following(offset=0, limit=2) +followers = await user_feed_1.followers(offset=0, limit=10) + +# Creates many follow relationships in one request +follows = [ + {'source': 'flat:1', 'target': 'user:1'}, + {'source': 'flat:1', 'target': 'user:2'}, + {'source': 'flat:1', 'target': 'user:3'} +] +await client.follow_many(follows) + +# Batch adding activities +activities 
= [ + {'actor': 1, 'verb': 'tweet', 'object': 1}, + {'actor': 2, 'verb': 'watch', 'object': 3} +] +await user_feed_1.add_activities(activities) + +# Add an activity and push it to other feeds too using the `to` field +activity = { + "actor":"1", + "verb":"like", + "object":"3", + "to":["user:44", "user:45"] +} +await user_feed_1.add_activity(activity) + +# Retrieve an activity by its ID +await client.get_activities(ids=[activity_id]) + +# Retrieve an activity by the combination of foreign_id and time +await client.get_activities(foreign_id_times=[ + (foreign_id, activity_time), +]) + +# Enrich while getting activities +await client.get_activities(ids=[activity_id], enrich=True, reactions={"counts": True}) + +# Update some parts of an activity with activity_partial_update +set = { + 'product.name': 'boots', + 'colors': { + 'red': '0xFF0000', + 'green': '0x00FF00' + } +} +unset = [ 'popularity', 'details.info' ] +# ...by ID +await client.activity_partial_update(id=activity_id, set=set, unset=unset) +# ...or by combination of foreign_id and time +await client.activity_partial_update(foreign_id=foreign_id, time=activity_time, set=set, unset=unset) -Install git hooks to avoid pushing invalid code (git commit will run black and flak8) +# Generating user token for client side usage (JS client) +user_token = client.create_user_token("user-42") -### Releasing a new version +# Javascript client side feed initialization +# client = stream.connect(apiKey, userToken, appId); -In order to release new version you need to be a maintainer on Pypi. 
+# Generate a redirect url for the Stream Analytics platform to track +# events/impressions on url clicks +impression = { + 'content_list': ['tweet:1', 'tweet:2', 'tweet:3'], + 'user_data': 'tommaso', + 'location': 'email', + 'feed_id': 'user:global' +} + +engagement = { + 'content': 'tweet:2', + 'label': 'click', + 'position': 1, + 'user_data': 'tommaso', + 'location': 'email', + 'feed_id': + 'user:global' +} + +events = [impression, engagement] + +redirect_url = client.create_redirect_url('http://google.com/', 'user_id', events) + +``` + +[JS client](http://github.com/getstream/stream-js). -- Update CHANGELOG -- Update the version on setup.py -- Commit and push to Github -- Create a new tag for the version (eg. `v2.9.0`) -- Create a new dist with python `python setup.py sdist` -- Upload the new distributable with wine `twine upload dist/stream-python-VERSION-NAME.tar.gz` +## โœ๏ธ Contributing +======= -If unsure you can also test using the Pypi test servers `twine upload --repository-url https://test.pypi.org/legacy/ dist/stream-python-VERSION-NAME.tar.gz` +We welcome code changes that improve this library or fix a problem, please make sure to follow all best practices and add tests if applicable before submitting a Pull Request on Github. We are very happy to merge your code in the official repository. Make sure to sign our [Contributor License Agreement (CLA)](https://docs.google.com/forms/d/e/1FAIpQLScFKsKkAJI7mhCr7K9rEIOpqIDThrWxuvxnwUq2XkHyG154vQ/viewform) first. See our [license file](./LICENSE) for more details. -### Copyright and License Information +## ๐Ÿง‘โ€๐Ÿ’ป We are hiring! -Copyright (c) 2014-2017 Stream.io Inc, and individual contributors. All rights reserved. +We've recently closed a [$38 million Series B funding round](https://techcrunch.com/2021/03/04/stream-raises-38m-as-its-chat-and-activity-feed-apis-power-communications-for-1b-users/) and we keep actively growing. 
+Our APIs are used by more than a billion end-users, and you'll have a chance to make a huge impact on the product within a team of the strongest engineers all over the world. -See the file "LICENSE" for information on the history of this software, terms & conditions for usage, and a DISCLAIMER OF ALL WARRANTIES. +Check out our current openings and apply via [Stream's website](https://getstream.io/team/#jobs). diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000..4094801 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,16 @@ +# Reporting a Vulnerability +At Stream we are committed to the security of our Software. We appreciate your efforts in disclosing vulnerabilities responsibly and we will make every effort to acknowledge your contributions. + +Report security vulnerabilities at the following email address: +``` +[security@getstream.io](mailto:security@getstream.io) +``` +Alternatively it is also possible to open a new issue in the affected repository, tagging it with the `security` tag. + +A team member will acknowledge the vulnerability and will follow-up with more detailed information. A representative of the security team will be in touch if more information is needed. + +# Information to include in a report +While we appreciate any information that you are willing to provide, please make sure to include the following: +* Which repository is affected +* Which branch, if relevant +* Be as descriptive as possible, the team will replicate the vulnerability before working on a fix. diff --git a/assets/logo.svg b/assets/logo.svg new file mode 100644 index 0000000..1c68c5c --- /dev/null +++ b/assets/logo.svg @@ -0,0 +1,16 @@ + + + + STREAM MARK + Created with Sketch. 
+ + + + + + + + + + + \ No newline at end of file diff --git a/dev_requirements.txt b/dev_requirements.txt deleted file mode 100644 index 551d254..0000000 --- a/dev_requirements.txt +++ /dev/null @@ -1,6 +0,0 @@ -pytest==3.2.5 -codecov==2.0.15 -unittest2==1.1.0 -pytest-cov==2.5.1 -python-dateutil --e . diff --git a/docs/Makefile b/docs/Makefile deleted file mode 100644 index c34f348..0000000 --- a/docs/Makefile +++ /dev/null @@ -1,177 +0,0 @@ -# Makefile for Sphinx documentation -# - -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = sphinx-build -PAPER = -BUILDDIR = _build - -# User-friendly check for sphinx-build -ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) -$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) -endif - -# Internal variables. -PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . -# the i18n builder cannot share the environment and doctrees with the others -I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
- -.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext - -help: - @echo "Please use \`make ' where is one of" - @echo " html to make standalone HTML files" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " texinfo to make Texinfo files" - @echo " info to make Texinfo files and run them through makeinfo" - @echo " gettext to make PO message catalogs" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " xml to make Docutils-native XML files" - @echo " pseudoxml to make pseudoxml-XML files for display purposes" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" - -clean: - rm -rf $(BUILDDIR)/* - -html: - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." - -dirhtml: - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." - -singlehtml: - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 
- -pickle: - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files." - -json: - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files." - -htmlhelp: - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp." - -qthelp: - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/stream-python.qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/stream-python.qhc" - -devhelp: - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." - @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/stream-python" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/stream-python" - @echo "# devhelp" - -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." - -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." - -latexpdf: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -latexpdfja: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through platex and dvipdfmx..." 
- $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." - -texinfo: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo - @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." - @echo "Run \`make' in that directory to run these through makeinfo" \ - "(use \`make info' here to do that automatically)." - -info: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo "Running Texinfo files through makeinfo..." - make -C $(BUILDDIR)/texinfo info - @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." - -gettext: - $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale - @echo - @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." - -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." - -linkcheck: - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." - -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." - -xml: - $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml - @echo - @echo "Build finished. The XML files are in $(BUILDDIR)/xml." - -pseudoxml: - $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml - @echo - @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
diff --git a/docs/conf.py b/docs/conf.py deleted file mode 100644 index 74f0734..0000000 --- a/docs/conf.py +++ /dev/null @@ -1,340 +0,0 @@ -# -*- coding: utf-8 -*- -# -# stream-python documentation build configuration file, created by -# sphinx-quickstart on Tue May 27 16:29:21 2014. -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os - -# on_rtd is whether we are on readthedocs.org -import os -on_rtd = os.environ.get('READTHEDOCS', None) == 'True' - -if not on_rtd: # only import and set the theme if we're building docs locally - import sphinx_rtd_theme - html_theme = 'sphinx_rtd_theme' - html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -#sys.path.insert(0, os.path.abspath('.')) - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.viewcode', -] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix of source filenames. -source_suffix = '.rst' - -# The encoding of source files. -#source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. 
-project = u'stream-python' -copyright = u'2014, Stream.io, Inc' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = '0.1.0' -# The full version, including alpha/beta/rc tags. -release = '0.1.0' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -#language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ['_build'] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -#show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -#keep_warnings = False - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -#html_theme = 'default' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. 
For a list of options available for each theme, see the -# documentation. -#html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -#html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -#html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -#html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -#html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -#html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - -# If false, no module index is generated. -#html_domain_indices = True - -# If false, no index is generated. -#html_use_index = True - -# If true, the index is split into individual pages for each letter. 
-#html_split_index = False - -# If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None - -# Output file base name for HTML help builder. -htmlhelp_basename = 'stream-pythondoc' - - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - #'papersize': 'letterpaper', - - # The font size ('10pt', '11pt' or '12pt'). - #'pointsize': '10pt', - - # Additional stuff for the LaTeX preamble. - #'preamble': '', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ('index', 'stream-python.tex', u'stream-python Documentation', - u'Thierry Schellenbach', 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# If true, show page references after internal links. -#latex_show_pagerefs = False - -# If true, show URL addresses after external links. -#latex_show_urls = False - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. 
-#latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ('index', 'stream-python', u'stream-python Documentation', - [u'Thierry Schellenbach'], 1) -] - -# If true, show URL addresses after external links. -#man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ('index', 'stream-python', u'stream-python Documentation', - u'Thierry Schellenbach', 'stream-python', 'One line description of project.', - 'Miscellaneous'), -] - -# Documents to append as an appendix to all manuals. -#texinfo_appendices = [] - -# If false, no module index is generated. -#texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -#texinfo_no_detailmenu = False - - -# -- Options for Epub output ---------------------------------------------- - -# Bibliographic Dublin Core info. -epub_title = u'stream-python' -epub_author = u'Thierry Schellenbach' -epub_publisher = u'Thierry Schellenbach' -epub_copyright = u'2014, Stream.io, Inc' - -# The basename for the epub file. It defaults to the project name. -#epub_basename = u'stream-python' - -# The HTML theme for the epub output. Since the default themes are not optimized -# for small screen space, using the same theme for HTML and epub output is -# usually not wise. This defaults to 'epub', a theme designed to save visual -# space. -#epub_theme = 'epub' - -# The language of the text. It defaults to the language option -# or en if the language is not set. 
-#epub_language = '' - -# The scheme of the identifier. Typical schemes are ISBN or URL. -#epub_scheme = '' - -# The unique identifier of the text. This can be a ISBN number -# or the project homepage. -#epub_identifier = '' - -# A unique identification for the text. -#epub_uid = '' - -# A tuple containing the cover image and cover page html template filenames. -#epub_cover = () - -# A sequence of (type, uri, title) tuples for the guide element of content.opf. -#epub_guide = () - -# HTML files that should be inserted before the pages created by sphinx. -# The format is a list of tuples containing the path and title. -#epub_pre_files = [] - -# HTML files shat should be inserted after the pages created by sphinx. -# The format is a list of tuples containing the path and title. -#epub_post_files = [] - -# A list of files that should not be packed into the epub file. -epub_exclude_files = ['search.html'] - -# The depth of the table of contents in toc.ncx. -#epub_tocdepth = 3 - -# Allow duplicate toc entries. -#epub_tocdup = True - -# Choose between 'default' and 'includehidden'. -#epub_tocscope = 'default' - -# Fix unsupported image types using the PIL. -#epub_fix_images = False - -# Scale large images. -#epub_max_image_width = 0 - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -#epub_show_urls = 'inline' - -# If false, no index is generated. -#epub_use_index = True diff --git a/docs/index.rst b/docs/index.rst deleted file mode 100644 index 6c15351..0000000 --- a/docs/index.rst +++ /dev/null @@ -1,23 +0,0 @@ -.. stream-python documentation master file, created by - sphinx-quickstart on Tue May 27 16:29:21 2014. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -Welcome to stream-python's documentation! -========================================= - -Contents: - -.. 
toctree:: - :maxdepth: 3 - - stream - - -Indices and tables -================== - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` - diff --git a/docs/make.bat b/docs/make.bat deleted file mode 100644 index 541fca8..0000000 --- a/docs/make.bat +++ /dev/null @@ -1,242 +0,0 @@ -@ECHO OFF - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build -) -set BUILDDIR=_build -set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . -set I18NSPHINXOPTS=%SPHINXOPTS% . -if NOT "%PAPER%" == "" ( - set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% - set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% -) - -if "%1" == "" goto help - -if "%1" == "help" ( - :help - echo.Please use `make ^` where ^ is one of - echo. html to make standalone HTML files - echo. dirhtml to make HTML files named index.html in directories - echo. singlehtml to make a single large HTML file - echo. pickle to make pickle files - echo. json to make JSON files - echo. htmlhelp to make HTML files and a HTML help project - echo. qthelp to make HTML files and a qthelp project - echo. devhelp to make HTML files and a Devhelp project - echo. epub to make an epub - echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter - echo. text to make text files - echo. man to make manual pages - echo. texinfo to make Texinfo files - echo. gettext to make PO message catalogs - echo. changes to make an overview over all changed/added/deprecated items - echo. xml to make Docutils-native XML files - echo. pseudoxml to make pseudoxml-XML files for display purposes - echo. linkcheck to check all external links for integrity - echo. doctest to run all doctests embedded in the documentation if enabled - goto end -) - -if "%1" == "clean" ( - for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i - del /q /s %BUILDDIR%\* - goto end -) - - -%SPHINXBUILD% 2> nul -if errorlevel 9009 ( - echo. - echo.The 'sphinx-build' command was not found. 
Make sure you have Sphinx - echo.installed, then set the SPHINXBUILD environment variable to point - echo.to the full path of the 'sphinx-build' executable. Alternatively you - echo.may add the Sphinx directory to PATH. - echo. - echo.If you don't have Sphinx installed, grab it from - echo.http://sphinx-doc.org/ - exit /b 1 -) - -if "%1" == "html" ( - %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/html. - goto end -) - -if "%1" == "dirhtml" ( - %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. - goto end -) - -if "%1" == "singlehtml" ( - %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. - goto end -) - -if "%1" == "pickle" ( - %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can process the pickle files. - goto end -) - -if "%1" == "json" ( - %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can process the JSON files. - goto end -) - -if "%1" == "htmlhelp" ( - %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can run HTML Help Workshop with the ^ -.hhp project file in %BUILDDIR%/htmlhelp. - goto end -) - -if "%1" == "qthelp" ( - %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp - if errorlevel 1 exit /b 1 - echo. 
- echo.Build finished; now you can run "qcollectiongenerator" with the ^ -.qhcp project file in %BUILDDIR%/qthelp, like this: - echo.^> qcollectiongenerator %BUILDDIR%\qthelp\stream-python.qhcp - echo.To view the help file: - echo.^> assistant -collectionFile %BUILDDIR%\qthelp\stream-python.ghc - goto end -) - -if "%1" == "devhelp" ( - %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. - goto end -) - -if "%1" == "epub" ( - %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The epub file is in %BUILDDIR%/epub. - goto end -) - -if "%1" == "latex" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "latexpdf" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - cd %BUILDDIR%/latex - make all-pdf - cd %BUILDDIR%/.. - echo. - echo.Build finished; the PDF files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "latexpdfja" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - cd %BUILDDIR%/latex - make all-pdf-ja - cd %BUILDDIR%/.. - echo. - echo.Build finished; the PDF files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "text" ( - %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The text files are in %BUILDDIR%/text. - goto end -) - -if "%1" == "man" ( - %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The manual pages are in %BUILDDIR%/man. - goto end -) - -if "%1" == "texinfo" ( - %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 
- goto end -) - -if "%1" == "gettext" ( - %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The message catalogs are in %BUILDDIR%/locale. - goto end -) - -if "%1" == "changes" ( - %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes - if errorlevel 1 exit /b 1 - echo. - echo.The overview file is in %BUILDDIR%/changes. - goto end -) - -if "%1" == "linkcheck" ( - %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck - if errorlevel 1 exit /b 1 - echo. - echo.Link check complete; look for any errors in the above output ^ -or in %BUILDDIR%/linkcheck/output.txt. - goto end -) - -if "%1" == "doctest" ( - %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest - if errorlevel 1 exit /b 1 - echo. - echo.Testing of doctests in the sources finished, look at the ^ -results in %BUILDDIR%/doctest/output.txt. - goto end -) - -if "%1" == "xml" ( - %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The XML files are in %BUILDDIR%/xml. - goto end -) - -if "%1" == "pseudoxml" ( - %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. - goto end -) - -:end diff --git a/docs/stream.rst b/docs/stream.rst deleted file mode 100644 index 5779489..0000000 --- a/docs/stream.rst +++ /dev/null @@ -1,62 +0,0 @@ -stream package -============== - -Submodules ----------- - -stream.client module --------------------- - -.. automodule:: stream.client - :members: - :undoc-members: - :show-inheritance: - -stream.exceptions module ------------------------- - -.. automodule:: stream.exceptions - :members: - :undoc-members: - :show-inheritance: - -stream.feed module ------------------- - -.. automodule:: stream.feed - :members: - :undoc-members: - :show-inheritance: - -stream.signing module ---------------------- - -.. 
automodule:: stream.signing - :members: - :undoc-members: - :show-inheritance: - -stream.tests module -------------------- - -.. automodule:: stream.tests - :members: - :undoc-members: - :show-inheritance: - -stream.utils module -------------------- - -.. automodule:: stream.utils - :members: - :undoc-members: - :show-inheritance: - - -Module contents ---------------- - -.. automodule:: stream - :members: - :undoc-members: - :show-inheritance: diff --git a/dotgit/hooks/pre-commit-format.sh b/dotgit/hooks/pre-commit-format.sh index 63259bf..bf0d444 100755 --- a/dotgit/hooks/pre-commit-format.sh +++ b/dotgit/hooks/pre-commit-format.sh @@ -2,12 +2,17 @@ set -e -if ! black . --check -q; then - black . +if ! black stream --check -q; then + black stream + echo echo "some files were not formatted correctly (black) commit aborted!" echo "your changes are still staged, you can accept formatting changes with git add or ignore them by adding --no-verify to git commit" exit 1 fi -flake8 - +if ! flake8 --ignore=E501,E225,W293,W503,F401 stream; then + echo + echo "commit is aborted because there are some error prone issues in your changes as printed above" + echo "your changes are still staged, you can accept formatting changes with git add or ignore them by adding --no-verify to git commit" + exit 1 +fi diff --git a/pyproject.toml b/pyproject.toml index edf85a6..4891814 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.black] line-length = 88 -py36 = true +target-version = ['py38'] include = '\.pyi?$' exclude = ''' /( diff --git a/scripts/get_changelog_diff.js b/scripts/get_changelog_diff.js new file mode 100644 index 0000000..ce03438 --- /dev/null +++ b/scripts/get_changelog_diff.js @@ -0,0 +1,26 @@ +/* +Here we're trying to parse the latest changes from CHANGELOG.md file. 
+The changelog looks like this: + +## 0.0.3 +- Something #3 +## 0.0.2 +- Something #2 +## 0.0.1 +- Something #1 + +In this case we're trying to extract "- Something #3" since that's the latest change. +*/ +module.exports = () => { + const fs = require('fs') + + changelog = fs.readFileSync('CHANGELOG.md', 'utf8') + releases = changelog.match(/## [?[0-9](.+)/g) + + current_release = changelog.indexOf(releases[0]) + previous_release = changelog.indexOf(releases[1]) + + latest_changes = changelog.substr(current_release, previous_release - current_release) + + return latest_changes +} diff --git a/setup.py b/setup.py index ae6d018..49b41fb 100644 --- a/setup.py +++ b/setup.py @@ -2,44 +2,19 @@ from setuptools import setup, find_packages -from setuptools.command.test import test as TestCommand from stream import __version__, __maintainer__, __email__, __license__ -import sys -unit = "unittest2py3k" if sys.version_info > (3, 0, 0) else "unittest2" -tests_require = [unit, "pytest==3.2.5", "unittest2", "pytest-cov", "python-dateutil"] +install_requires = [ + "requests>=2.31.0,<3", + "pyjwt>=2.8.0,<3", + "pytz>=2023.3.post1", + "aiohttp>=3.9.0b0", +] +tests_require = ["pytest", "pytest-cov", "python-dateutil", "pytest-asyncio"] +ci_require = ["black", "flake8", "pytest-cov"] long_description = open("README.md", "r").read() -requests = "requests>=2.3.0,<3" - -if sys.version_info < (2, 7, 9): - requests = "requests[security]>=2.4.1,<3" - -install_requires = ["pycryptodomex>=3.4.7,<4", requests, "six>=1.8.0"] - -if sys.version_info < (2, 7, 0): - install_requires.append("pyOpenSSL<18.0.0") - install_requires.append("pyjwt>=1.3.0,<1.6.0") - install_requires.append("pycparser<2.19") -else: - install_requires.append("pyjwt>=1.3.0,<1.8.0") - - -class PyTest(TestCommand): - def finalize_options(self): - TestCommand.finalize_options(self) - self.test_args = [] - self.test_suite = True - - def run_tests(self): - # import here, cause outside the eggs aren't loaded - import pytest - - 
errno = pytest.main("-v --cov=./") - sys.exit(errno) - - setup( name="stream-python", version=__version__, @@ -49,14 +24,21 @@ def run_tests(self): description="Client for getstream.io. Build scalable newsfeeds & activity streams in a few hours instead of weeks.", long_description=long_description, long_description_content_type="text/markdown", + project_urls={ + "Bug Tracker": "https://github.com/GetStream/stream-python/issues", + "Documentation": "https://getstream.io/activity-feeds/docs/python/?language=python", + "Release Notes": "https://github.com/GetStream/stream-python/releases/tag/v{}".format( + __version__ + ), + }, license=__license__, - packages=find_packages(), + packages=find_packages(exclude=["*tests*"]), zip_safe=False, install_requires=install_requires, - extras_require={"test": tests_require}, - cmdclass={"test": PyTest}, + extras_require={"test": tests_require, "ci": ci_require}, tests_require=tests_require, include_package_data=True, + python_requires=">=3.7", classifiers=[ "Intended Audience :: Developers", "Intended Audience :: System Administrators", @@ -65,13 +47,12 @@ def run_tests(self): "Development Status :: 5 - Production/Stable", "License :: OSI Approved :: BSD License", "Natural Language :: English", - "Programming Language :: Python :: 2.6", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.4", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Topic :: Software Development :: Libraries :: Python Modules", ], ) diff --git a/stream/__init__.py b/stream/__init__.py index ea7bf9b..c4fbba8 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -1,11 +1,11 @@ -import re import os +import re __author__ = "Thierry 
Schellenbach" -__copyright__ = "Copyright 2014, Stream.io, Inc" +__copyright__ = "Copyright 2022, Stream.io, Inc" __credits__ = ["Thierry Schellenbach, mellowmorning.com, @tschellenbach"] __license__ = "BSD-3-Clause" -__version__ = "3.1.1" +__version__ = "5.4.0" __maintainer__ = "Thierry Schellenbach" __email__ = "support@getstream.io" __status__ = "Production" @@ -19,6 +19,7 @@ def connect( timeout=3.0, location=None, base_url=None, + use_async=False, ): """ Returns a Client object @@ -26,8 +27,12 @@ def connect( :param api_key: your api key or heroku url :param api_secret: the api secret :param app_id: the app id (used for listening to feed changes) + :param use_async: flag to set AsyncClient """ - from stream.client import StreamClient + from stream.client import AsyncStreamClient, StreamClient + + if location is None: + location = os.environ.get("STREAM_REGION") stream_url = os.environ.get("STREAM_URL") # support for the heroku STREAM_URL syntax @@ -42,6 +47,17 @@ def connect( else: raise ValueError("Invalid api key or heroku url") + if use_async: + return AsyncStreamClient( + api_key, + api_secret, + app_id, + version, + timeout, + location=location, + base_url=base_url, + ) + return StreamClient( api_key, api_secret, diff --git a/stream/client/__init__.py b/stream/client/__init__.py new file mode 100644 index 0000000..5d8511e --- /dev/null +++ b/stream/client/__init__.py @@ -0,0 +1,2 @@ +from .async_client import AsyncStreamClient +from .client import StreamClient diff --git a/stream/client/async_client.py b/stream/client/async_client.py new file mode 100644 index 0000000..02eacc4 --- /dev/null +++ b/stream/client/async_client.py @@ -0,0 +1,275 @@ +import logging + +import aiohttp +from aiohttp import ClientConnectionError + +from stream import serializer +from stream.client.base import BaseStreamClient +from stream.collections import AsyncCollections +from stream.feed.feeds import AsyncFeed +from stream.personalization import AsyncPersonalization +from 
stream.reactions import AsyncReactions +from stream.serializer import _datetime_encoder +from stream.users import AsyncUsers +from stream.utils import ( + get_reaction_params, + validate_feed_slug, + validate_foreign_id_time, + validate_user_id, +) + +logger = logging.getLogger(__name__) + + +class AsyncStreamClient(BaseStreamClient): + def __init__( + self, + api_key, + api_secret, + app_id, + version="v1.0", + timeout=6.0, + base_url=None, + location=None, + ): + super().__init__( + api_key, + api_secret, + app_id, + version=version, + timeout=timeout, + base_url=base_url, + location=location, + ) + token = self.create_jwt_token("collections", "*", feed_id="*", user_id="*") + self.collections = AsyncCollections(self, token) + + token = self.create_jwt_token("personalization", "*", feed_id="*", user_id="*") + self.personalization = AsyncPersonalization(self, token) + + token = self.create_jwt_token("reactions", "*", feed_id="*") + self.reactions = AsyncReactions(self, token) + + token = self.create_jwt_token("users", "*", feed_id="*") + self.users = AsyncUsers(self, token) + + def feed(self, feed_slug, user_id): + feed_slug = validate_feed_slug(feed_slug) + user_id = validate_user_id(user_id) + token = self.create_jwt_token("feed", "*", feed_id="*") + return AsyncFeed(self, feed_slug, user_id, token) + + async def put(self, *args, **kwargs): + return await self._make_request("PUT", *args, **kwargs) + + async def post(self, *args, **kwargs): + return await self._make_request("POST", *args, **kwargs) + + async def get(self, *args, **kwargs): + return await self._make_request("GET", *args, **kwargs) + + async def delete(self, *args, **kwargs): + return await self._make_request("DELETE", *args, **kwargs) + + async def add_to_many(self, activity, feeds): + data = {"activity": activity, "feeds": feeds} + token = self.create_jwt_token("feed", "*", feed_id="*") + return await self.post("feed/add_to_many/", token, data=data) + + async def follow_many(self, follows, 
activity_copy_limit=None): + params = None + + if activity_copy_limit is not None: + params = dict(activity_copy_limit=activity_copy_limit) + token = self.create_jwt_token("follower", "*", feed_id="*") + return await self.post("follow_many/", token, params=params, data=follows) + + async def unfollow_many(self, unfollows): + params = None + + token = self.create_jwt_token("follower", "*", feed_id="*") + return await self.post("unfollow_many/", token, params=params, data=unfollows) + + async def update_activities(self, activities): + if not isinstance(activities, (list, tuple, set)): + raise TypeError("Activities parameter should be of type list") + + auth_token = self.create_jwt_token("activities", "*", feed_id="*") + data = dict(activities=activities) + return await self.post("activities/", auth_token, data=data) + + async def update_activity(self, activity): + return await self.update_activities([activity]) + + async def get_activities( + self, ids=None, foreign_id_times=None, enrich=False, reactions=None, **params + ): + auth_token = self.create_jwt_token("activities", "*", feed_id="*") + + if ids is None and foreign_id_times is None: + raise TypeError( + "One the parameters ids or foreign_id_time must be provided and not None" + ) + + if ids is not None and foreign_id_times is not None: + raise TypeError( + "At most one of the parameters ids or foreign_id_time must be provided" + ) + + endpoint = "activities/" + if enrich or reactions is not None: + endpoint = "enrich/" + endpoint + + query_params = {**params} + + if ids is not None: + query_params["ids"] = ",".join(ids) + + if foreign_id_times is not None: + validate_foreign_id_time(foreign_id_times) + foreign_ids, timestamps = zip(*foreign_id_times) + timestamps = map(_datetime_encoder, timestamps) + query_params["foreign_ids"] = ",".join(foreign_ids) + query_params["timestamps"] = ",".join(timestamps) + + query_params.update(get_reaction_params(reactions)) + + return await self.get(endpoint, auth_token, 
params=query_params) + + async def activity_partial_update( + self, id=None, foreign_id=None, time=None, set=None, unset=None + ): + if id is None and (foreign_id is None or time is None): + raise TypeError( + "The id or foreign_id+time parameters must be provided and not be None" + ) + if id is not None and (foreign_id is not None or time is not None): + raise TypeError( + "Only one of the id or the foreign_id+time parameters can be provided" + ) + + data = {"set": set or {}, "unset": unset or []} + + if id is not None: + data["id"] = id + else: + data["foreign_id"] = foreign_id + data["time"] = time + + return await self.activities_partial_update(updates=[data]) + + async def activities_partial_update(self, updates=None): + auth_token = self.create_jwt_token("activities", "*", feed_id="*") + + data = {"changes": updates or []} + + return await self.post("activity/", auth_token, data=data) + + async def track_engagements(self, engagements): + auth_token = self.create_jwt_token("*", "*", feed_id="*") + await self.post( + "engagement/", + auth_token, + data={"content_list": engagements}, + service_name="analytics", + ) + + async def track_impressions(self, impressions): + auth_token = self.create_jwt_token("*", "*", feed_id="*") + await self.post( + "impression/", auth_token, data=impressions, service_name="analytics" + ) + + async def og(self, target_url): + auth_token = self.create_jwt_token("*", "*", feed_id="*") + params = {"url": target_url} + return await self.get("og/", auth_token, params=params) + + async def follow_stats(self, feed_id, followers_slugs=None, following_slugs=None): + auth_token = self.create_jwt_token("*", "*", feed_id="*") + params = {"followers": feed_id, "following": feed_id} + + if followers_slugs: + params["followers_slugs"] = ( + ",".join(followers_slugs) + if isinstance(followers_slugs, list) + else followers_slugs + ) + + if following_slugs: + params["following_slugs"] = ( + ",".join(following_slugs) + if isinstance(following_slugs, 
list) + else following_slugs + ) + + return await self.get("stats/follow/", auth_token, params=params) + + async def _make_request( + self, + method, + relative_url, + signature, + service_name="api", + params=None, + data=None, + ): + params = params or {} + data = data or {} + serialized = None + default_params = self.get_default_params() + params = self._check_params(params) + default_params.update(params) + headers = self.get_default_header() + headers["Authorization"] = signature + headers["stream-auth-type"] = "jwt" + + if not relative_url.endswith("/"): + relative_url += "/" + + url = self.get_full_url(service_name, relative_url) + + if method.lower() in ["post", "put", "delete"]: + serialized = serializer.dumps(data) + + async with aiohttp.ClientSession() as session: + async with session.request( + method, + url, + data=serialized, + headers=headers, + params=default_params, + timeout=self.timeout, + ) as response: + # remove JWT from logs + headers_to_log = headers.copy() + headers_to_log.pop("Authorization", None) + logger.debug( + f"stream api call {response}, headers {headers_to_log} data {data}", + ) + return await self._parse_response(response) + + async def _parse_response(self, response): + try: + parsed_result = serializer.loads(await response.text()) + except (ValueError, ClientConnectionError): + parsed_result = None + if ( + parsed_result is None + or parsed_result.get("exception") + or response.status >= 500 + ): + self.raise_exception(parsed_result, status_code=response.status) + + return parsed_result + + def _check_params(self, params): + """There is no standard for boolean representation of boolean values in YARL""" + if not isinstance(params, dict): + raise TypeError("Invalid params type") + + for key, value in params.items(): + if isinstance(value, bool): + params[key] = str(value) + + return params diff --git a/stream/client.py b/stream/client/base.py similarity index 50% rename from stream/client.py rename to stream/client/base.py index 
705a252..ee7100f 100644 --- a/stream/client.py +++ b/stream/client/base.py @@ -1,105 +1,21 @@ import json -import logging import os +from abc import ABC, abstractmethod -import jwt import requests -from stream.serializer import _datetime_encoder -from stream import exceptions, serializer -from stream.users import Users -from stream.utils import validate_feed_slug, validate_user_id, validate_foreign_id_time -from requests import Request -from stream.reactions import Reactions -from stream.collections import Collections -from stream.personalization import Personalization -from stream.feed import Feed +from stream import exceptions try: from urllib.parse import urlparse except ImportError: from urlparse import urlparse -logger = logging.getLogger(__name__) - - -class StreamClient(object): - def __init__( - self, - api_key, - api_secret, - app_id, - version="v1.0", - timeout=6.0, - base_url=None, - location=None, - ): - """ - Initialize the client with the given api key and secret - - :param api_key: the api key - :param api_secret: the api secret - :param app_id: the app id - - **Example usage**:: - - import stream - # initialize the client - client = stream.connect('key', 'secret') - # get a feed object - feed = client.feed('aggregated:1') - # write data to the feed - activity_data = {'actor': 1, 'verb': 'tweet', 'object': 1} - activity_id = feed.add_activity(activity_data)['id'] - activities = feed.get() - - feed.follow('flat:3') - activities = feed.get() - feed.unfollow('flat:3') - feed.remove_activity(activity_id) - """ - self.api_key = api_key - self.api_secret = api_secret - self.app_id = app_id - self.version = version - self.timeout = timeout - self.location = location - - self.base_domain_name = "stream-io-api.com" - self.api_location = location - self.custom_api_port = None - self.protocol = "https" - - if os.environ.get("LOCAL"): - self.base_domain_name = "localhost" - self.protocol = "http" - self.custom_api_port = 8000 - self.timeout = 20 - elif base_url 
is not None: - parsed_url = urlparse(base_url) - self.base_domain_name = parsed_url.hostname - self.protocol = parsed_url.scheme - self.custom_api_port = parsed_url.port - self.api_location = "" - elif location is not None: - self.location = location - - self.base_analytics_url = "https://analytics.stream-io-api.com/analytics/" - - self.session = requests.Session() - - token = self.create_jwt_token("personalization", "*", feed_id="*", user_id="*") - self.personalization = Personalization(self, token) - - token = self.create_jwt_token("collections", "*", feed_id="*", user_id="*") - self.collections = Collections(self, token) - - token = self.create_jwt_token("reactions", "*", feed_id="*") - self.reactions = Reactions(self, token) +import jwt - token = self.create_jwt_token("users", "*", feed_id="*") - self.users = Users(self, token) +class AbstractStreamClient(ABC): + @abstractmethod def feed(self, feed_slug, user_id): """ Returns a Feed object @@ -107,195 +23,81 @@ def feed(self, feed_slug, user_id): :param feed_slug: the slug of the feed :param user_id: the user id """ - feed_slug = validate_feed_slug(feed_slug) - user_id = validate_user_id(user_id) - token = self.create_jwt_token("feed", "*", feed_id="*") - return Feed(self, feed_slug, user_id, token) + pass + @abstractmethod def get_default_params(self): """ Returns the params with the API key present """ - params = dict(api_key=self.api_key) - return params + pass + @abstractmethod def get_default_header(self): - base_headers = { - "Content-type": "application/json", - "X-Stream-Client": self.get_user_agent(), - } - return base_headers + pass + @abstractmethod def get_full_url(self, service_name, relative_url): - if self.api_location: - hostname = "%s-%s.%s" % ( - self.api_location, - service_name, - self.base_domain_name, - ) - elif service_name: - hostname = "%s.%s" % (service_name, self.base_domain_name) - else: - hostname = self.base_domain_name - - if self.base_domain_name == "localhost": - hostname = 
"localhost" - - base_url = "%s://%s" % (self.protocol, hostname) - - if self.custom_api_port: - base_url = "%s:%s" % (base_url, self.custom_api_port) - - url = base_url + "/" + service_name + "/" + self.version + "/" + relative_url - return url + pass + @abstractmethod def get_user_agent(self): - from stream import __version__ - - agent = "stream-python-client-%s" % __version__ - return agent - - def _parse_response(self, response): - try: - parsed_result = serializer.loads(response.text) - except ValueError: - parsed_result = None - if ( - parsed_result is None - or parsed_result.get("exception") - or response.status_code >= 500 - ): - self.raise_exception(parsed_result, status_code=response.status_code) - return parsed_result + pass + @abstractmethod def create_user_token(self, user_id, **extra_data): - """Setup the payload for the given user_id with optional + """ + Setup the payload for the given user_id with optional extra data (key, value pairs) and encode it using jwt """ - payload = {"user_id": user_id} - for k, v in extra_data.items(): - payload[k] = v - return jwt.encode(payload, self.api_secret, algorithm="HS256").decode("utf-8") + pass - def create_user_session_token(self, user_id, **extra_data): - return self.create_user_token(user_id, **extra_data) - - def create_jwt_token(self, resource, action, feed_id=None, user_id=None): + @abstractmethod + def create_jwt_token(self, resource, action, feed_id=None, user_id=None, **params): """ - Setup the payload for the given resource, action, feed or user + Set up the payload for the given resource, action, feed or user and encode it using jwt """ - payload = {"action": action, "resource": resource} - if feed_id is not None: - payload["feed_id"] = feed_id - if user_id is not None: - payload["user_id"] = user_id - return jwt.encode(payload, self.api_secret).decode("utf-8") - - def _make_request( - self, - method, - relative_url, - signature, - service_name="api", - params=None, - data=None, - ): - params = params 
or {} - data = data or {} - serialized = None - default_params = self.get_default_params() - default_params.update(params) - headers = self.get_default_header() - headers["Authorization"] = signature - headers["stream-auth-type"] = "jwt" - - if not relative_url.endswith("/"): - relative_url += "/" - - url = self.get_full_url(service_name, relative_url) - - if method.__name__ in ["post", "put", "delete"]: - serialized = serializer.dumps(data) - response = method( - url, - data=serialized, - headers=headers, - params=default_params, - timeout=self.timeout, - ) - logger.debug( - "stream api call %s, headers %s data %s", response.url, headers, data - ) - return self._parse_response(response) + pass + @abstractmethod def raise_exception(self, result, status_code): """ Map the exception code to an exception class and raise it If result.exception and result.detail are available use that Otherwise just raise a generic error """ - from stream.exceptions import get_exception_dict - - exception_class = exceptions.StreamApiException - - def errors_from_fields(exception_fields): - result = [] - if not isinstance(exception_fields, dict): - return exception_fields - - for field, errors in exception_fields.items(): - result.append('Field "%s" errors: %s' % (field, repr(errors))) - return result - - if result is not None: - error_message = result["detail"] - exception_fields = result.get("exception_fields") - if exception_fields is not None: - if isinstance(exception_fields, list): - errors = [ - errors_from_fields(exception_dict) - for exception_dict in exception_fields - ] - errors = [item for sublist in errors for item in sublist] - else: - errors = errors_from_fields(exception_fields) - - error_message = "\n".join(errors) - error_code = result.get("code") - exception_dict = get_exception_dict() - exception_class = exception_dict.get( - error_code, exceptions.StreamApiException - ) - else: - error_message = "GetStreamAPI%s" % status_code - exception = 
exception_class(error_message, status_code=status_code) - raise exception + pass + @abstractmethod def put(self, *args, **kwargs): """ Shortcut for make request """ - return self._make_request(self.session.put, *args, **kwargs) + pass + @abstractmethod def post(self, *args, **kwargs): """ Shortcut for make request """ - return self._make_request(self.session.post, *args, **kwargs) + pass + @abstractmethod def get(self, *args, **kwargs): """ Shortcut for make request """ - return self._make_request(self.session.get, *args, **kwargs) + pass + @abstractmethod def delete(self, *args, **kwargs): """ Shortcut for make request """ - return self._make_request(self.session.delete, *args, **kwargs) + pass + @abstractmethod def add_to_many(self, activity, feeds): """ Adds an activity to many feeds @@ -304,10 +106,9 @@ def add_to_many(self, activity, feeds): :param feeds: the list of follows (eg. ['feed:1', 'feed:2']) """ - data = {"activity": activity, "feeds": feeds} - token = self.create_jwt_token("feed", "*", feed_id="*") - return self.post("feed/add_to_many/", token, data=data) + pass + @abstractmethod def follow_many(self, follows, activity_copy_limit=None): """ Creates many follows @@ -316,65 +117,49 @@ def follow_many(self, follows, activity_copy_limit=None): eg. [{'source': source, 'target': target}] """ - params = None + pass - if activity_copy_limit is not None: - params = dict(activity_copy_limit=activity_copy_limit) - token = self.create_jwt_token("follower", "*", feed_id="*") - return self.post("follow_many/", token, params=params, data=follows) + @abstractmethod + def unfollow_many(self, unfollows): + """ + Unfollows many feeds at batch + :param unfollows: the list of unfollow relations + eg. 
[{'source': source, 'target': target, 'keep_history': keep_history}] + """ + pass + + @abstractmethod def update_activities(self, activities): """ Update or create activities """ - if not isinstance(activities, (list, tuple, set)): - raise TypeError("Activities parameter should be of type list") - - auth_token = self.create_jwt_token("activities", "*", feed_id="*") - data = dict(activities=activities) - return self.post("activities/", auth_token, data=data) + pass + @abstractmethod def update_activity(self, activity): """ Update a single activity """ - return self.update_activities([activity]) + pass - def get_activities(self, ids=None, foreign_id_times=None): + @abstractmethod + def get_activities( + self, ids=None, foreign_id_times=None, enrich=False, reactions=None, **params + ): """ Retrieves activities by their ID or foreign_id + time combination + Pass enrich and reactions options for enrichment + ids: list of activity IDs foreign_id_time: list of tuples (foreign_id, time) """ - auth_token = self.create_jwt_token("activities", "*", feed_id="*") - - if ids is None and foreign_id_times is None: - raise TypeError( - "One the parameters ids or foreign_id_time must be provided and not None" - ) - - if ids is not None and foreign_id_times is not None: - raise TypeError( - "At most one of the parameters ids or foreign_id_time must be provided" - ) - - query_params = {} - - if ids is not None: - query_params["ids"] = ",".join(ids) - - if foreign_id_times is not None: - validate_foreign_id_time(foreign_id_times) - foreign_ids, timestamps = zip(*foreign_id_times) - timestamps = map(_datetime_encoder, timestamps) - query_params["foreign_ids"] = ",".join(foreign_ids) - query_params["timestamps"] = ",".join(timestamps) - - return self.get("activities/", auth_token, params=query_params) + pass + @abstractmethod def activity_partial_update( - self, id=None, foreign_id=None, time=None, set={}, unset=[] + self, id=None, foreign_id=None, time=None, set=None, unset=None ): """ 
Partial update activity, via activity ID or Foreign ID + timestamp @@ -385,27 +170,10 @@ def activity_partial_update( set: object containing the set operations unset: list of unset operations """ + pass - if id is None and (foreign_id is None or time is None): - raise TypeError( - "The id or foreign_id+time parameters must be provided and not be None" - ) - if id is not None and (foreign_id is not None or time is not None): - raise TypeError( - "Only one of the id or the foreign_id+time parameters can be provided" - ) - - data = {"set": set, "unset": unset} - - if id is not None: - data["id"] = id - else: - data["foreign_id"] = foreign_id - data["time"] = time - - return self.activities_partial_update(updates=[data]) - - def activities_partial_update(self, updates=[]): + @abstractmethod + def activities_partial_update(self, updates=None): """ Partial update activity, via activity ID or Foreign ID + timestamp @@ -426,19 +194,236 @@ def activities_partial_update(self, updates=[]): } ] """ + pass - auth_token = self.create_jwt_token("activities", "*", feed_id="*") - - data = {"changes": updates} - - return self.post("activity/", auth_token, data=data) - + @abstractmethod def create_redirect_url(self, target_url, user_id, events): """ Creates a redirect url for tracking the given events in the context of an email using Stream's analytics platform. Learn more at getstream.io/personalization """ + pass + + @abstractmethod + def track_engagements(self, engagements): + """ + Creates a list of engagements + + ;param engagements: Slice of engagements to create. + + eg. 
+ [ + { + "content": "1", + "label": "click", + "features": [ + {"group": "topic", "value": "js"}, + {"group": "user", "value": "tommaso"}, + ], + "user_data": "tommaso", + }, + { + "content": "2", + "label": "click", + "features": [ + {"group": "topic", "value": "go"}, + {"group": "user", "value": "tommaso"}, + ], + "user_data": {"id": "486892", "alias": "Julian"}, + }, + { + "content": "3", + "label": "click", + "features": [{"group": "topic", "value": "go"}], + "user_data": {"id": "tommaso", "alias": "tommaso"}, + }, + ] + """ + pass + + @abstractmethod + def track_impressions(self, impressions): + """ + Creates a list of impressions + + ;param impressions: Slice of impressions to create. + + eg. + [ + { + "content_list": ["1", "2", "3"], + "features": [ + {"group": "topic", "value": "js"}, + {"group": "user", "value": "tommaso"}, + ], + "user_data": {"id": "tommaso", "alias": "tommaso"}, + }, + { + "content_list": ["2", "3", "5"], + "features": [{"group": "topic", "value": "js"}], + "user_data": {"id": "486892", "alias": "Julian"}, + }, + ] + """ + pass + + @abstractmethod + def og(self, target_url): + """ + Retrieve open graph information from a URL which you can + then use to add images and a description to activities. + """ + pass + + @abstractmethod + def follow_stats(self, feed_id, followers_slugs=None, following_slugs=None): + """ + Retrieve the number of follower and following feed stats of a given feed. + For each count, feed slugs can be provided to filter counts accordingly. + + eg. 
+ client.follow_stats( + me, followers_slugs=['user'], following_slugs=['commodities'] + ) + this means to find counts of users following me and count + of commodities I am following + """ + pass + + @abstractmethod + def _make_request( + self, + method, + relative_url, + signature, + service_name="api", + params=None, + data=None, + ): + pass + + @abstractmethod + def _parse_response(self, response): + pass + + +class BaseStreamClient(AbstractStreamClient, ABC): + """ + Initialize the client with the given api key and secret + + :param api_key: the api key + :param api_secret: the api secret + :param app_id: the app id + + **Example usage**:: + + import stream + # initialize the client + client = stream.connect('key', 'secret') + # get a feed object + feed = client.feed('aggregated:1') + # write data to the feed + activity_data = {'actor': 1, 'verb': 'tweet', 'object': 1} + activity_id = feed.add_activity(activity_data)['id'] + activities = feed.get() + + feed.follow('flat:3') + activities = feed.get() + feed.unfollow('flat:3') + feed.remove_activity(activity_id) + """ + + def __init__( + self, + api_key, + api_secret, + app_id, + version="v1.0", + timeout=6.0, + base_url=None, + location=None, + ): + self.api_key = api_key + self.api_secret = api_secret + self.app_id = app_id + self.version = version + self.timeout = timeout + self.location = location + self.base_domain_name = "stream-io-api.com" + self.api_location = location + self.custom_api_port = None + self.protocol = "https" + + if os.environ.get("LOCAL"): + self.base_domain_name = "localhost" + self.protocol = "http" + self.custom_api_port = 8000 + self.timeout = 20 + elif base_url is not None: + parsed_url = urlparse(base_url) + self.base_domain_name = parsed_url.hostname + self.protocol = parsed_url.scheme + self.custom_api_port = parsed_url.port + self.api_location = "" + elif location is not None: + self.location = location + + self.base_analytics_url = "https://analytics.stream-io-api.com/analytics/" 
+ + def create_user_token(self, user_id, **extra_data): + payload = {"user_id": user_id} + for k, v in extra_data.items(): + payload[k] = v + return jwt.encode(payload, self.api_secret, algorithm="HS256") + + def create_jwt_token(self, resource, action, feed_id=None, user_id=None, **params): + payload = {**params, "action": action, "resource": resource} + if feed_id is not None: + payload["feed_id"] = feed_id + if user_id is not None: + payload["user_id"] = user_id + return jwt.encode(payload, self.api_secret, algorithm="HS256") + + def raise_exception(self, result, status_code): + from stream.exceptions import get_exception_dict + + exception_class = exceptions.StreamApiException + + def errors_from_fields(exception_fields): + result = [] + if not isinstance(exception_fields, dict): + return exception_fields + + for field, errors in exception_fields.items(): + result.append(f'Field "{field}" errors: {repr(errors)}') + return result + + if result is not None: + error_message = result["detail"] + exception_fields = result.get("exception_fields") + if exception_fields is not None: + if isinstance(exception_fields, list): + errors = [ + errors_from_fields(exception_dict) + for exception_dict in exception_fields + ] + errors = [item for sublist in errors for item in sublist] + else: + errors = errors_from_fields(exception_fields) + + error_message = "\n".join(errors) + error_code = result.get("code") + exception_dict = get_exception_dict() + exception_class = exception_dict.get( + error_code, exceptions.StreamApiException + ) + else: + error_message = f"GetStreamAPI{status_code}" + exception = exception_class(error_message, status_code=status_code) + raise exception + + def create_redirect_url(self, target_url, user_id, events): # generate the JWT token auth_token = self.create_jwt_token( "redirect_and_track", "*", "*", user_id=user_id @@ -447,11 +432,61 @@ def create_redirect_url(self, target_url, user_id, events): params = dict(auth_type="jwt", 
authorization=auth_token, url=target_url) params["api_key"] = self.api_key params["events"] = json.dumps(events) - url = self.base_analytics_url + "redirect/" + url = f"{self.base_analytics_url}redirect/" # we get the url from the prepare request, this skips issues with # python's urlencode implementation - request = Request("GET", url, params=params) + request = requests.Request("GET", url, params=params) prepared_request = request.prepare() # validate the target url is valid - Request("GET", target_url).prepare() + requests.Request("GET", target_url).prepare() return prepared_request.url + + def get_full_url(self, service_name, relative_url): + if self.api_location: + hostname = "{}{}.{}".format( + self.api_location, + "" if service_name == "analytics" else f"-{service_name}", + self.base_domain_name, + ) + elif service_name: + hostname = f"{service_name}.{self.base_domain_name}" + else: + hostname = self.base_domain_name + + if self.base_domain_name == "localhost": + hostname = "localhost" + + base_url = f"{self.protocol}://{hostname}" + + if self.custom_api_port: + base_url = f"{base_url}:{self.custom_api_port}" + + url = ( + base_url + + "/" + + service_name + + "/" + + self.version + + "/" + + relative_url.replace( + "//", "/" + ) # non-standard url will cause redirect and so can lose its body + ) + + return url + + def get_default_params(self): + params = dict(api_key=self.api_key) + return params + + def get_default_header(self): + base_headers = { + "Content-type": "application/json", + "X-Stream-Client": self.get_user_agent(), + } + return base_headers + + def get_user_agent(self): + from stream import __version__ + + return f"stream-python-client-{__version__}" diff --git a/stream/client/client.py b/stream/client/client.py new file mode 100644 index 0000000..0345360 --- /dev/null +++ b/stream/client/client.py @@ -0,0 +1,289 @@ +import json +import logging + +import requests +from requests import Request + +from stream import serializer +from 
stream.client.base import BaseStreamClient +from stream.collections.collections import Collections +from stream.feed import Feed +from stream.personalization import Personalization +from stream.reactions import Reactions +from stream.serializer import _datetime_encoder +from stream.users import Users +from stream.utils import ( + get_reaction_params, + validate_feed_slug, + validate_foreign_id_time, + validate_user_id, +) + +try: + from urllib.parse import urlparse +except ImportError: + pass + # from urlparse import urlparse + +logger = logging.getLogger(__name__) + + +class StreamClient(BaseStreamClient): + def __init__( + self, + api_key, + api_secret, + app_id, + version="v1.0", + timeout=6.0, + base_url=None, + location=None, + ): + super().__init__( + api_key, + api_secret, + app_id, + version=version, + timeout=timeout, + base_url=base_url, + location=location, + ) + + self.session = requests.Session() + + token = self.create_jwt_token("personalization", "*", feed_id="*", user_id="*") + self.personalization = Personalization(self, token) + + token = self.create_jwt_token("collections", "*", feed_id="*", user_id="*") + self.collections = Collections(self, token) + + token = self.create_jwt_token("reactions", "*", feed_id="*") + self.reactions = Reactions(self, token) + + token = self.create_jwt_token("users", "*", feed_id="*") + self.users = Users(self, token) + + def feed(self, feed_slug, user_id): + feed_slug = validate_feed_slug(feed_slug) + user_id = validate_user_id(user_id) + token = self.create_jwt_token("feed", "*", feed_id="*") + return Feed(self, feed_slug, user_id, token) + + def put(self, *args, **kwargs): + return self._make_request(self.session.put, *args, **kwargs) + + def post(self, *args, **kwargs): + return self._make_request(self.session.post, *args, **kwargs) + + def get(self, *args, **kwargs): + return self._make_request(self.session.get, *args, **kwargs) + + def delete(self, *args, **kwargs): + return 
self._make_request(self.session.delete, *args, **kwargs) + + def add_to_many(self, activity, feeds): + data = {"activity": activity, "feeds": feeds} + token = self.create_jwt_token("feed", "*", feed_id="*") + return self.post("feed/add_to_many/", token, data=data) + + def follow_many(self, follows, activity_copy_limit=None): + params = None + + if activity_copy_limit is not None: + params = dict(activity_copy_limit=activity_copy_limit) + token = self.create_jwt_token("follower", "*", feed_id="*") + return self.post("follow_many/", token, params=params, data=follows) + + def unfollow_many(self, unfollows): + params = None + + token = self.create_jwt_token("follower", "*", feed_id="*") + return self.post("unfollow_many/", token, params=params, data=unfollows) + + def update_activities(self, activities): + if not isinstance(activities, (list, tuple, set)): + raise TypeError("Activities parameter should be of type list") + + auth_token = self.create_jwt_token("activities", "*", feed_id="*") + data = dict(activities=activities) + return self.post("activities/", auth_token, data=data) + + def update_activity(self, activity): + return self.update_activities([activity]) + + def get_activities( + self, ids=None, foreign_id_times=None, enrich=False, reactions=None, **params + ): + auth_token = self.create_jwt_token("activities", "*", feed_id="*") + + if ids is None and foreign_id_times is None: + raise TypeError( + "One the parameters ids or foreign_id_time must be provided and not None" + ) + + if ids is not None and foreign_id_times is not None: + raise TypeError( + "At most one of the parameters ids or foreign_id_time must be provided" + ) + + endpoint = "activities/" + if enrich or reactions is not None: + endpoint = "enrich/" + endpoint + + query_params = {**params} + + if ids is not None: + query_params["ids"] = ",".join(ids) + + if foreign_id_times is not None: + validate_foreign_id_time(foreign_id_times) + foreign_ids, timestamps = zip(*foreign_id_times) + timestamps 
= map(_datetime_encoder, timestamps) + query_params["foreign_ids"] = ",".join(foreign_ids) + query_params["timestamps"] = ",".join(timestamps) + + query_params.update(get_reaction_params(reactions)) + + return self.get(endpoint, auth_token, params=query_params) + + def activity_partial_update( + self, id=None, foreign_id=None, time=None, set=None, unset=None + ): + if id is None and (foreign_id is None or time is None): + raise TypeError( + "The id or foreign_id+time parameters must be provided and not be None" + ) + if id is not None and (foreign_id is not None or time is not None): + raise TypeError( + "Only one of the id or the foreign_id+time parameters can be provided" + ) + + data = {"set": set or {}, "unset": unset or []} + + if id is not None: + data["id"] = id + else: + data["foreign_id"] = foreign_id + data["time"] = time + + return self.activities_partial_update(updates=[data]) + + def activities_partial_update(self, updates=None): + auth_token = self.create_jwt_token("activities", "*", feed_id="*") + + data = {"changes": updates or []} + + return self.post("activity/", auth_token, data=data) + + def create_redirect_url(self, target_url, user_id, events): + # generate the JWT token + auth_token = self.create_jwt_token( + "redirect_and_track", "*", "*", user_id=user_id + ) + # setup the params + params = dict(auth_type="jwt", authorization=auth_token, url=target_url) + params["api_key"] = self.api_key + params["events"] = json.dumps(events) + url = f"{self.base_analytics_url}redirect/" + # we get the url from the prepare request, this skips issues with + # python's urlencode implementation + request = Request("GET", url, params=params) + prepared_request = request.prepare() + # validate the target url is valid + Request("GET", target_url).prepare() + return prepared_request.url + + def track_engagements(self, engagements): + auth_token = self.create_jwt_token("*", "*", feed_id="*") + self.post( + "engagement/", + auth_token, + data={"content_list": 
engagements}, + service_name="analytics", + ) + + def track_impressions(self, impressions): + auth_token = self.create_jwt_token("*", "*", feed_id="*") + self.post("impression/", auth_token, data=impressions, service_name="analytics") + + def og(self, target_url): + auth_token = self.create_jwt_token("*", "*", feed_id="*") + params = {"url": target_url} + return self.get("og/", auth_token, params=params) + + def follow_stats(self, feed_id, followers_slugs=None, following_slugs=None): + auth_token = self.create_jwt_token("*", "*", feed_id="*") + params = { + "followers": feed_id, + "following": feed_id, + } + + if followers_slugs: + params["followers_slugs"] = ( + ",".join(followers_slugs) + if isinstance(followers_slugs, list) + else followers_slugs + ) + + if following_slugs: + params["following_slugs"] = ( + ",".join(following_slugs) + if isinstance(following_slugs, list) + else following_slugs + ) + + return self.get("stats/follow/", auth_token, params=params) + + def _make_request( + self, + method, + relative_url, + signature, + service_name="api", + params=None, + data=None, + ): + params = params or {} + data = data or {} + serialized = None + default_params = self.get_default_params() + default_params.update(params) + headers = self.get_default_header() + headers["Authorization"] = signature + headers["stream-auth-type"] = "jwt" + + if not relative_url.endswith("/"): + relative_url += "/" + + url = self.get_full_url(service_name, relative_url) + + if method.__name__ in ["post", "put", "delete"]: + serialized = serializer.dumps(data) + response = method( + url, + data=serialized, + headers=headers, + params=default_params, + timeout=self.timeout, + ) + # remove JWT from logs + headers_to_log = headers.copy() + headers_to_log.pop("Authorization", None) + logger.debug( + f"stream api call {response.url}, headers {headers_to_log} data {data}" + ) + return self._parse_response(response) + + def _parse_response(self, response): + try: + parsed_result = 
serializer.loads(response.text) + except ValueError: + parsed_result = None + if ( + parsed_result is None + or parsed_result.get("exception") + or response.status_code >= 500 + ): + self.raise_exception(parsed_result, status_code=response.status_code) + + return parsed_result diff --git a/stream/collections.py b/stream/collections.py deleted file mode 100644 index f8950f0..0000000 --- a/stream/collections.py +++ /dev/null @@ -1,135 +0,0 @@ -class Collections(object): - def __init__(self, client, token): - """ - Used to manipulate data at the 'meta' endpoint - :param client: the api client - :param token: the token - """ - - self.client = client - self.token = token - - def create_reference(self, collection_name=None, id=None, entry=None): - if isinstance(entry, (dict,)): - _collection = entry["collection"] - _id = entry["id"] - elif collection_name is not None and id is not None: - _collection = collection_name - _id = id - else: - raise ValueError( - "must call with collection_name and id or with entry arguments" - ) - return "SO:%s:%s" % (_collection, _id) - - def upsert(self, collection_name, data): - """ - "Insert new or update existing data. - :param collection_name: Collection Name i.e 'user' - :param data: list of dictionaries - :return: http response, 201 if successful along with data posted. - - **Example**:: - client.collection.upsert('user', [{"id": '1', "name": "Juniper", "hobbies": ["Playing", "Sleeping", "Eating"]}, - {"id": '2', "name": "Ruby", "interests": ["Sunbeams", "Surprise Attacks"]}]) - """ - - if type(data) != list: - data = [data] - - data_json = {collection_name: data} - - response = self.client.post( - "collections/", - service_name="api", - signature=self.token, - data={"data": data_json}, - ) - return response - - def select(self, collection_name, ids): - """ - Retrieve data from meta endpoint, can include data you've uploaded or personalization/analytic data - created by the stream team. 
- :param collection_name: Collection Name i.e 'user' - :param ids: list of ids of feed group i.e [123,456] - :return: meta data as json blob - - **Example**:: - client.collection.select('user', 1) - client.collection.select('user', [1,2,3]) - """ - - if type(ids) != list: - ids = [ids] - ids = [str(i) for i in ids] - - foreign_ids = [] - for i in range(len(ids)): - foreign_ids.append("%s:%s" % (collection_name, ids[i])) - foreign_ids = ",".join(foreign_ids) - - response = self.client.get( - "collections/", - service_name="api", - params={"foreign_ids": foreign_ids}, - signature=self.token, - ) - - return response - - def delete_many(self, collection_name, ids): - """ - Delete data from meta. - :param collection_name: Collection Name i.e 'user' - :param ids: list of ids to delete i.e [123,456] - :return: data that was deleted if successful or not. - - **Example**:: - client.collections.delete('user', '1') - collections.delete('user', ['1','2','3']) - """ - - if type(ids) != list: - ids = [ids] - ids = [str(i) for i in ids] - - params = {"collection_name": collection_name, "ids": ids} - - response = self.client.delete( - "collections/", service_name="api", params=params, signature=self.token - ) - - return response - - def add(self, collection_name, data, id=None, user_id=None): - payload = dict(id=id, data=data, user_id=user_id) - return self.client.post( - "collections/%s" % collection_name, - service_name="api", - signature=self.token, - data=payload, - ) - - def get(self, collection_name, id): - return self.client.get( - "collections/%s/%s" % (collection_name, id), - service_name="api", - signature=self.token, - ) - - def update(self, collection_name, id, data=None): - payload = dict(data=data) - return self.client.put( - "collections/%s/%s" % (collection_name, id), - service_name="api", - signature=self.token, - data=payload, - ) - - def delete(self, collection_name, id): - return self.client.delete( - "collections/%s/%s" % (collection_name, id), - 
service_name="api", - signature=self.token, - ) diff --git a/stream/collections/__init__.py b/stream/collections/__init__.py new file mode 100644 index 0000000..8264c83 --- /dev/null +++ b/stream/collections/__init__.py @@ -0,0 +1 @@ +from .collections import AsyncCollections, Collections diff --git a/stream/collections/base.py b/stream/collections/base.py new file mode 100644 index 0000000..10c0805 --- /dev/null +++ b/stream/collections/base.py @@ -0,0 +1,99 @@ +from abc import ABC, abstractmethod + + +class AbstractCollection(ABC): + @abstractmethod + def create_reference(self, collection_name=None, id=None, entry=None): + pass + + @abstractmethod + def upsert(self, collection_name, data): + """ + "Insert new or update existing data. + :param collection_name: Collection Name i.e 'user' + :param data: list of dictionaries + :return: http response, 201 if successful along with data posted. + + **Example**:: + client.collections.upsert( + 'user', [ + {"id": '1', "name": "Juniper", "hobbies": ["Playing", "Sleeping", "Eating"]}, + {"id": '2', "name": "Ruby", "interests": ["Sunbeams", "Surprise Attacks"]} + ] + ) + """ + pass + + @abstractmethod + def select(self, collection_name, ids): + """ + Retrieve data from meta endpoint, can include data you've uploaded or + personalization/analytic data + created by the stream team. + :param collection_name: Collection Name i.e 'user' + :param ids: list of ids of feed group i.e [123,456] + :return: meta data as json blob + + **Example**:: + client.collections.select('user', 1) + client.collections.select('user', [1,2,3]) + """ + pass + + @abstractmethod + def delete_many(self, collection_name, ids): + """ + Delete data from meta. + :param collection_name: Collection Name i.e 'user' + :param ids: list of ids to delete i.e [123,456] + :return: data that was deleted if successful or not. 
+ + **Example**:: + client.collections.delete('user', '1') + client.collections.delete('user', ['1','2','3']) + """ + pass + + @abstractmethod + def add(self, collection_name, data, id=None, user_id=None): + pass + + @abstractmethod + def get(self, collection_name, id): + pass + + @abstractmethod + def update(self, collection_name, id, data=None): + pass + + @abstractmethod + def delete(self, collection_name, id): + pass + + +class BaseCollection(AbstractCollection, ABC): + URL = "collections/" + SERVICE_NAME = "api" + + def __init__(self, client, token): + """ + Used to manipulate data at the 'meta' endpoint + :param client: the api client + :param token: the token + """ + + self.client = client + self.token = token + + def create_reference(self, collection_name=None, id=None, entry=None): + if isinstance(entry, dict): + _collection = entry["collection"] + _id = entry["id"] + elif collection_name is not None and id is not None: + _collection = collection_name + _id = id + else: + raise ValueError( + "must call with collection_name and id or with entry arguments" + ) + return f"SO:{_collection}:{_id}" diff --git a/stream/collections/collections.py b/stream/collections/collections.py new file mode 100644 index 0000000..eebc730 --- /dev/null +++ b/stream/collections/collections.py @@ -0,0 +1,148 @@ +from stream.collections.base import BaseCollection + + +class Collections(BaseCollection): + def upsert(self, collection_name, data): + if not isinstance(data, list): + data = [data] + + data_json = {collection_name: data} + + return self.client.post( + self.URL, + service_name=self.SERVICE_NAME, + signature=self.token, + data={"data": data_json}, + ) + + def select(self, collection_name, ids): + if not isinstance(ids, list): + ids = [ids] + + foreign_ids = ",".join(f"{collection_name}:{k}" for i, k in enumerate(ids)) + + return self.client.get( + self.URL, + service_name=self.SERVICE_NAME, + params={"foreign_ids": foreign_ids}, + signature=self.token, + ) + + def 
delete_many(self, collection_name, ids): + if not isinstance(ids, list): + ids = [ids] + ids = [str(i) for i in ids] + + params = {"collection_name": collection_name, "ids": ids} + + return self.client.delete( + self.URL, + service_name=self.SERVICE_NAME, + params=params, + signature=self.token, + ) + + def add(self, collection_name, data, id=None, user_id=None): + payload = dict(id=id, data=data, user_id=user_id) + return self.client.post( + f"{self.URL}/{collection_name}", + service_name=self.SERVICE_NAME, + signature=self.token, + data=payload, + ) + + def get(self, collection_name, id): + return self.client.get( + f"{self.URL}/{collection_name}/{id}", + service_name=self.SERVICE_NAME, + signature=self.token, + ) + + def update(self, collection_name, id, data=None): + payload = dict(data=data) + return self.client.put( + f"{self.URL}/{collection_name}/{id}", + service_name=self.SERVICE_NAME, + signature=self.token, + data=payload, + ) + + def delete(self, collection_name, id): + return self.client.delete( + f"{self.URL}/{collection_name}/{id}", + service_name=self.SERVICE_NAME, + signature=self.token, + ) + + +class AsyncCollections(BaseCollection): + async def upsert(self, collection_name, data): + if not isinstance(data, list): + data = [data] + + data_json = {collection_name: data} + + return await self.client.post( + self.URL, + service_name=self.SERVICE_NAME, + signature=self.token, + data={"data": data_json}, + ) + + async def select(self, collection_name, ids): + if not isinstance(ids, list): + ids = [ids] + + foreign_ids = ",".join(f"{collection_name}:{k}" for i, k in enumerate(ids)) + + return await self.client.get( + self.URL, + service_name=self.SERVICE_NAME, + params={"foreign_ids": foreign_ids}, + signature=self.token, + ) + + async def delete_many(self, collection_name, ids): + if not isinstance(ids, list): + ids = [ids] + ids = [str(i) for i in ids] + + params = {"collection_name": collection_name, "ids": ids} + return await self.client.delete( + 
self.URL, + service_name=self.SERVICE_NAME, + params=params, + signature=self.token, + ) + + async def get(self, collection_name, id): + return await self.client.get( + f"{self.URL}/{collection_name}/{id}", + service_name=self.SERVICE_NAME, + signature=self.token, + ) + + async def add(self, collection_name, data, id=None, user_id=None): + payload = dict(id=id, data=data, user_id=user_id) + return await self.client.post( + f"{self.URL}/{collection_name}", + service_name=self.SERVICE_NAME, + signature=self.token, + data=payload, + ) + + async def update(self, collection_name, id, data=None): + payload = dict(data=data) + return await self.client.put( + f"{self.URL}/{collection_name}/{id}", + service_name=self.SERVICE_NAME, + signature=self.token, + data=payload, + ) + + async def delete(self, collection_name, id): + return await self.client.delete( + f"{self.URL}/{collection_name}/{id}", + service_name=self.SERVICE_NAME, + signature=self.token, + ) diff --git a/stream/exceptions.py b/stream/exceptions.py index cd35f2c..00e9b70 100644 --- a/stream/exceptions.py +++ b/stream/exceptions.py @@ -8,14 +8,13 @@ def __init__(self, error_message, status_code=None): code = 1 def __repr__(self): - return "%s (%s)" % (self.__class__.__name__, self.detail) + return f"{self.__class__.__name__} ({self.detail})" def __unicode__(self): - return "%s (%s)" % (self.__class__.__name__, self.detail) + return f"{self.__class__.__name__} ({self.detail})" class ApiKeyException(StreamApiException): - """ Raised when there is an issue with your Access Key """ @@ -25,7 +24,6 @@ class ApiKeyException(StreamApiException): class SignatureException(StreamApiException): - """ Raised when there is an issue with the signature you provided """ @@ -35,7 +33,6 @@ class SignatureException(StreamApiException): class InputException(StreamApiException): - """ Raised when you send the wrong data to the API """ @@ -45,7 +42,6 @@ class InputException(StreamApiException): class 
CustomFieldException(StreamApiException): - """ Raised when there are missing or misconfigured custom fields """ @@ -55,7 +51,6 @@ class CustomFieldException(StreamApiException): class FeedConfigException(StreamApiException): - """ Raised when there are missing or misconfigured custom fields """ @@ -65,7 +60,6 @@ class FeedConfigException(StreamApiException): class SiteSuspendedException(StreamApiException): - """ Raised when the site requesting the data is suspended """ @@ -75,7 +69,6 @@ class SiteSuspendedException(StreamApiException): class InvalidPaginationException(StreamApiException): - """ Raised when there is an issue with your Access Key """ @@ -108,7 +101,6 @@ class RankingException(FeedConfigException): class RateLimitReached(StreamApiException): - """ Raised when too many requests are performed """ diff --git a/stream/feed.py b/stream/feed.py deleted file mode 100644 index d21feba..0000000 --- a/stream/feed.py +++ /dev/null @@ -1,246 +0,0 @@ -from stream.utils import validate_feed_id, validate_user_id, validate_feed_slug - - -class Feed(object): - def __init__(self, client, feed_slug, user_id, token): - """ - Initializes the Feed class - - :param client: the api client - :param slug: the slug of the feed, ie user, flat, notification - :param user_id: the id of the user - :param token: the token - """ - self.client = client - self.slug = feed_slug - self.user_id = str(user_id) - self.id = "%s:%s" % (feed_slug, user_id) - self.token = token - - self.feed_url = "feed/%s/" % self.id.replace(":", "/") - self.enriched_feed_url = "enrich/feed/%s/" % self.id.replace(":", "/") - self.feed_targets_url = "feed_targets/%s/" % self.id.replace(":", "/") - self.feed_together = self.id.replace(":", "") - self.signature = self.feed_together + " " + self.token - - def create_scope_token(self, resource, action): - """ - creates the JWT token to perform an action on a owned resource - """ - return self.client.create_jwt_token( - resource, action, feed_id=self.feed_together 
- ) - - def get_readonly_token(self): - """ - creates the JWT token to perform readonly operations - """ - return self.create_scope_token("*", "read") - - def add_activity(self, activity_data): - """ - Adds an activity to the feed, this will also trigger an update - to all the feeds which follow this feed - - :param activity_data: a dict with the activity data - - **Example**:: - - activity_data = {'actor': 1, 'verb': 'tweet', 'object': 1} - activity_id = feed.add_activity(activity_data) - """ - if activity_data.get("to") and not isinstance( - activity_data.get("to"), (list, tuple, set) - ): - raise TypeError( - "please provide the activity's to field as a list not a string" - ) - - if activity_data.get("to"): - activity_data = activity_data.copy() - activity_data["to"] = self.add_to_signature(activity_data["to"]) - - token = self.create_scope_token("feed", "write") - result = self.client.post(self.feed_url, data=activity_data, signature=token) - return result - - def add_activities(self, activity_list): - """ - Adds a list of activities to the feed - - :param activity_list: a list with the activity data dicts - - **Example**:: - - activity_data = [ - {'actor': 1, 'verb': 'tweet', 'object': 1}, - {'actor': 2, 'verb': 'watch', 'object': 2}, - ] - result = feed.add_activities(activity_data) - """ - activities = [] - for activity_data in activity_list: - activity_data = activity_data.copy() - activities.append(activity_data) - if activity_data.get("to"): - activity_data["to"] = self.add_to_signature(activity_data["to"]) - token = self.create_scope_token("feed", "write") - data = dict(activities=activities) - if activities: - result = self.client.post(self.feed_url, data=data, signature=token) - return result - - def remove_activity(self, activity_id=None, foreign_id=None): - """ - Removes an activity from the feed - - :param activity_id: the activity id to remove from this feed - (note this will also remove the activity from feeds which follow this feed) - :param 
foreign_id: the foreign id you provided when adding the activity - """ - identifier = activity_id or foreign_id - if not identifier: - raise ValueError("please either provide activity_id or foreign_id") - url = self.feed_url + "%s/" % identifier - params = dict() - token = self.create_scope_token("feed", "delete") - if foreign_id is not None: - params["foreign_id"] = "1" - result = self.client.delete(url, signature=token, params=params) - return result - - def get(self, enrich=False, reactions=None, **params): - """ - Get the activities in this feed - - **Example**:: - - # fast pagination using id filtering - feed.get(limit=10, id_lte=100292310) - - # slow pagination using offset - feed.get(limit=10, offset=10) - """ - for field in ["mark_read", "mark_seen"]: - value = params.get(field) - if isinstance(value, (list, tuple)): - params[field] = ",".join(value) - token = self.create_scope_token("feed", "read") - - if enrich or reactions is not None: - feed_url = self.enriched_feed_url - else: - feed_url = self.feed_url - - if reactions is not None and not isinstance(reactions, (dict,)): - raise TypeError("reactions argument should be a dictionary") - - if reactions is not None: - if reactions.get("own"): - params["withOwnReactions"] = True - if reactions.get("recent"): - params["withRecentReactions"] = True - if reactions.get("counts"): - params["withReactionCounts"] = True - - response = self.client.get(feed_url, params=params, signature=token) - return response - - def follow( - self, target_feed_slug, target_user_id, activity_copy_limit=None, **extra_data - ): - """ - Follows the given feed - - :param activity_copy_limit: how many activities should be copied from target feed - :param target_feed_slug: the slug of the target feed - :param target_user_id: the user id - """ - target_feed_slug = validate_feed_slug(target_feed_slug) - target_user_id = validate_user_id(target_user_id) - target_feed_id = "%s:%s" % (target_feed_slug, target_user_id) - url = self.feed_url + 
"follows/" - data = { - "target": target_feed_id, - "target_token": self.client.feed(target_feed_slug, target_user_id).token, - } - if activity_copy_limit is not None: - data["activity_copy_limit"] = activity_copy_limit - token = self.create_scope_token("follower", "write") - data.update(extra_data) - response = self.client.post(url, data=data, signature=token) - return response - - def unfollow(self, target_feed_slug, target_user_id, keep_history=False): - """ - Unfollow the given feed - """ - target_feed_slug = validate_feed_slug(target_feed_slug) - target_user_id = validate_user_id(target_user_id) - target_feed_id = "%s:%s" % (target_feed_slug, target_user_id) - token = self.create_scope_token("follower", "delete") - url = self.feed_url + "follows/%s/" % target_feed_id - params = {} - if keep_history: - params["keep_history"] = True - response = self.client.delete(url, signature=token, params=params) - return response - - def followers(self, offset=0, limit=25, feeds=None): - """ - Lists the followers for the given feed - """ - feeds = feeds is not None and ",".join(feeds) or "" - params = {"limit": limit, "offset": offset, "filter": feeds} - url = self.feed_url + "followers/" - token = self.create_scope_token("follower", "read") - response = self.client.get(url, params=params, signature=token) - return response - - def following(self, offset=0, limit=25, feeds=None): - """ - List the feeds which this feed is following - """ - if feeds is not None: - feeds = feeds is not None and ",".join(feeds) or "" - params = {"offset": offset, "limit": limit, "filter": feeds} - url = self.feed_url + "follows/" - token = self.create_scope_token("follower", "read") - response = self.client.get(url, params=params, signature=token) - return response - - def add_to_signature(self, recipients): - """ - Takes a list of recipients such as ['user:1', 'user:2'] - and turns it into a list with the tokens included - ['user:1 token', 'user:2 token'] - """ - data = [] - for recipient in 
recipients: - validate_feed_id(recipient) - feed_slug, user_id = recipient.split(":") - feed = self.client.feed(feed_slug, user_id) - data.append("%s %s" % (recipient, feed.token)) - return data - - def update_activity_to_targets( - self, - foreign_id, - time, - new_targets=None, - added_targets=None, - removed_targets=None, - ): - data = {"foreign_id": foreign_id, "time": time} - - if new_targets is not None: - data["new_targets"] = new_targets - if added_targets is not None: - data["added_targets"] = added_targets - if removed_targets is not None: - data["removed_targets"] = removed_targets - - url = self.feed_targets_url + "activity_to_targets/" - - token = self.create_scope_token("feed_targets", "write") - return self.client.post(url, data=data, signature=token) diff --git a/stream/feed/__init__.py b/stream/feed/__init__.py new file mode 100644 index 0000000..1f3c784 --- /dev/null +++ b/stream/feed/__init__.py @@ -0,0 +1 @@ +from .feeds import AsyncFeed, Feed diff --git a/stream/feed/base.py b/stream/feed/base.py new file mode 100644 index 0000000..dc76fce --- /dev/null +++ b/stream/feed/base.py @@ -0,0 +1,172 @@ +from abc import ABC, abstractmethod + +from stream.utils import validate_feed_id + + +class AbstractFeed(ABC): + @abstractmethod + def create_scope_token(self, resource, action): + """ + creates the JWT token to perform an action on a owned resource + """ + pass + + @abstractmethod + def get_readonly_token(self): + """ + creates the JWT token to perform readonly operations + """ + pass + + @abstractmethod + def add_activity(self, activity_data): + """ + Adds an activity to the feed, this will also trigger an update + to all the feeds which follow this feed + + :param activity_data: a dict with the activity data + + **Example**:: + + activity_data = {'actor': 1, 'verb': 'tweet', 'object': 1} + activity_id = feed.add_activity(activity_data) + """ + pass + + @abstractmethod + def add_activities(self, activity_list): + """ + Adds a list of activities to 
the feed + + :param activity_list: a list with the activity data dicts + + **Example**:: + + activity_data = [ + {'actor': 1, 'verb': 'tweet', 'object': 1}, + {'actor': 2, 'verb': 'watch', 'object': 2}, + ] + result = feed.add_activities(activity_data) + """ + pass + + @abstractmethod + def remove_activity(self, activity_id=None, foreign_id=None): + """ + Removes an activity from the feed + + :param activity_id: the activity id to remove from this feed + (note this will also remove the activity from feeds which follow this feed) + :param foreign_id: the foreign id you provided when adding the activity + """ + pass + + @abstractmethod + def get(self, enrich=False, reactions=None, **params): + """ + Get the activities in this feed + + **Example**:: + + # fast pagination using id filtering + feed.get(limit=10, id_lte=100292310) + + # slow pagination using offset + feed.get(limit=10, offset=10) + """ + pass + + @abstractmethod + def follow( + self, target_feed_slug, target_user_id, activity_copy_limit=None, **extra_data + ): + """ + Follows the given feed + + :param activity_copy_limit: how many activities should be copied from target + feed + :param target_feed_slug: the slug of the target feed + :param target_user_id: the user id + """ + pass + + @abstractmethod + def unfollow(self, target_feed_slug, target_user_id, keep_history=False): + """ + Unfollow the given feed + """ + pass + + @abstractmethod + def followers(self, offset=0, limit=25, feeds=None): + """ + Lists the followers for the given feed + """ + pass + + @abstractmethod + def following(self, offset=0, limit=25, feeds=None): + """ + List the feeds which this feed is following + """ + pass + + @abstractmethod + def add_to_signature(self, recipients): + """ + Takes a list of recipients such as ['user:1', 'user:2'] + and turns it into a list with the tokens included + ['user:1 token', 'user:2 token'] + """ + pass + + @abstractmethod + def update_activity_to_targets( + self, + foreign_id, + time, + 
new_targets=None, + added_targets=None, + removed_targets=None, + ): + pass + + +class BaseFeed(AbstractFeed, ABC): + def __init__(self, client, feed_slug, user_id, token): + """ + Initializes the Feed class + + :param client: the api client + :param feed_slug: the slug of the feed, ie user, flat, notification + :param user_id: the id of the user + :param token: the token + """ + self.client = client + self.slug = feed_slug + self.user_id = f"{user_id}" + self.id = f"{feed_slug}:{user_id}" + self.token = token.decode("utf-8") if isinstance(token, bytes) else token + _id = self.id.replace(":", "/") + self.feed_url = f"feed/{_id}/" + self.enriched_feed_url = f"enrich/feed/{_id}/" + self.feed_targets_url = f"feed_targets/{_id}/" + self.feed_together = self.id.replace(":", "") + self.signature = f"{self.feed_together} {self.token}" + + def create_scope_token(self, resource, action): + return self.client.create_jwt_token( + resource, action, feed_id=self.feed_together + ) + + def get_readonly_token(self): + return self.create_scope_token("*", "read") + + def add_to_signature(self, recipients): + data = [] + for recipient in recipients: + validate_feed_id(recipient) + feed_slug, user_id = recipient.split(":") + feed = self.client.feed(feed_slug, user_id) + data.append(f"{recipient} {feed.token}") + return data diff --git a/stream/feed/feeds.py b/stream/feed/feeds.py new file mode 100644 index 0000000..5305427 --- /dev/null +++ b/stream/feed/feeds.py @@ -0,0 +1,238 @@ +from stream.feed.base import BaseFeed +from stream.utils import get_reaction_params, validate_feed_slug, validate_user_id + + +class Feed(BaseFeed): + def add_activity(self, activity_data): + if activity_data.get("to") and not isinstance( + activity_data.get("to"), (list, tuple, set) + ): + raise TypeError( + "please provide the activity's to field as a list not a string" + ) + + if activity_data.get("to"): + activity_data = activity_data.copy() + activity_data["to"] = 
self.add_to_signature(activity_data["to"]) + + token = self.create_scope_token("feed", "write") + return self.client.post(self.feed_url, data=activity_data, signature=token) + + def add_activities(self, activity_list): + activities = [] + for activity_data in activity_list: + activity_data = activity_data.copy() + activities.append(activity_data) + if activity_data.get("to"): + activity_data["to"] = self.add_to_signature(activity_data["to"]) + token = self.create_scope_token("feed", "write") + data = dict(activities=activities) + if activities: + return self.client.post(self.feed_url, data=data, signature=token) + return None + + def remove_activity(self, activity_id=None, foreign_id=None): + identifier = activity_id or foreign_id + if not identifier: + raise ValueError("please either provide activity_id or foreign_id") + url = f"{self.feed_url}{identifier}/" + params = dict() + token = self.create_scope_token("feed", "delete") + if foreign_id is not None: + params["foreign_id"] = "1" + return self.client.delete(url, signature=token, params=params) + + def get(self, enrich=False, reactions=None, **params): + for field in ["mark_read", "mark_seen"]: + value = params.get(field) + if isinstance(value, (list, tuple)): + params[field] = ",".join(value) + token = self.create_scope_token("feed", "read") + + if enrich or reactions is not None: + feed_url = self.enriched_feed_url + else: + feed_url = self.feed_url + + params.update(get_reaction_params(reactions)) + return self.client.get(feed_url, params=params, signature=token) + + def follow( + self, target_feed_slug, target_user_id, activity_copy_limit=None, **extra_data + ): + target_feed_slug = validate_feed_slug(target_feed_slug) + target_user_id = validate_user_id(target_user_id) + target_feed_id = f"{target_feed_slug}:{target_user_id}" + url = f"{self.feed_url}follows/" + target_token = self.client.feed(target_feed_slug, target_user_id).token + data = {"target": target_feed_id, "target_token": target_token} + if 
activity_copy_limit is not None: + data["activity_copy_limit"] = activity_copy_limit + token = self.create_scope_token("follower", "write") + data.update(extra_data) + return self.client.post(url, data=data, signature=token) + + def unfollow(self, target_feed_slug, target_user_id, keep_history=False): + target_feed_slug = validate_feed_slug(target_feed_slug) + target_user_id = validate_user_id(target_user_id) + target_feed_id = f"{target_feed_slug}:{target_user_id}" + token = self.create_scope_token("follower", "delete") + url = f"{self.feed_url}follows/{target_feed_id}/" + params = {} + if keep_history: + params["keep_history"] = True + return self.client.delete(url, signature=token, params=params) + + def followers(self, offset=0, limit=25, feeds=None): + feeds = ",".join(feeds) if feeds is not None else "" + params = {"limit": limit, "offset": offset, "filter": feeds} + url = f"{self.feed_url}followers/" + token = self.create_scope_token("follower", "read") + return self.client.get(url, params=params, signature=token) + + def following(self, offset=0, limit=25, feeds=None): + feeds = ",".join(feeds) if feeds is not None else "" + params = {"offset": offset, "limit": limit, "filter": feeds} + url = f"{self.feed_url}follows/" + token = self.create_scope_token("follower", "read") + return self.client.get(url, params=params, signature=token) + + def update_activity_to_targets( + self, + foreign_id, + time, + new_targets=None, + added_targets=None, + removed_targets=None, + ): + data = {"foreign_id": foreign_id, "time": time} + + if new_targets is not None: + data["new_targets"] = new_targets + if added_targets is not None: + data["added_targets"] = added_targets + if removed_targets is not None: + data["removed_targets"] = removed_targets + + url = f"{self.feed_targets_url}activity_to_targets/" + token = self.create_scope_token("feed_targets", "write") + return self.client.post(url, data=data, signature=token) + + +class AsyncFeed(BaseFeed): + async def 
# Asynchronous variants of the feed operations: identical contracts to the
# sync versions, but every client call is awaited.

async def add_activity(self, activity_data):
    """Publish one activity; a "to" field must be a list/tuple/set."""
    targets = activity_data.get("to")
    if targets and not isinstance(targets, (list, tuple, set)):
        raise TypeError(
            "please provide the activity's to field as a list not a string"
        )
    if targets:
        # Copy before signing so the caller's dict is never mutated.
        activity_data = activity_data.copy()
        activity_data["to"] = self.add_to_signature(targets)
    token = self.create_scope_token("feed", "write")
    return await self.client.post(self.feed_url, data=activity_data, signature=token)


async def add_activities(self, activity_list):
    """Publish a batch of activities; returns None for an empty batch."""
    prepared = []
    for original in activity_list:
        entry = original.copy()
        prepared.append(entry)
        if entry.get("to"):
            entry["to"] = self.add_to_signature(entry["to"])
    token = self.create_scope_token("feed", "write")
    if not prepared:
        return None
    return await self.client.post(
        self.feed_url, data={"activities": prepared}, signature=token
    )


async def remove_activity(self, activity_id=None, foreign_id=None):
    """Delete one activity by id or foreign_id.

    :raises ValueError: when neither identifier is supplied.
    """
    identifier = activity_id or foreign_id
    if not identifier:
        raise ValueError("please either provide activity_id or foreign_id")
    token = self.create_scope_token("feed", "delete")
    query = {"foreign_id": "1"} if foreign_id is not None else {}
    return await self.client.delete(
        f"{self.feed_url}{identifier}/", signature=token, params=query
    )


async def get(self, enrich=False, reactions=None, **params):
    """Read the feed; joins list-valued mark_read/mark_seen parameters."""
    for flag in ("mark_read", "mark_seen"):
        value = params.get(flag)
        if isinstance(value, (list, tuple)):
            params[flag] = ",".join(value)
    token = self.create_scope_token("feed", "read")
    use_enriched = enrich or reactions is not None
    target_url = self.enriched_feed_url if use_enriched else self.feed_url
    params.update(get_reaction_params(reactions))
    return await self.client.get(target_url, params=params, signature=token)


async def follow(self, target_feed_slug, target_user_id, activity_copy_limit=None, **extra_data):
    """Follow another feed, optionally bounding how much history is copied."""
    slug = validate_feed_slug(target_feed_slug)
    user = validate_user_id(target_user_id)
    payload = {
        "target": f"{slug}:{user}",
        "target_token": self.client.feed(slug, user).token,
    }
    if activity_copy_limit is not None:
        payload["activity_copy_limit"] = activity_copy_limit
    token = self.create_scope_token("follower", "write")
    payload.update(extra_data)
    return await self.client.post(
        f"{self.feed_url}follows/", data=payload, signature=token
    )


async def unfollow(self, target_feed_slug, target_user_id, keep_history=False):
    """Stop following a feed; keep_history leaves copied activities in place."""
    slug = validate_feed_slug(target_feed_slug)
    user = validate_user_id(target_user_id)
    token = self.create_scope_token("follower", "delete")
    query = {"keep_history": True} if keep_history else {}
    return await self.client.delete(
        f"{self.feed_url}follows/{slug}:{user}/", signature=token, params=query
    )


async def followers(self, offset=0, limit=25, feeds=None):
    """List feeds following this one, optionally filtered to given feed ids."""
    selector = ",".join(feeds) if feeds is not None else ""
    token = self.create_scope_token("follower", "read")
    return await self.client.get(
        f"{self.feed_url}followers/",
        params={"limit": limit, "offset": offset, "filter": selector},
        signature=token,
    )


async def following(self, offset=0, limit=25, feeds=None):
    """List feeds this one follows, optionally filtered to given feed ids."""
    selector = ",".join(feeds) if feeds is not None else ""
    token = self.create_scope_token("follower", "read")
    return await self.client.get(
        f"{self.feed_url}follows/",
        params={"offset": offset, "limit": limit, "filter": selector},
        signature=token,
    )


async def update_activity_to_targets(self, foreign_id, time, new_targets=None, added_targets=None, removed_targets=None):
    """Replace or adjust the "to" targets of an activity addressed by
    its (foreign_id, time) pair."""
    payload = {"foreign_id": foreign_id, "time": time}
    for field, value in (
        ("new_targets", new_targets),
        ("added_targets", added_targets),
        ("removed_targets", removed_targets),
    ):
        if value is not None:
            payload[field] = value
    token = self.create_scope_token("feed_targets", "write")
    return await self.client.post(
        f"{self.feed_targets_url}activity_to_targets/", data=payload, signature=token
    )
from abc import ABC, abstractmethod


class AbstractPersonalization(ABC):
    """Interface implemented by the sync and async personalization clients."""

    @abstractmethod
    def get(self, resource, **params):
        pass

    @abstractmethod
    def post(self, resource, **params):
        pass

    @abstractmethod
    def delete(self, resource, **params):
        pass


class BasePersonalization(AbstractPersonalization, ABC):
    """Shared construction state for personalization clients."""

    SERVICE_NAME = "personalization"

    def __init__(self, client, token):
        """
        Methods to interact with personalized feeds.
        :param client: the api client
        :param token: the token
        """
        self.client = client
        self.token = token


class Personalization(BasePersonalization):
    def get(self, resource, **params):
        """
        Get personalized activities for this feed
        :param resource: personalized resource endpoint i.e "follow_recommendations"
        :param params: params to pass to url i.e user_id = "user:123"
        :return: personalized feed

        **Example**::
            personalization.get('follow_recommendations', user_id=123, limit=10, offset=10)
        """
        return self.client.get(
            resource,
            service_name=self.SERVICE_NAME,
            params=params,
            signature=self.token,
        )

    def post(self, resource, **params):
        """
        Generic function to post data to personalization endpoint
        :param resource: personalized resource endpoint i.e "follow_recommendations"
        :param params: params to pass to url (data is a reserved keyword to post to body)

        **Example**::
            #Accept or reject recommendations.
            personalization.post('follow_recommendations', user_id=123, accepted=[123,345],
            rejected=[456])
        """
        # BUGFIX: params["data"] raised KeyError whenever no `data` kwarg was
        # passed, although `data` is documented as an optional reserved
        # keyword; .get() keeps it optional (body is None when absent).
        data = params.get("data") or None

        return self.client.post(
            resource,
            service_name=self.SERVICE_NAME,
            params=params,
            signature=self.token,
            data=data,
        )

    def delete(self, resource, **params):
        """
        shortcut to delete metadata or activities
        :param resource: personalized url endpoint typical "meta"
        :param params: params to pass to url i.e user_id = "user:123"
        :return: data that was deleted if successful or not.
        """
        return self.client.delete(
            resource,
            service_name=self.SERVICE_NAME,
            params=params,
            signature=self.token,
        )


class AsyncPersonalization(BasePersonalization):
    async def get(self, resource, **params):
        """
        Get personalized activities for this feed
        :param resource: personalized resource endpoint i.e "follow_recommendations"
        :param params: params to pass to url i.e user_id = "user:123"
        :return: personalized feed

        **Example**::
            personalization.get('follow_recommendations', user_id=123, limit=10, offset=10)
        """
        return await self.client.get(
            resource,
            service_name=self.SERVICE_NAME,
            params=params,
            signature=self.token,
        )

    async def post(self, resource, **params):
        """
        Generic function to post data to personalization endpoint
        :param resource: personalized resource endpoint i.e "follow_recommendations"
        :param params: params to pass to url (data is a reserved keyword to post to body)

        **Example**::
            #Accept or reject recommendations.
            personalization.post('follow_recommendations', user_id=123, accepted=[123,345],
            rejected=[456])
        """
        # BUGFIX: same optional-`data` fix as the sync client above.
        data = params.get("data") or None

        return await self.client.post(
            resource,
            service_name=self.SERVICE_NAME,
            params=params,
            signature=self.token,
            data=data,
        )

    async def delete(self, resource, **params):
        """
        shortcut to delete metadata or activities
        :param resource: personalized url endpoint typical "meta"
        :param params: params to pass to url i.e user_id = "user:123"
        :return: data that was deleted if successful or not.
        """
        return await self.client.delete(
            resource,
            service_name=self.SERVICE_NAME,
            params=params,
            signature=self.token,
        )
from abc import ABC, abstractmethod


class AbstractReactions(ABC):
    """Interface shared by the sync and async reaction clients."""

    @abstractmethod
    def add(self, kind, activity_id, user_id, data=None, target_feeds=None,
            target_feeds_extra_data=None, moderation_template=None):
        pass

    @abstractmethod
    def get(self, reaction_id):
        pass

    @abstractmethod
    def update(self, reaction_id, data=None, target_feeds=None):
        pass

    @abstractmethod
    def delete(self, reaction_id, soft=False):
        pass

    @abstractmethod
    def restore(self, reaction_id):
        pass

    @abstractmethod
    def add_child(self, kind, parent_id, user_id, data=None, target_feeds=None,
                  target_feeds_extra_data=None, moderation_template=None):
        pass

    @abstractmethod
    def filter(self, **params):
        pass


class BaseReactions(AbstractReactions, ABC):
    """Common state and endpoint construction for reaction clients."""

    API_ENDPOINT = "reaction/"
    SERVICE_NAME = "api"

    def __init__(self, client, token):
        self.client = client
        self.token = token

    def _prepare_endpoint_for_filter(self, **params):
        """Build the filter endpoint and return (endpoint, remaining_params).

        Consumes "kind" and the first lookup key present among reaction_id,
        activity_id and user_id (in that priority order).  BUGFIX: the keys
        were previously popped from a local copy only, so the caller's params
        still contained them and they leaked into the query string; returning
        the cleaned params lets filter() forward only the untouched ones
        (matching the behavior of the pre-refactor implementation).
        """
        kind = params.pop("kind", None)
        lookup_field = ""
        lookup_value = ""
        for key in ("reaction_id", "activity_id", "user_id"):
            if params.get(key):
                lookup_field = key
                lookup_value = params.pop(key)
                break

        endpoint = f"{self.API_ENDPOINT}{lookup_field}/{lookup_value}/"
        if kind is not None:
            endpoint += f"{kind}/"

        return endpoint, params


class Reactions(BaseReactions):
    def add(self, kind, activity_id, user_id, data=None, target_feeds=None,
            target_feeds_extra_data=None, moderation_template=None):
        """Create a reaction of the given kind on an activity."""
        payload = {
            "kind": kind,
            "activity_id": activity_id,
            "data": data,
            "target_feeds": target_feeds,
            "target_feeds_extra_data": target_feeds_extra_data,
            "user_id": user_id,
        }
        if moderation_template is not None:
            payload["moderation_template"] = moderation_template
        return self.client.post(
            self.API_ENDPOINT,
            service_name=self.SERVICE_NAME,
            signature=self.token,
            data=payload,
        )

    def get(self, reaction_id):
        """Fetch a single reaction by id."""
        return self.client.get(
            f"{self.API_ENDPOINT}{reaction_id}",
            service_name=self.SERVICE_NAME,
            signature=self.token,
        )

    def update(self, reaction_id, data=None, target_feeds=None):
        """Overwrite a reaction's data and/or target feeds."""
        return self.client.put(
            f"{self.API_ENDPOINT}{reaction_id}",
            service_name=self.SERVICE_NAME,
            signature=self.token,
            data={"data": data, "target_feeds": target_feeds},
        )

    def delete(self, reaction_id, soft=False):
        """Delete a reaction; a soft delete can be restored later."""
        return self.client.delete(
            f"{self.API_ENDPOINT}{reaction_id}",
            service_name=self.SERVICE_NAME,
            signature=self.token,
            params={"soft": soft},
        )

    def restore(self, reaction_id):
        """Undo a soft delete."""
        return self.client.put(
            f"{self.API_ENDPOINT}{reaction_id}/restore",
            service_name=self.SERVICE_NAME,
            signature=self.token,
        )

    def add_child(self, kind, parent_id, user_id, data=None, target_feeds=None,
                  target_feeds_extra_data=None, moderation_template=None):
        """Create a child reaction (e.g. a like on a comment reaction)."""
        payload = {
            "kind": kind,
            "parent": parent_id,
            "data": data,
            "target_feeds": target_feeds,
            "target_feeds_extra_data": target_feeds_extra_data,
            "user_id": user_id,
        }
        if moderation_template is not None:
            payload["moderation_template"] = moderation_template
        return self.client.post(
            self.API_ENDPOINT,
            service_name=self.SERVICE_NAME,
            signature=self.token,
            data=payload,
        )

    def filter(self, **params):
        """List reactions by reaction_id, activity_id or user_id, optionally
        narrowed to a kind; remaining params are forwarded as query args."""
        endpoint, query = self._prepare_endpoint_for_filter(**params)
        return self.client.get(
            endpoint,
            service_name=self.SERVICE_NAME,
            signature=self.token,
            params=query,
        )


class AsyncReactions(BaseReactions):
    async def add(self, kind, activity_id, user_id, data=None, target_feeds=None,
                  target_feeds_extra_data=None, moderation_template=None):
        """Create a reaction of the given kind on an activity."""
        payload = {
            "kind": kind,
            "activity_id": activity_id,
            "data": data,
            "target_feeds": target_feeds,
            "target_feeds_extra_data": target_feeds_extra_data,
            "user_id": user_id,
        }
        if moderation_template is not None:
            payload["moderation_template"] = moderation_template
        return await self.client.post(
            self.API_ENDPOINT,
            service_name=self.SERVICE_NAME,
            signature=self.token,
            data=payload,
        )

    async def get(self, reaction_id):
        """Fetch a single reaction by id."""
        return await self.client.get(
            f"{self.API_ENDPOINT}{reaction_id}",
            service_name=self.SERVICE_NAME,
            signature=self.token,
        )

    async def update(self, reaction_id, data=None, target_feeds=None):
        """Overwrite a reaction's data and/or target feeds."""
        return await self.client.put(
            f"{self.API_ENDPOINT}{reaction_id}",
            service_name=self.SERVICE_NAME,
            signature=self.token,
            data={"data": data, "target_feeds": target_feeds},
        )

    async def delete(self, reaction_id, soft=False):
        """Delete a reaction; a soft delete can be restored later."""
        return await self.client.delete(
            f"{self.API_ENDPOINT}{reaction_id}",
            service_name=self.SERVICE_NAME,
            signature=self.token,
            params={"soft": soft},
        )

    async def restore(self, reaction_id):
        """Undo a soft delete."""
        return await self.client.put(
            f"{self.API_ENDPOINT}{reaction_id}/restore",
            service_name=self.SERVICE_NAME,
            signature=self.token,
        )

    async def add_child(self, kind, parent_id, user_id, data=None, target_feeds=None,
                        target_feeds_extra_data=None, moderation_template=None):
        """Create a child reaction (e.g. a like on a comment reaction)."""
        payload = {
            "kind": kind,
            "parent": parent_id,
            "data": data,
            "target_feeds": target_feeds,
            "target_feeds_extra_data": target_feeds_extra_data,
            "user_id": user_id,
        }
        if moderation_template is not None:
            payload["moderation_template"] = moderation_template
        return await self.client.post(
            self.API_ENDPOINT,
            service_name=self.SERVICE_NAME,
            signature=self.token,
            data=payload,
        )

    async def filter(self, **params):
        """List reactions by reaction_id, activity_id or user_id, optionally
        narrowed to a kind; remaining params are forwarded as query args."""
        endpoint, query = self._prepare_endpoint_for_filter(**params)
        return await self.client.get(
            endpoint,
            service_name=self.SERVICE_NAME,
            signature=self.token,
            params=query,
        )
import asyncio
import os
import sys
from uuid import uuid4

import pytest_asyncio

from stream import connect


def wrapper(meth):
    """Wrap an async API method so every response is checked for 'duration'."""

    async def _parse_response(*args, **kwargs):
        response = await meth(*args, **kwargs)
        assert "duration" in response
        return response

    return _parse_response


@pytest_asyncio.fixture(scope="module")
def event_loop():
    """Create an instance of the default event loop for each test case."""
    loop = asyncio.get_event_loop_policy().new_event_loop()
    yield loop
    loop.close()


@pytest_asyncio.fixture
async def async_client():
    """Yield an async Stream client configured from the environment."""
    key = os.getenv("STREAM_KEY")
    secret = os.getenv("STREAM_SECRET")
    if not key or not secret:
        print(
            "To run the tests the STREAM_KEY and STREAM_SECRET variables "
            "need to be available. \n"
            "Please create a pull request if you are an external "
            "contributor, because these variables are automatically added "
            "by Travis."
        )
        sys.exit(1)

    client = connect(key, secret, location="qa", timeout=30, use_async=True)
    # BUGFIX: the wrapped function was previously created and discarded
    # (`wrapper(client._parse_response)`), so the 'duration' assertion never
    # ran on any response; rebind the wrapped version onto the client.
    client._parse_response = wrapper(client._parse_response)
    yield client


@pytest_asyncio.fixture
def user1(async_client):
    # Feed ids embed a uuid so parallel/live test runs don't interfere.
    return async_client.feed("user", f"1-{uuid4()}")


@pytest_asyncio.fixture
def user2(async_client):
    return async_client.feed("user", f"2-{uuid4()}")


@pytest_asyncio.fixture
def aggregated2(async_client):
    return async_client.feed("aggregated", f"2-{uuid4()}")


@pytest_asyncio.fixture
def aggregated3(async_client):
    return async_client.feed("aggregated", f"3-{uuid4()}")


@pytest_asyncio.fixture
def topic(async_client):
    return async_client.feed("topic", f"1-{uuid4()}")


@pytest_asyncio.fixture
def flat3(async_client):
    return async_client.feed("flat", f"3-{uuid4()}")
# Live-API integration tests for the async client.  Fixtures (async_client,
# user1, ...) come from conftest.py; the id-comparison helpers are defined
# earlier in this module.


def assert_clearly_not_equal(a, b):
    """Fail unless the two datetimes differ by at least one millisecond."""
    if abs(a - b) < timedelta(milliseconds=1):
        raise ValueError("the dates are too close")


async def _test_sleep(production_wait):
    """
    when testing against a live API, sometimes we need a small sleep to
    ensure data stability, however when testing locally the wait does
    not need to be as long
    :param production_wait: float, number of seconds to sleep when hitting real API
    :return: None
    """
    await asyncio.sleep(production_wait)


@pytest.mark.asyncio
async def test_update_activities_create(async_client):
    payload = [
        {
            "actor": "user:1",
            "verb": "do",
            "object": "object:1",
            "foreign_id": "object:1",
            "time": datetime.utcnow().isoformat(),
        }
    ]
    assert await async_client.update_activities(payload)


@pytest.mark.asyncio
async def test_add_activity(async_client):
    feed = async_client.feed("user", f"py1-{uuid4()}")
    created = await feed.add_activity({"actor": 1, "verb": "tweet", "object": 1})
    fetched = (await feed.get(limit=1))["results"]
    assert fetched[0]["id"] == created["id"]


@pytest.mark.asyncio
async def test_add_activity_to_inplace_change(async_client):
    feed = async_client.feed("user", f"py1-{uuid4()}")
    team_feed = async_client.feed("user", "teamy")
    payload = {"actor": 1, "verb": "tweet", "object": 1, "to": [team_feed.id]}
    await feed.add_activity(payload)
    # the client must not mutate the caller's dict
    assert payload["to"] == [team_feed.id]


@pytest.mark.asyncio
async def test_add_activities_to_inplace_change(async_client):
    feed = async_client.feed("user", f"py1-{uuid4()}")
    team_feed = async_client.feed("user", f"teamy-{uuid4()}")
    payload = {"actor": 1, "verb": "tweet", "object": 1, "to": [team_feed.id]}
    await feed.add_activities([payload])
    assert payload["to"] == [team_feed.id]


@pytest.mark.asyncio
async def test_add_activity_to(async_client):
    # test for sending an activities to the team feed using to
    user_feed, team_feed, team_follower_feed = (
        async_client.feed("user", f"{name}-{uuid4()}")
        for name in ("user", "teamy", "team_follower")
    )
    await team_follower_feed.follow(team_feed.slug, team_feed.user_id)
    activity = await user_feed.add_activity(
        {"actor": 1, "verb": "tweet", "object": 1, "to": [team_feed.id]}
    )
    activity_id = activity["id"]

    # see if the new activity is also in the team feed
    results = (await team_feed.get(limit=1))["results"]
    assert results[0]["id"] == activity_id
    assert results[0]["origin"] is None
    # see if the fanout process also works
    results = (await team_follower_feed.get(limit=1))["results"]
    assert results[0]["id"] == activity_id
    assert results[0]["origin"] == team_feed.id
    # and validate removing also works
    await user_feed.remove_activity(activity["id"])
    # check the user pyto feed
    assert_first_activity_id_not_equal(
        (await team_feed.get(limit=1))["results"], activity_id
    )
    # and the flat feed
    assert_first_activity_id_not_equal(
        (await team_follower_feed.get(limit=1))["results"], activity_id
    )


@pytest.mark.asyncio
async def test_remove_activity(user1):
    activity = await user1.add_activity({"actor": 1, "verb": "tweet", "object": 1})
    assert len((await user1.get(limit=8))["results"]) == 1

    await user1.remove_activity(activity["id"])
    # verify that no activities were returned
    assert len((await user1.get(limit=8))["results"]) == 0


@pytest.mark.asyncio
async def test_remove_activity_by_foreign_id(user1):
    await user1.add_activity(
        {"actor": 1, "verb": "tweet", "object": 1, "foreign_id": "tweet:10"}
    )
    results = (await user1.get(limit=8))["results"]
    assert len(results) == 1
    assert results[0]["id"] != ""
    assert results[0]["foreign_id"] == "tweet:10"

    await user1.remove_activity(foreign_id="tweet:10")
    # verify that no activities were returned
    assert len((await user1.get(limit=8))["results"]) == 0
    # verify this doesn't raise an error, but fails silently
    await user1.remove_activity(foreign_id="tweet:unknownandmissing")


@pytest.mark.asyncio
async def test_add_activities(user1):
    created = await user1.add_activities(
        [
            {"actor": 1, "verb": "tweet", "object": 1},
            {"actor": 2, "verb": "watch", "object": 2},
        ]
    )
    created_ids = [a["id"] for a in created["activities"]]
    fetched_ids = [a["id"] for a in (await user1.get(limit=2))["results"]]
    # feeds return newest first
    assert fetched_ids == created_ids[::-1]


@pytest.mark.asyncio
async def test_add_activities_to(async_client, user1):
    pyto2 = async_client.feed("user", f"pyto2-{uuid4()}")
    pyto3 = async_client.feed("user", f"pyto3-{uuid4()}")

    targets = [pyto2.id, pyto3.id]
    created = await user1.add_activities(
        [
            {"actor": 1, "verb": "tweet", "object": 1, "to": targets},
            {"actor": 2, "verb": "watch", "object": 2, "to": targets},
        ]
    )
    created_ids = [a["id"] for a in created["activities"]]
    fetched_ids = [a["id"] for a in (await user1.get(limit=2))["results"]]
    assert fetched_ids == created_ids[::-1]
    # test first target
    fetched_ids = [a["id"] for a in (await pyto2.get(limit=2))["results"]]
    assert fetched_ids == created_ids[::-1]
    # test second target
    fetched_ids = [a["id"] for a in (await pyto3.get(limit=2))["results"]]
    assert fetched_ids == created_ids[::-1]


@pytest.mark.asyncio
async def test_follow_and_source(async_client):
    feed = async_client.feed("user", f"test_follow-{uuid4()}")
    agg_feed = async_client.feed("aggregated", "test_follow")
    actor_id = random.randint(10, 100000)
    created = await feed.add_activity({"actor": actor_id, "verb": "tweet", "object": 1})
    await agg_feed.follow(feed.slug, feed.user_id)

    results = (await agg_feed.get(limit=3))["results"]
    activity = _get_first_aggregated_activity(results)
    found_id = activity["id"] if activity is not None else None
    assert activity["origin"] == feed.id
    assert found_id == created["id"]


@pytest.mark.asyncio
async def test_empty_followings(async_client):
    asocial = async_client.feed("user", f"asocialpython-{uuid4()}")
    assert (await asocial.following())["results"] == []


@pytest.mark.asyncio
async def test_get_followings(async_client):
    social = async_client.feed("user", f"psocial-{uuid4()}")
    await social.follow("user", "apy")
    await social.follow("user", "bpy")
    await social.follow("user", "cpy")
    page = (await social.following(offset=0, limit=2))["results"]
    assert len(page) == 2
    assert page[0]["feed_id"] == social.id
    assert page[0]["target_id"] == "user:cpy"
    page = (await social.following(offset=1, limit=2))["results"]
    assert len(page) == 2
    assert page[0]["feed_id"] == social.id
    assert page[0]["target_id"] == "user:bpy"


@pytest.mark.asyncio
async def test_empty_followers(async_client):
    asocial = async_client.feed("user", f"asocialpython-{uuid4()}")
    assert (await asocial.followers())["results"] == []


@pytest.mark.asyncio
async def test_get_followers(async_client):
    social = async_client.feed("user", f"psocial-{uuid4()}")
    spammy1 = async_client.feed("user", f"spammy1-{uuid4()}")
    spammy2 = async_client.feed("user", f"spammy2-{uuid4()}")
    spammy3 = async_client.feed("user", f"spammy3-{uuid4()}")
    for spammer in (spammy1, spammy2, spammy3):
        await spammer.follow("user", social.user_id)
    page = (await social.followers(offset=0, limit=2))["results"]
    assert len(page) == 2
    assert page[0]["feed_id"] == spammy3.id
    assert page[0]["target_id"] == social.id
    page = (await social.followers(offset=1, limit=2))["results"]
    assert len(page) == 2
    assert page[0]["feed_id"] == spammy2.id
    assert page[0]["target_id"] == social.id


@pytest.mark.asyncio
async def test_empty_do_i_follow(async_client):
    social = async_client.feed("user", f"psocial-{uuid4()}")
    await social.follow("user", "apy")
    await social.follow("user", "bpy")
    assert (await social.following(feeds=["user:missingpy"]))["results"] == []


@pytest.mark.asyncio
async def test_do_i_follow(async_client):
    social = async_client.feed("user", f"psocial-{uuid4()}")
    await social.follow("user", "apy")
    await social.follow("user", "bpy")
    matches = (await social.following(feeds=["user:apy"]))["results"]
    assert len(matches) == 1
    assert matches[0]["feed_id"] == social.id
    assert matches[0]["target_id"] == "user:apy"


@pytest.mark.asyncio
async def test_update_activity_to_targets(user1):
    now = datetime.utcnow().isoformat()
    foreign_id = "user:1"
    await user1.add_activity(
        {
            "actor": 1,
            "verb": "tweet",
            "object": 1,
            "foreign_id": foreign_id,
            "time": now,
            "to": ["user:1", "user:2"],
        }
    )

    ret = await user1.update_activity_to_targets(
        foreign_id, now, new_targets=["user:3", "user:2"]
    )
    assert len(ret["activity"]["to"]) == 2
    assert "user:2" in ret["activity"]["to"]
    assert "user:3" in ret["activity"]["to"]

    ret = await user1.update_activity_to_targets(
        foreign_id,
        now,
        added_targets=["user:4", "user:5"],
        removed_targets=["user:3"],
    )
    assert len(ret["activity"]["to"]) == 3
    assert "user:2" in ret["activity"]["to"]
    assert "user:4" in ret["activity"]["to"]
    assert "user:5" in ret["activity"]["to"]


@pytest.mark.asyncio
async def test_get(user1):
    first = (await user1.add_activity({"actor": 1, "verb": "tweet", "object": 1}))["id"]
    second = (await user1.add_activity({"actor": 2, "verb": "add", "object": 2}))["id"]
    third = (await user1.add_activity({"actor": 3, "verb": "watch", "object": 2}))["id"]
    results = (await user1.get(limit=2))["results"]
    # verify the first two results (newest first)
    assert len(results) == 2
    assert results[0]["id"] == third
    assert results[1]["id"] == second
    # try offset based
    results = (await user1.get(limit=2, offset=1))["results"]
    assert results[0]["id"] == second
    # try id_lt based
    results = (await user1.get(limit=2, id_lt=second))["results"]
    assert results[0]["id"] == first


@pytest.mark.asyncio
async def test_get_not_marked_seen(async_client):
    notification_feed = async_client.feed("notification", f"test_mark_seen-{uuid4()}")
    for activity in (await notification_feed.get(limit=3))["results"]:
        assert not activity["is_seen"]


@pytest.mark.asyncio
async def test_mark_seen_on_get(async_client):
    notification_feed = async_client.feed("notification", f"test_mark_seen-{uuid4()}")
    for activity in (await notification_feed.get(limit=100))["results"]:
        await notification_feed.remove_activity(activity["id"])

    old_activities = [
        await notification_feed.add_activity(
            {"actor": 1, "verb": "tweet", "object": 1}
        ),
        await notification_feed.add_activity(
            {"actor": 2, "verb": "add", "object": 2}
        ),
        await notification_feed.add_activity(
            {"actor": 3, "verb": "watch", "object": 3}
        ),
    ]

    await notification_feed.get(
        mark_seen=[old_activities[0]["id"], old_activities[1]["id"]]
    )

    activities = (await notification_feed.get(limit=3))["results"]

    # is the seen state correct
    for activity in activities:
        # using a loop in case we're retrieving activities in a different
        # order than old_activities
        if old_activities[0]["id"] == activity["id"]:
            assert activity["is_seen"]
        if old_activities[1]["id"] == activity["id"]:
            assert activity["is_seen"]
        if old_activities[2]["id"] == activity["id"]:
            assert not activity["is_seen"]

    # see if the state properly resets after we add another activity
    await notification_feed.add_activity({"actor": 3, "verb": "watch", "object": 3})
    activities = (await notification_feed.get(limit=3))["results"]
    assert not activities[0]["is_seen"]
    assert len(activities[0]["activities"]) == 2
activity["is_read"]
+            assert not activity["is_seen"]
+
+
+@pytest.mark.asyncio
+async def test_api_key_exception():
+    client = stream.connect(
+        "5crf3bhfzesnMISSING",
+        "tfq2sdqpj9g446sbv653x3aqmgn33hsn8uzdc9jpskaw8mj6vsnhzswuwptuj9su",
+        use_async=True,
+    )
+    user1 = client.feed("user", "1")
+    activity_data = {
+        "actor": 1,
+        "verb": "tweet",
+        "object": 1,
+        "debug_example_undefined": "test",
+    }
+    with pytest.raises(ApiKeyException):
+        await user1.add_activity(activity_data)
+
+
+@pytest.mark.asyncio
+async def test_complex_field(user1):
+    activity_data = {
+        "actor": 1,
+        "verb": "tweet",
+        "object": 1,
+        "participants": ["Tommaso", "Thierry"],
+    }
+    response = await user1.add_activity(activity_data)
+    activity_id = response["id"]
+    response = await user1.get(limit=1)
+    activities = response["results"]
+    assert activities[0]["id"] == activity_id
+    assert activities[0]["participants"] == ["Tommaso", "Thierry"]
+
+
+@pytest.mark.asyncio
+async def test_uniqueness(user1):
+    """
+    In order for things to be considered unique they need:
+    a.) The same time and activity data
+    b.) The same time and foreign id
+    """
+
+    utcnow = datetime.now(tz=pytz.UTC)
+    activity_data = {"actor": 1, "verb": "tweet", "object": 1, "time": utcnow}
+    await user1.add_activity(activity_data)
+    await user1.add_activity(activity_data)
+    response = await user1.get(limit=2)
+    activities = response["results"]
+    assert_datetime_almost_equal(activities[0]["time"], utcnow)
+    if len(activities) > 1:
+        assert_clearly_not_equal(activities[1]["time"], utcnow)
+
+
+@pytest.mark.asyncio
+async def test_uniqueness_topic(flat3, topic, user1):
+    """
+    In order for things to be considered unique they need:
+    a.) The same time and activity data, or
+    b.) 
The same time and foreign id + """ + # follow both the topic and the user + await flat3.follow("topic", topic.user_id) + await flat3.follow("user", user1.user_id) + # add the same activity twice + now = datetime.now(tzlocal()) + tweet = f"My Way {uuid4()}" + activity_data = { + "actor": 1, + "verb": "tweet", + "object": 1, + "time": now, + "tweet": tweet, + } + await topic.add_activity(activity_data) + await user1.add_activity(activity_data) + # verify that flat3 contains the activity exactly once + response = await flat3.get(limit=3) + activity_tweets = [a.get("tweet") for a in response["results"]] + assert activity_tweets.count(tweet) == 1 + + +@pytest.mark.asyncio +async def test_uniqueness_foreign_id(user1): + now = datetime.now(tzlocal()) + utcnow = now.astimezone(pytz.utc) + + activity_data = { + "actor": 1, + "verb": "tweet", + "object": 1, + "foreign_id": "tweet:11", + "time": utcnow, + } + await user1.add_activity(activity_data) + + activity_data = { + "actor": 2, + "verb": "tweet", + "object": 3, + "foreign_id": "tweet:11", + "time": utcnow, + } + await user1.add_activity(activity_data) + response = await user1.get(limit=10) + activities = response["results"] + # the second post should have overwritten the first one (because they + # had same id) + + assert len(activities) == 1 + assert activities[0]["object"] == "3" + assert activities[0]["foreign_id"] == "tweet:11" + assert_datetime_almost_equal(activities[0]["time"], utcnow) + + +@pytest.mark.asyncio +async def test_time_ordering(user2): + """ + datetime.datetime.now(tz=pytz.utc) is our recommended approach + so if we add an activity + add one using time + add another activity it should be in the right spot + """ + + # timedelta is used to "make sure" that ordering is known even though + # server time is not + custom_time = datetime.now(tz=pytz.utc) - timedelta(days=1) + + feed = user2 + for index, activity_time in enumerate([None, custom_time, None]): + await _test_sleep(1) # so times are a bit 
different
+        activity_data = {
+            "actor": 1,
+            "verb": "tweet",
+            "object": 1,
+            "foreign_id": f"tweet:{index}",
+            "time": activity_time,
+        }
+        await feed.add_activity(activity_data)
+
+    response = await feed.get(limit=3)
+    activities = response["results"]
+    # activities come back newest-first; the explicit day-old custom_time
+    # on tweet:1 makes it sort last
+    assert activities[0]["foreign_id"] == "tweet:2"
+    assert activities[1]["foreign_id"] == "tweet:0"
+    assert activities[2]["foreign_id"] == "tweet:1"
+    assert_datetime_almost_equal(activities[2]["time"], custom_time)
+
+
+@pytest.mark.asyncio
+async def test_missing_actor(user1):
+    activity_data = {
+        "verb": "tweet",
+        "object": 1,
+        "debug_example_undefined": "test",
+    }
+    try:
+        await user1.add_activity(activity_data)
+        raise ValueError("should have raised InputException")
+    except InputException:
+        pass
+
+
+@pytest.mark.asyncio
+async def test_follow_many(async_client):
+    sources = [async_client.feed("user", f"{i}-{uuid4()}").id for i in range(10)]
+    targets = [async_client.feed("flat", f"{i}-{uuid4()}").id for i in range(10)]
+    feeds = [{"source": s, "target": t} for s, t in zip(sources, targets)]
+    await async_client.follow_many(feeds)
+
+    for target in targets:
+        response = await async_client.feed(*target.split(":")).followers()
+        follows = response["results"]
+        assert len(follows) == 1
+        assert follows[0]["feed_id"] in sources
+        assert follows[0]["target_id"] == target
+
+    for source in sources:
+        response = await async_client.feed(*source.split(":")).following()
+        follows = response["results"]
+        assert len(follows) == 1
+        assert follows[0]["feed_id"] == source
+        assert follows[0]["target_id"] in targets
+
+
+@pytest.mark.asyncio
+async def test_follow_many_acl(async_client):
+    sources = [async_client.feed("user", f"{i}-{uuid4()}") for i in range(10)]
+    # ensure every source is empty first
+    for feed in sources:
+        response = await feed.get(limit=100)
+        activities = response["results"]
+        for activity in activities:
+            
await feed.remove_activity(activity["id"]) + + targets = [async_client.feed("flat", f"{i}-{uuid4()}") for i in range(10)] + # ensure every source is empty first + for feed in targets: + response = await feed.get(limit=100) + activities = response["results"] + for activity in activities: + await feed.remove_activity(activity["id"]) + # add activity to each target feed + activity = { + "actor": "barry", + "object": "09", + "verb": "tweet", + "time": datetime.utcnow().isoformat(), + } + for feed in targets: + await feed.add_activity(activity) + response = await feed.get(limit=5) + assert len(response["results"]) == 1 + + sources_id = [feed.id for feed in sources] + targets_id = [target.id for target in targets] + feeds = [{"source": s, "target": t} for s, t in zip(sources_id, targets_id)] + + await async_client.follow_many(feeds, activity_copy_limit=0) + + for feed in sources: + response = await feed.get(limit=5) + activities = response["results"] + assert len(activities) == 0 + + +@pytest.mark.asyncio +async def test_unfollow_many(async_client): + unfollows = [ + {"source": "user:1", "target": "timeline:1"}, + {"source": "user:2", "target": "timeline:2", "keep_history": False}, + ] + + await async_client.unfollow_many(unfollows) + unfollows.append({"source": "user:1", "target": 42}) + + async def failing_unfollow(): + await async_client.unfollow_many(unfollows) + + with pytest.raises(InputException): + await failing_unfollow() + + +@pytest.mark.asyncio +async def test_add_to_many(async_client): + activity = {"actor": 1, "verb": "tweet", "object": 1, "custom": "data"} + feeds = [async_client.feed("flat", f"{i}-{uuid4()}").id for i in range(10, 20)] + await async_client.add_to_many(activity, feeds) + + for feed in feeds: + feed = async_client.feed(*feed.split(":")) + response = await feed.get() + assert response["results"][0]["custom"] == "data" + + +@pytest.mark.asyncio +async def test_get_activities_empty_ids(async_client): + response = await 
async_client.get_activities(ids=[str(uuid1())]) + assert len(response["results"]) == 0 + + +@pytest.mark.asyncio +async def test_get_activities_empty_foreign_ids(async_client): + response = await async_client.get_activities( + foreign_id_times=[("fid-x", datetime.utcnow())] + ) + assert len(response["results"]) == 0 + + +@pytest.mark.asyncio +async def test_get_activities_full(async_client): + dt = datetime.utcnow() + fid = "awesome-test" + + activity = { + "actor": "barry", + "object": "09", + "verb": "tweet", + "time": dt, + "foreign_id": fid, + } + + feed = async_client.feed("user", f"test_get_activity-{uuid4()}") + response = await feed.add_activity(activity) + + response = await async_client.get_activities(ids=[response["id"]]) + assert len(response["results"]) == 1 + foreign_id = response["results"][0]["foreign_id"] + assert activity["foreign_id"] == foreign_id + + response = await async_client.get_activities(foreign_id_times=[(fid, dt)]) + assert len(response["results"]) == 1 + foreign_id = response["results"][0]["foreign_id"] + assert activity["foreign_id"] == foreign_id + + +@pytest.mark.asyncio +async def test_get_activities_full_with_enrichment(async_client): + dt = datetime.utcnow() + fid = "awesome-test" + + actor = await async_client.users.add(str(uuid1()), data={"name": "barry"}) + activity = { + "actor": async_client.users.create_reference(actor["id"]), + "object": "09", + "verb": "tweet", + "time": dt, + "foreign_id": fid, + } + + feed = async_client.feed("user", f"test_get_activity-{uuid4()}") + activity = await feed.add_activity(activity) + + reaction1 = await async_client.reactions.add("like", activity["id"], "liker") + reaction2 = await async_client.reactions.add("reshare", activity["id"], "sharer") + + def validate(response): + assert len(response["results"]) == 1 + assert response["results"][0]["id"] == activity["id"] + assert response["results"][0]["foreign_id"] == activity["foreign_id"] + assert 
response["results"][0]["actor"]["data"]["name"] == "barry" + latest_reactions = response["results"][0]["latest_reactions"] + assert len(latest_reactions) == 2 + assert latest_reactions["like"][0]["id"] == reaction1["id"] + assert latest_reactions["reshare"][0]["id"] == reaction2["id"] + assert response["results"][0]["reaction_counts"] == {"like": 1, "reshare": 1} + + reactions = {"recent": True, "counts": True} + validate( + await async_client.get_activities(ids=[activity["id"]], reactions=reactions) + ) + validate( + await async_client.get_activities( + foreign_id_times=[(fid, dt)], reactions=reactions + ) + ) + + +@pytest.mark.asyncio +async def test_get_activities_full_with_enrichment_and_reaction_kinds(async_client): + dt = datetime.utcnow() + fid = "awesome-test" + + actor = await async_client.users.add(str(uuid1()), data={"name": "barry"}) + activity = { + "actor": async_client.users.create_reference(actor["id"]), + "object": "09", + "verb": "tweet", + "time": dt, + "foreign_id": fid, + } + + feed = async_client.feed("user", f"test_get_activity-{uuid4()}") + activity = await feed.add_activity(activity) + + await async_client.reactions.add("like", activity["id"], "liker") + await async_client.reactions.add("reshare", activity["id"], "sharer") + await async_client.reactions.add("comment", activity["id"], "commenter") + + reactions = {"recent": True, "counts": True, "kinds": "like,comment"} + response = await async_client.get_activities( + ids=[activity["id"]], reactions=reactions + ) + assert len(response["results"]) == 1 + assert response["results"][0]["id"] == activity["id"] + assert sorted(response["results"][0]["latest_reactions"].keys()) == [ + "comment", + "like", + ] + + assert response["results"][0]["reaction_counts"] == {"like": 1, "comment": 1} + + reactions = { + "recent": True, + "counts": True, + "kinds": ["", "reshare ", "comment\n"], + } + response = await async_client.get_activities( + foreign_id_times=[(fid, dt)], reactions=reactions + ) + 
assert len(response["results"]) == 1 + assert response["results"][0]["id"] == activity["id"] + assert sorted(response["results"][0]["latest_reactions"].keys()) == [ + "comment", + "reshare", + ] + assert response["results"][0]["reaction_counts"] == {"comment": 1, "reshare": 1} + + +@pytest.mark.asyncio +async def test_activity_partial_update(async_client): + now = datetime.utcnow() + feed = async_client.feed("user", uuid4()) + await feed.add_activity( + { + "actor": "barry", + "object": "09", + "verb": "tweet", + "time": now, + "foreign_id": "fid:123", + "product": {"name": "shoes", "price": 9.99, "color": "blue"}, + } + ) + response = await feed.get() + activity = response["results"][0] + + to_set = { + "product.name": "boots", + "product.price": 7.99, + "popularity": 1000, + "foo": {"bar": {"baz": "qux"}}, + } + to_unset = ["product.color"] + + # partial update by ID + await async_client.activity_partial_update( + id=activity["id"], set=to_set, unset=to_unset + ) + response = await feed.get() + updated = response["results"][0] + expected = activity + expected["product"] = {"name": "boots", "price": 7.99} + expected["popularity"] = 1000 + expected["foo"] = {"bar": {"baz": "qux"}} + assert updated == expected + + # partial update by foreign ID + time + to_set = {"foo.bar.baz": 42, "popularity": 9000} + to_unset = ["product.price"] + await async_client.activity_partial_update( + foreign_id=activity["foreign_id"], + time=activity["time"], + set=to_set, + unset=to_unset, + ) + response = await feed.get() + updated = response["results"][0] + expected["product"] = {"name": "boots"} + expected["foo"] = {"bar": {"baz": 42}} + expected["popularity"] = 9000 + assert updated == expected + + +@pytest.mark.asyncio +async def test_activities_partial_update(async_client): + feed = async_client.feed("user", uuid4()) + await feed.add_activities( + [ + { + "actor": "barry", + "object": "09", + "verb": "tweet", + "time": datetime.utcnow(), + "foreign_id": "fid:123", + "product": 
{"name": "shoes", "price": 9.99, "color": "blue"}, + }, + { + "actor": "jerry", + "object": "10", + "verb": "tweet", + "time": datetime.utcnow(), + "foreign_id": "fid:456", + "product": {"name": "shoes", "price": 9.99, "color": "blue"}, + }, + { + "actor": "tommy", + "object": "09", + "verb": "tweet", + "time": datetime.utcnow(), + "foreign_id": "fid:789", + "product": {"name": "shoes", "price": 9.99, "color": "blue"}, + }, + ] + ) + response = await feed.get() + activities = response["results"] + + batch = [ + { + "id": activities[0]["id"], + "set": {"product.color": "purple", "custom": {"some": "extra data"}}, + "unset": ["product.price"], + }, + { + "id": activities[2]["id"], + "set": {"product.price": 9001, "on_sale": True}, + }, + ] + + # partial update by ID + await async_client.activities_partial_update(batch) + response = await feed.get() + updated = response["results"] + expected = activities + expected[0]["product"] = {"name": "shoes", "color": "purple"} + expected[0]["custom"] = {"some": "extra data"} + expected[2]["product"] = {"name": "shoes", "price": 9001, "color": "blue"} + expected[2]["on_sale"] = True + assert updated == expected + + # partial update by foreign ID + time + batch = [ + { + "foreign_id": activities[1]["foreign_id"], + "time": activities[1]["time"], + "set": {"product.color": "beeeeeeige", "custom": {"modified_by": "me"}}, + "unset": ["product.name"], + }, + { + "foreign_id": activities[2]["foreign_id"], + "time": activities[2]["time"], + "unset": ["on_sale"], + }, + ] + await async_client.activities_partial_update(batch) + response = await feed.get() + updated = response["results"] + + expected[1]["product"] = {"price": 9.99, "color": "beeeeeeige"} + expected[1]["custom"] = {"modified_by": "me"} + del expected[2]["on_sale"] + assert updated == expected + + +@pytest.mark.asyncio +async def test_reaction_add(async_client): + await async_client.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + + 
+@pytest.mark.asyncio +async def test_reaction_add_to_target_feeds(async_client): + feed_id = f"user:michelle-{uuid4()}" + r = await async_client.reactions.add( + "superlike", + "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", + "mike", + data={"popularity": 50}, + target_feeds=[feed_id], + target_feeds_extra_data={"popularity": 100}, + ) + assert r["data"]["popularity"] == 50 + feed = async_client.feed(*feed_id.split(":")) + response = await feed.get(limit=1) + a = response["results"][0] + assert r["id"] in a["reaction"] + assert a["verb"] == "superlike" + assert a["popularity"] == 100 + + child = await async_client.reactions.add_child( + "superlike", + r["id"], + "rob", + data={"popularity": 60}, + target_feeds=[feed_id], + target_feeds_extra_data={"popularity": 200}, + ) + + assert child["data"]["popularity"] == 60 + response = await feed.get(limit=1) + a = response["results"][0] + assert child["id"] in a["reaction"] + assert a["verb"] == "superlike" + assert a["popularity"] == 200 + + +@pytest.mark.asyncio +async def test_reaction_get(async_client): + response = await async_client.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + reaction = await async_client.reactions.get(response["id"]) + assert reaction["parent"] == "" + assert reaction["data"] == {} + assert reaction["latest_children"] == {} + assert reaction["children_counts"] == {} + assert reaction["activity_id"] == "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4" + assert reaction["kind"] == "like" + assert "created_at" in reaction + assert "updated_at" in reaction + assert "id" in reaction + + +@pytest.mark.asyncio +async def test_reaction_update(async_client): + response = await async_client.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + await async_client.reactions.update(response["id"], {"changed": True}) + + +@pytest.mark.asyncio +async def test_reaction_delete(async_client): + response = await async_client.reactions.add( + "like", 
"54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + await async_client.reactions.delete(response["id"]) + + +@pytest.mark.asyncio +async def test_reaction_hard_delete(async_client): + response = await async_client.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + await async_client.reactions.delete(response["id"], soft=False) + + +@pytest.mark.asyncio +async def test_reaction_soft_delete(async_client): + response = await async_client.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + await async_client.reactions.delete(response["id"], soft=True) + + +@pytest.mark.asyncio +async def test_reaction_soft_delete_and_restore(async_client): + response = await async_client.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + await async_client.reactions.delete(response["id"], soft=True) + r1 = await async_client.reactions.get(response["id"]) + assert r1.get("deleted_at", None) is not None + await async_client.reactions.restore(response["id"]) + r1 = await async_client.reactions.get(response["id"]) + assert "deleted_at" not in r1 + + +@pytest.mark.asyncio +async def test_reaction_invalid_restore(async_client): + response = await async_client.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + with pytest.raises(DoesNotExistException): + await async_client.reactions.restore(response["id"]) + + +@pytest.mark.asyncio +async def test_reaction_add_child(async_client): + response = await async_client.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + await async_client.reactions.add_child("like", response["id"], "rob") + + +@pytest.mark.asyncio +async def test_reaction_filter_random(async_client): + await async_client.reactions.filter( + kind="like", + reaction_id="87a9eec0-fd5f-11e8-8080-80013fed2f5b", + id_lte="87a9eec0-fd5f-11e8-8080-80013fed2f5b", + ) + await async_client.reactions.filter( + activity_id="87a9eec0-fd5f-11e8-8080-80013fed2f5b", + 
id_lte="87a9eec0-fd5f-11e8-8080-80013fed2f5b", + ) + await async_client.reactions.filter( + user_id="mike", id_lte="87a9eec0-fd5f-11e8-8080-80013fed2f5b" + ) + + +def _first_result_should_be(response, element): + el = element.copy() + el.pop("duration") + assert len(response["results"]) == 1 + assert response["results"][0] == el + + +@pytest.mark.asyncio +async def test_reaction_filter(async_client): + activity_id = str(uuid1()) + user = str(uuid1()) + + response = await async_client.reactions.add("like", activity_id, user) + child = await async_client.reactions.add_child("like", response["id"], user) + reaction = await async_client.reactions.get(response["id"]) + + response = await async_client.reactions.add("comment", activity_id, user) + reaction_comment = await async_client.reactions.get(response["id"]) + + r = await async_client.reactions.filter(reaction_id=reaction["id"]) + _first_result_should_be(r, child) + + r = await async_client.reactions.filter( + kind="like", activity_id=activity_id, id_lte=reaction["id"] + ) + _first_result_should_be(r, reaction) + + r = await async_client.reactions.filter( + kind="like", user_id=user, id_lte=reaction["id"] + ) + _first_result_should_be(r, reaction) + + r = await async_client.reactions.filter(kind="comment", activity_id=activity_id) + _first_result_should_be(r, reaction_comment) + + +@pytest.mark.asyncio +async def test_reaction_add_with_moderation_template(async_client): + """Test adding a reaction with moderation template""" + try: + response = await async_client.reactions.add( + "like", + "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", + "mike", + moderation_template="test_moderation_template", + ) + # If moderation is enabled, verify the reaction was created + assert "id" in response + reaction = await async_client.reactions.get(response["id"]) + assert reaction["kind"] == "like" + assert reaction["user_id"] == "mike" + except Exception as e: + # If moderation is not enabled, we expect a specific error + # The important 
thing is that the moderation_template parameter + # was accepted and passed to the API without causing a client-side error + error_message = str(e) + assert ( + "moderation not enabled" in error_message + ), f"Expected moderation error, but got: {error_message}" + + +@pytest.mark.asyncio +async def test_reaction_add_child_with_moderation_template(async_client): + """Test adding a child reaction with moderation template""" + # First create a parent reaction + parent_response = await async_client.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + + try: + # Add child with moderation template + child_response = await async_client.reactions.add_child( + "reply", + parent_response["id"], + "rob", + data={"text": "Great post!"}, + moderation_template="child_moderation_template", + ) + # If moderation is enabled, verify the child reaction was created + assert "id" in child_response + child_reaction = await async_client.reactions.get(child_response["id"]) + assert child_reaction["kind"] == "reply" + assert child_reaction["user_id"] == "rob" + assert child_reaction["parent"] == parent_response["id"] + except Exception as e: + # If moderation is not enabled, we expect a specific error + # The important thing is that the moderation_template parameter + # was accepted and passed to the API without causing a client-side error + error_message = str(e) + assert ( + "moderation not enabled" in error_message + ), f"Expected moderation error, but got: {error_message}" + + +@pytest.mark.asyncio +async def test_reaction_add_without_moderation_template_backwards_compatibility( + async_client, +): + """Test that existing functionality still works without moderation template""" + response = await async_client.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + assert "id" in response + reaction = await async_client.reactions.get(response["id"]) + assert reaction["kind"] == "like" + + +@pytest.mark.asyncio +async def 
test_reaction_add_child_without_moderation_template_backwards_compatibility( + async_client, +): + """Test that existing child functionality still works without moderation template""" + parent_response = await async_client.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + child_response = await async_client.reactions.add_child( + "reply", parent_response["id"], "rob" + ) + assert "id" in child_response + child_reaction = await async_client.reactions.get(child_response["id"]) + assert child_reaction["parent"] == parent_response["id"] + + +@pytest.mark.asyncio +async def test_user_add(async_client): + await async_client.users.add(str(uuid1())) + + +@pytest.mark.asyncio +async def test_user_add_get_or_create(async_client): + user_id = str(uuid1()) + r1 = await async_client.users.add(user_id) + r2 = await async_client.users.add(user_id, get_or_create=True) + assert r1["id"] == r2["id"] + assert r1["created_at"] == r2["created_at"] + assert r1["updated_at"] == r2["updated_at"] + + +@pytest.mark.asyncio +async def test_user_get(async_client): + response = await async_client.users.add(str(uuid1())) + user = await async_client.users.get(response["id"]) + assert user["data"] == {} + assert "created_at" in user + assert "updated_at" in user + assert "id" in user + + +@pytest.mark.asyncio +async def test_user_get_with_follow_counts(async_client): + response = await async_client.users.add(str(uuid1())) + user = await async_client.users.get(response["id"], with_follow_counts=True) + assert user["id"] == response["id"] + assert "followers_count" in user + assert "following_count" in user + + +@pytest.mark.asyncio +async def test_user_update(async_client): + response = await async_client.users.add(str(uuid1())) + await async_client.users.update(response["id"], {"changed": True}) + + +@pytest.mark.asyncio +async def test_user_delete(async_client): + response = await async_client.users.add(str(uuid1())) + await async_client.users.delete(response["id"]) + 
+ +@pytest.mark.asyncio +async def test_collections_add(async_client): + await async_client.collections.add( + "items", {"data": 1}, id=str(uuid1()), user_id="tom" + ) + + +@pytest.mark.asyncio +async def test_collections_add_no_id(async_client): + await async_client.collections.add("items", {"data": 1}) + + +@pytest.mark.asyncio +async def test_collections_get(async_client): + response = await async_client.collections.add("items", {"data": 1}, id=str(uuid1())) + entry = await async_client.collections.get("items", response["id"]) + assert entry["data"] == {"data": 1} + assert "created_at" in entry + assert "updated_at" in entry + assert "id" in entry + + +@pytest.mark.asyncio +async def test_collections_update(async_client): + response = await async_client.collections.add("items", {"data": 1}, str(uuid1())) + await async_client.collections.update( + "items", response["id"], data={"changed": True} + ) + entry = await async_client.collections.get("items", response["id"]) + assert entry["data"] == {"changed": True} + + +@pytest.mark.asyncio +async def test_collections_delete(async_client): + response = await async_client.collections.add("items", {"data": 1}, str(uuid1())) + await async_client.collections.delete("items", response["id"]) + + +@pytest.mark.asyncio +async def test_feed_enrichment_collection(async_client): + entry = await async_client.collections.add("items", {"name": "time machine"}) + entry.pop("duration") + f = async_client.feed("user", f"mike-{uuid4()}") + activity_data = { + "actor": "mike", + "verb": "buy", + "object": async_client.collections.create_reference(entry=entry), + } + await f.add_activity(activity_data) + response = await f.get() + assert set(activity_data.items()).issubset(set(response["results"][0].items())) + enriched_response = await f.get(enrich=True) + assert enriched_response["results"][0]["object"] == entry + + +@pytest.mark.asyncio +async def test_feed_enrichment_user(async_client): + user = await 
async_client.users.add(str(uuid1()), {"name": "Mike"}) + user.pop("duration") + f = async_client.feed("user", f"mike-{uuid4()}") + activity_data = { + "actor": async_client.users.create_reference(user), + "verb": "buy", + "object": "time machine", + } + await f.add_activity(activity_data) + response = await f.get() + assert set(activity_data.items()).issubset(set(response["results"][0].items())) + enriched_response = await f.get(enrich=True) + assert enriched_response["results"][0]["actor"] == user + + +@pytest.mark.asyncio +async def test_feed_enrichment_own_reaction(async_client): + f = async_client.feed("user", f"mike-{uuid4()}") + activity_data = {"actor": "mike", "verb": "buy", "object": "object"} + response = await f.add_activity(activity_data) + reaction = await async_client.reactions.add("like", response["id"], "mike") + reaction.pop("duration") + enriched_response = await f.get(reactions={"own": True}, user_id="mike") + assert enriched_response["results"][0]["own_reactions"]["like"][0] == reaction + + +@pytest.mark.asyncio +async def test_feed_enrichment_recent_reaction(async_client): + f = async_client.feed("user", f"mike-{uuid4()}") + activity_data = {"actor": "mike", "verb": "buy", "object": "object"} + response = await f.add_activity(activity_data) + reaction = await async_client.reactions.add("like", response["id"], "mike") + reaction.pop("duration") + enriched_response = await f.get(reactions={"recent": True}) + assert enriched_response["results"][0]["latest_reactions"]["like"][0] == reaction + + +@pytest.mark.asyncio +async def test_feed_enrichment_reaction_counts(async_client): + f = async_client.feed("user", f"mike-{uuid4()}") + activity_data = {"actor": "mike", "verb": "buy", "object": "object"} + response = await f.add_activity(activity_data) + reaction = await async_client.reactions.add("like", response["id"], "mike") + reaction.pop("duration") + enriched_response = await f.get(reactions={"counts": True}) + assert 
enriched_response["results"][0]["reaction_counts"]["like"] == 1 + + +@pytest.mark.asyncio +async def test_track_engagements(async_client): + engagements = [ + { + "content": "1", + "label": "click", + "features": [ + {"group": "topic", "value": "js"}, + {"group": "user", "value": "tommaso"}, + ], + "user_data": "tommaso", + }, + { + "content": "2", + "label": "click", + "features": [ + {"group": "topic", "value": "go"}, + {"group": "user", "value": "tommaso"}, + ], + "user_data": {"id": "486892", "alias": "Julian"}, + }, + { + "content": "3", + "label": "click", + "features": [{"group": "topic", "value": "go"}], + "user_data": {"id": "tommaso", "alias": "tommaso"}, + }, + ] + await async_client.track_engagements(engagements) + + +@pytest.mark.asyncio +async def test_track_impressions(async_client): + impressions = [ + { + "content_list": ["1", "2", "3"], + "features": [ + {"group": "topic", "value": "js"}, + {"group": "user", "value": "tommaso"}, + ], + "user_data": {"id": "tommaso", "alias": "tommaso"}, + }, + { + "content_list": ["2", "3", "5"], + "features": [{"group": "topic", "value": "js"}], + "user_data": {"id": "486892", "alias": "Julian"}, + }, + ] + await async_client.track_impressions(impressions) + + +@pytest.mark.asyncio +async def test_og(async_client): + response = await async_client.og("https://google.com") + assert "title" in response + assert "description" in response + + +@pytest.mark.asyncio +async def test_follow_stats(async_client): + uniq = uuid4() + f = async_client.feed("user", uniq) + await f.follow("user", uuid4()) + await f.follow("user", uuid4()) + await f.follow("user", uuid4()) + + await async_client.feed("user", uuid4()).follow("user", uniq) + await async_client.feed("timeline", uuid4()).follow("user", uniq) + + feed_id = "user:" + str(uniq) + response = await async_client.follow_stats(feed_id) + result = response["results"] + assert result["following"]["count"] == 3 + assert result["followers"]["count"] == 2 + + response = await 
async_client.follow_stats( + feed_id, followers_slugs=["timeline"], following_slugs=["timeline"] + ) + result = response["results"] + assert result["following"]["count"] == 0 + assert result["followers"]["count"] == 1 diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index f873c64..f30171b 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -1,32 +1,24 @@ -from dateutil.tz import tzlocal -import stream -import time -from stream.exceptions import ApiKeyException, InputException -import random -import jwt - -try: - from unittest.case import TestCase -except ImportError: - from unittest import TestCase +import copy +import datetime import json - import os +import random import sys -import datetime -import datetime as dt -import copy +import time +from uuid import uuid1, uuid4 + +import jwt +import pytz import requests -from stream import serializer +from dateutil.tz import tzlocal from requests.exceptions import MissingSchema -from itertools import count -from uuid import uuid1 -from uuid import uuid4 +from urllib.parse import parse_qs, urlparse +from unittest import TestCase -try: - from urlparse import urlparse, parse_qs -except ImportError: - from urllib.parse import urlparse, parse_qs +import stream +from stream import serializer +from stream.exceptions import ApiKeyException, InputException, DoesNotExistException +from stream.feed import Feed def connect_debug(): @@ -43,24 +35,17 @@ def connect_debug(): ) sys.exit(1) - return stream.connect(key, secret, location="qa", timeout=30) + return stream.connect(key, secret, location="qa", timeout=30, use_async=False) client = connect_debug() -counter = count() -test_identifier = uuid4() - - -def get_unique_postfix(): - return "---test_%s-feed_%s" % (test_identifier, next(counter)) - def getfeed(feed_slug, user_id): """ Adds the random postfix to the user id """ - return client.feed(feed_slug, user_id + get_unique_postfix()) + return client.feed(feed_slug, 
f"user_id-{uuid4()}") def api_request_parse_validator(test): @@ -125,6 +110,20 @@ def test_collections_url(self): feed_url, "https://qa-api.stream-io-api.com/api/v1.0/meta/" ) + def test_analytics_url(self): + feed_url = client.get_full_url( + relative_url="engagement/", service_name="analytics" + ) + + if self.local_tests: + self.assertEqual( + feed_url, "http://localhost:8000/analytics/v1.0/engagement/" + ) + else: + self.assertEqual( + feed_url, "https://qa.stream-io-api.com/analytics/v1.0/engagement/" + ) + def test_personalization_url(self): feed_url = client.get_full_url( relative_url="recommended", service_name="personalization" @@ -151,15 +150,15 @@ def test_api_url(self): ) def test_collections_url_default(self): - client = stream.connect("key", "secret") - feed_url = client.get_full_url(relative_url="meta/", service_name="api") + c = stream.connect("key", "secret", location="") + feed_url = c.get_full_url(relative_url="meta/", service_name="api") if not self.local_tests: self.assertEqual(feed_url, "https://api.stream-io-api.com/api/v1.0/meta/") def test_personalization_url_default(self): - client = stream.connect("key", "secret") - feed_url = client.get_full_url( + c = stream.connect("key", "secret", location="") + feed_url = c.get_full_url( relative_url="recommended", service_name="personalization" ) @@ -170,15 +169,15 @@ def test_personalization_url_default(self): ) def test_api_url_default(self): - client = stream.connect("key", "secret") - feed_url = client.get_full_url(service_name="api", relative_url="feed/") + c = stream.connect("key", "secret", location="") + feed_url = c.get_full_url(service_name="api", relative_url="feed/") if not self.local_tests: self.assertEqual(feed_url, "https://api.stream-io-api.com/api/v1.0/feed/") def test_collections_url_location(self): - client = stream.connect("key", "secret", location="tokyo") - feed_url = client.get_full_url(relative_url="meta/", service_name="api") + c = stream.connect("key", "secret", 
location="tokyo") + feed_url = c.get_full_url(relative_url="meta/", service_name="api") if not self.local_tests: self.assertEqual( @@ -186,8 +185,8 @@ def test_collections_url_location(self): ) def test_personalization_url_location(self): - client = stream.connect("key", "secret", location="tokyo") - feed_url = client.get_full_url( + c = stream.connect("key", "secret", location="tokyo") + feed_url = c.get_full_url( relative_url="recommended", service_name="personalization" ) @@ -198,8 +197,8 @@ def test_personalization_url_location(self): ) def test_api_url_location(self): - client = stream.connect("key", "secret", location="tokyo") - feed_url = client.get_full_url(service_name="api", relative_url="feed/") + c = stream.connect("key", "secret", location="tokyo") + feed_url = c.get_full_url(service_name="api", relative_url="feed/") if not self.local_tests: self.assertEqual( @@ -258,22 +257,22 @@ def test_update_activities_update(self): def test_heroku(self): url = "https://thierry:pass@getstream.io/?app_id=1" os.environ["STREAM_URL"] = url - client = stream.connect() - self.assertEqual(client.api_key, "thierry") - self.assertEqual(client.api_secret, "pass") - self.assertEqual(client.app_id, "1") + c = stream.connect() + self.assertEqual(c.api_key, "thierry") + self.assertEqual(c.api_secret, "pass") + self.assertEqual(c.app_id, "1") def test_heroku_no_location(self): url = "https://bvt88g4kvc63:twc5ywfste5bm2ngqkzs7ukxk3pn96yweghjrxcmcrarnt3j4dqj3tucbhym5wfd@stream-io-api.com/?app_id=669" os.environ["STREAM_URL"] = url - client = stream.connect() - self.assertEqual(client.api_key, "bvt88g4kvc63") + c = stream.connect() + self.assertEqual(c.api_key, "bvt88g4kvc63") self.assertEqual( - client.api_secret, + c.api_secret, "twc5ywfste5bm2ngqkzs7ukxk3pn96yweghjrxcmcrarnt3j4dqj3tucbhym5wfd", ) - self.assertEqual(client.app_id, "669") - feed_url = client.get_full_url("api", "feed/") + self.assertEqual(c.app_id, "669") + feed_url = c.get_full_url("api", "feed/") if 
self.local_tests: self.assertEqual(feed_url, "http://localhost:8000/api/v1.0/feed/") @@ -283,14 +282,14 @@ def test_heroku_no_location(self): def test_heroku_location_compat(self): url = "https://ahj2ndz7gsan:gthc2t9gh7pzq52f6cky8w4r4up9dr6rju9w3fjgmkv6cdvvav2ufe5fv7e2r9qy@us-east.getstream.io/?app_id=1" os.environ["STREAM_URL"] = url - client = stream.connect() - self.assertEqual(client.api_key, "ahj2ndz7gsan") + c = stream.connect() + self.assertEqual(c.api_key, "ahj2ndz7gsan") self.assertEqual( - client.api_secret, + c.api_secret, "gthc2t9gh7pzq52f6cky8w4r4up9dr6rju9w3fjgmkv6cdvvav2ufe5fv7e2r9qy", ) - feed_url = client.get_full_url("api", "feed/") + feed_url = c.get_full_url("api", "feed/") if self.local_tests: self.assertEqual(feed_url, "http://localhost:8000/api/v1.0/feed/") else: @@ -298,52 +297,52 @@ def test_heroku_location_compat(self): feed_url, "https://us-east-api.stream-io-api.com/api/v1.0/feed/" ) - self.assertEqual(client.app_id, "1") + self.assertEqual(c.app_id, "1") def test_heroku_location(self): url = "https://ahj2ndz7gsan:gthc2t9gh7pzq52f6cky8w4r4up9dr6rju9w3fjgmkv6cdvvav2ufe5fv7e2r9qy@us-east.stream-io-api.com/?app_id=1" os.environ["STREAM_URL"] = url - client = stream.connect() - self.assertEqual(client.api_key, "ahj2ndz7gsan") + c = stream.connect() + self.assertEqual(c.api_key, "ahj2ndz7gsan") self.assertEqual( - client.api_secret, + c.api_secret, "gthc2t9gh7pzq52f6cky8w4r4up9dr6rju9w3fjgmkv6cdvvav2ufe5fv7e2r9qy", ) - feed_url = client.get_full_url("api", "feed/") + feed_url = c.get_full_url("api", "feed/") if self.local_tests: self.assertEqual(feed_url, "http://localhost:8000/api/v1.0/feed/") else: self.assertEqual( feed_url, "https://us-east-api.stream-io-api.com/api/v1.0/feed/" ) - self.assertEqual(client.app_id, "1") + self.assertEqual(c.app_id, "1") def test_heroku_overwrite(self): url = "https://thierry:pass@getstream.io/?app_id=1" os.environ["STREAM_URL"] = url - client = stream.connect("a", "b", "c") - 
self.assertEqual(client.api_key, "a") - self.assertEqual(client.api_secret, "b") - self.assertEqual(client.app_id, "c") + c = stream.connect("a", "b", "c") + self.assertEqual(c.api_key, "a") + self.assertEqual(c.api_secret, "b") + self.assertEqual(c.app_id, "c") def test_location_support(self): - client = stream.connect("a", "b", "c", location="us-east") + c = stream.connect("a", "b", "c", location="us-east") full_location = "https://us-east-api.stream-io-api.com/api/v1.0/feed/" if self.local_tests: full_location = "http://localhost:8000/api/v1.0/feed/" - self.assertEqual(client.location, "us-east") - feed_url = client.get_full_url("api", "feed/") + self.assertEqual(c.location, "us-east") + feed_url = c.get_full_url("api", "feed/") self.assertEqual(feed_url, full_location) # test a wrong location, can only work on non-local test running if not self.local_tests: - client = stream.connect("a", "b", "c", location="nonexistant") + c = stream.connect("a", "b", "c", location="nonexistant") def get_feed(): - client.feed("user", "1").get() + c.feed("user", "1").get() self.assertRaises(requests.exceptions.ConnectionError, get_feed) @@ -369,16 +368,15 @@ def invalid_follow_user_id(): self.assertRaises(ValueError, invalid_follow_user_id) def test_token_retrieval(self): - self.user1.token - self.user1.get_readonly_token() + _ = self.user1.token + _ = self.user1.get_readonly_token() - def test_user_session_token(self): - client = stream.connect(self.c.api_key, self.c.api_secret) - token = client.create_user_session_token("user") - payload = jwt.decode(token, self.c.api_secret) + def test_user_token(self): + token = self.c.create_user_token("user") + payload = jwt.decode(token, self.c.api_secret, algorithms=["HS256"]) self.assertEqual(payload["user_id"], "user") - token = client.create_user_session_token("user", client="python", testing=True) - payload = jwt.decode(token, self.c.api_secret) + token = self.c.create_user_token("user", client="python", testing=True) + payload = 
jwt.decode(token, self.c.api_secret, algorithms=["HS256"]) self.assertEqual(payload["client"], "python") self.assertEqual(payload["testing"], True) @@ -474,17 +472,19 @@ def test_remove_activity_by_foreign_id(self): "foreign_id": "tweet:10", } - self.user1.add_activity(activity_data)["id"] + self.user1.add_activity(activity_data) activities = self.user1.get(limit=8)["results"] self.assertEqual(len(activities), 1) + self.assertNotEqual(activities[0]["id"], "") + self.assertEqual(activities[0]["foreign_id"], "tweet:10") self.user1.remove_activity(foreign_id="tweet:10") # verify that no activities were returned activities = self.user1.get(limit=8)["results"] self.assertEqual(len(activities), 0) - # verify this doesnt raise an error, but fails silently - self.user1.remove_activity(foreign_id="tweet:unknowandmissing") + # verify this doesn't raise an error, but fails silently + self.user1.remove_activity(foreign_id="tweet:unknownandmissing") def test_add_activities(self): activity_data = [ @@ -578,7 +578,7 @@ def test_flat_follow_no_copy(self): feed = getfeed("user", "test_flat_follow_no_copy") follower = getfeed("flat", "test_flat_follow_no_copy") activity_data = {"actor": 1, "verb": "tweet", "object": 1} - feed.add_activity(activity_data)["id"] + feed.add_activity(activity_data) follower.follow(feed.slug, feed.user_id, activity_copy_limit=0) activities = follower.get(limit=3)["results"] @@ -593,14 +593,14 @@ def test_flat_follow_copy_one(self): "object": 1, "foreign_id": "test:1", } - feed.add_activity(activity_data)["id"] + feed.add_activity(activity_data) activity_data = { "actor": 1, "verb": "tweet", "object": 1, "foreign_id": "test:2", } - feed.add_activity(activity_data)["id"] + feed.add_activity(activity_data) follower.follow(feed.slug, feed.user_id, activity_copy_limit=1) activities = follower.get(limit=3)["results"] @@ -678,20 +678,20 @@ def test_do_i_follow(self): self.assertEqual(followings["results"][0]["target_id"], "user:apy") def 
test_update_activity_to_targets(self): - time = datetime.datetime.utcnow().isoformat() + now = datetime.datetime.utcnow().isoformat() foreign_id = "user:1" activity_data = { "actor": 1, "verb": "tweet", "object": 1, "foreign_id": foreign_id, - "time": time, + "time": now, + "to": ["user:1", "user:2"], } - activity_data["to"] = ["user:1", "user:2"] self.user1.add_activity(activity_data) ret = self.user1.update_activity_to_targets( - foreign_id, time, new_targets=["user:3", "user:2"] + foreign_id, now, new_targets=["user:3", "user:2"] ) self.assertEqual(len(ret["activity"]["to"]), 2) self.assertTrue("user:2" in ret["activity"]["to"]) @@ -699,7 +699,7 @@ def test_update_activity_to_targets(self): ret = self.user1.update_activity_to_targets( foreign_id, - time, + now, added_targets=["user:4", "user:5"], removed_targets=["user:3"], ) @@ -831,7 +831,7 @@ def test_uniqueness(self): b.) The same time and foreign id """ - utcnow = datetime.datetime.utcnow() + utcnow = datetime.datetime.now(tz=pytz.utc) activity_data = {"actor": 1, "verb": "tweet", "object": 1, "time": utcnow} self.user1.add_activity(activity_data) self.user1.add_activity(activity_data) @@ -852,7 +852,7 @@ def test_uniqueness_topic(self): self.flat3.follow("user", self.user1.user_id) # add the same activity twice now = datetime.datetime.now(tzlocal()) - tweet = "My Way %s" % get_unique_postfix() + tweet = f"My Way {uuid4()}" activity_data = { "actor": 1, "verb": "tweet", @@ -869,7 +869,7 @@ def test_uniqueness_topic(self): def test_uniqueness_foreign_id(self): now = datetime.datetime.now(tzlocal()) - utcnow = (now - now.utcoffset()).replace(tzinfo=None) + utcnow = now.astimezone(pytz.utc) activity_data = { "actor": 1, @@ -900,7 +900,7 @@ def test_uniqueness_foreign_id(self): def test_time_ordering(self): """ - datetime.datetime.utcnow() is our recommended approach + datetime.datetime.now(tz=pytz.utc) is our recommended approach so if we add an activity add one using time add another activity it should be in 
the right spot @@ -908,7 +908,7 @@ def test_time_ordering(self): # timedelta is used to "make sure" that ordering is known even though # server time is not - custom_time = datetime.datetime.utcnow() - dt.timedelta(days=1) + custom_time = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=1) feed = self.user2 for index, activity_time in enumerate([None, custom_time, None]): @@ -936,24 +936,19 @@ def test_missing_actor(self): "object": 1, "debug_example_undefined": "test", } - doit = lambda: self.user1.add_activity(activity_data) try: - doit() + self.user1.add_activity(activity_data) raise ValueError("should have raised InputException") except InputException: pass def test_wrong_feed_spec(self): - self.c = stream.connect( - "5crf3bhfzesnMISSING", - "tfq2sdqpj9g446sbv653x3aqmgn33hsn8uzdc9jpskaw8mj6vsnhzswuwptuj9su", - ) self.assertRaises(TypeError, lambda: getfeed("user1")) def test_serialization(self): today = datetime.date.today() - then = datetime.datetime.now().replace(microsecond=0) - now = datetime.datetime.now() + now = datetime.datetime.now(tz=pytz.utc) + then = now.replace(microsecond=0) data = dict( string="string", float=0.1, @@ -1019,6 +1014,21 @@ def test_follow_many_acl(self): activities = feed.get(limit=5)["results"] self.assertEqual(len(activities), 0) + def test_unfollow_many(self): + unfollows = [ + {"source": "user:1", "target": "timeline:1"}, + {"source": "user:2", "target": "timeline:2", "keep_history": False}, + ] + + self.c.unfollow_many(unfollows) + + unfollows.append({"source": "user:1", "target": 42}) + + def failing_unfollow(): + self.c.unfollow_many(unfollows) + + self.assertRaises(InputException, failing_unfollow) + def test_add_to_many(self): activity = {"actor": 1, "verb": "tweet", "object": 1, "custom": "data"} feeds = [getfeed("flat", str(i)).id for i in range(10, 20)] @@ -1053,7 +1063,9 @@ def test_create_email_redirect(self): parsed_url = urlparse(redirect_url) qs = parse_qs(parsed_url.query) - decoded = 
jwt.decode(qs["authorization"][0], self.c.api_secret) + decoded = jwt.decode( + qs["authorization"][0], self.c.api_secret, algorithms=["HS256"] + ) self.assertEqual( decoded, @@ -1095,10 +1107,11 @@ def test_email_redirect_invalid_target(self): # no protocol specified, this should raise an error target_url = "google.com" user_id = "tommaso" - create_redirect = lambda: self.c.create_redirect_url( - target_url, user_id, events - ) - self.assertRaises(MissingSchema, create_redirect) + + def redirect(): + self.c.create_redirect_url(target_url, user_id, events) + + self.assertRaises(MissingSchema, redirect) def test_follow_redirect_url(self): target_url = "http://google.com/?a=b&c=d" @@ -1143,6 +1156,96 @@ def test_get_activities_full(self): self.assertEqual(len(response["results"]), 1) self.assertEqual(activity["foreign_id"], response["results"][0]["foreign_id"]) + def test_get_activities_full_with_enrichment(self): + dt = datetime.datetime.utcnow() + fid = "awesome-test" + + actor = self.c.users.add(str(uuid1()), data={"name": "barry"}) + activity = { + "actor": self.c.users.create_reference(actor["id"]), + "object": "09", + "verb": "tweet", + "time": dt, + "foreign_id": fid, + } + + feed = getfeed("user", "test_get_activity") + activity = feed.add_activity(activity) + + reaction1 = self.c.reactions.add("like", activity["id"], "liker") + reaction2 = self.c.reactions.add("reshare", activity["id"], "sharer") + + def validate(response): + self.assertEqual(len(response["results"]), 1) + self.assertEqual(response["results"][0]["id"], activity["id"]) + self.assertEqual( + response["results"][0]["foreign_id"], activity["foreign_id"] + ) + self.assertEqual(response["results"][0]["actor"]["data"]["name"], "barry") + latest_reactions = response["results"][0]["latest_reactions"] + self.assertEqual(len(latest_reactions), 2) + self.assertEqual(latest_reactions["like"][0]["id"], reaction1["id"]) + self.assertEqual(latest_reactions["reshare"][0]["id"], reaction2["id"]) + 
self.assertEqual( + response["results"][0]["reaction_counts"], {"like": 1, "reshare": 1} + ) + + reactions = {"recent": True, "counts": True} + validate(self.c.get_activities(ids=[activity["id"]], reactions=reactions)) + validate( + self.c.get_activities(foreign_id_times=[(fid, dt)], reactions=reactions) + ) + + def test_get_activities_full_with_enrichment_and_reaction_kinds(self): + dt = datetime.datetime.utcnow() + fid = "awesome-test" + + actor = self.c.users.add(str(uuid1()), data={"name": "barry"}) + activity = { + "actor": self.c.users.create_reference(actor["id"]), + "object": "09", + "verb": "tweet", + "time": dt, + "foreign_id": fid, + } + + feed = getfeed("user", "test_get_activity") + activity = feed.add_activity(activity) + + self.c.reactions.add("like", activity["id"], "liker") + self.c.reactions.add("reshare", activity["id"], "sharer") + self.c.reactions.add("comment", activity["id"], "commenter") + + reactions = {"recent": True, "counts": True, "kinds": "like,comment"} + response = self.c.get_activities(ids=[activity["id"]], reactions=reactions) + self.assertEqual(len(response["results"]), 1) + self.assertEqual(response["results"][0]["id"], activity["id"]) + self.assertEqual( + sorted(response["results"][0]["latest_reactions"].keys()), + ["comment", "like"], + ) + self.assertEqual( + response["results"][0]["reaction_counts"], {"like": 1, "comment": 1} + ) + + reactions = { + "recent": True, + "counts": True, + "kinds": ["", "reshare ", "comment\n"], + } + response = self.c.get_activities( + foreign_id_times=[(fid, dt)], reactions=reactions + ) + self.assertEqual(len(response["results"]), 1) + self.assertEqual(response["results"][0]["id"], activity["id"]) + self.assertEqual( + sorted(response["results"][0]["latest_reactions"].keys()), + ["comment", "reshare"], + ) + self.assertEqual( + response["results"][0]["reaction_counts"], {"comment": 1, "reshare": 1} + ) + def test_activity_partial_update(self): now = datetime.datetime.utcnow() feed = 
self.c.feed("user", uuid4()) @@ -1158,16 +1261,16 @@ def test_activity_partial_update(self): ) activity = feed.get()["results"][0] - set = { + to_set = { "product.name": "boots", "product.price": 7.99, "popularity": 1000, "foo": {"bar": {"baz": "qux"}}, } - unset = ["product.color"] + to_unset = ["product.color"] # partial update by ID - self.c.activity_partial_update(id=activity["id"], set=set, unset=unset) + self.c.activity_partial_update(id=activity["id"], set=to_set, unset=to_unset) updated = feed.get()["results"][0] expected = activity expected["product"] = {"name": "boots", "price": 7.99} @@ -1176,13 +1279,13 @@ def test_activity_partial_update(self): self.assertEqual(updated, expected) # partial update by foreign ID + time - set = {"foo.bar.baz": 42, "popularity": 9000} - unset = ["product.price"] + to_set = {"foo.bar.baz": 42, "popularity": 9000} + to_unset = ["product.price"] self.c.activity_partial_update( foreign_id=activity["foreign_id"], time=activity["time"], - set=set, - unset=unset, + set=to_set, + unset=to_unset, ) updated = feed.get()["results"][0] expected["product"] = {"name": "boots"} @@ -1191,7 +1294,6 @@ def test_activity_partial_update(self): self.assertEqual(updated, expected) def test_activities_partial_update(self): - feed = self.c.feed("user", uuid4()) feed.add_activities( [ @@ -1278,6 +1380,36 @@ def test_create_user_reference(self): def test_reaction_add(self): self.c.reactions.add("like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike") + def test_reaction_add_to_target_feeds(self): + r = self.c.reactions.add( + "superlike", + "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", + "mike", + data={"popularity": 50}, + target_feeds=["user:michelle"], + target_feeds_extra_data={"popularity": 100}, + ) + self.assertEqual(r["data"]["popularity"], 50) + a = self.c.feed("user", "michelle").get(limit=1)["results"][0] + self.assertTrue(r["id"] in a["reaction"]) + self.assertEqual(a["verb"], "superlike") + self.assertEqual(a["popularity"], 100) + + child = 
self.c.reactions.add_child( + "superlike", + r["id"], + "rob", + data={"popularity": 60}, + target_feeds=["user:michelle"], + target_feeds_extra_data={"popularity": 200}, + ) + + self.assertEqual(child["data"]["popularity"], 60) + a = self.c.feed("user", "michelle").get(limit=1)["results"][0] + self.assertTrue(child["id"] in a["reaction"]) + self.assertEqual(a["verb"], "superlike") + self.assertEqual(a["popularity"], 200) + def test_reaction_get(self): response = self.c.reactions.add( "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" @@ -1307,12 +1439,120 @@ def test_reaction_delete(self): ) self.c.reactions.delete(response["id"]) + def test_reaction_hard_delete(self): + response = self.c.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + self.c.reactions.delete(response["id"], soft=False) + + def test_reaction_soft_delete(self): + response = self.c.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + self.c.reactions.delete(response["id"], soft=True) + + def test_reaction_soft_delete_and_restore(self): + response = self.c.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + self.c.reactions.delete(response["id"], soft=True) + r1 = self.c.reactions.get(response["id"]) + self.assertIsNot(r1["deleted_at"], None) + self.c.reactions.restore(response["id"]) + r1 = self.c.reactions.get(response["id"]) + self.assertTrue("deleted_at" not in r1) + + def test_reaction_invalid_restore(self): + response = self.c.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + self.assertRaises( + DoesNotExistException, lambda: self.c.reactions.restore(response["id"]) + ) + def test_reaction_add_child(self): response = self.c.reactions.add( "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" ) self.c.reactions.add_child("like", response["id"], "rob") + def test_reaction_add_with_moderation_template(self): + """Test adding a reaction with moderation template""" + try: + response = 
self.c.reactions.add( + "like", + "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", + "mike", + moderation_template="test_moderation_template", + ) + # If moderation is enabled, verify the reaction was created + self.assertTrue("id" in response) + reaction = self.c.reactions.get(response["id"]) + self.assertEqual(reaction["kind"], "like") + self.assertEqual(reaction["user_id"], "mike") + except Exception as e: + # If moderation is not enabled, we expect a specific error + # The important thing is that the moderation_template parameter + # was accepted and passed to the API without causing a client-side error + error_message = str(e) + self.assertTrue( + "moderation not enabled" in error_message, + f"Expected moderation error, but got: {error_message}", + ) + + def test_reaction_add_child_with_moderation_template(self): + """Test adding a child reaction with moderation template""" + # First create a parent reaction + parent_response = self.c.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + + try: + # Add child with moderation template + child_response = self.c.reactions.add_child( + "reply", + parent_response["id"], + "rob", + data={"text": "Great post!"}, + moderation_template="child_moderation_template", + ) + # If moderation is enabled, verify the child reaction was created + self.assertTrue("id" in child_response) + child_reaction = self.c.reactions.get(child_response["id"]) + self.assertEqual(child_reaction["kind"], "reply") + self.assertEqual(child_reaction["user_id"], "rob") + self.assertEqual(child_reaction["parent"], parent_response["id"]) + except Exception as e: + # If moderation is not enabled, we expect a specific error + # The important thing is that the moderation_template parameter + # was accepted and passed to the API without causing a client-side error + error_message = str(e) + self.assertTrue( + "moderation not enabled" in error_message, + f"Expected moderation error, but got: {error_message}", + ) + + def 
test_reaction_add_without_moderation_template(self): + """Test that existing functionality still works without moderation template""" + response = self.c.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + self.assertTrue("id" in response) + reaction = self.c.reactions.get(response["id"]) + self.assertEqual(reaction["kind"], "like") + + def test_reaction_add_child_without_moderation_template(self): + """Test that existing child functionality still works without moderation template""" + parent_response = self.c.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + child_response = self.c.reactions.add_child( + "reply", parent_response["id"], "rob" + ) + self.assertTrue("id" in child_response) + child_reaction = self.c.reactions.get(child_response["id"]) + self.assertEqual(child_reaction["parent"], parent_response["id"]) + def test_reaction_filter_random(self): self.c.reactions.filter( kind="like", @@ -1377,6 +1617,13 @@ def test_user_get(self): self.assertTrue("updated_at" in user) self.assertTrue("id" in user) + def test_user_get_with_follow_counts(self): + response = self.c.users.add(str(uuid1())) + user = self.c.users.get(response["id"], with_follow_counts=True) + self.assertEqual(user["id"], response["id"]) + self.assertTrue("followers_count" in user) + self.assertTrue("following_count" in user) + def test_user_update(self): response = self.c.users.add(str(uuid1())) self.c.users.update(response["id"], {"changed": True}) @@ -1473,3 +1720,86 @@ def test_feed_enrichment_reaction_counts(self): reaction.pop("duration") enriched_response = f.get(reactions={"counts": True}) self.assertEqual(enriched_response["results"][0]["reaction_counts"]["like"], 1) + + def test_track_engagements(self): + engagements = [ + { + "content": "1", + "label": "click", + "features": [ + {"group": "topic", "value": "js"}, + {"group": "user", "value": "tommaso"}, + ], + "user_data": "tommaso", + }, + { + "content": "2", + "label": "click", + 
"features": [ + {"group": "topic", "value": "go"}, + {"group": "user", "value": "tommaso"}, + ], + "user_data": {"id": "486892", "alias": "Julian"}, + }, + { + "content": "3", + "label": "click", + "features": [{"group": "topic", "value": "go"}], + "user_data": {"id": "tommaso", "alias": "tommaso"}, + }, + ] + client.track_engagements(engagements) + + def test_track_impressions(self): + impressions = [ + { + "content_list": ["1", "2", "3"], + "features": [ + {"group": "topic", "value": "js"}, + {"group": "user", "value": "tommaso"}, + ], + "user_data": {"id": "tommaso", "alias": "tommaso"}, + }, + { + "content_list": ["2", "3", "5"], + "features": [{"group": "topic", "value": "js"}], + "user_data": {"id": "486892", "alias": "Julian"}, + }, + ] + client.track_impressions(impressions) + + def test_og(self): + response = client.og("https://google.com") + self.assertTrue("title" in response) + self.assertTrue("description" in response) + + def test_follow_stats(self): + uniq = uuid4() + f = client.feed("user", uniq) + f.follow("user", uuid4()) + f.follow("user", uuid4()) + f.follow("user", uuid4()) + + client.feed("user", uuid4()).follow("user", uniq) + client.feed("timeline", uuid4()).follow("user", uniq) + + feed_id = "user:" + str(uniq) + response = client.follow_stats(feed_id)["results"] + self.assertEqual(response["following"]["count"], 3) + self.assertEqual(response["followers"]["count"], 2) + + response = client.follow_stats( + feed_id, followers_slugs=["timeline"], following_slugs=["timeline"] + )["results"] + self.assertEqual(response["following"]["count"], 0) + self.assertEqual(response["followers"]["count"], 1) + + def test_token_type(self): + """ + test to check whether token is a byte or string + """ + with_bytes = Feed(client, "user", "1", b"token") + self.assertEqual(with_bytes.token, "token") + + with_str = Feed(client, "user", "1", "token") + self.assertEqual(with_str.token, "token") diff --git a/stream/users.py b/stream/users.py deleted file mode 
100644 index c786208..0000000 --- a/stream/users.py +++ /dev/null @@ -1,36 +0,0 @@ -class Users(object): - def __init__(self, client, token): - self.client = client - self.token = token - - def create_reference(self, id): - _id = id - if isinstance(id, (dict,)) and id.get("id") is not None: - _id = id.get("id") - return "SU:%s" % _id - - def add(self, user_id, data=None, get_or_create=False): - payload = dict(id=user_id, data=data) - return self.client.post( - "user/", - service_name="api", - signature=self.token, - data=payload, - params={"get_or_create": get_or_create}, - ) - - def get(self, user_id): - return self.client.get( - "user/%s" % user_id, service_name="api", signature=self.token - ) - - def update(self, user_id, data=None): - payload = dict(data=data) - return self.client.put( - "user/%s" % user_id, service_name="api", signature=self.token, data=payload - ) - - def delete(self, user_id): - return self.client.delete( - "user/%s" % user_id, service_name="api", signature=self.token - ) diff --git a/stream/users/__init__.py b/stream/users/__init__.py new file mode 100644 index 0000000..f1cfbef --- /dev/null +++ b/stream/users/__init__.py @@ -0,0 +1 @@ +from .user import AsyncUsers, Users diff --git a/stream/users/base.py b/stream/users/base.py new file mode 100644 index 0000000..21d3d8d --- /dev/null +++ b/stream/users/base.py @@ -0,0 +1,38 @@ +from abc import ABC, abstractmethod + + +class AbstractUsers(ABC): + @abstractmethod + def create_reference(self, id): + pass + + @abstractmethod + def add(self, user_id, data=None, get_or_create=False): + pass + + @abstractmethod + def get(self, user_id, **params): + pass + + @abstractmethod + def update(self, user_id, data=None): + pass + + @abstractmethod + def delete(self, user_id): + pass + + +class BaseUsers(AbstractUsers, ABC): + API_ENDPOINT = "user/" + SERVICE_NAME = "api" + + def __init__(self, client, token): + self.client = client + self.token = token + + def create_reference(self, id): + _id = id + if 
class Users(BaseUsers):
    """Synchronous client for the users API endpoints.

    All requests are signed with the users token and sent through the shared
    ``self.client`` HTTP layer. ``API_ENDPOINT`` ("user/") already carries a
    trailing slash, so per-user paths are built by direct concatenation —
    inserting another "/" would request ``user//<id>`` instead of
    ``user/<id>`` (the path the pre-refactor ``stream/users.py`` used).
    """

    def add(self, user_id, data=None, get_or_create=False):
        """Create a user.

        :param user_id: id of the user to create.
        :param data: optional profile data stored on the user.
        :param get_or_create: when truthy, an existing user with this id is
            returned instead of raising a conflict error.
        """
        payload = dict(id=user_id, data=data)
        return self.client.post(
            self.API_ENDPOINT,
            service_name=self.SERVICE_NAME,
            signature=self.token,
            data=payload,
            params={"get_or_create": get_or_create},
        )

    def get(self, user_id, **params):
        """Retrieve a single user; extra kwargs are sent as query parameters."""
        return self.client.get(
            # No "/" between the two parts: API_ENDPOINT ends with one.
            f"{self.API_ENDPOINT}{user_id}",
            service_name=self.SERVICE_NAME,
            params=params,
            signature=self.token,
        )

    def update(self, user_id, data=None):
        """Replace the ``data`` payload of an existing user."""
        payload = dict(data=data)
        return self.client.put(
            f"{self.API_ENDPOINT}{user_id}",
            service_name=self.SERVICE_NAME,
            signature=self.token,
            data=payload,
        )

    def delete(self, user_id):
        """Remove a user by id."""
        return self.client.delete(
            f"{self.API_ENDPOINT}{user_id}",
            service_name=self.SERVICE_NAME,
            signature=self.token,
        )
def get_reaction_params(reactions):
    """Translate a reactions-options dict into enrichment query parameters.

    :param reactions: None, or a dict with optional keys ``own``, ``recent``,
        ``counts`` (truthy flags) and ``kinds`` (a list of kind names or a
        pre-joined comma-separated string).
    :returns: dict of query parameters understood by the feed endpoints.
    :raises TypeError: when ``reactions`` is neither None nor a dict.
    """
    if reactions is not None and not isinstance(reactions, dict):
        raise TypeError("reactions argument should be a dictionary")

    params = {}
    if reactions is None:
        return params

    # Each truthy flag switches on the corresponding enrichment parameter.
    flag_to_param = {
        "own": "withOwnReactions",
        "recent": "withRecentReactions",
        "counts": "withReactionCounts",
    }
    for flag, param_name in flag_to_param.items():
        if reactions.get(flag):
            params[param_name] = True

    kinds = reactions.get("kinds")
    if kinds:
        if isinstance(kinds, list):
            # Normalize list input: trim whitespace, drop empties, comma-join.
            kinds = ",".join(kind.strip() for kind in kinds if kind.strip())
        params["reactionKindsFilter"] = kinds
    return params