From 627ab7fd7298c5cc3c91c089c42b5d2c6c11f06c Mon Sep 17 00:00:00 2001 From: Jelte Fennema Date: Tue, 10 Jan 2017 15:14:58 +0100 Subject: [PATCH 001/208] Travis: Test against recently released python 3.6 --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index dd66e6f..3812f19 100644 --- a/.travis.yml +++ b/.travis.yml @@ -5,6 +5,7 @@ python: - 3.3 - 3.4 - 3.5 + - 3.6 # These are allowed to fail - '3.5-dev' # 3.5 development branch - '3.6-dev' # 3.6 development branch From 0b3096dc3346a471e1394ea93ceda930d1c3ebd8 Mon Sep 17 00:00:00 2001 From: Jelte Fennema Date: Wed, 8 Feb 2017 13:01:02 +0100 Subject: [PATCH 002/208] Travis: Update pypy3 so no tests fail --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index dd66e6f..03d6399 100644 --- a/.travis.yml +++ b/.travis.yml @@ -10,7 +10,7 @@ python: - '3.6-dev' # 3.6 development branch - 'nightly' # currently points to 3.7-dev - 'pypy' - - 'pypy3' + - 'pypy3.3-5.2-alpha1' matrix: fast_finish: true @@ -19,7 +19,7 @@ matrix: - python: '3.6-dev' # 3.6 development branch - python: 'nightly' - python: 'pypy' - - python: 'pypy3' + - python: 'pypy3.3-5.2-alpha1' cache: pip notifications: From 589244e99f1dde1c107d3db1ad3eb8269f4a3752 Mon Sep 17 00:00:00 2001 From: Dwight Gunning Date: Wed, 15 Mar 2017 17:00:38 +0100 Subject: [PATCH 003/208] Update copyright for 2017. Add copyright and license section to README --- LICENSE | 42 +++++++++++++++++++++--------------------- README.md | 6 ++++++ 2 files changed, 27 insertions(+), 21 deletions(-) diff --git a/LICENSE b/LICENSE index c220d2a..f883096 100644 --- a/LICENSE +++ b/LICENSE @@ -1,27 +1,27 @@ -Copyright (c) 2014, Stream.io, Inc +Copyright (c) 2014-2017 Stream.io Inc, and individual contributors. + All rights reserved. -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: +Redistribution and use in source and binary forms, with or without modification, are permitted +provided that the following conditions are met: -* Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. + 1. Redistributions of source code must retain the above copyright notice, this list of + conditions and the following disclaimer. -* Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. + 2. Redistributions in binary form must reproduce the above copyright notice, this list of + conditions and the following disclaimer in the documentation and/or other materials + provided with the distribution. -* Neither the name of the {organization} nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. + 3. Neither the name of the copyright holder nor the names of its contributors may + be used to endorse or promote products derived from this software without specific prior + written permission. -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. diff --git a/README.md b/README.md index 7d6df8e..53a269e 100644 --- a/README.md +++ b/README.md @@ -137,3 +137,9 @@ py.test stream/tests.py --cov stream --cov-report html # against a local API backend LOCAL=true py.test stream/tests.py ``` + +### Copyright and License Information + +Copyright (c) 2014-2017 Stream.io Inc, and individual contributors. All rights reserved. + +See the file "LICENSE" for information on the history of this software, terms & conditions for usage, and a DISCLAIMER OF ALL WARRANTIES. 
From e0c2c298cff2452d566a24616ebedf87a636ba25 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Tue, 11 Apr 2017 19:56:28 +0200 Subject: [PATCH 004/208] remove old tests --- stream/tests.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/stream/tests.py b/stream/tests.py index c08b602..8e6cd04 100644 --- a/stream/tests.py +++ b/stream/tests.py @@ -416,11 +416,8 @@ def test_follow_and_delete(self): actor_id = random.randint(10, 100000) activity_data = {'actor': actor_id, 'verb': 'tweet', 'object': 1} activity_id = user_feed.add_activity(activity_data)['id'] - - agg_feed.follow(user_feed.slug, user_feed.user_id) user_feed.remove_activity(activity_id) - activities = agg_feed.get(limit=3)['results'] activity = self._get_first_aggregated_activity(activities) activity_id_found = (activity['id'] if activity is not None @@ -813,12 +810,6 @@ def test_serialization(self): loaded = serializer.loads(serialized) self.assertEqual(data, loaded) - def test_signed_request_post(self): - self.c._make_signed_request('post', 'test/auth/digest/', {}, {}) - - def test_signed_request_get(self): - self.c._make_signed_request('post', 'test/auth/digest/', {}, {}) - def test_follow_many(self): sources = [getfeed('user', str(i)).id for i in range(10)] targets = [getfeed('flat', str(i)).id for i in range(10)] From 2a0dfc4e4494ab489247347ca3a550d39b125a15 Mon Sep 17 00:00:00 2001 From: Jelte Fennema Date: Fri, 14 Apr 2017 12:30:35 +0200 Subject: [PATCH 005/208] Ignore .venv file --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 09c25b2..6cfbe24 100644 --- a/.gitignore +++ b/.gitignore @@ -54,3 +54,5 @@ docs/_build/ secrets.*sh .idea + +.venv From e1c6b1bde1ebad54e82bcd07e8c5fea4abeb6eb3 Mon Sep 17 00:00:00 2001 From: Dwight Gunning Date: Tue, 25 Apr 2017 17:17:46 +0200 Subject: [PATCH 006/208] Tidied up readme documentation references --- README.md | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/README.md b/README.md index 53a269e..c0b980b 100644 --- a/README.md +++ b/README.md @@ -3,18 +3,24 @@ stream-python [![Build Status](https://travis-ci.org/GetStream/stream-python.svg?branch=master)](https://travis-ci.org/GetStream/stream-python) [![Coverage Status](https://coveralls.io/repos/github/GetStream/stream-python/badge.svg?branch=master)](https://coveralls.io/r/GetStream/stream-python?branch=master) [![PyPI version](https://badge.fury.io/py/stream-python.svg)](http://badge.fury.io/py/stream-python) -stream-python is the official Python client for [Stream](https://getstream.io/), a web service for building scalable newsfeeds and activity streams. -The full documentation is available on [GetStream.io/docs](http://getstream.io/docs/?language=python). Note that there is also a [higher level Django integration](https://github.com/getstream/stream-django) which hooks into your ORM. +[stream-python](https://github.com/GetStream/stream-python) is the official Python client for [Stream](https://getstream.io/), a web service for building scalable newsfeeds and activity streams. + +Note there is also a higher level [Django - Stream integration](https://github.com/getstream/stream-django) library which hooks into the Django ORM. + +You can sign up for a Stream account at https://getstream.io/get_started. 
### Installation #### Install from Pypi - ```bash pip install stream-python ``` +### Full documentation + +Documentation for this Python client are available at the [Stream website](https://getstream.io/docs/?language=python) or on [Read the Docs](http://stream-python.readthedocs.org/en/latest/). + ### Usage ```python @@ -38,7 +44,7 @@ result = user_feed_1.get(limit=5, id_lt="e561de8f-00f1-11e4-b400-0cc47a024be0") activity_data = {'actor': 1, 'verb': 'tweet', 'object': 1, 'foreign_id': 'tweet:1'} activity_response = user_feed_1.add_activity(activity_data) # Create a bit more complex activity -activity_data = {'actor': 1, 'verb': 'run', 'object': 1, 'foreign_id': 'run:1', +activity_data = {'actor': 1, 'verb': 'run', 'object': 1, 'foreign_id': 'run:1', 'course': {'name': 'Golden Gate park', 'distance': 10}, 'participants': ['Thierry', 'Tommaso'], 'started_at': datetime.datetime.now() @@ -97,19 +103,19 @@ readonly_token = user_feed_1.get_readonly_token() # Generate a redirect url for the Stream Analytics platform to track # events/impressions on url clicks impression = { - 'content_list': ['tweet:1', 'tweet:2', 'tweet:3'], - 'user_data': 'tommaso', + 'content_list': ['tweet:1', 'tweet:2', 'tweet:3'], + 'user_data': 'tommaso', 'location': 'email', 'feed_id': 'user:global' } engagement = { - 'content': 'tweet:2', + 'content': 'tweet:2', 'label': 'click', - 'position': 1, - 'user_data': 'tommaso', + 'position': 1, + 'user_data': 'tommaso', 'location': 'email', - 'feed_id': + 'feed_id': 'user:global' } @@ -118,12 +124,6 @@ events = [impression, engagement] redirect_url = client.create_redirect_url('http://google.com/', 'user_id', events) ``` - - -API docs are on [Read the -docs](http://stream-python.readthedocs.org/en/latest/). - - [JS client](http://github.com/getstream/stream-js). 
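As a rough sketch of what the generated redirect actually carries (mirroring the library's own `create_redirect_url` test, which decodes the `events` query parameter), the tracking events are embedded in the URL as a JSON string and can be inspected with the standard library; `client` and `events` are the objects from the snippet above:

```python
import json
try:
    from urllib.parse import urlparse, parse_qs  # Python 3
except ImportError:
    from urlparse import urlparse, parse_qs      # Python 2

# Build the tracking redirect as shown above
redirect_url = client.create_redirect_url('http://google.com/', 'user_id', events)

# The events list is serialized into the query string as JSON,
# so it should round-trip back to the original list
query = parse_qs(urlparse(redirect_url).query)
assert json.loads(query['events'][0]) == events
```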
### Contributing From 7125b4f49d5b0348941c6ee59b65803d0157972b Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Sat, 29 Apr 2017 19:48:52 +0200 Subject: [PATCH 007/208] make sure validate field is present --- stream/tests.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/stream/tests.py b/stream/tests.py index 8e6cd04..23602e0 100644 --- a/stream/tests.py +++ b/stream/tests.py @@ -61,9 +61,21 @@ def getfeed(feed_slug, user_id): return client.feed(feed_slug, user_id + get_unique_postfix()) +def api_request_parse_validator(test): + def wrapper(meth): + def _parse_response(*args, **kwargs): + response = meth(*args, **kwargs) + test.assertIn('duration', response) + return response + return _parse_response + return wrapper + + class ClientTest(TestCase): def setUp(self): + client._parse_response = api_request_parse_validator(self)(client._parse_response) + # DEBUG account details user1 = getfeed('user', '1') user2 = getfeed('user', '2') From 4dffa923b521396d4dcc6fd592a90da272fbcd53 Mon Sep 17 00:00:00 2001 From: Thierry Schellenbach Date: Mon, 22 May 2017 13:52:11 -0600 Subject: [PATCH 008/208] Update .travis.yml --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 03d6399..d2d087b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,6 +1,7 @@ language: python sudo: false python: + - 2.6 - 2.7 - 3.3 - 3.4 From 63241c04d0db882615bfb0addfe7b35687a8d968 Mon Sep 17 00:00:00 2001 From: ian douglas Date: Mon, 22 May 2017 15:28:41 -0600 Subject: [PATCH 009/208] Python 2.6.9 support --- .travis.yml | 1 + setup.py | 2 +- stream/exceptions.py | 5 +++-- stream/tests.py | 28 +++++++++++++--------------- 4 files changed, 18 insertions(+), 18 deletions(-) diff --git a/.travis.yml b/.travis.yml index 03d6399..d2d087b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,6 +1,7 @@ language: python sudo: false python: + - 2.6 - 2.7 - 3.3 - 3.4 diff --git a/setup.py b/setup.py index c6427be..618e8be 100644 --- a/setup.py +++ b/setup.py @@ -28,7 +28,7 @@ install_requires = [ 'pyjwt==1.3.0', - 'requests>=2.3.0', + 'requests>=2.2.1', 'six>=1.8.0', 'httpsig==1.1.2' ] diff --git a/stream/exceptions.py b/stream/exceptions.py index 58d8753..a0ec5bf 100644 --- a/stream/exceptions.py +++ b/stream/exceptions.py @@ -85,6 +85,7 @@ def get_exceptions(): def get_exception_dict(): - classes = get_exceptions() - exception_dict = {c.code: c for c in classes} + exception_dict = {} + for c in get_exceptions(): + exception_dict[c.code] = c return exception_dict diff --git a/stream/tests.py b/stream/tests.py index c08b602..6aea143 100644 --- a/stream/tests.py +++ b/stream/tests.py @@ -4,7 +4,10 @@ from stream.exceptions import ApiKeyException, InputException import random import jwt -from unittest.case import TestCase +try: + from unittest.case import TestCase +except ImportError: + from unittest import TestCase import json import os @@ -42,6 +45,7 @@ def connect_debug(): location='us-east', timeout=30, base_url='http://qa-api.getstream.io/api/', + # base_url='http://localhost-api.getstream.io:8000/api/', ) client = connect_debug() @@ -296,7 +300,6 @@ def test_add_activity_to(self): activities = team_follower_feed.get(limit=1)['results'] self.assertFirstActivityIDNotEqual(activities, activity_id) - def test_add_activity_to_type_error(self): user_feed = getfeed('user', '1') activity_data = { @@ -304,8 +307,7 @@ def test_add_activity_to_type_error(self): 'to': 'string' } - with self.assertRaises(TypeError): - user_feed.add_activity(activity_data) + self.assertRaises(TypeError, 
user_feed.add_activity, activity_data) def assertFirstActivityIDEqual(self, activities, correct_activity_id): activity_id = None @@ -601,7 +603,6 @@ def test_get_not_marked_seen(self): print(notification_feed.add_activity({'actor': 2, 'verb': 'tweet', 'object': 2})['id']) print(notification_feed.add_activity({'actor': 3, 'verb': 'tweet', 'object': 3})['id']) - activities = notification_feed.get(limit=3)['results'] from pprint import pprint print(len(activities)) @@ -649,7 +650,6 @@ def test_mark_read_by_id(self): print(notification_feed.add_activity({'actor': 2, 'verb': 'tweet', 'object': 2})['id']) # ['id'] print(notification_feed.add_activity({'actor': 3, 'verb': 'tweet', 'object': 2})['id']) # ['id'] - activities = notification_feed.get(limit=3)['results'] ids = [] from pprint import pprint @@ -813,11 +813,11 @@ def test_serialization(self): loaded = serializer.loads(serialized) self.assertEqual(data, loaded) - def test_signed_request_post(self): - self.c._make_signed_request('post', 'test/auth/digest/', {}, {}) - - def test_signed_request_get(self): - self.c._make_signed_request('post', 'test/auth/digest/', {}, {}) + # def test_signed_request_post(self): + # self.c._make_signed_request('post', 'test/auth/digest/', {}, {}) + # + # def test_signed_request_get(self): + # self.c._make_signed_request('post', 'test/auth/digest/', {}, {}) def test_follow_many(self): sources = [getfeed('user', str(i)).id for i in range(10)] @@ -828,14 +828,14 @@ def test_follow_many(self): for target in targets: follows = self.c.feed(*target.split(':')).followers()['results'] self.assertEqual(len(follows), 1) - self.assertIn(follows[0]['feed_id'], sources) + self.assertTrue(follows[0]['feed_id'] in sources) self.assertEqual(follows[0]['target_id'], target) for source in sources: follows = self.c.feed(*source.split(':')).following()['results'] self.assertEqual(len(follows), 1) self.assertEqual(follows[0]['feed_id'], source) - self.assertIn(follows[0]['target_id'], targets) + self.assertTrue(follows[0]['target_id'] in targets) def test_follow_many_acl(self): sources = [getfeed('user', str(i)) for i in range(10)] @@ -930,8 +930,6 @@ def test_create_email_redirect(self): self.assertEqual(json.loads(qs['events'][0]), events) - - def test_email_redirect_invalid_target(self): engagement = {'foreign_id': 'tweet:1', 'label': 'click', 'position': 3, 'user_id': 'tommaso', 'location': 'email', 'feed_id': 'user:global'} impression = {'foreign_ids': ['tweet:1', 'tweet:2', 'tweet:3', 'tweet:4', 'tweet:5'], 'user_id': From 1a6808fbfd5a4f4c84170a95c258f856105077cd Mon Sep 17 00:00:00 2001 From: ian douglas Date: Mon, 22 May 2017 15:35:38 -0600 Subject: [PATCH 010/208] updated changelog and docs --- CHANGELOG | 10 +++++++++- setup.py | 1 + 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/CHANGELOG b/CHANGELOG index 07ccddb..77c885a 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,14 @@ Change history ================ +2.3.10 +====== +:release-date: 2017-05-22 +:by: Ian Douglas + +* Added support for Python 2.6.9 and downgrade to requests 2.2.1 + + 2.3.9 ========== :release-date: 2016-12-20 @@ -87,7 +95,7 @@ :by: Thierry Schellenbach * Breaking change: New style feed syntax, client.feed('user', '1') instead of client.feed('user:3') -* Breaking change: New style follow syntax, feed.follow('user', 3) +* Breaking change: New style follow syntax, feed.follow('user', 3) * API versioning support * Configurable timeouts * Python 3 support diff --git a/setup.py b/setup.py index 618e8be..2a7e999 100644 --- a/setup.py +++ 
b/setup.py @@ -72,6 +72,7 @@ def run_tests(self): 'Development Status :: 5 - Production/Stable', 'License :: OSI Approved :: BSD License', 'Natural Language :: English', + 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', From da9d3745d9e40d91f423ce37c856eaf458adeac0 Mon Sep 17 00:00:00 2001 From: ian douglas Date: Mon, 22 May 2017 15:47:34 -0600 Subject: [PATCH 011/208] Bug fix for python 2.6.9 support --- stream/tests.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stream/tests.py b/stream/tests.py index c8ce5ca..7f6d259 100644 --- a/stream/tests.py +++ b/stream/tests.py @@ -69,7 +69,7 @@ def api_request_parse_validator(test): def wrapper(meth): def _parse_response(*args, **kwargs): response = meth(*args, **kwargs) - test.assertIn('duration', response) + test.assertTrue('duration' in response) return response return _parse_response return wrapper From 5664abefb71ff3a043fa4f49176ea763cf66fc6c Mon Sep 17 00:00:00 2001 From: ian douglas Date: Mon, 22 May 2017 15:52:53 -0600 Subject: [PATCH 012/208] updated versioning for pypi --- stream/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stream/__init__.py b/stream/__init__.py index 6ee4f53..22b3416 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = 'Copyright 2014, Stream.io, Inc' __credits__ = ['Thierry Schellenbach, mellowmorning.com, @tschellenbach'] __license__ = 'BSD-3-Clause' -__version__ = '2.3.9' +__version__ = '2.3.10' __maintainer__ = 'Thierry Schellenbach' __email__ = 'support@getstream.io' __status__ = 'Production' From ae4838a889e630ebe117ce510774d37915432bb5 Mon Sep 17 00:00:00 2001 From: Thierry Schellenbach Date: Mon, 22 May 2017 16:23:58 -0600 Subject: [PATCH 013/208] bumping the version --- CHANGELOG | 3 +-- stream/__init__.py | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/CHANGELOG b/CHANGELOG index 77c885a..f15f178 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,7 +2,7 @@ Change history ================ -2.3.10 +2.3.11 ====== :release-date: 2017-05-22 :by: Ian Douglas @@ -114,4 +114,3 @@ :by: Tommaso Barbugli * Add support for mark read (notifications feeds) - diff --git a/stream/__init__.py b/stream/__init__.py index 22b3416..a6e06db 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = 'Copyright 2014, Stream.io, Inc' __credits__ = ['Thierry Schellenbach, mellowmorning.com, @tschellenbach'] __license__ = 'BSD-3-Clause' -__version__ = '2.3.10' +__version__ = '2.3.11' __maintainer__ = 'Thierry Schellenbach' __email__ = 'support@getstream.io' __status__ = 'Production' From bc77cdc61077b02ed6ce5d6f432604697dde424b Mon Sep 17 00:00:00 2001 From: Jelte Fennema Date: Mon, 24 Jul 2017 17:34:01 +0200 Subject: [PATCH 014/208] Add method for new API entry point to change to targets --- stream/feed.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/stream/feed.py b/stream/feed.py index 9be6dfe..c4bdaf6 100644 --- a/stream/feed.py +++ b/stream/feed.py @@ -211,3 +211,18 @@ def add_to_signature(self, recipients): feed = self.client.feed(feed_slug, user_id) data.append("%s %s" % (recipient, feed.token)) return data + + def change_activity_to(self, activity_id, replace=None, add=None, + delete=None): + data = {} + if replace is not None: + data['replace'] = replace + if add is not None: + data['add'] = add + if delete is not None: + data['delete'] = delete + + url = 
self.feed_url + ('change_activity_to/%s/' % activity_id) + + token = self.create_scope_token('feed_targets', 'write') + return self.client.post(url, data=data, signature=token) From 56e83b529df6f30e9ea30c64542a2a3f31d0e3cc Mon Sep 17 00:00:00 2001 From: Jelte Fennema Date: Tue, 25 Jul 2017 14:12:17 +0200 Subject: [PATCH 015/208] Change to: Fix url --- stream/feed.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/stream/feed.py b/stream/feed.py index c4bdaf6..3f3b105 100644 --- a/stream/feed.py +++ b/stream/feed.py @@ -19,6 +19,7 @@ def __init__(self, client, feed_slug, user_id, token): self.token = token self.feed_url = 'feed/%s/' % self.id.replace(':', '/') + self.feed_targets_url = 'feed_targets/%s/' % self.id.replace(':', '/') self.feed_together = self.id.replace(':', '') self.signature = self.feed_together + ' ' + self.token @@ -222,7 +223,7 @@ def change_activity_to(self, activity_id, replace=None, add=None, if delete is not None: data['delete'] = delete - url = self.feed_url + ('change_activity_to/%s/' % activity_id) + url = self.feed_targets_url + ('change_activity_to/%s/' % activity_id) token = self.create_scope_token('feed_targets', 'write') return self.client.post(url, data=data, signature=token) From 014a48579d856bc353a55b94b9a7d84578ba633e Mon Sep 17 00:00:00 2001 From: Jelte Fennema Date: Tue, 25 Jul 2017 16:40:25 +0200 Subject: [PATCH 016/208] Add test for change_activity_to --- stream/tests.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/stream/tests.py b/stream/tests.py index 7f6d259..1a33e71 100644 --- a/stream/tests.py +++ b/stream/tests.py @@ -565,6 +565,23 @@ def test_do_i_follow(self): self.assertEqual(followings['results'][0]['feed_id'], social.id) self.assertEqual(followings['results'][0]['target_id'], 'user:apy') + def test_change_activity_to(self): + activity_data = {'actor': 1, 'verb': 'tweet', 'object': 1} + activity_data['to'] = ['user:1', 'user:2'] + activity_id = self.user1.add_activity(activity_data)['id'] + + ret = self.user1.change_activity_to(activity_id, replace=['user:3', 'user:2']) + self.assertEqual(len(ret['activity']['to']), 2) + self.assertIn('user:2', ret['activity']['to']) + self.assertIn('user:3', ret['activity']['to']) + + ret = self.user1.change_activity_to(activity_id, add=['user:4', 'user:5'], delete=['user:3']) + self.assertEqual(len(ret['activity']['to']), 3) + self.assertIn('user:2', ret['activity']['to']) + self.assertIn('user:4', ret['activity']['to']) + self.assertIn('user:5', ret['activity']['to']) + + def test_get(self): activity_data = {'actor': 1, 'verb': 'tweet', 'object': 1} activity_id = self.user1.add_activity(activity_data)['id'] From c0fcfb25f6090d0857b0ba4c630d61ea13d5c7f5 Mon Sep 17 00:00:00 2001 From: Jelte Fennema Date: Tue, 25 Jul 2017 17:26:10 +0200 Subject: [PATCH 017/208] Tests: Replace assertIn with assertTrue for 2.6 tests --- stream/tests.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/stream/tests.py b/stream/tests.py index 1a33e71..ad6163f 100644 --- a/stream/tests.py +++ b/stream/tests.py @@ -572,14 +572,14 @@ def test_change_activity_to(self): ret = self.user1.change_activity_to(activity_id, replace=['user:3', 'user:2']) self.assertEqual(len(ret['activity']['to']), 2) - self.assertIn('user:2', ret['activity']['to']) - self.assertIn('user:3', ret['activity']['to']) + self.assertTrue('user:2' in ret['activity']['to']) + self.assertTrue('user:3' in ret['activity']['to']) ret = self.user1.change_activity_to(activity_id, add=['user:4', 
'user:5'], delete=['user:3']) self.assertEqual(len(ret['activity']['to']), 3) - self.assertIn('user:2', ret['activity']['to']) - self.assertIn('user:4', ret['activity']['to']) - self.assertIn('user:5', ret['activity']['to']) + self.assertTrue('user:2' in ret['activity']['to']) + self.assertTrue('user:4' in ret['activity']['to']) + self.assertTrue('user:5' in ret['activity']['to']) def test_get(self): From 6c29a3fa2187d0b59e280f2c17cf465402a1b0d4 Mon Sep 17 00:00:00 2001 From: Jelte Fennema Date: Wed, 26 Jul 2017 11:21:02 +0200 Subject: [PATCH 018/208] Add new exception classes --- stream/exceptions.py | 71 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 71 insertions(+) diff --git a/stream/exceptions.py b/stream/exceptions.py index a0ec5bf..39391bd 100644 --- a/stream/exceptions.py +++ b/stream/exceptions.py @@ -70,6 +70,77 @@ class SiteSuspendedException(StreamApiException): status_code = 401 code = 7 +class InvalidPaginationException(StreamApiException): + + ''' + Raised when there is an issue with your Access Key + ''' + status_code = 401 + code = 8 + + +class MissingRankingException(FeedConfigException): + ''' + Raised when you didn't configure the ranking for the given feed + ''' + status_code = 400 + code = 12 + + +class MissingUserException(MissingRankingException): + status_code = 400 + code = 10 + + +class RankingException(FeedConfigException): + ''' + Raised when there is a runtime issue with ranking the feed + ''' + status_code = 400 + code = 11 + + +class RateLimitReached(StreamApiException): + + ''' + Raised when too many requests are performed + ''' + status_code = 429 + code = 9 + + +class OldStorageBackend(StreamApiException): + ''' + Raised if you try to perform an action which only works with the new storage + ''' + status_code = 400 + code = 13 + + +class BestPracticeException(StreamApiException): + ''' + Raised if best practices are enforced and you do something that + would break a high volume integration + ''' + status_code = 400 + code = 15 + + +class DoesNotExistException(StreamApiException): + ''' + Raised when the requested resource could not be found. + ''' + status_code = 404 + code = 16 + + +class NotAllowedException(StreamApiException): + ''' + Raised when the requested action is not allowed for some reason. 
+ ''' + status_code = 403 + code = 17 + def get_exceptions(): from stream import exceptions From 75fe5d872cbad9010dfe3253f8ca97bef79a6533 Mon Sep 17 00:00:00 2001 From: Jelte Fennema Date: Thu, 27 Jul 2017 14:52:10 +0200 Subject: [PATCH 019/208] Change to: Implement changes --- stream/feed.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/stream/feed.py b/stream/feed.py index 3f3b105..1de5e7b 100644 --- a/stream/feed.py +++ b/stream/feed.py @@ -213,17 +213,21 @@ def add_to_signature(self, recipients): data.append("%s %s" % (recipient, feed.token)) return data - def change_activity_to(self, activity_id, replace=None, add=None, - delete=None): - data = {} + def update_activity_to_targets(self, foreign_id, time, + replace=None, add=None, remove=None): + data = { + 'foreign_id': foreign_id, + 'time': time, + } + if replace is not None: data['replace'] = replace if add is not None: data['add'] = add - if delete is not None: - data['delete'] = delete + if remove is not None: + data['remove'] = remove - url = self.feed_targets_url + ('change_activity_to/%s/' % activity_id) + url = self.feed_targets_url + 'update_activity_to_targets/' token = self.create_scope_token('feed_targets', 'write') return self.client.post(url, data=data, signature=token) From a7ffbd529292fbf66f9f3ebdc5f26e5c1d3d342c Mon Sep 17 00:00:00 2001 From: Jelte Fennema Date: Thu, 27 Jul 2017 16:04:40 +0200 Subject: [PATCH 020/208] Change url a bit again --- stream/feed.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stream/feed.py b/stream/feed.py index 1de5e7b..f14980a 100644 --- a/stream/feed.py +++ b/stream/feed.py @@ -227,7 +227,7 @@ def update_activity_to_targets(self, foreign_id, time, if remove is not None: data['remove'] = remove - url = self.feed_targets_url + 'update_activity_to_targets/' + url = self.feed_targets_url + 'activity_to_targets/' token = self.create_scope_token('feed_targets', 'write') return self.client.post(url, data=data, signature=token) From d3bba639c402e3ab4eb7c9f5b66fd2466dd2c3e1 Mon Sep 17 00:00:00 2001 From: Jelte Fennema Date: Fri, 28 Jul 2017 10:02:55 +0200 Subject: [PATCH 021/208] Fix tests --- stream/tests.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/stream/tests.py b/stream/tests.py index ad6163f..2a4bf42 100644 --- a/stream/tests.py +++ b/stream/tests.py @@ -565,17 +565,25 @@ def test_do_i_follow(self): self.assertEqual(followings['results'][0]['feed_id'], social.id) self.assertEqual(followings['results'][0]['target_id'], 'user:apy') - def test_change_activity_to(self): - activity_data = {'actor': 1, 'verb': 'tweet', 'object': 1} + def test_update_activity_to_targets(self): + time = datetime.datetime.utcnow().isoformat() + foreign_id = 'user:1' + activity_data = { + 'actor': 1, + 'verb': 'tweet', + 'object': 1, + 'foreign_id': foreign_id, + 'time': time, + } activity_data['to'] = ['user:1', 'user:2'] - activity_id = self.user1.add_activity(activity_data)['id'] + self.user1.add_activity(activity_data) - ret = self.user1.change_activity_to(activity_id, replace=['user:3', 'user:2']) + ret = self.user1.update_activity_to_targets(foreign_id, time, replace=['user:3', 'user:2']) self.assertEqual(len(ret['activity']['to']), 2) self.assertTrue('user:2' in ret['activity']['to']) self.assertTrue('user:3' in ret['activity']['to']) - ret = self.user1.change_activity_to(activity_id, add=['user:4', 'user:5'], delete=['user:3']) + ret = self.user1.update_activity_to_targets(foreign_id, time, add=['user:4', 
'user:5'], remove=['user:3']) self.assertEqual(len(ret['activity']['to']), 3) self.assertTrue('user:2' in ret['activity']['to']) self.assertTrue('user:4' in ret['activity']['to']) From 725f3f390667eb1624773cc9f7329354dece70e8 Mon Sep 17 00:00:00 2001 From: Jelte Fennema Date: Tue, 1 Aug 2017 15:34:33 +0200 Subject: [PATCH 022/208] Change to: Change naming of arguments --- stream/feed.py | 15 ++++++++------- stream/tests.py | 4 ++-- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/stream/feed.py b/stream/feed.py index f14980a..9893a8e 100644 --- a/stream/feed.py +++ b/stream/feed.py @@ -214,18 +214,19 @@ def add_to_signature(self, recipients): return data def update_activity_to_targets(self, foreign_id, time, - replace=None, add=None, remove=None): + new_targets=None, added_targets=None, + removed_targets=None): data = { 'foreign_id': foreign_id, 'time': time, } - if replace is not None: - data['replace'] = replace - if add is not None: - data['add'] = add - if remove is not None: - data['remove'] = remove + if new_targets is not None: + data['new_targets'] = new_targets + if added_targets is not None: + data['added_targets'] = added_targets + if removed_targets is not None: + data['removed_targets'] = removed_targets url = self.feed_targets_url + 'activity_to_targets/' diff --git a/stream/tests.py b/stream/tests.py index 2a4bf42..0b145a3 100644 --- a/stream/tests.py +++ b/stream/tests.py @@ -578,12 +578,12 @@ def test_update_activity_to_targets(self): activity_data['to'] = ['user:1', 'user:2'] self.user1.add_activity(activity_data) - ret = self.user1.update_activity_to_targets(foreign_id, time, replace=['user:3', 'user:2']) + ret = self.user1.update_activity_to_targets(foreign_id, time, new_targets=['user:3', 'user:2']) self.assertEqual(len(ret['activity']['to']), 2) self.assertTrue('user:2' in ret['activity']['to']) self.assertTrue('user:3' in ret['activity']['to']) - ret = self.user1.update_activity_to_targets(foreign_id, time, add=['user:4', 'user:5'], remove=['user:3']) + ret = self.user1.update_activity_to_targets(foreign_id, time, added_targets=['user:4', 'user:5'], removed_targets=['user:3']) self.assertEqual(len(ret['activity']['to']), 3) self.assertTrue('user:2' in ret['activity']['to']) self.assertTrue('user:4' in ret['activity']['to']) From 4e38b1cae81942ce23b73593cfb858372f66d324 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Thu, 31 Aug 2017 12:43:58 +0200 Subject: [PATCH 023/208] Update CHANGELOG --- CHANGELOG | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/CHANGELOG b/CHANGELOG index f15f178..374dcc6 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,13 @@ Change history ================ +2.4.0 +====== +:release-date: 2017-08-31 +:by: Tommaso Barbugli + +* Added support for To target update endpoint + 2.3.11 ====== :release-date: 2017-05-22 From fae92626396b6b11bcc22fab6b705eaf4e1583b9 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Thu, 31 Aug 2017 12:48:37 +0200 Subject: [PATCH 024/208] release 2.4.0 --- stream/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stream/__init__.py b/stream/__init__.py index a6e06db..4fce3d3 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = 'Copyright 2014, Stream.io, Inc' __credits__ = ['Thierry Schellenbach, mellowmorning.com, @tschellenbach'] __license__ = 'BSD-3-Clause' -__version__ = '2.3.11' +__version__ = '2.4.0' __maintainer__ = 'Thierry Schellenbach' __email__ = 'support@getstream.io' __status__ = 'Production' From 
a33d8f13227dec5a4cb70546497cadddc10a47c6 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Fri, 13 Oct 2017 13:34:15 +0200 Subject: [PATCH 025/208] Let requests install SSL libs on old Python Also tidies up travis config. --- .travis.yml | 12 ------------ setup.py | 8 ++++++-- 2 files changed, 6 insertions(+), 14 deletions(-) diff --git a/.travis.yml b/.travis.yml index c0a5f03..38a62a8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -7,21 +7,9 @@ python: - 3.4 - 3.5 - 3.6 - # These are allowed to fail - - '3.5-dev' # 3.5 development branch - - '3.6-dev' # 3.6 development branch - - 'nightly' # currently points to 3.7-dev - - 'pypy' - - 'pypy3.3-5.2-alpha1' matrix: fast_finish: true - allow_failures: - - python: '3.5-dev' # 3.5 development branch - - python: '3.6-dev' # 3.6 development branch - - python: 'nightly' - - python: 'pypy' - - python: 'pypy3.3-5.2-alpha1' cache: pip notifications: diff --git a/setup.py b/setup.py index 2a7e999..b13eabc 100644 --- a/setup.py +++ b/setup.py @@ -26,14 +26,18 @@ .. _`Github`: https://github.com/GetStream/stream-python ''' +requests = 'requests>=2.3.0,<3' + +if sys.version_info < (2, 7, 9): + requests = 'requests[security]>=2.4.1,<3' + install_requires = [ 'pyjwt==1.3.0', - 'requests>=2.2.1', + requests, 'six>=1.8.0', 'httpsig==1.1.2' ] - class PyTest(TestCommand): def finalize_options(self): From 55deb5b67c34c661ddff01af1ea8bc3cf141bd71 Mon Sep 17 00:00:00 2001 From: Dwight Gunning Date: Thu, 12 Oct 2017 18:13:15 +0200 Subject: [PATCH 026/208] Updated domain to stream-io-api.com Updated tests and added extra heroku compatibility tests --- stream/__init__.py | 2 +- stream/client.py | 6 +++--- stream/tests.py | 28 ++++++++++++++++++++++------ 3 files changed, 26 insertions(+), 10 deletions(-) diff --git a/stream/__init__.py b/stream/__init__.py index 4fce3d3..8ab4f64 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -29,7 +29,7 @@ def connect(api_key=None, api_secret=None, app_id=None, version='v1.0', result = pattern.match(stream_url) if result and len(result.groups()) == 4: api_key, api_secret, location, app_id = result.groups() - location = None if location == 'getstream' else location + location = None if location in ('getstream', 'stream-io-api') else location else: raise ValueError('Invalid api key or heroku url') diff --git a/stream/client.py b/stream/client.py index f97b110..2bbdc87 100644 --- a/stream/client.py +++ b/stream/client.py @@ -17,7 +17,7 @@ class StreamClient(object): - base_url = 'https://api.getstream.io/api/' + base_url = 'https://api.stream-io-api.com/api/' def __init__(self, api_key, api_secret, app_id, version='v1.0', timeout=6.0, base_url=None, location=None): ''' @@ -57,9 +57,9 @@ def __init__(self, api_key, api_secret, app_id, version='v1.0', timeout=6.0, bas elif base_url is not None: self.base_url = base_url elif location is not None: - self.base_url = 'https://%s-api.getstream.io/api/' % location + self.base_url = 'https://%s-api.stream-io-api.com/api/' % location - self.base_analytics_url = 'https://analytics.getstream.io/analytics/' + self.base_analytics_url = 'https://analytics.stream-io-api.com/analytics/' self.session = requests.Session() # TODO: turn this back on after we verify it doesnt retry on slower requests diff --git a/stream/tests.py b/stream/tests.py index 0b145a3..2c2eb2e 100644 --- a/stream/tests.py +++ b/stream/tests.py @@ -44,7 +44,7 @@ def connect_debug(): secret, location='us-east', timeout=30, - base_url='http://qa-api.getstream.io/api/', + base_url='https://qa.stream-io-api.com/api/', # 
base_url='http://localhost-api.getstream.io:8000/api/', ) @@ -174,7 +174,7 @@ def test_heroku(self): self.assertEqual(client.app_id, '1') def test_heroku_no_location(self): - url = 'https://bvt88g4kvc63:twc5ywfste5bm2ngqkzs7ukxk3pn96yweghjrxcmcrarnt3j4dqj3tucbhym5wfd@getstream.io/?app_id=669' + url = 'https://bvt88g4kvc63:twc5ywfste5bm2ngqkzs7ukxk3pn96yweghjrxcmcrarnt3j4dqj3tucbhym5wfd@stream-io-api.com/?app_id=669' os.environ['STREAM_URL'] = url client = stream.connect() self.assertEqual(client.api_key, 'bvt88g4kvc63') @@ -187,9 +187,9 @@ def test_heroku_no_location(self): client.base_url, 'http://localhost:8000/api/') else: self.assertEqual( - client.base_url, 'https://api.getstream.io/api/') + client.base_url, 'https://api.stream-io-api.com/api/') - def test_heroku_location(self): + def test_heroku_location_compat(self): url = 'https://ahj2ndz7gsan:gthc2t9gh7pzq52f6cky8w4r4up9dr6rju9w3fjgmkv6cdvvav2ufe5fv7e2r9qy@us-east.getstream.io/?app_id=1' os.environ['STREAM_URL'] = url client = stream.connect() @@ -202,7 +202,23 @@ def test_heroku_location(self): client.base_url, 'http://localhost:8000/api/') else: self.assertEqual( - client.base_url, 'https://us-east-api.getstream.io/api/') + client.base_url, 'https://us-east-api.stream-io-api.com/api/') + self.assertEqual(client.app_id, '1') + + def test_heroku_location(self): + url = 'https://ahj2ndz7gsan:gthc2t9gh7pzq52f6cky8w4r4up9dr6rju9w3fjgmkv6cdvvav2ufe5fv7e2r9qy@us-east.stream-io-api.com/?app_id=1' + os.environ['STREAM_URL'] = url + client = stream.connect() + self.assertEqual(client.api_key, 'ahj2ndz7gsan') + self.assertEqual( + client.api_secret, 'gthc2t9gh7pzq52f6cky8w4r4up9dr6rju9w3fjgmkv6cdvvav2ufe5fv7e2r9qy') + + if self.local_tests: + self.assertEqual( + client.base_url, 'http://localhost:8000/api/') + else: + self.assertEqual( + client.base_url, 'https://us-east-api.stream-io-api.com/api/') self.assertEqual(client.app_id, '1') def test_heroku_overwrite(self): @@ -216,7 +232,7 @@ def test_heroku_overwrite(self): def test_location_support(self): client = stream.connect('a', 'b', 'c', location='us-east') - full_location = 'https://us-east-api.getstream.io/api/' + full_location = 'https://us-east-api.stream-io-api.com/api/' if self.local_tests: full_location = 'http://localhost:8000/api/' From 86436237042ba9a10ec5552ec5f5b3a195e9e8c9 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Thu, 19 Oct 2017 14:09:08 +0200 Subject: [PATCH 027/208] release 2.5.0 --- CHANGELOG | 7 +++++++ stream/__init__.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/CHANGELOG b/CHANGELOG index 374dcc6..ae6fd48 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,13 @@ Change history ================ +2.5.0 +====== +:release-date: 2017-10-19 +:by: Tommaso Barbugli + +* Use new .com domain for API and Analytics + 2.4.0 ====== :release-date: 2017-08-31 diff --git a/stream/__init__.py b/stream/__init__.py index 8ab4f64..53241a2 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = 'Copyright 2014, Stream.io, Inc' __credits__ = ['Thierry Schellenbach, mellowmorning.com, @tschellenbach'] __license__ = 'BSD-3-Clause' -__version__ = '2.4.0' +__version__ = '2.5.0' __maintainer__ = 'Thierry Schellenbach' __email__ = 'support@getstream.io' __status__ = 'Production' From 8a6e83fc6945b6352cf49012a910a4ac5480fc89 Mon Sep 17 00:00:00 2001 From: Aaron McMillin Date: Fri, 17 Nov 2017 11:29:43 -0500 Subject: [PATCH 028/208] Calling self._make_signed_request should return the response --- stream/client.py | 4 ++-- 1 file 
changed, 2 insertions(+), 2 deletions(-) diff --git a/stream/client.py b/stream/client.py index 2bbdc87..98b089e 100644 --- a/stream/client.py +++ b/stream/client.py @@ -236,7 +236,7 @@ def add_to_many(self, activity, feeds): ''' data = {'activity': activity, 'feeds': feeds} - self._make_signed_request('post', 'feed/add_to_many/', data=data) + return self._make_signed_request('post', 'feed/add_to_many/', data=data) def follow_many(self, follows, activity_copy_limit=None): ''' @@ -251,7 +251,7 @@ def follow_many(self, follows, activity_copy_limit=None): if activity_copy_limit != None: params = dict(activity_copy_limit=activity_copy_limit) - self._make_signed_request('post', 'follow_many/', params=params, data=follows) + return self._make_signed_request('post', 'follow_many/', params=params, data=follows) def update_activities(self, activities): ''' From ae922d46f7db34d6d2274a7476b28c46ea7cad8f Mon Sep 17 00:00:00 2001 From: Aaron McMillin Date: Tue, 5 Dec 2017 10:02:17 -0500 Subject: [PATCH 029/208] update CHANGELOG --- CHANGELOG | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/CHANGELOG b/CHANGELOG index ae6fd48..0813786 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,13 @@ Change history ================ +2.6.0 +====== +:release-date: ?? +:by: Aaron McMillin + +* All client methods that make requests will return the response + 2.5.0 ====== :release-date: 2017-10-19 From 9d8608e4b1059df9307b8a16e81c77416e8cc1bf Mon Sep 17 00:00:00 2001 From: Balazs Horanyi Date: Wed, 6 Dec 2017 11:40:53 -0700 Subject: [PATCH 030/208] inital personalization commit --- stream/client.py | 21 ++++++++++-- stream/feed.py | 2 ++ stream/personalization.py | 70 +++++++++++++++++++++++++++++++++++++++ 3 files changed, 91 insertions(+), 2 deletions(-) create mode 100644 stream/personalization.py diff --git a/stream/client.py b/stream/client.py index 2bbdc87..b4266a9 100644 --- a/stream/client.py +++ b/stream/client.py @@ -83,6 +83,16 @@ def feed(self, feed_slug, user_id): return Feed(self, feed_slug, user_id, token) + + def personalization(self): + """ + Returns a Personalized Feed object + """ + from stream.personalization import Personalization + token = self.create_jwt_token('*', '*', feed_id='*', user_id='*') + + return Personalization(self, token) + def get_default_params(self): ''' Returns the params with the API key present @@ -101,6 +111,10 @@ def get_full_url(self, relative_url): url = self.base_url + self.version + '/' + relative_url return url + def get_full_personal_url(self, relative_url): + url = self.base_url + '/' + relative_url + '/' + return url + def get_user_agent(self): from stream import __version__ agent = 'stream-python-client-%s' % __version__ @@ -151,7 +165,7 @@ def create_jwt_token(self, resource, action, feed_id=None, user_id=None): payload['user_id'] = user_id return jwt.encode(payload, self.api_secret).decode("utf-8") - def _make_request(self, method, relative_url, signature, params=None, data=None): + def _make_request(self, method, relative_url, signature, personal=False, params=None, data=None): params = params or {} data = data or {} serialized = None @@ -160,7 +174,10 @@ def _make_request(self, method, relative_url, signature, params=None, data=None) headers = self.get_default_header() headers['Authorization'] = signature headers['stream-auth-type'] = 'jwt' - url = self.get_full_url(relative_url) + if personal: + url = self.get_full_personal_url(relative_url) + else: + url = self.get_full_url(relative_url) if method.__name__ in ['post', 'put']: serialized = 
serializer.dumps(data) response = method(url, data=serialized, headers=headers, diff --git a/stream/feed.py b/stream/feed.py index 9893a8e..5167cff 100644 --- a/stream/feed.py +++ b/stream/feed.py @@ -128,6 +128,8 @@ def get(self, **params): self.feed_url, params=params, signature=token) return response + + def follow(self, target_feed_slug, target_user_id, activity_copy_limit=None, **extra_data): ''' Follows the given feed diff --git a/stream/personalization.py b/stream/personalization.py new file mode 100644 index 0000000..f4f0f53 --- /dev/null +++ b/stream/personalization.py @@ -0,0 +1,70 @@ + +class Personalization(object): + def __init__(self, client, token): + + self.client = client + self.token = token + + def get(self, url, **params): + """ + Get personalized activities for this feed + + :param params: + :return: + """ + + response = self.client.get(url, personal=True, params=params, + signature=self.token) + return response + + def post(self, url, *args, **params): + """ + "Generic function to post data to personalization endpoint + :param url: personalization endpoint ex: "meta" + :param args: If endpoint has required args insert them here. + :param kwargs: data is a reserved keyword to post to body + + """ + + args = args or None + data = params['data'] or None + print(data) + if args is not None: + url = url + '/' + '/'.join(list(args)) + + response = self.client.post(url, personal=True, params=params, + signature=self.token, data=data) + return response + + def upsert_data(self, item_type, ids, data): + + if type(ids) != list: + ids = [ids] + if type(data) != list: + data = [data] + + assert len(ids) == len(data), "number of ids must match number of data points" + + # format data to expected json blob + data_json = {} + for i in range(len(ids)): + data_json['%s:%s' % (item_type, ids[i])] = data[i] + + response = self.post("meta", data={'data': data_json}) + + return response + + def select_data(self, item_type, ids): + + if type(ids) != list: + ids = [ids] + + foreign_ids = [] + for i in range(len(ids)): + foreign_ids.append('%s:%s' % (item_type, ids[i])) + + response = self.get('meta', foreign_ids=foreign_ids) + + return response + + From 97229076628960638fab20a034897cd02781be7e Mon Sep 17 00:00:00 2001 From: Balazs Horanyi Date: Wed, 6 Dec 2017 13:38:36 -0700 Subject: [PATCH 031/208] pep8 cleanup --- stream/feed.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/stream/feed.py b/stream/feed.py index 5167cff..9893a8e 100644 --- a/stream/feed.py +++ b/stream/feed.py @@ -128,8 +128,6 @@ def get(self, **params): self.feed_url, params=params, signature=token) return response - - def follow(self, target_feed_slug, target_user_id, activity_copy_limit=None, **extra_data): ''' Follows the given feed From 8a36c13ee8fa676f977358c008e307a39ec933e9 Mon Sep 17 00:00:00 2001 From: Balazs Horanyi Date: Wed, 6 Dec 2017 14:20:21 -0700 Subject: [PATCH 032/208] doc love --- stream/personalization.py | 56 +++++++++++++++++++++++++++------------ 1 file changed, 39 insertions(+), 17 deletions(-) diff --git a/stream/personalization.py b/stream/personalization.py index f4f0f53..40fe87d 100644 --- a/stream/personalization.py +++ b/stream/personalization.py @@ -1,6 +1,10 @@ - class Personalization(object): def __init__(self, client, token): + """ + + :param client: the api client + :param token: the token + """ self.client = client self.token = token @@ -8,35 +12,45 @@ def __init__(self, client, token): def get(self, url, **params): """ Get personalized activities for this feed + :param url: 
personalized url endpoint i.e "follow recommendations" + :param params: params to pass to url i.e user_id = "user:123" + :return: personalized feed - :param params: - :return: + **Example**:: + + personalization.get('follow_recommendations', limit=10, offset=10) """ response = self.client.get(url, personal=True, params=params, signature=self.token) return response - def post(self, url, *args, **params): + def post(self, url, **params): """ "Generic function to post data to personalization endpoint - :param url: personalization endpoint ex: "meta" - :param args: If endpoint has required args insert them here. - :param kwargs: data is a reserved keyword to post to body + :param url: personalized url endpoint i.e "follow recommendations" + :param params: params to pass to url (data is a reserved keyword to post to body) """ - args = args or None data = params['data'] or None - print(data) - if args is not None: - url = url + '/' + '/'.join(list(args)) response = self.client.post(url, personal=True, params=params, signature=self.token, data=data) return response - def upsert_data(self, item_type, ids, data): + def upsert_data(self, feed_group, ids, data): + """ + + :param feed_group: Feed Group i.e 'user' + :param ids: list of ids of feed group i.e [123,456] + :param data: list of dictionaries + :return: http response, 201 if successful along with data posted. + + **Example**:: + personalization.upsert_data('user', [1, 2], [{"name": "Juniper", "hobbies": ["Playing", "Sleeping", "Eating"]}, + {"name": "Ruby", "interests": ["Sunbeams", "Surprise Attacks"]}]) + """ if type(ids) != list: ids = [ids] @@ -48,23 +62,31 @@ def upsert_data(self, item_type, ids, data): # format data to expected json blob data_json = {} for i in range(len(ids)): - data_json['%s:%s' % (item_type, ids[i])] = data[i] + data_json['%s:%s' % (feed_group, ids[i])] = data[i] response = self.post("meta", data={'data': data_json}) return response - def select_data(self, item_type, ids): + def select_data(self, feed_group, ids): + """ + + :param feed_group: Feed Group i.e 'user' + :param ids: list of ids of feed group i.e [123,456] + :return: meta data as json blob + + **Example**:: + personalization.select_data('user', 1) + personalization.select_data('user', [1,2,3]) + """ if type(ids) != list: ids = [ids] foreign_ids = [] for i in range(len(ids)): - foreign_ids.append('%s:%s' % (item_type, ids[i])) + foreign_ids.append('%s:%s' % (feed_group, ids[i])) response = self.get('meta', foreign_ids=foreign_ids) return response - - From 0a430119250c3faa9f21338c253cadba560021e3 Mon Sep 17 00:00:00 2001 From: Balazs Horanyi Date: Wed, 6 Dec 2017 15:09:04 -0700 Subject: [PATCH 033/208] small doc update --- stream/personalization.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stream/personalization.py b/stream/personalization.py index 40fe87d..7e199b4 100644 --- a/stream/personalization.py +++ b/stream/personalization.py @@ -18,7 +18,7 @@ def get(self, url, **params): **Example**:: - personalization.get('follow_recommendations', limit=10, offset=10) + personalization.get('follow_recommendations', user_id=123, limit=10, offset=10) """ response = self.client.get(url, personal=True, params=params, From f3c9033577021bb3c99a5f0f7cdaccc676bbb7cb Mon Sep 17 00:00:00 2001 From: Balazs Horanyi Date: Wed, 6 Dec 2017 15:53:19 -0700 Subject: [PATCH 034/208] add elete meta method, feed_group -> collection_name, ids and data in one blob --- stream/client.py | 2 +- stream/personalization.py | 53 +++++++++++++++++++++++++++++++-------- 
2 files changed, 43 insertions(+), 12 deletions(-) diff --git a/stream/client.py b/stream/client.py index b4266a9..bc4a8fb 100644 --- a/stream/client.py +++ b/stream/client.py @@ -112,7 +112,7 @@ def get_full_url(self, relative_url): return url def get_full_personal_url(self, relative_url): - url = self.base_url + '/' + relative_url + '/' + url = self.base_url + '/personalization/' + relative_url + '/' return url def get_user_agent(self): diff --git a/stream/personalization.py b/stream/personalization.py index 7e199b4..d5c5546 100644 --- a/stream/personalization.py +++ b/stream/personalization.py @@ -39,39 +39,50 @@ def post(self, url, **params): signature=self.token, data=data) return response - def upsert_data(self, feed_group, ids, data): + def delete(self, url, **params): + """ + shortcut to delete metadata or activites + :param url: personalized url endpoint typical "meta" + :param params: params to pass to url i.e user_id = "user:123" + :return: http response + """ + + response = self.client.delete(url, personal=True, params=params, + signature=self.token) + + return response + + def upsert_data(self, collection_name, ids, data): """ - :param feed_group: Feed Group i.e 'user' + :param collection_name: Collection Name i.e 'user' :param ids: list of ids of feed group i.e [123,456] :param data: list of dictionaries :return: http response, 201 if successful along with data posted. **Example**:: - personalization.upsert_data('user', [1, 2], [{"name": "Juniper", "hobbies": ["Playing", "Sleeping", "Eating"]}, - {"name": "Ruby", "interests": ["Sunbeams", "Surprise Attacks"]}]) + personalization.upsert_data('user', [{"id": 1, "name": "Juniper", "hobbies": ["Playing", "Sleeping", "Eating"]}, + {"id": 2, "name": "Ruby", "interests": ["Sunbeams", "Surprise Attacks"]}]) """ - if type(ids) != list: - ids = [ids] if type(data) != list: data = [data] - assert len(ids) == len(data), "number of ids must match number of data points" + ids = [i['id'] for i in data] # format data to expected json blob data_json = {} for i in range(len(ids)): - data_json['%s:%s' % (feed_group, ids[i])] = data[i] + data_json['%s:%s' % (collection_name, ids[i])] = data[i] response = self.post("meta", data={'data': data_json}) return response - def select_data(self, feed_group, ids): + def select_data(self, collection_name, ids): """ - :param feed_group: Feed Group i.e 'user' + :param collection_name: Collection Name i.e 'user' :param ids: list of ids of feed group i.e [123,456] :return: meta data as json blob @@ -85,8 +96,28 @@ def select_data(self, feed_group, ids): foreign_ids = [] for i in range(len(ids)): - foreign_ids.append('%s:%s' % (feed_group, ids[i])) + foreign_ids.append('%s:%s' % (collection_name, ids[i])) response = self.get('meta', foreign_ids=foreign_ids) return response + + def delete_data(self, collection_name, ids): + """ + + :param collection_name: Collection Name i.e 'user' + :param ids: list of ids to delete i.e [123,456] + :return: + """ + + if type(ids) != list: + ids = [ids] + + foreign_ids = [] + for i in range(len(ids)): + foreign_ids.append('%s:%s' % (collection_name, ids[i])) + + response = self.delete('meta', foreign_ids=foreign_ids) + + return response + From 0af79948455bc5253b5fb5d147fb1753a3afa2b2 Mon Sep 17 00:00:00 2001 From: Balazs Horanyi Date: Wed, 6 Dec 2017 15:58:31 -0700 Subject: [PATCH 035/208] forgot to remove ids from params in upsert_data --- stream/personalization.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/stream/personalization.py 
b/stream/personalization.py index d5c5546..907e940 100644 --- a/stream/personalization.py +++ b/stream/personalization.py @@ -52,11 +52,10 @@ def delete(self, url, **params): return response - def upsert_data(self, collection_name, ids, data): + def upsert_data(self, collection_name, data): """ :param collection_name: Collection Name i.e 'user' - :param ids: list of ids of feed group i.e [123,456] :param data: list of dictionaries :return: http response, 201 if successful along with data posted. From 56d074ef2f85be1f57ea701c109b7ad7a5a3c691 Mon Sep 17 00:00:00 2001 From: Balazs Horanyi Date: Thu, 7 Dec 2017 10:39:06 -0700 Subject: [PATCH 036/208] break up personalized feeds and meta data to make more clear + other minor updates to make more clear --- stream/client.py | 11 ++++- stream/collections.py | 85 +++++++++++++++++++++++++++++++++++ stream/personalization.py | 94 +++++++-------------------------------- 3 files changed, 110 insertions(+), 80 deletions(-) create mode 100644 stream/collections.py diff --git a/stream/client.py b/stream/client.py index bc4a8fb..06c4165 100644 --- a/stream/client.py +++ b/stream/client.py @@ -83,7 +83,6 @@ def feed(self, feed_slug, user_id): return Feed(self, feed_slug, user_id, token) - def personalization(self): """ Returns a Personalized Feed object @@ -93,6 +92,15 @@ def personalization(self): return Personalization(self, token) + def collection(self): + """ + Returns a collection object (used for meta data endpoint) + """ + from stream.collections import Collections + token = self.create_jwt_token('*', '*', feed_id='*', user_id='*') + + return Collections(self, token) + def get_default_params(self): ''' Returns the params with the API key present @@ -180,6 +188,7 @@ def _make_request(self, method, relative_url, signature, personal=False, params= url = self.get_full_url(relative_url) if method.__name__ in ['post', 'put']: serialized = serializer.dumps(data) + print(url) response = method(url, data=serialized, headers=headers, params=default_params, timeout=self.timeout) logger.debug('stream api call %s, headers %s data %s', diff --git a/stream/collections.py b/stream/collections.py new file mode 100644 index 0000000..ceffcbb --- /dev/null +++ b/stream/collections.py @@ -0,0 +1,85 @@ +class Collections(object): + def __init__(self, client, token): + """ + Used to manipulate data at the 'meta' endpoint + :param client: the api client + :param token: the token + """ + + self.client = client + self.token = token + + def upsert(self, collection_name, data): + """ + "Insert new or update existing data. + :param collection_name: Collection Name i.e 'user' + :param data: list of dictionaries + :return: http response, 201 if successful along with data posted. + + **Example**:: + collections.upsert('user', [{"id": 1, "name": "Juniper", "hobbies": ["Playing", "Sleeping", "Eating"]}, + {"id": 2, "name": "Ruby", "interests": ["Sunbeams", "Surprise Attacks"]}]) + """ + + if type(data) != list: + data = [data] + + ids = [i['id'] for i in data] + + # format data to expected json blob + data_json = {} + for i in range(len(ids)): + data_json['%s:%s' % (collection_name, ids[i])] = data[i] + + response = self.client.post('meta', personal=True, + signature=self.token, data={'data': data_json}) + return response + + def select(self, collection_name, ids): + """ + Retrieve data from meta endpoint, can include data you've uploaded or personalization/analytic data + created by the stream team. 
+ :param collection_name: Collection Name i.e 'user' + :param ids: list of ids of feed group i.e [123,456] + :return: meta data as json blob + + **Example**:: + collections.select('user', 1) + collections.select('user', [1,2,3]) + """ + + if type(ids) != list: + ids = [ids] + + foreign_ids = [] + for i in range(len(ids)): + foreign_ids.append('%s:%s' % (collection_name, ids[i])) + + response = self.client.get('meta', personal=True, params={'foreign_ids': foreign_ids}, + signature=self.token) + + return response + + def delete(self, collection_name, ids): + """ + Delete data from meta. + :param collection_name: Collection Name i.e 'user' + :param ids: list of ids to delete i.e [123,456] + :return: http response. + + **Example**:: + collections.delete('user', 1) + collections.delete('user', [1,2,3]) + """ + + if type(ids) != list: + ids = [ids] + + foreign_ids = [] + for i in range(len(ids)): + foreign_ids.append('%s:%s' % (collection_name, ids[i])) + + response = self.client.delete('meta', personal=True, foreign_ids=foreign_ids, + signature=self.token) + + return response diff --git a/stream/personalization.py b/stream/personalization.py index 907e940..79300bc 100644 --- a/stream/personalization.py +++ b/stream/personalization.py @@ -1,7 +1,7 @@ class Personalization(object): def __init__(self, client, token): """ - + Methods to interact with personalized feeds. :param client: the api client :param token: the token """ @@ -9,10 +9,10 @@ def __init__(self, client, token): self.client = client self.token = token - def get(self, url, **params): + def get(self, resource, **params): """ Get personalized activities for this feed - :param url: personalized url endpoint i.e "follow recommendations" + :param resource: personalized resource endpoint i.e "follow_recommendations" :param params: params to pass to url i.e user_id = "user:123" :return: personalized feed @@ -21,102 +21,38 @@ def get(self, url, **params): personalization.get('follow_recommendations', user_id=123, limit=10, offset=10) """ - response = self.client.get(url, personal=True, params=params, + response = self.client.get(resource, personal=True, params=params, signature=self.token) return response - def post(self, url, **params): + def post(self, resource, **params): """ "Generic function to post data to personalization endpoint - :param url: personalized url endpoint i.e "follow recommendations" + :param resource: personalized resource endpoint i.e "follow_recommendations" :param params: params to pass to url (data is a reserved keyword to post to body) + + **Example**:: + #Accept or reject recommendations. 
+ personalization.post('follow_recommendations', user_id=123, accepted=[123,345], + rejected=[456]) """ data = params['data'] or None - response = self.client.post(url, personal=True, params=params, + response = self.client.post(resource, personal=True, params=params, signature=self.token, data=data) return response - def delete(self, url, **params): + def delete(self, resource, **params): """ shortcut to delete metadata or activites - :param url: personalized url endpoint typical "meta" + :param resource: personalized url endpoint typical "meta" :param params: params to pass to url i.e user_id = "user:123" :return: http response """ - response = self.client.delete(url, personal=True, params=params, + response = self.client.delete(resource, personal=True, params=params, signature=self.token) return response - - def upsert_data(self, collection_name, data): - """ - - :param collection_name: Collection Name i.e 'user' - :param data: list of dictionaries - :return: http response, 201 if successful along with data posted. - - **Example**:: - personalization.upsert_data('user', [{"id": 1, "name": "Juniper", "hobbies": ["Playing", "Sleeping", "Eating"]}, - {"id": 2, "name": "Ruby", "interests": ["Sunbeams", "Surprise Attacks"]}]) - """ - - if type(data) != list: - data = [data] - - ids = [i['id'] for i in data] - - # format data to expected json blob - data_json = {} - for i in range(len(ids)): - data_json['%s:%s' % (collection_name, ids[i])] = data[i] - - response = self.post("meta", data={'data': data_json}) - - return response - - def select_data(self, collection_name, ids): - """ - - :param collection_name: Collection Name i.e 'user' - :param ids: list of ids of feed group i.e [123,456] - :return: meta data as json blob - - **Example**:: - personalization.select_data('user', 1) - personalization.select_data('user', [1,2,3]) - """ - - if type(ids) != list: - ids = [ids] - - foreign_ids = [] - for i in range(len(ids)): - foreign_ids.append('%s:%s' % (collection_name, ids[i])) - - response = self.get('meta', foreign_ids=foreign_ids) - - return response - - def delete_data(self, collection_name, ids): - """ - - :param collection_name: Collection Name i.e 'user' - :param ids: list of ids to delete i.e [123,456] - :return: - """ - - if type(ids) != list: - ids = [ids] - - foreign_ids = [] - for i in range(len(ids)): - foreign_ids.append('%s:%s' % (collection_name, ids[i])) - - response = self.delete('meta', foreign_ids=foreign_ids) - - return response - From 2ced274c0b04cfd048672f737c42d01722ec1e8e Mon Sep 17 00:00:00 2001 From: Balazs Horanyi Date: Thu, 7 Dec 2017 11:16:58 -0700 Subject: [PATCH 037/208] remove debug print statement --- stream/client.py | 1 - stream/personalization.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/stream/client.py b/stream/client.py index 06c4165..ad9d0ff 100644 --- a/stream/client.py +++ b/stream/client.py @@ -188,7 +188,6 @@ def _make_request(self, method, relative_url, signature, personal=False, params= url = self.get_full_url(relative_url) if method.__name__ in ['post', 'put']: serialized = serializer.dumps(data) - print(url) response = method(url, data=serialized, headers=headers, params=default_params, timeout=self.timeout) logger.debug('stream api call %s, headers %s data %s', diff --git a/stream/personalization.py b/stream/personalization.py index 79300bc..a6c6d86 100644 --- a/stream/personalization.py +++ b/stream/personalization.py @@ -1,7 +1,7 @@ class Personalization(object): def __init__(self, client, token): """ - Methods to 
interact with personalized feeds. + Methods to interact with personalized feeds. :param client: the api client :param token: the token """ From 09453752a1df46dcdc9438110ebccffd52f43c47 Mon Sep 17 00:00:00 2001 From: Balazs Horanyi Date: Thu, 7 Dec 2017 15:25:44 -0700 Subject: [PATCH 038/208] adding dns routing for personal and meta endpoints --- stream/client.py | 24 ++++++++++++++++++++---- stream/collections.py | 6 +++--- stream/personalization.py | 6 +++--- 3 files changed, 26 insertions(+), 10 deletions(-) diff --git a/stream/client.py b/stream/client.py index ad9d0ff..9cc82f2 100644 --- a/stream/client.py +++ b/stream/client.py @@ -120,7 +120,18 @@ def get_full_url(self, relative_url): return url def get_full_personal_url(self, relative_url): - url = self.base_url + '/personalization/' + relative_url + '/' + base_url = self.base_url.split('.') # company.getstream.io + if len(base_url) > 1: + DNS_change = base_url[0] + '-personalization' + base_url[0] = DNS_change + base_url = '.'.join(base_url) + else: + base_url = self.base_url # if running on localhost + url = base_url + 'personalization/' + relative_url + '/' + return url + + def get_full_meta_url(self): + url = self.base_url + 'personalization/' + self.version + '/api/meta/' return url def get_user_agent(self): @@ -173,7 +184,7 @@ def create_jwt_token(self, resource, action, feed_id=None, user_id=None): payload['user_id'] = user_id return jwt.encode(payload, self.api_secret).decode("utf-8") - def _make_request(self, method, relative_url, signature, personal=False, params=None, data=None): + def _make_request(self, method, relative_url, signature, personal=None, params=None, data=None): params = params or {} data = data or {} serialized = None @@ -182,8 +193,13 @@ def _make_request(self, method, relative_url, signature, personal=False, params= headers = self.get_default_header() headers['Authorization'] = signature headers['stream-auth-type'] = 'jwt' - if personal: - url = self.get_full_personal_url(relative_url) + if personal is not None: + if personal == 'personal': + url = self.get_full_personal_url(relative_url) + elif personal == 'meta': + url = self.get_full_meta_url() + else: + raise Exception("keyword 'personal' must be None, personal, or meta") else: url = self.get_full_url(relative_url) if method.__name__ in ['post', 'put']: diff --git a/stream/collections.py b/stream/collections.py index ceffcbb..bcf80ef 100644 --- a/stream/collections.py +++ b/stream/collections.py @@ -31,7 +31,7 @@ def upsert(self, collection_name, data): for i in range(len(ids)): data_json['%s:%s' % (collection_name, ids[i])] = data[i] - response = self.client.post('meta', personal=True, + response = self.client.post('meta', personal='meta', signature=self.token, data={'data': data_json}) return response @@ -55,7 +55,7 @@ def select(self, collection_name, ids): for i in range(len(ids)): foreign_ids.append('%s:%s' % (collection_name, ids[i])) - response = self.client.get('meta', personal=True, params={'foreign_ids': foreign_ids}, + response = self.client.get('meta', personal='meta', params={'foreign_ids': foreign_ids}, signature=self.token) return response @@ -79,7 +79,7 @@ def delete(self, collection_name, ids): for i in range(len(ids)): foreign_ids.append('%s:%s' % (collection_name, ids[i])) - response = self.client.delete('meta', personal=True, foreign_ids=foreign_ids, + response = self.client.delete('meta', personal='meta', foreign_ids=foreign_ids, signature=self.token) return response diff --git a/stream/personalization.py 
b/stream/personalization.py index a6c6d86..87ff6d0 100644 --- a/stream/personalization.py +++ b/stream/personalization.py @@ -21,7 +21,7 @@ def get(self, resource, **params): personalization.get('follow_recommendations', user_id=123, limit=10, offset=10) """ - response = self.client.get(resource, personal=True, params=params, + response = self.client.get(resource, personal='personal', params=params, signature=self.token) return response @@ -40,7 +40,7 @@ def post(self, resource, **params): data = params['data'] or None - response = self.client.post(resource, personal=True, params=params, + response = self.client.post(resource, personal='personal', params=params, signature=self.token, data=data) return response @@ -52,7 +52,7 @@ def delete(self, resource, **params): :return: http response """ - response = self.client.delete(resource, personal=True, params=params, + response = self.client.delete(resource, personal='personal', params=params, signature=self.token) return response From b9dd4553aa1215ab0b1ff06bb0c55f9ae371b990 Mon Sep 17 00:00:00 2001 From: Balazs Horanyi Date: Fri, 8 Dec 2017 10:44:52 -0700 Subject: [PATCH 039/208] update format to match new server json expectations and make collection and personalization properties of client --- stream/client.py | 12 ++++++++---- stream/collections.py | 26 ++++++++++---------------- 2 files changed, 18 insertions(+), 20 deletions(-) diff --git a/stream/client.py b/stream/client.py index 9cc82f2..696e7b9 100644 --- a/stream/client.py +++ b/stream/client.py @@ -83,21 +83,23 @@ def feed(self, feed_slug, user_id): return Feed(self, feed_slug, user_id, token) + @property def personalization(self): """ Returns a Personalized Feed object """ from stream.personalization import Personalization - token = self.create_jwt_token('*', '*', feed_id='*', user_id='*') + token = self.create_jwt_token('personalization', '*', feed_id='*', user_id='*') return Personalization(self, token) + @property def collection(self): """ Returns a collection object (used for meta data endpoint) """ from stream.collections import Collections - token = self.create_jwt_token('*', '*', feed_id='*', user_id='*') + token = self.create_jwt_token('personalization', '*', feed_id='*', user_id='*') return Collections(self, token) @@ -131,7 +133,7 @@ def get_full_personal_url(self, relative_url): return url def get_full_meta_url(self): - url = self.base_url + 'personalization/' + self.version + '/api/meta/' + url = self.base_url + 'personalization/' + self.version + '/meta/' return url def get_user_agent(self): @@ -202,8 +204,10 @@ def _make_request(self, method, relative_url, signature, personal=None, params=N raise Exception("keyword 'personal' must be None, personal, or meta") else: url = self.get_full_url(relative_url) - if method.__name__ in ['post', 'put']: + if method.__name__ in ['post', 'put', 'delete']: serialized = serializer.dumps(data) + print(url) + print(serialized) response = method(url, data=serialized, headers=headers, params=default_params, timeout=self.timeout) logger.debug('stream api call %s, headers %s data %s', diff --git a/stream/collections.py b/stream/collections.py index bcf80ef..1cd8336 100644 --- a/stream/collections.py +++ b/stream/collections.py @@ -17,19 +17,14 @@ def upsert(self, collection_name, data): :return: http response, 201 if successful along with data posted. 
**Example**:: - collections.upsert('user', [{"id": 1, "name": "Juniper", "hobbies": ["Playing", "Sleeping", "Eating"]}, - {"id": 2, "name": "Ruby", "interests": ["Sunbeams", "Surprise Attacks"]}]) + client.collection.upsert('user', [{"id": '1', "name": "Juniper", "hobbies": ["Playing", "Sleeping", "Eating"]}, + {"id": '2', "name": "Ruby", "interests": ["Sunbeams", "Surprise Attacks"]}]) """ if type(data) != list: data = [data] - ids = [i['id'] for i in data] - - # format data to expected json blob - data_json = {} - for i in range(len(ids)): - data_json['%s:%s' % (collection_name, ids[i])] = data[i] + data_json = {collection_name: data} response = self.client.post('meta', personal='meta', signature=self.token, data={'data': data_json}) @@ -44,8 +39,8 @@ def select(self, collection_name, ids): :return: meta data as json blob **Example**:: - collections.select('user', 1) - collections.select('user', [1,2,3]) + client.collection.select('user', '1') + client.collection.select('user', ['1','2','3']) """ if type(ids) != list: @@ -54,6 +49,7 @@ def select(self, collection_name, ids): foreign_ids = [] for i in range(len(ids)): foreign_ids.append('%s:%s' % (collection_name, ids[i])) + foreign_ids = ','.join(foreign_ids) response = self.client.get('meta', personal='meta', params={'foreign_ids': foreign_ids}, signature=self.token) @@ -68,18 +64,16 @@ def delete(self, collection_name, ids): :return: http response. **Example**:: - collections.delete('user', 1) - collections.delete('user', [1,2,3]) + client.collections.delete('user', '1') + collections.delete('user', ['1','2','3']) """ if type(ids) != list: ids = [ids] - foreign_ids = [] - for i in range(len(ids)): - foreign_ids.append('%s:%s' % (collection_name, ids[i])) + data = {'collection_name': collection_name, 'ids': ids} - response = self.client.delete('meta', personal='meta', foreign_ids=foreign_ids, + response = self.client.delete('meta', personal='meta', data=data, signature=self.token) return response From a863b4cccd9c049ec705a3cb3b1c071eb647bbff Mon Sep 17 00:00:00 2001 From: Balazs Horanyi Date: Fri, 8 Dec 2017 10:53:17 -0700 Subject: [PATCH 040/208] remove print statements --- stream/client.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/stream/client.py b/stream/client.py index 696e7b9..20b5bf5 100644 --- a/stream/client.py +++ b/stream/client.py @@ -206,8 +206,6 @@ def _make_request(self, method, relative_url, signature, personal=None, params=N url = self.get_full_url(relative_url) if method.__name__ in ['post', 'put', 'delete']: serialized = serializer.dumps(data) - print(url) - print(serialized) response = method(url, data=serialized, headers=headers, params=default_params, timeout=self.timeout) logger.debug('stream api call %s, headers %s data %s', From 81a1efe9565a7e7f369cab46bb3b6994b0be4d1e Mon Sep 17 00:00:00 2001 From: Balazs Horanyi Date: Fri, 8 Dec 2017 11:19:30 -0700 Subject: [PATCH 041/208] force ids into strings --- stream/collections.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/stream/collections.py b/stream/collections.py index 1cd8336..406f69b 100644 --- a/stream/collections.py +++ b/stream/collections.py @@ -39,12 +39,13 @@ def select(self, collection_name, ids): :return: meta data as json blob **Example**:: - client.collection.select('user', '1') - client.collection.select('user', ['1','2','3']) + client.collection.select('user', 1) + client.collection.select('user', [1,2,3]) """ if type(ids) != list: ids = [ids] + ids = [str(i) for i in ids] foreign_ids = [] for i in 
range(len(ids)): @@ -70,6 +71,7 @@ def delete(self, collection_name, ids): if type(ids) != list: ids = [ids] + ids = [str(i) for i in ids] data = {'collection_name': collection_name, 'ids': ids} From 8950d919ecbc1bdc6c92f3de41b46fa68dbff3ac Mon Sep 17 00:00:00 2001 From: Balazs Horanyi Date: Fri, 8 Dec 2017 11:45:01 -0700 Subject: [PATCH 042/208] small doc updates --- stream/collections.py | 2 +- stream/personalization.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/stream/collections.py b/stream/collections.py index 406f69b..a7b54e1 100644 --- a/stream/collections.py +++ b/stream/collections.py @@ -62,7 +62,7 @@ def delete(self, collection_name, ids): Delete data from meta. :param collection_name: Collection Name i.e 'user' :param ids: list of ids to delete i.e [123,456] - :return: http response. + :return: data that was deleted if if successful or not. **Example**:: client.collections.delete('user', '1') diff --git a/stream/personalization.py b/stream/personalization.py index 87ff6d0..852060e 100644 --- a/stream/personalization.py +++ b/stream/personalization.py @@ -17,7 +17,6 @@ def get(self, resource, **params): :return: personalized feed **Example**:: - personalization.get('follow_recommendations', user_id=123, limit=10, offset=10) """ @@ -49,7 +48,7 @@ def delete(self, resource, **params): shortcut to delete metadata or activites :param resource: personalized url endpoint typical "meta" :param params: params to pass to url i.e user_id = "user:123" - :return: http response + :return: data that was deleted if if successful or not. """ response = self.client.delete(resource, personal='personal', params=params, From 690988fbc49e9acf26bf4ed46b6556668f0e3abc Mon Sep 17 00:00:00 2001 From: Thierry Schellenbach Date: Fri, 8 Dec 2017 12:14:51 -0700 Subject: [PATCH 043/208] bump the version --- CHANGELOG | 8 ++++++++ stream/__init__.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/CHANGELOG b/CHANGELOG index ae6fd48..79b87cc 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,14 @@ Change history ================ + +2.6.0 +===== +:release-date 2017-12-08 +:by: Balazs + +Support the new collections endpoint and flexible get requests for personalization + 2.5.0 ====== :release-date: 2017-10-19 diff --git a/stream/__init__.py b/stream/__init__.py index 53241a2..6374776 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = 'Copyright 2014, Stream.io, Inc' __credits__ = ['Thierry Schellenbach, mellowmorning.com, @tschellenbach'] __license__ = 'BSD-3-Clause' -__version__ = '2.5.0' +__version__ = '2.6.0' __maintainer__ = 'Thierry Schellenbach' __email__ = 'support@getstream.io' __status__ = 'Production' From 0a38299a1c96c517f219cef3291b3b3558b08fdb Mon Sep 17 00:00:00 2001 From: Thierry Schellenbach Date: Fri, 8 Dec 2017 14:26:42 -0700 Subject: [PATCH 044/208] bump --- stream/__init__.py | 2 +- stream/client.py | 30 ++++++++++-------------------- 2 files changed, 11 insertions(+), 21 deletions(-) diff --git a/stream/__init__.py b/stream/__init__.py index 6374776..15252c1 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = 'Copyright 2014, Stream.io, Inc' __credits__ = ['Thierry Schellenbach, mellowmorning.com, @tschellenbach'] __license__ = 'BSD-3-Clause' -__version__ = '2.6.0' +__version__ = '2.6.1' __maintainer__ = 'Thierry Schellenbach' __email__ = 'support@getstream.io' __status__ = 'Production' diff --git a/stream/client.py b/stream/client.py index 20b5bf5..c5ff438 100644 --- 
a/stream/client.py +++ b/stream/client.py @@ -65,6 +65,16 @@ def __init__(self, api_key, api_secret, app_id, version='v1.0', timeout=6.0, bas # TODO: turn this back on after we verify it doesnt retry on slower requests self.session.mount(self.base_url, HTTPAdapter(max_retries=0)) self.auth = HTTPSignatureAuth(api_key, secret=api_secret) + + # setup personalization + from stream.personalization import Personalization + token = self.create_jwt_token('personalization', '*', feed_id='*', user_id='*') + self.personalization = Personalization(self, token) + # setup the collection + from stream.collections import Collections + token = self.create_jwt_token('collections', '*', feed_id='*', user_id='*') + self.collections = Collections(self, token) + def feed(self, feed_slug, user_id): ''' @@ -83,26 +93,6 @@ def feed(self, feed_slug, user_id): return Feed(self, feed_slug, user_id, token) - @property - def personalization(self): - """ - Returns a Personalized Feed object - """ - from stream.personalization import Personalization - token = self.create_jwt_token('personalization', '*', feed_id='*', user_id='*') - - return Personalization(self, token) - - @property - def collection(self): - """ - Returns a collection object (used for meta data endpoint) - """ - from stream.collections import Collections - token = self.create_jwt_token('personalization', '*', feed_id='*', user_id='*') - - return Collections(self, token) - def get_default_params(self): ''' Returns the params with the API key present From 8b9ad3bd21709a79ee355c536207e3dbee4bebd1 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Sun, 10 Dec 2017 19:38:30 +0100 Subject: [PATCH 045/208] compose urls in a cleaner way --- stream/client.py | 70 +++++++++++++++++++++++---------------- stream/collections.py | 7 ++-- stream/personalization.py | 6 ++-- stream/tests.py | 4 +-- 4 files changed, 49 insertions(+), 38 deletions(-) diff --git a/stream/client.py b/stream/client.py index c5ff438..9e880c2 100644 --- a/stream/client.py +++ b/stream/client.py @@ -12,6 +12,10 @@ from stream.utils import validate_feed_slug, validate_user_id from requests import Request +try: + from urllib.parse import urlparse +except ImportError: + from urlparse import urlparse logger = logging.getLogger(__name__) @@ -51,13 +55,24 @@ def __init__(self, api_key, api_secret, app_id, version='v1.0', timeout=6.0, bas self.timeout = timeout self.location = location + self.base_domain_name = 'stream-io-api.com' + self.api_location = location + self.custom_api_port = None + self.protocol = 'https' + if os.environ.get('LOCAL'): - self.base_url = 'http://localhost:8000/api/' + self.base_domain_name = 'localhost' + self.protocol = 'http' + self.custom_api_port = 8000 self.timeout = 20 elif base_url is not None: - self.base_url = base_url + parsed_url = urlparse(base_url) + self.base_domain_name = parsed_url.hostname + self.protocol = parsed_url.scheme + self.custom_api_port = parsed_url.port + self.api_location = "" elif location is not None: - self.base_url = 'https://%s-api.stream-io-api.com/api/' % location + self.location = location self.base_analytics_url = 'https://analytics.stream-io-api.com/analytics/' @@ -107,23 +122,23 @@ def get_default_header(self): } return base_headers - def get_full_url(self, relative_url): - url = self.base_url + self.version + '/' + relative_url - return url - - def get_full_personal_url(self, relative_url): - base_url = self.base_url.split('.') # company.getstream.io - if len(base_url) > 1: - DNS_change = base_url[0] + '-personalization' - 
base_url[0] = DNS_change - base_url = '.'.join(base_url) + def get_full_url(self, service_name, relative_url): + if self.api_location: + hostname = '%s-%s.%s' % (self.api_location, service_name, self.base_domain_name) + elif service_name: + hostname = '%s.%s' % (service_name, self.base_domain_name) else: - base_url = self.base_url # if running on localhost - url = base_url + 'personalization/' + relative_url + '/' - return url + hostname = self.base_domain_name + + if self.base_domain_name == 'localhost': + hostname = 'localhost' + + base_url = "%s://%s" % (self.protocol, hostname) - def get_full_meta_url(self): - url = self.base_url + 'personalization/' + self.version + '/meta/' + if self.custom_api_port: + base_url = "%s:%s" % (base_url, self.custom_api_port) + + url = base_url + '/' + service_name + '/' + self.version + '/' + relative_url return url def get_user_agent(self): @@ -150,7 +165,7 @@ def _make_signed_request(self, method_name, relative_url, params=None, data=None headers['Date'] = date_header default_params = self.get_default_params() default_params.update(params) - url = self.get_full_url(relative_url) + url = self.get_full_url('api', relative_url) serialized = serializer.dumps(data) method = getattr(self.session, method_name) if method_name in ['post', 'put']: @@ -176,7 +191,7 @@ def create_jwt_token(self, resource, action, feed_id=None, user_id=None): payload['user_id'] = user_id return jwt.encode(payload, self.api_secret).decode("utf-8") - def _make_request(self, method, relative_url, signature, personal=None, params=None, data=None): + def _make_request(self, method, relative_url, signature, service_name='api', params=None, data=None): params = params or {} data = data or {} serialized = None @@ -185,15 +200,12 @@ def _make_request(self, method, relative_url, signature, personal=None, params=N headers = self.get_default_header() headers['Authorization'] = signature headers['stream-auth-type'] = 'jwt' - if personal is not None: - if personal == 'personal': - url = self.get_full_personal_url(relative_url) - elif personal == 'meta': - url = self.get_full_meta_url() - else: - raise Exception("keyword 'personal' must be None, personal, or meta") - else: - url = self.get_full_url(relative_url) + + if not relative_url.endswith('/'): + relative_url += '/' + + url = self.get_full_url(service_name, relative_url) + if method.__name__ in ['post', 'put', 'delete']: serialized = serializer.dumps(data) response = method(url, data=serialized, headers=headers, diff --git a/stream/collections.py b/stream/collections.py index a7b54e1..2d8bda4 100644 --- a/stream/collections.py +++ b/stream/collections.py @@ -1,4 +1,5 @@ class Collections(object): + def __init__(self, client, token): """ Used to manipulate data at the 'meta' endpoint @@ -26,7 +27,7 @@ def upsert(self, collection_name, data): data_json = {collection_name: data} - response = self.client.post('meta', personal='meta', + response = self.client.post('meta/', service_name='personalization', signature=self.token, data={'data': data_json}) return response @@ -52,7 +53,7 @@ def select(self, collection_name, ids): foreign_ids.append('%s:%s' % (collection_name, ids[i])) foreign_ids = ','.join(foreign_ids) - response = self.client.get('meta', personal='meta', params={'foreign_ids': foreign_ids}, + response = self.client.get('meta/', service_name='personalization', params={'foreign_ids': foreign_ids}, signature=self.token) return response @@ -75,7 +76,7 @@ def delete(self, collection_name, ids): data = {'collection_name': 
collection_name, 'ids': ids} - response = self.client.delete('meta', personal='meta', data=data, + response = self.client.delete('meta/', service_name='personalization', data=data, signature=self.token) return response diff --git a/stream/personalization.py b/stream/personalization.py index 852060e..1b0d9df 100644 --- a/stream/personalization.py +++ b/stream/personalization.py @@ -20,7 +20,7 @@ def get(self, resource, **params): personalization.get('follow_recommendations', user_id=123, limit=10, offset=10) """ - response = self.client.get(resource, personal='personal', params=params, + response = self.client.get(resource, service_name='personalization', params=params, signature=self.token) return response @@ -39,7 +39,7 @@ def post(self, resource, **params): data = params['data'] or None - response = self.client.post(resource, personal='personal', params=params, + response = self.client.post(resource, service_name='personalization', params=params, signature=self.token, data=data) return response @@ -51,7 +51,7 @@ def delete(self, resource, **params): :return: data that was deleted if if successful or not. """ - response = self.client.delete(resource, personal='personal', params=params, + response = self.client.delete(resource, service_name='personalization', params=params, signature=self.token) return response diff --git a/stream/tests.py b/stream/tests.py index 2c2eb2e..8bc3e9b 100644 --- a/stream/tests.py +++ b/stream/tests.py @@ -42,10 +42,8 @@ def connect_debug(): return stream.connect( key, secret, - location='us-east', + location='qa', timeout=30, - base_url='https://qa.stream-io-api.com/api/', - # base_url='http://localhost-api.getstream.io:8000/api/', ) client = connect_debug() From 9855e76316f9b1afe52281dfc3920e47de8c39f9 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Sun, 10 Dec 2017 19:58:08 +0100 Subject: [PATCH 046/208] get rid of base url --- stream/client.py | 3 --- stream/tests.py | 11 +++++++---- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/stream/client.py b/stream/client.py index 9e880c2..3442a19 100644 --- a/stream/client.py +++ b/stream/client.py @@ -21,7 +21,6 @@ class StreamClient(object): - base_url = 'https://api.stream-io-api.com/api/' def __init__(self, api_key, api_secret, app_id, version='v1.0', timeout=6.0, base_url=None, location=None): ''' @@ -77,8 +76,6 @@ def __init__(self, api_key, api_secret, app_id, version='v1.0', timeout=6.0, bas self.base_analytics_url = 'https://analytics.stream-io-api.com/analytics/' self.session = requests.Session() - # TODO: turn this back on after we verify it doesnt retry on slower requests - self.session.mount(self.base_url, HTTPAdapter(max_retries=0)) self.auth = HTTPSignatureAuth(api_key, secret=api_secret) # setup personalization diff --git a/stream/tests.py b/stream/tests.py index 8bc3e9b..f2ba049 100644 --- a/stream/tests.py +++ b/stream/tests.py @@ -195,12 +195,14 @@ def test_heroku_location_compat(self): self.assertEqual( client.api_secret, 'gthc2t9gh7pzq52f6cky8w4r4up9dr6rju9w3fjgmkv6cdvvav2ufe5fv7e2r9qy') + feed_url = client.get_full_url('api', 'feed/') if self.local_tests: self.assertEqual( - client.base_url, 'http://localhost:8000/api/') + feed_url, 'http://localhost:8000/api/v1.0/feed/') else: self.assertEqual( - client.base_url, 'https://us-east-api.stream-io-api.com/api/') + feed_url, 'https://us-east-api.stream-io-api.com/api/v1.0/feed/') + self.assertEqual(client.app_id, '1') def test_heroku_location(self): @@ -211,12 +213,13 @@ def test_heroku_location(self): self.assertEqual( 
client.api_secret, 'gthc2t9gh7pzq52f6cky8w4r4up9dr6rju9w3fjgmkv6cdvvav2ufe5fv7e2r9qy') + feed_url = client.get_full_url('api', 'feed/') if self.local_tests: self.assertEqual( - client.base_url, 'http://localhost:8000/api/') + feed_url, 'http://localhost:8000/api/v1.0/feed/') else: self.assertEqual( - client.base_url, 'https://us-east-api.stream-io-api.com/api/') + feed_url, 'https://us-east-api.stream-io-api.com/api/v1.0/feed/') self.assertEqual(client.app_id, '1') def test_heroku_overwrite(self): From 0e2c5404a3ba235c35bf3c5c31e7d8c78994cc9c Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Sun, 10 Dec 2017 20:03:54 +0100 Subject: [PATCH 047/208] fix more heroku tests --- stream/tests.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/stream/tests.py b/stream/tests.py index f2ba049..f177b1c 100644 --- a/stream/tests.py +++ b/stream/tests.py @@ -179,13 +179,14 @@ def test_heroku_no_location(self): self.assertEqual( client.api_secret, 'twc5ywfste5bm2ngqkzs7ukxk3pn96yweghjrxcmcrarnt3j4dqj3tucbhym5wfd') self.assertEqual(client.app_id, '669') + feed_url = client.get_full_url('api', 'feed/') if self.local_tests: self.assertEqual( - client.base_url, 'http://localhost:8000/api/') + feed_url, 'http://localhost:8000/api/v1.0/feed/') else: self.assertEqual( - client.base_url, 'https://api.stream-io-api.com/api/') + feed_url, 'https://api.stream-io-api.com/api/v1.0/feed/') def test_heroku_location_compat(self): url = 'https://ahj2ndz7gsan:gthc2t9gh7pzq52f6cky8w4r4up9dr6rju9w3fjgmkv6cdvvav2ufe5fv7e2r9qy@us-east.getstream.io/?app_id=1' @@ -233,12 +234,13 @@ def test_heroku_overwrite(self): def test_location_support(self): client = stream.connect('a', 'b', 'c', location='us-east') - full_location = 'https://us-east-api.stream-io-api.com/api/' + full_location = 'https://us-east-api.stream-io-api.com/api/v1.0/feed/' if self.local_tests: - full_location = 'http://localhost:8000/api/' + full_location = 'http://localhost:8000/api/v1.0/feed/' self.assertEqual(client.location, 'us-east') - self.assertEqual(client.base_url, full_location) + feed_url = client.get_full_url('api', 'feed/') + self.assertEqual(feed_url, full_location) # test a wrong location, can only work on non-local test running if not self.local_tests: From f284779196b710388f593482a488fc22f39d9020 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Sun, 10 Dec 2017 20:19:56 +0100 Subject: [PATCH 048/208] collections are an API service --- stream/collections.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/stream/collections.py b/stream/collections.py index 2d8bda4..cd58bc5 100644 --- a/stream/collections.py +++ b/stream/collections.py @@ -27,7 +27,7 @@ def upsert(self, collection_name, data): data_json = {collection_name: data} - response = self.client.post('meta/', service_name='personalization', + response = self.client.post('meta/', service_name='api', signature=self.token, data={'data': data_json}) return response @@ -53,7 +53,7 @@ def select(self, collection_name, ids): foreign_ids.append('%s:%s' % (collection_name, ids[i])) foreign_ids = ','.join(foreign_ids) - response = self.client.get('meta/', service_name='personalization', params={'foreign_ids': foreign_ids}, + response = self.client.get('meta/', service_name='api', params={'foreign_ids': foreign_ids}, signature=self.token) return response @@ -76,7 +76,7 @@ def delete(self, collection_name, ids): data = {'collection_name': collection_name, 'ids': ids} - response = self.client.delete('meta/', service_name='personalization', 
data=data, + response = self.client.delete('meta/', service_name='api', data=data, signature=self.token) return response From 10725ee3be5ce66a42792c4f1dd04910dd43273a Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Sun, 10 Dec 2017 20:29:36 +0100 Subject: [PATCH 049/208] add more tests --- stream/tests.py | 99 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 99 insertions(+) diff --git a/stream/tests.py b/stream/tests.py index f177b1c..11090ec 100644 --- a/stream/tests.py +++ b/stream/tests.py @@ -118,6 +118,105 @@ def _test_sleep(self, production_wait, local_wait): sleep_time = local_wait time.sleep(sleep_time) + def test_collections_url(self): + feed_url = client.get_full_url(relative_url='meta/', service_name='api') + + if self.local_tests: + self.assertEqual( + feed_url, 'http://localhost:8000/api/v1.0/meta/') + else: + self.assertEqual( + feed_url, 'https://qa-api.stream-io-api.com/api/v1.0/meta/') + + def test_personalization_url(self): + feed_url = client.get_full_url(relative_url='recommended', service_name='personalization') + + if self.local_tests: + self.assertEqual( + feed_url, 'http://localhost:8000/personalization/v1.0/recommended') + else: + self.assertEqual( + feed_url, 'https://qa-personalization.stream-io-api.com/personalization/v1.0/recommended') + + def test_api_url(self): + feed_url = client.get_full_url(service_name='api', relative_url='feed/') + + if self.local_tests: + self.assertEqual( + feed_url, 'http://localhost:8000/api/v1.0/feed/') + else: + self.assertEqual( + feed_url, 'https://qa-api.stream-io-api.com/api/v1.0/feed/') + + def test_collections_url_default(self): + client = stream.connect( + 'key', + 'secret', + ) + feed_url = client.get_full_url(relative_url='meta/', service_name='api') + + if not self.local_tests: + self.assertEqual( + feed_url, 'https://api.stream-io-api.com/api/v1.0/meta/') + + def test_personalization_url_default(self): + client = stream.connect( + 'key', + 'secret', + ) + feed_url = client.get_full_url(relative_url='recommended', service_name='personalization') + + if not self.local_tests: + self.assertEqual( + feed_url, 'https://personalization.stream-io-api.com/personalization/v1.0/recommended') + + def test_api_url_default(self): + client = stream.connect( + 'key', + 'secret', + ) + feed_url = client.get_full_url(service_name='api', relative_url='feed/') + + if not self.local_tests: + self.assertEqual( + feed_url, 'https://api.stream-io-api.com/api/v1.0/feed/') + + def test_collections_url_location(self): + client = stream.connect( + 'key', + 'secret', + location='tokyo', + ) + feed_url = client.get_full_url(relative_url='meta/', service_name='api') + + if not self.local_tests: + self.assertEqual( + feed_url, 'https://api-tokyo.stream-io-api.com/api/v1.0/meta/') + + def test_personalization_url_location(self): + client = stream.connect( + 'key', + 'secret', + location='tokyo', + ) + feed_url = client.get_full_url(relative_url='recommended', service_name='personalization') + + if not self.local_tests: + self.assertEqual( + feed_url, 'https://personalization-tokyo.stream-io-api.com/personalization/v1.0/recommended') + + def test_api_url_location(self): + client = stream.connect( + 'key', + 'secret', + location='tokyo', + ) + feed_url = client.get_full_url(service_name='api', relative_url='feed/') + + if not self.local_tests: + self.assertEqual( + feed_url, 'https://api-tokyo.stream-io-api.com/api/v1.0/feed/') + def test_update_activities_create(self): activities = [{ 'actor': 'user:1', From 
7ccc97c70c6de09b5b82a5a6af22048dd4ce8864 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Tue, 12 Dec 2017 10:59:05 +0100 Subject: [PATCH 050/208] adjust wrong assertion in tests --- stream/tests.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/stream/tests.py b/stream/tests.py index 11090ec..ca171b4 100644 --- a/stream/tests.py +++ b/stream/tests.py @@ -191,7 +191,7 @@ def test_collections_url_location(self): if not self.local_tests: self.assertEqual( - feed_url, 'https://api-tokyo.stream-io-api.com/api/v1.0/meta/') + feed_url, 'https://tokyo-api.stream-io-api.com/api/v1.0/meta/') def test_personalization_url_location(self): client = stream.connect( @@ -203,7 +203,7 @@ def test_personalization_url_location(self): if not self.local_tests: self.assertEqual( - feed_url, 'https://personalization-tokyo.stream-io-api.com/personalization/v1.0/recommended') + feed_url, 'https://tokyo-personalization.stream-io-api.com/personalization/v1.0/recommended') def test_api_url_location(self): client = stream.connect( @@ -215,7 +215,7 @@ def test_api_url_location(self): if not self.local_tests: self.assertEqual( - feed_url, 'https://api-tokyo.stream-io-api.com/api/v1.0/feed/') + feed_url, 'https://tokyo-api.stream-io-api.com/api/v1.0/feed/') def test_update_activities_create(self): activities = [{ From 2e1e56160a191cd7987933c09c4cdeac8e8de535 Mon Sep 17 00:00:00 2001 From: Balazs Horanyi Date: Tue, 12 Dec 2017 12:32:33 -0700 Subject: [PATCH 051/208] add catch all if google image api fails --- stream/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stream/client.py b/stream/client.py index 3442a19..e058c7a 100644 --- a/stream/client.py +++ b/stream/client.py @@ -202,7 +202,7 @@ def _make_request(self, method, relative_url, signature, service_name='api', par relative_url += '/' url = self.get_full_url(service_name, relative_url) - + print(url) if method.__name__ in ['post', 'put', 'delete']: serialized = serializer.dumps(data) response = method(url, data=serialized, headers=headers, From 036e69fb4d65dbb145d7b7bdcc39f30d77de7dcf Mon Sep 17 00:00:00 2001 From: Balazs Horanyi Date: Tue, 12 Dec 2017 12:36:00 -0700 Subject: [PATCH 052/208] remove print url --- stream/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stream/client.py b/stream/client.py index e058c7a..68d5f9f 100644 --- a/stream/client.py +++ b/stream/client.py @@ -202,7 +202,7 @@ def _make_request(self, method, relative_url, signature, service_name='api', par relative_url += '/' url = self.get_full_url(service_name, relative_url) - print(url) + if method.__name__ in ['post', 'put', 'delete']: serialized = serializer.dumps(data) response = method(url, data=serialized, headers=headers, From eed3be1b4011e77049d485501ce18cbeb9d3f1e4 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Wed, 13 Dec 2017 11:12:14 +0100 Subject: [PATCH 053/208] Update CHANGELOG --- CHANGELOG | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/CHANGELOG b/CHANGELOG index 79b87cc..711ab7b 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -3,6 +3,13 @@ ================ +2.7.0 +===== +:release-date 2017-12-08 +:by: Balazs + +Consolidate API URL generation across API, Collections and Personalization services + 2.6.0 ===== :release-date 2017-12-08 From 0508838fe6016da8d086a6753b636c62506f8eda Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Wed, 13 Dec 2017 11:13:52 +0100 Subject: [PATCH 054/208] release 2.7.0 --- stream/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff 
--git a/stream/__init__.py b/stream/__init__.py index 15252c1..542fa72 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = 'Copyright 2014, Stream.io, Inc' __credits__ = ['Thierry Schellenbach, mellowmorning.com, @tschellenbach'] __license__ = 'BSD-3-Clause' -__version__ = '2.6.1' +__version__ = '2.7.0' __maintainer__ = 'Thierry Schellenbach' __email__ = 'support@getstream.io' __status__ = 'Production' From f321caaf08a1ec6145bf720116bc2d6b640111e2 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Wed, 13 Dec 2017 19:02:25 +0100 Subject: [PATCH 055/208] Update CHANGELOG --- CHANGELOG | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG b/CHANGELOG index 711ab7b..e19dc6f 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -3,7 +3,7 @@ ================ -2.7.0 +2.6.2 ===== :release-date 2017-12-08 :by: Balazs From 8ebba43951efa2475eaf58e9fa5995171c2b043e Mon Sep 17 00:00:00 2001 From: Dwight Gunning Date: Thu, 14 Dec 2017 10:51:36 +0100 Subject: [PATCH 056/208] Bump version --- stream/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stream/__init__.py b/stream/__init__.py index 15252c1..78ce5f1 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = 'Copyright 2014, Stream.io, Inc' __credits__ = ['Thierry Schellenbach, mellowmorning.com, @tschellenbach'] __license__ = 'BSD-3-Clause' -__version__ = '2.6.1' +__version__ = '2.6.2' __maintainer__ = 'Thierry Schellenbach' __email__ = 'support@getstream.io' __status__ = 'Production' From e40b8f15e57bd3ad0f61f37750d42e03d4cd7c1f Mon Sep 17 00:00:00 2001 From: Dwight Gunning Date: Thu, 14 Dec 2017 11:08:10 +0100 Subject: [PATCH 057/208] Bumping package version --- stream/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stream/__init__.py b/stream/__init__.py index 78ce5f1..542fa72 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = 'Copyright 2014, Stream.io, Inc' __credits__ = ['Thierry Schellenbach, mellowmorning.com, @tschellenbach'] __license__ = 'BSD-3-Clause' -__version__ = '2.6.2' +__version__ = '2.7.0' __maintainer__ = 'Thierry Schellenbach' __email__ = 'support@getstream.io' __status__ = 'Production' From c58a4f56032ba3c5ee618c1eca725ff84c977c6f Mon Sep 17 00:00:00 2001 From: Dwight Gunning Date: Wed, 20 Dec 2017 11:50:28 +0100 Subject: [PATCH 058/208] Vendored httpsig --- LICENSE | 21 +++ setup.py | 4 +- stream/httpsig/__init__.py | 2 + stream/httpsig/requests_auth.py | 37 +++++ stream/httpsig/sign.py | 106 ++++++++++++++ stream/httpsig/tests/__init__.py | 3 + stream/httpsig/tests/rsa_private.pem | 15 ++ stream/httpsig/tests/rsa_public.pem | 6 + stream/httpsig/tests/test_signature.py | 70 ++++++++++ stream/httpsig/tests/test_utils.py | 17 +++ stream/httpsig/tests/test_verify.py | 167 ++++++++++++++++++++++ stream/httpsig/utils.py | 186 +++++++++++++++++++++++++ stream/httpsig/verify.py | 90 ++++++++++++ 13 files changed, 722 insertions(+), 2 deletions(-) create mode 100644 stream/httpsig/__init__.py create mode 100644 stream/httpsig/requests_auth.py create mode 100644 stream/httpsig/sign.py create mode 100644 stream/httpsig/tests/__init__.py create mode 100644 stream/httpsig/tests/rsa_private.pem create mode 100644 stream/httpsig/tests/rsa_public.pem create mode 100755 stream/httpsig/tests/test_signature.py create mode 100755 stream/httpsig/tests/test_utils.py create mode 100755 stream/httpsig/tests/test_verify.py create mode 100644 stream/httpsig/utils.py create 
mode 100644 stream/httpsig/verify.py diff --git a/LICENSE b/LICENSE index f883096..73bc810 100644 --- a/LICENSE +++ b/LICENSE @@ -25,3 +25,24 @@ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSE THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + httpsig + - https://github.com/ahknight/httpsig + + Copyright (c) 2014 Adam Knight + Copyright (c) 2012 Adam T. Lindsay (original author) + + Permission is hereby granted, free of charge, to any person obtaining a copy of this + software and associated documentation files (the "Software"), to deal in the Software without + restriction, including without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all copies or + substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING + BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/setup.py b/setup.py index b13eabc..5b37615 100644 --- a/setup.py +++ b/setup.py @@ -32,10 +32,10 @@ requests = 'requests[security]>=2.4.1,<3' install_requires = [ + 'pycryptodome==3.4.7', 'pyjwt==1.3.0', requests, - 'six>=1.8.0', - 'httpsig==1.1.2' + 'six>=1.8.0' ] class PyTest(TestCommand): diff --git a/stream/httpsig/__init__.py b/stream/httpsig/__init__.py new file mode 100644 index 0000000..18aa3f9 --- /dev/null +++ b/stream/httpsig/__init__.py @@ -0,0 +1,2 @@ +from .sign import Signer, HeaderSigner +from .verify import Verifier, HeaderVerifier diff --git a/stream/httpsig/requests_auth.py b/stream/httpsig/requests_auth.py new file mode 100644 index 0000000..6a02896 --- /dev/null +++ b/stream/httpsig/requests_auth.py @@ -0,0 +1,37 @@ +from requests.auth import AuthBase +try: + # Python 3 + from urllib.parse import urlparse +except ImportError: + # Python 2 + from urlparse import urlparse + +from .sign import HeaderSigner + + +class HTTPSignatureAuth(AuthBase): + ''' + Sign a request using the http-signature scheme. + https://github.com/joyent/node-http-signature/blob/master/http_signing.md + + key_id is the mandatory label indicating to the server which secret to use + secret is the filename of a pem file in the case of rsa, a password string in the case of an hmac algorithm + algorithm is one of the six specified algorithms + headers is a list of http headers to be included in the signing string, defaulting to "Date" alone. 
+ ''' + def __init__(self, key_id='', secret='', algorithm=None, headers=None): + headers = headers or [] + self.header_signer = HeaderSigner(key_id=key_id, secret=secret, + algorithm=algorithm, headers=headers) + self.uses_host = 'host' in [h.lower() for h in headers] + + def __call__(self, r): + headers = self.header_signer.sign( + r.headers, + # 'Host' header unavailable in request object at this point + # if 'host' header is needed, extract it from the url + host=urlparse(r.url).netloc if self.uses_host else None, + method=r.method, + path=r.path_url) + r.headers.update(headers) + return r diff --git a/stream/httpsig/sign.py b/stream/httpsig/sign.py new file mode 100644 index 0000000..7125035 --- /dev/null +++ b/stream/httpsig/sign.py @@ -0,0 +1,106 @@ +import base64 +import six + +from Crypto.Hash import HMAC +from Crypto.PublicKey import RSA +from Crypto.Signature import PKCS1_v1_5 + +from .utils import * + + +DEFAULT_SIGN_ALGORITHM = "hmac-sha256" + + +class Signer(object): + """ + When using an RSA algo, the secret is a PEM-encoded private key. + When using an HMAC algo, the secret is the HMAC signing secret. + + Password-protected keyfiles are not supported. + """ + def __init__(self, secret, algorithm=None): + if algorithm is None: + algorithm = DEFAULT_SIGN_ALGORITHM + + assert algorithm in ALGORITHMS, "Unknown algorithm" + if isinstance(secret, six.string_types): secret = secret.encode("ascii") + + self._rsa = None + self._hash = None + self.sign_algorithm, self.hash_algorithm = algorithm.split('-') + + if self.sign_algorithm == 'rsa': + try: + rsa_key = RSA.importKey(secret) + self._rsa = PKCS1_v1_5.new(rsa_key) + self._hash = HASHES[self.hash_algorithm] + except ValueError: + raise HttpSigException("Invalid key.") + + elif self.sign_algorithm == 'hmac': + self._hash = HMAC.new(secret, digestmod=HASHES[self.hash_algorithm]) + + @property + def algorithm(self): + return '%s-%s' % (self.sign_algorithm, self.hash_algorithm) + + def _sign_rsa(self, data): + if isinstance(data, six.string_types): data = data.encode("ascii") + h = self._hash.new() + h.update(data) + return self._rsa.sign(h) + + def _sign_hmac(self, data): + if isinstance(data, six.string_types): data = data.encode("ascii") + hmac = self._hash.copy() + hmac.update(data) + return hmac.digest() + + def _sign(self, data): + if isinstance(data, six.string_types): data = data.encode("ascii") + signed = None + if self._rsa: + signed = self._sign_rsa(data) + elif self._hash: + signed = self._sign_hmac(data) + if not signed: + raise SystemError('No valid encryptor found.') + return base64.b64encode(signed).decode("ascii") + + +class HeaderSigner(Signer): + ''' + Generic object that will sign headers as a dictionary using the http-signature scheme. + https://github.com/joyent/node-http-signature/blob/master/http_signing.md + + :arg key_id: the mandatory label indicating to the server which secret to use + :arg secret: a PEM-encoded RSA private key or an HMAC secret (must match the algorithm) + :arg algorithm: one of the six specified algorithms + :arg headers: a list of http headers to be included in the signing string, defaulting to ['date']. 
+ ''' + def __init__(self, key_id, secret, algorithm=None, headers=None): + if algorithm is None: + algorithm = DEFAULT_SIGN_ALGORITHM + + super(HeaderSigner, self).__init__(secret=secret, algorithm=algorithm) + self.headers = headers or ['date'] + self.signature_template = build_signature_template(key_id, algorithm, headers) + + def sign(self, headers, host=None, method=None, path=None): + """ + Add Signature Authorization header to case-insensitive header dict. + + headers is a case-insensitive dict of mutable headers. + host is a override for the 'host' header (defaults to value in headers). + method is the HTTP method (required when using '(request-target)'). + path is the HTTP path (required when using '(request-target)'). + """ + headers = CaseInsensitiveDict(headers) + required_headers = self.headers or ['date'] + signable = generate_message(required_headers, headers, host, method, path) + + signature = self._sign(signable) + headers['authorization'] = self.signature_template % signature + + return headers + diff --git a/stream/httpsig/tests/__init__.py b/stream/httpsig/tests/__init__.py new file mode 100644 index 0000000..72d4383 --- /dev/null +++ b/stream/httpsig/tests/__init__.py @@ -0,0 +1,3 @@ +from .test_signature import * +from .test_utils import * +from .test_verify import * \ No newline at end of file diff --git a/stream/httpsig/tests/rsa_private.pem b/stream/httpsig/tests/rsa_private.pem new file mode 100644 index 0000000..425518a --- /dev/null +++ b/stream/httpsig/tests/rsa_private.pem @@ -0,0 +1,15 @@ +-----BEGIN RSA PRIVATE KEY----- +MIICXgIBAAKBgQDCFENGw33yGihy92pDjZQhl0C36rPJj+CvfSC8+q28hxA161QF +NUd13wuCTUcq0Qd2qsBe/2hFyc2DCJJg0h1L78+6Z4UMR7EOcpfdUE9Hf3m/hs+F +UR45uBJeDK1HSFHD8bHKD6kv8FPGfJTotc+2xjJwoYi+1hqp1fIekaxsyQIDAQAB +AoGBAJR8ZkCUvx5kzv+utdl7T5MnordT1TvoXXJGXK7ZZ+UuvMNUCdN2QPc4sBiA +QWvLw1cSKt5DsKZ8UETpYPy8pPYnnDEz2dDYiaew9+xEpubyeW2oH4Zx71wqBtOK +kqwrXa/pzdpiucRRjk6vE6YY7EBBs/g7uanVpGibOVAEsqH1AkEA7DkjVH28WDUg +f1nqvfn2Kj6CT7nIcE3jGJsZZ7zlZmBmHFDONMLUrXR/Zm3pR5m0tCmBqa5RK95u +412jt1dPIwJBANJT3v8pnkth48bQo/fKel6uEYyboRtA5/uHuHkZ6FQF7OUkGogc +mSJluOdc5t6hI1VsLn0QZEjQZMEOWr+wKSMCQQCC4kXJEsHAve77oP6HtG/IiEn7 +kpyUXRNvFsDE0czpJJBvL/aRFUJxuRK91jhjC68sA7NsKMGg5OXb5I5Jj36xAkEA +gIT7aFOYBFwGgQAQkWNKLvySgKbAZRTeLBacpHMuQdl1DfdntvAyqpAZ0lY0RKmW +G6aFKaqQfOXKCyWoUiVknQJAXrlgySFci/2ueKlIE1QqIiLSZ8V8OlpFLRnb1pzI +7U1yQXnTAEFYM560yJlzUpOb1V4cScGd365tiSMvxLOvTA== +-----END RSA PRIVATE KEY----- diff --git a/stream/httpsig/tests/rsa_public.pem b/stream/httpsig/tests/rsa_public.pem new file mode 100644 index 0000000..b3bbf6c --- /dev/null +++ b/stream/httpsig/tests/rsa_public.pem @@ -0,0 +1,6 @@ +-----BEGIN PUBLIC KEY----- +MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDCFENGw33yGihy92pDjZQhl0C3 +6rPJj+CvfSC8+q28hxA161QFNUd13wuCTUcq0Qd2qsBe/2hFyc2DCJJg0h1L78+6 +Z4UMR7EOcpfdUE9Hf3m/hs+FUR45uBJeDK1HSFHD8bHKD6kv8FPGfJTotc+2xjJw +oYi+1hqp1fIekaxsyQIDAQAB +-----END PUBLIC KEY----- diff --git a/stream/httpsig/tests/test_signature.py b/stream/httpsig/tests/test_signature.py new file mode 100755 index 0000000..c5a61ff --- /dev/null +++ b/stream/httpsig/tests/test_signature.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python +import sys +import os +sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..')) + +import json +import unittest + +import stream.httpsig.sign as sign +from stream.httpsig.utils import parse_authorization_header + + +sign.DEFAULT_SIGN_ALGORITHM = "rsa-sha256" + + +class TestSign(unittest.TestCase): + + def setUp(self): + self.key_path = 
os.path.join(os.path.dirname(__file__), 'rsa_private.pem') + with open(self.key_path, 'rb') as f: + self.key = f.read() + + def test_default(self): + hs = sign.HeaderSigner(key_id='Test', secret=self.key) + unsigned = { + 'Date': 'Thu, 05 Jan 2012 21:31:40 GMT' + } + signed = hs.sign(unsigned) + self.assertIn('Date', signed) + self.assertEqual(unsigned['Date'], signed['Date']) + self.assertIn('Authorization', signed) + auth = parse_authorization_header(signed['authorization']) + params = auth[1] + self.assertIn('keyId', params) + self.assertIn('algorithm', params) + self.assertIn('signature', params) + self.assertEqual(params['keyId'], 'Test') + self.assertEqual(params['algorithm'], 'rsa-sha256') + self.assertEqual(params['signature'], 'ATp0r26dbMIxOopqw0OfABDT7CKMIoENumuruOtarj8n/97Q3htHFYpH8yOSQk3Z5zh8UxUym6FYTb5+A0Nz3NRsXJibnYi7brE/4tx5But9kkFGzG+xpUmimN4c3TMN7OFH//+r8hBf7BT9/GmHDUVZT2JzWGLZES2xDOUuMtA=') + + def test_all(self): + hs = sign.HeaderSigner(key_id='Test', secret=self.key, headers=[ + '(request-target)', + 'host', + 'date', + 'content-type', + 'content-md5', + 'content-length' + ]) + unsigned = { + 'Host': 'example.com', + 'Date': 'Thu, 05 Jan 2012 21:31:40 GMT', + 'Content-Type': 'application/json', + 'Content-MD5': 'Sd/dVLAcvNLSq16eXua5uQ==', + 'Content-Length': '18', + } + signed = hs.sign(unsigned, method='POST', path='/foo?param=value&pet=dog') + + self.assertIn('Date', signed) + self.assertEqual(unsigned['Date'], signed['Date']) + self.assertIn('Authorization', signed) + auth = parse_authorization_header(signed['authorization']) + params = auth[1] + self.assertIn('keyId', params) + self.assertIn('algorithm', params) + self.assertIn('signature', params) + self.assertEqual(params['keyId'], 'Test') + self.assertEqual(params['algorithm'], 'rsa-sha256') + self.assertEqual(params['headers'], '(request-target) host date content-type content-md5 content-length') + self.assertEqual(params['signature'], 'G8/Uh6BBDaqldRi3VfFfklHSFoq8CMt5NUZiepq0q66e+fS3Up3BmXn0NbUnr3L1WgAAZGplifRAJqp2LgeZ5gXNk6UX9zV3hw5BERLWscWXlwX/dvHQES27lGRCvyFv3djHP6Plfd5mhPWRkmjnvqeOOSS0lZJYFYHJz994s6w=') diff --git a/stream/httpsig/tests/test_utils.py b/stream/httpsig/tests/test_utils.py new file mode 100755 index 0000000..f0a4341 --- /dev/null +++ b/stream/httpsig/tests/test_utils.py @@ -0,0 +1,17 @@ +#!/usr/bin/env python +import os +import re +import sys +sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..')) + +import unittest + +from stream.httpsig.utils import get_fingerprint + +class TestUtils(unittest.TestCase): + + def test_get_fingerprint(self): + with open(os.path.join(os.path.dirname(__file__), 'rsa_public.pem'), 'r') as k: + key = k.read() + fingerprint = get_fingerprint(key) + self.assertEqual(fingerprint, "73:61:a2:21:67:e0:df:be:7e:4b:93:1e:15:98:a5:b7") diff --git a/stream/httpsig/tests/test_verify.py b/stream/httpsig/tests/test_verify.py new file mode 100755 index 0000000..9d34393 --- /dev/null +++ b/stream/httpsig/tests/test_verify.py @@ -0,0 +1,167 @@ +#!/usr/bin/env python +import sys +import os +sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..')) + +import json +import unittest + +from stream.httpsig.sign import HeaderSigner, Signer +from stream.httpsig.verify import HeaderVerifier, Verifier + +class BaseTestCase(unittest.TestCase): + def _parse_auth(self, auth): + """Basic Authorization header parsing.""" + # split 'Signature kvpairs' + s, param_str = auth.split(' ', 1) + self.assertEqual(s, 'Signature') + # split k1="v1",k2="v2",... 
+ param_list = param_str.split(',') + # convert into [(k1,"v1"), (k2, "v2"), ...] + param_pairs = [p.split('=', 1) for p in param_list] + # convert into {k1:v1, k2:v2, ...} + param_dict = {k: v.strip('"') for k, v in param_pairs} + return param_dict + + +class TestVerifyHMACSHA1(BaseTestCase): + def setUp(self): + secret = b"something special goes here" + + self.keyId = "Test" + self.algorithm = "hmac-sha1" + self.sign_secret = secret + self.verify_secret = secret + + def test_basic_sign(self): + signer = Signer(secret=self.sign_secret, algorithm=self.algorithm) + verifier = Verifier(secret=self.verify_secret, algorithm=self.algorithm) + + GOOD = b"this is a test" + BAD = b"this is not the signature you were looking for..." + + # generate signed string + signature = signer._sign(GOOD) + self.assertTrue(verifier._verify(data=GOOD, signature=signature)) + self.assertFalse(verifier._verify(data=BAD, signature=signature)) + + def test_default(self): + unsigned = { + 'Date': 'Thu, 05 Jan 2012 21:31:40 GMT' + } + + hs = HeaderSigner(key_id="Test", secret=self.sign_secret, algorithm=self.algorithm) + signed = hs.sign(unsigned) + hv = HeaderVerifier(headers=signed, secret=self.verify_secret) + self.assertTrue(hv.verify()) + + def test_signed_headers(self): + HOST = "example.com" + METHOD = "POST" + PATH = '/foo?param=value&pet=dog' + hs = HeaderSigner(key_id="Test", secret=self.sign_secret, algorithm=self.algorithm, headers=[ + '(request-target)', + 'host', + 'date', + 'content-type', + 'content-md5', + 'content-length' + ]) + unsigned = { + 'Host': HOST, + 'Date': 'Thu, 05 Jan 2012 21:31:40 GMT', + 'Content-Type': 'application/json', + 'Content-MD5': 'Sd/dVLAcvNLSq16eXua5uQ==', + 'Content-Length': '18', + } + signed = hs.sign(unsigned, method=METHOD, path=PATH) + + hv = HeaderVerifier(headers=signed, secret=self.verify_secret, host=HOST, method=METHOD, path=PATH) + self.assertTrue(hv.verify()) + + def test_incorrect_headers(self): + HOST = "example.com" + METHOD = "POST" + PATH = '/foo?param=value&pet=dog' + hs = HeaderSigner(secret=self.sign_secret, + key_id="Test", + algorithm=self.algorithm, + headers=[ + '(request-target)', + 'host', + 'date', + 'content-type', + 'content-md5', + 'content-length']) + unsigned = { + 'Host': HOST, + 'Date': 'Thu, 05 Jan 2012 21:31:40 GMT', + 'Content-Type': 'application/json', + 'Content-MD5': 'Sd/dVLAcvNLSq16eXua5uQ==', + 'Content-Length': '18', + } + signed = hs.sign(unsigned, method=METHOD, path=PATH) + + hv = HeaderVerifier(headers=signed, secret=self.verify_secret, required_headers=["some-other-header"], host=HOST, method=METHOD, path=PATH) + with self.assertRaises(Exception) as ex: + hv.verify() + + def test_extra_auth_headers(self): + HOST = "example.com" + METHOD = "POST" + PATH = '/foo?param=value&pet=dog' + hs = HeaderSigner(key_id="Test", secret=self.sign_secret, algorithm=self.algorithm, headers=[ + '(request-target)', + 'host', + 'date', + 'content-type', + 'content-md5', + 'content-length' + ]) + unsigned = { + 'Host': HOST, + 'Date': 'Thu, 05 Jan 2012 21:31:40 GMT', + 'Content-Type': 'application/json', + 'Content-MD5': 'Sd/dVLAcvNLSq16eXua5uQ==', + 'Content-Length': '18', + } + signed = hs.sign(unsigned, method=METHOD, path=PATH) + hv = HeaderVerifier(headers=signed, secret=self.verify_secret, method=METHOD, path=PATH, required_headers=['date', '(request-target)']) + self.assertTrue(hv.verify()) + + +class TestVerifyHMACSHA256(TestVerifyHMACSHA1): + def setUp(self): + super(TestVerifyHMACSHA256, self).setUp() + self.algorithm = "hmac-sha256" + 
+class TestVerifyHMACSHA512(TestVerifyHMACSHA1): + def setUp(self): + super(TestVerifyHMACSHA512, self).setUp() + self.algorithm = "hmac-sha512" + + +class TestVerifyRSASHA1(TestVerifyHMACSHA1): + def setUp(self): + private_key_path = os.path.join(os.path.dirname(__file__), 'rsa_private.pem') + with open(private_key_path, 'rb') as f: + private_key = f.read() + + public_key_path = os.path.join(os.path.dirname(__file__), 'rsa_public.pem') + with open(public_key_path, 'rb') as f: + public_key = f.read() + + self.keyId = "Test" + self.algorithm = "rsa-sha1" + self.sign_secret = private_key + self.verify_secret = public_key + +class TestVerifyRSASHA256(TestVerifyRSASHA1): + def setUp(self): + super(TestVerifyRSASHA256, self).setUp() + self.algorithm = "rsa-sha256" + +class TestVerifyRSASHA512(TestVerifyRSASHA1): + def setUp(self): + super(TestVerifyRSASHA512, self).setUp() + self.algorithm = "rsa-sha512" diff --git a/stream/httpsig/utils.py b/stream/httpsig/utils.py new file mode 100644 index 0000000..b34e3fa --- /dev/null +++ b/stream/httpsig/utils.py @@ -0,0 +1,186 @@ +import re +import struct +import hashlib +import base64 +import six + +try: + # Python 3 + from urllib.request import parse_http_list +except ImportError: + # Python 2 + from urllib2 import parse_http_list + +from Crypto.PublicKey import RSA +from Crypto.Hash import SHA, SHA256, SHA512 + +ALGORITHMS = frozenset(['rsa-sha1', 'rsa-sha256', 'rsa-sha512', 'hmac-sha1', 'hmac-sha256', 'hmac-sha512']) +HASHES = {'sha1': SHA, + 'sha256': SHA256, + 'sha512': SHA512} + + +class HttpSigException(Exception): + pass + +""" +Constant-time string compare. +http://codahale.com/a-lesson-in-timing-attacks/ +""" +def ct_bytes_compare(a, b): + if not isinstance(a, six.binary_type): + a = a.decode('utf8') + if not isinstance(b, six.binary_type): + b = b.decode('utf8') + + if len(a) != len(b): + return False + + result = 0 + for x, y in zip(a, b): + if six.PY2: + result |= ord(x) ^ ord(y) + else: + result |= x ^ y + + return (result == 0) + +def generate_message(required_headers, headers, host=None, method=None, path=None): + headers = CaseInsensitiveDict(headers) + + if not required_headers: + required_headers = ['date'] + + signable_list = [] + for h in required_headers: + h = h.lower() + if h == '(request-target)': + if not method or not path: + raise Exception('method and path arguments required when using "(request-target)"') + signable_list.append('%s: %s %s' % (h, method.lower(), path)) + + elif h == 'host': + # 'host' special case due to requests lib restrictions + # 'host' is not available when adding auth so must use a param + # if no param used, defaults back to the 'host' header + if not host: + if 'host' in headers: + host = headers[h] + else: + raise Exception('missing required header "%s"' % (h)) + signable_list.append('%s: %s' % (h, host)) + else: + if h not in headers: + raise Exception('missing required header "%s"' % (h)) + + signable_list.append('%s: %s' % (h, headers[h])) + + signable = '\n'.join(signable_list).encode("ascii") + return signable + + +def parse_authorization_header(header): + if not isinstance(header, six.string_types): + header = header.decode("ascii") #HTTP headers cannot be Unicode. + + auth = header.split(" ", 1) + if len(auth) > 2: + raise ValueError('Invalid authorization header. (eg. Method key1=value1,key2="value, \"2\"")') + + # Split up any args into a dictionary. + values = {} + if len(auth) == 2: + auth_value = auth[1] + if auth_value and len(auth_value): + # This is tricky string magic. 
Let urllib do it. + fields = parse_http_list(auth_value) + + for item in fields: + # Only include keypairs. + if '=' in item: + # Split on the first '=' only. + key, value = item.split('=', 1) + if not (len(key) and len(value)): + continue + + # Unquote values, if quoted. + if value[0] == '"': + value = value[1:-1] + + values[key] = value + + # ("Signature", {"headers": "date", "algorithm": "hmac-sha256", ... }) + return (auth[0], CaseInsensitiveDict(values)) + +def build_signature_template(key_id, algorithm, headers): + """ + Build the Signature template for use with the Authorization header. + + key_id is the mandatory label indicating to the server which secret to use + algorithm is one of the six specified algorithms + headers is a list of http headers to be included in the signing string. + + The signature must be interpolated into the template to get the final Authorization header value. + """ + param_map = {'keyId': key_id, + 'algorithm': algorithm, + 'signature': '%s'} + if headers: + headers = [h.lower() for h in headers] + param_map['headers'] = ' '.join(headers) + kv = map('{0[0]}="{0[1]}"'.format, param_map.items()) + kv_string = ','.join(kv) + sig_string = 'Signature {0}'.format(kv_string) + return sig_string + + +def lkv(d): + parts = [] + while d: + len = struct.unpack('>I', d[:4])[0] + bits = d[4:len+4] + parts.append(bits) + d = d[len+4:] + return parts + +def sig(d): + return lkv(d)[1] + +def is_rsa(keyobj): + return lkv(keyobj.blob)[0] == "ssh-rsa" + +# based on http://stackoverflow.com/a/2082169/151401 +class CaseInsensitiveDict(dict): + def __init__(self, d=None, **kwargs): + super(CaseInsensitiveDict, self).__init__(**kwargs) + if d: + self.update((k.lower(), v) for k, v in six.iteritems(d)) + + def __setitem__(self, key, value): + super(CaseInsensitiveDict, self).__setitem__(key.lower(), value) + + def __getitem__(self, key): + return super(CaseInsensitiveDict, self).__getitem__(key.lower()) + + def __contains__(self, key): + return super(CaseInsensitiveDict, self).__contains__(key.lower()) + +# currently busted... +def get_fingerprint(key): + """ + Takes an ssh public key and generates the fingerprint. + + See: http://tools.ietf.org/html/rfc4716 for more info + """ + if key.startswith('ssh-rsa'): + key = key.split(' ')[1] + else: + regex = r'\-{4,5}[\w|| ]+\-{4,5}' + key = re.split(regex, key)[1] + + key = key.replace('\n', '') + key = key.strip().encode('ascii') + key = base64.b64decode(key) + fp_plain = hashlib.md5(key).hexdigest() + return ':'.join(a+b for a,b in zip(fp_plain[::2], fp_plain[1::2])) + diff --git a/stream/httpsig/verify.py b/stream/httpsig/verify.py new file mode 100644 index 0000000..a6e1ba3 --- /dev/null +++ b/stream/httpsig/verify.py @@ -0,0 +1,90 @@ +""" +Module to assist in verifying a signed header. +""" +import six + +from Crypto.Hash import HMAC +from Crypto.PublicKey import RSA +from Crypto.Signature import PKCS1_v1_5 +from base64 import b64decode + +from .sign import Signer +from .utils import * + + +class Verifier(Signer): + """ + Verifies signed text against a secret. + For HMAC, the secret is the shared secret. + For RSA, the secret is the PUBLIC key. + """ + def _verify(self, data, signature): + """ + Verifies the data matches a signed version with the given signature. 
+ `data` is the message to verify + `signature` is a base64-encoded signature to verify against `data` + """ + + if isinstance(data, six.string_types): data = data.encode("ascii") + if isinstance(signature, six.string_types): signature = signature.encode("ascii") + + if self.sign_algorithm == 'rsa': + h = self._hash.new() + h.update(data) + return self._rsa.verify(h, b64decode(signature)) + + elif self.sign_algorithm == 'hmac': + h = self._sign_hmac(data) + s = b64decode(signature) + return ct_bytes_compare(h, s) + + else: + raise HttpSigException("Unsupported algorithm.") + + +class HeaderVerifier(Verifier): + """ + Verifies an HTTP signature from given headers. + """ + def __init__(self, headers, secret, required_headers=None, method=None, path=None, host=None): + """ + Instantiate a HeaderVerifier object. + + :param headers: A dictionary of headers from the HTTP request. + :param secret: The HMAC secret or RSA *public* key. + :param required_headers: Optional. A list of headers required to be present to validate, even if the signature is otherwise valid. Defaults to ['date']. + :param method: Optional. The HTTP method used in the request (eg. "GET"). Required for the '(request-target)' header. + :param path: Optional. The HTTP path requested, exactly as sent (including query arguments and fragments). Required for the '(request-target)' header. + :param host: Optional. The value to use for the Host header, if not supplied in :param:headers. + """ + required_headers = required_headers or ['date'] + + auth = parse_authorization_header(headers['authorization']) + if len(auth) == 2: + self.auth_dict = auth[1] + else: + raise HttpSigException("Invalid authorization header.") + + self.headers = CaseInsensitiveDict(headers) + self.required_headers = [s.lower() for s in required_headers] + self.method = method + self.path = path + self.host = host + + super(HeaderVerifier, self).__init__(secret, algorithm=self.auth_dict['algorithm']) + + def verify(self): + """ + Verify the headers based on the arguments passed at creation and current properties. + + Raises an Exception if a required header (:param:required_headers) is not found in the signature. + Returns True or False. 
+ """ + auth_headers = self.auth_dict.get('headers', 'date').split(' ') + + if len(set(self.required_headers) - set(auth_headers)) > 0: + raise Exception('{} is a required header(s)'.format(', '.join(set(self.required_headers)-set(auth_headers)))) + + signing_str = generate_message(auth_headers, self.headers, self.host, self.method, self.path) + + return self._verify(signing_str, self.auth_dict['signature']) From 10ec1a19f6ca0e9aba6df3eda285d3877b0464cb Mon Sep 17 00:00:00 2001 From: Dwight Gunning Date: Thu, 21 Dec 2017 10:47:30 +0100 Subject: [PATCH 059/208] Converted to pycryptodomex to decouple from Pycrypto --- setup.py | 2 +- stream/httpsig/sign.py | 22 +++++++++++----------- stream/httpsig/utils.py | 24 ++++++++++++------------ stream/httpsig/verify.py | 30 +++++++++++++++--------------- 4 files changed, 39 insertions(+), 39 deletions(-) diff --git a/setup.py b/setup.py index 5b37615..8b76efc 100644 --- a/setup.py +++ b/setup.py @@ -32,7 +32,7 @@ requests = 'requests[security]>=2.4.1,<3' install_requires = [ - 'pycryptodome==3.4.7', + 'pycryptodomex==3.4.7', 'pyjwt==1.3.0', requests, 'six>=1.8.0' diff --git a/stream/httpsig/sign.py b/stream/httpsig/sign.py index 7125035..6187b59 100644 --- a/stream/httpsig/sign.py +++ b/stream/httpsig/sign.py @@ -1,9 +1,9 @@ import base64 import six -from Crypto.Hash import HMAC -from Crypto.PublicKey import RSA -from Crypto.Signature import PKCS1_v1_5 +from Cryptodome.Hash import HMAC +from Cryptodome.PublicKey import RSA +from Cryptodome.Signature import PKCS1_v1_5 from .utils import * @@ -15,20 +15,20 @@ class Signer(object): """ When using an RSA algo, the secret is a PEM-encoded private key. When using an HMAC algo, the secret is the HMAC signing secret. - + Password-protected keyfiles are not supported. 
""" def __init__(self, secret, algorithm=None): if algorithm is None: algorithm = DEFAULT_SIGN_ALGORITHM - + assert algorithm in ALGORITHMS, "Unknown algorithm" if isinstance(secret, six.string_types): secret = secret.encode("ascii") - + self._rsa = None self._hash = None self.sign_algorithm, self.hash_algorithm = algorithm.split('-') - + if self.sign_algorithm == 'rsa': try: rsa_key = RSA.importKey(secret) @@ -36,7 +36,7 @@ def __init__(self, secret, algorithm=None): self._hash = HASHES[self.hash_algorithm] except ValueError: raise HttpSigException("Invalid key.") - + elif self.sign_algorithm == 'hmac': self._hash = HMAC.new(secret, digestmod=HASHES[self.hash_algorithm]) @@ -81,7 +81,7 @@ class HeaderSigner(Signer): def __init__(self, key_id, secret, algorithm=None, headers=None): if algorithm is None: algorithm = DEFAULT_SIGN_ALGORITHM - + super(HeaderSigner, self).__init__(secret=secret, algorithm=algorithm) self.headers = headers or ['date'] self.signature_template = build_signature_template(key_id, algorithm, headers) @@ -98,9 +98,9 @@ def sign(self, headers, host=None, method=None, path=None): headers = CaseInsensitiveDict(headers) required_headers = self.headers or ['date'] signable = generate_message(required_headers, headers, host, method, path) - + signature = self._sign(signable) headers['authorization'] = self.signature_template % signature - + return headers diff --git a/stream/httpsig/utils.py b/stream/httpsig/utils.py index b34e3fa..baa066b 100644 --- a/stream/httpsig/utils.py +++ b/stream/httpsig/utils.py @@ -11,8 +11,8 @@ # Python 2 from urllib2 import parse_http_list -from Crypto.PublicKey import RSA -from Crypto.Hash import SHA, SHA256, SHA512 +from Cryptodome.PublicKey import RSA +from Cryptodome.Hash import SHA, SHA256, SHA512 ALGORITHMS = frozenset(['rsa-sha1', 'rsa-sha256', 'rsa-sha512', 'hmac-sha1', 'hmac-sha256', 'hmac-sha512']) HASHES = {'sha1': SHA, @@ -42,15 +42,15 @@ def ct_bytes_compare(a, b): result |= ord(x) ^ ord(y) else: result |= x ^ y - + return (result == 0) def generate_message(required_headers, headers, host=None, method=None, path=None): headers = CaseInsensitiveDict(headers) - + if not required_headers: required_headers = ['date'] - + signable_list = [] for h in required_headers: h = h.lower() @@ -58,7 +58,7 @@ def generate_message(required_headers, headers, host=None, method=None, path=Non if not method or not path: raise Exception('method and path arguments required when using "(request-target)"') signable_list.append('%s: %s %s' % (h, method.lower(), path)) - + elif h == 'host': # 'host' special case due to requests lib restrictions # 'host' is not available when adding auth so must use a param @@ -82,11 +82,11 @@ def generate_message(required_headers, headers, host=None, method=None, path=Non def parse_authorization_header(header): if not isinstance(header, six.string_types): header = header.decode("ascii") #HTTP headers cannot be Unicode. - + auth = header.split(" ", 1) if len(auth) > 2: raise ValueError('Invalid authorization header. (eg. Method key1=value1,key2="value, \"2\"")') - + # Split up any args into a dictionary. values = {} if len(auth) == 2: @@ -94,7 +94,7 @@ def parse_authorization_header(header): if auth_value and len(auth_value): # This is tricky string magic. Let urllib do it. fields = parse_http_list(auth_value) - + for item in fields: # Only include keypairs. 
if '=' in item: @@ -102,13 +102,13 @@ def parse_authorization_header(header): key, value = item.split('=', 1) if not (len(key) and len(value)): continue - + # Unquote values, if quoted. if value[0] == '"': value = value[1:-1] - + values[key] = value - + # ("Signature", {"headers": "date", "algorithm": "hmac-sha256", ... }) return (auth[0], CaseInsensitiveDict(values)) diff --git a/stream/httpsig/verify.py b/stream/httpsig/verify.py index a6e1ba3..a3f3074 100644 --- a/stream/httpsig/verify.py +++ b/stream/httpsig/verify.py @@ -3,9 +3,9 @@ """ import six -from Crypto.Hash import HMAC -from Crypto.PublicKey import RSA -from Crypto.Signature import PKCS1_v1_5 +from Cryptodome.Hash import HMAC +from Cryptodome.PublicKey import RSA +from Cryptodome.Signature import PKCS1_v1_5 from base64 import b64decode from .sign import Signer @@ -24,20 +24,20 @@ def _verify(self, data, signature): `data` is the message to verify `signature` is a base64-encoded signature to verify against `data` """ - + if isinstance(data, six.string_types): data = data.encode("ascii") if isinstance(signature, six.string_types): signature = signature.encode("ascii") - + if self.sign_algorithm == 'rsa': h = self._hash.new() h.update(data) return self._rsa.verify(h, b64decode(signature)) - + elif self.sign_algorithm == 'hmac': h = self._sign_hmac(data) s = b64decode(signature) return ct_bytes_compare(h, s) - + else: raise HttpSigException("Unsupported algorithm.") @@ -49,7 +49,7 @@ class HeaderVerifier(Verifier): def __init__(self, headers, secret, required_headers=None, method=None, path=None, host=None): """ Instantiate a HeaderVerifier object. - + :param headers: A dictionary of headers from the HTTP request. :param secret: The HMAC secret or RSA *public* key. :param required_headers: Optional. A list of headers required to be present to validate, even if the signature is otherwise valid. Defaults to ['date']. @@ -58,33 +58,33 @@ def __init__(self, headers, secret, required_headers=None, method=None, path=Non :param host: Optional. The value to use for the Host header, if not supplied in :param:headers. """ required_headers = required_headers or ['date'] - + auth = parse_authorization_header(headers['authorization']) if len(auth) == 2: self.auth_dict = auth[1] else: raise HttpSigException("Invalid authorization header.") - + self.headers = CaseInsensitiveDict(headers) self.required_headers = [s.lower() for s in required_headers] self.method = method self.path = path self.host = host - + super(HeaderVerifier, self).__init__(secret, algorithm=self.auth_dict['algorithm']) def verify(self): """ Verify the headers based on the arguments passed at creation and current properties. - + Raises an Exception if a required header (:param:required_headers) is not found in the signature. Returns True or False. 
""" auth_headers = self.auth_dict.get('headers', 'date').split(' ') - + if len(set(self.required_headers) - set(auth_headers)) > 0: raise Exception('{} is a required header(s)'.format(', '.join(set(self.required_headers)-set(auth_headers)))) - + signing_str = generate_message(auth_headers, self.headers, self.host, self.method, self.path) - + return self._verify(signing_str, self.auth_dict['signature']) From 57e5ecd2e8aed1af09240e8600ac628c295c9839 Mon Sep 17 00:00:00 2001 From: Dwight Gunning Date: Thu, 21 Dec 2017 12:10:37 +0100 Subject: [PATCH 060/208] Re-organized tests to enable pytest to discover the full suite --- README.md | 6 +++--- setup.py | 2 +- stream/httpsig/tests/test_signature.py | 8 +++++--- stream/tests/__init__.py | 1 + stream/{tests.py => tests/test_client.py} | 0 5 files changed, 10 insertions(+), 7 deletions(-) create mode 100644 stream/tests/__init__.py rename stream/{tests.py => tests/test_client.py} (100%) diff --git a/README.md b/README.md index c0b980b..5e03d93 100644 --- a/README.md +++ b/README.md @@ -131,11 +131,11 @@ redirect_url = client.create_redirect_url('http://google.com/', 'user_id', event First, make sure you can run the test suite. Tests are run via py.test ```bash -py.test stream/tests.py +py.test # with coverage -py.test stream/tests.py --cov stream --cov-report html +py.test --cov stream --cov-report html # against a local API backend -LOCAL=true py.test stream/tests.py +LOCAL=true py.test ``` ### Copyright and License Information diff --git a/setup.py b/setup.py index 8b76efc..86ff028 100644 --- a/setup.py +++ b/setup.py @@ -49,7 +49,7 @@ def run_tests(self): # import here, cause outside the eggs aren't loaded import pytest errno = pytest.main( - 'stream/tests.py --cov stream --cov-report term-missing -v') + '--cov stream --cov-report term-missing -v') sys.exit(errno) setup( diff --git a/stream/httpsig/tests/test_signature.py b/stream/httpsig/tests/test_signature.py index c5a61ff..546f1c4 100755 --- a/stream/httpsig/tests/test_signature.py +++ b/stream/httpsig/tests/test_signature.py @@ -10,16 +10,18 @@ from stream.httpsig.utils import parse_authorization_header -sign.DEFAULT_SIGN_ALGORITHM = "rsa-sha256" - - class TestSign(unittest.TestCase): + DEFAULT_SIGN_ALGORITHM = sign.DEFAULT_SIGN_ALGORITHM def setUp(self): + sign.DEFAULT_SIGN_ALGORITHM = "rsa-sha256" self.key_path = os.path.join(os.path.dirname(__file__), 'rsa_private.pem') with open(self.key_path, 'rb') as f: self.key = f.read() + def tearDown(self): + sign.DEFAULT_SIGN_ALGORITHM = self.DEFAULT_SIGN_ALGORITHM + def test_default(self): hs = sign.HeaderSigner(key_id='Test', secret=self.key) unsigned = { diff --git a/stream/tests/__init__.py b/stream/tests/__init__.py new file mode 100644 index 0000000..bbd3b0d --- /dev/null +++ b/stream/tests/__init__.py @@ -0,0 +1 @@ +from .test_client import * diff --git a/stream/tests.py b/stream/tests/test_client.py similarity index 100% rename from stream/tests.py rename to stream/tests/test_client.py From 695494d3067322cabea520289b94772cd9d607f4 Mon Sep 17 00:00:00 2001 From: Dwight Gunning Date: Thu, 21 Dec 2017 12:11:44 +0100 Subject: [PATCH 061/208] Fixed / updated test dependencies --- dev_requirements.txt | 5 +++-- setup.py | 5 ++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/dev_requirements.txt b/dev_requirements.txt index bde7bd2..5d6b6de 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -1,5 +1,6 @@ -pep8 +pytest==3.3.1 python-coveralls +unittest2 +pytest-cov==2.5.1 python-dateutil -pytest-cov -e . 
diff --git a/setup.py b/setup.py index 86ff028..52333f0 100644 --- a/setup.py +++ b/setup.py @@ -9,12 +9,11 @@ unit = 'unittest2py3k' if sys.version_info > (3, 0, 0) else 'unittest2' tests_require = [ - 'pep8', unit, - #'pytest', + 'pytest==3.3.1', 'python-coveralls', 'unittest2', - 'pytest-cov==1.8.1', + 'pytest-cov==2.5.1', 'python-dateutil' ] From 5277ce65597a93aa351ee47f2e50bc95edae5eb3 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Thu, 21 Dec 2017 12:27:48 +0100 Subject: [PATCH 062/208] 2.8.0 release --- CHANGELOG | 11 +++++++++++ stream/__init__.py | 2 +- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/CHANGELOG b/CHANGELOG index 8e5f5a7..b39eeb5 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,17 @@ Change history ================ +======= +2.8.0 +====== +:release-date: 2017-12-21 +:by: Tommaso Barbugli + +Fixes install issues on Windows + +* Bundle http-sig library +* Use pycryptodomex instead of the discontinued pycrypto library + ======= 2.7.0 ====== diff --git a/stream/__init__.py b/stream/__init__.py index 542fa72..ad0495b 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = 'Copyright 2014, Stream.io, Inc' __credits__ = ['Thierry Schellenbach, mellowmorning.com, @tschellenbach'] __license__ = 'BSD-3-Clause' -__version__ = '2.7.0' +__version__ = '2.8.0' __maintainer__ = 'Thierry Schellenbach' __email__ = 'support@getstream.io' __status__ = 'Production' From dad97e718295b960fd331f51502de3f335a7c8de Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Thu, 21 Dec 2017 14:00:32 +0100 Subject: [PATCH 063/208] Update .travis.yml --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 38a62a8..413b2a5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -20,7 +20,7 @@ notifications: install: - pip install -r dev_requirements.txt script: - - py.test stream/tests.py -lv --cov stream --cov-report term-missing + - py.test -lv --cov stream --cov-report term-missing after_script: # ensure we validate against pep standards - "pep8 --exclude=migrations --ignore=E501,E225,W293 stream" From 30d5646842299c584e8193315ec4a9f25f8ef519 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Thu, 21 Dec 2017 14:05:22 +0100 Subject: [PATCH 064/208] support test build for py 2.6 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 52333f0..82fc203 100644 --- a/setup.py +++ b/setup.py @@ -10,7 +10,7 @@ unit = 'unittest2py3k' if sys.version_info > (3, 0, 0) else 'unittest2' tests_require = [ unit, - 'pytest==3.3.1', + 'pytest==3.2.5', 'python-coveralls', 'unittest2', 'pytest-cov==2.5.1', From b0c16fa062e142e3d78f7107d773606d7e7edcaf Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Thu, 21 Dec 2017 14:23:04 +0100 Subject: [PATCH 065/208] nuke coveralls, fix import --- .travis.yml | 6 ------ setup.py | 3 +-- stream/client.py | 2 +- 3 files changed, 2 insertions(+), 9 deletions(-) diff --git a/.travis.yml b/.travis.yml index 413b2a5..40f27d3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,10 +12,6 @@ matrix: fast_finish: true cache: pip -notifications: - email: - - thierryschellenbach@gmail.com - - tbarbugli@gmail.com install: - pip install -r dev_requirements.txt @@ -24,5 +20,3 @@ script: after_script: # ensure we validate against pep standards - "pep8 --exclude=migrations --ignore=E501,E225,W293 stream" -after_success: - - coveralls diff --git a/setup.py b/setup.py index 82fc203..ec21eb2 100644 --- a/setup.py +++ b/setup.py @@ -11,9 +11,8 
@@ tests_require = [ unit, 'pytest==3.2.5', - 'python-coveralls', 'unittest2', - 'pytest-cov==2.5.1', + 'pytest-cov', 'python-dateutil' ] diff --git a/stream/client.py b/stream/client.py index cd4fd52..d790535 100644 --- a/stream/client.py +++ b/stream/client.py @@ -3,13 +3,13 @@ import logging import os -from httpsig.requests_auth import HTTPSignatureAuth import jwt import requests from requests.adapters import HTTPAdapter from stream import exceptions, serializer from stream.signing import sign from stream.utils import validate_feed_slug, validate_user_id +from stream.httpsig.requests_auth import HTTPSignatureAuth from requests import Request try: From 94d8fded093762542e6339785db4f20bc1679102 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Thu, 21 Dec 2017 14:23:40 +0100 Subject: [PATCH 066/208] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 5e03d93..c6a8a41 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ stream-python ============= -[![Build Status](https://travis-ci.org/GetStream/stream-python.svg?branch=master)](https://travis-ci.org/GetStream/stream-python) [![Coverage Status](https://coveralls.io/repos/github/GetStream/stream-python/badge.svg?branch=master)](https://coveralls.io/r/GetStream/stream-python?branch=master) [![PyPI version](https://badge.fury.io/py/stream-python.svg)](http://badge.fury.io/py/stream-python) +[![Build Status](https://travis-ci.org/GetStream/stream-python.svg?branch=master)](https://travis-ci.org/GetStream/stream-python) /stream-python?branch=master) [![PyPI version](https://badge.fury.io/py/stream-python.svg)](http://badge.fury.io/py/stream-python) [stream-python](https://github.com/GetStream/stream-python) is the official Python client for [Stream](https://getstream.io/), a web service for building scalable newsfeeds and activity streams. From 4eea62cb40ee6efe9928dd7f3951027cef2861e6 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Thu, 21 Dec 2017 14:24:34 +0100 Subject: [PATCH 067/208] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index c6a8a41..a3243f9 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ stream-python ============= -[![Build Status](https://travis-ci.org/GetStream/stream-python.svg?branch=master)](https://travis-ci.org/GetStream/stream-python) /stream-python?branch=master) [![PyPI version](https://badge.fury.io/py/stream-python.svg)](http://badge.fury.io/py/stream-python) +[![Build Status](https://travis-ci.org/GetStream/stream-python.svg?branch=master)](https://travis-ci.org/GetStream/stream-python) [![PyPI version](https://badge.fury.io/py/stream-python.svg)](http://badge.fury.io/py/stream-python) [stream-python](https://github.com/GetStream/stream-python) is the official Python client for [Stream](https://getstream.io/), a web service for building scalable newsfeeds and activity streams. 
From 8ac549457d6aa676e2ab9e4f17558329005fd642 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Thu, 21 Dec 2017 14:31:58 +0100 Subject: [PATCH 068/208] update dev_requirements.txt as well to work with py2.6 --- dev_requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev_requirements.txt b/dev_requirements.txt index 5d6b6de..ad475c8 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -1,4 +1,4 @@ -pytest==3.3.1 +pytest==3.2.5 python-coveralls unittest2 pytest-cov==2.5.1 From 8c17369d80dba2af3225cdee0889c0d8a4f0e8de Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Thu, 21 Dec 2017 14:49:38 +0100 Subject: [PATCH 069/208] remove dict comprehension for 2.6 compat --- stream/httpsig/tests/test_verify.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/stream/httpsig/tests/test_verify.py b/stream/httpsig/tests/test_verify.py index 9d34393..7ed0f7b 100755 --- a/stream/httpsig/tests/test_verify.py +++ b/stream/httpsig/tests/test_verify.py @@ -20,7 +20,9 @@ def _parse_auth(self, auth): # convert into [(k1,"v1"), (k2, "v2"), ...] param_pairs = [p.split('=', 1) for p in param_list] # convert into {k1:v1, k2:v2, ...} - param_dict = {k: v.strip('"') for k, v in param_pairs} + param_dict = {} + for k, v in param_pairs: + param_dict[k] = v.strip('"') return param_dict From 7b7e54ff50c179545d59860a656af25f0fd2971a Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Thu, 21 Dec 2017 14:54:58 +0100 Subject: [PATCH 070/208] dont use assertRaises as ctx manager (2.6 compat) --- stream/httpsig/tests/test_verify.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/stream/httpsig/tests/test_verify.py b/stream/httpsig/tests/test_verify.py index 7ed0f7b..8d4bf36 100755 --- a/stream/httpsig/tests/test_verify.py +++ b/stream/httpsig/tests/test_verify.py @@ -105,8 +105,7 @@ def test_incorrect_headers(self): signed = hs.sign(unsigned, method=METHOD, path=PATH) hv = HeaderVerifier(headers=signed, secret=self.verify_secret, required_headers=["some-other-header"], host=HOST, method=METHOD, path=PATH) - with self.assertRaises(Exception) as ex: - hv.verify() + self.assertRaises(Exception, hv.verify) def test_extra_auth_headers(self): HOST = "example.com" From 089c7cc83bdc33d13a833d2e56be66424ef2f3ed Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Thu, 21 Dec 2017 14:57:51 +0100 Subject: [PATCH 071/208] 2.8.1 --- CHANGELOG | 8 ++++++++ stream/__init__.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/CHANGELOG b/CHANGELOG index b39eeb5..0287946 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,14 @@ Change history ================ +======= +2.8.1 +====== +:release-date: 2017-12-21 +:by: Tommaso Barbugli + +Fixes a regression with embedded httpsig and Python 3 + ======= 2.8.0 ====== diff --git a/stream/__init__.py b/stream/__init__.py index ad0495b..dab0237 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = 'Copyright 2014, Stream.io, Inc' __credits__ = ['Thierry Schellenbach, mellowmorning.com, @tschellenbach'] __license__ = 'BSD-3-Clause' -__version__ = '2.8.0' +__version__ = '2.8.1' __maintainer__ = 'Thierry Schellenbach' __email__ = 'support@getstream.io' __status__ = 'Production' From b1b652462898b1c389f189fe1222c5fdf64f13e0 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Thu, 21 Dec 2017 15:03:03 +0100 Subject: [PATCH 072/208] remove assertIn --- stream/httpsig/tests/test_signature.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) 
diff --git a/stream/httpsig/tests/test_signature.py b/stream/httpsig/tests/test_signature.py index 546f1c4..bab679a 100755 --- a/stream/httpsig/tests/test_signature.py +++ b/stream/httpsig/tests/test_signature.py @@ -28,14 +28,14 @@ def test_default(self): 'Date': 'Thu, 05 Jan 2012 21:31:40 GMT' } signed = hs.sign(unsigned) - self.assertIn('Date', signed) + self.assertTrue('Date' in signed) self.assertEqual(unsigned['Date'], signed['Date']) - self.assertIn('Authorization', signed) + self.assertTrue('Authorization' in signed) auth = parse_authorization_header(signed['authorization']) params = auth[1] - self.assertIn('keyId', params) - self.assertIn('algorithm', params) - self.assertIn('signature', params) + self.assertTrue('keyId' in params) + self.assertTrue('algorithm' in params) + self.assertTrue('signature' in params) self.assertEqual(params['keyId'], 'Test') self.assertEqual(params['algorithm'], 'rsa-sha256') self.assertEqual(params['signature'], 'ATp0r26dbMIxOopqw0OfABDT7CKMIoENumuruOtarj8n/97Q3htHFYpH8yOSQk3Z5zh8UxUym6FYTb5+A0Nz3NRsXJibnYi7brE/4tx5But9kkFGzG+xpUmimN4c3TMN7OFH//+r8hBf7BT9/GmHDUVZT2JzWGLZES2xDOUuMtA=') @@ -58,14 +58,14 @@ def test_all(self): } signed = hs.sign(unsigned, method='POST', path='/foo?param=value&pet=dog') - self.assertIn('Date', signed) + self.assertTrue('Date' in signed) self.assertEqual(unsigned['Date'], signed['Date']) - self.assertIn('Authorization', signed) + self.assertTrue('Authorization' in signed) auth = parse_authorization_header(signed['authorization']) params = auth[1] - self.assertIn('keyId', params) - self.assertIn('algorithm', params) - self.assertIn('signature', params) + self.assertTrue('keyId' in params) + self.assertTrue('algorithm' in params) + self.assertTrue('signature' in params) self.assertEqual(params['keyId'], 'Test') self.assertEqual(params['algorithm'], 'rsa-sha256') self.assertEqual(params['headers'], '(request-target) host date content-type content-md5 content-length') From 5dede7b8fc2804f71a98986ff301f01e2298a417 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Thu, 21 Dec 2017 15:12:00 +0100 Subject: [PATCH 073/208] try installing the package as final step --- .travis.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 40f27d3..571100d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -18,5 +18,6 @@ install: script: - py.test -lv --cov stream --cov-report term-missing after_script: - # ensure we validate against pep standards - "pep8 --exclude=migrations --ignore=E501,E225,W293 stream" + - "python setup.py install" + From 8445c53b6f9a92f786a2fa2571528395b4a53c39 Mon Sep 17 00:00:00 2001 From: Dwight Gunning Date: Thu, 21 Dec 2017 14:04:28 +0100 Subject: [PATCH 074/208] Updated relative import --- stream/client.py | 1 + 1 file changed, 1 insertion(+) diff --git a/stream/client.py b/stream/client.py index d790535..92b911a 100644 --- a/stream/client.py +++ b/stream/client.py @@ -7,6 +7,7 @@ import requests from requests.adapters import HTTPAdapter from stream import exceptions, serializer +from stream.httpsig.requests_auth import HTTPSignatureAuth from stream.signing import sign from stream.utils import validate_feed_slug, validate_user_id from stream.httpsig.requests_auth import HTTPSignatureAuth From 3a5de5d187e63a8a7ecb9bbd64961a2009316bda Mon Sep 17 00:00:00 2001 From: Hannes Van De Vreken Date: Thu, 5 Apr 2018 17:41:52 +0200 Subject: [PATCH 075/208] Fixed typo --- stream/collections.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stream/collections.py 
b/stream/collections.py index cd58bc5..9408518 100644 --- a/stream/collections.py +++ b/stream/collections.py @@ -63,7 +63,7 @@ def delete(self, collection_name, ids): Delete data from meta. :param collection_name: Collection Name i.e 'user' :param ids: list of ids to delete i.e [123,456] - :return: data that was deleted if if successful or not. + :return: data that was deleted if successful or not. **Example**:: client.collections.delete('user', '1') From c84a74c7ce2af87a7c5f71ffa6e75bfe8f804aca Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Thu, 12 Apr 2018 09:51:37 -0600 Subject: [PATCH 076/208] HTTP DELETE: use query parameters instead of body --- stream/collections.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/stream/collections.py b/stream/collections.py index 9408518..55a5599 100644 --- a/stream/collections.py +++ b/stream/collections.py @@ -74,9 +74,9 @@ def delete(self, collection_name, ids): ids = [ids] ids = [str(i) for i in ids] - data = {'collection_name': collection_name, 'ids': ids} + params = {'collection_name': collection_name, 'ids': ids} - response = self.client.delete('meta/', service_name='api', data=data, + response = self.client.delete('meta/', service_name='api', params=params, signature=self.token) return response From dd2270b0ce0abb74b252e1826e20ef9d2f6f9cf6 Mon Sep 17 00:00:00 2001 From: Dwight Gunning Date: Wed, 16 May 2018 18:02:46 +0200 Subject: [PATCH 077/208] Add feed_id to JWT for url redirects --- stream/client.py | 2 +- stream/tests/test_client.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/stream/client.py b/stream/client.py index 92b911a..8883c33 100644 --- a/stream/client.py +++ b/stream/client.py @@ -322,7 +322,7 @@ def create_redirect_url(self, target_url, user_id, events): getstream.io/personalization ''' # generate the JWT token - auth_token = self.create_jwt_token('redirect_and_track', '*', user_id=user_id) + auth_token = self.create_jwt_token('redirect_and_track', '*', '*', user_id=user_id) # setup the params params = dict(auth_type='jwt', authorization=auth_token, url=target_url) params['api_key'] = self.api_key diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index ca171b4..f24562a 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -1068,6 +1068,7 @@ def test_create_email_redirect(self): self.assertEqual(decoded, { 'resource': 'redirect_and_track', 'action': '*', + 'feed_id': '*', 'user_id': 'tommaso' }) From 6b895e9abc4d9e34d60e4a3c1b863af2a1059cbf Mon Sep 17 00:00:00 2001 From: Dwight Gunning Date: Mon, 28 May 2018 16:21:48 +0200 Subject: [PATCH 078/208] Restrict dependency pyOpenSSL to < 18.0.0 on python 2.6 --- setup.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/setup.py b/setup.py index ec21eb2..9052f87 100644 --- a/setup.py +++ b/setup.py @@ -36,6 +36,9 @@ 'six>=1.8.0' ] +if sys.version_info < (2, 7, 0): + install_requires.append('pyOpenSSL<18.0.0') + class PyTest(TestCommand): def finalize_options(self): From 2925918a0e8a20339cb0893823870819faab1105 Mon Sep 17 00:00:00 2001 From: Dwight Gunning Date: Thu, 17 May 2018 11:36:40 +0200 Subject: [PATCH 079/208] Added codecov config and removed coveralls --- .coveralls.yml | 2 -- .travis.yml | 4 ++-- dev_requirements.txt | 4 ++-- setup.py | 2 +- 4 files changed, 5 insertions(+), 7 deletions(-) delete mode 100644 .coveralls.yml diff --git a/.coveralls.yml b/.coveralls.yml deleted file mode 100644 index 711c814..0000000 --- a/.coveralls.yml +++ /dev/null @@ -1,2 +0,0 @@ 
-repo_token: WkTPYotawAoZyEqzLyK0BcGxyPvxIbaiQ -service_name: circle-ci \ No newline at end of file diff --git a/.travis.yml b/.travis.yml index 571100d..f622ccf 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,8 +16,8 @@ cache: pip install: - pip install -r dev_requirements.txt script: - - py.test -lv --cov stream --cov-report term-missing + - py.test -lv --cov=./ after_script: - "pep8 --exclude=migrations --ignore=E501,E225,W293 stream" - "python setup.py install" - + - "codecov" diff --git a/dev_requirements.txt b/dev_requirements.txt index ad475c8..551d254 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -1,6 +1,6 @@ pytest==3.2.5 -python-coveralls -unittest2 +codecov==2.0.15 +unittest2==1.1.0 pytest-cov==2.5.1 python-dateutil -e . diff --git a/setup.py b/setup.py index ec21eb2..c494160 100644 --- a/setup.py +++ b/setup.py @@ -47,7 +47,7 @@ def run_tests(self): # import here, cause outside the eggs aren't loaded import pytest errno = pytest.main( - '--cov stream --cov-report term-missing -v') + '-v --cov=./') sys.exit(errno) setup( From 7006e350422828f388635c0c067e318a17549913 Mon Sep 17 00:00:00 2001 From: Dwight Gunning Date: Thu, 17 May 2018 11:54:08 +0200 Subject: [PATCH 080/208] Added code coverage badge to readme --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index a3243f9..ffc8546 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ stream-python ============= -[![Build Status](https://travis-ci.org/GetStream/stream-python.svg?branch=master)](https://travis-ci.org/GetStream/stream-python) [![PyPI version](https://badge.fury.io/py/stream-python.svg)](http://badge.fury.io/py/stream-python) +[![Build Status](https://travis-ci.org/GetStream/stream-python.svg?branch=master)](https://travis-ci.org/GetStream/stream-python) [![codecov](https://codecov.io/gh/GetStream/stream-python/branch/master/graph/badge.svg)](https://codecov.io/gh/GetStream/stream-python) [![PyPI version](https://badge.fury.io/py/stream-python.svg)](http://badge.fury.io/py/stream-python) [stream-python](https://github.com/GetStream/stream-python) is the official Python client for [Stream](https://getstream.io/), a web service for building scalable newsfeeds and activity streams. 
From 209af5b5f5e2423eaa5f099410102a615c5a33c7 Mon Sep 17 00:00:00 2001 From: Dwight Gunning Date: Wed, 16 May 2018 18:02:46 +0200 Subject: [PATCH 081/208] Add feed_id to JWT for url redirects --- stream/client.py | 2 +- stream/tests/test_client.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/stream/client.py b/stream/client.py index 92b911a..8883c33 100644 --- a/stream/client.py +++ b/stream/client.py @@ -322,7 +322,7 @@ def create_redirect_url(self, target_url, user_id, events): getstream.io/personalization ''' # generate the JWT token - auth_token = self.create_jwt_token('redirect_and_track', '*', user_id=user_id) + auth_token = self.create_jwt_token('redirect_and_track', '*', '*', user_id=user_id) # setup the params params = dict(auth_type='jwt', authorization=auth_token, url=target_url) params['api_key'] = self.api_key diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index ca171b4..f24562a 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -1068,6 +1068,7 @@ def test_create_email_redirect(self): self.assertEqual(decoded, { 'resource': 'redirect_and_track', 'action': '*', + 'feed_id': '*', 'user_id': 'tommaso' }) From f34972bcaf94e88dead95f82da13917de095ad93 Mon Sep 17 00:00:00 2001 From: Dwight Gunning Date: Mon, 28 May 2018 16:21:48 +0200 Subject: [PATCH 082/208] Restrict dependency pyOpenSSL to < 18.0.0 on python 2.6 --- setup.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/setup.py b/setup.py index c494160..9e97859 100644 --- a/setup.py +++ b/setup.py @@ -36,6 +36,9 @@ 'six>=1.8.0' ] +if sys.version_info < (2, 7, 0): + install_requires.append('pyOpenSSL<18.0.0') + class PyTest(TestCommand): def finalize_options(self): From 2256d6d74992ec042dc955f00abf1885e7c21bf8 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Thu, 5 Jul 2018 16:40:03 +0200 Subject: [PATCH 083/208] add new get activity API endpoint --- stream/client.py | 32 +- stream/tests/test_client.py | 1123 ++++++++++++++++++----------------- 2 files changed, 602 insertions(+), 553 deletions(-) diff --git a/stream/client.py b/stream/client.py index 8883c33..d327ede 100644 --- a/stream/client.py +++ b/stream/client.py @@ -6,6 +6,8 @@ import jwt import requests from requests.adapters import HTTPAdapter +from stream.serializer import _datetime_encoder + from stream import exceptions, serializer from stream.httpsig.requests_auth import HTTPSignatureAuth from stream.signing import sign @@ -304,9 +306,7 @@ def update_activities(self, activities): raise TypeError('Activities parameter should be of type list') auth_token = self.create_jwt_token('activities', '*', feed_id='*') - data = dict(activities=activities) - return self.post('activities/', auth_token, data=data) def update_activity(self, activity): @@ -315,6 +315,34 @@ def update_activity(self, activity): ''' return self.update_activities([activity]) + def get_activities(self, ids=None, foreign_id_time=None): + ''' + Retrieves activities by their ID or foreign_id + time combination + + ids: list of activity IDs + foreign_id_time: list of tuples (foreign_id, time) + ''' + auth_token = self.create_jwt_token('activities', '*', feed_id='*') + + if ids is None and foreign_id_time is None: + raise TypeError('One the parameters ids or foreign_id_time must be provided and not None') + + if ids is not None and foreign_id_time is not None: + raise TypeError('At most one of the parameters ids or foreign_id_time must be provided') + + query_params = {} + + if ids is not None: + query_params['ids'] = 
','.join(ids) + + if foreign_id_time is not None: + foreign_ids, timestamps = zip(*foreign_id_time) + timestamps = map(_datetime_encoder, timestamps) + query_params['foreign_ids'] = ','.join(foreign_ids) + query_params['timestamps'] = ','.join(timestamps) + + return self.get('activities/', auth_token, params=query_params) + def create_redirect_url(self, target_url, user_id, events): ''' Creates a redirect url for tracking the given events in the context diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index f24562a..50827bf 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -4,6 +4,7 @@ from stream.exceptions import ApiKeyException, InputException import random import jwt + try: from unittest.case import TestCase except ImportError: @@ -19,6 +20,7 @@ from stream import serializer from requests.exceptions import MissingSchema from itertools import count +from uuid import uuid1 from uuid import uuid4 try: @@ -29,22 +31,20 @@ def connect_debug(): try: - key = os.environ['STREAM_KEY'] - secret = os.environ['STREAM_SECRET'] + key = os.environ["STREAM_KEY"] + secret = os.environ["STREAM_SECRET"] except KeyError: - print('To run the tests the STREAM_KEY and STREAM_SECRET variables ' - 'need to be available. \n' - 'Please create a pull request if you are an external ' - 'contributor, because these variables are automatically added ' - 'by Travis.') + print( + "To run the tests the STREAM_KEY and STREAM_SECRET variables " + "need to be available. \n" + "Please create a pull request if you are an external " + "contributor, because these variables are automatically added " + "by Travis." + ) sys.exit(1) - return stream.connect( - key, - secret, - location='qa', - timeout=30, - ) + return stream.connect(key, secret, location="qa", timeout=30) + client = connect_debug() @@ -53,13 +53,13 @@ def connect_debug(): def get_unique_postfix(): - return '---test_%s-feed_%s' % (test_identifier, next(counter)) + return "---test_%s-feed_%s" % (test_identifier, next(counter)) def getfeed(feed_slug, user_id): - ''' + """ Adds the random postfix to the user id - ''' + """ return client.feed(feed_slug, user_id + get_unique_postfix()) @@ -67,24 +67,27 @@ def api_request_parse_validator(test): def wrapper(meth): def _parse_response(*args, **kwargs): response = meth(*args, **kwargs) - test.assertTrue('duration' in response) + test.assertTrue("duration" in response) return response + return _parse_response + return wrapper class ClientTest(TestCase): - def setUp(self): - client._parse_response = api_request_parse_validator(self)(client._parse_response) + client._parse_response = api_request_parse_validator(self)( + client._parse_response + ) # DEBUG account details - user1 = getfeed('user', '1') - user2 = getfeed('user', '2') - aggregated2 = getfeed('aggregated', '2') - aggregated3 = getfeed('aggregated', '3') - topic1 = getfeed('topic', '1') - flat3 = getfeed('flat', '3') + user1 = getfeed("user", "1") + user2 = getfeed("user", "2") + aggregated2 = getfeed("aggregated", "2") + aggregated3 = getfeed("aggregated", "3") + topic1 = getfeed("topic", "1") + flat3 = getfeed("flat", "3") self.c = client self.user1 = user1 @@ -93,16 +96,10 @@ def setUp(self): self.aggregated3 = aggregated3 self.topic1 = topic1 self.flat3 = flat3 - print(user1.id) - print(user2.id) - print(aggregated2.id) - print(aggregated3.id) - print(topic1.id) - print(flat3.id) self.local_tests = False - if 'LOCAL' in os.environ: - self.local_tests = os.environ['LOCAL'] + if "LOCAL" in os.environ: + 
self.local_tests = os.environ["LOCAL"] def _test_sleep(self, production_wait, local_wait): """ @@ -119,112 +116,106 @@ def _test_sleep(self, production_wait, local_wait): time.sleep(sleep_time) def test_collections_url(self): - feed_url = client.get_full_url(relative_url='meta/', service_name='api') + feed_url = client.get_full_url(relative_url="meta/", service_name="api") if self.local_tests: - self.assertEqual( - feed_url, 'http://localhost:8000/api/v1.0/meta/') + self.assertEqual(feed_url, "http://localhost:8000/api/v1.0/meta/") else: self.assertEqual( - feed_url, 'https://qa-api.stream-io-api.com/api/v1.0/meta/') + feed_url, "https://qa-api.stream-io-api.com/api/v1.0/meta/" + ) def test_personalization_url(self): - feed_url = client.get_full_url(relative_url='recommended', service_name='personalization') + feed_url = client.get_full_url( + relative_url="recommended", service_name="personalization" + ) if self.local_tests: self.assertEqual( - feed_url, 'http://localhost:8000/personalization/v1.0/recommended') + feed_url, "http://localhost:8000/personalization/v1.0/recommended" + ) else: self.assertEqual( - feed_url, 'https://qa-personalization.stream-io-api.com/personalization/v1.0/recommended') + feed_url, + "https://qa-personalization.stream-io-api.com/personalization/v1.0/recommended", + ) def test_api_url(self): - feed_url = client.get_full_url(service_name='api', relative_url='feed/') + feed_url = client.get_full_url(service_name="api", relative_url="feed/") if self.local_tests: - self.assertEqual( - feed_url, 'http://localhost:8000/api/v1.0/feed/') + self.assertEqual(feed_url, "http://localhost:8000/api/v1.0/feed/") else: self.assertEqual( - feed_url, 'https://qa-api.stream-io-api.com/api/v1.0/feed/') + feed_url, "https://qa-api.stream-io-api.com/api/v1.0/feed/" + ) def test_collections_url_default(self): - client = stream.connect( - 'key', - 'secret', - ) - feed_url = client.get_full_url(relative_url='meta/', service_name='api') + client = stream.connect("key", "secret") + feed_url = client.get_full_url(relative_url="meta/", service_name="api") if not self.local_tests: - self.assertEqual( - feed_url, 'https://api.stream-io-api.com/api/v1.0/meta/') + self.assertEqual(feed_url, "https://api.stream-io-api.com/api/v1.0/meta/") def test_personalization_url_default(self): - client = stream.connect( - 'key', - 'secret', + client = stream.connect("key", "secret") + feed_url = client.get_full_url( + relative_url="recommended", service_name="personalization" ) - feed_url = client.get_full_url(relative_url='recommended', service_name='personalization') if not self.local_tests: self.assertEqual( - feed_url, 'https://personalization.stream-io-api.com/personalization/v1.0/recommended') + feed_url, + "https://personalization.stream-io-api.com/personalization/v1.0/recommended", + ) def test_api_url_default(self): - client = stream.connect( - 'key', - 'secret', - ) - feed_url = client.get_full_url(service_name='api', relative_url='feed/') + client = stream.connect("key", "secret") + feed_url = client.get_full_url(service_name="api", relative_url="feed/") if not self.local_tests: - self.assertEqual( - feed_url, 'https://api.stream-io-api.com/api/v1.0/feed/') + self.assertEqual(feed_url, "https://api.stream-io-api.com/api/v1.0/feed/") def test_collections_url_location(self): - client = stream.connect( - 'key', - 'secret', - location='tokyo', - ) - feed_url = client.get_full_url(relative_url='meta/', service_name='api') + client = stream.connect("key", "secret", location="tokyo") + feed_url = 
client.get_full_url(relative_url="meta/", service_name="api") if not self.local_tests: self.assertEqual( - feed_url, 'https://tokyo-api.stream-io-api.com/api/v1.0/meta/') + feed_url, "https://tokyo-api.stream-io-api.com/api/v1.0/meta/" + ) def test_personalization_url_location(self): - client = stream.connect( - 'key', - 'secret', - location='tokyo', + client = stream.connect("key", "secret", location="tokyo") + feed_url = client.get_full_url( + relative_url="recommended", service_name="personalization" ) - feed_url = client.get_full_url(relative_url='recommended', service_name='personalization') if not self.local_tests: self.assertEqual( - feed_url, 'https://tokyo-personalization.stream-io-api.com/personalization/v1.0/recommended') + feed_url, + "https://tokyo-personalization.stream-io-api.com/personalization/v1.0/recommended", + ) def test_api_url_location(self): - client = stream.connect( - 'key', - 'secret', - location='tokyo', - ) - feed_url = client.get_full_url(service_name='api', relative_url='feed/') + client = stream.connect("key", "secret", location="tokyo") + feed_url = client.get_full_url(service_name="api", relative_url="feed/") if not self.local_tests: self.assertEqual( - feed_url, 'https://tokyo-api.stream-io-api.com/api/v1.0/feed/') + feed_url, "https://tokyo-api.stream-io-api.com/api/v1.0/feed/" + ) def test_update_activities_create(self): - activities = [{ - 'actor': 'user:1', - 'verb': 'do', - 'object': 'object:1', - 'foreign_id': 'object:1', - 'time': datetime.datetime.utcnow().isoformat() - }] + activities = [ + { + "actor": "user:1", + "verb": "do", + "object": "object:1", + "foreign_id": "object:1", + "time": datetime.datetime.utcnow().isoformat(), + } + ] self.c.update_activities(activities) @@ -233,139 +224,148 @@ def test_update_activities_illegal_argument(self): def invalid_activities(): self.c.update_activities(activities) + self.assertRaises(TypeError, invalid_activities) def test_update_activities_update(self): activities = [] for i in range(0, 10): - activities.append({ - 'actor': 'user:1', - 'verb': 'do', - 'object': 'object:%s' % i, - 'foreign_id': 'object:%s' % i, - 'time': datetime.datetime.utcnow().isoformat() - }) - activities_created = self.user1.add_activities(activities)['activities'] + activities.append( + { + "actor": "user:1", + "verb": "do", + "object": "object:%s" % i, + "foreign_id": "object:%s" % i, + "time": datetime.datetime.utcnow().isoformat(), + } + ) + activities_created = self.user1.add_activities(activities)["activities"] activities = copy.deepcopy(activities_created) - for activity in activities: - activity.pop('id') - activity['popularity'] = 100 + activity.pop("id") + activity["popularity"] = 100 self.c.update_activities(activities) - activities_updated = self.user1.get(limit=len(activities))['results'] + activities_updated = self.user1.get(limit=len(activities))["results"] activities_updated.reverse() for i, activity in enumerate(activities_updated): - self.assertEqual(activities_created[i].get('id'), activity.get('id')) - self.assertEquals(activity['popularity'], 100) + self.assertEqual(activities_created[i].get("id"), activity.get("id")) + self.assertEqual(activity["popularity"], 100) def test_heroku(self): - url = 'https://thierry:pass@getstream.io/?app_id=1' - os.environ['STREAM_URL'] = url + url = "https://thierry:pass@getstream.io/?app_id=1" + os.environ["STREAM_URL"] = url client = stream.connect() - self.assertEqual(client.api_key, 'thierry') - self.assertEqual(client.api_secret, 'pass') - self.assertEqual(client.app_id, 
'1') + self.assertEqual(client.api_key, "thierry") + self.assertEqual(client.api_secret, "pass") + self.assertEqual(client.app_id, "1") def test_heroku_no_location(self): - url = 'https://bvt88g4kvc63:twc5ywfste5bm2ngqkzs7ukxk3pn96yweghjrxcmcrarnt3j4dqj3tucbhym5wfd@stream-io-api.com/?app_id=669' - os.environ['STREAM_URL'] = url + url = "https://bvt88g4kvc63:twc5ywfste5bm2ngqkzs7ukxk3pn96yweghjrxcmcrarnt3j4dqj3tucbhym5wfd@stream-io-api.com/?app_id=669" + os.environ["STREAM_URL"] = url client = stream.connect() - self.assertEqual(client.api_key, 'bvt88g4kvc63') + self.assertEqual(client.api_key, "bvt88g4kvc63") self.assertEqual( - client.api_secret, 'twc5ywfste5bm2ngqkzs7ukxk3pn96yweghjrxcmcrarnt3j4dqj3tucbhym5wfd') - self.assertEqual(client.app_id, '669') - feed_url = client.get_full_url('api', 'feed/') + client.api_secret, + "twc5ywfste5bm2ngqkzs7ukxk3pn96yweghjrxcmcrarnt3j4dqj3tucbhym5wfd", + ) + self.assertEqual(client.app_id, "669") + feed_url = client.get_full_url("api", "feed/") if self.local_tests: - self.assertEqual( - feed_url, 'http://localhost:8000/api/v1.0/feed/') + self.assertEqual(feed_url, "http://localhost:8000/api/v1.0/feed/") else: - self.assertEqual( - feed_url, 'https://api.stream-io-api.com/api/v1.0/feed/') + self.assertEqual(feed_url, "https://api.stream-io-api.com/api/v1.0/feed/") def test_heroku_location_compat(self): - url = 'https://ahj2ndz7gsan:gthc2t9gh7pzq52f6cky8w4r4up9dr6rju9w3fjgmkv6cdvvav2ufe5fv7e2r9qy@us-east.getstream.io/?app_id=1' - os.environ['STREAM_URL'] = url + url = "https://ahj2ndz7gsan:gthc2t9gh7pzq52f6cky8w4r4up9dr6rju9w3fjgmkv6cdvvav2ufe5fv7e2r9qy@us-east.getstream.io/?app_id=1" + os.environ["STREAM_URL"] = url client = stream.connect() - self.assertEqual(client.api_key, 'ahj2ndz7gsan') + self.assertEqual(client.api_key, "ahj2ndz7gsan") self.assertEqual( - client.api_secret, 'gthc2t9gh7pzq52f6cky8w4r4up9dr6rju9w3fjgmkv6cdvvav2ufe5fv7e2r9qy') + client.api_secret, + "gthc2t9gh7pzq52f6cky8w4r4up9dr6rju9w3fjgmkv6cdvvav2ufe5fv7e2r9qy", + ) - feed_url = client.get_full_url('api', 'feed/') + feed_url = client.get_full_url("api", "feed/") if self.local_tests: - self.assertEqual( - feed_url, 'http://localhost:8000/api/v1.0/feed/') + self.assertEqual(feed_url, "http://localhost:8000/api/v1.0/feed/") else: self.assertEqual( - feed_url, 'https://us-east-api.stream-io-api.com/api/v1.0/feed/') + feed_url, "https://us-east-api.stream-io-api.com/api/v1.0/feed/" + ) - self.assertEqual(client.app_id, '1') + self.assertEqual(client.app_id, "1") def test_heroku_location(self): - url = 'https://ahj2ndz7gsan:gthc2t9gh7pzq52f6cky8w4r4up9dr6rju9w3fjgmkv6cdvvav2ufe5fv7e2r9qy@us-east.stream-io-api.com/?app_id=1' - os.environ['STREAM_URL'] = url + url = "https://ahj2ndz7gsan:gthc2t9gh7pzq52f6cky8w4r4up9dr6rju9w3fjgmkv6cdvvav2ufe5fv7e2r9qy@us-east.stream-io-api.com/?app_id=1" + os.environ["STREAM_URL"] = url client = stream.connect() - self.assertEqual(client.api_key, 'ahj2ndz7gsan') + self.assertEqual(client.api_key, "ahj2ndz7gsan") self.assertEqual( - client.api_secret, 'gthc2t9gh7pzq52f6cky8w4r4up9dr6rju9w3fjgmkv6cdvvav2ufe5fv7e2r9qy') + client.api_secret, + "gthc2t9gh7pzq52f6cky8w4r4up9dr6rju9w3fjgmkv6cdvvav2ufe5fv7e2r9qy", + ) - feed_url = client.get_full_url('api', 'feed/') + feed_url = client.get_full_url("api", "feed/") if self.local_tests: - self.assertEqual( - feed_url, 'http://localhost:8000/api/v1.0/feed/') + self.assertEqual(feed_url, "http://localhost:8000/api/v1.0/feed/") else: self.assertEqual( - feed_url, 
'https://us-east-api.stream-io-api.com/api/v1.0/feed/') - self.assertEqual(client.app_id, '1') + feed_url, "https://us-east-api.stream-io-api.com/api/v1.0/feed/" + ) + self.assertEqual(client.app_id, "1") def test_heroku_overwrite(self): - url = 'https://thierry:pass@getstream.io/?app_id=1' - os.environ['STREAM_URL'] = url - client = stream.connect('a', 'b', 'c') - self.assertEqual(client.api_key, 'a') - self.assertEqual(client.api_secret, 'b') - self.assertEqual(client.app_id, 'c') + url = "https://thierry:pass@getstream.io/?app_id=1" + os.environ["STREAM_URL"] = url + client = stream.connect("a", "b", "c") + self.assertEqual(client.api_key, "a") + self.assertEqual(client.api_secret, "b") + self.assertEqual(client.app_id, "c") def test_location_support(self): - client = stream.connect('a', 'b', 'c', location='us-east') + client = stream.connect("a", "b", "c", location="us-east") - full_location = 'https://us-east-api.stream-io-api.com/api/v1.0/feed/' + full_location = "https://us-east-api.stream-io-api.com/api/v1.0/feed/" if self.local_tests: - full_location = 'http://localhost:8000/api/v1.0/feed/' + full_location = "http://localhost:8000/api/v1.0/feed/" - self.assertEqual(client.location, 'us-east') - feed_url = client.get_full_url('api', 'feed/') + self.assertEqual(client.location, "us-east") + feed_url = client.get_full_url("api", "feed/") self.assertEqual(feed_url, full_location) # test a wrong location, can only work on non-local test running if not self.local_tests: - client = stream.connect('a', - 'b', - 'c', - location='nonexistant') + client = stream.connect("a", "b", "c", location="nonexistant") + def get_feed(): - f = client.feed('user', '1').get() + f = client.feed("user", "1").get() + self.assertRaises(requests.exceptions.ConnectionError, get_feed) def test_invalid_feed_values(self): def invalid_feed_slug(): - client.feed('user:', '1') + client.feed("user:", "1") + self.assertRaises(ValueError, invalid_feed_slug) def invalid_user_id(): - client.feed('user:', '1-a') + client.feed("user:", "1-a") + self.assertRaises(ValueError, invalid_user_id) def invalid_follow_feed_slug(): - self.user1.follow('user:', '1') + self.user1.follow("user:", "1") + self.assertRaises(ValueError, invalid_follow_feed_slug) def invalid_follow_user_id(): - self.user1.follow('user', '1-:a') + self.user1.follow("user", "1-:a") + self.assertRaises(ValueError, invalid_follow_user_id) def test_token_retrieval(self): @@ -373,246 +373,233 @@ def test_token_retrieval(self): self.user1.get_readonly_token() def test_add_activity(self): - feed = getfeed('user', 'py1') - activity_data = {'actor': 1, 'verb': 'tweet', 'object': 1} + feed = getfeed("user", "py1") + activity_data = {"actor": 1, "verb": "tweet", "object": 1} response = feed.add_activity(activity_data) - activity_id = response['id'] - activities = feed.get(limit=1)['results'] - self.assertEqual(activities[0]['id'], activity_id) + activity_id = response["id"] + activities = feed.get(limit=1)["results"] + self.assertEqual(activities[0]["id"], activity_id) def test_add_activity_to_inplace_change(self): - feed = getfeed('user', 'py1') - team_feed = getfeed('user', 'teamy') - activity_data = {'actor': 1, 'verb': 'tweet', 'object': 1} - activity_data['to'] = [team_feed.id] + feed = getfeed("user", "py1") + team_feed = getfeed("user", "teamy") + activity_data = {"actor": 1, "verb": "tweet", "object": 1} + activity_data["to"] = [team_feed.id] feed.add_activity(activity_data) - - self.assertEqual(activity_data['to'], [team_feed.id]) + 
self.assertEqual(activity_data["to"], [team_feed.id]) def test_add_activities_to_inplace_change(self): - feed = getfeed('user', 'py1') - team_feed = getfeed('user', 'teamy') - activity_data = {'actor': 1, 'verb': 'tweet', 'object': 1} - activity_data['to'] = [team_feed.id] + feed = getfeed("user", "py1") + team_feed = getfeed("user", "teamy") + activity_data = {"actor": 1, "verb": "tweet", "object": 1} + activity_data["to"] = [team_feed.id] feed.add_activities([activity_data]) - - self.assertEqual(activity_data['to'], [team_feed.id]) + self.assertEqual(activity_data["to"], [team_feed.id]) def test_add_activity_to(self): # test for sending an activities to the team feed using to - feeds = ['user', 'teamy', 'team_follower'] - user_feed, team_feed, team_follower_feed = map(lambda x: getfeed('user', x), feeds) + feeds = ["user", "teamy", "team_follower"] + user_feed, team_feed, team_follower_feed = map( + lambda x: getfeed("user", x), feeds + ) team_follower_feed.follow(team_feed.slug, team_feed.user_id) - activity_data = { - 'actor': 1, 'verb': 'tweet', 'object': 1, - 'to': [team_feed.id] - } + activity_data = {"actor": 1, "verb": "tweet", "object": 1, "to": [team_feed.id]} response = user_feed.add_activity(activity_data) - activity_id = response['id'] - + activity_id = response["id"] # see if the new activity is also in the team feed - activities = team_feed.get(limit=1)['results'] - self.assertEqual(activities[0]['id'], activity_id) - self.assertEqual(activities[0]['origin'], None) + activities = team_feed.get(limit=1)["results"] + self.assertEqual(activities[0]["id"], activity_id) + self.assertEqual(activities[0]["origin"], None) # see if the fanout process also works - activities = team_follower_feed.get(limit=1)['results'] - self.assertEqual(activities[0]['id'], activity_id) - self.assertEqual(activities[0]['origin'], team_feed.id) + activities = team_follower_feed.get(limit=1)["results"] + self.assertEqual(activities[0]["id"], activity_id) + self.assertEqual(activities[0]["origin"], team_feed.id) # and validate removing also works - user_feed.remove_activity(response['id']) + user_feed.remove_activity(response["id"]) # check the user pyto feed - activities = team_feed.get(limit=1)['results'] + activities = team_feed.get(limit=1)["results"] self.assertFirstActivityIDNotEqual(activities, activity_id) # and the flat feed - activities = team_follower_feed.get(limit=1)['results'] + activities = team_follower_feed.get(limit=1)["results"] self.assertFirstActivityIDNotEqual(activities, activity_id) def test_add_activity_to_type_error(self): - user_feed = getfeed('user', '1') - activity_data = { - 'actor': 1, 'verb': 'tweet', 'object': 1, - 'to': 'string' - } + user_feed = getfeed("user", "1") + activity_data = {"actor": 1, "verb": "tweet", "object": 1, "to": "string"} self.assertRaises(TypeError, user_feed.add_activity, activity_data) def assertFirstActivityIDEqual(self, activities, correct_activity_id): activity_id = None if activities: - activity_id = activities[0]['id'] + activity_id = activities[0]["id"] self.assertEqual(activity_id, correct_activity_id) def assertFirstActivityIDNotEqual(self, activities, correct_activity_id): activity_id = None if activities: - activity_id = activities[0]['id'] + activity_id = activities[0]["id"] self.assertNotEqual(activity_id, correct_activity_id) def test_remove_activity(self): - activity_data = {'actor': 1, 'verb': 'tweet', 'object': 1} + activity_data = {"actor": 1, "verb": "tweet", "object": 1} - activity_id = 
self.user1.add_activity(activity_data)['id'] - activities = self.user1.get(limit=8)['results'] + activity_id = self.user1.add_activity(activity_data)["id"] + activities = self.user1.get(limit=8)["results"] self.assertEqual(len(activities), 1) self.user1.remove_activity(activity_id) # verify that no activities were returned - activities = self.user1.get(limit=8)['results'] + activities = self.user1.get(limit=8)["results"] self.assertEqual(len(activities), 0) def test_remove_activity_by_foreign_id(self): activity_data = { - 'actor': 1, 'verb': 'tweet', 'object': 1, 'foreign_id': 'tweet:10'} + "actor": 1, + "verb": "tweet", + "object": 1, + "foreign_id": "tweet:10", + } - self.user1.add_activity(activity_data)['id'] - activities = self.user1.get(limit=8)['results'] + self.user1.add_activity(activity_data)["id"] + activities = self.user1.get(limit=8)["results"] self.assertEqual(len(activities), 1) - self.user1.remove_activity(foreign_id='tweet:10') + self.user1.remove_activity(foreign_id="tweet:10") # verify that no activities were returned - activities = self.user1.get(limit=8)['results'] + activities = self.user1.get(limit=8)["results"] self.assertEqual(len(activities), 0) # verify this doesnt raise an error, but fails silently - self.user1.remove_activity(foreign_id='tweet:unknowandmissing') + self.user1.remove_activity(foreign_id="tweet:unknowandmissing") def test_add_activities(self): activity_data = [ - {'actor': 1, 'verb': 'tweet', 'object': 1}, - {'actor': 2, 'verb': 'watch', 'object': 2}, + {"actor": 1, "verb": "tweet", "object": 1}, + {"actor": 2, "verb": "watch", "object": 2}, ] response = self.user1.add_activities(activity_data) - activity_ids = [a['id'] for a in response['activities']] - activities = self.user1.get(limit=2)['results'] - get_activity_ids = [a['id'] for a in activities] + activity_ids = [a["id"] for a in response["activities"]] + activities = self.user1.get(limit=2)["results"] + get_activity_ids = [a["id"] for a in activities] self.assertEqual(get_activity_ids, activity_ids[::-1]) def test_add_activities_to(self): - pyto2 = getfeed('user', 'pyto2') - pyto3 = getfeed('user', 'pyto3') - print(pyto2.id) - print(pyto3.id) + pyto2 = getfeed("user", "pyto2") + pyto3 = getfeed("user", "pyto3") to = [pyto2.id, pyto3.id] activity_data = [ - {'actor': 1, 'verb': 'tweet', 'object': 1, 'to': to}, - {'actor': 2, 'verb': 'watch', 'object': 2, 'to': to}, + {"actor": 1, "verb": "tweet", "object": 1, "to": to}, + {"actor": 2, "verb": "watch", "object": 2, "to": to}, ] response = self.user1.add_activities(activity_data) - activity_ids = [a['id'] for a in response['activities']] - activities = self.user1.get(limit=2)['results'] - get_activity_ids = [a['id'] for a in activities] + activity_ids = [a["id"] for a in response["activities"]] + activities = self.user1.get(limit=2)["results"] + get_activity_ids = [a["id"] for a in activities] self.assertEqual(get_activity_ids, activity_ids[::-1]) # test first target - activities = pyto2.get(limit=2)['results'] - get_activity_ids = [a['id'] for a in activities] + activities = pyto2.get(limit=2)["results"] + get_activity_ids = [a["id"] for a in activities] self.assertEqual(get_activity_ids, activity_ids[::-1]) # test second target - activities = pyto3.get(limit=2)['results'] - get_activity_ids = [a['id'] for a in activities] + activities = pyto3.get(limit=2)["results"] + get_activity_ids = [a["id"] for a in activities] self.assertEqual(get_activity_ids, activity_ids[::-1]) def test_follow_and_source(self): - feed = getfeed('user', 
'test_follow') - agg_feed = getfeed('aggregated', 'test_follow') + feed = getfeed("user", "test_follow") + agg_feed = getfeed("aggregated", "test_follow") actor_id = random.randint(10, 100000) - activity_data = {'actor': actor_id, 'verb': 'tweet', 'object': 1} - activity_id = feed.add_activity(activity_data)['id'] + activity_data = {"actor": actor_id, "verb": "tweet", "object": 1} + activity_id = feed.add_activity(activity_data)["id"] agg_feed.follow(feed.slug, feed.user_id) - - activities = agg_feed.get(limit=3)['results'] + activities = agg_feed.get(limit=3)["results"] activity = self._get_first_aggregated_activity(activities) - activity_id_found = activity['id'] if activity is not None else None - self.assertEqual(activity['origin'], feed.id) + activity_id_found = activity["id"] if activity is not None else None + self.assertEqual(activity["origin"], feed.id) self.assertEqual(activity_id_found, activity_id) def test_follow_activity_copy_limit(self): - feed = getfeed('user', 'test_follow_acl') - feed1 = getfeed('user', 'test_follow_acl1') + feed = getfeed("user", "test_follow_acl") + feed1 = getfeed("user", "test_follow_acl1") actor_id = random.randint(10, 100000) - feed1.add_activity({ 'actor': actor_id, 'verb': 'tweet', 'object': 1 }) + feed1.add_activity({"actor": actor_id, "verb": "tweet", "object": 1}) feed.follow(feed1.slug, feed1.user_id, activity_copy_limit=0) - - activities = feed.get(limit=5)['results'] + activities = feed.get(limit=5)["results"] self.assertEqual(len(activities), 0) def test_follow_and_delete(self): - user_feed = getfeed('user', 'test_follow') - agg_feed = getfeed('aggregated', 'test_follow') + user_feed = getfeed("user", "test_follow") + agg_feed = getfeed("aggregated", "test_follow") actor_id = random.randint(10, 100000) - activity_data = {'actor': actor_id, 'verb': 'tweet', 'object': 1} - activity_id = user_feed.add_activity(activity_data)['id'] + activity_data = {"actor": actor_id, "verb": "tweet", "object": 1} + activity_id = user_feed.add_activity(activity_data)["id"] agg_feed.follow(user_feed.slug, user_feed.user_id) user_feed.remove_activity(activity_id) - activities = agg_feed.get(limit=3)['results'] + activities = agg_feed.get(limit=3)["results"] activity = self._get_first_aggregated_activity(activities) - activity_id_found = (activity['id'] if activity is not None - else None) + activity_id_found = activity["id"] if activity is not None else None self.assertNotEqual(activity_id_found, activity_id) - # def test_follow_private(self): - # feed = getfeed('secret', 'py1') - # agg_feed = getfeed('aggregated', 'test_follow_private') - # actor_id = random.randint(10, 100000) - # activity_data = {'actor': actor_id, 'verb': 'tweet', 'object': 1} - # activity_id = feed.add_activity(activity_data)['id'] - # agg_feed.follow(feed.slug, feed.user_id) - # activities = agg_feed.get(limit=3)['results'] - # activity = self._get_first_aggregated_activity(activities) - # activity_id_found = activity['id'] if activity is not None else None - # self.assertEqual(activity_id_found, activity_id) - def test_flat_follow(self): - feed = getfeed('user', 'test_flat_follow') - activity_data = {'actor': 1, 'verb': 'tweet', 'object': 1} - activity_id = feed.add_activity(activity_data)['id'] + feed = getfeed("user", "test_flat_follow") + activity_data = {"actor": 1, "verb": "tweet", "object": 1} + activity_id = feed.add_activity(activity_data)["id"] self.flat3.follow(feed.slug, feed.user_id) - - activities = self.flat3.get(limit=3)['results'] + activities = 
self.flat3.get(limit=3)["results"] activity = self._get_first_activity(activities) - activity_id_found = activity['id'] if activity is not None else None + activity_id_found = activity["id"] if activity is not None else None self.assertEqual(activity_id_found, activity_id) self.flat3.unfollow(feed.slug, feed.user_id) - activities = self.flat3.get(limit=3)['results'] + activities = self.flat3.get(limit=3)["results"] self.assertEqual(len(activities), 0) def test_flat_follow_no_copy(self): - feed = getfeed('user', 'test_flat_follow_no_copy') - follower = getfeed('flat', 'test_flat_follow_no_copy') - activity_data = {'actor': 1, 'verb': 'tweet', 'object': 1} - feed.add_activity(activity_data)['id'] + feed = getfeed("user", "test_flat_follow_no_copy") + follower = getfeed("flat", "test_flat_follow_no_copy") + activity_data = {"actor": 1, "verb": "tweet", "object": 1} + feed.add_activity(activity_data)["id"] follower.follow(feed.slug, feed.user_id, activity_copy_limit=0) - - activities = follower.get(limit=3)['results'] + activities = follower.get(limit=3)["results"] self.assertEqual(activities, []) def test_flat_follow_copy_one(self): - feed = getfeed('user', 'test_flat_follow_copy_one') - follower = getfeed('flat', 'test_flat_follow_copy_one') - activity_data = {'actor': 1, 'verb': 'tweet', 'object': 1, 'foreign_id': 'test:1'} - feed.add_activity(activity_data)['id'] - activity_data = {'actor': 1, 'verb': 'tweet', 'object': 1, 'foreign_id': 'test:2'} - feed.add_activity(activity_data)['id'] + feed = getfeed("user", "test_flat_follow_copy_one") + follower = getfeed("flat", "test_flat_follow_copy_one") + activity_data = { + "actor": 1, + "verb": "tweet", + "object": 1, + "foreign_id": "test:1", + } + feed.add_activity(activity_data)["id"] + activity_data = { + "actor": 1, + "verb": "tweet", + "object": 1, + "foreign_id": "test:2", + } + feed.add_activity(activity_data)["id"] follower.follow(feed.slug, feed.user_id, activity_copy_limit=1) - - activities = follower.get(limit=3)['results'] + activities = follower.get(limit=3)["results"] # verify we get the latest activity - self.assertEqual(activities[0]['foreign_id'], 'test:2') + self.assertEqual(activities[0]["foreign_id"], "test:2") def _get_first_aggregated_activity(self, activities): try: - return activities[0]['activities'][0] + return activities[0]["activities"][0] except IndexError as e: pass @@ -622,222 +609,200 @@ def _get_first_activity(self, activities): except IndexError as e: pass - def test_unfollow(self): - f = getfeed('user', 'asocialpython').id.split(':') - def test_empty_followings(self): - asocial = getfeed('user', 'asocialpython') + asocial = getfeed("user", "asocialpython") followings = asocial.following() - self.assertEqual(followings['results'], []) + self.assertEqual(followings["results"], []) def test_get_followings(self): - social = getfeed('user', 'psocial') - social.follow('user', 'apy') - social.follow('user', 'bpy') - social.follow('user', 'cpy') + social = getfeed("user", "psocial") + social.follow("user", "apy") + social.follow("user", "bpy") + social.follow("user", "cpy") followings = social.following(offset=0, limit=2) - self.assertEqual(len(followings['results']), 2) - self.assertEqual(followings['results'][0]['feed_id'], social.id) - self.assertEqual(followings['results'][0]['target_id'], 'user:cpy') + self.assertEqual(len(followings["results"]), 2) + self.assertEqual(followings["results"][0]["feed_id"], social.id) + self.assertEqual(followings["results"][0]["target_id"], "user:cpy") followings = 
social.following(offset=1, limit=2) - self.assertEqual(len(followings['results']), 2) - self.assertEqual(followings['results'][0]['feed_id'], social.id) - self.assertEqual(followings['results'][0]['target_id'], 'user:bpy') + self.assertEqual(len(followings["results"]), 2) + self.assertEqual(followings["results"][0]["feed_id"], social.id) + self.assertEqual(followings["results"][0]["target_id"], "user:bpy") def test_empty_followers(self): - asocial = getfeed('user', 'asocialpython') + asocial = getfeed("user", "asocialpython") followers = asocial.followers() - self.assertEqual(len(followers['results']), 0) - self.assertEqual(followers['results'], []) + self.assertEqual(len(followers["results"]), 0) + self.assertEqual(followers["results"], []) def test_get_followers(self): - social = getfeed('user', 'psocial') - spammy1 = getfeed('user', 'spammy1') - spammy2 = getfeed('user', 'spammy2') - spammy3 = getfeed('user', 'spammy3') + social = getfeed("user", "psocial") + spammy1 = getfeed("user", "spammy1") + spammy2 = getfeed("user", "spammy2") + spammy3 = getfeed("user", "spammy3") for feed in [spammy1, spammy2, spammy3]: - feed.follow('user', social.user_id) + feed.follow("user", social.user_id) followers = social.followers(offset=0, limit=2) - self.assertEqual(len(followers['results']), 2) - self.assertEqual(followers['results'][0]['feed_id'], spammy3.id) - self.assertEqual(followers['results'][0]['target_id'], social.id) + self.assertEqual(len(followers["results"]), 2) + self.assertEqual(followers["results"][0]["feed_id"], spammy3.id) + self.assertEqual(followers["results"][0]["target_id"], social.id) followers = social.followers(offset=1, limit=2) - self.assertEqual(len(followers['results']), 2) - self.assertEqual(followers['results'][0]['feed_id'], spammy2.id) - self.assertEqual(followers['results'][0]['target_id'], social.id) + self.assertEqual(len(followers["results"]), 2) + self.assertEqual(followers["results"][0]["feed_id"], spammy2.id) + self.assertEqual(followers["results"][0]["target_id"], social.id) def test_empty_do_i_follow(self): - social = getfeed('user', 'psocial') - social.follow('user', 'apy') - social.follow('user', 'bpy') - followings = social.following(feeds=['user:missingpy']) - self.assertEqual(len(followings['results']), 0) - self.assertEqual(followings['results'], []) + social = getfeed("user", "psocial") + social.follow("user", "apy") + social.follow("user", "bpy") + followings = social.following(feeds=["user:missingpy"]) + self.assertEqual(len(followings["results"]), 0) + self.assertEqual(followings["results"], []) def test_do_i_follow(self): - social = getfeed('user', 'psocial') - social.follow('user', 'apy') - social.follow('user', 'bpy') - followings = social.following(feeds=['user:apy']) - self.assertEqual(len(followings['results']), 1) - self.assertEqual(followings['results'][0]['feed_id'], social.id) - self.assertEqual(followings['results'][0]['target_id'], 'user:apy') + social = getfeed("user", "psocial") + social.follow("user", "apy") + social.follow("user", "bpy") + followings = social.following(feeds=["user:apy"]) + self.assertEqual(len(followings["results"]), 1) + self.assertEqual(followings["results"][0]["feed_id"], social.id) + self.assertEqual(followings["results"][0]["target_id"], "user:apy") def test_update_activity_to_targets(self): time = datetime.datetime.utcnow().isoformat() - foreign_id = 'user:1' + foreign_id = "user:1" activity_data = { - 'actor': 1, - 'verb': 'tweet', - 'object': 1, - 'foreign_id': foreign_id, - 'time': time, + "actor": 1, + 
"verb": "tweet", + "object": 1, + "foreign_id": foreign_id, + "time": time, } - activity_data['to'] = ['user:1', 'user:2'] + activity_data["to"] = ["user:1", "user:2"] self.user1.add_activity(activity_data) - ret = self.user1.update_activity_to_targets(foreign_id, time, new_targets=['user:3', 'user:2']) - self.assertEqual(len(ret['activity']['to']), 2) - self.assertTrue('user:2' in ret['activity']['to']) - self.assertTrue('user:3' in ret['activity']['to']) - - ret = self.user1.update_activity_to_targets(foreign_id, time, added_targets=['user:4', 'user:5'], removed_targets=['user:3']) - self.assertEqual(len(ret['activity']['to']), 3) - self.assertTrue('user:2' in ret['activity']['to']) - self.assertTrue('user:4' in ret['activity']['to']) - self.assertTrue('user:5' in ret['activity']['to']) - + ret = self.user1.update_activity_to_targets( + foreign_id, time, new_targets=["user:3", "user:2"] + ) + self.assertEqual(len(ret["activity"]["to"]), 2) + self.assertTrue("user:2" in ret["activity"]["to"]) + self.assertTrue("user:3" in ret["activity"]["to"]) + + ret = self.user1.update_activity_to_targets( + foreign_id, + time, + added_targets=["user:4", "user:5"], + removed_targets=["user:3"], + ) + self.assertEqual(len(ret["activity"]["to"]), 3) + self.assertTrue("user:2" in ret["activity"]["to"]) + self.assertTrue("user:4" in ret["activity"]["to"]) + self.assertTrue("user:5" in ret["activity"]["to"]) def test_get(self): - activity_data = {'actor': 1, 'verb': 'tweet', 'object': 1} - activity_id = self.user1.add_activity(activity_data)['id'] - activity_data = {'actor': 2, 'verb': 'add', 'object': 2} - activity_id_two = self.user1.add_activity(activity_data)['id'] - activity_data = {'actor': 3, 'verb': 'watch', 'object': 2} - activity_id_three = self.user1.add_activity(activity_data)['id'] - activities = self.user1.get(limit=2)['results'] + activity_data = {"actor": 1, "verb": "tweet", "object": 1} + activity_id = self.user1.add_activity(activity_data)["id"] + activity_data = {"actor": 2, "verb": "add", "object": 2} + activity_id_two = self.user1.add_activity(activity_data)["id"] + activity_data = {"actor": 3, "verb": "watch", "object": 2} + activity_id_three = self.user1.add_activity(activity_data)["id"] + activities = self.user1.get(limit=2)["results"] # verify the first two results self.assertEqual(len(activities), 2) - self.assertEqual(activities[0]['id'], activity_id_three) - self.assertEqual(activities[1]['id'], activity_id_two) + self.assertEqual(activities[0]["id"], activity_id_three) + self.assertEqual(activities[1]["id"], activity_id_two) # try offset based - activities = self.user1.get(limit=2, offset=1)['results'] - self.assertEqual(activities[0]['id'], activity_id_two) + activities = self.user1.get(limit=2, offset=1)["results"] + self.assertEqual(activities[0]["id"], activity_id_two) # try id_lt based - activities = self.user1.get(limit=2, id_lt=activity_id_two)['results'] - self.assertEqual(activities[0]['id'], activity_id) - - def test_mark_read(self): - notification_feed = getfeed('notification', 'py3') - activity_data = {'actor': 1, 'verb': 'tweet', 'object': 1} - print(notification_feed.add_activity(activity_data)['id']) - activity_data = {'actor': 2, 'verb': 'add', 'object': 2} - print(notification_feed.add_activity(activity_data)['id']) - activity_data = {'actor': 3, 'verb': 'watch', 'object': 2} - print(notification_feed.add_activity(activity_data)['id']) - - - activities = notification_feed.get(limit=3)['results'] - from pprint import pprint - print(len(activities)) - 
pprint(activities) - for activity in activities: - pprint(activity) - self.assertFalse(activity['is_read']) - activities = notification_feed.get(mark_read=True)['results'] - activities = notification_feed.get(limit=2)['results'] - self.assertTrue(activities[0]['is_read']) - self.assertTrue(activities[1]['is_read']) + activities = self.user1.get(limit=2, id_lt=activity_id_two)["results"] + self.assertEqual(activities[0]["id"], activity_id) def test_get_not_marked_seen(self): - notification_feed = getfeed('notification', 'test_mark_seen') - print(notification_feed.add_activity({'actor': 1, 'verb': 'tweet', 'object': 1})['id']) - print(notification_feed.add_activity({'actor': 2, 'verb': 'tweet', 'object': 2})['id']) - print(notification_feed.add_activity({'actor': 3, 'verb': 'tweet', 'object': 3})['id']) - - activities = notification_feed.get(limit=3)['results'] - from pprint import pprint - print(len(activities)) - pprint(activities) + notification_feed = getfeed("notification", "test_mark_seen") + activities = notification_feed.get(limit=3)["results"] for activity in activities: - pprint(activity) - self.assertFalse(activity['is_seen']) + self.assertFalse(activity["is_seen"]) def test_mark_seen_on_get(self): - notification_feed = getfeed('notification', 'test_mark_seen') - activities = notification_feed.get(limit=100)['results'] + notification_feed = getfeed("notification", "test_mark_seen") + activities = notification_feed.get(limit=100)["results"] for activity in activities: - notification_feed.remove_activity(activity['id']) - + notification_feed.remove_activity(activity["id"]) old_activities = [ - notification_feed.add_activity({'actor': 1, 'verb': 'tweet', 'object': 1}), - notification_feed.add_activity({'actor': 2, 'verb': 'add', 'object': 2}), - notification_feed.add_activity({'actor': 3, 'verb': 'watch', 'object': 3}), + notification_feed.add_activity({"actor": 1, "verb": "tweet", "object": 1}), + notification_feed.add_activity({"actor": 2, "verb": "add", "object": 2}), + notification_feed.add_activity({"actor": 3, "verb": "watch", "object": 3}), ] - notification_feed.get(mark_seen=[old_activities[0]['id'], old_activities[1]['id']]) + notification_feed.get( + mark_seen=[old_activities[0]["id"], old_activities[1]["id"]] + ) - activities = notification_feed.get(limit=3)['results'] + activities = notification_feed.get(limit=3)["results"] # is the seen state correct for activity in activities: # using a loop in case we're retrieving activities in a different order than old_activities - if old_activities[0]['id'] == activity['id']: - self.assertTrue(activity['is_seen']) - if old_activities[1]['id'] == activity['id']: - self.assertTrue(activity['is_seen']) - if old_activities[2]['id'] == activity['id']: - self.assertFalse(activity['is_seen']) + if old_activities[0]["id"] == activity["id"]: + self.assertTrue(activity["is_seen"]) + if old_activities[1]["id"] == activity["id"]: + self.assertTrue(activity["is_seen"]) + if old_activities[2]["id"] == activity["id"]: + self.assertFalse(activity["is_seen"]) # see if the state properly resets after we add another activity - notification_feed.add_activity({'actor': 3, 'verb': 'watch', 'object': 3}) # ['id'] - activities = notification_feed.get(limit=3)['results'] - self.assertFalse(activities[0]['is_seen']) - self.assertEqual(len(activities[0]['activities']), 2) + notification_feed.add_activity( + {"actor": 3, "verb": "watch", "object": 3} + ) # ['id'] + activities = notification_feed.get(limit=3)["results"] + 
self.assertFalse(activities[0]["is_seen"]) + self.assertEqual(len(activities[0]["activities"]), 2) def test_mark_read_by_id(self): - notification_feed = getfeed('notification', 'py2') - print(notification_feed.add_activity({'actor': 1, 'verb': 'tweet', 'object': 1})['id']) # ['id'] - print(notification_feed.add_activity({'actor': 2, 'verb': 'tweet', 'object': 2})['id']) # ['id'] - print(notification_feed.add_activity({'actor': 3, 'verb': 'tweet', 'object': 2})['id']) # ['id'] + notification_feed = getfeed("notification", "py2") - activities = notification_feed.get(limit=3)['results'] + activities = notification_feed.get(limit=3)["results"] ids = [] - from pprint import pprint - print(len(activities)) - pprint(activities) for activity in activities: - pprint(activity) - ids.append(activity['id']) - self.assertFalse(activity['is_read']) + ids.append(activity["id"]) + self.assertFalse(activity["is_read"]) ids = ids[:2] notification_feed.get(mark_read=ids) - activities = notification_feed.get(limit=3)['results'] + activities = notification_feed.get(limit=3)["results"] for activity in activities: - if activity['id'] in ids: - self.assertTrue(activity['is_read']) - self.assertFalse(activity['is_seen']) + if activity["id"] in ids: + self.assertTrue(activity["is_read"]) + self.assertFalse(activity["is_seen"]) def test_api_key_exception(self): self.c = stream.connect( - '5crf3bhfzesnMISSING', - 'tfq2sdqpj9g446sbv653x3aqmgn33hsn8uzdc9jpskaw8mj6vsnhzswuwptuj9su' + "5crf3bhfzesnMISSING", + "tfq2sdqpj9g446sbv653x3aqmgn33hsn8uzdc9jpskaw8mj6vsnhzswuwptuj9su", + ) + self.user1 = self.c.feed("user", "1") + activity_data = { + "actor": 1, + "verb": "tweet", + "object": 1, + "debug_example_undefined": "test", + } + self.assertRaises( + ApiKeyException, lambda: self.user1.add_activity(activity_data) ) - self.user1 = self.c.feed('user', '1') - activity_data = {'actor': 1, 'verb': 'tweet', - 'object': 1, 'debug_example_undefined': 'test'} - self.assertRaises(ApiKeyException, lambda: - self.user1.add_activity(activity_data)) def test_complex_field(self): - activity_data = {'actor': 1, 'verb': 'tweet', - 'object': 1, 'participants': ['Tommaso', 'Thierry']} + activity_data = { + "actor": 1, + "verb": "tweet", + "object": 1, + "participants": ["Tommaso", "Thierry"], + } response = self.user1.add_activity(activity_data) - activity_id = response['id'] - activities = self.user1.get(limit=1)['results'] - self.assertEqual(activities[0]['id'], activity_id) - self.assertEqual(activities[0]['participants'], ['Tommaso', 'Thierry']) + activity_id = response["id"] + activities = self.user1.get(limit=1)["results"] + self.assertEqual(activities[0]["id"], activity_id) + self.assertEqual(activities[0]["participants"], ["Tommaso", "Thierry"]) def assertDatetimeAlmostEqual(self, a, b): difference = abs(a - b) @@ -847,78 +812,89 @@ def assertDatetimeAlmostEqual(self, a, b): def assertClearlyNotEqual(self, a, b): difference = abs(a - b) if difference < datetime.timedelta(milliseconds=1): - raise ValueError('the dates are too close') + raise ValueError("the dates are too close") def test_uniqueness(self): - ''' + """ In order for things to be considere unique they need: a.) The same time and activity data b.) 
The same time and foreign id - ''' - from pprint import pprint + """ + utcnow = datetime.datetime.utcnow() - activity_data = { - 'actor': 1, 'verb': 'tweet', 'object': 1, 'time': utcnow} + activity_data = {"actor": 1, "verb": "tweet", "object": 1, "time": utcnow} response = self.user1.add_activity(activity_data) response = self.user1.add_activity(activity_data) - - activities = self.user1.get(limit=2)['results'] - self.assertDatetimeAlmostEqual(activities[0]['time'], utcnow) - if (len(activities) > 1): - self.assertClearlyNotEqual(activities[1]['time'], utcnow) + activities = self.user1.get(limit=2)["results"] + self.assertDatetimeAlmostEqual(activities[0]["time"], utcnow) + if len(activities) > 1: + self.assertClearlyNotEqual(activities[1]["time"], utcnow) def test_uniqueness_topic(self): - ''' + """ In order for things to be considere unique they need: a.) The same time and activity data, or b.) The same time and foreign id - ''' + """ # follow both the topic and the user - self.flat3.follow('topic', self.topic1.user_id) - self.flat3.follow('user', self.user1.user_id) + self.flat3.follow("topic", self.topic1.user_id) + self.flat3.follow("user", self.user1.user_id) # add the same activity twice now = datetime.datetime.now(tzlocal()) - tweet = 'My Way %s' % get_unique_postfix() + tweet = "My Way %s" % get_unique_postfix() activity_data = { - 'actor': 1, 'verb': 'tweet', 'object': 1, 'time': now, 'tweet': tweet} + "actor": 1, + "verb": "tweet", + "object": 1, + "time": now, + "tweet": tweet, + } self.topic1.add_activity(activity_data) self.user1.add_activity(activity_data) # verify that flat3 contains the activity exactly once response = self.flat3.get(limit=3) - activity_tweets = [a.get('tweet') for a in response['results']] - print(response) - print(activity_tweets) + activity_tweets = [a.get("tweet") for a in response["results"]] self.assertEqual(activity_tweets.count(tweet), 1) def test_uniqueness_foreign_id(self): now = datetime.datetime.now(tzlocal()) utcnow = (now - now.utcoffset()).replace(tzinfo=None) - activity_data = {'actor': 1, 'verb': 'tweet', - 'object': 1, 'foreign_id': 'tweet:11', 'time': now} - response = self.user1.add_activity(activity_data) + activity_data = { + "actor": 1, + "verb": "tweet", + "object": 1, + "foreign_id": "tweet:11", + "time": now, + } + self.user1.add_activity(activity_data) - activity_data = {'actor': 2, 'verb': 'tweet', - 'object': 3, 'foreign_id': 'tweet:11', 'time': now} - response = self.user1.add_activity(activity_data) + activity_data = { + "actor": 2, + "verb": "tweet", + "object": 3, + "foreign_id": "tweet:11", + "time": now, + } + self.user1.add_activity(activity_data) - activities = self.user1.get(limit=10)['results'] + activities = self.user1.get(limit=10)["results"] # the second post should have overwritten the first one (because they # had same id) self.assertEqual(len(activities), 1) - self.assertEqual(activities[0]['object'], '3') - self.assertEqual(activities[0]['foreign_id'], 'tweet:11') - self.assertDatetimeAlmostEqual(activities[0]['time'], utcnow) + self.assertEqual(activities[0]["object"], "3") + self.assertEqual(activities[0]["foreign_id"], "tweet:11") + self.assertDatetimeAlmostEqual(activities[0]["time"], utcnow) def test_time_ordering(self): - ''' + """ datetime.datetime.utcnow() is our recommended approach so if we add an activity add one using time add another activity it should be in the right spot - ''' + """ # timedelta is used to "make sure" that ordering is known even though # server time is not @@ -926,135 +902,131 @@ 
def test_time_ordering(self): feed = self.user2 for index, activity_time in enumerate([None, custom_time, None]): - self._test_sleep(1, 1) # so times are a bit different - activity_data = {'actor': 1, 'verb': 'tweet', - 'object': 1, 'foreign_id': 'tweet:%s' % index, 'time': activity_time} + self._test_sleep(1, 1) # so times are a bit different + activity_data = { + "actor": 1, + "verb": "tweet", + "object": 1, + "foreign_id": "tweet:%s" % index, + "time": activity_time, + } feed.add_activity(activity_data) - activities = feed.get(limit=3)['results'] + activities = feed.get(limit=3)["results"] # the second post should have overwritten the first one (because they # had same id) - self.assertEqual(activities[0]['foreign_id'], 'tweet:2') - self.assertEqual(activities[1]['foreign_id'], 'tweet:0') - self.assertEqual(activities[2]['foreign_id'], 'tweet:1') - self.assertDatetimeAlmostEqual(activities[2]['time'], custom_time) + self.assertEqual(activities[0]["foreign_id"], "tweet:2") + self.assertEqual(activities[1]["foreign_id"], "tweet:0") + self.assertEqual(activities[2]["foreign_id"], "tweet:1") + self.assertDatetimeAlmostEqual(activities[2]["time"], custom_time) def test_missing_actor(self): - activity_data = {'verb': 'tweet', 'object': - 1, 'debug_example_undefined': 'test'} + activity_data = { + "verb": "tweet", + "object": 1, + "debug_example_undefined": "test", + } doit = lambda: self.user1.add_activity(activity_data) try: doit() - raise ValueError('should have raised InputException') + raise ValueError("should have raised InputException") except InputException as e: pass def test_wrong_feed_spec(self): self.c = stream.connect( - '5crf3bhfzesnMISSING', - 'tfq2sdqpj9g446sbv653x3aqmgn33hsn8uzdc9jpskaw8mj6vsnhzswuwptuj9su' + "5crf3bhfzesnMISSING", + "tfq2sdqpj9g446sbv653x3aqmgn33hsn8uzdc9jpskaw8mj6vsnhzswuwptuj9su", ) - self.assertRaises(TypeError, lambda: getfeed('user1')) + self.assertRaises(TypeError, lambda: getfeed("user1")) def test_serialization(self): today = datetime.date.today() now = datetime.datetime.now() - data = dict( - string='string', float=0.1, int=1, date=today, datetime=now) + data = dict(string="string", float=0.1, int=1, date=today, datetime=now) serialized = serializer.dumps(data) loaded = serializer.loads(serialized) self.assertEqual(data, loaded) - # def test_signed_request_post(self): - # self.c._make_signed_request('post', 'test/auth/digest/', {}, {}) - # - # def test_signed_request_get(self): - # self.c._make_signed_request('post', 'test/auth/digest/', {}, {}) - def test_follow_many(self): - sources = [getfeed('user', str(i)).id for i in range(10)] - targets = [getfeed('flat', str(i)).id for i in range(10)] - feeds = [{'source': s, 'target': t} for s,t in zip(sources, targets)] + sources = [getfeed("user", str(i)).id for i in range(10)] + targets = [getfeed("flat", str(i)).id for i in range(10)] + feeds = [{"source": s, "target": t} for s, t in zip(sources, targets)] self.c.follow_many(feeds) for target in targets: - follows = self.c.feed(*target.split(':')).followers()['results'] + follows = self.c.feed(*target.split(":")).followers()["results"] self.assertEqual(len(follows), 1) - self.assertTrue(follows[0]['feed_id'] in sources) - self.assertEqual(follows[0]['target_id'], target) + self.assertTrue(follows[0]["feed_id"] in sources) + self.assertEqual(follows[0]["target_id"], target) for source in sources: - follows = self.c.feed(*source.split(':')).following()['results'] + follows = self.c.feed(*source.split(":")).following()["results"] 
self.assertEqual(len(follows), 1) - self.assertEqual(follows[0]['feed_id'], source) - self.assertTrue(follows[0]['target_id'] in targets) + self.assertEqual(follows[0]["feed_id"], source) + self.assertTrue(follows[0]["target_id"] in targets) def test_follow_many_acl(self): - sources = [getfeed('user', str(i)) for i in range(10)] + sources = [getfeed("user", str(i)) for i in range(10)] # ensure every source is empty first for feed in sources: - activities = feed.get(limit=100)['results'] + activities = feed.get(limit=100)["results"] for activity in activities: - feed.remove_activity(activity['id']) + feed.remove_activity(activity["id"]) - targets = [getfeed('flat', str(i)) for i in range(10)] + targets = [getfeed("flat", str(i)) for i in range(10)] # ensure every source is empty first for feed in targets: - activities = feed.get(limit=100)['results'] + activities = feed.get(limit=100)["results"] for activity in activities: - feed.remove_activity(activity['id']) + feed.remove_activity(activity["id"]) # add activity to each target feed activity = { - 'actor': 'barry', - 'object': '09', - 'verb': 'tweet', - 'time': datetime.datetime.utcnow().isoformat() + "actor": "barry", + "object": "09", + "verb": "tweet", + "time": datetime.datetime.utcnow().isoformat(), } for feed in targets: feed.add_activity(activity) - self.assertEqual(len(feed.get(limit=5)['results']), 1) + self.assertEqual(len(feed.get(limit=5)["results"]), 1) sources_id = [feed.id for feed in sources] targets_id = [target.id for target in targets] - feeds = [{'source': s, 'target': t} for s, t in zip(sources_id, targets_id)] + feeds = [{"source": s, "target": t} for s, t in zip(sources_id, targets_id)] self.c.follow_many(feeds, activity_copy_limit=0) for feed in sources: - activities = feed.get(limit=5)['results'] + activities = feed.get(limit=5)["results"] self.assertEqual(len(activities), 0) def test_add_to_many(self): - activity = {'actor': 1, 'verb': 'tweet', 'object': 1, 'custom': 'data'} - feeds = [getfeed('flat', str(i)).id for i in range(10, 20)] + activity = {"actor": 1, "verb": "tweet", "object": 1, "custom": "data"} + feeds = [getfeed("flat", str(i)).id for i in range(10, 20)] self.c.add_to_many(activity, feeds) - for feed in feeds: - feed = self.c.feed(*feed.split(':')) - self.assertEqual(feed.get()['results'][0]['custom'], 'data') + feed = self.c.feed(*feed.split(":")) + self.assertEqual(feed.get()["results"][0]["custom"], "data") def test_create_email_redirect(self): - target_url = 'http://google.com/?a=b&c=d' - user_id = 'tommaso' + target_url = "http://google.com/?a=b&c=d" + user_id = "tommaso" impression = { - 'foreign_ids': ['tweet:1', - 'tweet:2', - 'tweet:3', - 'tweet:4', - 'tweet:5'], - 'feed_id': 'user:global', - 'user_id': user_id, - 'location': 'email' + "foreign_ids": ["tweet:1", "tweet:2", "tweet:3", "tweet:4", "tweet:5"], + "feed_id": "user:global", + "user_id": user_id, + "location": "email", } engagement = { - 'user_id': user_id, - 'label': 'click', - 'feed_id': 'user:global', - 'location': 'email', - 'position': 3, - 'foreign_id': 'tweet:1' + "user_id": user_id, + "label": "click", + "feed_id": "user:global", + "location": "email", + "position": 3, + "foreign_id": "tweet:1", } events = [impression, engagement] @@ -1063,43 +1035,92 @@ def test_create_email_redirect(self): parsed_url = urlparse(redirect_url) qs = parse_qs(parsed_url.query) - decoded = jwt.decode(qs['authorization'][0], self.c.api_secret) + decoded = jwt.decode(qs["authorization"][0], self.c.api_secret) - self.assertEqual(decoded, { - 
'resource': 'redirect_and_track', - 'action': '*', - 'feed_id': '*', - 'user_id': 'tommaso' - }) + self.assertEqual( + decoded, + { + "resource": "redirect_and_track", + "action": "*", + "feed_id": "*", + "user_id": "tommaso", + }, + ) expected_params = { - 'auth_type': 'jwt', - 'url': target_url, - 'api_key': self.c.api_key, + "auth_type": "jwt", + "url": target_url, + "api_key": self.c.api_key, } for k, v in expected_params.items(): self.assertEqual(qs[k][0], v) - self.assertEqual(json.loads(qs['events'][0]), events) + self.assertEqual(json.loads(qs["events"][0]), events) def test_email_redirect_invalid_target(self): - engagement = {'foreign_id': 'tweet:1', 'label': 'click', 'position': 3, 'user_id': 'tommaso', 'location': 'email', 'feed_id': 'user:global'} - impression = {'foreign_ids': ['tweet:1', 'tweet:2', 'tweet:3', 'tweet:4', 'tweet:5'], 'user_id': - 'tommaso', 'location': 'email', 'feed_id': 'user:global'} + engagement = { + "foreign_id": "tweet:1", + "label": "click", + "position": 3, + "user_id": "tommaso", + "location": "email", + "feed_id": "user:global", + } + impression = { + "foreign_ids": ["tweet:1", "tweet:2", "tweet:3", "tweet:4", "tweet:5"], + "user_id": "tommaso", + "location": "email", + "feed_id": "user:global", + } events = [impression, engagement] # no protocol specified, this should raise an error - target_url = 'google.com' - user_id = 'tommaso' - create_redirect = lambda : self.c.create_redirect_url(target_url, user_id, events) + target_url = "google.com" + user_id = "tommaso" + create_redirect = lambda: self.c.create_redirect_url( + target_url, user_id, events + ) self.assertRaises(MissingSchema, create_redirect) def test_follow_redirect_url(self): - target_url = 'http://google.com/?a=b&c=d' + target_url = "http://google.com/?a=b&c=d" events = [] - user_id = 'tommaso' + user_id = "tommaso" redirect_url = self.c.create_redirect_url(target_url, user_id, events) res = requests.get(redirect_url) res.raise_for_status() - self.assertTrue('google' in res.url) + self.assertTrue("google" in res.url) + + def test_get_activities_empty_ids(self): + response = self.c.get_activities(ids=[str(uuid1())]) + self.assertEqual(len(response["results"]), 0) + + def test_get_activities_empty_foreign_ids(self): + response = self.c.get_activities( + foreign_id_time=[("fid-x", datetime.datetime.utcnow())] + ) + self.assertEqual(len(response["results"]), 0) + + def test_get_activities_full(self): + dt = datetime.datetime.utcnow() + fid = "awesome-test" + + activity = { + "actor": "barry", + "object": "09", + "verb": "tweet", + "time": dt, + "foreign_id": fid, + } + + feed = getfeed("user", "test_get_activity") + response = feed.add_activity(activity) + + response = self.c.get_activities(ids=[response["id"]]) + self.assertEqual(len(response["results"]), 1) + self.assertEqual(activity["foreign_id"], response["results"][0]["foreign_id"]) + + response = self.c.get_activities(foreign_id_time=[(fid, dt)]) + self.assertEqual(len(response["results"]), 1) + self.assertEqual(activity["foreign_id"], response["results"][0]["foreign_id"]) From 2a6085403ebfcb107dd382e7abde63c2097b55a6 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Thu, 5 Jul 2018 16:48:27 +0200 Subject: [PATCH 084/208] mention the new API endpoint in the readme --- README.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/README.md b/README.md index ffc8546..925f226 100644 --- a/README.md +++ b/README.md @@ -90,6 +90,14 @@ activity = { } user_feed_1.add_activity(activity) +# Retrieve an activity by its ID 
+client.get_activities(ids=[activity_id]) + +# Retrieve an activity by the combination of foreign_id and time +client.get_activities(foreign_id_time=[ + (foreign_id, activity_time), +]) + # Generating tokens for client side usage (JS client) token = user_feed_1.token # Javascript client side feed initialization From 8b6cf17f6c220ee9b64f8b8b91b29915caa667c6 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Thu, 5 Jul 2018 17:48:01 +0200 Subject: [PATCH 085/208] add some validation for foreign_id_time param --- stream/client.py | 5 ++--- stream/utils.py | 11 +++++++++++ 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/stream/client.py b/stream/client.py index d327ede..b1e6a92 100644 --- a/stream/client.py +++ b/stream/client.py @@ -5,13 +5,11 @@ import jwt import requests -from requests.adapters import HTTPAdapter from stream.serializer import _datetime_encoder from stream import exceptions, serializer -from stream.httpsig.requests_auth import HTTPSignatureAuth from stream.signing import sign -from stream.utils import validate_feed_slug, validate_user_id +from stream.utils import validate_feed_slug, validate_user_id, validate_foreign_id_time from stream.httpsig.requests_auth import HTTPSignatureAuth from requests import Request @@ -336,6 +334,7 @@ def get_activities(self, ids=None, foreign_id_time=None): query_params['ids'] = ','.join(ids) if foreign_id_time is not None: + validate_foreign_id_time(foreign_id_time) foreign_ids, timestamps = zip(*foreign_id_time) timestamps = map(_datetime_encoder, timestamps) query_params['foreign_ids'] = ','.join(foreign_ids) diff --git a/stream/utils.py b/stream/utils.py index 228a74e..04ed771 100644 --- a/stream/utils.py +++ b/stream/utils.py @@ -43,3 +43,14 @@ def validate_user_id(user_id): raise ValueError(msg % user_id) return user_id + +def validate_foreign_id_time(foreign_id_time): + if not isinstance(foreign_id_time, (list, tuple)): + raise ValueError('foreign_id_time should be a list of tuples') + + for v in foreign_id_time: + if not isinstance(v, (list, tuple)): + raise ValueError('foreign_id_time elements should be lists or tuples') + + if len(v) != 2: + raise ValueError('foreign_id_time elements should have two elements') From 38873e019d02362557c8f6f4dcfa8cbc9c651692 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Thu, 5 Jul 2018 17:51:20 +0200 Subject: [PATCH 086/208] 2.9.0 release --- CHANGELOG | 8 ++++++++ stream/__init__.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/CHANGELOG b/CHANGELOG index 0287946..57fa4cc 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,14 @@ Change history ================ +======= +2.9.0 +====== +:release-date: 2017-07-05 +:by: Tommaso Barbugli + +Add support for get activity API endpoint + ======= 2.8.1 ====== diff --git a/stream/__init__.py b/stream/__init__.py index dab0237..2c590c0 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = 'Copyright 2014, Stream.io, Inc' __credits__ = ['Thierry Schellenbach, mellowmorning.com, @tschellenbach'] __license__ = 'BSD-3-Clause' -__version__ = '2.8.1' +__version__ = '2.9.0' __maintainer__ = 'Thierry Schellenbach' __email__ = 'support@getstream.io' __status__ = 'Production' From 8a3ac2522c9eee5c46518abc78bebf240c7f5eae Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Wed, 18 Jul 2018 10:39:40 +0200 Subject: [PATCH 087/208] rename foreign_id_time in 2.9.1 --- CHANGELOG | 9 +++++++++ README.md | 2 +- stream/__init__.py | 2 +- stream/client.py | 12 ++++++------ stream/tests/test_client.py | 
4 ++-- 5 files changed, 19 insertions(+), 10 deletions(-) diff --git a/CHANGELOG b/CHANGELOG index 57fa4cc..5d749b2 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,15 @@ Change history ================ +======= +2.9.1 +====== +:release-date: 2017-07-18 +:by: Tommaso Barbugli + +Renamed client.get_activities' foreign_id_time param to foreign_id_times + + ======= 2.9.0 ====== diff --git a/README.md b/README.md index 925f226..536eb33 100644 --- a/README.md +++ b/README.md @@ -94,7 +94,7 @@ user_feed_1.add_activity(activity) client.get_activities(ids=[activity_id]) # Retrieve an activity by the combination of foreign_id and time -client.get_activities(foreign_id_time=[ +client.get_activities(foreign_id_times=[ (foreign_id, activity_time), ]) diff --git a/stream/__init__.py b/stream/__init__.py index 2c590c0..544a742 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = 'Copyright 2014, Stream.io, Inc' __credits__ = ['Thierry Schellenbach, mellowmorning.com, @tschellenbach'] __license__ = 'BSD-3-Clause' -__version__ = '2.9.0' +__version__ = '2.9.1' __maintainer__ = 'Thierry Schellenbach' __email__ = 'support@getstream.io' __status__ = 'Production' diff --git a/stream/client.py b/stream/client.py index b1e6a92..15cfd8a 100644 --- a/stream/client.py +++ b/stream/client.py @@ -313,7 +313,7 @@ def update_activity(self, activity): ''' return self.update_activities([activity]) - def get_activities(self, ids=None, foreign_id_time=None): + def get_activities(self, ids=None, foreign_id_times=None): ''' Retrieves activities by their ID or foreign_id + time combination @@ -322,10 +322,10 @@ def get_activities(self, ids=None, foreign_id_time=None): ''' auth_token = self.create_jwt_token('activities', '*', feed_id='*') - if ids is None and foreign_id_time is None: + if ids is None and foreign_id_times is None: raise TypeError('One the parameters ids or foreign_id_time must be provided and not None') - if ids is not None and foreign_id_time is not None: + if ids is not None and foreign_id_times is not None: raise TypeError('At most one of the parameters ids or foreign_id_time must be provided') query_params = {} @@ -333,9 +333,9 @@ def get_activities(self, ids=None, foreign_id_time=None): if ids is not None: query_params['ids'] = ','.join(ids) - if foreign_id_time is not None: - validate_foreign_id_time(foreign_id_time) - foreign_ids, timestamps = zip(*foreign_id_time) + if foreign_id_times is not None: + validate_foreign_id_time(foreign_id_times) + foreign_ids, timestamps = zip(*foreign_id_times) timestamps = map(_datetime_encoder, timestamps) query_params['foreign_ids'] = ','.join(foreign_ids) query_params['timestamps'] = ','.join(timestamps) diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index 50827bf..0043cb5 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -1098,7 +1098,7 @@ def test_get_activities_empty_ids(self): def test_get_activities_empty_foreign_ids(self): response = self.c.get_activities( - foreign_id_time=[("fid-x", datetime.datetime.utcnow())] + foreign_id_times=[("fid-x", datetime.datetime.utcnow())] ) self.assertEqual(len(response["results"]), 0) @@ -1121,6 +1121,6 @@ def test_get_activities_full(self): self.assertEqual(len(response["results"]), 1) self.assertEqual(activity["foreign_id"], response["results"][0]["foreign_id"]) - response = self.c.get_activities(foreign_id_time=[(fid, dt)]) + response = self.c.get_activities(foreign_id_times=[(fid, dt)]) self.assertEqual(len(response["results"]), 1) 
self.assertEqual(activity["foreign_id"], response["results"][0]["foreign_id"]) From 377995e2ec8a3a6873c2eeebe6b004ff0a21b2dc Mon Sep 17 00:00:00 2001 From: Ben Stovold Date: Thu, 19 Jul 2018 15:20:38 +1000 Subject: [PATCH 088/208] Update serialization test to verify dates with minutes only Currently not passing. --- stream/tests/test_client.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index 0043cb5..2913c33 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -942,8 +942,9 @@ def test_wrong_feed_spec(self): def test_serialization(self): today = datetime.date.today() + then = datetime.datetime.now().replace(microsecond=0) now = datetime.datetime.now() - data = dict(string="string", float=0.1, int=1, date=today, datetime=now) + data = dict(string="string", float=0.1, int=1, date=today, datetime=now, datetimenomicro=then) serialized = serializer.dumps(data) loaded = serializer.loads(serialized) self.assertEqual(data, loaded) From 386ba08bec88a9568bfb35e769d5f2f0882e547b Mon Sep 17 00:00:00 2001 From: Ben Stovold Date: Thu, 19 Jul 2018 15:35:29 +1000 Subject: [PATCH 089/208] Ensure microsecs are encoded when sending datetime objects to the API --- stream/serializer.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/stream/serializer.py b/stream/serializer.py index 1df202b..517e375 100644 --- a/stream/serializer.py +++ b/stream/serializer.py @@ -4,13 +4,18 @@ ''' Adds the ability to send date and datetime objects to the API +Datetime objects will be encoded/ decoded with microseconds The date and datetime formats from the API are automatically supported and parsed ''' +DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f" +DATE_FORMAT = "%Y-%m-%d" def _datetime_encoder(obj): - if isinstance(obj, (datetime.datetime, datetime.date)): - return obj.isoformat() + if isinstance(obj, datetime.datetime): + return datetime.datetime.strftime(obj, DATETIME_FORMAT) + if isinstance(obj, datetime.date): + return datetime.datetime.strftime(obj, DATE_FORMAT) def _datetime_decoder(dict_): @@ -26,15 +31,13 @@ def _datetime_decoder(dict_): try: # The api always returns times like this # 2014-07-25T09:12:24.735 - datetime_obj = datetime.datetime.strptime( - value, "%Y-%m-%dT%H:%M:%S.%f") + datetime_obj = datetime.datetime.strptime(value, DATETIME_FORMAT) dict_[key] = datetime_obj except (ValueError, TypeError): try: # The api always returns times like this # 2014-07-25T09:12:24.735 - datetime_obj = datetime.datetime.strptime( - value, "%Y-%m-%d") + datetime_obj = datetime.datetime.strptime(value, DATE_FORMAT) dict_[key] = datetime_obj.date() except (ValueError, TypeError): continue From 04d1f09ba0e4cf08384c90fc4616699521965541 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Fri, 20 Jul 2018 11:16:26 +0200 Subject: [PATCH 090/208] use newer pyJWT version --- CHANGELOG | 20 +++++++++++++++----- setup.py | 8 +++++--- 2 files changed, 20 insertions(+), 8 deletions(-) diff --git a/CHANGELOG b/CHANGELOG index 5d749b2..e4cd9e8 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,7 +2,17 @@ Change history ================ -======= +====== +2.9.2 +====== +:release-date: 2017-07-20 +:by: Tommaso Barbugli + +Fixed deserialization problem with datetime objects with zeroed microseconds +Support newer versions of the pyJWT lib + + +====== 2.9.1 ====== :release-date: 2017-07-18 @@ -11,7 +21,7 @@ Renamed client.get_activities' foreign_id_time param to foreign_id_times -======= +====== 2.9.0 ====== 
:release-date: 2017-07-05 @@ -19,7 +29,7 @@ Renamed client.get_activities' foreign_id_time param to foreign_id_times Add support for get activity API endpoint -======= +====== 2.8.1 ====== :release-date: 2017-12-21 @@ -27,7 +37,7 @@ Add support for get activity API endpoint Fixes a regression with embedded httpsig and Python 3 -======= +====== 2.8.0 ====== :release-date: 2017-12-21 @@ -38,7 +48,7 @@ Fixes install issues on Windows * Bundle http-sig library * Use pycryptodomex instead of the discontinued pycrypto library -======= +====== 2.7.0 ====== :release-date: 2017-12-14 diff --git a/setup.py b/setup.py index 9e97859..3b9ea10 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,6 @@ from stream import __version__, __maintainer__, __email__, __license__ import sys - unit = 'unittest2py3k' if sys.version_info > (3, 0, 0) else 'unittest2' tests_require = [ unit, @@ -31,13 +30,16 @@ install_requires = [ 'pycryptodomex==3.4.7', - 'pyjwt==1.3.0', requests, 'six>=1.8.0' ] if sys.version_info < (2, 7, 0): install_requires.append('pyOpenSSL<18.0.0') + install_requires.append('pyjwt>=1.3.0,<1.6.0') +else: + install_requires.append('pyjwt>=1.3.0,<1.7.0') + class PyTest(TestCommand): @@ -53,6 +55,7 @@ def run_tests(self): '-v --cov=./') sys.exit(errno) + setup( name='stream-python', version=__version__, @@ -80,7 +83,6 @@ def run_tests(self): 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Topic :: Software Development :: Libraries :: Python Modules', From 8f65382a25a8ba264e5801157d3ccdd8f5d547a8 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Fri, 20 Jul 2018 11:19:50 +0200 Subject: [PATCH 091/208] specify Python support --- README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.md b/README.md index 536eb33..bbf1ef8 100644 --- a/README.md +++ b/README.md @@ -11,6 +11,10 @@ You can sign up for a Stream account at https://getstream.io/get_started. 
### Installation +stream-python supports: + +- Python (2.6, 2.7, 3.4, 3.5, 3.6) + #### Install from Pypi ```bash From 63df99db46461ca4d2ac66e6e4fae0d302f3c079 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Fri, 20 Jul 2018 11:22:28 +0200 Subject: [PATCH 092/208] stop building 3.3 and add 3.7 to build matrix --- .travis.yml | 2 +- stream/__init__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index f622ccf..27cd45e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,10 +3,10 @@ sudo: false python: - 2.6 - 2.7 - - 3.3 - 3.4 - 3.5 - 3.6 + - 3.7 matrix: fast_finish: true diff --git a/stream/__init__.py b/stream/__init__.py index 544a742..c1b2c98 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = 'Copyright 2014, Stream.io, Inc' __credits__ = ['Thierry Schellenbach, mellowmorning.com, @tschellenbach'] __license__ = 'BSD-3-Clause' -__version__ = '2.9.1' +__version__ = '2.9.2' __maintainer__ = 'Thierry Schellenbach' __email__ = 'support@getstream.io' __status__ = 'Production' From 658237674a5d3b461ea907d9edd769b7d7d2f063 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Fri, 20 Jul 2018 11:29:22 +0200 Subject: [PATCH 093/208] travis thinks 3.7 is still dev :( --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 27cd45e..8aae191 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,7 +6,7 @@ python: - 3.4 - 3.5 - 3.6 - - 3.7 + - 3.7-dev matrix: fast_finish: true From a3b790f12604165f3b7cf4f37e7416ee54b380fb Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Fri, 20 Jul 2018 11:35:58 +0200 Subject: [PATCH 094/208] python 3.7 now officially supported --- README.md | 2 +- setup.py | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index bbf1ef8..d4b84c5 100644 --- a/README.md +++ b/README.md @@ -13,7 +13,7 @@ You can sign up for a Stream account at https://getstream.io/get_started. stream-python supports: -- Python (2.6, 2.7, 3.4, 3.5, 3.6) +- Python (2.6, 2.7, 3.4, 3.5, 3.6, 3.7) #### Install from Pypi diff --git a/setup.py b/setup.py index 3b9ea10..7b63ced 100644 --- a/setup.py +++ b/setup.py @@ -85,6 +85,8 @@ def run_tests(self): 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', 'Topic :: Software Development :: Libraries :: Python Modules', ], ) From 4744f38eb02abae2ece413fde137a9dbe263df04 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Fri, 20 Jul 2018 11:47:42 +0200 Subject: [PATCH 095/208] use readme for package description --- setup.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/setup.py b/setup.py index 7b63ced..1d09607 100644 --- a/setup.py +++ b/setup.py @@ -15,13 +15,7 @@ 'python-dateutil' ] -long_description = ''' -Documentation -------------- -Full documentation is available on `Github`_. - -.. _`Github`: https://github.com/GetStream/stream-python -''' +long_description = open('README.md', 'r').read() requests = 'requests>=2.3.0,<3' @@ -64,6 +58,7 @@ def run_tests(self): url='http://github.com/GetStream/stream-python', description='Client for getstream.io. 
Build scalable newsfeeds & activity streams in a few hours instead of weeks.', long_description=long_description, + long_description_content_type='text/markdown', license=__license__, packages=find_packages(), zip_safe=False, From 30eb4b4c2f67612553f85006460531a3351060fa Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Fri, 20 Jul 2018 11:56:01 +0200 Subject: [PATCH 096/208] 2.9.3 --- CHANGELOG | 8 ++++++++ stream/__init__.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/CHANGELOG b/CHANGELOG index e4cd9e8..3732697 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,14 @@ Change history ================ +====== +2.9.3 +====== +:release-date: 2017-07-20 +:by: Tommaso Barbugli + +Use Readme.md content as package long description + ====== 2.9.2 ====== diff --git a/stream/__init__.py b/stream/__init__.py index c1b2c98..eff7d46 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = 'Copyright 2014, Stream.io, Inc' __credits__ = ['Thierry Schellenbach, mellowmorning.com, @tschellenbach'] __license__ = 'BSD-3-Clause' -__version__ = '2.9.2' +__version__ = '2.9.3' __maintainer__ = 'Thierry Schellenbach' __email__ = 'support@getstream.io' __status__ = 'Production' From 3bd6e9c0068ec91000c7d8cd4fa9bbec10c7b029 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Fri, 20 Jul 2018 11:57:27 +0200 Subject: [PATCH 097/208] 2.9.4 --- stream/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stream/__init__.py b/stream/__init__.py index eff7d46..4625479 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = 'Copyright 2014, Stream.io, Inc' __credits__ = ['Thierry Schellenbach, mellowmorning.com, @tschellenbach'] __license__ = 'BSD-3-Clause' -__version__ = '2.9.3' +__version__ = '2.9.4' __maintainer__ = 'Thierry Schellenbach' __email__ = 'support@getstream.io' __status__ = 'Production' From a880300864174809000843e869bfba308007a478 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Fri, 20 Jul 2018 12:06:15 +0200 Subject: [PATCH 098/208] add release notes --- README.md | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/README.md b/README.md index d4b84c5..47c78ea 100644 --- a/README.md +++ b/README.md @@ -150,6 +150,19 @@ py.test --cov stream --cov-report html LOCAL=true py.test ``` +### Releasing a new version + +In order to release new version you need to be a maintainer on Pypi. + +- Update CHANGELOG +- Update the version on setup.py +- Commit and push to Github +- Create a new tag for the version (eg. `v2.9.0`) +- Create a new dist with python `python setup.py sdist` +- Upload the new distributable with wine `twine upload dist/stream-python-VERSION-NAME.tar.gz` + +If unsure you can also test using the Pypi test servers `twine upload --repository-url https://test.pypi.org/legacy/ dist/stream-python-VERSION-NAME.tar.gz` + ### Copyright and License Information Copyright (c) 2014-2017 Stream.io Inc, and individual contributors. All rights reserved. 
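The "Releasing a new version" steps added to the README in the patch above amount to roughly the following shell sequence. This is only a sketch: the version number, tag, and dist filename are examples, PyPI maintainer access is assumed, and the upload is done with `twine`.

```bash
# Illustrative release flow for stream-python; 2.9.4 stands in for the real version.
# 1. Add a CHANGELOG entry and bump __version__ in stream/__init__.py
#    (setup.py reads the version from there via `from stream import __version__`).
git commit -am "release 2.9.4"
git push
# 2. Tag the release and push the tag.
git tag v2.9.4
git push --tags
# 3. Build the source distribution and upload it.
python setup.py sdist
twine upload dist/stream-python-2.9.4.tar.gz
# Optional dry run against the PyPI test server:
# twine upload --repository-url https://test.pypi.org/legacy/ dist/stream-python-2.9.4.tar.gz
```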
From 2e050e938703a65ddcf556acc5b56c74880129b8 Mon Sep 17 00:00:00 2001 From: Federico Ruggi Date: Tue, 24 Jul 2018 12:06:49 +0200 Subject: [PATCH 099/208] partial activity update endpoint --- stream/client.py | 31 ++++++++++++++++++ stream/tests/test_client.py | 64 +++++++++++++++++++++++++++++++++++++ 2 files changed, 95 insertions(+) diff --git a/stream/client.py b/stream/client.py index 15cfd8a..9240399 100644 --- a/stream/client.py +++ b/stream/client.py @@ -342,6 +342,37 @@ def get_activities(self, ids=None, foreign_id_times=None): return self.get('activities/', auth_token, params=query_params) + def activity_partial_update(self, id=None, foreign_id=None, time=None, set={}, unset=[]): + ''' + Partial update activity, via foreign ID or Foreign ID + timestamp + + id: the activity ID + foreign_id: the activity foreign ID + time: the activity time + set: object containing the set operations + unset: list of unset operations + ''' + + auth_token = self.create_jwt_token('activities', '*', feed_id='*') + + if id is None and (foreign_id is None or time is None): + raise TypeError('The id or foreign_id+time parameters must be provided and not be None') + if id is not None and (foreign_id is not None or time is not None): + raise TypeError('Only one of the id or the foreign_id+time parameters can be provided') + + data = { + 'set': set, + 'unset': unset, + } + + if id: + data['id'] = id + else: + data['foreign_id'] = foreign_id + data['time'] = time + + return self.post('activity/', auth_token, data=data) + def create_redirect_url(self, target_url, user_id, events): ''' Creates a redirect url for tracking the given events in the context diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index 2913c33..7850939 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -1125,3 +1125,67 @@ def test_get_activities_full(self): response = self.c.get_activities(foreign_id_times=[(fid, dt)]) self.assertEqual(len(response["results"]), 1) self.assertEqual(activity["foreign_id"], response["results"][0]["foreign_id"]) + + def test_activity_partial_update(self): + now = datetime.datetime.utcnow() + feed = self.c.feed('user', uuid4()) + feed.add_activity({ + "actor": "barry", + "object": "09", + "verb": "tweet", + "time": now, + "foreign_id": 'fid:123', + 'product': { + 'name': 'shoes', + 'price': 9.99, + 'color': 'blue' + } + }) + activity = feed.get()['results'][0] + + set = { + 'product.name': 'boots', + 'product.price': 7.99, + 'popularity': 1000, + 'foo': { + 'bar': { + 'baz': 'qux', + } + } + } + unset = [ 'product.color' ] + + # partial update by ID + self.c.activity_partial_update(id=activity['id'], set=set, unset=unset) + updated = feed.get()['results'][0] + expected = activity + expected['product'] = { + 'name': 'boots', + 'price': 7.99 + } + expected['popularity'] = 1000 + expected['foo'] = { + 'bar': { + 'baz': 'qux' + } + } + self.assertEqual(updated, expected) + + # partial update by foreign ID + time + set = { + 'foo.bar.baz': 42, + 'popularity': 9000 + } + unset = [ 'product.price' ] + self.c.activity_partial_update(foreign_id=activity['foreign_id'], time=activity['time'], set=set, unset=unset) + updated = feed.get()['results'][0] + expected['product'] = { + 'name': 'boots' + } + expected['foo'] = { + 'bar': { + 'baz': 42 + } + } + expected['popularity'] = 9000 + self.assertEqual(updated, expected) From 0530c5add355b4f2f3149143a893332f3ad43183 Mon Sep 17 00:00:00 2001 From: Federico Ruggi Date: Tue, 24 Jul 2018 12:06:58 +0200 Subject: [PATCH 100/208] fix 
broken test --- stream/tests/test_client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index 7850939..6dbb4ec 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -866,7 +866,7 @@ def test_uniqueness_foreign_id(self): "verb": "tweet", "object": 1, "foreign_id": "tweet:11", - "time": now, + "time": utcnow, } self.user1.add_activity(activity_data) @@ -875,7 +875,7 @@ def test_uniqueness_foreign_id(self): "verb": "tweet", "object": 3, "foreign_id": "tweet:11", - "time": now, + "time": utcnow, } self.user1.add_activity(activity_data) From ad11e16158717ad68c783bcaedf7816b6f70d951 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Mon, 30 Jul 2018 16:14:48 +0200 Subject: [PATCH 101/208] Update CHANGELOG --- CHANGELOG | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/CHANGELOG b/CHANGELOG index 3732697..918799d 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,14 @@ Change history ================ +====== +2.10.0 +====== +:release-date: 2017-07-30 +:by: Tommaso Barbugli + +Partial activity API endpoint + ====== 2.9.3 ====== From a9f6cb7e59d69cd2c140655736c9b9a8a6992bc3 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Mon, 30 Jul 2018 16:16:08 +0200 Subject: [PATCH 102/208] 2.10.0 --- stream/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stream/__init__.py b/stream/__init__.py index 4625479..3831f9b 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = 'Copyright 2014, Stream.io, Inc' __credits__ = ['Thierry Schellenbach, mellowmorning.com, @tschellenbach'] __license__ = 'BSD-3-Clause' -__version__ = '2.9.4' +__version__ = '2.10.0' __maintainer__ = 'Thierry Schellenbach' __email__ = 'support@getstream.io' __status__ = 'Production' From 0ee0faf0e926cf30c1f5c9f773b0a68a56ae852d Mon Sep 17 00:00:00 2001 From: Federico Ruggi Date: Mon, 30 Jul 2018 16:26:07 +0200 Subject: [PATCH 103/208] partial update example --- README.md | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/README.md b/README.md index 47c78ea..51b927e 100644 --- a/README.md +++ b/README.md @@ -102,6 +102,20 @@ client.get_activities(foreign_id_times=[ (foreign_id, activity_time), ]) +# Update some parts of an activity with activity_partial_update +set = { + 'product.name': 'boots', + 'colors': { + 'red': '0xFF0000', + 'green': '0x00FF00' + } +} +unset = [ 'popularity', 'details.info' ] +# ...by ID +client.activity_partial_update(id=activity_id, set=set, unset=unset) +# ...or by combination of foreign_id and time +client.activity_partial_update(foreign_id=foreign_id, time=activity_time, set=set, unset=unset) + # Generating tokens for client side usage (JS client) token = user_feed_1.token # Javascript client side feed initialization From d19d51d35623264e65f5ea8b87e7cd534ad3a537 Mon Sep 17 00:00:00 2001 From: Federico Ruggi Date: Fri, 3 Aug 2018 10:37:32 +0200 Subject: [PATCH 104/208] remove unused filter from following --- stream/feed.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stream/feed.py b/stream/feed.py index 9893a8e..cdc5abb 100644 --- a/stream/feed.py +++ b/stream/feed.py @@ -182,7 +182,7 @@ def followers(self, offset=0, limit=25, feeds=None): url, params=params, signature=token) return response - def following(self, offset=0, limit=25, feeds=None, filter=None): + def following(self, offset=0, limit=25, feeds=None): ''' List the feeds which this feed is following ''' From 
67808ae073165cd962548828005b0c02686ab78f Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Mon, 6 Aug 2018 10:10:50 +0200 Subject: [PATCH 105/208] Update README.md --- README.md | 175 ++---------------------------------------------------- 1 file changed, 5 insertions(+), 170 deletions(-) diff --git a/README.md b/README.md index 51b927e..169c831 100644 --- a/README.md +++ b/README.md @@ -1,184 +1,19 @@ -stream-python -============= - -[![Build Status](https://travis-ci.org/GetStream/stream-python.svg?branch=master)](https://travis-ci.org/GetStream/stream-python) [![codecov](https://codecov.io/gh/GetStream/stream-python/branch/master/graph/badge.svg)](https://codecov.io/gh/GetStream/stream-python) [![PyPI version](https://badge.fury.io/py/stream-python.svg)](http://badge.fury.io/py/stream-python) - -[stream-python](https://github.com/GetStream/stream-python) is the official Python client for [Stream](https://getstream.io/), a web service for building scalable newsfeeds and activity streams. - -Note there is also a higher level [Django - Stream integration](https://github.com/getstream/stream-django) library which hooks into the Django ORM. - -You can sign up for a Stream account at https://getstream.io/get_started. +Stream-JS +=========== ### Installation -stream-python supports: - -- Python (2.6, 2.7, 3.4, 3.5, 3.6, 3.7) - -#### Install from Pypi - ```bash -pip install stream-python +npm i @stream-io/react-native ``` -### Full documentation - -Documentation for this Python client are available at the [Stream website](https://getstream.io/docs/?language=python) or on [Read the Docs](http://stream-python.readthedocs.org/en/latest/). - ### Usage -```python -# Instantiate a new client -import stream -client = stream.connect('YOUR_API_KEY', 'API_KEY_SECRET') - -# INstantiate a new client specifying datacenter location -client = stream.connect('YOUR_API_KEY', 'API_KEY_SECRET', location='us-east') -# Find your API keys here https://getstream.io/dashboard/ - -# Instantiate a feed object -user_feed_1 = client.feed('user', '1') - -# Get activities from 5 to 10 (slow pagination) -result = user_feed_1.get(limit=5, offset=5) -# (Recommended & faster) Filter on an id less than the given UUID -result = user_feed_1.get(limit=5, id_lt="e561de8f-00f1-11e4-b400-0cc47a024be0") - -# Create a new activity -activity_data = {'actor': 1, 'verb': 'tweet', 'object': 1, 'foreign_id': 'tweet:1'} -activity_response = user_feed_1.add_activity(activity_data) -# Create a bit more complex activity -activity_data = {'actor': 1, 'verb': 'run', 'object': 1, 'foreign_id': 'run:1', - 'course': {'name': 'Golden Gate park', 'distance': 10}, - 'participants': ['Thierry', 'Tommaso'], - 'started_at': datetime.datetime.now() -} -user_feed_1.add_activity(activity_data) - -# Remove an activity by its id -user_feed_1.remove_activity("e561de8f-00f1-11e4-b400-0cc47a024be0") -# or by foreign id -user_feed_1.remove_activity(foreign_id='tweet:1') - -# Follow another feed -user_feed_1.follow('flat', '42') - -# Stop following another feed -user_feed_1.unfollow('flat', '42') - -# List followers/following -following = user_feed_1.following(offset=0, limit=2) -followers = user_feed_1.followers(offset=0, limit=10) - -# Creates many follow relationships in one request -follows = [ - {'source': 'flat:1', 'target': 'user:1'}, - {'source': 'flat:1', 'target': 'user:2'}, - {'source': 'flat:1', 'target': 'user:3'} -] -client.follow_many(follows) - -# Batch adding activities -activities = [ - {'actor': 1, 'verb': 'tweet', 'object': 1}, - {'actor': 2, 
'verb': 'watch', 'object': 3} -] -user_feed_1.add_activities(activities) - -# Add an activity and push it to other feeds too using the `to` field -activity = { - "actor":"1", - "verb":"like", - "object":"3", - "to":["user:44", "user:45"] -} -user_feed_1.add_activity(activity) - -# Retrieve an activity by its ID -client.get_activities(ids=[activity_id]) - -# Retrieve an activity by the combination of foreign_id and time -client.get_activities(foreign_id_times=[ - (foreign_id, activity_time), -]) - -# Update some parts of an activity with activity_partial_update -set = { - 'product.name': 'boots', - 'colors': { - 'red': '0xFF0000', - 'green': '0x00FF00' - } -} -unset = [ 'popularity', 'details.info' ] -# ...by ID -client.activity_partial_update(id=activity_id, set=set, unset=unset) -# ...or by combination of foreign_id and time -client.activity_partial_update(foreign_id=foreign_id, time=activity_time, set=set, unset=unset) - -# Generating tokens for client side usage (JS client) -token = user_feed_1.token -# Javascript client side feed initialization -# user1 = client.feed('user', '1', '{{ token }}'); - -# Generate a read-only token for client side usage (JS client) -readonly_token = user_feed_1.get_readonly_token() -# Javascript client side feed initialization -# user1 = client.feed('user', '1', '{{ readonly_token }}'); - -# Generate a redirect url for the Stream Analytics platform to track -# events/impressions on url clicks -impression = { - 'content_list': ['tweet:1', 'tweet:2', 'tweet:3'], - 'user_data': 'tommaso', - 'location': 'email', - 'feed_id': 'user:global' -} - -engagement = { - 'content': 'tweet:2', - 'label': 'click', - 'position': 1, - 'user_data': 'tommaso', - 'location': 'email', - 'feed_id': - 'user:global' -} - -events = [impression, engagement] - -redirect_url = client.create_redirect_url('http://google.com/', 'user_id', events) +```javascript ``` -[JS client](http://github.com/getstream/stream-js). - -### Contributing - -First, make sure you can run the test suite. Tests are run via py.test - -```bash -py.test -# with coverage -py.test --cov stream --cov-report html -# against a local API backend -LOCAL=true py.test -``` - -### Releasing a new version - -In order to release new version you need to be a maintainer on Pypi. - -- Update CHANGELOG -- Update the version on setup.py -- Commit and push to Github -- Create a new tag for the version (eg. `v2.9.0`) -- Create a new dist with python `python setup.py sdist` -- Upload the new distributable with wine `twine upload dist/stream-python-VERSION-NAME.tar.gz` - -If unsure you can also test using the Pypi test servers `twine upload --repository-url https://test.pypi.org/legacy/ dist/stream-python-VERSION-NAME.tar.gz` - ### Copyright and License Information -Copyright (c) 2014-2017 Stream.io Inc, and individual contributors. All rights reserved. +Copyright (c) 2015-2018 Stream.io Inc, and individual contributors. All rights reserved. See the file "LICENSE" for information on the history of this software, terms & conditions for usage, and a DISCLAIMER OF ALL WARRANTIES. From ce16688de22f855d525149ff7f57899259f77e39 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Mon, 6 Aug 2018 10:11:34 +0200 Subject: [PATCH 106/208] Revert "Update README.md" This reverts commit 67808ae073165cd962548828005b0c02686ab78f. 
--- README.md | 175 ++++++++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 170 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 169c831..51b927e 100644 --- a/README.md +++ b/README.md @@ -1,19 +1,184 @@ -Stream-JS -=========== +stream-python +============= + +[![Build Status](https://travis-ci.org/GetStream/stream-python.svg?branch=master)](https://travis-ci.org/GetStream/stream-python) [![codecov](https://codecov.io/gh/GetStream/stream-python/branch/master/graph/badge.svg)](https://codecov.io/gh/GetStream/stream-python) [![PyPI version](https://badge.fury.io/py/stream-python.svg)](http://badge.fury.io/py/stream-python) + +[stream-python](https://github.com/GetStream/stream-python) is the official Python client for [Stream](https://getstream.io/), a web service for building scalable newsfeeds and activity streams. + +Note there is also a higher level [Django - Stream integration](https://github.com/getstream/stream-django) library which hooks into the Django ORM. + +You can sign up for a Stream account at https://getstream.io/get_started. ### Installation +stream-python supports: + +- Python (2.6, 2.7, 3.4, 3.5, 3.6, 3.7) + +#### Install from Pypi + ```bash -npm i @stream-io/react-native +pip install stream-python ``` +### Full documentation + +Documentation for this Python client are available at the [Stream website](https://getstream.io/docs/?language=python) or on [Read the Docs](http://stream-python.readthedocs.org/en/latest/). + ### Usage -```javascript +```python +# Instantiate a new client +import stream +client = stream.connect('YOUR_API_KEY', 'API_KEY_SECRET') + +# INstantiate a new client specifying datacenter location +client = stream.connect('YOUR_API_KEY', 'API_KEY_SECRET', location='us-east') +# Find your API keys here https://getstream.io/dashboard/ + +# Instantiate a feed object +user_feed_1 = client.feed('user', '1') + +# Get activities from 5 to 10 (slow pagination) +result = user_feed_1.get(limit=5, offset=5) +# (Recommended & faster) Filter on an id less than the given UUID +result = user_feed_1.get(limit=5, id_lt="e561de8f-00f1-11e4-b400-0cc47a024be0") + +# Create a new activity +activity_data = {'actor': 1, 'verb': 'tweet', 'object': 1, 'foreign_id': 'tweet:1'} +activity_response = user_feed_1.add_activity(activity_data) +# Create a bit more complex activity +activity_data = {'actor': 1, 'verb': 'run', 'object': 1, 'foreign_id': 'run:1', + 'course': {'name': 'Golden Gate park', 'distance': 10}, + 'participants': ['Thierry', 'Tommaso'], + 'started_at': datetime.datetime.now() +} +user_feed_1.add_activity(activity_data) + +# Remove an activity by its id +user_feed_1.remove_activity("e561de8f-00f1-11e4-b400-0cc47a024be0") +# or by foreign id +user_feed_1.remove_activity(foreign_id='tweet:1') + +# Follow another feed +user_feed_1.follow('flat', '42') + +# Stop following another feed +user_feed_1.unfollow('flat', '42') + +# List followers/following +following = user_feed_1.following(offset=0, limit=2) +followers = user_feed_1.followers(offset=0, limit=10) + +# Creates many follow relationships in one request +follows = [ + {'source': 'flat:1', 'target': 'user:1'}, + {'source': 'flat:1', 'target': 'user:2'}, + {'source': 'flat:1', 'target': 'user:3'} +] +client.follow_many(follows) + +# Batch adding activities +activities = [ + {'actor': 1, 'verb': 'tweet', 'object': 1}, + {'actor': 2, 'verb': 'watch', 'object': 3} +] +user_feed_1.add_activities(activities) + +# Add an activity and push it to other feeds too using the `to` field 
+activity = { + "actor":"1", + "verb":"like", + "object":"3", + "to":["user:44", "user:45"] +} +user_feed_1.add_activity(activity) + +# Retrieve an activity by its ID +client.get_activities(ids=[activity_id]) + +# Retrieve an activity by the combination of foreign_id and time +client.get_activities(foreign_id_times=[ + (foreign_id, activity_time), +]) + +# Update some parts of an activity with activity_partial_update +set = { + 'product.name': 'boots', + 'colors': { + 'red': '0xFF0000', + 'green': '0x00FF00' + } +} +unset = [ 'popularity', 'details.info' ] +# ...by ID +client.activity_partial_update(id=activity_id, set=set, unset=unset) +# ...or by combination of foreign_id and time +client.activity_partial_update(foreign_id=foreign_id, time=activity_time, set=set, unset=unset) + +# Generating tokens for client side usage (JS client) +token = user_feed_1.token +# Javascript client side feed initialization +# user1 = client.feed('user', '1', '{{ token }}'); + +# Generate a read-only token for client side usage (JS client) +readonly_token = user_feed_1.get_readonly_token() +# Javascript client side feed initialization +# user1 = client.feed('user', '1', '{{ readonly_token }}'); + +# Generate a redirect url for the Stream Analytics platform to track +# events/impressions on url clicks +impression = { + 'content_list': ['tweet:1', 'tweet:2', 'tweet:3'], + 'user_data': 'tommaso', + 'location': 'email', + 'feed_id': 'user:global' +} + +engagement = { + 'content': 'tweet:2', + 'label': 'click', + 'position': 1, + 'user_data': 'tommaso', + 'location': 'email', + 'feed_id': + 'user:global' +} + +events = [impression, engagement] + +redirect_url = client.create_redirect_url('http://google.com/', 'user_id', events) ``` +[JS client](http://github.com/getstream/stream-js). + +### Contributing + +First, make sure you can run the test suite. Tests are run via py.test + +```bash +py.test +# with coverage +py.test --cov stream --cov-report html +# against a local API backend +LOCAL=true py.test +``` + +### Releasing a new version + +In order to release new version you need to be a maintainer on Pypi. + +- Update CHANGELOG +- Update the version on setup.py +- Commit and push to Github +- Create a new tag for the version (eg. `v2.9.0`) +- Create a new dist with python `python setup.py sdist` +- Upload the new distributable with wine `twine upload dist/stream-python-VERSION-NAME.tar.gz` + +If unsure you can also test using the Pypi test servers `twine upload --repository-url https://test.pypi.org/legacy/ dist/stream-python-VERSION-NAME.tar.gz` + ### Copyright and License Information -Copyright (c) 2015-2018 Stream.io Inc, and individual contributors. All rights reserved. +Copyright (c) 2014-2017 Stream.io Inc, and individual contributors. All rights reserved. See the file "LICENSE" for information on the history of this software, terms & conditions for usage, and a DISCLAIMER OF ALL WARRANTIES. 
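The README restored above shows `activity_partial_update` but not what `set` and `unset` actually do to the stored activity, so here is a minimal sketch of the semantics implemented in PATCH 099. The activity payload, ID, and key names are illustrative only, not taken from a real feed.

```python
# Minimal sketch of activity_partial_update set/unset semantics (illustrative data).
import stream

client = stream.connect('YOUR_API_KEY', 'API_KEY_SECRET')

# Suppose the stored activity currently contains:
#   {'product': {'name': 'shoes', 'price': 9.99, 'color': 'blue'}, 'popularity': 10, ...}
client.activity_partial_update(
    id='e561de8f-00f1-11e4-b400-0cc47a024be0',             # example activity ID
    set={'product.name': 'boots', 'product.price': 7.99},  # dotted paths address nested keys
    unset=['product.color', 'popularity'],                  # listed keys are removed entirely
)

# Afterwards the activity reads:
#   {'product': {'name': 'boots', 'price': 7.99}, ...}
# 'product.color' and the top-level 'popularity' are gone; untouched keys are preserved.
```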
From c1afb9384c7823dad1ed41b01e454855715163f5 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Thu, 23 Aug 2018 15:22:06 +0200 Subject: [PATCH 107/208] create_reference and create_user_reference helpers --- stream/collections.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/stream/collections.py b/stream/collections.py index 55a5599..4c7a229 100644 --- a/stream/collections.py +++ b/stream/collections.py @@ -10,6 +10,12 @@ def __init__(self, client, token): self.client = client self.token = token + def create_reference(self, collection_name, id): + return "SO:%s:%s" % (collection_name, id) + + def create_user_reference(self, id): + return self.create_reference("user", id) + def upsert(self, collection_name, data): """ "Insert new or update existing data. From 1c83cda218564efd091d6ad932258584f9af8c77 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Thu, 23 Aug 2018 15:22:08 +0200 Subject: [PATCH 108/208] create_reference and create_user_reference helpers --- stream/collections.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/stream/collections.py b/stream/collections.py index 55a5599..4c7a229 100644 --- a/stream/collections.py +++ b/stream/collections.py @@ -10,6 +10,12 @@ def __init__(self, client, token): self.client = client self.token = token + def create_reference(self, collection_name, id): + return "SO:%s:%s" % (collection_name, id) + + def create_user_reference(self, id): + return self.create_reference("user", id) + def upsert(self, collection_name, data): """ "Insert new or update existing data. From 32c9e77d088650404404f23b519e03551bc035af Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Thu, 23 Aug 2018 15:35:22 +0200 Subject: [PATCH 109/208] add some basic tests --- stream/tests/test_client.py | 100 ++++++++++++++++++------------------ 1 file changed, 49 insertions(+), 51 deletions(-) diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index 6dbb4ec..11d0eb3 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -944,7 +944,14 @@ def test_serialization(self): today = datetime.date.today() then = datetime.datetime.now().replace(microsecond=0) now = datetime.datetime.now() - data = dict(string="string", float=0.1, int=1, date=today, datetime=now, datetimenomicro=then) + data = dict( + string="string", + float=0.1, + int=1, + date=today, + datetime=now, + datetimenomicro=then, + ) serialized = serializer.dumps(data) loaded = serializer.loads(serialized) self.assertEqual(data, loaded) @@ -1128,64 +1135,55 @@ def test_get_activities_full(self): def test_activity_partial_update(self): now = datetime.datetime.utcnow() - feed = self.c.feed('user', uuid4()) - feed.add_activity({ - "actor": "barry", - "object": "09", - "verb": "tweet", - "time": now, - "foreign_id": 'fid:123', - 'product': { - 'name': 'shoes', - 'price': 9.99, - 'color': 'blue' + feed = self.c.feed("user", uuid4()) + feed.add_activity( + { + "actor": "barry", + "object": "09", + "verb": "tweet", + "time": now, + "foreign_id": "fid:123", + "product": {"name": "shoes", "price": 9.99, "color": "blue"}, } - }) - activity = feed.get()['results'][0] + ) + activity = feed.get()["results"][0] set = { - 'product.name': 'boots', - 'product.price': 7.99, - 'popularity': 1000, - 'foo': { - 'bar': { - 'baz': 'qux', - } - } + "product.name": "boots", + "product.price": 7.99, + "popularity": 1000, + "foo": {"bar": {"baz": "qux"}}, } - unset = [ 'product.color' ] + unset = ["product.color"] # partial update by ID - 
self.c.activity_partial_update(id=activity['id'], set=set, unset=unset) - updated = feed.get()['results'][0] + self.c.activity_partial_update(id=activity["id"], set=set, unset=unset) + updated = feed.get()["results"][0] expected = activity - expected['product'] = { - 'name': 'boots', - 'price': 7.99 - } - expected['popularity'] = 1000 - expected['foo'] = { - 'bar': { - 'baz': 'qux' - } - } + expected["product"] = {"name": "boots", "price": 7.99} + expected["popularity"] = 1000 + expected["foo"] = {"bar": {"baz": "qux"}} self.assertEqual(updated, expected) # partial update by foreign ID + time - set = { - 'foo.bar.baz': 42, - 'popularity': 9000 - } - unset = [ 'product.price' ] - self.c.activity_partial_update(foreign_id=activity['foreign_id'], time=activity['time'], set=set, unset=unset) - updated = feed.get()['results'][0] - expected['product'] = { - 'name': 'boots' - } - expected['foo'] = { - 'bar': { - 'baz': 42 - } - } - expected['popularity'] = 9000 + set = {"foo.bar.baz": 42, "popularity": 9000} + unset = ["product.price"] + self.c.activity_partial_update( + foreign_id=activity["foreign_id"], + time=activity["time"], + set=set, + unset=unset, + ) + updated = feed.get()["results"][0] + expected["product"] = {"name": "boots"} + expected["foo"] = {"bar": {"baz": 42}} + expected["popularity"] = 9000 self.assertEqual(updated, expected) + + def test_create_reference(self): + ref = self.c.collections.create_reference("item", "42") + self.assertEqual(ref, "SO:item:42") + + def test_create_user_reference(self): + ref = self.c.collections.create_user_reference("42") + self.assertEqual(ref, "SO:user:42") From 4bdf369e3ae86d801f87a979725c55d2f1480caa Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Thu, 23 Aug 2018 15:36:40 +0200 Subject: [PATCH 110/208] 2.11.0 --- CHANGELOG | 8 ++++++++ stream/__init__.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/CHANGELOG b/CHANGELOG index 918799d..14e207c 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,14 @@ Change history ================ +====== +2.11.0 +====== +:release-date: 2017-08-23 +:by: Tommaso Barbugli + +Add collection helpers to create refs + ====== 2.10.0 ====== diff --git a/stream/__init__.py b/stream/__init__.py index 3831f9b..b7fa730 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = 'Copyright 2014, Stream.io, Inc' __credits__ = ['Thierry Schellenbach, mellowmorning.com, @tschellenbach'] __license__ = 'BSD-3-Clause' -__version__ = '2.10.0' +__version__ = '2.11.0' __maintainer__ = 'Thierry Schellenbach' __email__ = 'support@getstream.io' __status__ = 'Production' From 2db98f61be524d5e9681a54df8e3d87a519240a7 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Tue, 4 Sep 2018 15:51:28 +0200 Subject: [PATCH 111/208] Update .travis.yml --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 8aae191..27cd45e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,7 +6,7 @@ python: - 3.4 - 3.5 - 3.6 - - 3.7-dev + - 3.7 matrix: fast_finish: true From 20b98619b6373ac640d5ebbe8d10ffbbc2369853 Mon Sep 17 00:00:00 2001 From: Peter van Kampen Date: Mon, 8 Oct 2018 22:52:07 +0200 Subject: [PATCH 112/208] Add user-session-token support --- stream/client.py | 11 +++++++++++ stream/tests/test_client.py | 12 ++++++++++++ 2 files changed, 23 insertions(+) diff --git a/stream/client.py b/stream/client.py index 9240399..cc18100 100644 --- a/stream/client.py +++ b/stream/client.py @@ -174,6 +174,17 @@ def _make_signed_request(self, 
method_name, relative_url, params=None, data=None response.url, headers, data) return self._parse_response(response) + def create_user_session_token(self, user_id, **extra_data): + '''Setup the payload for the given user_id with optional + extra data (key, value pairs) and encode it using jwt + ''' + payload = { + 'user_id': user_id, + } + for k, v in extra_data.items(): + payload[k] = v + return jwt.encode(payload, self.api_secret).decode("utf-8") + def create_jwt_token(self, resource, action, feed_id=None, user_id=None): ''' Setup the payload for the given resource, action, feed or user diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index 11d0eb3..991f3b3 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -372,6 +372,18 @@ def test_token_retrieval(self): self.user1.token self.user1.get_readonly_token() + def test_user_session_token(self): + client = stream.connect(self.c.api_key, self.c.api_secret) + token = client.create_user_session_token("user") + self.assertEqual(token[:59], "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1c2VyX2lkIjoidXNlci") + payload = jwt.decode(token, self.c.api_secret) + self.assertEqual(payload["user_id"], "user") + token = client.create_user_session_token("user", client="python", testing=True) + self.assertEqual(token[:59], "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1c2VyX2lkIjoidXNlci") + payload = jwt.decode(token, self.c.api_secret) + self.assertEqual(payload["client"], "python") + self.assertEqual(payload["testing"], True) + def test_add_activity(self): feed = getfeed("user", "py1") activity_data = {"actor": 1, "verb": "tweet", "object": 1} From f556de0ad23903c4b64184175783de3bc6874c5d Mon Sep 17 00:00:00 2001 From: Peter van Kampen Date: Mon, 8 Oct 2018 23:02:04 +0200 Subject: [PATCH 113/208] prepare release --- CHANGELOG | 8 ++++++++ stream/__init__.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/CHANGELOG b/CHANGELOG index 14e207c..c186807 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,14 @@ Change history ================ +====== +2.12.0 +====== +:release-date: 2018-10-08 +:by: Peter van Kampen + +Add user-session-token support + ====== 2.11.0 ====== diff --git a/stream/__init__.py b/stream/__init__.py index b7fa730..f114681 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = 'Copyright 2014, Stream.io, Inc' __credits__ = ['Thierry Schellenbach, mellowmorning.com, @tschellenbach'] __license__ = 'BSD-3-Clause' -__version__ = '2.11.0' +__version__ = '2.12.0' __maintainer__ = 'Thierry Schellenbach' __email__ = 'support@getstream.io' __status__ = 'Production' From 8c168d6b349a6f063023d70522e845c85a99c719 Mon Sep 17 00:00:00 2001 From: Peter van Kampen Date: Mon, 8 Oct 2018 23:47:45 +0200 Subject: [PATCH 114/208] ensure jwt algorithm is HS256 --- stream/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stream/client.py b/stream/client.py index cc18100..82025e4 100644 --- a/stream/client.py +++ b/stream/client.py @@ -183,7 +183,7 @@ def create_user_session_token(self, user_id, **extra_data): } for k, v in extra_data.items(): payload[k] = v - return jwt.encode(payload, self.api_secret).decode("utf-8") + return jwt.encode(payload, self.api_secret, algorithm='HS256').decode("utf-8") def create_jwt_token(self, resource, action, feed_id=None, user_id=None): ''' From 2591d221892de3c71fcd60bea784c26538bb39b5 Mon Sep 17 00:00:00 2001 From: Peter van Kampen Date: Tue, 9 Oct 2018 00:35:00 +0200 Subject: [PATCH 115/208] fix for python2.6 
--- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index 1d09607..b1ca294 100644 --- a/setup.py +++ b/setup.py @@ -31,6 +31,7 @@ if sys.version_info < (2, 7, 0): install_requires.append('pyOpenSSL<18.0.0') install_requires.append('pyjwt>=1.3.0,<1.6.0') + install_requires.append('pycparser<2.19') else: install_requires.append('pyjwt>=1.3.0,<1.7.0') From a5e2f37cc8d51ee71d78daf2fcda0c14d2d76502 Mon Sep 17 00:00:00 2001 From: Peter van Kampen Date: Tue, 9 Oct 2018 00:36:52 +0200 Subject: [PATCH 116/208] only test relevant outcome --- stream/tests/test_client.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index 991f3b3..baa1a9e 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -375,11 +375,9 @@ def test_token_retrieval(self): def test_user_session_token(self): client = stream.connect(self.c.api_key, self.c.api_secret) token = client.create_user_session_token("user") - self.assertEqual(token[:59], "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1c2VyX2lkIjoidXNlci") payload = jwt.decode(token, self.c.api_secret) self.assertEqual(payload["user_id"], "user") token = client.create_user_session_token("user", client="python", testing=True) - self.assertEqual(token[:59], "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1c2VyX2lkIjoidXNlci") payload = jwt.decode(token, self.c.api_secret) self.assertEqual(payload["client"], "python") self.assertEqual(payload["testing"], True) From b46b50bd9f400f24273c424efce40cfbc8403c90 Mon Sep 17 00:00:00 2001 From: Peter van Kampen Date: Tue, 9 Oct 2018 00:44:53 +0200 Subject: [PATCH 117/208] fix for python3.7 --- .travis.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 27cd45e..b292747 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,10 +6,13 @@ python: - 3.4 - 3.5 - 3.6 - - 3.7 matrix: fast_finish: true + include: + - python: 3.7 + dist: xenial + sudo: true cache: pip From 23f52d0636b776f5d8162650f5aa9fef5bde442d Mon Sep 17 00:00:00 2001 From: Horatiu Ion Date: Wed, 14 Nov 2018 11:30:43 +0100 Subject: [PATCH 118/208] proper none check --- stream/client.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/stream/client.py b/stream/client.py index 82025e4..a773d62 100644 --- a/stream/client.py +++ b/stream/client.py @@ -78,7 +78,7 @@ def __init__(self, api_key, api_secret, app_id, version='v1.0', timeout=6.0, bas self.session = requests.Session() self.auth = HTTPSignatureAuth(api_key, secret=api_secret) - + # setup personalization from stream.personalization import Personalization token = self.create_jwt_token('personalization', '*', feed_id='*', user_id='*') @@ -87,7 +87,7 @@ def __init__(self, api_key, api_secret, app_id, version='v1.0', timeout=6.0, bas from stream.collections import Collections token = self.create_jwt_token('collections', '*', feed_id='*', user_id='*') self.collections = Collections(self, token) - + def feed(self, feed_slug, user_id): ''' @@ -214,7 +214,7 @@ def _make_request(self, method, relative_url, signature, service_name='api', par relative_url += '/' url = self.get_full_url(service_name, relative_url) - + if method.__name__ in ['post', 'put', 'delete']: serialized = serializer.dumps(data) response = method(url, data=serialized, headers=headers, @@ -376,12 +376,12 @@ def activity_partial_update(self, id=None, foreign_id=None, time=None, set={}, u 'unset': unset, } - if id: + if id is not None: data['id'] = id else: data['foreign_id'] = foreign_id 
data['time'] = time - + return self.post('activity/', auth_token, data=data) def create_redirect_url(self, target_url, user_id, events): From 45a7322e5cf72365f81fa1784d1793ed72311acf Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Thu, 29 Nov 2018 17:50:26 +0100 Subject: [PATCH 119/208] add support for reaction endpoints --- stream/client.py | 306 +++++++++++++++++++++--------------- stream/reactions.py | 65 ++++++++ stream/tests/test_client.py | 40 +++++ 3 files changed, 287 insertions(+), 124 deletions(-) create mode 100644 stream/reactions.py diff --git a/stream/client.py b/stream/client.py index a773d62..afea61a 100644 --- a/stream/client.py +++ b/stream/client.py @@ -12,6 +12,9 @@ from stream.utils import validate_feed_slug, validate_user_id, validate_foreign_id_time from stream.httpsig.requests_auth import HTTPSignatureAuth from requests import Request +from stream.reactions import Reactions +from stream.collections import Collections +from stream.personalization import Personalization try: from urllib.parse import urlparse @@ -22,9 +25,17 @@ class StreamClient(object): - - def __init__(self, api_key, api_secret, app_id, version='v1.0', timeout=6.0, base_url=None, location=None): - ''' + def __init__( + self, + api_key, + api_secret, + app_id, + version="v1.0", + timeout=6.0, + base_url=None, + location=None, + ): + """ Initialize the client with the given api key and secret :param api_key: the api key @@ -47,7 +58,7 @@ def __init__(self, api_key, api_secret, app_id, version='v1.0', timeout=6.0, bas activities = feed.get() feed.unfollow('flat:3') feed.remove_activity(activity_id) - ''' + """ self.api_key = api_key self.api_secret = api_secret self.app_id = app_id @@ -55,14 +66,14 @@ def __init__(self, api_key, api_secret, app_id, version='v1.0', timeout=6.0, bas self.timeout = timeout self.location = location - self.base_domain_name = 'stream-io-api.com' + self.base_domain_name = "stream-io-api.com" self.api_location = location self.custom_api_port = None - self.protocol = 'https' + self.protocol = "https" - if os.environ.get('LOCAL'): - self.base_domain_name = 'localhost' - self.protocol = 'http' + if os.environ.get("LOCAL"): + self.base_domain_name = "localhost" + self.protocol = "http" self.custom_api_port = 8000 self.timeout = 20 elif base_url is not None: @@ -74,74 +85,79 @@ def __init__(self, api_key, api_secret, app_id, version='v1.0', timeout=6.0, bas elif location is not None: self.location = location - self.base_analytics_url = 'https://analytics.stream-io-api.com/analytics/' + self.base_analytics_url = "https://analytics.stream-io-api.com/analytics/" self.session = requests.Session() self.auth = HTTPSignatureAuth(api_key, secret=api_secret) - # setup personalization - from stream.personalization import Personalization - token = self.create_jwt_token('personalization', '*', feed_id='*', user_id='*') + token = self.create_jwt_token("personalization", "*", feed_id="*", user_id="*") self.personalization = Personalization(self, token) - # setup the collection - from stream.collections import Collections - token = self.create_jwt_token('collections', '*', feed_id='*', user_id='*') + + token = self.create_jwt_token("collections", "*", feed_id="*", user_id="*") self.collections = Collections(self, token) + token = self.create_jwt_token("reactions", "*", feed_id="*") + self.reactions = Reactions(self, token) def feed(self, feed_slug, user_id): - ''' + """ Returns a Feed object :param feed_slug: the slug of the feed :param user_id: the user id - ''' + """ from stream.feed 
import Feed + feed_slug = validate_feed_slug(feed_slug) user_id = validate_user_id(user_id) # generate the token - feed_id = '%s%s' % (feed_slug, user_id) + feed_id = "%s%s" % (feed_slug, user_id) token = sign(self.api_secret, feed_id) return Feed(self, feed_slug, user_id, token) def get_default_params(self): - ''' + """ Returns the params with the API key present - ''' + """ params = dict(api_key=self.api_key) return params def get_default_header(self): base_headers = { - 'Content-type': 'application/json', - 'X-Stream-Client': self.get_user_agent() + "Content-type": "application/json", + "X-Stream-Client": self.get_user_agent(), } return base_headers def get_full_url(self, service_name, relative_url): if self.api_location: - hostname = '%s-%s.%s' % (self.api_location, service_name, self.base_domain_name) + hostname = "%s-%s.%s" % ( + self.api_location, + service_name, + self.base_domain_name, + ) elif service_name: - hostname = '%s.%s' % (service_name, self.base_domain_name) + hostname = "%s.%s" % (service_name, self.base_domain_name) else: hostname = self.base_domain_name - if self.base_domain_name == 'localhost': - hostname = 'localhost' + if self.base_domain_name == "localhost": + hostname = "localhost" base_url = "%s://%s" % (self.protocol, hostname) if self.custom_api_port: base_url = "%s:%s" % (base_url, self.custom_api_port) - url = base_url + '/' + service_name + '/' + self.version + '/' + relative_url + url = base_url + "/" + service_name + "/" + self.version + "/" + relative_url return url def get_user_agent(self): from stream import __version__ - agent = 'stream-python-client-%s' % __version__ + + agent = "stream-python-client-%s" % __version__ return agent def _parse_response(self, response): @@ -149,7 +165,11 @@ def _parse_response(self, response): parsed_result = serializer.loads(response.text) except ValueError: parsed_result = None - if parsed_result is None or parsed_result.get('exception') or response.status_code >= 500: + if ( + parsed_result is None + or parsed_result.get("exception") + or response.status_code >= 500 + ): self.raise_exception(parsed_result, status_code=response.status_code) return parsed_result @@ -158,78 +178,95 @@ def _make_signed_request(self, method_name, relative_url, params=None, data=None data = data or {} serialized = None headers = self.get_default_header() - headers['X-Api-Key'] = self.api_key - date_header = datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT') - headers['Date'] = date_header + headers["X-Api-Key"] = self.api_key + date_header = datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S GMT") + headers["Date"] = date_header default_params = self.get_default_params() default_params.update(params) - url = self.get_full_url('api', relative_url) + url = self.get_full_url("api", relative_url) serialized = serializer.dumps(data) method = getattr(self.session, method_name) - if method_name in ['post', 'put']: + if method_name in ["post", "put"]: serialized = serializer.dumps(data) - response = method(url, auth=self.auth, data=serialized, headers=headers, - params=default_params, timeout=self.timeout) - logger.debug('stream api call %s, headers %s data %s', - response.url, headers, data) + response = method( + url, + auth=self.auth, + data=serialized, + headers=headers, + params=default_params, + timeout=self.timeout, + ) + logger.debug( + "stream api call %s, headers %s data %s", response.url, headers, data + ) return self._parse_response(response) def create_user_session_token(self, user_id, **extra_data): - '''Setup the payload for the 
given user_id with optional + """Setup the payload for the given user_id with optional extra data (key, value pairs) and encode it using jwt - ''' - payload = { - 'user_id': user_id, - } + """ + payload = {"user_id": user_id} for k, v in extra_data.items(): payload[k] = v - return jwt.encode(payload, self.api_secret, algorithm='HS256').decode("utf-8") + return jwt.encode(payload, self.api_secret, algorithm="HS256").decode("utf-8") def create_jwt_token(self, resource, action, feed_id=None, user_id=None): - ''' + """ Setup the payload for the given resource, action, feed or user and encode it using jwt - ''' - payload = { - 'action': action, - 'resource': resource - } + """ + payload = {"action": action, "resource": resource} if feed_id is not None: - payload['feed_id'] = feed_id + payload["feed_id"] = feed_id if user_id is not None: - payload['user_id'] = user_id + payload["user_id"] = user_id return jwt.encode(payload, self.api_secret).decode("utf-8") - def _make_request(self, method, relative_url, signature, service_name='api', params=None, data=None): + def _make_request( + self, + method, + relative_url, + signature, + service_name="api", + params=None, + data=None, + ): params = params or {} data = data or {} serialized = None default_params = self.get_default_params() default_params.update(params) headers = self.get_default_header() - headers['Authorization'] = signature - headers['stream-auth-type'] = 'jwt' + headers["Authorization"] = signature + headers["stream-auth-type"] = "jwt" - if not relative_url.endswith('/'): - relative_url += '/' + if not relative_url.endswith("/"): + relative_url += "/" url = self.get_full_url(service_name, relative_url) - if method.__name__ in ['post', 'put', 'delete']: + if method.__name__ in ["post", "put", "delete"]: serialized = serializer.dumps(data) - response = method(url, data=serialized, headers=headers, - params=default_params, timeout=self.timeout) - logger.debug('stream api call %s, headers %s data %s', - response.url, headers, data) + response = method( + url, + data=serialized, + headers=headers, + params=default_params, + timeout=self.timeout, + ) + logger.debug( + "stream api call %s, headers %s data %s", response.url, headers, data + ) return self._parse_response(response) def raise_exception(self, result, status_code): - ''' + """ Map the exception code to an exception class and raise it If result.exception and result.detail are available use that Otherwise just raise a generic error - ''' + """ from stream.exceptions import get_exception_dict + exception_class = exceptions.StreamApiException def errors_from_fields(exception_fields): @@ -242,119 +279,137 @@ def errors_from_fields(exception_fields): return result if result is not None: - error_message = result['detail'] - exception_fields = result.get('exception_fields') + error_message = result["detail"] + exception_fields = result.get("exception_fields") if exception_fields is not None: errors = [] if isinstance(exception_fields, list): - errors = [errors_from_fields(exception_dict) for exception_dict in exception_fields] + errors = [ + errors_from_fields(exception_dict) + for exception_dict in exception_fields + ] errors = [item for sublist in errors for item in sublist] else: errors = errors_from_fields(exception_fields) - error_message = '\n'.join(errors) - error_code = result.get('code') + error_message = "\n".join(errors) + error_code = result.get("code") exception_dict = get_exception_dict() exception_class = exception_dict.get( - error_code, exceptions.StreamApiException) + 
error_code, exceptions.StreamApiException + ) else: - error_message = 'GetStreamAPI%s' % status_code + error_message = "GetStreamAPI%s" % status_code exception = exception_class(error_message, status_code=status_code) raise exception + def put(self, *args, **kwargs): + """ + Shortcut for make request + """ + return self._make_request(self.session.put, *args, **kwargs) + def post(self, *args, **kwargs): - ''' + """ Shortcut for make request - ''' + """ return self._make_request(self.session.post, *args, **kwargs) def get(self, *args, **kwargs): - ''' + """ Shortcut for make request - ''' + """ return self._make_request(self.session.get, *args, **kwargs) def delete(self, *args, **kwargs): - ''' + """ Shortcut for make request - ''' + """ return self._make_request(self.session.delete, *args, **kwargs) def add_to_many(self, activity, feeds): - ''' + """ Adds an activity to many feeds :param activity: the activity data :param feeds: the list of follows (eg. ['feed:1', 'feed:2']) - ''' - data = {'activity': activity, 'feeds': feeds} - return self._make_signed_request('post', 'feed/add_to_many/', data=data) + """ + data = {"activity": activity, "feeds": feeds} + return self._make_signed_request("post", "feed/add_to_many/", data=data) def follow_many(self, follows, activity_copy_limit=None): - ''' + """ Creates many follows :param follows: the list of follow relations eg. [{'source': source, 'target': target}] - ''' + """ params = None if activity_copy_limit != None: params = dict(activity_copy_limit=activity_copy_limit) - return self._make_signed_request('post', 'follow_many/', params=params, data=follows) + return self._make_signed_request( + "post", "follow_many/", params=params, data=follows + ) def update_activities(self, activities): - ''' + """ Update or create activities - ''' + """ if not isinstance(activities, (list, tuple, set)): - raise TypeError('Activities parameter should be of type list') + raise TypeError("Activities parameter should be of type list") - auth_token = self.create_jwt_token('activities', '*', feed_id='*') + auth_token = self.create_jwt_token("activities", "*", feed_id="*") data = dict(activities=activities) - return self.post('activities/', auth_token, data=data) + return self.post("activities/", auth_token, data=data) def update_activity(self, activity): - ''' + """ Update a single activity - ''' + """ return self.update_activities([activity]) def get_activities(self, ids=None, foreign_id_times=None): - ''' + """ Retrieves activities by their ID or foreign_id + time combination ids: list of activity IDs foreign_id_time: list of tuples (foreign_id, time) - ''' - auth_token = self.create_jwt_token('activities', '*', feed_id='*') + """ + auth_token = self.create_jwt_token("activities", "*", feed_id="*") if ids is None and foreign_id_times is None: - raise TypeError('One the parameters ids or foreign_id_time must be provided and not None') + raise TypeError( + "One the parameters ids or foreign_id_time must be provided and not None" + ) if ids is not None and foreign_id_times is not None: - raise TypeError('At most one of the parameters ids or foreign_id_time must be provided') + raise TypeError( + "At most one of the parameters ids or foreign_id_time must be provided" + ) query_params = {} if ids is not None: - query_params['ids'] = ','.join(ids) + query_params["ids"] = ",".join(ids) if foreign_id_times is not None: validate_foreign_id_time(foreign_id_times) foreign_ids, timestamps = zip(*foreign_id_times) timestamps = map(_datetime_encoder, timestamps) - 
query_params['foreign_ids'] = ','.join(foreign_ids) - query_params['timestamps'] = ','.join(timestamps) + query_params["foreign_ids"] = ",".join(foreign_ids) + query_params["timestamps"] = ",".join(timestamps) - return self.get('activities/', auth_token, params=query_params) + return self.get("activities/", auth_token, params=query_params) - def activity_partial_update(self, id=None, foreign_id=None, time=None, set={}, unset=[]): - ''' + def activity_partial_update( + self, id=None, foreign_id=None, time=None, set={}, unset=[] + ): + """ Partial update activity, via foreign ID or Foreign ID + timestamp id: the activity ID @@ -362,45 +417,48 @@ def activity_partial_update(self, id=None, foreign_id=None, time=None, set={}, u time: the activity time set: object containing the set operations unset: list of unset operations - ''' + """ - auth_token = self.create_jwt_token('activities', '*', feed_id='*') + auth_token = self.create_jwt_token("activities", "*", feed_id="*") if id is None and (foreign_id is None or time is None): - raise TypeError('The id or foreign_id+time parameters must be provided and not be None') + raise TypeError( + "The id or foreign_id+time parameters must be provided and not be None" + ) if id is not None and (foreign_id is not None or time is not None): - raise TypeError('Only one of the id or the foreign_id+time parameters can be provided') + raise TypeError( + "Only one of the id or the foreign_id+time parameters can be provided" + ) - data = { - 'set': set, - 'unset': unset, - } + data = {"set": set, "unset": unset} if id is not None: - data['id'] = id + data["id"] = id else: - data['foreign_id'] = foreign_id - data['time'] = time + data["foreign_id"] = foreign_id + data["time"] = time - return self.post('activity/', auth_token, data=data) + return self.post("activity/", auth_token, data=data) def create_redirect_url(self, target_url, user_id, events): - ''' + """ Creates a redirect url for tracking the given events in the context of an email using Stream's analytics platform. 
Learn more at getstream.io/personalization - ''' + """ # generate the JWT token - auth_token = self.create_jwt_token('redirect_and_track', '*', '*', user_id=user_id) + auth_token = self.create_jwt_token( + "redirect_and_track", "*", "*", user_id=user_id + ) # setup the params - params = dict(auth_type='jwt', authorization=auth_token, url=target_url) - params['api_key'] = self.api_key - params['events'] = json.dumps(events) - url = self.base_analytics_url + 'redirect/' + params = dict(auth_type="jwt", authorization=auth_token, url=target_url) + params["api_key"] = self.api_key + params["events"] = json.dumps(events) + url = self.base_analytics_url + "redirect/" # we get the url from the prepare request, this skips issues with # python's urlencode implementation - request = Request('GET', url, params=params) + request = Request("GET", url, params=params) prepared_request = request.prepare() # validate the target url is valid - Request('GET', target_url).prepare() + Request("GET", target_url).prepare() return prepared_request.url diff --git a/stream/reactions.py b/stream/reactions.py new file mode 100644 index 0000000..47bee72 --- /dev/null +++ b/stream/reactions.py @@ -0,0 +1,65 @@ +class Reactions(object): + def __init__(self, client, token): + self.client = client + self.token = token + + def add(self, kind, activity_id, user_id, data=None, target_feeds=None): + payload = dict( + kind=kind, + activity_id=activity_id, + data=data, + target_feeds=target_feeds, + user_id=user_id, + ) + return self.client.post( + "reaction/", service_name="api", signature=self.token, data=payload + ) + + def get(self, reaction_id): + return self.client.get( + "reaction/%s" % reaction_id, service_name="api", signature=self.token + ) + + def update(self, reaction_id, data=None, target_feeds=None): + payload = dict(data=data, target_feeds=target_feeds) + return self.client.put( + "reaction/%s" % reaction_id, + service_name="api", + signature=self.token, + data=payload, + ) + + def delete(self, reaction_id): + return self.client.delete( + "reaction/%s" % reaction_id, service_name="api", signature=self.token + ) + + def add_child(self, kind, parent_id, user_id, data=None, target_feeds=None): + payload = dict( + kind=kind, + parent=parent_id, + data=data, + target_feeds=target_feeds, + user_id=user_id, + ) + return self.client.post( + "reaction/", service_name="api", signature=self.token, data=payload + ) + + def filter(self, **params): + lookup_field = "" + lookup_value = "" + + if "reaction_id" in params: + lookup_field = "reaction_id" + lookup_value = params.pop("reaction_id") + elif "activity_id" in params: + lookup_field = "activity_id" + lookup_value = params.pop("activity_id") + elif "user_id" in params: + lookup_field = "user_id" + lookup_value = params.pop("user_id") + + return self.client.get( + "reaction/%s/%s/" % (lookup_field, lookup_value), service_name="api", signature=self.token, params=params + ) diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index baa1a9e..e4a606b 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -1197,3 +1197,43 @@ def test_create_reference(self): def test_create_user_reference(self): ref = self.c.collections.create_user_reference("42") self.assertEqual(ref, "SO:user:42") + + def test_reaction_add(self): + self.c.reactions.add("like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike") + + def test_reaction_get(self): + response = self.c.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + 
self.c.reactions.get(response["reaction"]["id"]) + + def test_reaction_update(self): + response = self.c.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + self.c.reactions.update(response["reaction"]["id"], {"changed": True}) + + def test_reaction_delete(self): + response = self.c.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + self.c.reactions.delete(response["reaction"]["id"]) + + def test_reaction_add_child(self): + response = self.c.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + self.c.reactions.add_child("like", response["reaction"]["id"], "rob") + + def test_reaction_filter(self): + self.c.reactions.filter( + reaction_id="54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", + id_lte="54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", + ) + self.c.reactions.filter( + activity_id="54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", + id_lte="54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", + ) + self.c.reactions.filter( + user_id="mike", id_lte="54a60c1e-4ee3-494b-a1e3-50c06acb5ed4" + ) From b303ca0d866ff29f8346449011f35cd903c0ad07 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Fri, 30 Nov 2018 10:44:06 +0100 Subject: [PATCH 120/208] add more tests for reactions --- stream/tests/test_client.py | 50 +++++++++++++++++++++++++++++++++---- 1 file changed, 45 insertions(+), 5 deletions(-) diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index e4a606b..b423083 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -1205,27 +1205,36 @@ def test_reaction_get(self): response = self.c.reactions.add( "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" ) - self.c.reactions.get(response["reaction"]["id"]) + reaction = self.c.reactions.get(response["id"]) + self.assertEqual(reaction["parent"], "") + self.assertEqual(reaction["data"], {}) + self.assertEqual(reaction["latest_children"], {}) + self.assertEqual(reaction["children_counts"], {}) + self.assertEqual(reaction["activity_id"], "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4") + self.assertEqual(reaction["kind"], "like") + self.assertIn("created_at", reaction) + self.assertIn("updated_at", reaction) + self.assertIn("id", reaction) def test_reaction_update(self): response = self.c.reactions.add( "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" ) - self.c.reactions.update(response["reaction"]["id"], {"changed": True}) + self.c.reactions.update(response["id"], {"changed": True}) def test_reaction_delete(self): response = self.c.reactions.add( "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" ) - self.c.reactions.delete(response["reaction"]["id"]) + self.c.reactions.delete(response["id"]) def test_reaction_add_child(self): response = self.c.reactions.add( "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" ) - self.c.reactions.add_child("like", response["reaction"]["id"], "rob") + self.c.reactions.add_child("like", response["id"], "rob") - def test_reaction_filter(self): + def test_reaction_filter_random(self): self.c.reactions.filter( reaction_id="54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", id_lte="54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", @@ -1237,3 +1246,34 @@ def test_reaction_filter(self): self.c.reactions.filter( user_id="mike", id_lte="54a60c1e-4ee3-494b-a1e3-50c06acb5ed4" ) + + def _first_result_should_be(self, response, element): + el = element.copy() + el.pop('duration') + self.assertEqual(len(response["results"]), 1) + self.assertEqual(response["results"][0], el) + + def test_reaction_filter(self): + activity_id = str(uuid1()) + user = str(uuid1()) 
+ + response = self.c.reactions.add( + "like", activity_id, user + ) + child = self.c.reactions.add_child( + "like", response["id"], user + ) + reaction = self.c.reactions.get(response["id"]) + r = self.c.reactions.filter( + reaction_id=reaction["id"], + ) + self._first_result_should_be(r, child) + + r = self.c.reactions.filter( + activity_id=activity_id, + id_lte=reaction["id"], + ) + self._first_result_should_be(r, reaction) + + r = self.c.reactions.filter(user_id=user, id_lte=reaction["id"]) + self._first_result_should_be(r, reaction) From b267f54b01539eb343c22ec51845ce094091aba8 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Fri, 30 Nov 2018 11:30:41 +0100 Subject: [PATCH 121/208] add user endpoints --- stream/client.py | 4 +++ stream/tests/test_client.py | 58 +++++++++++++++++++++++++++---------- stream/users.py | 33 +++++++++++++++++++++ 3 files changed, 79 insertions(+), 16 deletions(-) create mode 100644 stream/users.py diff --git a/stream/client.py b/stream/client.py index afea61a..13d5563 100644 --- a/stream/client.py +++ b/stream/client.py @@ -9,6 +9,7 @@ from stream import exceptions, serializer from stream.signing import sign +from stream.users import Users from stream.utils import validate_feed_slug, validate_user_id, validate_foreign_id_time from stream.httpsig.requests_auth import HTTPSignatureAuth from requests import Request @@ -99,6 +100,9 @@ def __init__( token = self.create_jwt_token("reactions", "*", feed_id="*") self.reactions = Reactions(self, token) + token = self.create_jwt_token("users", "*", feed_id="*") + self.users = Users(self, token) + def feed(self, feed_slug, user_id): """ Returns a Feed object diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index b423083..5f9e82a 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -1,7 +1,7 @@ from dateutil.tz import tzlocal import stream import time -from stream.exceptions import ApiKeyException, InputException +from stream.exceptions import ApiKeyException, InputException, StreamApiException import random import jwt @@ -1210,7 +1210,9 @@ def test_reaction_get(self): self.assertEqual(reaction["data"], {}) self.assertEqual(reaction["latest_children"], {}) self.assertEqual(reaction["children_counts"], {}) - self.assertEqual(reaction["activity_id"], "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4") + self.assertEqual( + reaction["activity_id"], "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4" + ) self.assertEqual(reaction["kind"], "like") self.assertIn("created_at", reaction) self.assertIn("updated_at", reaction) @@ -1249,7 +1251,7 @@ def test_reaction_filter_random(self): def _first_result_should_be(self, response, element): el = element.copy() - el.pop('duration') + el.pop("duration") self.assertEqual(len(response["results"]), 1) self.assertEqual(response["results"][0], el) @@ -1257,23 +1259,47 @@ def test_reaction_filter(self): activity_id = str(uuid1()) user = str(uuid1()) - response = self.c.reactions.add( - "like", activity_id, user - ) - child = self.c.reactions.add_child( - "like", response["id"], user - ) + response = self.c.reactions.add("like", activity_id, user) + child = self.c.reactions.add_child("like", response["id"], user) reaction = self.c.reactions.get(response["id"]) - r = self.c.reactions.filter( - reaction_id=reaction["id"], - ) + r = self.c.reactions.filter(reaction_id=reaction["id"]) self._first_result_should_be(r, child) - r = self.c.reactions.filter( - activity_id=activity_id, - id_lte=reaction["id"], - ) + r = self.c.reactions.filter(activity_id=activity_id, 
id_lte=reaction["id"]) self._first_result_should_be(r, reaction) r = self.c.reactions.filter(user_id=user, id_lte=reaction["id"]) self._first_result_should_be(r, reaction) + + def test_user_add(self): + self.c.users.add(str(uuid1())) + + def test_user_add_twice(self): + user_id = str(uuid1()) + self.c.users.add(user_id) + with self.assertRaises(StreamApiException): + self.c.users.add(user_id) + + def test_user_add_get_or_create(self): + user_id = str(uuid1()) + r1 = self.c.users.add(user_id) + r2 = self.c.users.add(user_id, get_or_create=True) + self.assertEqual(r1["id"], r2["id"]) + self.assertEqual(r1["created_at"], r2["created_at"]) + self.assertEqual(r1["updated_at"], r2["updated_at"]) + + def test_user_get(self): + response = self.c.users.add(str(uuid1())) + user = self.c.users.get(response["id"]) + self.assertEqual(user["data"], {}) + self.assertIn("created_at", user) + self.assertIn("updated_at", user) + self.assertIn("id", user) + + def test_user_update(self): + response = self.c.users.add(str(uuid1())) + self.c.users.update(response["id"], {"changed": True}) + + def test_user_delete(self): + response = self.c.users.add(str(uuid1())) + self.c.users.delete(response["id"]) diff --git a/stream/users.py b/stream/users.py new file mode 100644 index 0000000..c1738a9 --- /dev/null +++ b/stream/users.py @@ -0,0 +1,33 @@ +class Users(object): + def __init__(self, client, token): + self.client = client + self.token = token + + def add(self, user_id, data=None, get_or_create=False): + payload = dict(id=user_id, data=data) + return self.client.post( + "user/", + service_name="api", + signature=self.token, + data=payload, + params={"get_or_create": get_or_create}, + ) + + def get(self, user_id): + return self.client.get( + "user/%s" % user_id, service_name="api", signature=self.token + ) + + def update(self, user_id, data=None): + payload = dict(data=data) + return self.client.put( + "user/%s" % user_id, + service_name="api", + signature=self.token, + data=payload, + ) + + def delete(self, user_id): + return self.client.delete( + "user/%s" % user_id, service_name="api", signature=self.token + ) From ca6674f5885e83d4a4dabdcf403ae500a808a8bb Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Fri, 30 Nov 2018 12:27:48 +0100 Subject: [PATCH 122/208] add CRUD methods for collections --- stream/collections.py | 38 +++++++++++++++++++++++++++++++++---- stream/tests/test_client.py | 30 +++++++++++++++++++++++++++++ 2 files changed, 64 insertions(+), 4 deletions(-) diff --git a/stream/collections.py b/stream/collections.py index 4c7a229..ba7ef14 100644 --- a/stream/collections.py +++ b/stream/collections.py @@ -33,7 +33,7 @@ def upsert(self, collection_name, data): data_json = {collection_name: data} - response = self.client.post('meta/', service_name='api', + response = self.client.post('collections/', service_name='api', signature=self.token, data={'data': data_json}) return response @@ -59,12 +59,12 @@ def select(self, collection_name, ids): foreign_ids.append('%s:%s' % (collection_name, ids[i])) foreign_ids = ','.join(foreign_ids) - response = self.client.get('meta/', service_name='api', params={'foreign_ids': foreign_ids}, + response = self.client.get('collections/', service_name='api', params={'foreign_ids': foreign_ids}, signature=self.token) return response - def delete(self, collection_name, ids): + def delete_many(self, collection_name, ids): """ Delete data from meta. 
:param collection_name: Collection Name i.e 'user' @@ -82,7 +82,37 @@ def delete(self, collection_name, ids): params = {'collection_name': collection_name, 'ids': ids} - response = self.client.delete('meta/', service_name='api', params=params, + response = self.client.delete('collections/', service_name='api', params=params, signature=self.token) return response + + def add(self, collection_name, data, id=None, user_id=None): + payload = dict( + id=id, data=data, user_id=user_id, + ) + return self.client.post( + "collections/%s" % collection_name, + service_name="api", + signature=self.token, + data=payload, + ) + + def get(self, collection_name, id): + return self.client.get( + "collections/%s/%s" % (collection_name, id), service_name="api", signature=self.token + ) + + def update(self, collection_name, id, data=None): + payload = dict(data=data) + return self.client.put( + "collections/%s/%s" % (collection_name, id), + service_name="api", + signature=self.token, + data=payload, + ) + + def delete(self, collection_name, id): + return self.client.delete( + "collections/%s/%s" % (collection_name, id), service_name="api", signature=self.token + ) diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index 5f9e82a..79566c3 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -1303,3 +1303,33 @@ def test_user_update(self): def test_user_delete(self): response = self.c.users.add(str(uuid1())) self.c.users.delete(response["id"]) + + def test_collections_add(self): + self.c.collections.add("items", {"data": 1}, id=str(uuid1()), user_id="tom") + + def test_collections_add_twice(self): + id = str(uuid1()) + r1 = self.c.collections.add("items", {"data": 1}, id=id) + r2 = self.c.collections.add("items", {"data": 2}, id=id) + entry = self.c.collections.get("items", id) + self.assertEqual(entry["data"], r2["data"]) + self.assertEqual(r1["created_at"], r2["created_at"]) + self.assertNotEqual(r1["updated_at"], r2["updated_at"]) + + def test_collections_get(self): + response = self.c.collections.add("items", {"data": 1}, id=str(uuid1())) + entry = self.c.collections.get("items", response["id"]) + self.assertEqual(entry["data"], {"data": 1}) + self.assertIn("created_at", entry) + self.assertIn("updated_at", entry) + self.assertIn("id", entry) + + def test_collections_update(self): + response = self.c.collections.add("items", {"data": 1}, str(uuid1())) + self.c.collections.update("items", response["id"], data={"changed": True}) + entry = self.c.collections.get("items", response["id"]) + self.assertEqual(entry["data"], {"changed": True}) + + def test_collections_delete(self): + response = self.c.collections.add("items", {"data": 1}, str(uuid1())) + self.c.collections.delete("items", response["id"]) From cfb69e244e181e4a5c459aaef5ddf83a7c4ae868 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Fri, 30 Nov 2018 12:31:25 +0100 Subject: [PATCH 123/208] fix one test --- stream/tests/test_client.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index 79566c3..0cc24dd 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -1307,14 +1307,14 @@ def test_user_delete(self): def test_collections_add(self): self.c.collections.add("items", {"data": 1}, id=str(uuid1()), user_id="tom") + def test_collections_add_no_id(self): + self.c.collections.add("items", {"data": 1}) + def test_collections_add_twice(self): id = str(uuid1()) - r1 = self.c.collections.add("items", 
{"data": 1}, id=id) - r2 = self.c.collections.add("items", {"data": 2}, id=id) - entry = self.c.collections.get("items", id) - self.assertEqual(entry["data"], r2["data"]) - self.assertEqual(r1["created_at"], r2["created_at"]) - self.assertNotEqual(r1["updated_at"], r2["updated_at"]) + self.c.collections.add("items", {"data": 1}, id=id) + with self.assertRaises(StreamApiException): + self.c.collections.add("items", {"data": 2}, id=id) def test_collections_get(self): response = self.c.collections.add("items", {"data": 1}, id=str(uuid1())) From 31420aef4427dd051dd30534de54eca09aabe71e Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Fri, 30 Nov 2018 14:21:09 +0100 Subject: [PATCH 124/208] add enrichment params for feed.get --- stream/collections.py | 8 +- stream/feed.py | 220 +++++++++++++++++++----------------- stream/tests/test_client.py | 81 ++++++++++++- stream/users.py | 6 + 4 files changed, 205 insertions(+), 110 deletions(-) diff --git a/stream/collections.py b/stream/collections.py index ba7ef14..c119e22 100644 --- a/stream/collections.py +++ b/stream/collections.py @@ -11,11 +11,11 @@ def __init__(self, client, token): self.token = token def create_reference(self, collection_name, id): - return "SO:%s:%s" % (collection_name, id) + _id = id + if isinstance(id, (dict,)) and id.get("id") is not None: + _id = id.get("id") + return "SO:%s:%s" % (collection_name, _id) - def create_user_reference(self, id): - return self.create_reference("user", id) - def upsert(self, collection_name, data): """ "Insert new or update existing data. diff --git a/stream/feed.py b/stream/feed.py index cdc5abb..2cc3ecd 100644 --- a/stream/feed.py +++ b/stream/feed.py @@ -2,41 +2,43 @@ class Feed(object): - def __init__(self, client, feed_slug, user_id, token): - ''' + """ Initializes the Feed class :param client: the api client :param slug: the slug of the feed, ie user, flat, notification :param user_id: the id of the user :param token: the token - ''' + """ self.client = client self.slug = feed_slug self.user_id = str(user_id) - self.id = '%s:%s' % (feed_slug, user_id) + self.id = "%s:%s" % (feed_slug, user_id) self.token = token - self.feed_url = 'feed/%s/' % self.id.replace(':', '/') - self.feed_targets_url = 'feed_targets/%s/' % self.id.replace(':', '/') - self.feed_together = self.id.replace(':', '') - self.signature = self.feed_together + ' ' + self.token + self.feed_url = "feed/%s/" % self.id.replace(":", "/") + self.enriched_feed_url = "enrich/feed/%s/" % self.id.replace(":", "/") + self.feed_targets_url = "feed_targets/%s/" % self.id.replace(":", "/") + self.feed_together = self.id.replace(":", "") + self.signature = self.feed_together + " " + self.token def create_scope_token(self, resource, action): - ''' + """ creates the JWT token to perform an action on a owned resource - ''' - return self.client.create_jwt_token(resource, action, feed_id=self.feed_together) + """ + return self.client.create_jwt_token( + resource, action, feed_id=self.feed_together + ) def get_readonly_token(self): - ''' + """ creates the JWT token to perform readonly operations - ''' - return self.create_scope_token('*', 'read') + """ + return self.create_scope_token("*", "read") def add_activity(self, activity_data): - ''' + """ Adds an activity to the feed, this will also trigger an update to all the feeds which follow this feed @@ -46,21 +48,24 @@ def add_activity(self, activity_data): activity_data = {'actor': 1, 'verb': 'tweet', 'object': 1} activity_id = feed.add_activity(activity_data) - ''' - if 
activity_data.get('to') and not isinstance(activity_data.get('to'), (list, tuple, set)): - raise TypeError('please provide the activity\'s to field as a list not a string') - - if activity_data.get('to'): + """ + if activity_data.get("to") and not isinstance( + activity_data.get("to"), (list, tuple, set) + ): + raise TypeError( + "please provide the activity's to field as a list not a string" + ) + + if activity_data.get("to"): activity_data = activity_data.copy() - activity_data['to'] = self.add_to_signature(activity_data['to']) + activity_data["to"] = self.add_to_signature(activity_data["to"]) - token = self.create_scope_token('feed', 'write') - result = self.client.post( - self.feed_url, data=activity_data, signature=token) + token = self.create_scope_token("feed", "write") + result = self.client.post(self.feed_url, data=activity_data, signature=token) return result def add_activities(self, activity_list): - ''' + """ Adds a list of activities to the feed :param activity_list: a list with the activity data dicts @@ -72,43 +77,40 @@ def add_activities(self, activity_list): {'actor': 2, 'verb': 'watch', 'object': 2}, ] result = feed.add_activities(activity_data) - ''' + """ activities = [] for activity_data in activity_list: activity_data = activity_data.copy() activities.append(activity_data) - if activity_data.get('to'): - activity_data['to'] = self.add_to_signature( - activity_data['to']) - token = self.create_scope_token('feed', 'write') + if activity_data.get("to"): + activity_data["to"] = self.add_to_signature(activity_data["to"]) + token = self.create_scope_token("feed", "write") data = dict(activities=activities) if activities: - result = self.client.post( - self.feed_url, data=data, signature=token) + result = self.client.post(self.feed_url, data=data, signature=token) return result def remove_activity(self, activity_id=None, foreign_id=None): - ''' + """ Removes an activity from the feed :param activity_id: the activity id to remove from this feed (note this will also remove the activity from feeds which follow this feed) :param foreign_id: the foreign id you provided when adding the activity - ''' + """ identifier = activity_id or foreign_id if not identifier: - raise ValueError('please either provide activity_id or foreign_id') - url = self.feed_url + '%s/' % identifier + raise ValueError("please either provide activity_id or foreign_id") + url = self.feed_url + "%s/" % identifier params = dict() - token = self.create_scope_token('feed', 'delete') + token = self.create_scope_token("feed", "delete") if foreign_id is not None: - params['foreign_id'] = '1' - result = self.client.delete( - url, signature=token, params=params) + params["foreign_id"] = "1" + result = self.client.delete(url, signature=token, params=params) return result - def get(self, **params): - ''' + def get(self, enrich=False, reactions=None, **params): + """ Get the activities in this feed **Example**:: @@ -118,117 +120,127 @@ def get(self, **params): # slow pagination using offset feed.get(limit=10, offset=10) - ''' - for field in ['mark_read', 'mark_seen']: + """ + for field in ["mark_read", "mark_seen"]: value = params.get(field) if isinstance(value, (list, tuple)): - params[field] = ','.join(value) - token = self.create_scope_token('feed', 'read') - response = self.client.get( - self.feed_url, params=params, signature=token) + params[field] = ",".join(value) + token = self.create_scope_token("feed", "read") + + if enrich or reactions is not None: + feed_url = self.enriched_feed_url + else: + feed_url = 
self.feed_url + + if reactions is not None and not isinstance(reactions, (dict, )): + raise TypeError("reactions argument should be a dictionary") + + if reactions is not None: + if reactions.get('own'): + params['withOwnReactions'] = True + if reactions.get('recent'): + params['withRecentReactions'] = True + if reactions.get('counts'): + params['withReactionCounts'] = True + + response = self.client.get(feed_url, params=params, signature=token) return response - def follow(self, target_feed_slug, target_user_id, activity_copy_limit=None, **extra_data): - ''' + def follow( + self, target_feed_slug, target_user_id, activity_copy_limit=None, **extra_data + ): + """ Follows the given feed + :param activity_copy_limit: how many activities should be copied from target feed :param target_feed_slug: the slug of the target feed :param target_user_id: the user id - ''' + """ target_feed_slug = validate_feed_slug(target_feed_slug) target_user_id = validate_user_id(target_user_id) - target_feed_id = '%s:%s' % (target_feed_slug, target_user_id) - url = self.feed_url + 'follows/' + target_feed_id = "%s:%s" % (target_feed_slug, target_user_id) + url = self.feed_url + "follows/" data = { - 'target': target_feed_id, - 'target_token': self.client.feed(target_feed_slug, target_user_id).token + "target": target_feed_id, + "target_token": self.client.feed(target_feed_slug, target_user_id).token, } if activity_copy_limit != None: - data['activity_copy_limit'] = activity_copy_limit - token = self.create_scope_token('follower', 'write') + data["activity_copy_limit"] = activity_copy_limit + token = self.create_scope_token("follower", "write") data.update(extra_data) - response = self.client.post( - url, data=data, signature=token) + response = self.client.post(url, data=data, signature=token) return response def unfollow(self, target_feed_slug, target_user_id, keep_history=False): - ''' + """ Unfollow the given feed - ''' + """ target_feed_slug = validate_feed_slug(target_feed_slug) target_user_id = validate_user_id(target_user_id) - target_feed_id = '%s:%s' % (target_feed_slug, target_user_id) - token = self.create_scope_token('follower', 'delete') - url = self.feed_url + 'follows/%s/' % target_feed_id + target_feed_id = "%s:%s" % (target_feed_slug, target_user_id) + token = self.create_scope_token("follower", "delete") + url = self.feed_url + "follows/%s/" % target_feed_id params = {} if keep_history: - params['keep_history'] = True + params["keep_history"] = True response = self.client.delete(url, signature=token, params=params) return response def followers(self, offset=0, limit=25, feeds=None): - ''' + """ Lists the followers for the given feed - ''' - feeds = feeds is not None and ','.join(feeds) or '' - params = { - 'limit': limit, - 'offset': offset, - 'filter': feeds - } - url = self.feed_url + 'followers/' - token = self.create_scope_token('follower', 'read') - response = self.client.get( - url, params=params, signature=token) + """ + feeds = feeds is not None and ",".join(feeds) or "" + params = {"limit": limit, "offset": offset, "filter": feeds} + url = self.feed_url + "followers/" + token = self.create_scope_token("follower", "read") + response = self.client.get(url, params=params, signature=token) return response def following(self, offset=0, limit=25, feeds=None): - ''' + """ List the feeds which this feed is following - ''' + """ if feeds is not None: - feeds = feeds is not None and ','.join(feeds) or '' - params = { - 'offset': offset, - 'limit': limit, - 'filter': feeds - } - url = self.feed_url 
+ 'follows/' - token = self.create_scope_token('follower', 'read') - response = self.client.get( - url, params=params, signature=token) + feeds = feeds is not None and ",".join(feeds) or "" + params = {"offset": offset, "limit": limit, "filter": feeds} + url = self.feed_url + "follows/" + token = self.create_scope_token("follower", "read") + response = self.client.get(url, params=params, signature=token) return response def add_to_signature(self, recipients): - ''' + """ Takes a list of recipients such as ['user:1', 'user:2'] and turns it into a list with the tokens included ['user:1 token', 'user:2 token'] - ''' + """ data = [] for recipient in recipients: validate_feed_id(recipient) - feed_slug, user_id = recipient.split(':') + feed_slug, user_id = recipient.split(":") feed = self.client.feed(feed_slug, user_id) data.append("%s %s" % (recipient, feed.token)) return data - def update_activity_to_targets(self, foreign_id, time, - new_targets=None, added_targets=None, - removed_targets=None): - data = { - 'foreign_id': foreign_id, - 'time': time, - } + def update_activity_to_targets( + self, + foreign_id, + time, + new_targets=None, + added_targets=None, + removed_targets=None, + ): + data = {"foreign_id": foreign_id, "time": time} if new_targets is not None: - data['new_targets'] = new_targets + data["new_targets"] = new_targets if added_targets is not None: - data['added_targets'] = added_targets + data["added_targets"] = added_targets if removed_targets is not None: - data['removed_targets'] = removed_targets + data["removed_targets"] = removed_targets - url = self.feed_targets_url + 'activity_to_targets/' + url = self.feed_targets_url + "activity_to_targets/" - token = self.create_scope_token('feed_targets', 'write') + token = self.create_scope_token("feed_targets", "write") return self.client.post(url, data=data, signature=token) diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index 0cc24dd..0509384 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -1195,8 +1195,8 @@ def test_create_reference(self): self.assertEqual(ref, "SO:item:42") def test_create_user_reference(self): - ref = self.c.collections.create_user_reference("42") - self.assertEqual(ref, "SO:user:42") + ref = self.c.users.create_reference("42") + self.assertEqual(ref, "SU:42") def test_reaction_add(self): self.c.reactions.add("like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike") @@ -1333,3 +1333,80 @@ def test_collections_update(self): def test_collections_delete(self): response = self.c.collections.add("items", {"data": 1}, str(uuid1())) self.c.collections.delete("items", response["id"]) + + def test_feed_enrichment_bad(self): + with self.assertRaises(TypeError): + self.c.feed("user", "mike").get(enrich=True, reactions=True) + + def test_feed_enrichment_collection(self): + entry = self.c.collections.add("items", {"name": "time machine"}) + entry.pop("duration") + f = getfeed("user", "mike") + activity_data = { + "actor": "mike", + "verb": "buy", + "object": self.c.collections.create_reference("items", entry), + } + f.add_activity(activity_data) + response = f.get() + self.assertTrue( + set(activity_data.items()).issubset(set(response["results"][0].items())) + ) + enriched_response = f.get(enrich=True) + self.assertEqual(enriched_response["results"][0]["object"], entry) + + def test_feed_enrichment_user(self): + user = self.c.users.add(str(uuid1()), {"name": "Mike"}) + user.pop("duration") + f = getfeed("user", "mike") + activity_data = { + "actor": 
self.c.users.create_reference(user), + "verb": "buy", + "object": "time machine", + } + f.add_activity(activity_data) + response = f.get() + self.assertTrue( + set(activity_data.items()).issubset(set(response["results"][0].items())) + ) + enriched_response = f.get(enrich=True) + self.assertEqual(enriched_response["results"][0]["actor"], user) + + def test_feed_enrichment_own_reaction(self): + f = getfeed("user", "mike") + activity_data = { + "actor": "mike", + "verb": "buy", + "object": "object", + } + response = f.add_activity(activity_data) + reaction = self.c.reactions.add("like", response["id"], "mike") + reaction.pop("duration") + enriched_response = f.get(reactions={"own": True}, user_id="mike") + self.assertEqual(enriched_response["results"][0]["own_reactions"]["like"][0], reaction) + + def test_feed_enrichment_recent_reaction(self): + f = getfeed("user", "mike") + activity_data = { + "actor": "mike", + "verb": "buy", + "object": "object", + } + response = f.add_activity(activity_data) + reaction = self.c.reactions.add("like", response["id"], "mike") + reaction.pop("duration") + enriched_response = f.get(reactions={"recent": True}) + self.assertEqual(enriched_response["results"][0]["latest_reactions"]["like"][0], reaction) + + def test_feed_enrichment_reaction_counts(self): + f = getfeed("user", "mike") + activity_data = { + "actor": "mike", + "verb": "buy", + "object": "object", + } + response = f.add_activity(activity_data) + reaction = self.c.reactions.add("like", response["id"], "mike") + reaction.pop("duration") + enriched_response = f.get(reactions={"counts": True}) + self.assertEqual(enriched_response["results"][0]["reaction_counts"]["like"], 1) diff --git a/stream/users.py b/stream/users.py index c1738a9..e4cbf82 100644 --- a/stream/users.py +++ b/stream/users.py @@ -3,6 +3,12 @@ def __init__(self, client, token): self.client = client self.token = token + def create_reference(self, id): + _id = id + if isinstance(id, (dict,)) and id.get("id") is not None: + _id = id.get("id") + return "SU:%s" % _id + def add(self, user_id, data=None, get_or_create=False): payload = dict(id=user_id, data=data) return self.client.post( From e0b65c49fa3013e5692b40f80bb1d7314710b338 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Fri, 30 Nov 2018 14:42:42 +0100 Subject: [PATCH 125/208] ran black on entire project --- stream/__init__.py | 52 +++++--- stream/collections.py | 42 ++++--- stream/exceptions.py | 79 +++++++------ stream/feed.py | 14 +-- stream/httpsig/requests_auth.py | 28 +++-- stream/httpsig/sign.py | 35 +++--- stream/httpsig/tests/__init__.py | 2 +- stream/httpsig/tests/test_signature.py | 96 ++++++++------- stream/httpsig/tests/test_utils.py | 7 +- stream/httpsig/tests/test_verify.py | 157 +++++++++++++++---------- stream/httpsig/utils.py | 84 +++++++------ stream/httpsig/verify.py | 38 ++++-- stream/personalization.py | 27 +++-- stream/reactions.py | 5 +- stream/serializer.py | 12 +- stream/signing.py | 15 ++- stream/tests/test_client.py | 26 ++-- stream/users.py | 5 +- stream/utils.py | 36 +++--- 19 files changed, 443 insertions(+), 317 deletions(-) diff --git a/stream/__init__.py b/stream/__init__.py index f114681..70e0986 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -1,37 +1,53 @@ import re import os -__author__ = 'Thierry Schellenbach' -__copyright__ = 'Copyright 2014, Stream.io, Inc' -__credits__ = ['Thierry Schellenbach, mellowmorning.com, @tschellenbach'] -__license__ = 'BSD-3-Clause' -__version__ = '2.12.0' -__maintainer__ = 'Thierry Schellenbach' 
-__email__ = 'support@getstream.io' -__status__ = 'Production' +__author__ = "Thierry Schellenbach" +__copyright__ = "Copyright 2014, Stream.io, Inc" +__credits__ = ["Thierry Schellenbach, mellowmorning.com, @tschellenbach"] +__license__ = "BSD-3-Clause" +__version__ = "2.12.0" +__maintainer__ = "Thierry Schellenbach" +__email__ = "support@getstream.io" +__status__ = "Production" -def connect(api_key=None, api_secret=None, app_id=None, version='v1.0', - timeout=3.0, location=None, base_url=None): - ''' +def connect( + api_key=None, + api_secret=None, + app_id=None, + version="v1.0", + timeout=3.0, + location=None, + base_url=None, +): + """ Returns a Client object :param api_key: your api key or heroku url :param api_secret: the api secret :param app_id: the app id (used for listening to feed changes) - ''' + """ from stream.client import StreamClient - stream_url = os.environ.get('STREAM_URL') + + stream_url = os.environ.get("STREAM_URL") # support for the heroku STREAM_URL syntax if stream_url and not api_key: pattern = re.compile( - 'https\:\/\/(\w+)\:(\w+)\@([\w-]*).*\?app_id=(\d+)', re.IGNORECASE) + "https\:\/\/(\w+)\:(\w+)\@([\w-]*).*\?app_id=(\d+)", re.IGNORECASE + ) result = pattern.match(stream_url) if result and len(result.groups()) == 4: api_key, api_secret, location, app_id = result.groups() - location = None if location in ('getstream', 'stream-io-api') else location + location = None if location in ("getstream", "stream-io-api") else location else: - raise ValueError('Invalid api key or heroku url') + raise ValueError("Invalid api key or heroku url") - return StreamClient(api_key, api_secret, app_id, version, timeout, - location=location, base_url=base_url) + return StreamClient( + api_key, + api_secret, + app_id, + version, + timeout, + location=location, + base_url=base_url, + ) diff --git a/stream/collections.py b/stream/collections.py index c119e22..6e153eb 100644 --- a/stream/collections.py +++ b/stream/collections.py @@ -1,5 +1,4 @@ class Collections(object): - def __init__(self, client, token): """ Used to manipulate data at the 'meta' endpoint @@ -15,7 +14,7 @@ def create_reference(self, collection_name, id): if isinstance(id, (dict,)) and id.get("id") is not None: _id = id.get("id") return "SO:%s:%s" % (collection_name, _id) - + def upsert(self, collection_name, data): """ "Insert new or update existing data. 
@@ -33,8 +32,12 @@ def upsert(self, collection_name, data): data_json = {collection_name: data} - response = self.client.post('collections/', service_name='api', - signature=self.token, data={'data': data_json}) + response = self.client.post( + "collections/", + service_name="api", + signature=self.token, + data={"data": data_json}, + ) return response def select(self, collection_name, ids): @@ -56,11 +59,15 @@ def select(self, collection_name, ids): foreign_ids = [] for i in range(len(ids)): - foreign_ids.append('%s:%s' % (collection_name, ids[i])) - foreign_ids = ','.join(foreign_ids) + foreign_ids.append("%s:%s" % (collection_name, ids[i])) + foreign_ids = ",".join(foreign_ids) - response = self.client.get('collections/', service_name='api', params={'foreign_ids': foreign_ids}, - signature=self.token) + response = self.client.get( + "collections/", + service_name="api", + params={"foreign_ids": foreign_ids}, + signature=self.token, + ) return response @@ -80,17 +87,16 @@ def delete_many(self, collection_name, ids): ids = [ids] ids = [str(i) for i in ids] - params = {'collection_name': collection_name, 'ids': ids} + params = {"collection_name": collection_name, "ids": ids} - response = self.client.delete('collections/', service_name='api', params=params, - signature=self.token) + response = self.client.delete( + "collections/", service_name="api", params=params, signature=self.token + ) return response def add(self, collection_name, data, id=None, user_id=None): - payload = dict( - id=id, data=data, user_id=user_id, - ) + payload = dict(id=id, data=data, user_id=user_id) return self.client.post( "collections/%s" % collection_name, service_name="api", @@ -100,7 +106,9 @@ def add(self, collection_name, data, id=None, user_id=None): def get(self, collection_name, id): return self.client.get( - "collections/%s/%s" % (collection_name, id), service_name="api", signature=self.token + "collections/%s/%s" % (collection_name, id), + service_name="api", + signature=self.token, ) def update(self, collection_name, id, data=None): @@ -114,5 +122,7 @@ def update(self, collection_name, id, data=None): def delete(self, collection_name, id): return self.client.delete( - "collections/%s/%s" % (collection_name, id), service_name="api", signature=self.token + "collections/%s/%s" % (collection_name, id), + service_name="api", + signature=self.token, ) diff --git a/stream/exceptions.py b/stream/exceptions.py index 39391bd..cd35f2c 100644 --- a/stream/exceptions.py +++ b/stream/exceptions.py @@ -1,7 +1,4 @@ - - class StreamApiException(Exception): - def __init__(self, error_message, status_code=None): Exception.__init__(self, error_message) self.detail = error_message @@ -11,78 +8,87 @@ def __init__(self, error_message, status_code=None): code = 1 def __repr__(self): - return '%s (%s)' % (self.__class__.__name__, self.detail) + return "%s (%s)" % (self.__class__.__name__, self.detail) def __unicode__(self): - return '%s (%s)' % (self.__class__.__name__, self.detail) + return "%s (%s)" % (self.__class__.__name__, self.detail) class ApiKeyException(StreamApiException): - ''' + """ Raised when there is an issue with your Access Key - ''' + """ + status_code = 401 code = 2 class SignatureException(StreamApiException): - ''' + """ Raised when there is an issue with the signature you provided - ''' + """ + status_code = 401 code = 3 class InputException(StreamApiException): - ''' + """ Raised when you send the wrong data to the API - ''' + """ + status_code = 400 code = 4 class 
CustomFieldException(StreamApiException): - ''' + """ Raised when there are missing or misconfigured custom fields - ''' + """ + status_code = 400 code = 5 class FeedConfigException(StreamApiException): - ''' + """ Raised when there are missing or misconfigured custom fields - ''' + """ + status_code = 400 code = 6 class SiteSuspendedException(StreamApiException): - ''' + """ Raised when the site requesting the data is suspended - ''' + """ + status_code = 401 code = 7 + class InvalidPaginationException(StreamApiException): - ''' + """ Raised when there is an issue with your Access Key - ''' + """ + status_code = 401 code = 8 class MissingRankingException(FeedConfigException): - ''' + """ Raised when you didn't configure the ranking for the given feed - ''' + """ + status_code = 400 code = 12 @@ -93,57 +99,64 @@ class MissingUserException(MissingRankingException): class RankingException(FeedConfigException): - ''' + """ Raised when there is a runtime issue with ranking the feed - ''' + """ + status_code = 400 code = 11 class RateLimitReached(StreamApiException): - ''' + """ Raised when too many requests are performed - ''' + """ + status_code = 429 code = 9 class OldStorageBackend(StreamApiException): - ''' + """ Raised if you try to perform an action which only works with the new storage - ''' + """ + status_code = 400 code = 13 class BestPracticeException(StreamApiException): - ''' + """ Raised if best practices are enforced and you do something that would break a high volume integration - ''' + """ + status_code = 400 code = 15 class DoesNotExistException(StreamApiException): - ''' + """ Raised when the requested resource could not be found. - ''' + """ + status_code = 404 code = 16 class NotAllowedException(StreamApiException): - ''' + """ Raised when the requested action is not allowed for some reason. - ''' + """ + status_code = 403 code = 17 def get_exceptions(): from stream import exceptions + classes = [] for k in dir(exceptions): a = getattr(exceptions, k) diff --git a/stream/feed.py b/stream/feed.py index 2cc3ecd..5d71974 100644 --- a/stream/feed.py +++ b/stream/feed.py @@ -132,16 +132,16 @@ def get(self, enrich=False, reactions=None, **params): else: feed_url = self.feed_url - if reactions is not None and not isinstance(reactions, (dict, )): + if reactions is not None and not isinstance(reactions, (dict,)): raise TypeError("reactions argument should be a dictionary") if reactions is not None: - if reactions.get('own'): - params['withOwnReactions'] = True - if reactions.get('recent'): - params['withRecentReactions'] = True - if reactions.get('counts'): - params['withReactionCounts'] = True + if reactions.get("own"): + params["withOwnReactions"] = True + if reactions.get("recent"): + params["withRecentReactions"] = True + if reactions.get("counts"): + params["withReactionCounts"] = True response = self.client.get(feed_url, params=params, signature=token) return response diff --git a/stream/httpsig/requests_auth.py b/stream/httpsig/requests_auth.py index 6a02896..247cafa 100644 --- a/stream/httpsig/requests_auth.py +++ b/stream/httpsig/requests_auth.py @@ -1,4 +1,5 @@ from requests.auth import AuthBase + try: # Python 3 from urllib.parse import urlparse @@ -10,7 +11,7 @@ class HTTPSignatureAuth(AuthBase): - ''' + """ Sign a request using the http-signature scheme. 
https://github.com/joyent/node-http-signature/blob/master/http_signing.md @@ -18,20 +19,23 @@ class HTTPSignatureAuth(AuthBase): secret is the filename of a pem file in the case of rsa, a password string in the case of an hmac algorithm algorithm is one of the six specified algorithms headers is a list of http headers to be included in the signing string, defaulting to "Date" alone. - ''' - def __init__(self, key_id='', secret='', algorithm=None, headers=None): + """ + + def __init__(self, key_id="", secret="", algorithm=None, headers=None): headers = headers or [] - self.header_signer = HeaderSigner(key_id=key_id, secret=secret, - algorithm=algorithm, headers=headers) - self.uses_host = 'host' in [h.lower() for h in headers] + self.header_signer = HeaderSigner( + key_id=key_id, secret=secret, algorithm=algorithm, headers=headers + ) + self.uses_host = "host" in [h.lower() for h in headers] def __call__(self, r): headers = self.header_signer.sign( - r.headers, - # 'Host' header unavailable in request object at this point - # if 'host' header is needed, extract it from the url - host=urlparse(r.url).netloc if self.uses_host else None, - method=r.method, - path=r.path_url) + r.headers, + # 'Host' header unavailable in request object at this point + # if 'host' header is needed, extract it from the url + host=urlparse(r.url).netloc if self.uses_host else None, + method=r.method, + path=r.path_url, + ) r.headers.update(headers) return r diff --git a/stream/httpsig/sign.py b/stream/httpsig/sign.py index 6187b59..18a4abe 100644 --- a/stream/httpsig/sign.py +++ b/stream/httpsig/sign.py @@ -18,18 +18,20 @@ class Signer(object): Password-protected keyfiles are not supported. """ + def __init__(self, secret, algorithm=None): if algorithm is None: algorithm = DEFAULT_SIGN_ALGORITHM assert algorithm in ALGORITHMS, "Unknown algorithm" - if isinstance(secret, six.string_types): secret = secret.encode("ascii") + if isinstance(secret, six.string_types): + secret = secret.encode("ascii") self._rsa = None self._hash = None - self.sign_algorithm, self.hash_algorithm = algorithm.split('-') + self.sign_algorithm, self.hash_algorithm = algorithm.split("-") - if self.sign_algorithm == 'rsa': + if self.sign_algorithm == "rsa": try: rsa_key = RSA.importKey(secret) self._rsa = PKCS1_v1_5.new(rsa_key) @@ -37,39 +39,42 @@ def __init__(self, secret, algorithm=None): except ValueError: raise HttpSigException("Invalid key.") - elif self.sign_algorithm == 'hmac': + elif self.sign_algorithm == "hmac": self._hash = HMAC.new(secret, digestmod=HASHES[self.hash_algorithm]) @property def algorithm(self): - return '%s-%s' % (self.sign_algorithm, self.hash_algorithm) + return "%s-%s" % (self.sign_algorithm, self.hash_algorithm) def _sign_rsa(self, data): - if isinstance(data, six.string_types): data = data.encode("ascii") + if isinstance(data, six.string_types): + data = data.encode("ascii") h = self._hash.new() h.update(data) return self._rsa.sign(h) def _sign_hmac(self, data): - if isinstance(data, six.string_types): data = data.encode("ascii") + if isinstance(data, six.string_types): + data = data.encode("ascii") hmac = self._hash.copy() hmac.update(data) return hmac.digest() def _sign(self, data): - if isinstance(data, six.string_types): data = data.encode("ascii") + if isinstance(data, six.string_types): + data = data.encode("ascii") signed = None if self._rsa: signed = self._sign_rsa(data) elif self._hash: signed = self._sign_hmac(data) if not signed: - raise SystemError('No valid encryptor found.') + raise SystemError("No 
valid encryptor found.") return base64.b64encode(signed).decode("ascii") class HeaderSigner(Signer): - ''' + """ Generic object that will sign headers as a dictionary using the http-signature scheme. https://github.com/joyent/node-http-signature/blob/master/http_signing.md @@ -77,13 +82,14 @@ class HeaderSigner(Signer): :arg secret: a PEM-encoded RSA private key or an HMAC secret (must match the algorithm) :arg algorithm: one of the six specified algorithms :arg headers: a list of http headers to be included in the signing string, defaulting to ['date']. - ''' + """ + def __init__(self, key_id, secret, algorithm=None, headers=None): if algorithm is None: algorithm = DEFAULT_SIGN_ALGORITHM super(HeaderSigner, self).__init__(secret=secret, algorithm=algorithm) - self.headers = headers or ['date'] + self.headers = headers or ["date"] self.signature_template = build_signature_template(key_id, algorithm, headers) def sign(self, headers, host=None, method=None, path=None): @@ -96,11 +102,10 @@ def sign(self, headers, host=None, method=None, path=None): path is the HTTP path (required when using '(request-target)'). """ headers = CaseInsensitiveDict(headers) - required_headers = self.headers or ['date'] + required_headers = self.headers or ["date"] signable = generate_message(required_headers, headers, host, method, path) signature = self._sign(signable) - headers['authorization'] = self.signature_template % signature + headers["authorization"] = self.signature_template % signature return headers - diff --git a/stream/httpsig/tests/__init__.py b/stream/httpsig/tests/__init__.py index 72d4383..d9018eb 100644 --- a/stream/httpsig/tests/__init__.py +++ b/stream/httpsig/tests/__init__.py @@ -1,3 +1,3 @@ from .test_signature import * from .test_utils import * -from .test_verify import * \ No newline at end of file +from .test_verify import * diff --git a/stream/httpsig/tests/test_signature.py b/stream/httpsig/tests/test_signature.py index bab679a..2e33f6e 100755 --- a/stream/httpsig/tests/test_signature.py +++ b/stream/httpsig/tests/test_signature.py @@ -1,7 +1,8 @@ #!/usr/bin/env python import sys import os -sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..')) + +sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) import json import unittest @@ -15,58 +16,69 @@ class TestSign(unittest.TestCase): def setUp(self): sign.DEFAULT_SIGN_ALGORITHM = "rsa-sha256" - self.key_path = os.path.join(os.path.dirname(__file__), 'rsa_private.pem') - with open(self.key_path, 'rb') as f: + self.key_path = os.path.join(os.path.dirname(__file__), "rsa_private.pem") + with open(self.key_path, "rb") as f: self.key = f.read() def tearDown(self): sign.DEFAULT_SIGN_ALGORITHM = self.DEFAULT_SIGN_ALGORITHM def test_default(self): - hs = sign.HeaderSigner(key_id='Test', secret=self.key) - unsigned = { - 'Date': 'Thu, 05 Jan 2012 21:31:40 GMT' - } + hs = sign.HeaderSigner(key_id="Test", secret=self.key) + unsigned = {"Date": "Thu, 05 Jan 2012 21:31:40 GMT"} signed = hs.sign(unsigned) - self.assertTrue('Date' in signed) - self.assertEqual(unsigned['Date'], signed['Date']) - self.assertTrue('Authorization' in signed) - auth = parse_authorization_header(signed['authorization']) + self.assertTrue("Date" in signed) + self.assertEqual(unsigned["Date"], signed["Date"]) + self.assertTrue("Authorization" in signed) + auth = parse_authorization_header(signed["authorization"]) params = auth[1] - self.assertTrue('keyId' in params) - self.assertTrue('algorithm' in params) - self.assertTrue('signature' in params) - 
self.assertEqual(params['keyId'], 'Test') - self.assertEqual(params['algorithm'], 'rsa-sha256') - self.assertEqual(params['signature'], 'ATp0r26dbMIxOopqw0OfABDT7CKMIoENumuruOtarj8n/97Q3htHFYpH8yOSQk3Z5zh8UxUym6FYTb5+A0Nz3NRsXJibnYi7brE/4tx5But9kkFGzG+xpUmimN4c3TMN7OFH//+r8hBf7BT9/GmHDUVZT2JzWGLZES2xDOUuMtA=') + self.assertTrue("keyId" in params) + self.assertTrue("algorithm" in params) + self.assertTrue("signature" in params) + self.assertEqual(params["keyId"], "Test") + self.assertEqual(params["algorithm"], "rsa-sha256") + self.assertEqual( + params["signature"], + "ATp0r26dbMIxOopqw0OfABDT7CKMIoENumuruOtarj8n/97Q3htHFYpH8yOSQk3Z5zh8UxUym6FYTb5+A0Nz3NRsXJibnYi7brE/4tx5But9kkFGzG+xpUmimN4c3TMN7OFH//+r8hBf7BT9/GmHDUVZT2JzWGLZES2xDOUuMtA=", + ) def test_all(self): - hs = sign.HeaderSigner(key_id='Test', secret=self.key, headers=[ - '(request-target)', - 'host', - 'date', - 'content-type', - 'content-md5', - 'content-length' - ]) + hs = sign.HeaderSigner( + key_id="Test", + secret=self.key, + headers=[ + "(request-target)", + "host", + "date", + "content-type", + "content-md5", + "content-length", + ], + ) unsigned = { - 'Host': 'example.com', - 'Date': 'Thu, 05 Jan 2012 21:31:40 GMT', - 'Content-Type': 'application/json', - 'Content-MD5': 'Sd/dVLAcvNLSq16eXua5uQ==', - 'Content-Length': '18', + "Host": "example.com", + "Date": "Thu, 05 Jan 2012 21:31:40 GMT", + "Content-Type": "application/json", + "Content-MD5": "Sd/dVLAcvNLSq16eXua5uQ==", + "Content-Length": "18", } - signed = hs.sign(unsigned, method='POST', path='/foo?param=value&pet=dog') + signed = hs.sign(unsigned, method="POST", path="/foo?param=value&pet=dog") - self.assertTrue('Date' in signed) - self.assertEqual(unsigned['Date'], signed['Date']) - self.assertTrue('Authorization' in signed) - auth = parse_authorization_header(signed['authorization']) + self.assertTrue("Date" in signed) + self.assertEqual(unsigned["Date"], signed["Date"]) + self.assertTrue("Authorization" in signed) + auth = parse_authorization_header(signed["authorization"]) params = auth[1] - self.assertTrue('keyId' in params) - self.assertTrue('algorithm' in params) - self.assertTrue('signature' in params) - self.assertEqual(params['keyId'], 'Test') - self.assertEqual(params['algorithm'], 'rsa-sha256') - self.assertEqual(params['headers'], '(request-target) host date content-type content-md5 content-length') - self.assertEqual(params['signature'], 'G8/Uh6BBDaqldRi3VfFfklHSFoq8CMt5NUZiepq0q66e+fS3Up3BmXn0NbUnr3L1WgAAZGplifRAJqp2LgeZ5gXNk6UX9zV3hw5BERLWscWXlwX/dvHQES27lGRCvyFv3djHP6Plfd5mhPWRkmjnvqeOOSS0lZJYFYHJz994s6w=') + self.assertTrue("keyId" in params) + self.assertTrue("algorithm" in params) + self.assertTrue("signature" in params) + self.assertEqual(params["keyId"], "Test") + self.assertEqual(params["algorithm"], "rsa-sha256") + self.assertEqual( + params["headers"], + "(request-target) host date content-type content-md5 content-length", + ) + self.assertEqual( + params["signature"], + "G8/Uh6BBDaqldRi3VfFfklHSFoq8CMt5NUZiepq0q66e+fS3Up3BmXn0NbUnr3L1WgAAZGplifRAJqp2LgeZ5gXNk6UX9zV3hw5BERLWscWXlwX/dvHQES27lGRCvyFv3djHP6Plfd5mhPWRkmjnvqeOOSS0lZJYFYHJz994s6w=", + ) diff --git a/stream/httpsig/tests/test_utils.py b/stream/httpsig/tests/test_utils.py index f0a4341..10d4d02 100755 --- a/stream/httpsig/tests/test_utils.py +++ b/stream/httpsig/tests/test_utils.py @@ -2,16 +2,17 @@ import os import re import sys -sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..')) + +sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) import unittest from 
stream.httpsig.utils import get_fingerprint -class TestUtils(unittest.TestCase): +class TestUtils(unittest.TestCase): def test_get_fingerprint(self): - with open(os.path.join(os.path.dirname(__file__), 'rsa_public.pem'), 'r') as k: + with open(os.path.join(os.path.dirname(__file__), "rsa_public.pem"), "r") as k: key = k.read() fingerprint = get_fingerprint(key) self.assertEqual(fingerprint, "73:61:a2:21:67:e0:df:be:7e:4b:93:1e:15:98:a5:b7") diff --git a/stream/httpsig/tests/test_verify.py b/stream/httpsig/tests/test_verify.py index 8d4bf36..2b9c0b9 100755 --- a/stream/httpsig/tests/test_verify.py +++ b/stream/httpsig/tests/test_verify.py @@ -1,7 +1,8 @@ #!/usr/bin/env python import sys import os -sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..')) + +sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) import json import unittest @@ -9,16 +10,17 @@ from stream.httpsig.sign import HeaderSigner, Signer from stream.httpsig.verify import HeaderVerifier, Verifier + class BaseTestCase(unittest.TestCase): def _parse_auth(self, auth): """Basic Authorization header parsing.""" # split 'Signature kvpairs' - s, param_str = auth.split(' ', 1) - self.assertEqual(s, 'Signature') + s, param_str = auth.split(" ", 1) + self.assertEqual(s, "Signature") # split k1="v1",k2="v2",... - param_list = param_str.split(',') + param_list = param_str.split(",") # convert into [(k1,"v1"), (k2, "v2"), ...] - param_pairs = [p.split('=', 1) for p in param_list] + param_pairs = [p.split("=", 1) for p in param_list] # convert into {k1:v1, k2:v2, ...} param_dict = {} for k, v in param_pairs: @@ -48,11 +50,11 @@ def test_basic_sign(self): self.assertFalse(verifier._verify(data=BAD, signature=signature)) def test_default(self): - unsigned = { - 'Date': 'Thu, 05 Jan 2012 21:31:40 GMT' - } + unsigned = {"Date": "Thu, 05 Jan 2012 21:31:40 GMT"} - hs = HeaderSigner(key_id="Test", secret=self.sign_secret, algorithm=self.algorithm) + hs = HeaderSigner( + key_id="Test", secret=self.sign_secret, algorithm=self.algorithm + ) signed = hs.sign(unsigned) hv = HeaderVerifier(headers=signed, secret=self.verify_secret) self.assertTrue(hv.verify()) @@ -60,74 +62,106 @@ def test_default(self): def test_signed_headers(self): HOST = "example.com" METHOD = "POST" - PATH = '/foo?param=value&pet=dog' - hs = HeaderSigner(key_id="Test", secret=self.sign_secret, algorithm=self.algorithm, headers=[ - '(request-target)', - 'host', - 'date', - 'content-type', - 'content-md5', - 'content-length' - ]) + PATH = "/foo?param=value&pet=dog" + hs = HeaderSigner( + key_id="Test", + secret=self.sign_secret, + algorithm=self.algorithm, + headers=[ + "(request-target)", + "host", + "date", + "content-type", + "content-md5", + "content-length", + ], + ) unsigned = { - 'Host': HOST, - 'Date': 'Thu, 05 Jan 2012 21:31:40 GMT', - 'Content-Type': 'application/json', - 'Content-MD5': 'Sd/dVLAcvNLSq16eXua5uQ==', - 'Content-Length': '18', + "Host": HOST, + "Date": "Thu, 05 Jan 2012 21:31:40 GMT", + "Content-Type": "application/json", + "Content-MD5": "Sd/dVLAcvNLSq16eXua5uQ==", + "Content-Length": "18", } signed = hs.sign(unsigned, method=METHOD, path=PATH) - hv = HeaderVerifier(headers=signed, secret=self.verify_secret, host=HOST, method=METHOD, path=PATH) + hv = HeaderVerifier( + headers=signed, + secret=self.verify_secret, + host=HOST, + method=METHOD, + path=PATH, + ) self.assertTrue(hv.verify()) def test_incorrect_headers(self): HOST = "example.com" METHOD = "POST" - PATH = '/foo?param=value&pet=dog' - hs = 
HeaderSigner(secret=self.sign_secret, - key_id="Test", - algorithm=self.algorithm, - headers=[ - '(request-target)', - 'host', - 'date', - 'content-type', - 'content-md5', - 'content-length']) + PATH = "/foo?param=value&pet=dog" + hs = HeaderSigner( + secret=self.sign_secret, + key_id="Test", + algorithm=self.algorithm, + headers=[ + "(request-target)", + "host", + "date", + "content-type", + "content-md5", + "content-length", + ], + ) unsigned = { - 'Host': HOST, - 'Date': 'Thu, 05 Jan 2012 21:31:40 GMT', - 'Content-Type': 'application/json', - 'Content-MD5': 'Sd/dVLAcvNLSq16eXua5uQ==', - 'Content-Length': '18', + "Host": HOST, + "Date": "Thu, 05 Jan 2012 21:31:40 GMT", + "Content-Type": "application/json", + "Content-MD5": "Sd/dVLAcvNLSq16eXua5uQ==", + "Content-Length": "18", } signed = hs.sign(unsigned, method=METHOD, path=PATH) - hv = HeaderVerifier(headers=signed, secret=self.verify_secret, required_headers=["some-other-header"], host=HOST, method=METHOD, path=PATH) + hv = HeaderVerifier( + headers=signed, + secret=self.verify_secret, + required_headers=["some-other-header"], + host=HOST, + method=METHOD, + path=PATH, + ) self.assertRaises(Exception, hv.verify) def test_extra_auth_headers(self): HOST = "example.com" METHOD = "POST" - PATH = '/foo?param=value&pet=dog' - hs = HeaderSigner(key_id="Test", secret=self.sign_secret, algorithm=self.algorithm, headers=[ - '(request-target)', - 'host', - 'date', - 'content-type', - 'content-md5', - 'content-length' - ]) + PATH = "/foo?param=value&pet=dog" + hs = HeaderSigner( + key_id="Test", + secret=self.sign_secret, + algorithm=self.algorithm, + headers=[ + "(request-target)", + "host", + "date", + "content-type", + "content-md5", + "content-length", + ], + ) unsigned = { - 'Host': HOST, - 'Date': 'Thu, 05 Jan 2012 21:31:40 GMT', - 'Content-Type': 'application/json', - 'Content-MD5': 'Sd/dVLAcvNLSq16eXua5uQ==', - 'Content-Length': '18', + "Host": HOST, + "Date": "Thu, 05 Jan 2012 21:31:40 GMT", + "Content-Type": "application/json", + "Content-MD5": "Sd/dVLAcvNLSq16eXua5uQ==", + "Content-Length": "18", } signed = hs.sign(unsigned, method=METHOD, path=PATH) - hv = HeaderVerifier(headers=signed, secret=self.verify_secret, method=METHOD, path=PATH, required_headers=['date', '(request-target)']) + hv = HeaderVerifier( + headers=signed, + secret=self.verify_secret, + method=METHOD, + path=PATH, + required_headers=["date", "(request-target)"], + ) self.assertTrue(hv.verify()) @@ -136,6 +170,7 @@ def setUp(self): super(TestVerifyHMACSHA256, self).setUp() self.algorithm = "hmac-sha256" + class TestVerifyHMACSHA512(TestVerifyHMACSHA1): def setUp(self): super(TestVerifyHMACSHA512, self).setUp() @@ -144,12 +179,12 @@ def setUp(self): class TestVerifyRSASHA1(TestVerifyHMACSHA1): def setUp(self): - private_key_path = os.path.join(os.path.dirname(__file__), 'rsa_private.pem') - with open(private_key_path, 'rb') as f: + private_key_path = os.path.join(os.path.dirname(__file__), "rsa_private.pem") + with open(private_key_path, "rb") as f: private_key = f.read() - public_key_path = os.path.join(os.path.dirname(__file__), 'rsa_public.pem') - with open(public_key_path, 'rb') as f: + public_key_path = os.path.join(os.path.dirname(__file__), "rsa_public.pem") + with open(public_key_path, "rb") as f: public_key = f.read() self.keyId = "Test" @@ -157,11 +192,13 @@ def setUp(self): self.sign_secret = private_key self.verify_secret = public_key + class TestVerifyRSASHA256(TestVerifyRSASHA1): def setUp(self): super(TestVerifyRSASHA256, self).setUp() self.algorithm = 
"rsa-sha256" + class TestVerifyRSASHA512(TestVerifyRSASHA1): def setUp(self): super(TestVerifyRSASHA512, self).setUp() diff --git a/stream/httpsig/utils.py b/stream/httpsig/utils.py index baa066b..dc81ce5 100644 --- a/stream/httpsig/utils.py +++ b/stream/httpsig/utils.py @@ -14,24 +14,27 @@ from Cryptodome.PublicKey import RSA from Cryptodome.Hash import SHA, SHA256, SHA512 -ALGORITHMS = frozenset(['rsa-sha1', 'rsa-sha256', 'rsa-sha512', 'hmac-sha1', 'hmac-sha256', 'hmac-sha512']) -HASHES = {'sha1': SHA, - 'sha256': SHA256, - 'sha512': SHA512} +ALGORITHMS = frozenset( + ["rsa-sha1", "rsa-sha256", "rsa-sha512", "hmac-sha1", "hmac-sha256", "hmac-sha512"] +) +HASHES = {"sha1": SHA, "sha256": SHA256, "sha512": SHA512} class HttpSigException(Exception): pass + """ Constant-time string compare. http://codahale.com/a-lesson-in-timing-attacks/ """ + + def ct_bytes_compare(a, b): if not isinstance(a, six.binary_type): - a = a.decode('utf8') + a = a.decode("utf8") if not isinstance(b, six.binary_type): - b = b.decode('utf8') + b = b.decode("utf8") if len(a) != len(b): return False @@ -43,49 +46,54 @@ def ct_bytes_compare(a, b): else: result |= x ^ y - return (result == 0) + return result == 0 + def generate_message(required_headers, headers, host=None, method=None, path=None): headers = CaseInsensitiveDict(headers) if not required_headers: - required_headers = ['date'] + required_headers = ["date"] signable_list = [] for h in required_headers: h = h.lower() - if h == '(request-target)': + if h == "(request-target)": if not method or not path: - raise Exception('method and path arguments required when using "(request-target)"') - signable_list.append('%s: %s %s' % (h, method.lower(), path)) + raise Exception( + 'method and path arguments required when using "(request-target)"' + ) + signable_list.append("%s: %s %s" % (h, method.lower(), path)) - elif h == 'host': + elif h == "host": # 'host' special case due to requests lib restrictions # 'host' is not available when adding auth so must use a param # if no param used, defaults back to the 'host' header if not host: - if 'host' in headers: + if "host" in headers: host = headers[h] else: raise Exception('missing required header "%s"' % (h)) - signable_list.append('%s: %s' % (h, host)) + signable_list.append("%s: %s" % (h, host)) else: if h not in headers: raise Exception('missing required header "%s"' % (h)) - signable_list.append('%s: %s' % (h, headers[h])) + signable_list.append("%s: %s" % (h, headers[h])) - signable = '\n'.join(signable_list).encode("ascii") + signable = "\n".join(signable_list).encode("ascii") return signable def parse_authorization_header(header): if not isinstance(header, six.string_types): - header = header.decode("ascii") #HTTP headers cannot be Unicode. + header = header.decode("ascii") # HTTP headers cannot be Unicode. auth = header.split(" ", 1) if len(auth) > 2: - raise ValueError('Invalid authorization header. (eg. Method key1=value1,key2="value, \"2\"")') + raise ValueError( + 'Invalid authorization header. (eg. Method key1=value1,key2="value, "2"")' + ) # Split up any args into a dictionary. values = {} @@ -97,9 +105,9 @@ def parse_authorization_header(header): for item in fields: # Only include keypairs. - if '=' in item: + if "=" in item: # Split on the first '=' only. - key, value = item.split('=', 1) + key, value = item.split("=", 1) if not (len(key) and len(value)): continue @@ -112,6 +120,7 @@ def parse_authorization_header(header): # ("Signature", {"headers": "date", "algorithm": "hmac-sha256", ... 
}) return (auth[0], CaseInsensitiveDict(values)) + def build_signature_template(key_id, algorithm, headers): """ Build the Signature template for use with the Authorization header. @@ -122,33 +131,34 @@ def build_signature_template(key_id, algorithm, headers): The signature must be interpolated into the template to get the final Authorization header value. """ - param_map = {'keyId': key_id, - 'algorithm': algorithm, - 'signature': '%s'} + param_map = {"keyId": key_id, "algorithm": algorithm, "signature": "%s"} if headers: headers = [h.lower() for h in headers] - param_map['headers'] = ' '.join(headers) + param_map["headers"] = " ".join(headers) kv = map('{0[0]}="{0[1]}"'.format, param_map.items()) - kv_string = ','.join(kv) - sig_string = 'Signature {0}'.format(kv_string) + kv_string = ",".join(kv) + sig_string = "Signature {0}".format(kv_string) return sig_string def lkv(d): parts = [] while d: - len = struct.unpack('>I', d[:4])[0] - bits = d[4:len+4] - parts.append(bits) - d = d[len+4:] + len = struct.unpack(">I", d[:4])[0] + bits = d[4 : len + 4] + parts.append(bits) + d = d[len + 4 :] return parts + def sig(d): return lkv(d)[1] + def is_rsa(keyobj): return lkv(keyobj.blob)[0] == "ssh-rsa" + # based on http://stackoverflow.com/a/2082169/151401 class CaseInsensitiveDict(dict): def __init__(self, d=None, **kwargs): @@ -165,6 +175,7 @@ def __getitem__(self, key): def __contains__(self, key): return super(CaseInsensitiveDict, self).__contains__(key.lower()) + # currently busted... def get_fingerprint(key): """ @@ -172,15 +183,14 @@ def get_fingerprint(key): See: http://tools.ietf.org/html/rfc4716 for more info """ - if key.startswith('ssh-rsa'): - key = key.split(' ')[1] + if key.startswith("ssh-rsa"): + key = key.split(" ")[1] else: - regex = r'\-{4,5}[\w|| ]+\-{4,5}' + regex = r"\-{4,5}[\w|| ]+\-{4,5}" key = re.split(regex, key)[1] - key = key.replace('\n', '') - key = key.strip().encode('ascii') + key = key.replace("\n", "") + key = key.strip().encode("ascii") key = base64.b64decode(key) fp_plain = hashlib.md5(key).hexdigest() - return ':'.join(a+b for a,b in zip(fp_plain[::2], fp_plain[1::2])) - + return ":".join(a + b for a, b in zip(fp_plain[::2], fp_plain[1::2])) diff --git a/stream/httpsig/verify.py b/stream/httpsig/verify.py index a3f3074..27b325f 100644 --- a/stream/httpsig/verify.py +++ b/stream/httpsig/verify.py @@ -18,6 +18,7 @@ class Verifier(Signer): For HMAC, the secret is the shared secret. For RSA, the secret is the PUBLIC key. """ + def _verify(self, data, signature): """ Verifies the data matches a signed version with the given signature. @@ -25,15 +26,17 @@ def _verify(self, data, signature): `signature` is a base64-encoded signature to verify against `data` """ - if isinstance(data, six.string_types): data = data.encode("ascii") - if isinstance(signature, six.string_types): signature = signature.encode("ascii") + if isinstance(data, six.string_types): + data = data.encode("ascii") + if isinstance(signature, six.string_types): + signature = signature.encode("ascii") - if self.sign_algorithm == 'rsa': + if self.sign_algorithm == "rsa": h = self._hash.new() h.update(data) return self._rsa.verify(h, b64decode(signature)) - elif self.sign_algorithm == 'hmac': + elif self.sign_algorithm == "hmac": h = self._sign_hmac(data) s = b64decode(signature) return ct_bytes_compare(h, s) @@ -46,7 +49,10 @@ class HeaderVerifier(Verifier): """ Verifies an HTTP signature from given headers. 
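An aside on the reformatted build_signature_template above: it only builds the Authorization header skeleton, leaving a '%s' placeholder that HeaderSigner later fills in with the base64 signature. A small illustration follows; the interpolated value is a placeholder, not a real signature.

    # Sketch: the skeleton produced by build_signature_template (above) and
    # how a signer would interpolate the signature into it.
    from stream.httpsig.utils import build_signature_template

    template = build_signature_template("Test", "hmac-sha256", ["date"])
    # Roughly: Signature keyId="Test",algorithm="hmac-sha256",signature="%s",headers="date"
    # (parameter order follows dict iteration order, so it can vary by Python version)
    header_value = template % "bm90LWEtcmVhbC1zaWduYXR1cmU="  # placeholder, not a real signature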
""" - def __init__(self, headers, secret, required_headers=None, method=None, path=None, host=None): + + def __init__( + self, headers, secret, required_headers=None, method=None, path=None, host=None + ): """ Instantiate a HeaderVerifier object. @@ -57,9 +63,9 @@ def __init__(self, headers, secret, required_headers=None, method=None, path=Non :param path: Optional. The HTTP path requested, exactly as sent (including query arguments and fragments). Required for the '(request-target)' header. :param host: Optional. The value to use for the Host header, if not supplied in :param:headers. """ - required_headers = required_headers or ['date'] + required_headers = required_headers or ["date"] - auth = parse_authorization_header(headers['authorization']) + auth = parse_authorization_header(headers["authorization"]) if len(auth) == 2: self.auth_dict = auth[1] else: @@ -71,7 +77,9 @@ def __init__(self, headers, secret, required_headers=None, method=None, path=Non self.path = path self.host = host - super(HeaderVerifier, self).__init__(secret, algorithm=self.auth_dict['algorithm']) + super(HeaderVerifier, self).__init__( + secret, algorithm=self.auth_dict["algorithm"] + ) def verify(self): """ @@ -80,11 +88,17 @@ def verify(self): Raises an Exception if a required header (:param:required_headers) is not found in the signature. Returns True or False. """ - auth_headers = self.auth_dict.get('headers', 'date').split(' ') + auth_headers = self.auth_dict.get("headers", "date").split(" ") if len(set(self.required_headers) - set(auth_headers)) > 0: - raise Exception('{} is a required header(s)'.format(', '.join(set(self.required_headers)-set(auth_headers)))) + raise Exception( + "{} is a required header(s)".format( + ", ".join(set(self.required_headers) - set(auth_headers)) + ) + ) - signing_str = generate_message(auth_headers, self.headers, self.host, self.method, self.path) + signing_str = generate_message( + auth_headers, self.headers, self.host, self.method, self.path + ) - return self._verify(signing_str, self.auth_dict['signature']) + return self._verify(signing_str, self.auth_dict["signature"]) diff --git a/stream/personalization.py b/stream/personalization.py index 1b0d9df..8628798 100644 --- a/stream/personalization.py +++ b/stream/personalization.py @@ -20,8 +20,12 @@ def get(self, resource, **params): personalization.get('follow_recommendations', user_id=123, limit=10, offset=10) """ - response = self.client.get(resource, service_name='personalization', params=params, - signature=self.token) + response = self.client.get( + resource, + service_name="personalization", + params=params, + signature=self.token, + ) return response def post(self, resource, **params): @@ -37,10 +41,15 @@ def post(self, resource, **params): rejected=[456]) """ - data = params['data'] or None + data = params["data"] or None - response = self.client.post(resource, service_name='personalization', params=params, - signature=self.token, data=data) + response = self.client.post( + resource, + service_name="personalization", + params=params, + signature=self.token, + data=data, + ) return response def delete(self, resource, **params): @@ -51,7 +60,11 @@ def delete(self, resource, **params): :return: data that was deleted if if successful or not. 
""" - response = self.client.delete(resource, service_name='personalization', params=params, - signature=self.token) + response = self.client.delete( + resource, + service_name="personalization", + params=params, + signature=self.token, + ) return response diff --git a/stream/reactions.py b/stream/reactions.py index 47bee72..7b45791 100644 --- a/stream/reactions.py +++ b/stream/reactions.py @@ -61,5 +61,8 @@ def filter(self, **params): lookup_value = params.pop("user_id") return self.client.get( - "reaction/%s/%s/" % (lookup_field, lookup_value), service_name="api", signature=self.token, params=params + "reaction/%s/%s/" % (lookup_field, lookup_value), + service_name="api", + signature=self.token, + params=params, ) diff --git a/stream/serializer.py b/stream/serializer.py index 517e375..9fcc887 100644 --- a/stream/serializer.py +++ b/stream/serializer.py @@ -2,11 +2,11 @@ import json import six -''' +""" Adds the ability to send date and datetime objects to the API Datetime objects will be encoded/ decoded with microseconds The date and datetime formats from the API are automatically supported and parsed -''' +""" DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f" DATE_FORMAT = "%Y-%m-%d" @@ -23,8 +23,8 @@ def _datetime_decoder(dict_): # The built-in `json` library will `unicode` strings, except for empty # strings which are of type `str`. `jsondate` patches this for # consistency so that `unicode` is always returned. - if value == '': - dict_[key] = u'' + if value == "": + dict_[key] = u"" continue if value is not None and isinstance(value, six.string_types): @@ -45,10 +45,10 @@ def _datetime_decoder(dict_): def dumps(*args, **kwargs): - kwargs['default'] = _datetime_encoder + kwargs["default"] = _datetime_encoder return json.dumps(*args, **kwargs) def loads(*args, **kwargs): - kwargs['object_hook'] = _datetime_decoder + kwargs["object_hook"] = _datetime_decoder return json.loads(*args, **kwargs) diff --git a/stream/signing.py b/stream/signing.py index a971d0b..117e5be 100644 --- a/stream/signing.py +++ b/stream/signing.py @@ -4,10 +4,11 @@ def b64_encode(s): - return base64.urlsafe_b64encode(s).strip(b'=') + return base64.urlsafe_b64encode(s).strip(b"=") + def sign(api_secret, feed_id): - ''' + """ Base64 encoded sha1 signature :param api_secret: the api secret @@ -16,12 +17,10 @@ def sign(api_secret, feed_id): **Example**:: signature = sign('secret', 'user1') - ''' - hashed_secret = hashlib.sha1((api_secret).encode('utf-8')).digest() - signed = hmac.new( - hashed_secret, msg=feed_id.encode('utf8'), digestmod=hashlib.sha1) + """ + hashed_secret = hashlib.sha1((api_secret).encode("utf-8")).digest() + signed = hmac.new(hashed_secret, msg=feed_id.encode("utf8"), digestmod=hashlib.sha1) digest = signed.digest() urlsafe_digest = b64_encode(digest) - token = urlsafe_digest.decode('ascii') + token = urlsafe_digest.decode("ascii") return token - diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index 0509384..cdecac1 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -1374,37 +1374,29 @@ def test_feed_enrichment_user(self): def test_feed_enrichment_own_reaction(self): f = getfeed("user", "mike") - activity_data = { - "actor": "mike", - "verb": "buy", - "object": "object", - } + activity_data = {"actor": "mike", "verb": "buy", "object": "object"} response = f.add_activity(activity_data) reaction = self.c.reactions.add("like", response["id"], "mike") reaction.pop("duration") enriched_response = f.get(reactions={"own": True}, user_id="mike") - 
self.assertEqual(enriched_response["results"][0]["own_reactions"]["like"][0], reaction) + self.assertEqual( + enriched_response["results"][0]["own_reactions"]["like"][0], reaction + ) def test_feed_enrichment_recent_reaction(self): f = getfeed("user", "mike") - activity_data = { - "actor": "mike", - "verb": "buy", - "object": "object", - } + activity_data = {"actor": "mike", "verb": "buy", "object": "object"} response = f.add_activity(activity_data) reaction = self.c.reactions.add("like", response["id"], "mike") reaction.pop("duration") enriched_response = f.get(reactions={"recent": True}) - self.assertEqual(enriched_response["results"][0]["latest_reactions"]["like"][0], reaction) + self.assertEqual( + enriched_response["results"][0]["latest_reactions"]["like"][0], reaction + ) def test_feed_enrichment_reaction_counts(self): f = getfeed("user", "mike") - activity_data = { - "actor": "mike", - "verb": "buy", - "object": "object", - } + activity_data = {"actor": "mike", "verb": "buy", "object": "object"} response = f.add_activity(activity_data) reaction = self.c.reactions.add("like", response["id"], "mike") reaction.pop("duration") diff --git a/stream/users.py b/stream/users.py index e4cbf82..c786208 100644 --- a/stream/users.py +++ b/stream/users.py @@ -27,10 +27,7 @@ def get(self, user_id): def update(self, user_id, data=None): payload = dict(data=data) return self.client.put( - "user/%s" % user_id, - service_name="api", - signature=self.token, - data=payload, + "user/%s" % user_id, service_name="api", signature=self.token, data=payload ) def delete(self, user_id): diff --git a/stream/utils.py b/stream/utils.py index 04ed771..71a7f74 100644 --- a/stream/utils.py +++ b/stream/utils.py @@ -1,56 +1,56 @@ import re -valid_re = re.compile('^[\w-]+$') +valid_re = re.compile("^[\w-]+$") def validate_feed_id(feed_id): - ''' + """ Validates the input is in the format of user:1 :param feed_id: a feed such as user:1 Raises ValueError if the format doesnt match - ''' + """ feed_id = str(feed_id) - if len(feed_id.split(':')) != 2: - msg = 'Invalid feed_id spec %s, please specify the feed_id as feed_slug:feed_id' + if len(feed_id.split(":")) != 2: + msg = "Invalid feed_id spec %s, please specify the feed_id as feed_slug:feed_id" raise ValueError(msg % feed_id) - - feed_slug, user_id = feed_id.split(':') + + feed_slug, user_id = feed_id.split(":") feed_slug = validate_feed_slug(feed_slug) user_id = validate_user_id(user_id) return feed_id - + def validate_feed_slug(feed_slug): - ''' + """ Validates the feed slug falls into \w - ''' + """ feed_slug = str(feed_slug) if not valid_re.match(feed_slug): - msg = 'Invalid feed slug %s, please only use letters, numbers and _' + msg = "Invalid feed slug %s, please only use letters, numbers and _" raise ValueError(msg % feed_slug) return feed_slug def validate_user_id(user_id): - ''' + """ Validates the user id falls into \w - ''' + """ user_id = str(user_id) if not valid_re.match(user_id): - msg = 'Invalid user id %s, please only use letters, numbers and _' + msg = "Invalid user id %s, please only use letters, numbers and _" raise ValueError(msg % user_id) return user_id - + def validate_foreign_id_time(foreign_id_time): if not isinstance(foreign_id_time, (list, tuple)): - raise ValueError('foreign_id_time should be a list of tuples') + raise ValueError("foreign_id_time should be a list of tuples") for v in foreign_id_time: if not isinstance(v, (list, tuple)): - raise ValueError('foreign_id_time elements should be lists or tuples') + raise 
ValueError("foreign_id_time elements should be lists or tuples") if len(v) != 2: - raise ValueError('foreign_id_time elements should have two elements') + raise ValueError("foreign_id_time elements should have two elements") From 1577927b4262a714916eb6df0cdadb1886f46dd8 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Fri, 30 Nov 2018 15:06:30 +0100 Subject: [PATCH 126/208] remove httpsig and other signing code superseeded by JWT --- stream/client.py | 51 +----- stream/httpsig/__init__.py | 2 - stream/httpsig/requests_auth.py | 41 ----- stream/httpsig/sign.py | 111 ------------- stream/httpsig/tests/__init__.py | 3 - stream/httpsig/tests/rsa_private.pem | 15 -- stream/httpsig/tests/rsa_public.pem | 6 - stream/httpsig/tests/test_signature.py | 84 ---------- stream/httpsig/tests/test_utils.py | 18 --- stream/httpsig/tests/test_verify.py | 205 ------------------------- stream/httpsig/utils.py | 196 ----------------------- stream/httpsig/verify.py | 104 ------------- stream/signing.py | 26 ---- stream/utils.py | 4 +- 14 files changed, 9 insertions(+), 857 deletions(-) delete mode 100644 stream/httpsig/__init__.py delete mode 100644 stream/httpsig/requests_auth.py delete mode 100644 stream/httpsig/sign.py delete mode 100644 stream/httpsig/tests/__init__.py delete mode 100644 stream/httpsig/tests/rsa_private.pem delete mode 100644 stream/httpsig/tests/rsa_public.pem delete mode 100755 stream/httpsig/tests/test_signature.py delete mode 100755 stream/httpsig/tests/test_utils.py delete mode 100755 stream/httpsig/tests/test_verify.py delete mode 100644 stream/httpsig/utils.py delete mode 100644 stream/httpsig/verify.py delete mode 100644 stream/signing.py diff --git a/stream/client.py b/stream/client.py index 13d5563..2d1943f 100644 --- a/stream/client.py +++ b/stream/client.py @@ -8,14 +8,13 @@ from stream.serializer import _datetime_encoder from stream import exceptions, serializer -from stream.signing import sign from stream.users import Users from stream.utils import validate_feed_slug, validate_user_id, validate_foreign_id_time -from stream.httpsig.requests_auth import HTTPSignatureAuth from requests import Request from stream.reactions import Reactions from stream.collections import Collections from stream.personalization import Personalization +from stream.feed import Feed try: from urllib.parse import urlparse @@ -89,7 +88,6 @@ def __init__( self.base_analytics_url = "https://analytics.stream-io-api.com/analytics/" self.session = requests.Session() - self.auth = HTTPSignatureAuth(api_key, secret=api_secret) token = self.create_jwt_token("personalization", "*", feed_id="*", user_id="*") self.personalization = Personalization(self, token) @@ -110,15 +108,9 @@ def feed(self, feed_slug, user_id): :param feed_slug: the slug of the feed :param user_id: the user id """ - from stream.feed import Feed - feed_slug = validate_feed_slug(feed_slug) user_id = validate_user_id(user_id) - - # generate the token - feed_id = "%s%s" % (feed_slug, user_id) - token = sign(self.api_secret, feed_id) - + token = self.create_jwt_token("feed", "*", feed_id="*") return Feed(self, feed_slug, user_id, token) def get_default_params(self): @@ -177,34 +169,6 @@ def _parse_response(self, response): self.raise_exception(parsed_result, status_code=response.status_code) return parsed_result - def _make_signed_request(self, method_name, relative_url, params=None, data=None): - params = params or {} - data = data or {} - serialized = None - headers = self.get_default_header() - headers["X-Api-Key"] = self.api_key - 
date_header = datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S GMT") - headers["Date"] = date_header - default_params = self.get_default_params() - default_params.update(params) - url = self.get_full_url("api", relative_url) - serialized = serializer.dumps(data) - method = getattr(self.session, method_name) - if method_name in ["post", "put"]: - serialized = serializer.dumps(data) - response = method( - url, - auth=self.auth, - data=serialized, - headers=headers, - params=default_params, - timeout=self.timeout, - ) - logger.debug( - "stream api call %s, headers %s data %s", response.url, headers, data - ) - return self._parse_response(response) - def create_user_session_token(self, user_id, **extra_data): """Setup the payload for the given user_id with optional extra data (key, value pairs) and encode it using jwt @@ -286,8 +250,6 @@ def errors_from_fields(exception_fields): error_message = result["detail"] exception_fields = result.get("exception_fields") if exception_fields is not None: - errors = [] - if isinstance(exception_fields, list): errors = [ errors_from_fields(exception_dict) @@ -341,7 +303,8 @@ def add_to_many(self, activity, feeds): """ data = {"activity": activity, "feeds": feeds} - return self._make_signed_request("post", "feed/add_to_many/", data=data) + token = self.create_jwt_token("feed", "*", feed_id="*") + return self.post("feed/add_to_many/", token, data=data) def follow_many(self, follows, activity_copy_limit=None): """ @@ -355,9 +318,9 @@ def follow_many(self, follows, activity_copy_limit=None): if activity_copy_limit != None: params = dict(activity_copy_limit=activity_copy_limit) - - return self._make_signed_request( - "post", "follow_many/", params=params, data=follows + token = self.create_jwt_token("follower", "*", feed_id="*") + return self.post( + "follow_many/", token, params=params, data=follows ) def update_activities(self, activities): diff --git a/stream/httpsig/__init__.py b/stream/httpsig/__init__.py deleted file mode 100644 index 18aa3f9..0000000 --- a/stream/httpsig/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from .sign import Signer, HeaderSigner -from .verify import Verifier, HeaderVerifier diff --git a/stream/httpsig/requests_auth.py b/stream/httpsig/requests_auth.py deleted file mode 100644 index 247cafa..0000000 --- a/stream/httpsig/requests_auth.py +++ /dev/null @@ -1,41 +0,0 @@ -from requests.auth import AuthBase - -try: - # Python 3 - from urllib.parse import urlparse -except ImportError: - # Python 2 - from urlparse import urlparse - -from .sign import HeaderSigner - - -class HTTPSignatureAuth(AuthBase): - """ - Sign a request using the http-signature scheme. - https://github.com/joyent/node-http-signature/blob/master/http_signing.md - - key_id is the mandatory label indicating to the server which secret to use - secret is the filename of a pem file in the case of rsa, a password string in the case of an hmac algorithm - algorithm is one of the six specified algorithms - headers is a list of http headers to be included in the signing string, defaulting to "Date" alone. 
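Stepping back from the deletions in this patch: the client.py hunks above replace the per-request HTTP-signature auth with JWT tokens scoped per resource (create_jwt_token("feed", "*", feed_id="*"), and so on). The token builder itself is not part of this diff, so the sketch below is only an illustration using PyJWT; the claim names resource/action/feed_id/user_id are inferred from the call sites and should be treated as assumptions.

    # Hypothetical sketch of a resource-scoped token like the ones the new
    # client code requests via create_jwt_token(); claim names are guesses
    # based on the call sites above, not the real implementation.
    import jwt  # PyJWT

    def make_scoped_token(api_secret, resource, action, feed_id=None, user_id=None):
        payload = {"resource": resource, "action": action}
        if feed_id is not None:
            payload["feed_id"] = feed_id
        if user_id is not None:
            payload["user_id"] = user_id
        # Signed with the API secret, much like the removed HMAC feed tokens were.
        return jwt.encode(payload, api_secret, algorithm="HS256")

    token = make_scoped_token("my_api_secret", "feed", "*", feed_id="*")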
- """ - - def __init__(self, key_id="", secret="", algorithm=None, headers=None): - headers = headers or [] - self.header_signer = HeaderSigner( - key_id=key_id, secret=secret, algorithm=algorithm, headers=headers - ) - self.uses_host = "host" in [h.lower() for h in headers] - - def __call__(self, r): - headers = self.header_signer.sign( - r.headers, - # 'Host' header unavailable in request object at this point - # if 'host' header is needed, extract it from the url - host=urlparse(r.url).netloc if self.uses_host else None, - method=r.method, - path=r.path_url, - ) - r.headers.update(headers) - return r diff --git a/stream/httpsig/sign.py b/stream/httpsig/sign.py deleted file mode 100644 index 18a4abe..0000000 --- a/stream/httpsig/sign.py +++ /dev/null @@ -1,111 +0,0 @@ -import base64 -import six - -from Cryptodome.Hash import HMAC -from Cryptodome.PublicKey import RSA -from Cryptodome.Signature import PKCS1_v1_5 - -from .utils import * - - -DEFAULT_SIGN_ALGORITHM = "hmac-sha256" - - -class Signer(object): - """ - When using an RSA algo, the secret is a PEM-encoded private key. - When using an HMAC algo, the secret is the HMAC signing secret. - - Password-protected keyfiles are not supported. - """ - - def __init__(self, secret, algorithm=None): - if algorithm is None: - algorithm = DEFAULT_SIGN_ALGORITHM - - assert algorithm in ALGORITHMS, "Unknown algorithm" - if isinstance(secret, six.string_types): - secret = secret.encode("ascii") - - self._rsa = None - self._hash = None - self.sign_algorithm, self.hash_algorithm = algorithm.split("-") - - if self.sign_algorithm == "rsa": - try: - rsa_key = RSA.importKey(secret) - self._rsa = PKCS1_v1_5.new(rsa_key) - self._hash = HASHES[self.hash_algorithm] - except ValueError: - raise HttpSigException("Invalid key.") - - elif self.sign_algorithm == "hmac": - self._hash = HMAC.new(secret, digestmod=HASHES[self.hash_algorithm]) - - @property - def algorithm(self): - return "%s-%s" % (self.sign_algorithm, self.hash_algorithm) - - def _sign_rsa(self, data): - if isinstance(data, six.string_types): - data = data.encode("ascii") - h = self._hash.new() - h.update(data) - return self._rsa.sign(h) - - def _sign_hmac(self, data): - if isinstance(data, six.string_types): - data = data.encode("ascii") - hmac = self._hash.copy() - hmac.update(data) - return hmac.digest() - - def _sign(self, data): - if isinstance(data, six.string_types): - data = data.encode("ascii") - signed = None - if self._rsa: - signed = self._sign_rsa(data) - elif self._hash: - signed = self._sign_hmac(data) - if not signed: - raise SystemError("No valid encryptor found.") - return base64.b64encode(signed).decode("ascii") - - -class HeaderSigner(Signer): - """ - Generic object that will sign headers as a dictionary using the http-signature scheme. - https://github.com/joyent/node-http-signature/blob/master/http_signing.md - - :arg key_id: the mandatory label indicating to the server which secret to use - :arg secret: a PEM-encoded RSA private key or an HMAC secret (must match the algorithm) - :arg algorithm: one of the six specified algorithms - :arg headers: a list of http headers to be included in the signing string, defaulting to ['date']. 
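For reference while reading this deletion, the module's own tests (test_signature.py, removed further down in the same patch) exercised HeaderSigner roughly as follows, using the rsa_private.pem fixture:

    # Condensed from the removed test_signature.py: sign a Date header with
    # the RSA test key, then parse the Authorization header it produces.
    import os
    from stream.httpsig.sign import HeaderSigner
    from stream.httpsig.utils import parse_authorization_header

    key_path = os.path.join(os.path.dirname(__file__), "rsa_private.pem")
    with open(key_path, "rb") as f:
        key = f.read()

    hs = HeaderSigner(key_id="Test", secret=key, algorithm="rsa-sha256")
    signed = hs.sign({"Date": "Thu, 05 Jan 2012 21:31:40 GMT"})

    # The header has the form: Signature keyId="Test",algorithm="rsa-sha256",signature="..."
    scheme, params = parse_authorization_header(signed["authorization"])
    assert scheme == "Signature" and params["keyId"] == "Test"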
- """ - - def __init__(self, key_id, secret, algorithm=None, headers=None): - if algorithm is None: - algorithm = DEFAULT_SIGN_ALGORITHM - - super(HeaderSigner, self).__init__(secret=secret, algorithm=algorithm) - self.headers = headers or ["date"] - self.signature_template = build_signature_template(key_id, algorithm, headers) - - def sign(self, headers, host=None, method=None, path=None): - """ - Add Signature Authorization header to case-insensitive header dict. - - headers is a case-insensitive dict of mutable headers. - host is a override for the 'host' header (defaults to value in headers). - method is the HTTP method (required when using '(request-target)'). - path is the HTTP path (required when using '(request-target)'). - """ - headers = CaseInsensitiveDict(headers) - required_headers = self.headers or ["date"] - signable = generate_message(required_headers, headers, host, method, path) - - signature = self._sign(signable) - headers["authorization"] = self.signature_template % signature - - return headers diff --git a/stream/httpsig/tests/__init__.py b/stream/httpsig/tests/__init__.py deleted file mode 100644 index d9018eb..0000000 --- a/stream/httpsig/tests/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .test_signature import * -from .test_utils import * -from .test_verify import * diff --git a/stream/httpsig/tests/rsa_private.pem b/stream/httpsig/tests/rsa_private.pem deleted file mode 100644 index 425518a..0000000 --- a/stream/httpsig/tests/rsa_private.pem +++ /dev/null @@ -1,15 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -MIICXgIBAAKBgQDCFENGw33yGihy92pDjZQhl0C36rPJj+CvfSC8+q28hxA161QF -NUd13wuCTUcq0Qd2qsBe/2hFyc2DCJJg0h1L78+6Z4UMR7EOcpfdUE9Hf3m/hs+F -UR45uBJeDK1HSFHD8bHKD6kv8FPGfJTotc+2xjJwoYi+1hqp1fIekaxsyQIDAQAB -AoGBAJR8ZkCUvx5kzv+utdl7T5MnordT1TvoXXJGXK7ZZ+UuvMNUCdN2QPc4sBiA -QWvLw1cSKt5DsKZ8UETpYPy8pPYnnDEz2dDYiaew9+xEpubyeW2oH4Zx71wqBtOK -kqwrXa/pzdpiucRRjk6vE6YY7EBBs/g7uanVpGibOVAEsqH1AkEA7DkjVH28WDUg -f1nqvfn2Kj6CT7nIcE3jGJsZZ7zlZmBmHFDONMLUrXR/Zm3pR5m0tCmBqa5RK95u -412jt1dPIwJBANJT3v8pnkth48bQo/fKel6uEYyboRtA5/uHuHkZ6FQF7OUkGogc -mSJluOdc5t6hI1VsLn0QZEjQZMEOWr+wKSMCQQCC4kXJEsHAve77oP6HtG/IiEn7 -kpyUXRNvFsDE0czpJJBvL/aRFUJxuRK91jhjC68sA7NsKMGg5OXb5I5Jj36xAkEA -gIT7aFOYBFwGgQAQkWNKLvySgKbAZRTeLBacpHMuQdl1DfdntvAyqpAZ0lY0RKmW -G6aFKaqQfOXKCyWoUiVknQJAXrlgySFci/2ueKlIE1QqIiLSZ8V8OlpFLRnb1pzI -7U1yQXnTAEFYM560yJlzUpOb1V4cScGd365tiSMvxLOvTA== ------END RSA PRIVATE KEY----- diff --git a/stream/httpsig/tests/rsa_public.pem b/stream/httpsig/tests/rsa_public.pem deleted file mode 100644 index b3bbf6c..0000000 --- a/stream/httpsig/tests/rsa_public.pem +++ /dev/null @@ -1,6 +0,0 @@ ------BEGIN PUBLIC KEY----- -MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDCFENGw33yGihy92pDjZQhl0C3 -6rPJj+CvfSC8+q28hxA161QFNUd13wuCTUcq0Qd2qsBe/2hFyc2DCJJg0h1L78+6 -Z4UMR7EOcpfdUE9Hf3m/hs+FUR45uBJeDK1HSFHD8bHKD6kv8FPGfJTotc+2xjJw -oYi+1hqp1fIekaxsyQIDAQAB ------END PUBLIC KEY----- diff --git a/stream/httpsig/tests/test_signature.py b/stream/httpsig/tests/test_signature.py deleted file mode 100755 index 2e33f6e..0000000 --- a/stream/httpsig/tests/test_signature.py +++ /dev/null @@ -1,84 +0,0 @@ -#!/usr/bin/env python -import sys -import os - -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) - -import json -import unittest - -import stream.httpsig.sign as sign -from stream.httpsig.utils import parse_authorization_header - - -class TestSign(unittest.TestCase): - DEFAULT_SIGN_ALGORITHM = sign.DEFAULT_SIGN_ALGORITHM - - def setUp(self): - sign.DEFAULT_SIGN_ALGORITHM = "rsa-sha256" - 
self.key_path = os.path.join(os.path.dirname(__file__), "rsa_private.pem") - with open(self.key_path, "rb") as f: - self.key = f.read() - - def tearDown(self): - sign.DEFAULT_SIGN_ALGORITHM = self.DEFAULT_SIGN_ALGORITHM - - def test_default(self): - hs = sign.HeaderSigner(key_id="Test", secret=self.key) - unsigned = {"Date": "Thu, 05 Jan 2012 21:31:40 GMT"} - signed = hs.sign(unsigned) - self.assertTrue("Date" in signed) - self.assertEqual(unsigned["Date"], signed["Date"]) - self.assertTrue("Authorization" in signed) - auth = parse_authorization_header(signed["authorization"]) - params = auth[1] - self.assertTrue("keyId" in params) - self.assertTrue("algorithm" in params) - self.assertTrue("signature" in params) - self.assertEqual(params["keyId"], "Test") - self.assertEqual(params["algorithm"], "rsa-sha256") - self.assertEqual( - params["signature"], - "ATp0r26dbMIxOopqw0OfABDT7CKMIoENumuruOtarj8n/97Q3htHFYpH8yOSQk3Z5zh8UxUym6FYTb5+A0Nz3NRsXJibnYi7brE/4tx5But9kkFGzG+xpUmimN4c3TMN7OFH//+r8hBf7BT9/GmHDUVZT2JzWGLZES2xDOUuMtA=", - ) - - def test_all(self): - hs = sign.HeaderSigner( - key_id="Test", - secret=self.key, - headers=[ - "(request-target)", - "host", - "date", - "content-type", - "content-md5", - "content-length", - ], - ) - unsigned = { - "Host": "example.com", - "Date": "Thu, 05 Jan 2012 21:31:40 GMT", - "Content-Type": "application/json", - "Content-MD5": "Sd/dVLAcvNLSq16eXua5uQ==", - "Content-Length": "18", - } - signed = hs.sign(unsigned, method="POST", path="/foo?param=value&pet=dog") - - self.assertTrue("Date" in signed) - self.assertEqual(unsigned["Date"], signed["Date"]) - self.assertTrue("Authorization" in signed) - auth = parse_authorization_header(signed["authorization"]) - params = auth[1] - self.assertTrue("keyId" in params) - self.assertTrue("algorithm" in params) - self.assertTrue("signature" in params) - self.assertEqual(params["keyId"], "Test") - self.assertEqual(params["algorithm"], "rsa-sha256") - self.assertEqual( - params["headers"], - "(request-target) host date content-type content-md5 content-length", - ) - self.assertEqual( - params["signature"], - "G8/Uh6BBDaqldRi3VfFfklHSFoq8CMt5NUZiepq0q66e+fS3Up3BmXn0NbUnr3L1WgAAZGplifRAJqp2LgeZ5gXNk6UX9zV3hw5BERLWscWXlwX/dvHQES27lGRCvyFv3djHP6Plfd5mhPWRkmjnvqeOOSS0lZJYFYHJz994s6w=", - ) diff --git a/stream/httpsig/tests/test_utils.py b/stream/httpsig/tests/test_utils.py deleted file mode 100755 index 10d4d02..0000000 --- a/stream/httpsig/tests/test_utils.py +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env python -import os -import re -import sys - -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) - -import unittest - -from stream.httpsig.utils import get_fingerprint - - -class TestUtils(unittest.TestCase): - def test_get_fingerprint(self): - with open(os.path.join(os.path.dirname(__file__), "rsa_public.pem"), "r") as k: - key = k.read() - fingerprint = get_fingerprint(key) - self.assertEqual(fingerprint, "73:61:a2:21:67:e0:df:be:7e:4b:93:1e:15:98:a5:b7") diff --git a/stream/httpsig/tests/test_verify.py b/stream/httpsig/tests/test_verify.py deleted file mode 100755 index 2b9c0b9..0000000 --- a/stream/httpsig/tests/test_verify.py +++ /dev/null @@ -1,205 +0,0 @@ -#!/usr/bin/env python -import sys -import os - -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) - -import json -import unittest - -from stream.httpsig.sign import HeaderSigner, Signer -from stream.httpsig.verify import HeaderVerifier, Verifier - - -class BaseTestCase(unittest.TestCase): - def _parse_auth(self, auth): - """Basic 
Authorization header parsing.""" - # split 'Signature kvpairs' - s, param_str = auth.split(" ", 1) - self.assertEqual(s, "Signature") - # split k1="v1",k2="v2",... - param_list = param_str.split(",") - # convert into [(k1,"v1"), (k2, "v2"), ...] - param_pairs = [p.split("=", 1) for p in param_list] - # convert into {k1:v1, k2:v2, ...} - param_dict = {} - for k, v in param_pairs: - param_dict[k] = v.strip('"') - return param_dict - - -class TestVerifyHMACSHA1(BaseTestCase): - def setUp(self): - secret = b"something special goes here" - - self.keyId = "Test" - self.algorithm = "hmac-sha1" - self.sign_secret = secret - self.verify_secret = secret - - def test_basic_sign(self): - signer = Signer(secret=self.sign_secret, algorithm=self.algorithm) - verifier = Verifier(secret=self.verify_secret, algorithm=self.algorithm) - - GOOD = b"this is a test" - BAD = b"this is not the signature you were looking for..." - - # generate signed string - signature = signer._sign(GOOD) - self.assertTrue(verifier._verify(data=GOOD, signature=signature)) - self.assertFalse(verifier._verify(data=BAD, signature=signature)) - - def test_default(self): - unsigned = {"Date": "Thu, 05 Jan 2012 21:31:40 GMT"} - - hs = HeaderSigner( - key_id="Test", secret=self.sign_secret, algorithm=self.algorithm - ) - signed = hs.sign(unsigned) - hv = HeaderVerifier(headers=signed, secret=self.verify_secret) - self.assertTrue(hv.verify()) - - def test_signed_headers(self): - HOST = "example.com" - METHOD = "POST" - PATH = "/foo?param=value&pet=dog" - hs = HeaderSigner( - key_id="Test", - secret=self.sign_secret, - algorithm=self.algorithm, - headers=[ - "(request-target)", - "host", - "date", - "content-type", - "content-md5", - "content-length", - ], - ) - unsigned = { - "Host": HOST, - "Date": "Thu, 05 Jan 2012 21:31:40 GMT", - "Content-Type": "application/json", - "Content-MD5": "Sd/dVLAcvNLSq16eXua5uQ==", - "Content-Length": "18", - } - signed = hs.sign(unsigned, method=METHOD, path=PATH) - - hv = HeaderVerifier( - headers=signed, - secret=self.verify_secret, - host=HOST, - method=METHOD, - path=PATH, - ) - self.assertTrue(hv.verify()) - - def test_incorrect_headers(self): - HOST = "example.com" - METHOD = "POST" - PATH = "/foo?param=value&pet=dog" - hs = HeaderSigner( - secret=self.sign_secret, - key_id="Test", - algorithm=self.algorithm, - headers=[ - "(request-target)", - "host", - "date", - "content-type", - "content-md5", - "content-length", - ], - ) - unsigned = { - "Host": HOST, - "Date": "Thu, 05 Jan 2012 21:31:40 GMT", - "Content-Type": "application/json", - "Content-MD5": "Sd/dVLAcvNLSq16eXua5uQ==", - "Content-Length": "18", - } - signed = hs.sign(unsigned, method=METHOD, path=PATH) - - hv = HeaderVerifier( - headers=signed, - secret=self.verify_secret, - required_headers=["some-other-header"], - host=HOST, - method=METHOD, - path=PATH, - ) - self.assertRaises(Exception, hv.verify) - - def test_extra_auth_headers(self): - HOST = "example.com" - METHOD = "POST" - PATH = "/foo?param=value&pet=dog" - hs = HeaderSigner( - key_id="Test", - secret=self.sign_secret, - algorithm=self.algorithm, - headers=[ - "(request-target)", - "host", - "date", - "content-type", - "content-md5", - "content-length", - ], - ) - unsigned = { - "Host": HOST, - "Date": "Thu, 05 Jan 2012 21:31:40 GMT", - "Content-Type": "application/json", - "Content-MD5": "Sd/dVLAcvNLSq16eXua5uQ==", - "Content-Length": "18", - } - signed = hs.sign(unsigned, method=METHOD, path=PATH) - hv = HeaderVerifier( - headers=signed, - secret=self.verify_secret, - 
method=METHOD, - path=PATH, - required_headers=["date", "(request-target)"], - ) - self.assertTrue(hv.verify()) - - -class TestVerifyHMACSHA256(TestVerifyHMACSHA1): - def setUp(self): - super(TestVerifyHMACSHA256, self).setUp() - self.algorithm = "hmac-sha256" - - -class TestVerifyHMACSHA512(TestVerifyHMACSHA1): - def setUp(self): - super(TestVerifyHMACSHA512, self).setUp() - self.algorithm = "hmac-sha512" - - -class TestVerifyRSASHA1(TestVerifyHMACSHA1): - def setUp(self): - private_key_path = os.path.join(os.path.dirname(__file__), "rsa_private.pem") - with open(private_key_path, "rb") as f: - private_key = f.read() - - public_key_path = os.path.join(os.path.dirname(__file__), "rsa_public.pem") - with open(public_key_path, "rb") as f: - public_key = f.read() - - self.keyId = "Test" - self.algorithm = "rsa-sha1" - self.sign_secret = private_key - self.verify_secret = public_key - - -class TestVerifyRSASHA256(TestVerifyRSASHA1): - def setUp(self): - super(TestVerifyRSASHA256, self).setUp() - self.algorithm = "rsa-sha256" - - -class TestVerifyRSASHA512(TestVerifyRSASHA1): - def setUp(self): - super(TestVerifyRSASHA512, self).setUp() - self.algorithm = "rsa-sha512" diff --git a/stream/httpsig/utils.py b/stream/httpsig/utils.py deleted file mode 100644 index dc81ce5..0000000 --- a/stream/httpsig/utils.py +++ /dev/null @@ -1,196 +0,0 @@ -import re -import struct -import hashlib -import base64 -import six - -try: - # Python 3 - from urllib.request import parse_http_list -except ImportError: - # Python 2 - from urllib2 import parse_http_list - -from Cryptodome.PublicKey import RSA -from Cryptodome.Hash import SHA, SHA256, SHA512 - -ALGORITHMS = frozenset( - ["rsa-sha1", "rsa-sha256", "rsa-sha512", "hmac-sha1", "hmac-sha256", "hmac-sha512"] -) -HASHES = {"sha1": SHA, "sha256": SHA256, "sha512": SHA512} - - -class HttpSigException(Exception): - pass - - -""" -Constant-time string compare. -http://codahale.com/a-lesson-in-timing-attacks/ -""" - - -def ct_bytes_compare(a, b): - if not isinstance(a, six.binary_type): - a = a.decode("utf8") - if not isinstance(b, six.binary_type): - b = b.decode("utf8") - - if len(a) != len(b): - return False - - result = 0 - for x, y in zip(a, b): - if six.PY2: - result |= ord(x) ^ ord(y) - else: - result |= x ^ y - - return result == 0 - - -def generate_message(required_headers, headers, host=None, method=None, path=None): - headers = CaseInsensitiveDict(headers) - - if not required_headers: - required_headers = ["date"] - - signable_list = [] - for h in required_headers: - h = h.lower() - if h == "(request-target)": - if not method or not path: - raise Exception( - 'method and path arguments required when using "(request-target)"' - ) - signable_list.append("%s: %s %s" % (h, method.lower(), path)) - - elif h == "host": - # 'host' special case due to requests lib restrictions - # 'host' is not available when adding auth so must use a param - # if no param used, defaults back to the 'host' header - if not host: - if "host" in headers: - host = headers[h] - else: - raise Exception('missing required header "%s"' % (h)) - signable_list.append("%s: %s" % (h, host)) - else: - if h not in headers: - raise Exception('missing required header "%s"' % (h)) - - signable_list.append("%s: %s" % (h, headers[h])) - - signable = "\n".join(signable_list).encode("ascii") - return signable - - -def parse_authorization_header(header): - if not isinstance(header, six.string_types): - header = header.decode("ascii") # HTTP headers cannot be Unicode. 
- - auth = header.split(" ", 1) - if len(auth) > 2: - raise ValueError( - 'Invalid authorization header. (eg. Method key1=value1,key2="value, "2"")' - ) - - # Split up any args into a dictionary. - values = {} - if len(auth) == 2: - auth_value = auth[1] - if auth_value and len(auth_value): - # This is tricky string magic. Let urllib do it. - fields = parse_http_list(auth_value) - - for item in fields: - # Only include keypairs. - if "=" in item: - # Split on the first '=' only. - key, value = item.split("=", 1) - if not (len(key) and len(value)): - continue - - # Unquote values, if quoted. - if value[0] == '"': - value = value[1:-1] - - values[key] = value - - # ("Signature", {"headers": "date", "algorithm": "hmac-sha256", ... }) - return (auth[0], CaseInsensitiveDict(values)) - - -def build_signature_template(key_id, algorithm, headers): - """ - Build the Signature template for use with the Authorization header. - - key_id is the mandatory label indicating to the server which secret to use - algorithm is one of the six specified algorithms - headers is a list of http headers to be included in the signing string. - - The signature must be interpolated into the template to get the final Authorization header value. - """ - param_map = {"keyId": key_id, "algorithm": algorithm, "signature": "%s"} - if headers: - headers = [h.lower() for h in headers] - param_map["headers"] = " ".join(headers) - kv = map('{0[0]}="{0[1]}"'.format, param_map.items()) - kv_string = ",".join(kv) - sig_string = "Signature {0}".format(kv_string) - return sig_string - - -def lkv(d): - parts = [] - while d: - len = struct.unpack(">I", d[:4])[0] - bits = d[4 : len + 4] - parts.append(bits) - d = d[len + 4 :] - return parts - - -def sig(d): - return lkv(d)[1] - - -def is_rsa(keyobj): - return lkv(keyobj.blob)[0] == "ssh-rsa" - - -# based on http://stackoverflow.com/a/2082169/151401 -class CaseInsensitiveDict(dict): - def __init__(self, d=None, **kwargs): - super(CaseInsensitiveDict, self).__init__(**kwargs) - if d: - self.update((k.lower(), v) for k, v in six.iteritems(d)) - - def __setitem__(self, key, value): - super(CaseInsensitiveDict, self).__setitem__(key.lower(), value) - - def __getitem__(self, key): - return super(CaseInsensitiveDict, self).__getitem__(key.lower()) - - def __contains__(self, key): - return super(CaseInsensitiveDict, self).__contains__(key.lower()) - - -# currently busted... -def get_fingerprint(key): - """ - Takes an ssh public key and generates the fingerprint. - - See: http://tools.ietf.org/html/rfc4716 for more info - """ - if key.startswith("ssh-rsa"): - key = key.split(" ")[1] - else: - regex = r"\-{4,5}[\w|| ]+\-{4,5}" - key = re.split(regex, key)[1] - - key = key.replace("\n", "") - key = key.strip().encode("ascii") - key = base64.b64decode(key) - fp_plain = hashlib.md5(key).hexdigest() - return ":".join(a + b for a, b in zip(fp_plain[::2], fp_plain[1::2])) diff --git a/stream/httpsig/verify.py b/stream/httpsig/verify.py deleted file mode 100644 index 27b325f..0000000 --- a/stream/httpsig/verify.py +++ /dev/null @@ -1,104 +0,0 @@ -""" -Module to assist in verifying a signed header. -""" -import six - -from Cryptodome.Hash import HMAC -from Cryptodome.PublicKey import RSA -from Cryptodome.Signature import PKCS1_v1_5 -from base64 import b64decode - -from .sign import Signer -from .utils import * - - -class Verifier(Signer): - """ - Verifies signed text against a secret. - For HMAC, the secret is the shared secret. - For RSA, the secret is the PUBLIC key. 
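One note on the HMAC branch of _verify below: the removed ct_bytes_compare helper (deleted from utils.py above) is a hand-rolled version of the constant-time comparison the standard library already provides as hmac.compare_digest. A minimal illustration, reusing the shared secret from the removed tests:

    # Constant-time digest comparison, the property ct_bytes_compare existed for.
    import hashlib
    import hmac

    secret = b"something special goes here"  # shared secret from the removed tests
    data = b"this is a test"

    expected = hmac.new(secret, data, hashlib.sha256).digest()
    received = hmac.new(secret, data, hashlib.sha256).digest()

    # compare_digest takes the same time regardless of where the inputs differ,
    # which defeats the timing attack the removed helper guarded against.
    assert hmac.compare_digest(expected, received)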
- """ - - def _verify(self, data, signature): - """ - Verifies the data matches a signed version with the given signature. - `data` is the message to verify - `signature` is a base64-encoded signature to verify against `data` - """ - - if isinstance(data, six.string_types): - data = data.encode("ascii") - if isinstance(signature, six.string_types): - signature = signature.encode("ascii") - - if self.sign_algorithm == "rsa": - h = self._hash.new() - h.update(data) - return self._rsa.verify(h, b64decode(signature)) - - elif self.sign_algorithm == "hmac": - h = self._sign_hmac(data) - s = b64decode(signature) - return ct_bytes_compare(h, s) - - else: - raise HttpSigException("Unsupported algorithm.") - - -class HeaderVerifier(Verifier): - """ - Verifies an HTTP signature from given headers. - """ - - def __init__( - self, headers, secret, required_headers=None, method=None, path=None, host=None - ): - """ - Instantiate a HeaderVerifier object. - - :param headers: A dictionary of headers from the HTTP request. - :param secret: The HMAC secret or RSA *public* key. - :param required_headers: Optional. A list of headers required to be present to validate, even if the signature is otherwise valid. Defaults to ['date']. - :param method: Optional. The HTTP method used in the request (eg. "GET"). Required for the '(request-target)' header. - :param path: Optional. The HTTP path requested, exactly as sent (including query arguments and fragments). Required for the '(request-target)' header. - :param host: Optional. The value to use for the Host header, if not supplied in :param:headers. - """ - required_headers = required_headers or ["date"] - - auth = parse_authorization_header(headers["authorization"]) - if len(auth) == 2: - self.auth_dict = auth[1] - else: - raise HttpSigException("Invalid authorization header.") - - self.headers = CaseInsensitiveDict(headers) - self.required_headers = [s.lower() for s in required_headers] - self.method = method - self.path = path - self.host = host - - super(HeaderVerifier, self).__init__( - secret, algorithm=self.auth_dict["algorithm"] - ) - - def verify(self): - """ - Verify the headers based on the arguments passed at creation and current properties. - - Raises an Exception if a required header (:param:required_headers) is not found in the signature. - Returns True or False. 
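And the end-to-end flow this verify() method supported, condensed from the removed test_verify.py (HMAC variant; the RSA variants only swap the shared secret for a PEM key pair):

    # Condensed from the removed test_verify.py: sign headers, then verify
    # them against the same shared HMAC secret.
    from stream.httpsig.sign import HeaderSigner
    from stream.httpsig.verify import HeaderVerifier

    secret = b"something special goes here"
    unsigned = {"Date": "Thu, 05 Jan 2012 21:31:40 GMT"}

    hs = HeaderSigner(key_id="Test", secret=secret, algorithm="hmac-sha256")
    signed = hs.sign(unsigned)

    hv = HeaderVerifier(headers=signed, secret=secret)
    assert hv.verify()  # True when the signature and the required 'date' header check out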
- """ - auth_headers = self.auth_dict.get("headers", "date").split(" ") - - if len(set(self.required_headers) - set(auth_headers)) > 0: - raise Exception( - "{} is a required header(s)".format( - ", ".join(set(self.required_headers) - set(auth_headers)) - ) - ) - - signing_str = generate_message( - auth_headers, self.headers, self.host, self.method, self.path - ) - - return self._verify(signing_str, self.auth_dict["signature"]) diff --git a/stream/signing.py b/stream/signing.py deleted file mode 100644 index 117e5be..0000000 --- a/stream/signing.py +++ /dev/null @@ -1,26 +0,0 @@ -import hmac -import hashlib -import base64 - - -def b64_encode(s): - return base64.urlsafe_b64encode(s).strip(b"=") - - -def sign(api_secret, feed_id): - """ - Base64 encoded sha1 signature - - :param api_secret: the api secret - :param feed_id: the feed_id to sign (commonly a feed id such as user1) - - **Example**:: - signature = sign('secret', 'user1') - - """ - hashed_secret = hashlib.sha1((api_secret).encode("utf-8")).digest() - signed = hmac.new(hashed_secret, msg=feed_id.encode("utf8"), digestmod=hashlib.sha1) - digest = signed.digest() - urlsafe_digest = b64_encode(digest) - token = urlsafe_digest.decode("ascii") - return token diff --git a/stream/utils.py b/stream/utils.py index 71a7f74..626a5e8 100644 --- a/stream/utils.py +++ b/stream/utils.py @@ -17,8 +17,8 @@ def validate_feed_id(feed_id): raise ValueError(msg % feed_id) feed_slug, user_id = feed_id.split(":") - feed_slug = validate_feed_slug(feed_slug) - user_id = validate_user_id(user_id) + validate_feed_slug(feed_slug) + validate_user_id(user_id) return feed_id From e890bd93b861b52ddc1a8058a093b2c13b31f93a Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Fri, 30 Nov 2018 15:33:02 +0100 Subject: [PATCH 127/208] add dotgit hooks --- .flake8 | 5 ++ dotgit/hooks-wrapper | 38 ++++++++++++++ dotgit/hooks/pre-commit-format.sh | 13 +++++ dotgit/setup-hooks.sh | 18 +++++++ fabfile.py | 85 ------------------------------- pyproject.toml | 25 +++++++++ setup.py | 77 ++++++++++++---------------- stream/__init__.py | 2 +- stream/client.py | 7 +-- stream/feed.py | 2 +- stream/serializer.py | 2 +- stream/tests/__init__.py | 1 - stream/tests/test_client.py | 30 +++-------- stream/utils.py | 6 +-- 14 files changed, 147 insertions(+), 164 deletions(-) create mode 100644 .flake8 create mode 100755 dotgit/hooks-wrapper create mode 100755 dotgit/hooks/pre-commit-format.sh create mode 100755 dotgit/setup-hooks.sh delete mode 100644 fabfile.py create mode 100644 pyproject.toml diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000..4af7768 --- /dev/null +++ b/.flake8 @@ -0,0 +1,5 @@ +[flake8] +ignore = E501,W503,E203,E731 +max-line-length = 110 +select = C,E,F,W,B,B950 +exclude = .eggs/*,docs/*,lib,src,bin,include,share diff --git a/dotgit/hooks-wrapper b/dotgit/hooks-wrapper new file mode 100755 index 0000000..bc76559 --- /dev/null +++ b/dotgit/hooks-wrapper @@ -0,0 +1,38 @@ +#!/usr/bin/env bash + +# Runs all executable pre-commit-* hooks and exits after, +# if any of them was not successful. 
+# +# Based on +# https://github.com/ELLIOTTCABLE/Paws.js/blob/Master/Scripts/git-hooks/chain-hooks.sh +# http://osdir.com/ml/git/2009-01/msg00308.html +# +# assumes your scripts are located at /bin/git/hooks +exitcodes=() +hookname=`basename $0` +# our special hooks folder +CUSTOM_HOOKS_DIR=$(git rev-parse --show-toplevel)/dotgit/hooks +# find gits native hooks folder +NATIVE_HOOKS_DIR=$(git rev-parse --show-toplevel)/.git/hooks + +# Run each hook, passing through STDIN and storing the exit code. +# We don't want to bail at the first failure, as the user might +# then bypass the hooks without knowing about additional issues. + +for hook in $CUSTOM_HOOKS_DIR/$(basename $0)-*; do + test -x "$hook" || continue + $hook "$@" + exitcodes+=($?) +done + +# check if there was a local hook that was moved previously +if [ -f "$NATIVE_HOOKS_DIR/$hookname.local" ]; then + out=`$NATIVE_HOOKS_DIR/$hookname.local "$@"` + exitcodes+=($?) + echo "$out" +fi + +# If any exit code isn't 0, bail. +for i in "${exitcodes[@]}"; do + [ "$i" == 0 ] || exit $i +done \ No newline at end of file diff --git a/dotgit/hooks/pre-commit-format.sh b/dotgit/hooks/pre-commit-format.sh new file mode 100755 index 0000000..63259bf --- /dev/null +++ b/dotgit/hooks/pre-commit-format.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env bash + +set -e + +if ! black . --check -q; then + black . + echo "some files were not formatted correctly (black) commit aborted!" + echo "your changes are still staged, you can accept formatting changes with git add or ignore them by adding --no-verify to git commit" + exit 1 +fi + +flake8 + diff --git a/dotgit/setup-hooks.sh b/dotgit/setup-hooks.sh new file mode 100755 index 0000000..2bd3843 --- /dev/null +++ b/dotgit/setup-hooks.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env bash +# based on http://stackoverflow.com/a/3464399/1383268 +# assumes that the hooks-wrapper script is located at /bin/git/hooks-wrapper + +HOOK_NAMES="applypatch-msg pre-applypatch post-applypatch pre-commit prepare-commit-msg commit-msg post-commit pre-rebase post-checkout post-merge pre-receive update post-receive post-update pre-auto-gc pre-push" +# find gits native hooks folder +HOOKS_DIR=$(git rev-parse --show-toplevel)/.git/hooks + +for hook in $HOOK_NAMES; do + # If the hook already exists, is a file, and is not a symlink + if [ ! 
-h $HOOKS_DIR/$hook ] && [ -f $HOOKS_DIR/$hook ]; then + mv $HOOKS_DIR/$hook $HOOKS_DIR/$hook.local + fi + # create the symlink, overwriting the file if it exists + # probably the only way this would happen is if you're using an old version of git + # -- back when the sample hooks were not executable, instead of being named ____.sample + ln -s -f ../../dotgit/hooks-wrapper $HOOKS_DIR/$hook +done \ No newline at end of file diff --git a/fabfile.py b/fabfile.py deleted file mode 100644 index ea9ea54..0000000 --- a/fabfile.py +++ /dev/null @@ -1,85 +0,0 @@ -from fabric.api import local, cd -import os -import time -import datetime -from fabric.operations import sudo -from fabric.state import env -from fabric.context_managers import settings - - -PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) -manage_py = os.path.join(PROJECT_ROOT, 'manage.py') - -env.hosts = ['getstream.io'] - -env.user = 'stream' -env.forward_agent = True - - -def publish(test='no'): - ''' - The whole merging stuff etc - ''' - if test == 'yes': - validate() - tag = get_new_tag() - time.sleep(1) - merge_master() - local('git tag %s' % tag) - local('git push origin production %s' % tag) - time.sleep(1) - local('git checkout master') - - -def merge_master(): - # update our local data - local('git fetch --all') - time.sleep(1) - # update with the changes - local('git checkout production') - time.sleep(1) - # merge the remote branch - local('git merge origin/production') - time.sleep(1) - # now merge master - local('git merge origin/master') - - -def get_new_tag(): - tag_command = local('git tag', capture=True) - - tags = [l.strip().replace('_', '-') - for l in tag_command.split('\n') if l.startswith('20')] - tags_dict = dict.fromkeys(tags) - tag_format = unicode(datetime.date.today()) + '-v%s' - version = 1 - - while tag_format % version in tags_dict: - version += 1 - - new_tag = tag_format % version - - return new_tag - - -def validate(): - with cd(PROJECT_ROOT): - local( - 'pep8 --exclude=migrations --ignore=E501,E225 .') - local('%s test' % manage_py) - - -def clean(): - # all dirs which contain python code - python_dirs = [] - for root, dirs, files in os.walk(PROJECT_ROOT): - python_dir = any(f.endswith('.py') for f in files) - if python_dir: - python_dirs.append(root) - for d in python_dirs: - print d - local('bash -c "autopep8 -i %s/*.py"' % d) - - -def docs(): - local('sphinx-build -Eav docs html') diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..edf85a6 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,25 @@ +[tool.black] +line-length = 88 +py36 = true +include = '\.pyi?$' +exclude = ''' +/( + \.git + | \.hg + | \.egg + | \.eggs + | \.mypy_cache + | \.tox + | _build + | \.venv + | src + | bin + | stream_python\.egg-info + | fabfile.py + | lib + | docs + | buck-out + | build + | dist +)/ +''' diff --git a/setup.py b/setup.py index b1ca294..661f9e9 100644 --- a/setup.py +++ b/setup.py @@ -6,38 +6,27 @@ from stream import __version__, __maintainer__, __email__, __license__ import sys -unit = 'unittest2py3k' if sys.version_info > (3, 0, 0) else 'unittest2' -tests_require = [ - unit, - 'pytest==3.2.5', - 'unittest2', - 'pytest-cov', - 'python-dateutil' -] +unit = "unittest2py3k" if sys.version_info > (3, 0, 0) else "unittest2" +tests_require = [unit, "pytest==3.2.5", "unittest2", "pytest-cov", "python-dateutil"] -long_description = open('README.md', 'r').read() +long_description = open("README.md", "r").read() -requests = 'requests>=2.3.0,<3' +requests = "requests>=2.3.0,<3" if 
sys.version_info < (2, 7, 9): - requests = 'requests[security]>=2.4.1,<3' + requests = "requests[security]>=2.4.1,<3" -install_requires = [ - 'pycryptodomex==3.4.7', - requests, - 'six>=1.8.0' -] +install_requires = ["pycryptodomex==3.4.7", requests, "six>=1.8.0"] if sys.version_info < (2, 7, 0): - install_requires.append('pyOpenSSL<18.0.0') - install_requires.append('pyjwt>=1.3.0,<1.6.0') - install_requires.append('pycparser<2.19') + install_requires.append("pyOpenSSL<18.0.0") + install_requires.append("pyjwt>=1.3.0,<1.6.0") + install_requires.append("pycparser<2.19") else: - install_requires.append('pyjwt>=1.3.0,<1.7.0') + install_requires.append("pyjwt>=1.3.0,<1.7.0") class PyTest(TestCommand): - def finalize_options(self): TestCommand.finalize_options(self) self.test_args = [] @@ -46,43 +35,43 @@ def finalize_options(self): def run_tests(self): # import here, cause outside the eggs aren't loaded import pytest - errno = pytest.main( - '-v --cov=./') + + errno = pytest.main("-v --cov=./") sys.exit(errno) setup( - name='stream-python', + name="stream-python", version=__version__, author=__maintainer__, author_email=__email__, - url='http://github.com/GetStream/stream-python', - description='Client for getstream.io. Build scalable newsfeeds & activity streams in a few hours instead of weeks.', + url="http://github.com/GetStream/stream-python", + description="Client for getstream.io. Build scalable newsfeeds & activity streams in a few hours instead of weeks.", long_description=long_description, - long_description_content_type='text/markdown', + long_description_content_type="text/markdown", license=__license__, packages=find_packages(), zip_safe=False, install_requires=install_requires, - extras_require={'test': tests_require}, - cmdclass={'test': PyTest}, + extras_require={"test": tests_require}, + cmdclass={"test": PyTest}, tests_require=tests_require, include_package_data=True, classifiers=[ - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Operating System :: OS Independent', - 'Topic :: Software Development', - 'Development Status :: 5 - Production/Stable', - 'License :: OSI Approved :: BSD License', - 'Natural Language :: English', - 'Programming Language :: Python :: 2.6', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.4', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Topic :: Software Development :: Libraries :: Python Modules', + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Operating System :: OS Independent", + "Topic :: Software Development", + "Development Status :: 5 - Production/Stable", + "License :: OSI Approved :: BSD License", + "Natural Language :: English", + "Programming Language :: Python :: 2.6", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Topic :: Software Development :: Libraries :: Python Modules", ], ) diff --git a/stream/__init__.py b/stream/__init__.py index 70e0986..f7a97f9 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -33,7 +33,7 @@ def connect( # support for the heroku STREAM_URL syntax if stream_url and not api_key: pattern = re.compile( - "https\:\/\/(\w+)\:(\w+)\@([\w-]*).*\?app_id=(\d+)", 
re.IGNORECASE + r"https\:\/\/(\w+)\:(\w+)\@([\w-]*).*\?app_id=(\d+)", re.IGNORECASE ) result = pattern.match(stream_url) if result and len(result.groups()) == 4: diff --git a/stream/client.py b/stream/client.py index 2d1943f..41770c5 100644 --- a/stream/client.py +++ b/stream/client.py @@ -1,4 +1,3 @@ -from datetime import datetime import json import logging import os @@ -316,12 +315,10 @@ def follow_many(self, follows, activity_copy_limit=None): """ params = None - if activity_copy_limit != None: + if activity_copy_limit is not None: params = dict(activity_copy_limit=activity_copy_limit) token = self.create_jwt_token("follower", "*", feed_id="*") - return self.post( - "follow_many/", token, params=params, data=follows - ) + return self.post("follow_many/", token, params=params, data=follows) def update_activities(self, activities): """ diff --git a/stream/feed.py b/stream/feed.py index 5d71974..d21feba 100644 --- a/stream/feed.py +++ b/stream/feed.py @@ -164,7 +164,7 @@ def follow( "target": target_feed_id, "target_token": self.client.feed(target_feed_slug, target_user_id).token, } - if activity_copy_limit != None: + if activity_copy_limit is not None: data["activity_copy_limit"] = activity_copy_limit token = self.create_scope_token("follower", "write") data.update(extra_data) diff --git a/stream/serializer.py b/stream/serializer.py index 9fcc887..fdbc68b 100644 --- a/stream/serializer.py +++ b/stream/serializer.py @@ -24,7 +24,7 @@ def _datetime_decoder(dict_): # strings which are of type `str`. `jsondate` patches this for # consistency so that `unicode` is always returned. if value == "": - dict_[key] = u"" + dict_[key] = "" continue if value is not None and isinstance(value, six.string_types): diff --git a/stream/tests/__init__.py b/stream/tests/__init__.py index bbd3b0d..e69de29 100644 --- a/stream/tests/__init__.py +++ b/stream/tests/__init__.py @@ -1 +0,0 @@ -from .test_client import * diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index cdecac1..18c47cc 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -1,7 +1,7 @@ from dateutil.tz import tzlocal import stream import time -from stream.exceptions import ApiKeyException, InputException, StreamApiException +from stream.exceptions import ApiKeyException, InputException import random import jwt @@ -343,7 +343,7 @@ def test_location_support(self): client = stream.connect("a", "b", "c", location="nonexistant") def get_feed(): - f = client.feed("user", "1").get() + client.feed("user", "1").get() self.assertRaises(requests.exceptions.ConnectionError, get_feed) @@ -610,13 +610,13 @@ def test_flat_follow_copy_one(self): def _get_first_aggregated_activity(self, activities): try: return activities[0]["activities"][0] - except IndexError as e: + except IndexError: pass def _get_first_activity(self, activities): try: return activities[0] - except IndexError as e: + except IndexError: pass def test_empty_followings(self): @@ -833,8 +833,8 @@ def test_uniqueness(self): utcnow = datetime.datetime.utcnow() activity_data = {"actor": 1, "verb": "tweet", "object": 1, "time": utcnow} - response = self.user1.add_activity(activity_data) - response = self.user1.add_activity(activity_data) + self.user1.add_activity(activity_data) + self.user1.add_activity(activity_data) activities = self.user1.get(limit=2)["results"] self.assertDatetimeAlmostEqual(activities[0]["time"], utcnow) @@ -940,7 +940,7 @@ def test_missing_actor(self): try: doit() raise ValueError("should have raised InputException") - except 
InputException as e: + except InputException: pass def test_wrong_feed_spec(self): @@ -1274,12 +1274,6 @@ def test_reaction_filter(self): def test_user_add(self): self.c.users.add(str(uuid1())) - def test_user_add_twice(self): - user_id = str(uuid1()) - self.c.users.add(user_id) - with self.assertRaises(StreamApiException): - self.c.users.add(user_id) - def test_user_add_get_or_create(self): user_id = str(uuid1()) r1 = self.c.users.add(user_id) @@ -1310,12 +1304,6 @@ def test_collections_add(self): def test_collections_add_no_id(self): self.c.collections.add("items", {"data": 1}) - def test_collections_add_twice(self): - id = str(uuid1()) - self.c.collections.add("items", {"data": 1}, id=id) - with self.assertRaises(StreamApiException): - self.c.collections.add("items", {"data": 2}, id=id) - def test_collections_get(self): response = self.c.collections.add("items", {"data": 1}, id=str(uuid1())) entry = self.c.collections.get("items", response["id"]) @@ -1334,10 +1322,6 @@ def test_collections_delete(self): response = self.c.collections.add("items", {"data": 1}, str(uuid1())) self.c.collections.delete("items", response["id"]) - def test_feed_enrichment_bad(self): - with self.assertRaises(TypeError): - self.c.feed("user", "mike").get(enrich=True, reactions=True) - def test_feed_enrichment_collection(self): entry = self.c.collections.add("items", {"name": "time machine"}) entry.pop("duration") diff --git a/stream/utils.py b/stream/utils.py index 626a5e8..677a87e 100644 --- a/stream/utils.py +++ b/stream/utils.py @@ -1,6 +1,6 @@ import re -valid_re = re.compile("^[\w-]+$") +valid_re = re.compile(r"^[\w-]+$") def validate_feed_id(feed_id): @@ -24,7 +24,7 @@ def validate_feed_id(feed_id): def validate_feed_slug(feed_slug): """ - Validates the feed slug falls into \w + Validates the feed slug """ feed_slug = str(feed_slug) if not valid_re.match(feed_slug): @@ -35,7 +35,7 @@ def validate_feed_slug(feed_slug): def validate_user_id(user_id): """ - Validates the user id falls into \w + Validates the user id """ user_id = str(user_id) if not valid_re.match(user_id): From 9e2ce164dd3f405b5771281cdeb092c118a679ed Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Fri, 30 Nov 2018 15:38:37 +0100 Subject: [PATCH 128/208] update docs --- README.md | 21 ++++++++++++++------- stream/client.py | 5 ++++- 2 files changed, 18 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index 51b927e..52d69bb 100644 --- a/README.md +++ b/README.md @@ -28,6 +28,8 @@ Documentation for this Python client are available at the [Stream website](https ### Usage ```python +import datetime + # Instantiate a new client import stream client = stream.connect('YOUR_API_KEY', 'API_KEY_SECRET') @@ -116,15 +118,11 @@ client.activity_partial_update(id=activity_id, set=set, unset=unset) # ...or by combination of foreign_id and time client.activity_partial_update(foreign_id=foreign_id, time=activity_time, set=set, unset=unset) -# Generating tokens for client side usage (JS client) -token = user_feed_1.token -# Javascript client side feed initialization -# user1 = client.feed('user', '1', '{{ token }}'); +# Generating user token for client side usage (JS client) +user_token = client.create_user_token("user-42") -# Generate a read-only token for client side usage (JS client) -readonly_token = user_feed_1.get_readonly_token() # Javascript client side feed initialization -# user1 = client.feed('user', '1', '{{ readonly_token }}'); +# client = stream.connect(apiKey, userToken, appId); # Generate a redirect url for the Stream 
Analytics platform to track # events/impressions on url clicks @@ -164,6 +162,15 @@ py.test --cov stream --cov-report html LOCAL=true py.test ``` +Install black and flake8 + +``` +pip install black +pip install flake8 +``` + +Install git hooks to avoid pushing invalid code (git commit will run black and flak8) + ### Releasing a new version In order to release new version you need to be a maintainer on Pypi. diff --git a/stream/client.py b/stream/client.py index 41770c5..63ae436 100644 --- a/stream/client.py +++ b/stream/client.py @@ -168,7 +168,7 @@ def _parse_response(self, response): self.raise_exception(parsed_result, status_code=response.status_code) return parsed_result - def create_user_session_token(self, user_id, **extra_data): + def create_user_token(self, user_id, **extra_data): """Setup the payload for the given user_id with optional extra data (key, value pairs) and encode it using jwt """ @@ -177,6 +177,9 @@ def create_user_session_token(self, user_id, **extra_data): payload[k] = v return jwt.encode(payload, self.api_secret, algorithm="HS256").decode("utf-8") + def create_user_session_token(self, user_id, **extra_data): + return self.create_user_token(user_id, **extra_data) + def create_jwt_token(self, resource, action, feed_id=None, user_id=None): """ Setup the payload for the given resource, action, feed or user From fad3811b27616c752cc2d184bc0d6184c365cd10 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Fri, 30 Nov 2018 15:41:53 +0100 Subject: [PATCH 129/208] print public key --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index b292747..9ae53a5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -19,6 +19,7 @@ cache: pip install: - pip install -r dev_requirements.txt script: + - echo $STREAM_KEY - py.test -lv --cov=./ after_script: - "pep8 --exclude=migrations --ignore=E501,E225,W293 stream" From 7d8a6d2cd9318222f071b5424e80917bf1aef835 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Fri, 30 Nov 2018 15:43:06 +0100 Subject: [PATCH 130/208] remove sudo false --- .travis.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 9ae53a5..24a5af2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,5 +1,4 @@ language: python -sudo: false python: - 2.6 - 2.7 From 3d792dc613069b76525fc670275ffc41363b902c Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Fri, 30 Nov 2018 15:43:30 +0100 Subject: [PATCH 131/208] remove sudo false --- .travis.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 24a5af2..fe5fef6 100644 --- a/.travis.yml +++ b/.travis.yml @@ -11,7 +11,6 @@ matrix: include: - python: 3.7 dist: xenial - sudo: true cache: pip From 6b7bf05579ac151576020da92e32ad9d35ea7a8d Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Fri, 30 Nov 2018 16:26:02 +0100 Subject: [PATCH 132/208] py 2.6 compat fix --- stream/tests/test_client.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index 18c47cc..818459f 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -1214,9 +1214,9 @@ def test_reaction_get(self): reaction["activity_id"], "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4" ) self.assertEqual(reaction["kind"], "like") - self.assertIn("created_at", reaction) - self.assertIn("updated_at", reaction) - self.assertIn("id", reaction) + self.assertTrue("created_at" in reaction) + self.assertTrue("updated_at" in reaction) + self.assertTrue("id" in reaction) def 
test_reaction_update(self): response = self.c.reactions.add( @@ -1286,9 +1286,9 @@ def test_user_get(self): response = self.c.users.add(str(uuid1())) user = self.c.users.get(response["id"]) self.assertEqual(user["data"], {}) - self.assertIn("created_at", user) - self.assertIn("updated_at", user) - self.assertIn("id", user) + self.assertTrue("created_at" in user) + self.assertTrue("updated_at" in user) + self.assertTrue("id" in user) def test_user_update(self): response = self.c.users.add(str(uuid1())) @@ -1308,9 +1308,9 @@ def test_collections_get(self): response = self.c.collections.add("items", {"data": 1}, id=str(uuid1())) entry = self.c.collections.get("items", response["id"]) self.assertEqual(entry["data"], {"data": 1}) - self.assertIn("created_at", entry) - self.assertIn("updated_at", entry) - self.assertIn("id", entry) + self.assertTrue("created_at" in entry) + self.assertTrue("updated_at" in entry) + self.assertTrue("id" in entry) def test_collections_update(self): response = self.c.collections.add("items", {"data": 1}, str(uuid1())) From 1d7f6353c73635da369fbe663ad529b40cc0addc Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Mon, 3 Dec 2018 09:10:33 +0100 Subject: [PATCH 133/208] v3 --- CHANGELOG | 12 ++++++++++++ stream/__init__.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/CHANGELOG b/CHANGELOG index c186807..73e8d00 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,18 @@ Change history ================ +====== +3.0.0 +====== +:release-date: 2018-12-03 +:by: Tommaso Barbugli + +Add support for reactions +Add support for users +Removed HTTP Signatures based auth +Use JWT auth for everything +Add feed.get enrichment params + ====== 2.12.0 ====== diff --git a/stream/__init__.py b/stream/__init__.py index f7a97f9..c2a0be3 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = "Copyright 2014, Stream.io, Inc" __credits__ = ["Thierry Schellenbach, mellowmorning.com, @tschellenbach"] __license__ = "BSD-3-Clause" -__version__ = "2.12.0" +__version__ = "3.0.0" __maintainer__ = "Thierry Schellenbach" __email__ = "support@getstream.io" __status__ = "Production" From 76fae7a80f2f1d234a4d3920295761ed8bab1cf7 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Tue, 4 Dec 2018 14:35:06 +0100 Subject: [PATCH 134/208] add shorthand version for collections.create_reference --- stream/collections.py | 17 ++++++++++++----- stream/tests/test_client.py | 2 +- 2 files changed, 13 insertions(+), 6 deletions(-) diff --git a/stream/collections.py b/stream/collections.py index 6e153eb..f8950f0 100644 --- a/stream/collections.py +++ b/stream/collections.py @@ -9,11 +9,18 @@ def __init__(self, client, token): self.client = client self.token = token - def create_reference(self, collection_name, id): - _id = id - if isinstance(id, (dict,)) and id.get("id") is not None: - _id = id.get("id") - return "SO:%s:%s" % (collection_name, _id) + def create_reference(self, collection_name=None, id=None, entry=None): + if isinstance(entry, (dict,)): + _collection = entry["collection"] + _id = entry["id"] + elif collection_name is not None and id is not None: + _collection = collection_name + _id = id + else: + raise ValueError( + "must call with collection_name and id or with entry arguments" + ) + return "SO:%s:%s" % (_collection, _id) def upsert(self, collection_name, data): """ diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index 818459f..7f418d3 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ 
-1329,7 +1329,7 @@ def test_feed_enrichment_collection(self): activity_data = { "actor": "mike", "verb": "buy", - "object": self.c.collections.create_reference("items", entry), + "object": self.c.collections.create_reference(entry=entry), } f.add_activity(activity_data) response = f.get() From 7d0969e2ab51d7edce1d8496aa31327103f8e9a6 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Tue, 4 Dec 2018 14:36:04 +0100 Subject: [PATCH 135/208] 3.0.1 --- CHANGELOG | 8 ++++++++ stream/__init__.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/CHANGELOG b/CHANGELOG index 73e8d00..e2cb81f 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,14 @@ Change history ================ +====== +3.0.1 +====== +:release-date: 2018-12-04 +:by: Tommaso Barbugli + +Add short-hand version for collections.create_reference() + ====== 3.0.0 ====== diff --git a/stream/__init__.py b/stream/__init__.py index c2a0be3..cf114d4 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = "Copyright 2014, Stream.io, Inc" __credits__ = ["Thierry Schellenbach, mellowmorning.com, @tschellenbach"] __license__ = "BSD-3-Clause" -__version__ = "3.0.0" +__version__ = "3.0.1" __maintainer__ = "Thierry Schellenbach" __email__ = "support@getstream.io" __status__ = "Production" From 75cbed74bbe37eafa91dc77f5dcc3af297c987eb Mon Sep 17 00:00:00 2001 From: Horatiu Ion Date: Tue, 4 Dec 2018 17:42:26 +0200 Subject: [PATCH 136/208] proper reaction filtering --- stream/reactions.py | 4 ++-- stream/tests/test_client.py | 16 +++++++++++++--- 2 files changed, 15 insertions(+), 5 deletions(-) diff --git a/stream/reactions.py b/stream/reactions.py index 7b45791..8148313 100644 --- a/stream/reactions.py +++ b/stream/reactions.py @@ -46,7 +46,7 @@ def add_child(self, kind, parent_id, user_id, data=None, target_feeds=None): "reaction/", service_name="api", signature=self.token, data=payload ) - def filter(self, **params): + def filter(self, kind, **params): lookup_field = "" lookup_value = "" @@ -61,7 +61,7 @@ def filter(self, **params): lookup_value = params.pop("user_id") return self.client.get( - "reaction/%s/%s/" % (lookup_field, lookup_value), + "reaction/%s/%s/%s/" % (lookup_field, lookup_value, kind), service_name="api", signature=self.token, params=params, diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index 7f418d3..703ce94 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -1238,14 +1238,17 @@ def test_reaction_add_child(self): def test_reaction_filter_random(self): self.c.reactions.filter( + kind="like", reaction_id="54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", id_lte="54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", ) self.c.reactions.filter( + kind="dunno", activity_id="54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", id_lte="54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", ) self.c.reactions.filter( + kind="val", user_id="mike", id_lte="54a60c1e-4ee3-494b-a1e3-50c06acb5ed4" ) @@ -1262,15 +1265,22 @@ def test_reaction_filter(self): response = self.c.reactions.add("like", activity_id, user) child = self.c.reactions.add_child("like", response["id"], user) reaction = self.c.reactions.get(response["id"]) - r = self.c.reactions.filter(reaction_id=reaction["id"]) + + response = self.c.reactions.add("comment", activity_id, user) + reaction_comment = self.c.reactions.get(response["id"]) + + r = self.c.reactions.filter(kind="like", reaction_id=reaction["id"]) self._first_result_should_be(r, child) - r = self.c.reactions.filter(activity_id=activity_id, id_lte=reaction["id"]) + r = 
self.c.reactions.filter(kind="like", activity_id=activity_id, id_lte=reaction["id"]) self._first_result_should_be(r, reaction) - r = self.c.reactions.filter(user_id=user, id_lte=reaction["id"]) + r = self.c.reactions.filter(kind="like", user_id=user, id_lte=reaction["id"]) self._first_result_should_be(r, reaction) + r = self.c.reactions.filter(kind="comment", activity_id=activity_id) + self._first_result_should_be(r, reaction_comment) + def test_user_add(self): self.c.users.add(str(uuid1())) From 81c218d70a472580fe70fc1d99f30ccd86540f28 Mon Sep 17 00:00:00 2001 From: Horatiu Ion Date: Tue, 4 Dec 2018 18:06:06 +0200 Subject: [PATCH 137/208] kind should be optional --- stream/reactions.py | 12 ++++++++++-- stream/tests/test_client.py | 4 +--- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/stream/reactions.py b/stream/reactions.py index 8148313..47d0d21 100644 --- a/stream/reactions.py +++ b/stream/reactions.py @@ -46,9 +46,13 @@ def add_child(self, kind, parent_id, user_id, data=None, target_feeds=None): "reaction/", service_name="api", signature=self.token, data=payload ) - def filter(self, kind, **params): + def filter(self, **params): lookup_field = "" lookup_value = "" + kind = None + + if "kind" in params: + kind = params.pop("kind") if "reaction_id" in params: lookup_field = "reaction_id" @@ -60,8 +64,12 @@ def filter(self, kind, **params): lookup_field = "user_id" lookup_value = params.pop("user_id") + endpoint = "reaction/%s/%s/" % (lookup_field, lookup_value) + if kind is not None: + endpoint = "reaction/%s/%s/%s/" % (lookup_field, lookup_value, kind) + return self.client.get( - "reaction/%s/%s/%s/" % (lookup_field, lookup_value, kind), + endpoint, service_name="api", signature=self.token, params=params, diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index 703ce94..030ef9e 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -1243,12 +1243,10 @@ def test_reaction_filter_random(self): id_lte="54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", ) self.c.reactions.filter( - kind="dunno", activity_id="54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", id_lte="54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", ) self.c.reactions.filter( - kind="val", user_id="mike", id_lte="54a60c1e-4ee3-494b-a1e3-50c06acb5ed4" ) @@ -1269,7 +1267,7 @@ def test_reaction_filter(self): response = self.c.reactions.add("comment", activity_id, user) reaction_comment = self.c.reactions.get(response["id"]) - r = self.c.reactions.filter(kind="like", reaction_id=reaction["id"]) + r = self.c.reactions.filter(reaction_id=reaction["id"]) self._first_result_should_be(r, child) r = self.c.reactions.filter(kind="like", activity_id=activity_id, id_lte=reaction["id"]) From 9ec01d0c2bed2826e34b78e0f3f939f976764f58 Mon Sep 17 00:00:00 2001 From: Horatiu Ion Date: Wed, 5 Dec 2018 11:59:41 +0200 Subject: [PATCH 138/208] cleaner code --- stream/reactions.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/stream/reactions.py b/stream/reactions.py index 47d0d21..7bb8348 100644 --- a/stream/reactions.py +++ b/stream/reactions.py @@ -49,10 +49,8 @@ def add_child(self, kind, parent_id, user_id, data=None, target_feeds=None): def filter(self, **params): lookup_field = "" lookup_value = "" - kind = None - if "kind" in params: - kind = params.pop("kind") + kind = params.pop("kind", None) if "reaction_id" in params: lookup_field = "reaction_id" From 8857aa22f60e46b8674546468ce0e2eaf8eacd30 Mon Sep 17 00:00:00 2001 From: Horatiu Ion Date: Wed, 12 Dec 2018 09:59:40 +0100 Subject: [PATCH 
139/208] user proper ids for tests --- stream/reactions.py | 5 +---- stream/tests/test_client.py | 14 ++++++++------ 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/stream/reactions.py b/stream/reactions.py index 7bb8348..e7e1152 100644 --- a/stream/reactions.py +++ b/stream/reactions.py @@ -67,8 +67,5 @@ def filter(self, **params): endpoint = "reaction/%s/%s/%s/" % (lookup_field, lookup_value, kind) return self.client.get( - endpoint, - service_name="api", - signature=self.token, - params=params, + endpoint, service_name="api", signature=self.token, params=params ) diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index 030ef9e..2888e00 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -1239,15 +1239,15 @@ def test_reaction_add_child(self): def test_reaction_filter_random(self): self.c.reactions.filter( kind="like", - reaction_id="54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", - id_lte="54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", + reaction_id="87a9eec0-fd5f-11e8-8080-80013fed2f5b", + id_lte="87a9eec0-fd5f-11e8-8080-80013fed2f5b", ) self.c.reactions.filter( - activity_id="54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", - id_lte="54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", + activity_id="87a9eec0-fd5f-11e8-8080-80013fed2f5b", + id_lte="87a9eec0-fd5f-11e8-8080-80013fed2f5b", ) self.c.reactions.filter( - user_id="mike", id_lte="54a60c1e-4ee3-494b-a1e3-50c06acb5ed4" + user_id="mike", id_lte="87a9eec0-fd5f-11e8-8080-80013fed2f5b" ) def _first_result_should_be(self, response, element): @@ -1270,7 +1270,9 @@ def test_reaction_filter(self): r = self.c.reactions.filter(reaction_id=reaction["id"]) self._first_result_should_be(r, child) - r = self.c.reactions.filter(kind="like", activity_id=activity_id, id_lte=reaction["id"]) + r = self.c.reactions.filter( + kind="like", activity_id=activity_id, id_lte=reaction["id"] + ) self._first_result_should_be(r, reaction) r = self.c.reactions.filter(kind="like", user_id=user, id_lte=reaction["id"]) From c0a355fdefe8ec9e09446682e3fcc12e3e973f8e Mon Sep 17 00:00:00 2001 From: Horatiu Ion Date: Mon, 21 Jan 2019 17:31:27 +0100 Subject: [PATCH 140/208] add batch partial update --- stream/client.py | 32 +++++++++++++-- stream/tests/test_client.py | 77 +++++++++++++++++++++++++++++++++++++ 2 files changed, 106 insertions(+), 3 deletions(-) diff --git a/stream/client.py b/stream/client.py index 63ae436..705a252 100644 --- a/stream/client.py +++ b/stream/client.py @@ -377,7 +377,7 @@ def activity_partial_update( self, id=None, foreign_id=None, time=None, set={}, unset=[] ): """ - Partial update activity, via foreign ID or Foreign ID + timestamp + Partial update activity, via activity ID or Foreign ID + timestamp id: the activity ID foreign_id: the activity foreign ID @@ -386,8 +386,6 @@ def activity_partial_update( unset: list of unset operations """ - auth_token = self.create_jwt_token("activities", "*", feed_id="*") - if id is None and (foreign_id is None or time is None): raise TypeError( "The id or foreign_id+time parameters must be provided and not be None" @@ -405,6 +403,34 @@ def activity_partial_update( data["foreign_id"] = foreign_id data["time"] = time + return self.activities_partial_update(updates=[data]) + + def activities_partial_update(self, updates=[]): + """ + Partial update activity, via activity ID or Foreign ID + timestamp + + :param updates: list of partial updates to perform. + + eg. 
+ [ + { + "foreign_id": "post:1", + "time": datetime.datetime.utcnow(), + "set": { + "product.name": "boots", + "product.price": 7.99, + "popularity": 1000, + "foo": {"bar": {"baz": "qux"}}, + }, + "unset": ["product.color"] + } + ] + """ + + auth_token = self.create_jwt_token("activities", "*", feed_id="*") + + data = {"changes": updates} + return self.post("activity/", auth_token, data=data) def create_redirect_url(self, target_url, user_id, events): diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index 2888e00..f873c64 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -1190,6 +1190,83 @@ def test_activity_partial_update(self): expected["popularity"] = 9000 self.assertEqual(updated, expected) + def test_activities_partial_update(self): + + feed = self.c.feed("user", uuid4()) + feed.add_activities( + [ + { + "actor": "barry", + "object": "09", + "verb": "tweet", + "time": datetime.datetime.utcnow(), + "foreign_id": "fid:123", + "product": {"name": "shoes", "price": 9.99, "color": "blue"}, + }, + { + "actor": "jerry", + "object": "10", + "verb": "tweet", + "time": datetime.datetime.utcnow(), + "foreign_id": "fid:456", + "product": {"name": "shoes", "price": 9.99, "color": "blue"}, + }, + { + "actor": "tommy", + "object": "09", + "verb": "tweet", + "time": datetime.datetime.utcnow(), + "foreign_id": "fid:789", + "product": {"name": "shoes", "price": 9.99, "color": "blue"}, + }, + ] + ) + activities = feed.get()["results"] + + batch = [ + { + "id": activities[0]["id"], + "set": {"product.color": "purple", "custom": {"some": "extra data"}}, + "unset": ["product.price"], + }, + { + "id": activities[2]["id"], + "set": {"product.price": 9001, "on_sale": True}, + }, + ] + + # partial update by ID + self.c.activities_partial_update(batch) + updated = feed.get()["results"] + expected = activities + expected[0]["product"] = {"name": "shoes", "color": "purple"} + expected[0]["custom"] = {"some": "extra data"} + expected[2]["product"] = {"name": "shoes", "price": 9001, "color": "blue"} + expected[2]["on_sale"] = True + self.assertEqual(updated, expected) + + # partial update by foreign ID + time + batch = [ + { + "foreign_id": activities[1]["foreign_id"], + "time": activities[1]["time"], + "set": {"product.color": "beeeeeeige", "custom": {"modified_by": "me"}}, + "unset": ["product.name"], + }, + { + "foreign_id": activities[2]["foreign_id"], + "time": activities[2]["time"], + "unset": ["on_sale"], + }, + ] + self.c.activities_partial_update(batch) + updated = feed.get()["results"] + + expected[1]["product"] = {"price": 9.99, "color": "beeeeeeige"} + expected[1]["custom"] = {"modified_by": "me"} + del expected[2]["on_sale"] + self.assertEqual(updated, expected) + def test_create_reference(self): ref = self.c.collections.create_reference("item", "42") self.assertEqual(ref, "SO:item:42") From 44f806283c9c1b85cce8fe4e6339fd15e6e1a526 Mon Sep 17 00:00:00 2001 From: Jelte Fennema Date: Fri, 24 May 2019 16:42:36 +0200 Subject: [PATCH 141/208] Bump version --- CHANGELOG | 9 +++++++++ stream/__init__.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/CHANGELOG b/CHANGELOG index e2cb81f..e3e36f5 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,15 @@ Change history ================ +===== +3.0.2 +===== +:release-date: 2018-05-24 +:by: Jelte Fennema + +Fixes for filtering by reactions by kind + + ====== 3.0.1 ====== diff --git a/stream/__init__.py b/stream/__init__.py index cf114d4..dfaaf80 100644 --- a/stream/__init__.py +++ 
b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = "Copyright 2014, Stream.io, Inc" __credits__ = ["Thierry Schellenbach, mellowmorning.com, @tschellenbach"] __license__ = "BSD-3-Clause" -__version__ = "3.0.1" +__version__ = "3.0.2" __maintainer__ = "Thierry Schellenbach" __email__ = "support@getstream.io" __status__ = "Production" From 2d4714e5a893b1f532f7d17956be8ad1667836d2 Mon Sep 17 00:00:00 2001 From: Jelte Fennema Date: Fri, 24 May 2019 16:53:50 +0200 Subject: [PATCH 142/208] Bump version to 3.1.0 --- CHANGELOG | 8 ++++++++ stream/__init__.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/CHANGELOG b/CHANGELOG index e3e36f5..f26090f 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,14 @@ Change history ================ +===== +3.1.0 +===== +:release-date: 2018-05-24 +:by: Jelte Fennema + +Batch partial update + ===== 3.0.2 ===== diff --git a/stream/__init__.py b/stream/__init__.py index dfaaf80..7565702 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = "Copyright 2014, Stream.io, Inc" __credits__ = ["Thierry Schellenbach, mellowmorning.com, @tschellenbach"] __license__ = "BSD-3-Clause" -__version__ = "3.0.2" +__version__ = "3.1.0" __maintainer__ = "Thierry Schellenbach" __email__ = "support@getstream.io" __status__ = "Production" From 24bb4410b9409686416a3f4ec281b7dd6711480c Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Mon, 1 Jul 2019 13:34:41 +0200 Subject: [PATCH 143/208] relax JWT requirement --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 661f9e9..c1bd352 100644 --- a/setup.py +++ b/setup.py @@ -23,7 +23,7 @@ install_requires.append("pyjwt>=1.3.0,<1.6.0") install_requires.append("pycparser<2.19") else: - install_requires.append("pyjwt>=1.3.0,<1.7.0") + install_requires.append("pyjwt>=1.3.0,<1.8.0") class PyTest(TestCommand): From c529230744ee71b473fe02968779d570bf07952f Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Mon, 9 Sep 2019 01:50:04 +0200 Subject: [PATCH 144/208] fix some typos --- stream/personalization.py | 2 +- stream/utils.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/stream/personalization.py b/stream/personalization.py index 8628798..d24b503 100644 --- a/stream/personalization.py +++ b/stream/personalization.py @@ -54,7 +54,7 @@ def post(self, resource, **params): def delete(self, resource, **params): """ - shortcut to delete metadata or activites + shortcut to delete metadata or activities :param resource: personalized url endpoint typical "meta" :param params: params to pass to url i.e user_id = "user:123" :return: data that was deleted if if successful or not. 
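For quick reference, a minimal sketch of the batch partial-update call introduced in patch 140 above and released as 3.1.0 (the API key, ids, timestamps, and field values here are illustrative only, not taken from the patches):

import datetime
import stream

client = stream.connect("YOUR_API_KEY", "API_KEY_SECRET")

# Each change targets one activity, addressed either by "id"
# or by the "foreign_id" + "time" pair, mirroring the docstring above.
client.activities_partial_update(
    [
        {
            "id": "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4",
            "set": {"product.price": 9001, "on_sale": True},
            "unset": ["product.color"],
        },
        {
            "foreign_id": "post:1",
            "time": datetime.datetime.utcnow(),
            "set": {"popularity": 1000},
        },
    ]
)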
diff --git a/stream/utils.py b/stream/utils.py index 677a87e..c6840a7 100644 --- a/stream/utils.py +++ b/stream/utils.py @@ -9,7 +9,7 @@ def validate_feed_id(feed_id): :param feed_id: a feed such as user:1 - Raises ValueError if the format doesnt match + Raises ValueError if the format doesn't match """ feed_id = str(feed_id) if len(feed_id.split(":")) != 2: From 22e73f96e3e6d48a3ac687f4476ca47ed64ae172 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Sat, 2 Nov 2019 08:18:40 -0600 Subject: [PATCH 145/208] Update setup.py --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index c1bd352..ae76f49 100644 --- a/setup.py +++ b/setup.py @@ -16,7 +16,7 @@ if sys.version_info < (2, 7, 9): requests = "requests[security]>=2.4.1,<3" -install_requires = ["pycryptodomex==3.4.7", requests, "six>=1.8.0"] +install_requires = ["pycryptodomex>=3.4.7", requests, "six>=1.8.0"] if sys.version_info < (2, 7, 0): install_requires.append("pyOpenSSL<18.0.0") From 151bce5b0f0e6be6f7f6bdebb42f8450c70148e9 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Sat, 2 Nov 2019 08:19:17 -0600 Subject: [PATCH 146/208] Update setup.py --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index ae76f49..ae6d018 100644 --- a/setup.py +++ b/setup.py @@ -16,7 +16,7 @@ if sys.version_info < (2, 7, 9): requests = "requests[security]>=2.4.1,<3" -install_requires = ["pycryptodomex>=3.4.7", requests, "six>=1.8.0"] +install_requires = ["pycryptodomex>=3.4.7,<4", requests, "six>=1.8.0"] if sys.version_info < (2, 7, 0): install_requires.append("pyOpenSSL<18.0.0") From 51e84108504fd62fbe85fb531d4e7a640e52177a Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Sat, 2 Nov 2019 08:20:05 -0600 Subject: [PATCH 147/208] Update .travis.yml --- .travis.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.travis.yml b/.travis.yml index fe5fef6..dbbc2b8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -5,6 +5,8 @@ python: - 3.4 - 3.5 - 3.6 + - 3.7 + - 3.8 matrix: fast_finish: true From 6262cccf1750acc0a11d903faa820cd981bae6c0 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Sat, 2 Nov 2019 12:28:28 -0600 Subject: [PATCH 148/208] Update .travis.yml --- .travis.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index dbbc2b8..5b5d8ab 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,6 +1,5 @@ language: python python: - - 2.6 - 2.7 - 3.4 - 3.5 From 8da5178cec97d0cdc3a470e783951beecd07146f Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Thu, 7 Nov 2019 09:22:32 -0700 Subject: [PATCH 149/208] 3.1.1 --- CHANGELOG | 8 ++++++++ stream/__init__.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/CHANGELOG b/CHANGELOG index f26090f..fbc7a93 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,14 @@ Change history ================ +===== +3.1.1 +===== +:release-date: 2019-11-07 +:by: Tommaso Barbuli + +Bump crypto deps + ===== 3.1.0 ===== diff --git a/stream/__init__.py b/stream/__init__.py index 7565702..ea7bf9b 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = "Copyright 2014, Stream.io, Inc" __credits__ = ["Thierry Schellenbach, mellowmorning.com, @tschellenbach"] __license__ = "BSD-3-Clause" -__version__ = "3.1.0" +__version__ = "3.1.1" __maintainer__ = "Thierry Schellenbach" __email__ = "support@getstream.io" __status__ = "Production" From e66df7287254255f56544a53b7d04b07663aef69 Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Tue, 19 Nov 2019 11:46:33 +0100 
Subject: [PATCH 150/208] Fix a typo in readme (#99) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 52d69bb..6a72174 100644 --- a/README.md +++ b/README.md @@ -180,7 +180,7 @@ In order to release new version you need to be a maintainer on Pypi. - Commit and push to Github - Create a new tag for the version (eg. `v2.9.0`) - Create a new dist with python `python setup.py sdist` -- Upload the new distributable with wine `twine upload dist/stream-python-VERSION-NAME.tar.gz` +- Upload the new distributable with twine `twine upload dist/stream-python-VERSION-NAME.tar.gz` If unsure you can also test using the Pypi test servers `twine upload --repository-url https://test.pypi.org/legacy/ dist/stream-python-VERSION-NAME.tar.gz` From 7e85980752a979f283ce3105e3a249a2f11d5c61 Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Tue, 19 Nov 2019 12:03:10 +0100 Subject: [PATCH 151/208] Fix docs in personalization (#100) --- stream/personalization.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/stream/personalization.py b/stream/personalization.py index d24b503..bedbe07 100644 --- a/stream/personalization.py +++ b/stream/personalization.py @@ -30,7 +30,7 @@ def get(self, resource, **params): def post(self, resource, **params): """ - "Generic function to post data to personalization endpoint + Generic function to post data to personalization endpoint :param resource: personalized resource endpoint i.e "follow_recommendations" :param params: params to pass to url (data is a reserved keyword to post to body) @@ -57,7 +57,7 @@ def delete(self, resource, **params): shortcut to delete metadata or activities :param resource: personalized url endpoint typical "meta" :param params: params to pass to url i.e user_id = "user:123" - :return: data that was deleted if if successful or not. + :return: data that was deleted if successful or not. """ response = self.client.delete( From 8a85694c08731b4f13af04de5c20fa7af03535cd Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Fri, 13 Dec 2019 12:14:48 +0100 Subject: [PATCH 152/208] Update supported python versions in readme --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 6a72174..1e078d8 100644 --- a/README.md +++ b/README.md @@ -13,7 +13,7 @@ You can sign up for a Stream account at https://getstream.io/get_started. stream-python supports: -- Python (2.6, 2.7, 3.4, 3.5, 3.6, 3.7) +- Python (2.7, 3.4, 3.5, 3.6, 3.7, 3.8) #### Install from Pypi From 3fb826d3c28646750baa1b83855a8bec018801b9 Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Wed, 12 Feb 2020 17:56:54 +0100 Subject: [PATCH 153/208] Correct examples in collections --- stream/collections.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/stream/collections.py b/stream/collections.py index f8950f0..56affad 100644 --- a/stream/collections.py +++ b/stream/collections.py @@ -30,7 +30,7 @@ def upsert(self, collection_name, data): :return: http response, 201 if successful along with data posted. 
**Example**:: - client.collection.upsert('user', [{"id": '1', "name": "Juniper", "hobbies": ["Playing", "Sleeping", "Eating"]}, + client.collections.upsert('user', [{"id": '1', "name": "Juniper", "hobbies": ["Playing", "Sleeping", "Eating"]}, {"id": '2', "name": "Ruby", "interests": ["Sunbeams", "Surprise Attacks"]}]) """ @@ -56,8 +56,8 @@ def select(self, collection_name, ids): :return: meta data as json blob **Example**:: - client.collection.select('user', 1) - client.collection.select('user', [1,2,3]) + client.collections.select('user', 1) + client.collections.select('user', [1,2,3]) """ if type(ids) != list: @@ -87,7 +87,7 @@ def delete_many(self, collection_name, ids): **Example**:: client.collections.delete('user', '1') - collections.delete('user', ['1','2','3']) + client.collections.delete('user', ['1','2','3']) """ if type(ids) != list: From cd458326240d986947a5819bfe1a51569533f433 Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Wed, 18 Mar 2020 17:25:22 +0100 Subject: [PATCH 154/208] Support follow count in user get (#104) Add query param support in user get for follow counts. --- stream/tests/test_client.py | 7 +++++++ stream/users.py | 4 ++-- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index f873c64..e29fa62 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -1377,6 +1377,13 @@ def test_user_get(self): self.assertTrue("updated_at" in user) self.assertTrue("id" in user) + def test_user_get_with_follow_counts(self): + response = self.c.users.add(str(uuid1())) + user = self.c.users.get(response["id"], with_follow_counts=True) + self.assertEqual(user["id"], response["id"]) + self.assertTrue("followers_count" in user) + self.assertTrue("following_count" in user) + def test_user_update(self): response = self.c.users.add(str(uuid1())) self.c.users.update(response["id"], {"changed": True}) diff --git a/stream/users.py b/stream/users.py index c786208..9b0c539 100644 --- a/stream/users.py +++ b/stream/users.py @@ -19,9 +19,9 @@ def add(self, user_id, data=None, get_or_create=False): params={"get_or_create": get_or_create}, ) - def get(self, user_id): + def get(self, user_id, **params): return self.client.get( - "user/%s" % user_id, service_name="api", signature=self.token + "user/%s" % user_id, service_name="api", params=params, signature=self.token ) def update(self, user_id, data=None): From 5b426eec357a96a9e97e38325f5f21d6cde27c80 Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Thu, 19 Mar 2020 09:45:08 +0100 Subject: [PATCH 155/208] Add open graph scrape (#105) Add og call support --- stream/client.py | 9 +++++++++ stream/tests/test_client.py | 5 +++++ 2 files changed, 14 insertions(+) diff --git a/stream/client.py b/stream/client.py index 705a252..8351943 100644 --- a/stream/client.py +++ b/stream/client.py @@ -455,3 +455,12 @@ def create_redirect_url(self, target_url, user_id, events): # validate the target url is valid Request("GET", target_url).prepare() return prepared_request.url + + def og(self, target_url): + """ + Retrieve open graph information from a URL which you can + then use to add images and a description to activities. 
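As a usage sketch for the Open Graph scraper added in this patch (assuming a reachable URL; the "title" and "description" keys follow the assertions in the test added below):

import stream

client = stream.connect("YOUR_API_KEY", "API_KEY_SECRET")
og_data = client.og("https://google.com")
# og_data carries the scraped Open Graph fields, e.g. title and description
print(og_data["title"], og_data["description"])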
+ """ + auth_token = self.create_jwt_token("*", "*", feed_id="*") + params = {"url": target_url} + return self.get("og/", auth_token, params=params) diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index e29fa62..eb162f4 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -1480,3 +1480,8 @@ def test_feed_enrichment_reaction_counts(self): reaction.pop("duration") enriched_response = f.get(reactions={"counts": True}) self.assertEqual(enriched_response["results"][0]["reaction_counts"]["like"], 1) + + def test_og(self): + response = client.og("https://google.com") + self.assertTrue("title" in response) + self.assertTrue("description" in response) From 1c3b9cd40a1990b6323a782b977e972aed4d1e3b Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Thu, 19 Mar 2020 09:59:05 +0100 Subject: [PATCH 156/208] Release v3.2.0 --- CHANGELOG | 10 ++++++++++ setup.py | 2 +- stream/__init__.py | 2 +- 3 files changed, 12 insertions(+), 2 deletions(-) diff --git a/CHANGELOG b/CHANGELOG index fbc7a93..33e5bec 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,16 @@ Change history ================ +===== +3.2.0 +===== +:release-date: 2020-03-17 +:by: Ferhat Elmas + +Add open graph scrape support +Update python support (drop 2.6, add 3.8) +Fixes in docs for collections and personalization + ===== 3.1.1 ===== diff --git a/setup.py b/setup.py index ae6d018..c119e0d 100644 --- a/setup.py +++ b/setup.py @@ -65,13 +65,13 @@ def run_tests(self): "Development Status :: 5 - Production/Stable", "License :: OSI Approved :: BSD License", "Natural Language :: English", - "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", "Topic :: Software Development :: Libraries :: Python Modules", ], ) diff --git a/stream/__init__.py b/stream/__init__.py index ea7bf9b..e27e12a 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = "Copyright 2014, Stream.io, Inc" __credits__ = ["Thierry Schellenbach, mellowmorning.com, @tschellenbach"] __license__ = "BSD-3-Clause" -__version__ = "3.1.1" +__version__ = "3.2.0" __maintainer__ = "Thierry Schellenbach" __email__ = "support@getstream.io" __status__ = "Production" From dbaaa1c90d7a1800c891dfe3889a119a725f0ffa Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Fri, 10 Apr 2020 13:16:45 +0200 Subject: [PATCH 157/208] Set timezone as utc in serialization hooks Fixes #108 --- setup.py | 2 +- stream/serializer.py | 13 ++++++++++--- stream/tests/test_client.py | 13 +++++++------ 3 files changed, 18 insertions(+), 10 deletions(-) diff --git a/setup.py b/setup.py index ae6d018..c2ec20a 100644 --- a/setup.py +++ b/setup.py @@ -16,7 +16,7 @@ if sys.version_info < (2, 7, 9): requests = "requests[security]>=2.4.1,<3" -install_requires = ["pycryptodomex>=3.4.7,<4", requests, "six>=1.8.0"] +install_requires = ["pycryptodomex>=3.4.7,<4", requests, "six>=1.8.0", "pytz>=2019.3"] if sys.version_info < (2, 7, 0): install_requires.append("pyOpenSSL<18.0.0") diff --git a/stream/serializer.py b/stream/serializer.py index fdbc68b..6827161 100644 --- a/stream/serializer.py +++ b/stream/serializer.py @@ -1,6 +1,7 @@ import datetime import json import six +import pytz """ Adds the ability to send date and datetime objects to the API @@ -13,7 +14,9 @@ def 
_datetime_encoder(obj): if isinstance(obj, datetime.datetime): - return datetime.datetime.strftime(obj, DATETIME_FORMAT) + if obj.utcoffset() is None: # support for <= 3.3 + obj = pytz.utc.localize(obj) + return datetime.datetime.strftime(obj.astimezone(pytz.utc), DATETIME_FORMAT) if isinstance(obj, datetime.date): return datetime.datetime.strftime(obj, DATE_FORMAT) @@ -31,13 +34,17 @@ def _datetime_decoder(dict_): try: # The api always returns times like this # 2014-07-25T09:12:24.735 - datetime_obj = datetime.datetime.strptime(value, DATETIME_FORMAT) + datetime_obj = pytz.utc.localize( + datetime.datetime.strptime(value, DATETIME_FORMAT) + ) dict_[key] = datetime_obj except (ValueError, TypeError): try: # The api always returns times like this # 2014-07-25T09:12:24.735 - datetime_obj = datetime.datetime.strptime(value, DATE_FORMAT) + datetime_obj = pytz.utc.localize( + datetime.datetime.strptime(value, DATE_FORMAT) + ) dict_[key] = datetime_obj.date() except (ValueError, TypeError): continue diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index eb162f4..17abfb3 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -4,6 +4,7 @@ from stream.exceptions import ApiKeyException, InputException import random import jwt +import pytz try: from unittest.case import TestCase @@ -831,7 +832,7 @@ def test_uniqueness(self): b.) The same time and foreign id """ - utcnow = datetime.datetime.utcnow() + utcnow = datetime.datetime.now(tz=pytz.utc) activity_data = {"actor": 1, "verb": "tweet", "object": 1, "time": utcnow} self.user1.add_activity(activity_data) self.user1.add_activity(activity_data) @@ -869,7 +870,7 @@ def test_uniqueness_topic(self): def test_uniqueness_foreign_id(self): now = datetime.datetime.now(tzlocal()) - utcnow = (now - now.utcoffset()).replace(tzinfo=None) + utcnow = now.astimezone(pytz.utc) activity_data = { "actor": 1, @@ -900,7 +901,7 @@ def test_uniqueness_foreign_id(self): def test_time_ordering(self): """ - datetime.datetime.utcnow() is our recommended approach + datetime.datetime.now(tz=pytz.utc) is our recommended approach so if we add an activity add one using time add another activity it should be in the right spot @@ -908,7 +909,7 @@ def test_time_ordering(self): # timedelta is used to "make sure" that ordering is known even though # server time is not - custom_time = datetime.datetime.utcnow() - dt.timedelta(days=1) + custom_time = datetime.datetime.now(tz=pytz.utc) - dt.timedelta(days=1) feed = self.user2 for index, activity_time in enumerate([None, custom_time, None]): @@ -952,8 +953,8 @@ def test_wrong_feed_spec(self): def test_serialization(self): today = datetime.date.today() - then = datetime.datetime.now().replace(microsecond=0) - now = datetime.datetime.now() + now = datetime.datetime.now(tz=pytz.utc) + then = now.replace(microsecond=0) data = dict( string="string", float=0.1, From 8e019e7df5c54d8a511ae8123e4227a382ced926 Mon Sep 17 00:00:00 2001 From: Peter van Kampen Date: Fri, 17 Apr 2020 19:21:13 +0200 Subject: [PATCH 158/208] prepare release --- CHANGELOG | 8 ++++++++ stream/__init__.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/CHANGELOG b/CHANGELOG index 33e5bec..8a7f297 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,14 @@ Change history ================ +===== +3.2.1 +===== +:release-date: 2020-03-17 +:by: Ferhat Elmas + +Set timezone as utc in serialization hooks + ===== 3.2.0 ===== diff --git a/stream/__init__.py b/stream/__init__.py index e27e12a..dde02b9 100644 --- 
a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = "Copyright 2014, Stream.io, Inc" __credits__ = ["Thierry Schellenbach, mellowmorning.com, @tschellenbach"] __license__ = "BSD-3-Clause" -__version__ = "3.2.0" +__version__ = "3.2.1" __maintainer__ = "Thierry Schellenbach" __email__ = "support@getstream.io" __status__ = "Production" From 2c7a42a79a3eb343ef15cd3630a4af5446e42bd4 Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Mon, 4 May 2020 17:49:02 +0200 Subject: [PATCH 159/208] Add unfollow many support (#111) Fixes #110 --- stream/client.py | 12 ++++++++++++ stream/tests/test_client.py | 15 +++++++++++++++ 2 files changed, 27 insertions(+) diff --git a/stream/client.py b/stream/client.py index 8351943..fe7b0c2 100644 --- a/stream/client.py +++ b/stream/client.py @@ -323,6 +323,18 @@ def follow_many(self, follows, activity_copy_limit=None): token = self.create_jwt_token("follower", "*", feed_id="*") return self.post("follow_many/", token, params=params, data=follows) + def unfollow_many(self, unfollows): + """ + Unfollows many feeds at batch + :param unfollows: the list of unfollow relations + + eg. [{'source': source, 'target': target, 'keep_history': keep_history}] + """ + params = None + + token = self.create_jwt_token("follower", "*", feed_id="*") + return self.post("unfollow_many/", token, params=params, data=unfollows) + def update_activities(self, activities): """ Update or create activities diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index 17abfb3..663eb79 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -1020,6 +1020,21 @@ def test_follow_many_acl(self): activities = feed.get(limit=5)["results"] self.assertEqual(len(activities), 0) + def test_unfollow_many(self): + unfollows = [ + {"source": "user:1", "target": "timeline:1"}, + {"source": "user:2", "target": "timeline:2", "keep_history": False}, + ] + + self.c.unfollow_many(unfollows) + + unfollows.append({"source": "user:1", "target": 42}) + + def failing_unfollow(): + self.c.unfollow_many(unfollows) + + self.assertRaises(InputException, failing_unfollow) + def test_add_to_many(self): activity = {"actor": 1, "verb": "tweet", "object": 1, "custom": "data"} feeds = [getfeed("flat", str(i)).id for i in range(10, 20)] From 5aba62bf1a7149d0d20af0819bbcd41189b11b78 Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Mon, 4 May 2020 17:55:38 +0200 Subject: [PATCH 160/208] Changelog for v3.3.0 --- CHANGELOG | 8 ++++++++ stream/__init__.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/CHANGELOG b/CHANGELOG index 8a7f297..07b3589 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,14 @@ Change history ================ +===== +3.3.0 +===== +:release-date: 2020-05-04 +:by: Ferhat Elmas + +Add batch unfollow support + ===== 3.2.1 ===== diff --git a/stream/__init__.py b/stream/__init__.py index dde02b9..d97dc2d 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = "Copyright 2014, Stream.io, Inc" __credits__ = ["Thierry Schellenbach, mellowmorning.com, @tschellenbach"] __license__ = "BSD-3-Clause" -__version__ = "3.2.1" +__version__ = "3.3.0" __maintainer__ = "Thierry Schellenbach" __email__ = "support@getstream.io" __status__ = "Production" From d8757f94d193e3f1bca537b2f6309e7c0fc404f3 Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Mon, 11 May 2020 13:05:41 +0200 Subject: [PATCH 161/208] Expose target feed extra data in reaction add (#112) Expose target_feeds_extra_data to add data to the activities 
when a reaction creates an activity in the given target feeds. --- stream/reactions.py | 22 ++++++++++++++++++++-- stream/tests/test_client.py | 30 ++++++++++++++++++++++++++++++ 2 files changed, 50 insertions(+), 2 deletions(-) diff --git a/stream/reactions.py b/stream/reactions.py index e7e1152..8dd16b4 100644 --- a/stream/reactions.py +++ b/stream/reactions.py @@ -3,12 +3,21 @@ def __init__(self, client, token): self.client = client self.token = token - def add(self, kind, activity_id, user_id, data=None, target_feeds=None): + def add( + self, + kind, + activity_id, + user_id, + data=None, + target_feeds=None, + target_feeds_extra_data=None, + ): payload = dict( kind=kind, activity_id=activity_id, data=data, target_feeds=target_feeds, + target_feeds_extra_data=target_feeds_extra_data, user_id=user_id, ) return self.client.post( @@ -34,12 +43,21 @@ def delete(self, reaction_id): "reaction/%s" % reaction_id, service_name="api", signature=self.token ) - def add_child(self, kind, parent_id, user_id, data=None, target_feeds=None): + def add_child( + self, + kind, + parent_id, + user_id, + data=None, + target_feeds=None, + target_feeds_extra_data=None, + ): payload = dict( kind=kind, parent=parent_id, data=data, target_feeds=target_feeds, + target_feeds_extra_data=target_feeds_extra_data, user_id=user_id, ) return self.client.post( diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index 663eb79..de8dd44 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -1294,6 +1294,36 @@ def test_create_user_reference(self): def test_reaction_add(self): self.c.reactions.add("like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike") + def test_reaction_add_to_target_feeds(self): + r = self.c.reactions.add( + "superlike", + "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", + "mike", + data={"popularity": 50}, + target_feeds=["user:michelle"], + target_feeds_extra_data={"popularity": 100}, + ) + self.assertEqual(r["data"]["popularity"], 50) + a = self.c.feed("user", "michelle").get(limit=1)["results"][0] + self.assertTrue(r["id"] in a["reaction"]) + self.assertEqual(a["verb"], "superlike") + self.assertEqual(a["popularity"], 100) + + child = self.c.reactions.add_child( + "superlike", + r["id"], + "rob", + data={"popularity": 60}, + target_feeds=["user:michelle"], + target_feeds_extra_data={"popularity": 200}, + ) + + self.assertEqual(child["data"]["popularity"], 60) + a = self.c.feed("user", "michelle").get(limit=1)["results"][0] + self.assertTrue(child["id"] in a["reaction"]) + self.assertEqual(a["verb"], "superlike") + self.assertEqual(a["popularity"], 200) + def test_reaction_get(self): response = self.c.reactions.add( "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" From 77d8312ace9183091915bb94c78c4ac76cb1ff7e Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Mon, 11 May 2020 13:08:03 +0200 Subject: [PATCH 162/208] Changelog for v3.4.0 --- CHANGELOG | 8 ++++++++ stream/__init__.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/CHANGELOG b/CHANGELOG index 07b3589..0ed3a49 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,14 @@ Change history ================ +===== +3.4.0 +===== +:release-date: 2020-05-11 +:by: Ferhat Elmas + +Expose target_feeds_extra_data to add extra data to activities from reactions + ===== 3.3.0 ===== diff --git a/stream/__init__.py b/stream/__init__.py index d97dc2d..5897d30 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = "Copyright 2014, Stream.io, Inc" __credits__ = ["Thierry 
Schellenbach, mellowmorning.com, @tschellenbach"] __license__ = "BSD-3-Clause" -__version__ = "3.3.0" +__version__ = "3.4.0" __maintainer__ = "Thierry Schellenbach" __email__ = "support@getstream.io" __status__ = "Production" From 99e4b0e616cf5eba8f7a1facad8f678fe49de87b Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Mon, 8 Jun 2020 10:08:36 +0200 Subject: [PATCH 163/208] Add enrichment into activity get (#115) Fixes #114 --- README.md | 3 +++ stream/client.py | 25 +++++++++++++++++++++-- stream/tests/test_client.py | 40 +++++++++++++++++++++++++++++++++++++ 3 files changed, 66 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 1e078d8..0d7fee8 100644 --- a/README.md +++ b/README.md @@ -104,6 +104,9 @@ client.get_activities(foreign_id_times=[ (foreign_id, activity_time), ]) +# Enrich while getting activities +client.get_activities(ids=[activity_id], enrich=True, reactions={"counts": True}) + # Update some parts of an activity with activity_partial_update set = { 'product.name': 'boots', diff --git a/stream/client.py b/stream/client.py index fe7b0c2..7cf7b08 100644 --- a/stream/client.py +++ b/stream/client.py @@ -352,10 +352,14 @@ def update_activity(self, activity): """ return self.update_activities([activity]) - def get_activities(self, ids=None, foreign_id_times=None): + def get_activities( + self, ids=None, foreign_id_times=None, enrich=False, reactions=None, **params + ): """ Retrieves activities by their ID or foreign_id + time combination + Pass enrich and reactions options for enrichment + ids: list of activity IDs foreign_id_time: list of tuples (foreign_id, time) """ @@ -371,7 +375,13 @@ def get_activities(self, ids=None, foreign_id_times=None): "At most one of the parameters ids or foreign_id_time must be provided" ) + endpoint = "activities/" + if enrich or reactions is not None: + endpoint = "enrich/" + endpoint + query_params = {} + for key in params: + query_params[key] = params[key] if ids is not None: query_params["ids"] = ",".join(ids) @@ -383,7 +393,18 @@ def get_activities(self, ids=None, foreign_id_times=None): query_params["foreign_ids"] = ",".join(foreign_ids) query_params["timestamps"] = ",".join(timestamps) - return self.get("activities/", auth_token, params=query_params) + if reactions is not None and not isinstance(reactions, (dict,)): + raise TypeError("reactions argument should be a dictionary") + + if reactions is not None: + if reactions.get("own"): + query_params["withOwnReactions"] = True + if reactions.get("recent"): + query_params["withRecentReactions"] = True + if reactions.get("counts"): + query_params["withReactionCounts"] = True + + return self.get(endpoint, auth_token, params=query_params) def activity_partial_update( self, id=None, foreign_id=None, time=None, set={}, unset=[] diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index de8dd44..1338de9 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -1159,6 +1159,46 @@ def test_get_activities_full(self): self.assertEqual(len(response["results"]), 1) self.assertEqual(activity["foreign_id"], response["results"][0]["foreign_id"]) + def test_get_activities_full_with_enrichment(self): + dt = datetime.datetime.utcnow() + fid = "awesome-test" + + actor = self.c.users.add(str(uuid1()), data={"name": "barry"}) + activity = { + "actor": self.c.users.create_reference(actor["id"]), + "object": "09", + "verb": "tweet", + "time": dt, + "foreign_id": fid, + } + + feed = getfeed("user", "test_get_activity") + activity = feed.add_activity(activity) 
+ + reaction1 = self.c.reactions.add("like", activity["id"], "liker") + reaction2 = self.c.reactions.add("reshare", activity["id"], "sharer") + + def validate(response): + self.assertEqual(len(response["results"]), 1) + self.assertEqual(response["results"][0]["id"], activity["id"]) + self.assertEqual( + response["results"][0]["foreign_id"], activity["foreign_id"] + ) + self.assertEqual(response["results"][0]["actor"]["data"]["name"], "barry") + latest_reactions = response["results"][0]["latest_reactions"] + self.assertEqual(len(latest_reactions), 2) + self.assertEqual(latest_reactions["like"][0]["id"], reaction1["id"]) + self.assertEqual(latest_reactions["reshare"][0]["id"], reaction2["id"]) + self.assertEqual( + response["results"][0]["reaction_counts"], {"like": 1, "reshare": 1} + ) + + reactions = {"recent": True, "counts": True} + validate(self.c.get_activities(ids=[activity["id"]], reactions=reactions)) + validate( + self.c.get_activities(foreign_id_times=[(fid, dt)], reactions=reactions) + ) + def test_activity_partial_update(self): now = datetime.datetime.utcnow() feed = self.c.feed("user", uuid4()) From e48276a0ff3d900d2ed7e439d1601891926d0b71 Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Mon, 8 Jun 2020 10:13:19 +0200 Subject: [PATCH 164/208] Changelog for v3.5.0 --- CHANGELOG | 8 ++++++++ stream/__init__.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/CHANGELOG b/CHANGELOG index 0ed3a49..ce8c5b3 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,14 @@ Change history ================ +===== +3.5.0 +===== +:release-date: 2020-06-08 +:by: Ferhat Elmas + +Add enrichment support to direct activity get + ===== 3.4.0 ===== diff --git a/stream/__init__.py b/stream/__init__.py index 5897d30..f0234ca 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = "Copyright 2014, Stream.io, Inc" __credits__ = ["Thierry Schellenbach, mellowmorning.com, @tschellenbach"] __license__ = "BSD-3-Clause" -__version__ = "3.4.0" +__version__ = "3.5.0" __maintainer__ = "Thierry Schellenbach" __email__ = "support@getstream.io" __status__ = "Production" From 2000000d350d9527f3fba89aa9de5eac7919f655 Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Mon, 8 Jun 2020 11:05:34 +0200 Subject: [PATCH 165/208] Drop by in changelog --- CHANGELOG | 38 -------------------------------------- 1 file changed, 38 deletions(-) diff --git a/CHANGELOG b/CHANGELOG index ce8c5b3..36a27bb 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -6,7 +6,6 @@ 3.5.0 ===== :release-date: 2020-06-08 -:by: Ferhat Elmas Add enrichment support to direct activity get @@ -14,7 +13,6 @@ Add enrichment support to direct activity get 3.4.0 ===== :release-date: 2020-05-11 -:by: Ferhat Elmas Expose target_feeds_extra_data to add extra data to activities from reactions @@ -22,7 +20,6 @@ Expose target_feeds_extra_data to add extra data to activities from reactions 3.3.0 ===== :release-date: 2020-05-04 -:by: Ferhat Elmas Add batch unfollow support @@ -30,7 +27,6 @@ Add batch unfollow support 3.2.1 ===== :release-date: 2020-03-17 -:by: Ferhat Elmas Set timezone as utc in serialization hooks @@ -38,7 +34,6 @@ Set timezone as utc in serialization hooks 3.2.0 ===== :release-date: 2020-03-17 -:by: Ferhat Elmas Add open graph scrape support Update python support (drop 2.6, add 3.8) @@ -48,7 +43,6 @@ Fixes in docs for collections and personalization 3.1.1 ===== :release-date: 2019-11-07 -:by: Tommaso Barbuli Bump crypto deps @@ -56,7 +50,6 @@ Bump crypto deps 3.1.0 ===== :release-date: 2018-05-24 -:by: Jelte 
Fennema Batch partial update @@ -64,7 +57,6 @@ Batch partial update 3.0.2 ===== :release-date: 2018-05-24 -:by: Jelte Fennema Fixes for filtering by reactions by kind @@ -73,7 +65,6 @@ Fixes for filtering by reactions by kind 3.0.1 ====== :release-date: 2018-12-04 -:by: Tommaso Barbugli Add short-hand version for collections.create_reference() @@ -81,7 +72,6 @@ Add short-hand version for collections.create_reference() 3.0.0 ====== :release-date: 2018-12-03 -:by: Tommaso Barbugli Add support for reactions Add support for users @@ -93,7 +83,6 @@ Add feed.get enrichment params 2.12.0 ====== :release-date: 2018-10-08 -:by: Peter van Kampen Add user-session-token support @@ -101,7 +90,6 @@ Add user-session-token support 2.11.0 ====== :release-date: 2017-08-23 -:by: Tommaso Barbugli Add collection helpers to create refs @@ -109,7 +97,6 @@ Add collection helpers to create refs 2.10.0 ====== :release-date: 2017-07-30 -:by: Tommaso Barbugli Partial activity API endpoint @@ -117,7 +104,6 @@ Partial activity API endpoint 2.9.3 ====== :release-date: 2017-07-20 -:by: Tommaso Barbugli Use Readme.md content as package long description @@ -125,7 +111,6 @@ Use Readme.md content as package long description 2.9.2 ====== :release-date: 2017-07-20 -:by: Tommaso Barbugli Fixed deserialization problem with datetime objects with zeroed microseconds Support newer versions of the pyJWT lib @@ -135,7 +120,6 @@ Support newer versions of the pyJWT lib 2.9.1 ====== :release-date: 2017-07-18 -:by: Tommaso Barbugli Renamed client.get_activities' foreign_id_time param to foreign_id_times @@ -144,7 +128,6 @@ Renamed client.get_activities' foreign_id_time param to foreign_id_times 2.9.0 ====== :release-date: 2017-07-05 -:by: Tommaso Barbugli Add support for get activity API endpoint @@ -152,7 +135,6 @@ Add support for get activity API endpoint 2.8.1 ====== :release-date: 2017-12-21 -:by: Tommaso Barbugli Fixes a regression with embedded httpsig and Python 3 @@ -160,7 +142,6 @@ Fixes a regression with embedded httpsig and Python 3 2.8.0 ====== :release-date: 2017-12-21 -:by: Tommaso Barbugli Fixes install issues on Windows @@ -171,42 +152,36 @@ Fixes install issues on Windows 2.7.0 ====== :release-date: 2017-12-14 -:by: Aaron McMillin * All client methods that make requests will return the response 2.6.2 ===== :release-date 2017-12-08 -:by: Balazs Consolidate API URL generation across API, Collections and Personalization services 2.6.0 ===== :release-date 2017-12-08 -:by: Balazs Support the new collections endpoint and flexible get requests for personalization 2.5.0 ====== :release-date: 2017-10-19 -:by: Tommaso Barbugli * Use new .com domain for API and Analytics 2.4.0 ====== :release-date: 2017-08-31 -:by: Tommaso Barbugli * Added support for To target update endpoint 2.3.11 ====== :release-date: 2017-05-22 -:by: Ian Douglas * Added support for Python 2.6.9 and downgrade to requests 2.2.1 @@ -214,7 +189,6 @@ Support the new collections endpoint and flexible get requests for personalizati 2.3.9 ========== :release-date: 2016-12-20 -:by: Jelte Fennema * Fix errors_from_fields function so it displays the extra data returned by the server about InputException errors. 
@@ -223,14 +197,12 @@ Support the new collections endpoint and flexible get requests for personalizati 2.3.8 ===== :release-date: 2016-06-09 -:by: Tommaso Barbugli * Add support for keep_history on unfollow 2.3.7 ===== :release-date: 2016-06-02 -:by: Tommaso Barbugli * Add HTTP Signature auth method (for application auth resources) * Add support for follow_many batch operation @@ -242,7 +214,6 @@ Support the new collections endpoint and flexible get requests for personalizati 2.3.5 ===== :release-date: 2015-10-07 -:by: Thierry Schellenbach * Added support for activity update @@ -250,7 +221,6 @@ Support the new collections endpoint and flexible get requests for personalizati 2.3.3 ===== :release-date: 2015-10-07 -:by: Thierry Schellenbach * Added support for creating redirect urls @@ -258,42 +228,36 @@ Support the new collections endpoint and flexible get requests for personalizati 2.3.0 ===== :release-date: 2015-06-11 -:by: Tommaso Barbugli * Added support for read-only tokens 2.1.4 ===== :release-date: 2015-01-14 -:by: Tommaso Barbugli * Added support for extra data for follow actions 2.1.3 ===== :release-date: 2015-01-05 -:by: Thierry Schellenbach * Bugfix, mark_seen and mark_read now work 2.1.0 ===== :release-date: 2014-12-19 -:by: Thierry Schellenbach * Added location support to reduce latency 2.0.1 ===== :release-date: 2014-11-18 -:by: Thierry Schellenbach * Additional validation on feed_slug and user_id 2.0.0 ===== :release-date: 2014-11-10 -:by: Thierry Schellenbach * Breaking change: New style feed syntax, client.feed('user', '1') instead of client.feed('user:3') * Breaking change: New style follow syntax, feed.follow('user', 3) @@ -305,13 +269,11 @@ Support the new collections endpoint and flexible get requests for personalizati 1.1.1 ===== :release-date: 2014-09-20 08:00 A.M GMT -:by: Tommaso Barbugli * Add HTTP client retries 1.1.0 ===== :release-date: 2014-09-08 08:00 A.M GMT -:by: Tommaso Barbugli * Add support for mark read (notifications feeds) From 3282b95cc7cfd39041b9413a2c8839e8d9133672 Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Mon, 8 Jun 2020 12:02:45 +0200 Subject: [PATCH 166/208] Set algo in jwt decode for deprecation notice (#116) JWT decode without an algorithm is deprecated and throws warning, so we set explicitly to silence warnings. 
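A minimal sketch of the pinned decode (illustration only, not part of the
committed diff; the secret below is a placeholder):

```python
import jwt

api_secret = "my-api-secret"  # placeholder Stream API secret
token = jwt.encode({"user_id": "user"}, api_secret, algorithm="HS256")

# Passing algorithms= explicitly keeps newer PyJWT releases from
# warning about a decode call with no pinned algorithm.
payload = jwt.decode(token, api_secret, algorithms=["HS256"])
assert payload["user_id"] == "user"
```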
--- stream/tests/test_client.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index 1338de9..6757372 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -376,10 +376,10 @@ def test_token_retrieval(self): def test_user_session_token(self): client = stream.connect(self.c.api_key, self.c.api_secret) token = client.create_user_session_token("user") - payload = jwt.decode(token, self.c.api_secret) + payload = jwt.decode(token, self.c.api_secret, algorithms=["HS256"]) self.assertEqual(payload["user_id"], "user") token = client.create_user_session_token("user", client="python", testing=True) - payload = jwt.decode(token, self.c.api_secret) + payload = jwt.decode(token, self.c.api_secret, algorithms=["HS256"]) self.assertEqual(payload["client"], "python") self.assertEqual(payload["testing"], True) @@ -1069,7 +1069,9 @@ def test_create_email_redirect(self): parsed_url = urlparse(redirect_url) qs = parse_qs(parsed_url.query) - decoded = jwt.decode(qs["authorization"][0], self.c.api_secret) + decoded = jwt.decode( + qs["authorization"][0], self.c.api_secret, algorithms=["HS256"] + ) self.assertEqual( decoded, From 587e4afd385321fdd9adae88f6d9ad8ea770cb6d Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Mon, 8 Jun 2020 12:04:00 +0200 Subject: [PATCH 167/208] Changelog for v3.5.1 --- CHANGELOG | 7 +++++++ stream/__init__.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/CHANGELOG b/CHANGELOG index 36a27bb..86aeb1b 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,13 @@ Change history ================ +===== +3.5.1 +===== +:release-date: 2020-06-08 + +Handle warning in JWT decode regarding missing algorithm + ===== 3.5.0 ===== diff --git a/stream/__init__.py b/stream/__init__.py index f0234ca..889a0e7 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = "Copyright 2014, Stream.io, Inc" __credits__ = ["Thierry Schellenbach, mellowmorning.com, @tschellenbach"] __license__ = "BSD-3-Clause" -__version__ = "3.5.0" +__version__ = "3.5.1" __maintainer__ = "Thierry Schellenbach" __email__ = "support@getstream.io" __status__ = "Production" From f81f4a682046033c4596f43182c7fb050576cf1d Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Mon, 8 Jun 2020 13:18:34 +0200 Subject: [PATCH 168/208] Drop undocumented create_user_session_token (#102) This method on `client` is exactly same with `create_user_token` and the implementation delegates to create_user_token and it is undocumented. To keep the code clean, dropped it. 
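For anyone still calling the removed helper, a rough migration sketch
(placeholder key, secret, and user id; not part of the committed diff):

```python
import stream

client = stream.connect("YOUR_API_KEY", "API_KEY_SECRET")

# create_user_session_token("user-42") returned the same JWT;
# create_user_token is the remaining, equivalent call.
token = client.create_user_token("user-42")
```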
--- stream/client.py | 6 ++---- stream/tests/test_client.py | 6 +++--- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/stream/client.py b/stream/client.py index 7cf7b08..487dc6e 100644 --- a/stream/client.py +++ b/stream/client.py @@ -169,7 +169,8 @@ def _parse_response(self, response): return parsed_result def create_user_token(self, user_id, **extra_data): - """Setup the payload for the given user_id with optional + """ + Setup the payload for the given user_id with optional extra data (key, value pairs) and encode it using jwt """ payload = {"user_id": user_id} @@ -177,9 +178,6 @@ def create_user_token(self, user_id, **extra_data): payload[k] = v return jwt.encode(payload, self.api_secret, algorithm="HS256").decode("utf-8") - def create_user_session_token(self, user_id, **extra_data): - return self.create_user_token(user_id, **extra_data) - def create_jwt_token(self, resource, action, feed_id=None, user_id=None): """ Setup the payload for the given resource, action, feed or user diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index 6757372..135a96e 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -373,12 +373,12 @@ def test_token_retrieval(self): self.user1.token self.user1.get_readonly_token() - def test_user_session_token(self): + def test_user_token(self): client = stream.connect(self.c.api_key, self.c.api_secret) - token = client.create_user_session_token("user") + token = client.create_user_token("user") payload = jwt.decode(token, self.c.api_secret, algorithms=["HS256"]) self.assertEqual(payload["user_id"], "user") - token = client.create_user_session_token("user", client="python", testing=True) + token = client.create_user_token("user", client="python", testing=True) payload = jwt.decode(token, self.c.api_secret, algorithms=["HS256"]) self.assertEqual(payload["client"], "python") self.assertEqual(payload["testing"], True) From 52404107f2f84f0c3369a82e9d6ba2d1b0b27542 Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Mon, 8 Jun 2020 13:49:00 +0200 Subject: [PATCH 169/208] Drop py2 support (#117) Fixes #113 --- .travis.yml | 7 +++---- README.md | 2 +- setup.py | 23 +++++++---------------- stream/serializer.py | 3 +-- 4 files changed, 12 insertions(+), 23 deletions(-) diff --git a/.travis.yml b/.travis.yml index 5b5d8ab..880ed85 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,6 +1,5 @@ language: python python: - - 2.7 - 3.4 - 3.5 - 3.6 @@ -21,6 +20,6 @@ script: - echo $STREAM_KEY - py.test -lv --cov=./ after_script: - - "pep8 --exclude=migrations --ignore=E501,E225,W293 stream" - - "python setup.py install" - - "codecov" + - 'pep8 --exclude=migrations --ignore=E501,E225,W293 stream' + - 'python setup.py install' + - 'codecov' diff --git a/README.md b/README.md index 0d7fee8..822e95c 100644 --- a/README.md +++ b/README.md @@ -13,7 +13,7 @@ You can sign up for a Stream account at https://getstream.io/get_started. 
stream-python supports: -- Python (2.7, 3.4, 3.5, 3.6, 3.7, 3.8) +- Python (3.4, 3.5, 3.6, 3.7, 3.8) #### Install from Pypi diff --git a/setup.py b/setup.py index b9da7f5..d366bf4 100644 --- a/setup.py +++ b/setup.py @@ -6,24 +6,16 @@ from stream import __version__, __maintainer__, __email__, __license__ import sys -unit = "unittest2py3k" if sys.version_info > (3, 0, 0) else "unittest2" -tests_require = [unit, "pytest==3.2.5", "unittest2", "pytest-cov", "python-dateutil"] +tests_require = ["pytest==3.2.5", "unittest2", "pytest-cov", "python-dateutil"] long_description = open("README.md", "r").read() -requests = "requests>=2.3.0,<3" - -if sys.version_info < (2, 7, 9): - requests = "requests[security]>=2.4.1,<3" - -install_requires = ["pycryptodomex>=3.4.7,<4", requests, "six>=1.8.0", "pytz>=2019.3"] - -if sys.version_info < (2, 7, 0): - install_requires.append("pyOpenSSL<18.0.0") - install_requires.append("pyjwt>=1.3.0,<1.6.0") - install_requires.append("pycparser<2.19") -else: - install_requires.append("pyjwt>=1.3.0,<1.8.0") +install_requires = [ + "pycryptodomex>=3.4.7,<4", + "requests>=2.3.0,<3", + "pyjwt>=1.3.0,<1.8.0", + "pytz>=2019.3", +] class PyTest(TestCommand): @@ -65,7 +57,6 @@ def run_tests(self): "Development Status :: 5 - Production/Stable", "License :: OSI Approved :: BSD License", "Natural Language :: English", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", diff --git a/stream/serializer.py b/stream/serializer.py index 6827161..8c84d45 100644 --- a/stream/serializer.py +++ b/stream/serializer.py @@ -1,6 +1,5 @@ import datetime import json -import six import pytz """ @@ -30,7 +29,7 @@ def _datetime_decoder(dict_): dict_[key] = "" continue - if value is not None and isinstance(value, six.string_types): + if value is not None and isinstance(value, str): try: # The api always returns times like this # 2014-07-25T09:12:24.735 From 4d9eb45a1af7b8a825f3e497fc8290bdc9aaa563 Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Mon, 8 Jun 2020 13:58:01 +0200 Subject: [PATCH 170/208] Drop the link to outdated docs (#118) Fixes #101 --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 822e95c..143e9f6 100644 --- a/README.md +++ b/README.md @@ -23,7 +23,7 @@ pip install stream-python ### Full documentation -Documentation for this Python client are available at the [Stream website](https://getstream.io/docs/?language=python) or on [Read the Docs](http://stream-python.readthedocs.org/en/latest/). +Documentation for this Python client are available at the [Stream website](https://getstream.io/docs/?language=python). 
### Usage From 10a2614d43260cd54e73f15dd839be53db80a3f5 Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Mon, 8 Jun 2020 20:04:28 +0200 Subject: [PATCH 171/208] Use github action (#119) * github action * drop py 3.4 (end of life more than 1 year, we will bump major anyway) * use black instead of pep8 / pycodestyle * ensure flake8 and fix issues * some readme tweaks --- .github/workflows/ci.yml | 44 +++++++++++++++++++++++++++++++ .travis.yml | 25 ------------------ README.md | 15 +++++------ dev_requirements.txt | 6 ----- dotgit/hooks/pre-commit-format.sh | 3 +-- pyproject.toml | 2 +- setup.py | 8 +++--- stream/tests/test_client.py | 12 ++++----- 8 files changed, 64 insertions(+), 51 deletions(-) create mode 100644 .github/workflows/ci.yml delete mode 100644 .travis.yml delete mode 100644 dev_requirements.txt diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..faa451a --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,44 @@ +name: build +on: + push: + branches: + - 'master' + pull_request: + +jobs: + build: + runs-on: ubuntu-latest + strategy: + matrix: + python: [3.5, 3.6, 3.7, 3.8] + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + + - name: Add pip bin to PATH + run: | + echo "::add-path::/home/runner/.local/bin" + + - name: Upgrade setuptools + if: ${{ matrix.python == '3.5' }} + run: pip install --upgrade setuptools + + - name: Install deps with ${{ matrix.python }} + run: pip install ".[test, ci]" + + - name: Lint with ${{ matrix.python }} + if: ${{ matrix.python == '3.8' }} + run: | + black --check stream + flake8 --ignore=E501,E225,W293,W503 stream + + - name: Install, test and code coverage with ${{ matrix.python }} + env: + STREAM_KEY: ${{ secrets.STREAM_KEY }} + STREAM_SECRET: ${{ secrets.STREAM_SECRET }} + run: | + python setup.py test + python setup.py install + codecov diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 880ed85..0000000 --- a/.travis.yml +++ /dev/null @@ -1,25 +0,0 @@ -language: python -python: - - 3.4 - - 3.5 - - 3.6 - - 3.7 - - 3.8 - -matrix: - fast_finish: true - include: - - python: 3.7 - dist: xenial - -cache: pip - -install: - - pip install -r dev_requirements.txt -script: - - echo $STREAM_KEY - - py.test -lv --cov=./ -after_script: - - 'pep8 --exclude=migrations --ignore=E501,E225,W293 stream' - - 'python setup.py install' - - 'codecov' diff --git a/README.md b/README.md index 143e9f6..c463845 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ stream-python ============= -[![Build Status](https://travis-ci.org/GetStream/stream-python.svg?branch=master)](https://travis-ci.org/GetStream/stream-python) [![codecov](https://codecov.io/gh/GetStream/stream-python/branch/master/graph/badge.svg)](https://codecov.io/gh/GetStream/stream-python) [![PyPI version](https://badge.fury.io/py/stream-python.svg)](http://badge.fury.io/py/stream-python) +[![build](https://github.com/GetStream/stream-python/workflows/build/badge.svg)](https://github.com/GetStream/stream-python/actions) [![codecov](https://codecov.io/gh/GetStream/stream-python/branch/master/graph/badge.svg)](https://codecov.io/gh/GetStream/stream-python) [![PyPI version](https://badge.fury.io/py/stream-python.svg)](http://badge.fury.io/py/stream-python) [stream-python](https://github.com/GetStream/stream-python) is the official Python client for [Stream](https://getstream.io/), a web service for building scalable newsfeeds and activity streams. 
@@ -13,7 +13,7 @@ You can sign up for a Stream account at https://getstream.io/get_started. stream-python supports: -- Python (3.4, 3.5, 3.6, 3.7, 3.8) +- Python (3.5, 3.6, 3.7, 3.8) #### Install from Pypi @@ -30,15 +30,15 @@ Documentation for this Python client are available at the [Stream website](https ```python import datetime -# Instantiate a new client +# Create a new client import stream client = stream.connect('YOUR_API_KEY', 'API_KEY_SECRET') -# INstantiate a new client specifying datacenter location +# Create a new client specifying data center location client = stream.connect('YOUR_API_KEY', 'API_KEY_SECRET', location='us-east') # Find your API keys here https://getstream.io/dashboard/ -# Instantiate a feed object +# Create a feed object user_feed_1 = client.feed('user', '1') # Get activities from 5 to 10 (slow pagination) @@ -168,11 +168,10 @@ LOCAL=true py.test Install black and flake8 ``` -pip install black -pip install flake8 +pip install .[lint] ``` -Install git hooks to avoid pushing invalid code (git commit will run black and flak8) +Install git hooks to avoid pushing invalid code (git commit will run `black` and `flake8`) ### Releasing a new version diff --git a/dev_requirements.txt b/dev_requirements.txt deleted file mode 100644 index 551d254..0000000 --- a/dev_requirements.txt +++ /dev/null @@ -1,6 +0,0 @@ -pytest==3.2.5 -codecov==2.0.15 -unittest2==1.1.0 -pytest-cov==2.5.1 -python-dateutil --e . diff --git a/dotgit/hooks/pre-commit-format.sh b/dotgit/hooks/pre-commit-format.sh index 63259bf..4a52719 100755 --- a/dotgit/hooks/pre-commit-format.sh +++ b/dotgit/hooks/pre-commit-format.sh @@ -9,5 +9,4 @@ if ! black . --check -q; then exit 1 fi -flake8 - +flake8 --ignore=E501,E225,W293,W503 . diff --git a/pyproject.toml b/pyproject.toml index edf85a6..4891814 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.black] line-length = 88 -py36 = true +target-version = ['py38'] include = '\.pyi?$' exclude = ''' /( diff --git a/setup.py b/setup.py index d366bf4..03c20c3 100644 --- a/setup.py +++ b/setup.py @@ -7,6 +7,9 @@ import sys tests_require = ["pytest==3.2.5", "unittest2", "pytest-cov", "python-dateutil"] +ci_require = ["flake8", "codecov"] +if sys.version_info >= (3, 6, 0): + ci_require.append("black") long_description = open("README.md", "r").read() @@ -28,7 +31,7 @@ def run_tests(self): # import here, cause outside the eggs aren't loaded import pytest - errno = pytest.main("-v --cov=./") + errno = pytest.main(["-v", "--cov=./"]) sys.exit(errno) @@ -45,7 +48,7 @@ def run_tests(self): packages=find_packages(), zip_safe=False, install_requires=install_requires, - extras_require={"test": tests_require}, + extras_require={"test": tests_require, "ci": ci_require}, cmdclass={"test": PyTest}, tests_require=tests_require, include_package_data=True, @@ -58,7 +61,6 @@ def run_tests(self): "License :: OSI Approved :: BSD License", "Natural Language :: English", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index 135a96e..638ca82 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -937,9 +937,8 @@ def test_missing_actor(self): "object": 1, "debug_example_undefined": "test", } - doit = lambda: self.user1.add_activity(activity_data) try: - doit() + self.user1.add_activity(activity_data) raise ValueError("should 
have raised InputException") except InputException: pass @@ -1113,10 +1112,11 @@ def test_email_redirect_invalid_target(self): # no protocol specified, this should raise an error target_url = "google.com" user_id = "tommaso" - create_redirect = lambda: self.c.create_redirect_url( - target_url, user_id, events - ) - self.assertRaises(MissingSchema, create_redirect) + + def redirect(): + self.c.create_redirect_url(target_url, user_id, events) + + self.assertRaises(MissingSchema, redirect) def test_follow_redirect_url(self): target_url = "http://google.com/?a=b&c=d" From 0ad5fc3ddd924fc962ab9f1a559fd4085690a02c Mon Sep 17 00:00:00 2001 From: Peter van Kampen Date: Fri, 12 Jun 2020 12:22:53 +0200 Subject: [PATCH 172/208] Allow custom payload in `client.create_jwt_token` (#121) Co-authored-by: ferhat elmas --- stream/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/stream/client.py b/stream/client.py index 487dc6e..3abafe9 100644 --- a/stream/client.py +++ b/stream/client.py @@ -178,12 +178,12 @@ def create_user_token(self, user_id, **extra_data): payload[k] = v return jwt.encode(payload, self.api_secret, algorithm="HS256").decode("utf-8") - def create_jwt_token(self, resource, action, feed_id=None, user_id=None): + def create_jwt_token(self, resource, action, feed_id=None, user_id=None, **params): """ Setup the payload for the given resource, action, feed or user and encode it using jwt """ - payload = {"action": action, "resource": resource} + payload = {**params, "action": action, "resource": resource} if feed_id is not None: payload["feed_id"] = feed_id if user_id is not None: From d0d5814281b89524254eec194293f8e8b9de305c Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Fri, 12 Jun 2020 14:03:46 +0200 Subject: [PATCH 173/208] Some improvements (#122) * better usage of py3 * improve hook * make some parts more idiomatic * handle some error-prone parts --- README.md | 2 +- dotgit/hooks/pre-commit-format.sh | 12 ++- stream/__init__.py | 2 +- stream/client.py | 26 +++-- stream/collections.py | 27 ++--- stream/feed.py | 34 +++---- stream/personalization.py | 12 +-- stream/reactions.py | 2 +- stream/serializer.py | 4 +- stream/tests/test_client.py | 163 +++++++++++++++--------------- stream/users.py | 2 +- 11 files changed, 135 insertions(+), 151 deletions(-) diff --git a/README.md b/README.md index c463845..330dd2d 100644 --- a/README.md +++ b/README.md @@ -168,7 +168,7 @@ LOCAL=true py.test Install black and flake8 ``` -pip install .[lint] +pip install .[ci] ``` Install git hooks to avoid pushing invalid code (git commit will run `black` and `flake8`) diff --git a/dotgit/hooks/pre-commit-format.sh b/dotgit/hooks/pre-commit-format.sh index 4a52719..a1758b8 100755 --- a/dotgit/hooks/pre-commit-format.sh +++ b/dotgit/hooks/pre-commit-format.sh @@ -2,11 +2,17 @@ set -e -if ! black . --check -q; then - black . +if ! black stream --check -q; then + black stream + echo echo "some files were not formatted correctly (black) commit aborted!" echo "your changes are still staged, you can accept formatting changes with git add or ignore them by adding --no-verify to git commit" exit 1 fi -flake8 --ignore=E501,E225,W293,W503 . +if ! 
flake8 --ignore=E501,E225,W293,W503 stream; then + echo + echo "commit is aborted because there are some error prone issues in your changes as printed above" + echo "your changes are still staged, you can accept formatting changes with git add or ignore them by adding --no-verify to git commit" + exit 1 +fi diff --git a/stream/__init__.py b/stream/__init__.py index 889a0e7..71aae77 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -1,5 +1,5 @@ -import re import os +import re __author__ = "Thierry Schellenbach" __copyright__ = "Copyright 2014, Stream.io, Inc" diff --git a/stream/client.py b/stream/client.py index 3abafe9..de5d85a 100644 --- a/stream/client.py +++ b/stream/client.py @@ -4,16 +4,16 @@ import jwt import requests -from stream.serializer import _datetime_encoder +from requests import Request from stream import exceptions, serializer -from stream.users import Users -from stream.utils import validate_feed_slug, validate_user_id, validate_foreign_id_time -from requests import Request -from stream.reactions import Reactions from stream.collections import Collections -from stream.personalization import Personalization from stream.feed import Feed +from stream.personalization import Personalization +from stream.reactions import Reactions +from stream.serializer import _datetime_encoder +from stream.users import Users +from stream.utils import validate_feed_slug, validate_foreign_id_time, validate_user_id try: from urllib.parse import urlparse @@ -23,7 +23,7 @@ logger = logging.getLogger(__name__) -class StreamClient(object): +class StreamClient: def __init__( self, api_key, @@ -377,9 +377,7 @@ def get_activities( if enrich or reactions is not None: endpoint = "enrich/" + endpoint - query_params = {} - for key in params: - query_params[key] = params[key] + query_params = {**params} if ids is not None: query_params["ids"] = ",".join(ids) @@ -405,7 +403,7 @@ def get_activities( return self.get(endpoint, auth_token, params=query_params) def activity_partial_update( - self, id=None, foreign_id=None, time=None, set={}, unset=[] + self, id=None, foreign_id=None, time=None, set=None, unset=None ): """ Partial update activity, via activity ID or Foreign ID + timestamp @@ -426,7 +424,7 @@ def activity_partial_update( "Only one of the id or the foreign_id+time parameters can be provided" ) - data = {"set": set, "unset": unset} + data = {"set": set or {}, "unset": unset or []} if id is not None: data["id"] = id @@ -436,7 +434,7 @@ def activity_partial_update( return self.activities_partial_update(updates=[data]) - def activities_partial_update(self, updates=[]): + def activities_partial_update(self, updates=None): """ Partial update activity, via activity ID or Foreign ID + timestamp @@ -460,7 +458,7 @@ def activities_partial_update(self, updates=[]): auth_token = self.create_jwt_token("activities", "*", feed_id="*") - data = {"changes": updates} + data = {"changes": updates or []} return self.post("activity/", auth_token, data=data) diff --git a/stream/collections.py b/stream/collections.py index 56affad..967d5c6 100644 --- a/stream/collections.py +++ b/stream/collections.py @@ -1,4 +1,4 @@ -class Collections(object): +class Collections: def __init__(self, client, token): """ Used to manipulate data at the 'meta' endpoint @@ -34,18 +34,17 @@ def upsert(self, collection_name, data): {"id": '2', "name": "Ruby", "interests": ["Sunbeams", "Surprise Attacks"]}]) """ - if type(data) != list: + if not isinstance(data, list): data = [data] data_json = {collection_name: data} - response = 
self.client.post( + return self.client.post( "collections/", service_name="api", signature=self.token, data={"data": data_json}, ) - return response def select(self, collection_name, ids): """ @@ -60,24 +59,20 @@ def select(self, collection_name, ids): client.collections.select('user', [1,2,3]) """ - if type(ids) != list: + if not isinstance(ids, list): ids = [ids] - ids = [str(i) for i in ids] - foreign_ids = [] - for i in range(len(ids)): - foreign_ids.append("%s:%s" % (collection_name, ids[i])) - foreign_ids = ",".join(foreign_ids) + foreign_ids = ",".join( + "%s:%s" % (collection_name, k) for i, k in enumerate(ids) + ) - response = self.client.get( + return self.client.get( "collections/", service_name="api", params={"foreign_ids": foreign_ids}, signature=self.token, ) - return response - def delete_many(self, collection_name, ids): """ Delete data from meta. @@ -90,18 +85,16 @@ def delete_many(self, collection_name, ids): client.collections.delete('user', ['1','2','3']) """ - if type(ids) != list: + if not isinstance(ids, list): ids = [ids] ids = [str(i) for i in ids] params = {"collection_name": collection_name, "ids": ids} - response = self.client.delete( + return self.client.delete( "collections/", service_name="api", params=params, signature=self.token ) - return response - def add(self, collection_name, data, id=None, user_id=None): payload = dict(id=id, data=data, user_id=user_id) return self.client.post( diff --git a/stream/feed.py b/stream/feed.py index d21feba..e79dbee 100644 --- a/stream/feed.py +++ b/stream/feed.py @@ -1,7 +1,7 @@ -from stream.utils import validate_feed_id, validate_user_id, validate_feed_slug +from stream.utils import validate_feed_id, validate_feed_slug, validate_user_id -class Feed(object): +class Feed: def __init__(self, client, feed_slug, user_id, token): """ Initializes the Feed class @@ -61,8 +61,7 @@ def add_activity(self, activity_data): activity_data["to"] = self.add_to_signature(activity_data["to"]) token = self.create_scope_token("feed", "write") - result = self.client.post(self.feed_url, data=activity_data, signature=token) - return result + return self.client.post(self.feed_url, data=activity_data, signature=token) def add_activities(self, activity_list): """ @@ -87,8 +86,8 @@ def add_activities(self, activity_list): token = self.create_scope_token("feed", "write") data = dict(activities=activities) if activities: - result = self.client.post(self.feed_url, data=data, signature=token) - return result + return self.client.post(self.feed_url, data=data, signature=token) + return None def remove_activity(self, activity_id=None, foreign_id=None): """ @@ -106,8 +105,7 @@ def remove_activity(self, activity_id=None, foreign_id=None): token = self.create_scope_token("feed", "delete") if foreign_id is not None: params["foreign_id"] = "1" - result = self.client.delete(url, signature=token, params=params) - return result + return self.client.delete(url, signature=token, params=params) def get(self, enrich=False, reactions=None, **params): """ @@ -143,8 +141,7 @@ def get(self, enrich=False, reactions=None, **params): if reactions.get("counts"): params["withReactionCounts"] = True - response = self.client.get(feed_url, params=params, signature=token) - return response + return self.client.get(feed_url, params=params, signature=token) def follow( self, target_feed_slug, target_user_id, activity_copy_limit=None, **extra_data @@ -168,8 +165,7 @@ def follow( data["activity_copy_limit"] = activity_copy_limit token = self.create_scope_token("follower", "write") 
data.update(extra_data) - response = self.client.post(url, data=data, signature=token) - return response + return self.client.post(url, data=data, signature=token) def unfollow(self, target_feed_slug, target_user_id, keep_history=False): """ @@ -183,31 +179,27 @@ def unfollow(self, target_feed_slug, target_user_id, keep_history=False): params = {} if keep_history: params["keep_history"] = True - response = self.client.delete(url, signature=token, params=params) - return response + return self.client.delete(url, signature=token, params=params) def followers(self, offset=0, limit=25, feeds=None): """ Lists the followers for the given feed """ - feeds = feeds is not None and ",".join(feeds) or "" + feeds = ",".join(feeds) if feeds is not None else "" params = {"limit": limit, "offset": offset, "filter": feeds} url = self.feed_url + "followers/" token = self.create_scope_token("follower", "read") - response = self.client.get(url, params=params, signature=token) - return response + return self.client.get(url, params=params, signature=token) def following(self, offset=0, limit=25, feeds=None): """ List the feeds which this feed is following """ - if feeds is not None: - feeds = feeds is not None and ",".join(feeds) or "" + feeds = ",".join(feeds) if feeds is not None else "" params = {"offset": offset, "limit": limit, "filter": feeds} url = self.feed_url + "follows/" token = self.create_scope_token("follower", "read") - response = self.client.get(url, params=params, signature=token) - return response + return self.client.get(url, params=params, signature=token) def add_to_signature(self, recipients): """ diff --git a/stream/personalization.py b/stream/personalization.py index bedbe07..77f3174 100644 --- a/stream/personalization.py +++ b/stream/personalization.py @@ -1,4 +1,4 @@ -class Personalization(object): +class Personalization: def __init__(self, client, token): """ Methods to interact with personalized feeds. @@ -20,13 +20,12 @@ def get(self, resource, **params): personalization.get('follow_recommendations', user_id=123, limit=10, offset=10) """ - response = self.client.get( + return self.client.get( resource, service_name="personalization", params=params, signature=self.token, ) - return response def post(self, resource, **params): """ @@ -43,14 +42,13 @@ def post(self, resource, **params): data = params["data"] or None - response = self.client.post( + return self.client.post( resource, service_name="personalization", params=params, signature=self.token, data=data, ) - return response def delete(self, resource, **params): """ @@ -60,11 +58,9 @@ def delete(self, resource, **params): :return: data that was deleted if successful or not. 
""" - response = self.client.delete( + return self.client.delete( resource, service_name="personalization", params=params, signature=self.token, ) - - return response diff --git a/stream/reactions.py b/stream/reactions.py index 8dd16b4..88a60a4 100644 --- a/stream/reactions.py +++ b/stream/reactions.py @@ -1,4 +1,4 @@ -class Reactions(object): +class Reactions: def __init__(self, client, token): self.client = client self.token = token diff --git a/stream/serializer.py b/stream/serializer.py index 8c84d45..3dd96cf 100644 --- a/stream/serializer.py +++ b/stream/serializer.py @@ -1,5 +1,6 @@ import datetime import json + import pytz """ @@ -13,11 +14,12 @@ def _datetime_encoder(obj): if isinstance(obj, datetime.datetime): - if obj.utcoffset() is None: # support for <= 3.3 + if obj.utcoffset() is None: # 3.5 obj = pytz.utc.localize(obj) return datetime.datetime.strftime(obj.astimezone(pytz.utc), DATETIME_FORMAT) if isinstance(obj, datetime.date): return datetime.datetime.strftime(obj, DATE_FORMAT) + return None def _datetime_decoder(dict_): diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index 638ca82..350116b 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -1,28 +1,28 @@ -from dateutil.tz import tzlocal -import stream -import time -from stream.exceptions import ApiKeyException, InputException +import copy +import datetime +import json +import os import random +import sys +import time +from itertools import count +from uuid import uuid1, uuid4 + import jwt import pytz +import requests +from dateutil.tz import tzlocal +from requests.exceptions import MissingSchema + +import stream +from stream import serializer +from stream.exceptions import ApiKeyException, InputException try: from unittest.case import TestCase except ImportError: from unittest import TestCase -import json -import os -import sys -import datetime -import datetime as dt -import copy -import requests -from stream import serializer -from requests.exceptions import MissingSchema -from itertools import count -from uuid import uuid1 -from uuid import uuid4 try: from urlparse import urlparse, parse_qs @@ -152,15 +152,15 @@ def test_api_url(self): ) def test_collections_url_default(self): - client = stream.connect("key", "secret") - feed_url = client.get_full_url(relative_url="meta/", service_name="api") + c = stream.connect("key", "secret") + feed_url = c.get_full_url(relative_url="meta/", service_name="api") if not self.local_tests: self.assertEqual(feed_url, "https://api.stream-io-api.com/api/v1.0/meta/") def test_personalization_url_default(self): - client = stream.connect("key", "secret") - feed_url = client.get_full_url( + c = stream.connect("key", "secret") + feed_url = c.get_full_url( relative_url="recommended", service_name="personalization" ) @@ -171,15 +171,15 @@ def test_personalization_url_default(self): ) def test_api_url_default(self): - client = stream.connect("key", "secret") - feed_url = client.get_full_url(service_name="api", relative_url="feed/") + c = stream.connect("key", "secret") + feed_url = c.get_full_url(service_name="api", relative_url="feed/") if not self.local_tests: self.assertEqual(feed_url, "https://api.stream-io-api.com/api/v1.0/feed/") def test_collections_url_location(self): - client = stream.connect("key", "secret", location="tokyo") - feed_url = client.get_full_url(relative_url="meta/", service_name="api") + c = stream.connect("key", "secret", location="tokyo") + feed_url = c.get_full_url(relative_url="meta/", service_name="api") if not 
self.local_tests: self.assertEqual( @@ -187,8 +187,8 @@ def test_collections_url_location(self): ) def test_personalization_url_location(self): - client = stream.connect("key", "secret", location="tokyo") - feed_url = client.get_full_url( + c = stream.connect("key", "secret", location="tokyo") + feed_url = c.get_full_url( relative_url="recommended", service_name="personalization" ) @@ -199,8 +199,8 @@ def test_personalization_url_location(self): ) def test_api_url_location(self): - client = stream.connect("key", "secret", location="tokyo") - feed_url = client.get_full_url(service_name="api", relative_url="feed/") + c = stream.connect("key", "secret", location="tokyo") + feed_url = c.get_full_url(service_name="api", relative_url="feed/") if not self.local_tests: self.assertEqual( @@ -259,22 +259,22 @@ def test_update_activities_update(self): def test_heroku(self): url = "https://thierry:pass@getstream.io/?app_id=1" os.environ["STREAM_URL"] = url - client = stream.connect() - self.assertEqual(client.api_key, "thierry") - self.assertEqual(client.api_secret, "pass") - self.assertEqual(client.app_id, "1") + c = stream.connect() + self.assertEqual(c.api_key, "thierry") + self.assertEqual(c.api_secret, "pass") + self.assertEqual(c.app_id, "1") def test_heroku_no_location(self): url = "https://bvt88g4kvc63:twc5ywfste5bm2ngqkzs7ukxk3pn96yweghjrxcmcrarnt3j4dqj3tucbhym5wfd@stream-io-api.com/?app_id=669" os.environ["STREAM_URL"] = url - client = stream.connect() - self.assertEqual(client.api_key, "bvt88g4kvc63") + c = stream.connect() + self.assertEqual(c.api_key, "bvt88g4kvc63") self.assertEqual( - client.api_secret, + c.api_secret, "twc5ywfste5bm2ngqkzs7ukxk3pn96yweghjrxcmcrarnt3j4dqj3tucbhym5wfd", ) - self.assertEqual(client.app_id, "669") - feed_url = client.get_full_url("api", "feed/") + self.assertEqual(c.app_id, "669") + feed_url = c.get_full_url("api", "feed/") if self.local_tests: self.assertEqual(feed_url, "http://localhost:8000/api/v1.0/feed/") @@ -284,14 +284,14 @@ def test_heroku_no_location(self): def test_heroku_location_compat(self): url = "https://ahj2ndz7gsan:gthc2t9gh7pzq52f6cky8w4r4up9dr6rju9w3fjgmkv6cdvvav2ufe5fv7e2r9qy@us-east.getstream.io/?app_id=1" os.environ["STREAM_URL"] = url - client = stream.connect() - self.assertEqual(client.api_key, "ahj2ndz7gsan") + c = stream.connect() + self.assertEqual(c.api_key, "ahj2ndz7gsan") self.assertEqual( - client.api_secret, + c.api_secret, "gthc2t9gh7pzq52f6cky8w4r4up9dr6rju9w3fjgmkv6cdvvav2ufe5fv7e2r9qy", ) - feed_url = client.get_full_url("api", "feed/") + feed_url = c.get_full_url("api", "feed/") if self.local_tests: self.assertEqual(feed_url, "http://localhost:8000/api/v1.0/feed/") else: @@ -299,52 +299,52 @@ def test_heroku_location_compat(self): feed_url, "https://us-east-api.stream-io-api.com/api/v1.0/feed/" ) - self.assertEqual(client.app_id, "1") + self.assertEqual(c.app_id, "1") def test_heroku_location(self): url = "https://ahj2ndz7gsan:gthc2t9gh7pzq52f6cky8w4r4up9dr6rju9w3fjgmkv6cdvvav2ufe5fv7e2r9qy@us-east.stream-io-api.com/?app_id=1" os.environ["STREAM_URL"] = url - client = stream.connect() - self.assertEqual(client.api_key, "ahj2ndz7gsan") + c = stream.connect() + self.assertEqual(c.api_key, "ahj2ndz7gsan") self.assertEqual( - client.api_secret, + c.api_secret, "gthc2t9gh7pzq52f6cky8w4r4up9dr6rju9w3fjgmkv6cdvvav2ufe5fv7e2r9qy", ) - feed_url = client.get_full_url("api", "feed/") + feed_url = c.get_full_url("api", "feed/") if self.local_tests: self.assertEqual(feed_url, "http://localhost:8000/api/v1.0/feed/") else: 
self.assertEqual( feed_url, "https://us-east-api.stream-io-api.com/api/v1.0/feed/" ) - self.assertEqual(client.app_id, "1") + self.assertEqual(c.app_id, "1") def test_heroku_overwrite(self): url = "https://thierry:pass@getstream.io/?app_id=1" os.environ["STREAM_URL"] = url - client = stream.connect("a", "b", "c") - self.assertEqual(client.api_key, "a") - self.assertEqual(client.api_secret, "b") - self.assertEqual(client.app_id, "c") + c = stream.connect("a", "b", "c") + self.assertEqual(c.api_key, "a") + self.assertEqual(c.api_secret, "b") + self.assertEqual(c.app_id, "c") def test_location_support(self): - client = stream.connect("a", "b", "c", location="us-east") + c = stream.connect("a", "b", "c", location="us-east") full_location = "https://us-east-api.stream-io-api.com/api/v1.0/feed/" if self.local_tests: full_location = "http://localhost:8000/api/v1.0/feed/" - self.assertEqual(client.location, "us-east") - feed_url = client.get_full_url("api", "feed/") + self.assertEqual(c.location, "us-east") + feed_url = c.get_full_url("api", "feed/") self.assertEqual(feed_url, full_location) # test a wrong location, can only work on non-local test running if not self.local_tests: - client = stream.connect("a", "b", "c", location="nonexistant") + c = stream.connect("a", "b", "c", location="nonexistant") def get_feed(): - client.feed("user", "1").get() + c.feed("user", "1").get() self.assertRaises(requests.exceptions.ConnectionError, get_feed) @@ -370,15 +370,14 @@ def invalid_follow_user_id(): self.assertRaises(ValueError, invalid_follow_user_id) def test_token_retrieval(self): - self.user1.token - self.user1.get_readonly_token() + _ = self.user1.token + _ = self.user1.get_readonly_token() def test_user_token(self): - client = stream.connect(self.c.api_key, self.c.api_secret) - token = client.create_user_token("user") + token = self.c.create_user_token("user") payload = jwt.decode(token, self.c.api_secret, algorithms=["HS256"]) self.assertEqual(payload["user_id"], "user") - token = client.create_user_token("user", client="python", testing=True) + token = self.c.create_user_token("user", client="python", testing=True) payload = jwt.decode(token, self.c.api_secret, algorithms=["HS256"]) self.assertEqual(payload["client"], "python") self.assertEqual(payload["testing"], True) @@ -475,17 +474,19 @@ def test_remove_activity_by_foreign_id(self): "foreign_id": "tweet:10", } - self.user1.add_activity(activity_data)["id"] + self.user1.add_activity(activity_data) activities = self.user1.get(limit=8)["results"] self.assertEqual(len(activities), 1) + self.assertNotEqual(activities[0]["id"], "") + self.assertEqual(activities[0]["foreign_id"], "tweet:10") self.user1.remove_activity(foreign_id="tweet:10") # verify that no activities were returned activities = self.user1.get(limit=8)["results"] self.assertEqual(len(activities), 0) - # verify this doesnt raise an error, but fails silently - self.user1.remove_activity(foreign_id="tweet:unknowandmissing") + # verify this doesn't raise an error, but fails silently + self.user1.remove_activity(foreign_id="tweet:unknownandmissing") def test_add_activities(self): activity_data = [ @@ -579,7 +580,7 @@ def test_flat_follow_no_copy(self): feed = getfeed("user", "test_flat_follow_no_copy") follower = getfeed("flat", "test_flat_follow_no_copy") activity_data = {"actor": 1, "verb": "tweet", "object": 1} - feed.add_activity(activity_data)["id"] + feed.add_activity(activity_data) follower.follow(feed.slug, feed.user_id, activity_copy_limit=0) activities = 
follower.get(limit=3)["results"] @@ -594,14 +595,14 @@ def test_flat_follow_copy_one(self): "object": 1, "foreign_id": "test:1", } - feed.add_activity(activity_data)["id"] + feed.add_activity(activity_data) activity_data = { "actor": 1, "verb": "tweet", "object": 1, "foreign_id": "test:2", } - feed.add_activity(activity_data)["id"] + feed.add_activity(activity_data) follower.follow(feed.slug, feed.user_id, activity_copy_limit=1) activities = follower.get(limit=3)["results"] @@ -679,20 +680,20 @@ def test_do_i_follow(self): self.assertEqual(followings["results"][0]["target_id"], "user:apy") def test_update_activity_to_targets(self): - time = datetime.datetime.utcnow().isoformat() + now = datetime.datetime.utcnow().isoformat() foreign_id = "user:1" activity_data = { "actor": 1, "verb": "tweet", "object": 1, "foreign_id": foreign_id, - "time": time, + "time": now, } activity_data["to"] = ["user:1", "user:2"] self.user1.add_activity(activity_data) ret = self.user1.update_activity_to_targets( - foreign_id, time, new_targets=["user:3", "user:2"] + foreign_id, now, new_targets=["user:3", "user:2"] ) self.assertEqual(len(ret["activity"]["to"]), 2) self.assertTrue("user:2" in ret["activity"]["to"]) @@ -700,7 +701,7 @@ def test_update_activity_to_targets(self): ret = self.user1.update_activity_to_targets( foreign_id, - time, + now, added_targets=["user:4", "user:5"], removed_targets=["user:3"], ) @@ -909,7 +910,7 @@ def test_time_ordering(self): # timedelta is used to "make sure" that ordering is known even though # server time is not - custom_time = datetime.datetime.now(tz=pytz.utc) - dt.timedelta(days=1) + custom_time = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=1) feed = self.user2 for index, activity_time in enumerate([None, custom_time, None]): @@ -944,10 +945,6 @@ def test_missing_actor(self): pass def test_wrong_feed_spec(self): - self.c = stream.connect( - "5crf3bhfzesnMISSING", - "tfq2sdqpj9g446sbv653x3aqmgn33hsn8uzdc9jpskaw8mj6vsnhzswuwptuj9su", - ) self.assertRaises(TypeError, lambda: getfeed("user1")) def test_serialization(self): @@ -1216,16 +1213,16 @@ def test_activity_partial_update(self): ) activity = feed.get()["results"][0] - set = { + to_set = { "product.name": "boots", "product.price": 7.99, "popularity": 1000, "foo": {"bar": {"baz": "qux"}}, } - unset = ["product.color"] + to_unset = ["product.color"] # partial update by ID - self.c.activity_partial_update(id=activity["id"], set=set, unset=unset) + self.c.activity_partial_update(id=activity["id"], set=to_set, unset=to_unset) updated = feed.get()["results"][0] expected = activity expected["product"] = {"name": "boots", "price": 7.99} @@ -1234,13 +1231,13 @@ def test_activity_partial_update(self): self.assertEqual(updated, expected) # partial update by foreign ID + time - set = {"foo.bar.baz": 42, "popularity": 9000} - unset = ["product.price"] + to_set = {"foo.bar.baz": 42, "popularity": 9000} + to_unset = ["product.price"] self.c.activity_partial_update( foreign_id=activity["foreign_id"], time=activity["time"], - set=set, - unset=unset, + set=to_set, + unset=to_unset, ) updated = feed.get()["results"][0] expected["product"] = {"name": "boots"} diff --git a/stream/users.py b/stream/users.py index 9b0c539..776e577 100644 --- a/stream/users.py +++ b/stream/users.py @@ -1,4 +1,4 @@ -class Users(object): +class Users: def __init__(self, client, token): self.client = client self.token = token From 876cd1ea6b33b46a52b3d83de7ab7cbf461afc77 Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Mon, 15 Jun 2020 21:41:51 
+0200 Subject: [PATCH 174/208] Add kind filter support for enrichment (#123) --- stream/client.py | 18 ++++++------- stream/feed.py | 19 ++++++-------- stream/tests/test_client.py | 50 +++++++++++++++++++++++++++++++++++++ stream/utils.py | 20 +++++++++++++++ 4 files changed, 84 insertions(+), 23 deletions(-) diff --git a/stream/client.py b/stream/client.py index de5d85a..3cabec5 100644 --- a/stream/client.py +++ b/stream/client.py @@ -13,7 +13,12 @@ from stream.reactions import Reactions from stream.serializer import _datetime_encoder from stream.users import Users -from stream.utils import validate_feed_slug, validate_foreign_id_time, validate_user_id +from stream.utils import ( + validate_feed_slug, + validate_foreign_id_time, + validate_user_id, + get_reaction_params, +) try: from urllib.parse import urlparse @@ -389,16 +394,7 @@ def get_activities( query_params["foreign_ids"] = ",".join(foreign_ids) query_params["timestamps"] = ",".join(timestamps) - if reactions is not None and not isinstance(reactions, (dict,)): - raise TypeError("reactions argument should be a dictionary") - - if reactions is not None: - if reactions.get("own"): - query_params["withOwnReactions"] = True - if reactions.get("recent"): - query_params["withRecentReactions"] = True - if reactions.get("counts"): - query_params["withReactionCounts"] = True + query_params.update(get_reaction_params(reactions)) return self.get(endpoint, auth_token, params=query_params) diff --git a/stream/feed.py b/stream/feed.py index e79dbee..5a17340 100644 --- a/stream/feed.py +++ b/stream/feed.py @@ -1,4 +1,9 @@ -from stream.utils import validate_feed_id, validate_feed_slug, validate_user_id +from stream.utils import ( + validate_feed_id, + validate_feed_slug, + validate_user_id, + get_reaction_params, +) class Feed: @@ -130,17 +135,7 @@ def get(self, enrich=False, reactions=None, **params): else: feed_url = self.feed_url - if reactions is not None and not isinstance(reactions, (dict,)): - raise TypeError("reactions argument should be a dictionary") - - if reactions is not None: - if reactions.get("own"): - params["withOwnReactions"] = True - if reactions.get("recent"): - params["withRecentReactions"] = True - if reactions.get("counts"): - params["withReactionCounts"] = True - + params.update(get_reaction_params(reactions)) return self.client.get(feed_url, params=params, signature=token) def follow( diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index 350116b..2e0da72 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -1198,6 +1198,56 @@ def validate(response): self.c.get_activities(foreign_id_times=[(fid, dt)], reactions=reactions) ) + def test_get_activities_full_with_enrichment_and_reaction_kinds(self): + dt = datetime.datetime.utcnow() + fid = "awesome-test" + + actor = self.c.users.add(str(uuid1()), data={"name": "barry"}) + activity = { + "actor": self.c.users.create_reference(actor["id"]), + "object": "09", + "verb": "tweet", + "time": dt, + "foreign_id": fid, + } + + feed = getfeed("user", "test_get_activity") + activity = feed.add_activity(activity) + + self.c.reactions.add("like", activity["id"], "liker") + self.c.reactions.add("reshare", activity["id"], "sharer") + self.c.reactions.add("comment", activity["id"], "commenter") + + reactions = {"recent": True, "counts": True, "kinds": "like,comment"} + response = self.c.get_activities(ids=[activity["id"]], reactions=reactions) + self.assertEqual(len(response["results"]), 1) + self.assertEqual(response["results"][0]["id"], 
activity["id"]) + self.assertEqual( + sorted(response["results"][0]["latest_reactions"].keys()), + ["comment", "like"], + ) + self.assertEqual( + response["results"][0]["reaction_counts"], {"like": 1, "comment": 1} + ) + + reactions = { + "recent": True, + "counts": True, + "kinds": ["", "reshare ", "comment\n"], + } + response = self.c.get_activities( + foreign_id_times=[(fid, dt)], reactions=reactions + ) + self.assertEqual(len(response["results"]), 1) + self.assertEqual(response["results"][0]["id"], activity["id"]) + self.assertEqual( + sorted(response["results"][0]["latest_reactions"].keys()), + ["comment", "reshare"], + ) + self.assertEqual( + response["results"][0]["reaction_counts"], {"comment": 1, "reshare": 1} + ) + def test_activity_partial_update(self): now = datetime.datetime.utcnow() feed = self.c.feed("user", uuid4()) diff --git a/stream/utils.py b/stream/utils.py index c6840a7..ea50635 100644 --- a/stream/utils.py +++ b/stream/utils.py @@ -54,3 +54,23 @@ def validate_foreign_id_time(foreign_id_time): if len(v) != 2: raise ValueError("foreign_id_time elements should have two elements") + + +def get_reaction_params(reactions): + if reactions is not None and not isinstance(reactions, (dict,)): + raise TypeError("reactions argument should be a dictionary") + + params = {} + if reactions is not None: + if reactions.get("own"): + params["withOwnReactions"] = True + if reactions.get("recent"): + params["withRecentReactions"] = True + if reactions.get("counts"): + params["withReactionCounts"] = True + kinds = reactions.get("kinds") + if kinds: + if isinstance(kinds, list): + kinds = ",".join(k.strip() for k in kinds if k.strip()) + params["reactionKindsFilter"] = kinds + return params From 51920e97452360bac28cab53a798b6c95ed513c9 Mon Sep 17 00:00:00 2001 From: Tommaso Barbugli Date: Fri, 19 Jun 2020 14:20:44 +0200 Subject: [PATCH 175/208] update min requirement for pycryptodomex --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 03c20c3..702f9bf 100644 --- a/setup.py +++ b/setup.py @@ -14,7 +14,7 @@ long_description = open("README.md", "r").read() install_requires = [ - "pycryptodomex>=3.4.7,<4", + "pycryptodomex>=3.8.1,<4", "requests>=2.3.0,<3", "pyjwt>=1.3.0,<1.8.0", "pytz>=2019.3", From 840c8c7e3970ec0602cd91f7af964602176d8f66 Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Wed, 15 Jul 2020 09:01:31 +0200 Subject: [PATCH 176/208] Add follow stats (#124) Add endpoint support to get follow stats of a feed --- stream/client.py | 31 +++++++++++++++++++++++++++++++ stream/tests/test_client.py | 21 +++++++++++++++++++++ 2 files changed, 52 insertions(+) diff --git a/stream/client.py b/stream/client.py index 3cabec5..9c55732 100644 --- a/stream/client.py +++ b/stream/client.py @@ -489,3 +489,34 @@ def og(self, target_url): auth_token = self.create_jwt_token("*", "*", feed_id="*") params = {"url": target_url} return self.get("og/", auth_token, params=params) + + def follow_stats(self, feed_id, followers_slugs=None, following_slugs=None): + """ + Retrieve the number of follower and following feed stats of a given feed. + For each count, feed slugs can be provided to filter counts accordingly. + + eg. 
+ client.follow_stats(me, followers_slugs=['user'], following_slugs=['commodities']) + this means to find counts of users following me and count of commodities I am following + """ + auth_token = self.create_jwt_token("*", "*", feed_id="*") + params = { + "followers": feed_id, + "following": feed_id, + } + + if followers_slugs: + params["followers_slugs"] = ( + ",".join(followers_slugs) + if isinstance(followers_slugs, list) + else followers_slugs + ) + + if following_slugs: + params["following_slugs"] = ( + ",".join(following_slugs) + if isinstance(following_slugs, list) + else following_slugs + ) + + return self.get("stats/follow/", auth_token, params=params) diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index 2e0da72..7f39094 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -1620,3 +1620,24 @@ def test_og(self): response = client.og("https://google.com") self.assertTrue("title" in response) self.assertTrue("description" in response) + + def test_follow_stats(self): + uniq = uuid4() + f = client.feed("user", uniq) + f.follow("user", uuid4()) + f.follow("user", uuid4()) + f.follow("user", uuid4()) + + client.feed("user", uuid4()).follow("user", uniq) + client.feed("timeline", uuid4()).follow("user", uniq) + + feed_id = "user:" + str(uniq) + response = client.follow_stats(feed_id)["results"] + self.assertEqual(response["following"]["count"], 3) + self.assertEqual(response["followers"]["count"], 2) + + response = client.follow_stats( + feed_id, followers_slugs=["timeline"], following_slugs=["timeline"] + )["results"] + self.assertEqual(response["following"]["count"], 0) + self.assertEqual(response["followers"]["count"], 1) From 45bb0607f1ef62143bf44174af35a8b229c50a51 Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Wed, 2 Sep 2020 20:12:07 +0200 Subject: [PATCH 177/208] Changelog for v4.0.0 --- CHANGELOG | 14 ++++++++++++++ stream/__init__.py | 2 +- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/CHANGELOG b/CHANGELOG index 86aeb1b..c4e9f5e 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,20 @@ Change history ================ +===== +4.0.0 +===== +:release-date: 2020-09-02 + +Drop old create_user_session_token in favor create_user_token +Drop python support before 3.4 +Allow custom data in client.create_jwt_token +Add kind filter for reactions in enrichment +Add follow stat support +Move to github actions from travis and improve static analysis +Update readme for old docs +Update some crypto dependencies + ===== 3.5.1 ===== diff --git a/stream/__init__.py b/stream/__init__.py index 71aae77..7e95245 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = "Copyright 2014, Stream.io, Inc" __credits__ = ["Thierry Schellenbach, mellowmorning.com, @tschellenbach"] __license__ = "BSD-3-Clause" -__version__ = "3.5.1" +__version__ = "4.0.0" __maintainer__ = "Thierry Schellenbach" __email__ = "support@getstream.io" __status__ = "Production" From 03a8ca734f79fcf8a48f18b7de9d06afea25cf04 Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Fri, 18 Dec 2020 08:06:56 +0100 Subject: [PATCH 178/208] Prepare 5.0.0 (#126) --- .github/workflows/ci.yml | 20 ++- .gitignore | 1 + CHANGELOG | 300 --------------------------------------- CHANGELOG.md | 190 +++++++++++++++++++++++++ Makefile | 22 +++ README.md | 6 +- setup.py | 6 +- stream/__init__.py | 2 +- 8 files changed, 226 insertions(+), 321 deletions(-) delete mode 100644 CHANGELOG create mode 100644 CHANGELOG.md create mode 100644 Makefile diff --git 
a/.github/workflows/ci.yml b/.github/workflows/ci.yml index faa451a..cfa526e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -10,7 +10,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: [3.5, 3.6, 3.7, 3.8] + python: [3.6, 3.7, 3.8, 3.9] steps: - uses: actions/checkout@v2 - uses: actions/setup-python@v2 @@ -19,26 +19,24 @@ jobs: - name: Add pip bin to PATH run: | - echo "::add-path::/home/runner/.local/bin" - - - name: Upgrade setuptools - if: ${{ matrix.python == '3.5' }} - run: pip install --upgrade setuptools + echo "/home/runner/.local/bin" >> $GITHUB_PATH - name: Install deps with ${{ matrix.python }} run: pip install ".[test, ci]" - name: Lint with ${{ matrix.python }} if: ${{ matrix.python == '3.8' }} - run: | - black --check stream - flake8 --ignore=E501,E225,W293,W503 stream + run: make lint - name: Install, test and code coverage with ${{ matrix.python }} env: STREAM_KEY: ${{ secrets.STREAM_KEY }} STREAM_SECRET: ${{ secrets.STREAM_SECRET }} run: | - python setup.py test python setup.py install - codecov + make test + + - name: "Upload coverage to Codecov" + uses: codecov/codecov-action@v1 + with: + fail_ci_if_error: true diff --git a/.gitignore b/.gitignore index 6cfbe24..dde5bf2 100644 --- a/.gitignore +++ b/.gitignore @@ -21,6 +21,7 @@ var/ *.egg-info/ .installed.cfg *.egg +.eggs/ # Installer logs pip-log.txt diff --git a/CHANGELOG b/CHANGELOG deleted file mode 100644 index c4e9f5e..0000000 --- a/CHANGELOG +++ /dev/null @@ -1,300 +0,0 @@ -================ - Change history -================ - -===== -4.0.0 -===== -:release-date: 2020-09-02 - -Drop old create_user_session_token in favor create_user_token -Drop python support before 3.4 -Allow custom data in client.create_jwt_token -Add kind filter for reactions in enrichment -Add follow stat support -Move to github actions from travis and improve static analysis -Update readme for old docs -Update some crypto dependencies - -===== -3.5.1 -===== -:release-date: 2020-06-08 - -Handle warning in JWT decode regarding missing algorithm - -===== -3.5.0 -===== -:release-date: 2020-06-08 - -Add enrichment support to direct activity get - -===== -3.4.0 -===== -:release-date: 2020-05-11 - -Expose target_feeds_extra_data to add extra data to activities from reactions - -===== -3.3.0 -===== -:release-date: 2020-05-04 - -Add batch unfollow support - -===== -3.2.1 -===== -:release-date: 2020-03-17 - -Set timezone as utc in serialization hooks - -===== -3.2.0 -===== -:release-date: 2020-03-17 - -Add open graph scrape support -Update python support (drop 2.6, add 3.8) -Fixes in docs for collections and personalization - -===== -3.1.1 -===== -:release-date: 2019-11-07 - -Bump crypto deps - -===== -3.1.0 -===== -:release-date: 2018-05-24 - -Batch partial update - -===== -3.0.2 -===== -:release-date: 2018-05-24 - -Fixes for filtering by reactions by kind - - -====== -3.0.1 -====== -:release-date: 2018-12-04 - -Add short-hand version for collections.create_reference() - -====== -3.0.0 -====== -:release-date: 2018-12-03 - -Add support for reactions -Add support for users -Removed HTTP Signatures based auth -Use JWT auth for everything -Add feed.get enrichment params - -====== -2.12.0 -====== -:release-date: 2018-10-08 - -Add user-session-token support - -====== -2.11.0 -====== -:release-date: 2017-08-23 - -Add collection helpers to create refs - -====== -2.10.0 -====== -:release-date: 2017-07-30 - -Partial activity API endpoint - -====== -2.9.3 -====== -:release-date: 2017-07-20 - -Use Readme.md content as package 
long description - -====== -2.9.2 -====== -:release-date: 2017-07-20 - -Fixed deserialization problem with datetime objects with zeroed microseconds -Support newer versions of the pyJWT lib - - -====== -2.9.1 -====== -:release-date: 2017-07-18 - -Renamed client.get_activities' foreign_id_time param to foreign_id_times - - -====== -2.9.0 -====== -:release-date: 2017-07-05 - -Add support for get activity API endpoint - -====== -2.8.1 -====== -:release-date: 2017-12-21 - -Fixes a regression with embedded httpsig and Python 3 - -====== -2.8.0 -====== -:release-date: 2017-12-21 - -Fixes install issues on Windows - -* Bundle http-sig library -* Use pycryptodomex instead of the discontinued pycrypto library - -====== -2.7.0 -====== -:release-date: 2017-12-14 - -* All client methods that make requests will return the response - -2.6.2 -===== -:release-date 2017-12-08 - -Consolidate API URL generation across API, Collections and Personalization services - -2.6.0 -===== -:release-date 2017-12-08 - -Support the new collections endpoint and flexible get requests for personalization - -2.5.0 -====== -:release-date: 2017-10-19 - -* Use new .com domain for API and Analytics - -2.4.0 -====== -:release-date: 2017-08-31 - -* Added support for To target update endpoint - -2.3.11 -====== -:release-date: 2017-05-22 - -* Added support for Python 2.6.9 and downgrade to requests 2.2.1 - - -2.3.9 -========== -:release-date: 2016-12-20 - -* Fix errors_from_fields function so it displays the extra data returned by the - server about InputException errors. - - -2.3.8 -===== -:release-date: 2016-06-09 - -* Add support for keep_history on unfollow - -2.3.7 -===== -:release-date: 2016-06-02 - -* Add HTTP Signature auth method (for application auth resources) -* Add support for follow_many batch operation -* Add support for add_to_many batch operation -* Decode JWT from bytes to UTF-8 -* Skip add_activities API call if activity_list is empty -* Fix feed group and id validation, dashes are now allowed - -2.3.5 -===== -:release-date: 2015-10-07 - -* Added support for activity update - - -2.3.3 -===== -:release-date: 2015-10-07 - -* Added support for creating redirect urls - - -2.3.0 -===== -:release-date: 2015-06-11 - -* Added support for read-only tokens - -2.1.4 -===== -:release-date: 2015-01-14 - -* Added support for extra data for follow actions - -2.1.3 -===== -:release-date: 2015-01-05 - -* Bugfix, mark_seen and mark_read now work - -2.1.0 -===== -:release-date: 2014-12-19 - -* Added location support to reduce latency - -2.0.1 -===== -:release-date: 2014-11-18 - -* Additional validation on feed_slug and user_id - -2.0.0 -===== -:release-date: 2014-11-10 - -* Breaking change: New style feed syntax, client.feed('user', '1') instead of client.feed('user:3') -* Breaking change: New style follow syntax, feed.follow('user', 3) -* API versioning support -* Configurable timeouts -* Python 3 support - - -1.1.1 -===== -:release-date: 2014-09-20 08:00 A.M GMT - -* Add HTTP client retries - -1.1.0 -===== -:release-date: 2014-09-08 08:00 A.M GMT - -* Add support for mark read (notifications feeds) diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..4d3ab9c --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,190 @@ +## 5.0.0 - 2020-09-17 + +* Drop python 3.5 and add 3.9 +* Improve install and CI + +## 4.0.0 - 2020-09-02 + +* Drop old create_user_session_token in favor create_user_token +* Drop python support before 3.4 +* Allow custom data in client.create_jwt_token +* Add kind filter for reactions in enrichment 
+* Add follow stat support +* Move to github actions from travis and improve static analysis +* Update readme for old docs +* Update some crypto dependencies + +## 3.5.1 - 2020-06-08 + +* Handle warning in JWT decode regarding missing algorithm + +## 3.5.0 - 2020-06-08 + +* Add enrichment support to direct activity get + +## 3.4.0 - 2020-05-11 + +* Expose target_feeds_extra_data to add extra data to activities from reactions + +## 3.3.0 - 2020-05-04 + +* Add batch unfollow support + +## 3.2.1 - 2020-03-17 + +* Set timezone as utc in serialization hooks + +## 3.2.0 - 2020-03-17 + +* Add open graph scrape support +* Update python support (drop 2.6, add 3.8) +* Fixes in docs for collections and personalization + +## 3.1.1 - 2019-11-07 + +* Bump crypto deps + +## 3.1.0 - 2018-05-24 + +* Batch partial update + +## 3.0.2 - 2018-05-24 + +* Fixes for filtering by reactions by kind + +## 3.0.1 - 2018-12-04 + +* Add short-hand version for collections.create_reference() + +## 3.0.0 - 2018-12-03 + +* Add support for reactions +* Add support for users +* Removed HTTP Signatures based auth +* Use JWT auth for everything +* Add feed.get enrichment params + +## 2.12.0 - 2018-10-08 + +* Add user-session-token support + +## 2.11.0 - 2017-08-23 + +* Add collection helpers to create refs + +## 2.10.0 - 2017-07-30 + +* Partial activity API endpoint + +## 2.9.3 - 2017-07-20 + +* Use Readme.md content as package long description + +## 2.9.2 - 2017-07-20 + +* Fixed deserialization problem with datetime objects with zeroed microseconds +* Support newer versions of the pyJWT lib + +## 2.9.1 - 2017-07-18 + +Renamed client.get_activities' foreign_id_time param to foreign_id_times + +## 2.9.0 - 2017-07-05 + +* Add support for get activity API endpoint + +## 2.8.1 - 2017-12-21 + +* Fixes a regression with embedded httpsig and Python 3 + +## 2.8.0 - 2017-12-21 + +* Fixes install issues on Windows +* Bundle http-sig library +* Use pycryptodomex instead of the discontinued pycrypto library + +## 2.7.0 - 2017-12-14 + +* All client methods that make requests will return the response + +## 2.6.2 - 2017-12-08 + +* Consolidate API URL generation across API, Collections and Personalization services + +## 2.6.0 - 2017-12-08 + +Support the new collections endpoint and flexible get requests for personalization + +## 2.5.0 - 2017-10-19 + +* Use new .com domain for API and Analytics + +## 2.4.0 - 2017-08-31 + +* Added support for To target update endpoint + +## 2.3.11 - 2017-05-22 + +* Added support for Python 2.6.9 and downgrade to requests 2.2.1 + +## 2.3.9 - 2016-12-20 + +* Fix errors_from_fields function so it displays the extra data returned by the + server about InputException errors. 
+ +## 2.3.8 - 2016-06-09 + +* Add support for keep_history on unfollow + +## 2.3.7 - 2016-06-02 + +* Add HTTP Signature auth method (for application auth resources) +* Add support for follow_many batch operation +* Add support for add_to_many batch operation +* Decode JWT from bytes to UTF-8 +* Skip add_activities API call if activity_list is empty +* Fix feed group and id validation, dashes are now allowed + +## 2.3.5 - 2015-10-07 + +* Added support for activity update + +## 2.3.3 - 2015-10-07 + +* Added support for creating redirect urls + +## 2.3.0 - 2015-06-11 + +* Added support for read-only tokens + +## 2.1.4 - 2015-01-14 + +* Added support for extra data for follow actions + +## 2.1.3 - 2015-01-05 + +* Bugfix, mark_seen and mark_read now work + +## 2.1.0 - 2014-12-19 + +* Added location support to reduce latency + +## 2.0.1 - 2014-11-18 + +* Additional validation on feed_slug and user_id + +## 2.0.0 - 2014-11-10 + +* Breaking change: New style feed syntax, client.feed('user', '1') instead of client.feed('user:3') +* Breaking change: New style follow syntax, feed.follow('user', 3) +* API versioning support +* Configurable timeouts +* Python 3 support + +## 1.1.1 - 2014-09-20 + +* Add HTTP client retries + +## 1.1.0 -2014-09-08 + +* Add support for mark read (notifications feeds) diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..37be744 --- /dev/null +++ b/Makefile @@ -0,0 +1,22 @@ +STREAM_KEY ?= NOT_EXIST +STREAM_SECRET ?= NOT_EXIST + +# These targets are not files +.PHONY: help check test lint lint-fix + +help: ## Display this help message + @echo "Please use \`make \` where is one of" + @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; \ + {printf "\033[36m%-40s\033[0m %s\n", $$1, $$2}' + +lint: ## Run linters + black --check stream + flake8 --ignore=E501,E225,W293,W503,F401 stream + +lint-fix: + black stream + +test: ## Run tests + STREAM_KEY=$(STREAM_KEY) STREAM_SECRET=$(STREAM_SECRET) python setup.py test + +check: lint test ## Run linters + tests diff --git a/README.md b/README.md index 330dd2d..4c3a40b 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ stream-python ============= -[![build](https://github.com/GetStream/stream-python/workflows/build/badge.svg)](https://github.com/GetStream/stream-python/actions) [![codecov](https://codecov.io/gh/GetStream/stream-python/branch/master/graph/badge.svg)](https://codecov.io/gh/GetStream/stream-python) [![PyPI version](https://badge.fury.io/py/stream-python.svg)](http://badge.fury.io/py/stream-python) +[![build](https://github.com/GetStream/stream-python/workflows/build/badge.svg)](https://github.com/GetStream/stream-python/actions) [![codecov](https://codecov.io/gh/GetStream/stream-python/branch/master/graph/badge.svg)](https://codecov.io/gh/GetStream/stream-python) [![PyPI version](https://badge.fury.io/py/stream-python.svg)](http://badge.fury.io/py/stream-python) ![PyPI - Python Version](https://img.shields.io/pypi/pyversions/stream-python.svg) [stream-python](https://github.com/GetStream/stream-python) is the official Python client for [Stream](https://getstream.io/), a web service for building scalable newsfeeds and activity streams. @@ -11,10 +11,6 @@ You can sign up for a Stream account at https://getstream.io/get_started. 
### Installation -stream-python supports: - -- Python (3.5, 3.6, 3.7, 3.8) - #### Install from Pypi ```bash diff --git a/setup.py b/setup.py index 702f9bf..d596675 100644 --- a/setup.py +++ b/setup.py @@ -7,9 +7,7 @@ import sys tests_require = ["pytest==3.2.5", "unittest2", "pytest-cov", "python-dateutil"] -ci_require = ["flake8", "codecov"] -if sys.version_info >= (3, 6, 0): - ci_require.append("black") +ci_require = ["black", "flake8", "pytest-cov"] long_description = open("README.md", "r").read() @@ -61,10 +59,10 @@ def run_tests(self): "License :: OSI Approved :: BSD License", "Natural Language :: English", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", "Topic :: Software Development :: Libraries :: Python Modules", ], ) diff --git a/stream/__init__.py b/stream/__init__.py index 7e95245..7191563 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = "Copyright 2014, Stream.io, Inc" __credits__ = ["Thierry Schellenbach, mellowmorning.com, @tschellenbach"] __license__ = "BSD-3-Clause" -__version__ = "4.0.0" +__version__ = "5.0.0" __maintainer__ = "Thierry Schellenbach" __email__ = "support@getstream.io" __status__ = "Production" From b8186c842938157b605093ac5afd43af529e2758 Mon Sep 17 00:00:00 2001 From: Sha Date: Mon, 18 Jan 2021 17:45:57 +0800 Subject: [PATCH 179/208] Bump pyjwt to 2.x (#127) --- setup.py | 4 ++-- stream/client.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/setup.py b/setup.py index d596675..a0835e3 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ from stream import __version__, __maintainer__, __email__, __license__ import sys -tests_require = ["pytest==3.2.5", "unittest2", "pytest-cov", "python-dateutil"] +tests_require = ["pytest", "unittest2", "pytest-cov", "python-dateutil"] ci_require = ["black", "flake8", "pytest-cov"] long_description = open("README.md", "r").read() @@ -14,7 +14,7 @@ install_requires = [ "pycryptodomex>=3.8.1,<4", "requests>=2.3.0,<3", - "pyjwt>=1.3.0,<1.8.0", + "pyjwt>=2.0.0,<3", "pytz>=2019.3", ] diff --git a/stream/client.py b/stream/client.py index 9c55732..b79228b 100644 --- a/stream/client.py +++ b/stream/client.py @@ -181,7 +181,7 @@ def create_user_token(self, user_id, **extra_data): payload = {"user_id": user_id} for k, v in extra_data.items(): payload[k] = v - return jwt.encode(payload, self.api_secret, algorithm="HS256").decode("utf-8") + return jwt.encode(payload, self.api_secret, algorithm="HS256") def create_jwt_token(self, resource, action, feed_id=None, user_id=None, **params): """ @@ -193,7 +193,7 @@ def create_jwt_token(self, resource, action, feed_id=None, user_id=None, **param payload["feed_id"] = feed_id if user_id is not None: payload["user_id"] = user_id - return jwt.encode(payload, self.api_secret).decode("utf-8") + return jwt.encode(payload, self.api_secret, algorithm="HS256") def _make_request( self, From 8fb1b8e91982dcb23146d0a0d23833ec02ac668a Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Fri, 22 Jan 2021 18:18:08 +0100 Subject: [PATCH 180/208] Release v5.0.1 --- CHANGELOG.md | 4 ++++ stream/__init__.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4d3ab9c..ac132f5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,7 @@ +## 5.0.1 - 2021-01-22 + +* Bump pyjwt to 2.x + ## 5.0.0 - 2020-09-17 * 
Drop python 3.5 and add 3.9 diff --git a/stream/__init__.py b/stream/__init__.py index 7191563..023e477 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = "Copyright 2014, Stream.io, Inc" __credits__ = ["Thierry Schellenbach, mellowmorning.com, @tschellenbach"] __license__ = "BSD-3-Clause" -__version__ = "5.0.0" +__version__ = "5.0.1" __maintainer__ = "Thierry Schellenbach" __email__ = "support@getstream.io" __status__ = "Production" From 6dee5be69445f44e579925a212ffd9584cd41112 Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Fri, 16 Apr 2021 10:25:17 +0200 Subject: [PATCH 181/208] Drop codecov --- .github/workflows/ci.yml | 5 ----- README.md | 2 +- 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index cfa526e..4448d6c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -35,8 +35,3 @@ jobs: run: | python setup.py install make test - - - name: "Upload coverage to Codecov" - uses: codecov/codecov-action@v1 - with: - fail_ci_if_error: true diff --git a/README.md b/README.md index 4c3a40b..129660c 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ stream-python ============= -[![build](https://github.com/GetStream/stream-python/workflows/build/badge.svg)](https://github.com/GetStream/stream-python/actions) [![codecov](https://codecov.io/gh/GetStream/stream-python/branch/master/graph/badge.svg)](https://codecov.io/gh/GetStream/stream-python) [![PyPI version](https://badge.fury.io/py/stream-python.svg)](http://badge.fury.io/py/stream-python) ![PyPI - Python Version](https://img.shields.io/pypi/pyversions/stream-python.svg) +[![build](https://github.com/GetStream/stream-python/workflows/build/badge.svg)](https://github.com/GetStream/stream-python/actions) [![PyPI version](https://badge.fury.io/py/stream-python.svg)](http://badge.fury.io/py/stream-python) ![PyPI - Python Version](https://img.shields.io/pypi/pyversions/stream-python.svg) [stream-python](https://github.com/GetStream/stream-python) is the official Python client for [Stream](https://getstream.io/), a web service for building scalable newsfeeds and activity streams. From 187a692091c0f9fb591ee40b24e78ca8773fd9f5 Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Fri, 16 Apr 2021 10:26:29 +0200 Subject: [PATCH 182/208] Update gitignore for vscode and envrc --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index dde5bf2..844f569 100644 --- a/.gitignore +++ b/.gitignore @@ -55,5 +55,7 @@ docs/_build/ secrets.*sh .idea +.vscode/ .venv +.envrc From 80c6b97d0c2d87c7477f8ad8ea67647e93bd5cbe Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Fri, 16 Apr 2021 13:56:58 +0200 Subject: [PATCH 183/208] Update for license --- LICENSE | 23 +---------------------- README.md | 4 +--- 2 files changed, 2 insertions(+), 25 deletions(-) diff --git a/LICENSE b/LICENSE index 73bc810..e97bd1f 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,4 @@ -Copyright (c) 2014-2017 Stream.io Inc, and individual contributors. +Copyright (c) 2014-2021, Stream.io Inc, and individual contributors. All rights reserved. @@ -25,24 +25,3 @@ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSE THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- - httpsig - - https://github.com/ahknight/httpsig - - Copyright (c) 2014 Adam Knight - Copyright (c) 2012 Adam T. Lindsay (original author) - - Permission is hereby granted, free of charge, to any person obtaining a copy of this - software and associated documentation files (the "Software"), to deal in the Software without - restriction, including without limitation the rights to use, copy, modify, merge, publish, - distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the - Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or - substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING - BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND - NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, - DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/README.md b/README.md index 129660c..3259fdc 100644 --- a/README.md +++ b/README.md @@ -184,6 +184,4 @@ If unsure you can also test using the Pypi test servers `twine upload --reposito ### Copyright and License Information -Copyright (c) 2014-2017 Stream.io Inc, and individual contributors. All rights reserved. - -See the file "LICENSE" for information on the history of this software, terms & conditions for usage, and a DISCLAIMER OF ALL WARRANTIES. +Project is licensed under the [BSD 3-Clause](LICENSE). From 5cce72ab7732b33a5eb892bc6756269bc0efaacd Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Fri, 16 Apr 2021 15:35:35 +0200 Subject: [PATCH 184/208] Add codeowners --- .github/CODEOWNERS | 1 + 1 file changed, 1 insertion(+) create mode 100644 .github/CODEOWNERS diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000..02b87e1 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1 @@ +* @ferhatelmas From 6d45fa5a157167931a601d15b61c088662796026 Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Fri, 16 Apr 2021 18:14:13 +0200 Subject: [PATCH 185/208] Add analytics support (#128) --- stream/client.py | 76 +++++++++++++++++++++++++++++++++++-- stream/tests/test_client.py | 61 +++++++++++++++++++++++++++++ 2 files changed, 134 insertions(+), 3 deletions(-) diff --git a/stream/client.py b/stream/client.py index b79228b..7eb4be8 100644 --- a/stream/client.py +++ b/stream/client.py @@ -133,9 +133,9 @@ def get_default_header(self): def get_full_url(self, service_name, relative_url): if self.api_location: - hostname = "%s-%s.%s" % ( + hostname = "%s%s.%s" % ( self.api_location, - service_name, + "" if service_name == "analytics" else f"-{service_name}", self.base_domain_name, ) elif service_name: @@ -472,7 +472,7 @@ def create_redirect_url(self, target_url, user_id, events): params = dict(auth_type="jwt", authorization=auth_token, url=target_url) params["api_key"] = self.api_key params["events"] = json.dumps(events) - url = self.base_analytics_url + "redirect/" + url = f"{self.base_analytics_url}redirect/" # we get the url from the prepare request, this skips issues with # python's urlencode implementation request = Request("GET", url, params=params) @@ -481,6 +481,76 @@ def create_redirect_url(self, target_url, user_id, events): Request("GET", target_url).prepare() return 
prepared_request.url + def track_engagements(self, engagements): + """ + Creates a list of engagements + + ;param engagements: Slice of engagements to create. + + eg. + [ + { + "content": "1", + "label": "click", + "features": [ + {"group": "topic", "value": "js"}, + {"group": "user", "value": "tommaso"}, + ], + "user_data": "tommaso", + }, + { + "content": "2", + "label": "click", + "features": [ + {"group": "topic", "value": "go"}, + {"group": "user", "value": "tommaso"}, + ], + "user_data": {"id": "486892", "alias": "Julian"}, + }, + { + "content": "3", + "label": "click", + "features": [{"group": "topic", "value": "go"}], + "user_data": {"id": "tommaso", "alias": "tommaso"}, + }, + ] + """ + + auth_token = self.create_jwt_token("*", "*", feed_id="*") + self.post( + "engagement/", + auth_token, + data={"content_list": engagements}, + service_name="analytics", + ) + + def track_impressions(self, impressions): + """ + Creates a list of impressions + + ;param impressions: Slice of impressions to create. + + eg. + [ + { + "content_list": ["1", "2", "3"], + "features": [ + {"group": "topic", "value": "js"}, + {"group": "user", "value": "tommaso"}, + ], + "user_data": {"id": "tommaso", "alias": "tommaso"}, + }, + { + "content_list": ["2", "3", "5"], + "features": [{"group": "topic", "value": "js"}], + "user_data": {"id": "486892", "alias": "Julian"}, + }, + ] + """ + + auth_token = self.create_jwt_token("*", "*", feed_id="*") + self.post("impression/", auth_token, data=impressions, service_name="analytics") + def og(self, target_url): """ Retrieve open graph information from a URL which you can diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index 7f39094..ae69175 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -126,6 +126,20 @@ def test_collections_url(self): feed_url, "https://qa-api.stream-io-api.com/api/v1.0/meta/" ) + def test_analytics_url(self): + feed_url = client.get_full_url( + relative_url="engagement/", service_name="analytics" + ) + + if self.local_tests: + self.assertEqual( + feed_url, "http://localhost:8000/analytics/v1.0/engagement/" + ) + else: + self.assertEqual( + feed_url, "https://qa.stream-io-api.com/analytics/v1.0/engagement/" + ) + def test_personalization_url(self): feed_url = client.get_full_url( relative_url="recommended", service_name="personalization" @@ -1616,6 +1630,53 @@ def test_feed_enrichment_reaction_counts(self): enriched_response = f.get(reactions={"counts": True}) self.assertEqual(enriched_response["results"][0]["reaction_counts"]["like"], 1) + def test_track_engagements(self): + engagements = [ + { + "content": "1", + "label": "click", + "features": [ + {"group": "topic", "value": "js"}, + {"group": "user", "value": "tommaso"}, + ], + "user_data": "tommaso", + }, + { + "content": "2", + "label": "click", + "features": [ + {"group": "topic", "value": "go"}, + {"group": "user", "value": "tommaso"}, + ], + "user_data": {"id": "486892", "alias": "Julian"}, + }, + { + "content": "3", + "label": "click", + "features": [{"group": "topic", "value": "go"}], + "user_data": {"id": "tommaso", "alias": "tommaso"}, + }, + ] + client.track_engagements(engagements) + + def test_track_impressions(self): + impressions = [ + { + "content_list": ["1", "2", "3"], + "features": [ + {"group": "topic", "value": "js"}, + {"group": "user", "value": "tommaso"}, + ], + "user_data": {"id": "tommaso", "alias": "tommaso"}, + }, + { + "content_list": ["2", "3", "5"], + "features": [{"group": "topic", "value": "js"}], + "user_data": 
{"id": "486892", "alias": "Julian"}, + }, + ] + client.track_impressions(impressions) + def test_og(self): response = client.og("https://google.com") self.assertTrue("title" in response) From aa3a5f58cd9f19c89b3d6cf1eb8ef25306dc74fe Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Fri, 16 Apr 2021 18:17:30 +0200 Subject: [PATCH 186/208] Release v5.1.0 --- CHANGELOG.md | 5 +++++ stream/__init__.py | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ac132f5..0e79ea1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,8 @@ +## 5.1.0 - 2021-04-16 + +* Add analytics support for `track_engagments` and `track_impressions` +* Update license to BSD-3 canonical description + ## 5.0.1 - 2021-01-22 * Bump pyjwt to 2.x diff --git a/stream/__init__.py b/stream/__init__.py index 023e477..cc1626e 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = "Copyright 2014, Stream.io, Inc" __credits__ = ["Thierry Schellenbach, mellowmorning.com, @tschellenbach"] __license__ = "BSD-3-Clause" -__version__ = "5.0.1" +__version__ = "5.1.0" __maintainer__ = "Thierry Schellenbach" __email__ = "support@getstream.io" __status__ = "Production" From 30336eda18b7590d9f2c4909e0c24f8c40624757 Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Fri, 16 Apr 2021 18:19:07 +0200 Subject: [PATCH 187/208] Fix typo in changelog --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0e79ea1..64050b1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ ## 5.1.0 - 2021-04-16 -* Add analytics support for `track_engagments` and `track_impressions` +* Add analytics support for `track_engagements` and `track_impressions` * Update license to BSD-3 canonical description ## 5.0.1 - 2021-01-22 From b02bd20f7e8bbdf0e7b7972581d7df1a4f53c8ff Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Tue, 20 Apr 2021 14:16:40 +0200 Subject: [PATCH 188/208] Drop py specific docs --- docs/Makefile | 177 ------------------------- docs/conf.py | 340 ------------------------------------------------ docs/index.rst | 23 ---- docs/make.bat | 242 ---------------------------------- docs/stream.rst | 62 --------- 5 files changed, 844 deletions(-) delete mode 100644 docs/Makefile delete mode 100644 docs/conf.py delete mode 100644 docs/index.rst delete mode 100644 docs/make.bat delete mode 100644 docs/stream.rst diff --git a/docs/Makefile b/docs/Makefile deleted file mode 100644 index c34f348..0000000 --- a/docs/Makefile +++ /dev/null @@ -1,177 +0,0 @@ -# Makefile for Sphinx documentation -# - -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = sphinx-build -PAPER = -BUILDDIR = _build - -# User-friendly check for sphinx-build -ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) -$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) -endif - -# Internal variables. -PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . -# the i18n builder cannot share the environment and doctrees with the others -I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
- -.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext - -help: - @echo "Please use \`make ' where is one of" - @echo " html to make standalone HTML files" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " texinfo to make Texinfo files" - @echo " info to make Texinfo files and run them through makeinfo" - @echo " gettext to make PO message catalogs" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " xml to make Docutils-native XML files" - @echo " pseudoxml to make pseudoxml-XML files for display purposes" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" - -clean: - rm -rf $(BUILDDIR)/* - -html: - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." - -dirhtml: - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." - -singlehtml: - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." - -pickle: - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files." - -json: - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files." - -htmlhelp: - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp." - -qthelp: - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/stream-python.qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/stream-python.qhc" - -devhelp: - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." - @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/stream-python" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/stream-python" - @echo "# devhelp" - -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." - -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." 
- -latexpdf: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -latexpdfja: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through platex and dvipdfmx..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." - -texinfo: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo - @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." - @echo "Run \`make' in that directory to run these through makeinfo" \ - "(use \`make info' here to do that automatically)." - -info: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo "Running Texinfo files through makeinfo..." - make -C $(BUILDDIR)/texinfo info - @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." - -gettext: - $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale - @echo - @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." - -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." - -linkcheck: - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." - -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." - -xml: - $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml - @echo - @echo "Build finished. The XML files are in $(BUILDDIR)/xml." - -pseudoxml: - $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml - @echo - @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." diff --git a/docs/conf.py b/docs/conf.py deleted file mode 100644 index 74f0734..0000000 --- a/docs/conf.py +++ /dev/null @@ -1,340 +0,0 @@ -# -*- coding: utf-8 -*- -# -# stream-python documentation build configuration file, created by -# sphinx-quickstart on Tue May 27 16:29:21 2014. -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os - -# on_rtd is whether we are on readthedocs.org -import os -on_rtd = os.environ.get('READTHEDOCS', None) == 'True' - -if not on_rtd: # only import and set the theme if we're building docs locally - import sphinx_rtd_theme - html_theme = 'sphinx_rtd_theme' - html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. 
-#sys.path.insert(0, os.path.abspath('.')) - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.viewcode', -] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix of source filenames. -source_suffix = '.rst' - -# The encoding of source files. -#source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -project = u'stream-python' -copyright = u'2014, Stream.io, Inc' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = '0.1.0' -# The full version, including alpha/beta/rc tags. -release = '0.1.0' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -#language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ['_build'] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -#show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -#keep_warnings = False - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -#html_theme = 'default' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -#html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -#html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -#html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. 
-#html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -#html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -#html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - -# If false, no module index is generated. -#html_domain_indices = True - -# If false, no index is generated. -#html_use_index = True - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None - -# Output file base name for HTML help builder. -htmlhelp_basename = 'stream-pythondoc' - - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - #'papersize': 'letterpaper', - - # The font size ('10pt', '11pt' or '12pt'). - #'pointsize': '10pt', - - # Additional stuff for the LaTeX preamble. - #'preamble': '', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ('index', 'stream-python.tex', u'stream-python Documentation', - u'Thierry Schellenbach', 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# If true, show page references after internal links. -#latex_show_pagerefs = False - -# If true, show URL addresses after external links. -#latex_show_urls = False - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -#latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ('index', 'stream-python', u'stream-python Documentation', - [u'Thierry Schellenbach'], 1) -] - -# If true, show URL addresses after external links. 
-#man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ('index', 'stream-python', u'stream-python Documentation', - u'Thierry Schellenbach', 'stream-python', 'One line description of project.', - 'Miscellaneous'), -] - -# Documents to append as an appendix to all manuals. -#texinfo_appendices = [] - -# If false, no module index is generated. -#texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -#texinfo_no_detailmenu = False - - -# -- Options for Epub output ---------------------------------------------- - -# Bibliographic Dublin Core info. -epub_title = u'stream-python' -epub_author = u'Thierry Schellenbach' -epub_publisher = u'Thierry Schellenbach' -epub_copyright = u'2014, Stream.io, Inc' - -# The basename for the epub file. It defaults to the project name. -#epub_basename = u'stream-python' - -# The HTML theme for the epub output. Since the default themes are not optimized -# for small screen space, using the same theme for HTML and epub output is -# usually not wise. This defaults to 'epub', a theme designed to save visual -# space. -#epub_theme = 'epub' - -# The language of the text. It defaults to the language option -# or en if the language is not set. -#epub_language = '' - -# The scheme of the identifier. Typical schemes are ISBN or URL. -#epub_scheme = '' - -# The unique identifier of the text. This can be a ISBN number -# or the project homepage. -#epub_identifier = '' - -# A unique identification for the text. -#epub_uid = '' - -# A tuple containing the cover image and cover page html template filenames. -#epub_cover = () - -# A sequence of (type, uri, title) tuples for the guide element of content.opf. -#epub_guide = () - -# HTML files that should be inserted before the pages created by sphinx. -# The format is a list of tuples containing the path and title. -#epub_pre_files = [] - -# HTML files shat should be inserted after the pages created by sphinx. -# The format is a list of tuples containing the path and title. -#epub_post_files = [] - -# A list of files that should not be packed into the epub file. -epub_exclude_files = ['search.html'] - -# The depth of the table of contents in toc.ncx. -#epub_tocdepth = 3 - -# Allow duplicate toc entries. -#epub_tocdup = True - -# Choose between 'default' and 'includehidden'. -#epub_tocscope = 'default' - -# Fix unsupported image types using the PIL. -#epub_fix_images = False - -# Scale large images. -#epub_max_image_width = 0 - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -#epub_show_urls = 'inline' - -# If false, no index is generated. -#epub_use_index = True diff --git a/docs/index.rst b/docs/index.rst deleted file mode 100644 index 6c15351..0000000 --- a/docs/index.rst +++ /dev/null @@ -1,23 +0,0 @@ -.. stream-python documentation master file, created by - sphinx-quickstart on Tue May 27 16:29:21 2014. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -Welcome to stream-python's documentation! -========================================= - -Contents: - -.. 
toctree:: - :maxdepth: 3 - - stream - - -Indices and tables -================== - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` - diff --git a/docs/make.bat b/docs/make.bat deleted file mode 100644 index 541fca8..0000000 --- a/docs/make.bat +++ /dev/null @@ -1,242 +0,0 @@ -@ECHO OFF - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build -) -set BUILDDIR=_build -set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . -set I18NSPHINXOPTS=%SPHINXOPTS% . -if NOT "%PAPER%" == "" ( - set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% - set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% -) - -if "%1" == "" goto help - -if "%1" == "help" ( - :help - echo.Please use `make ^` where ^ is one of - echo. html to make standalone HTML files - echo. dirhtml to make HTML files named index.html in directories - echo. singlehtml to make a single large HTML file - echo. pickle to make pickle files - echo. json to make JSON files - echo. htmlhelp to make HTML files and a HTML help project - echo. qthelp to make HTML files and a qthelp project - echo. devhelp to make HTML files and a Devhelp project - echo. epub to make an epub - echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter - echo. text to make text files - echo. man to make manual pages - echo. texinfo to make Texinfo files - echo. gettext to make PO message catalogs - echo. changes to make an overview over all changed/added/deprecated items - echo. xml to make Docutils-native XML files - echo. pseudoxml to make pseudoxml-XML files for display purposes - echo. linkcheck to check all external links for integrity - echo. doctest to run all doctests embedded in the documentation if enabled - goto end -) - -if "%1" == "clean" ( - for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i - del /q /s %BUILDDIR%\* - goto end -) - - -%SPHINXBUILD% 2> nul -if errorlevel 9009 ( - echo. - echo.The 'sphinx-build' command was not found. Make sure you have Sphinx - echo.installed, then set the SPHINXBUILD environment variable to point - echo.to the full path of the 'sphinx-build' executable. Alternatively you - echo.may add the Sphinx directory to PATH. - echo. - echo.If you don't have Sphinx installed, grab it from - echo.http://sphinx-doc.org/ - exit /b 1 -) - -if "%1" == "html" ( - %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/html. - goto end -) - -if "%1" == "dirhtml" ( - %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. - goto end -) - -if "%1" == "singlehtml" ( - %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. - goto end -) - -if "%1" == "pickle" ( - %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can process the pickle files. - goto end -) - -if "%1" == "json" ( - %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can process the JSON files. - goto end -) - -if "%1" == "htmlhelp" ( - %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can run HTML Help Workshop with the ^ -.hhp project file in %BUILDDIR%/htmlhelp. 
- goto end -) - -if "%1" == "qthelp" ( - %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can run "qcollectiongenerator" with the ^ -.qhcp project file in %BUILDDIR%/qthelp, like this: - echo.^> qcollectiongenerator %BUILDDIR%\qthelp\stream-python.qhcp - echo.To view the help file: - echo.^> assistant -collectionFile %BUILDDIR%\qthelp\stream-python.ghc - goto end -) - -if "%1" == "devhelp" ( - %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. - goto end -) - -if "%1" == "epub" ( - %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The epub file is in %BUILDDIR%/epub. - goto end -) - -if "%1" == "latex" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "latexpdf" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - cd %BUILDDIR%/latex - make all-pdf - cd %BUILDDIR%/.. - echo. - echo.Build finished; the PDF files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "latexpdfja" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - cd %BUILDDIR%/latex - make all-pdf-ja - cd %BUILDDIR%/.. - echo. - echo.Build finished; the PDF files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "text" ( - %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The text files are in %BUILDDIR%/text. - goto end -) - -if "%1" == "man" ( - %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The manual pages are in %BUILDDIR%/man. - goto end -) - -if "%1" == "texinfo" ( - %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. - goto end -) - -if "%1" == "gettext" ( - %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The message catalogs are in %BUILDDIR%/locale. - goto end -) - -if "%1" == "changes" ( - %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes - if errorlevel 1 exit /b 1 - echo. - echo.The overview file is in %BUILDDIR%/changes. - goto end -) - -if "%1" == "linkcheck" ( - %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck - if errorlevel 1 exit /b 1 - echo. - echo.Link check complete; look for any errors in the above output ^ -or in %BUILDDIR%/linkcheck/output.txt. - goto end -) - -if "%1" == "doctest" ( - %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest - if errorlevel 1 exit /b 1 - echo. - echo.Testing of doctests in the sources finished, look at the ^ -results in %BUILDDIR%/doctest/output.txt. - goto end -) - -if "%1" == "xml" ( - %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The XML files are in %BUILDDIR%/xml. - goto end -) - -if "%1" == "pseudoxml" ( - %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 
- goto end -) - -:end diff --git a/docs/stream.rst b/docs/stream.rst deleted file mode 100644 index 5779489..0000000 --- a/docs/stream.rst +++ /dev/null @@ -1,62 +0,0 @@ -stream package -============== - -Submodules ----------- - -stream.client module --------------------- - -.. automodule:: stream.client - :members: - :undoc-members: - :show-inheritance: - -stream.exceptions module ------------------------- - -.. automodule:: stream.exceptions - :members: - :undoc-members: - :show-inheritance: - -stream.feed module ------------------- - -.. automodule:: stream.feed - :members: - :undoc-members: - :show-inheritance: - -stream.signing module ---------------------- - -.. automodule:: stream.signing - :members: - :undoc-members: - :show-inheritance: - -stream.tests module -------------------- - -.. automodule:: stream.tests - :members: - :undoc-members: - :show-inheritance: - -stream.utils module -------------------- - -.. automodule:: stream.utils - :members: - :undoc-members: - :show-inheritance: - - -Module contents ---------------- - -.. automodule:: stream - :members: - :undoc-members: - :show-inheritance: From c9621bc89d10e76f0b4d002c8edcb82e6ee2b404 Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Tue, 20 Apr 2021 14:16:56 +0200 Subject: [PATCH 189/208] Add hiring note --- README.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/README.md b/README.md index 3259fdc..a94f6ed 100644 --- a/README.md +++ b/README.md @@ -185,3 +185,10 @@ If unsure you can also test using the Pypi test servers `twine upload --reposito ### Copyright and License Information Project is licensed under the [BSD 3-Clause](LICENSE). + +## We are hiring! + +We've recently closed a [$38 million Series B funding round](https://techcrunch.com/2021/03/04/stream-raises-38m-as-its-chat-and-activity-feed-apis-power-communications-for-1b-users/) and we keep actively growing. +Our APIs are used by more than a billion end-users, and you'll have a chance to make a huge impact on the product within a team of the strongest engineers all over the world. + +Check out our current openings and apply via [Stream's website](https://getstream.io/team/#jobs). 
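The deleted `docs/stream.rst` above enumerated the package's public modules (`stream.client`, `stream.exceptions`, `stream.feed`, `stream.signing`, `stream.utils`) via Sphinx autodoc stubs. A minimal sketch of how that surface is typically exercised, mirroring the README examples that appear later in this series — the API key, secret, and ids below are placeholders:

```python
import stream
from stream.exceptions import ApiKeyException  # API errors surface as typed exceptions

# Placeholder credentials; real keys come from the Stream dashboard.
client = stream.connect("YOUR_API_KEY", "API_KEY_SECRET")

# stream.feed: feed objects are created from the client by slug and user id.
user_feed = client.feed("user", "1")

# Add an activity, then read the feed back.
activity = {"actor": 1, "verb": "tweet", "object": 1, "foreign_id": "tweet:1"}
user_feed.add_activity(activity)
recent = user_feed.get(limit=5)
```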
From 142b5b43c0a60a96c36f25b6fc5a224dd2e418cc Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Wed, 21 Apr 2021 13:03:52 +0200 Subject: [PATCH 190/208] Use main --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4448d6c..574b2d7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -2,7 +2,7 @@ name: build on: push: branches: - - 'master' + - 'main' pull_request: jobs: From d218179565a83d86dc0bb323cb9e6b789bf72cbb Mon Sep 17 00:00:00 2001 From: Siddharth <68214682+SidAtNSIO@users.noreply.github.com> Date: Wed, 19 Jan 2022 04:28:50 +0530 Subject: [PATCH 191/208] Issue #132 Fixed (#133) * Issue #132 Fixed * Backword compatibility support added for token * Test case added for Feed constructor --- stream/feed.py | 2 +- stream/tests/test_client.py | 11 +++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/stream/feed.py b/stream/feed.py index 5a17340..32040e7 100644 --- a/stream/feed.py +++ b/stream/feed.py @@ -20,7 +20,7 @@ def __init__(self, client, feed_slug, user_id, token): self.slug = feed_slug self.user_id = str(user_id) self.id = "%s:%s" % (feed_slug, user_id) - self.token = token + self.token = token.decode('utf-8') if isinstance(token, bytes) else token self.feed_url = "feed/%s/" % self.id.replace(":", "/") self.enriched_feed_url = "enrich/feed/%s/" % self.id.replace(":", "/") diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index ae69175..bf0db9b 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -17,6 +17,7 @@ import stream from stream import serializer from stream.exceptions import ApiKeyException, InputException +from stream.feed import Feed try: from unittest.case import TestCase @@ -1702,3 +1703,13 @@ def test_follow_stats(self): )["results"] self.assertEqual(response["following"]["count"], 0) self.assertEqual(response["followers"]["count"], 1) + + def test_token_type(self): + """ + test to check whether token is a byte or string + """ + with_bytes = Feed(client, "user", "1", b"token") + self.assertEqual(with_bytes.token, "token") + + with_str = Feed(client, "user", "1", "token") + self.assertEqual(with_str.token, "token") From ff10fcc155166301a7660a696a89c28daed806d7 Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Tue, 18 Jan 2022 23:59:49 +0100 Subject: [PATCH 192/208] Fix lint --- stream/feed.py | 2 +- stream/tests/test_client.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/stream/feed.py b/stream/feed.py index 32040e7..3ef9317 100644 --- a/stream/feed.py +++ b/stream/feed.py @@ -20,7 +20,7 @@ def __init__(self, client, feed_slug, user_id, token): self.slug = feed_slug self.user_id = str(user_id) self.id = "%s:%s" % (feed_slug, user_id) - self.token = token.decode('utf-8') if isinstance(token, bytes) else token + self.token = token.decode("utf-8") if isinstance(token, bytes) else token self.feed_url = "feed/%s/" % self.id.replace(":", "/") self.enriched_feed_url = "enrich/feed/%s/" % self.id.replace(":", "/") diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index bf0db9b..a06af67 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -1704,9 +1704,9 @@ def test_follow_stats(self): self.assertEqual(response["following"]["count"], 0) self.assertEqual(response["followers"]["count"], 1) - def test_token_type(self): + def test_token_type(self): """ - test to check whether token is a byte or string + test to check whether 
token is a byte or string """ with_bytes = Feed(client, "user", "1", b"token") self.assertEqual(with_bytes.token, "token") From e12186112058c1659e8304fceea148f2ff5b36b9 Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Wed, 19 Jan 2022 00:01:33 +0100 Subject: [PATCH 193/208] Release v5.1.1 --- CHANGELOG.md | 4 ++++ stream/__init__.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 64050b1..97e250e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,7 @@ +## 5.1.1 - 2022-01-18 + +* Handle backward compatible pyjwt 1.x support for token generation + ## 5.1.0 - 2021-04-16 * Add analytics support for `track_engagements` and `track_impressions` diff --git a/stream/__init__.py b/stream/__init__.py index cc1626e..87ad6f1 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = "Copyright 2014, Stream.io, Inc" __credits__ = ["Thierry Schellenbach, mellowmorning.com, @tschellenbach"] __license__ = "BSD-3-Clause" -__version__ = "5.1.0" +__version__ = "5.1.1" __maintainer__ = "Thierry Schellenbach" __email__ = "support@getstream.io" __status__ = "Production" From c65a9a9099e576b448be49dc5efbb4a672c691f8 Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Wed, 19 Jan 2022 00:06:33 +0100 Subject: [PATCH 194/208] Add Peter into code owners --- .github/CODEOWNERS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 02b87e1..8c2b60b 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1 +1 @@ -* @ferhatelmas +* @ferhatelmas @peterdeme From 87ea6b295bca0149ed342db3a1765bebb3219473 Mon Sep 17 00:00:00 2001 From: Peter Deme Date: Thu, 21 Apr 2022 08:05:44 +0200 Subject: [PATCH 195/208] chore(maintenance): general maintencance and ci flow (#137) --- .github/workflows/ci.yml | 29 ++++++---- .github/workflows/initiate_release.yml | 47 ++++++++++++++++ .github/workflows/release.yml | 48 +++++++++++++++++ .github/workflows/reviewdog.yml | 30 +++++++++++ .gitignore | 2 + .versionrc.js | 16 ++++++ Makefile | 6 ++- README.md | 75 ++++++++++---------------- assets/logo.svg | 16 ++++++ scripts/get_changelog_diff.js | 26 +++++++++ setup.py | 38 +++++-------- stream/__init__.py | 2 +- 12 files changed, 251 insertions(+), 84 deletions(-) create mode 100644 .github/workflows/initiate_release.yml create mode 100644 .github/workflows/release.yml create mode 100644 .github/workflows/reviewdog.yml create mode 100644 .versionrc.js create mode 100644 assets/logo.svg create mode 100644 scripts/get_changelog_diff.js diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 574b2d7..5b44d1a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -5,33 +5,40 @@ on: - 'main' pull_request: +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref }} + cancel-in-progress: true + jobs: build: + name: 🧪 Test & lint runs-on: ubuntu-latest strategy: matrix: - python: [3.6, 3.7, 3.8, 3.9] + python: ["3.7", "3.8", "3.9", "3.10"] steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 + - uses: actions/checkout@v3 with: - python-version: ${{ matrix.python }} + fetch-depth: 0 # gives the commit linter access to previous commits + + - name: Commit message linter + if: ${{ matrix.python == '3.7' }} + uses: wagoid/commitlint-github-action@v4 - - name: Add pip bin to PATH - run: | - echo "/home/runner/.local/bin" >> $GITHUB_PATH + - uses: actions/setup-python@v3 + with: + python-version: ${{ matrix.python }} - name: Install deps with ${{ 
matrix.python }} run: pip install ".[test, ci]" - name: Lint with ${{ matrix.python }} - if: ${{ matrix.python == '3.8' }} + if: ${{ matrix.python == '3.7' }} run: make lint - name: Install, test and code coverage with ${{ matrix.python }} env: STREAM_KEY: ${{ secrets.STREAM_KEY }} STREAM_SECRET: ${{ secrets.STREAM_SECRET }} - run: | - python setup.py install - make test + PYTHONPATH: ${{ github.workspace }} + run: make test diff --git a/.github/workflows/initiate_release.yml b/.github/workflows/initiate_release.yml new file mode 100644 index 0000000..0af41fd --- /dev/null +++ b/.github/workflows/initiate_release.yml @@ -0,0 +1,47 @@ +name: Create release PR + +on: + workflow_dispatch: + inputs: + version: + description: "The new version number with 'v' prefix. Example: v1.40.1" + required: true + +jobs: + init_release: + name: 🚀 Create release PR + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 # gives the changelog generator access to all previous commits + + - name: Update CHANGELOG.md, __pkg__.py and push release branch + env: + VERSION: ${{ github.event.inputs.version }} + run: | + npx --yes standard-version@9.3.2 --release-as "$VERSION" --skip.tag --skip.commit --tag-prefix=v + git config --global user.name 'github-actions' + git config --global user.email 'release@getstream.io' + git checkout -q -b "release-$VERSION" + git commit -am "chore(release): $VERSION" + git push -q -u origin "release-$VERSION" + + - name: Get changelog diff + uses: actions/github-script@v5 + with: + script: | + const get_change_log_diff = require('./scripts/get_changelog_diff.js') + core.exportVariable('CHANGELOG', get_change_log_diff()) + + - name: Open pull request + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + gh pr create \ + -t "chore(release): ${{ github.event.inputs.version }}" \ + -b "# :rocket: ${{ github.event.inputs.version }} + Make sure to use squash & merge when merging! + Once this is merged, another job will kick off automatically and publish the package. 
+ # :memo: Changelog + ${{ env.CHANGELOG }}" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..5edd544 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,48 @@ +name: Release + +on: + pull_request: + types: [closed] + branches: + - main + +jobs: + Release: + name: 🚀 Release + if: github.event.pull_request.merged && startsWith(github.head_ref, 'release-') + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - uses: actions/github-script@v5 + with: + script: | + const get_change_log_diff = require('./scripts/get_changelog_diff.js') + core.exportVariable('CHANGELOG', get_change_log_diff()) + + // Getting the release version from the PR source branch + // Source branch looks like this: release-1.0.0 + const version = context.payload.pull_request.head.ref.split('-')[1] + core.exportVariable('VERSION', version) + + - uses: actions/setup-python@v3 + with: + python-version: "3.10" + + - name: Publish to PyPi + env: + TWINE_USERNAME: "__token__" + TWINE_PASSWORD: "${{ secrets.PYPI_TOKEN }}" + run: | + pip install -q twine==3.7.1 wheel==0.37.1 + python setup.py sdist bdist_wheel + twine upload --non-interactive dist/* + + - name: Create release on GitHub + uses: ncipollo/release-action@v1 + with: + body: ${{ env.CHANGELOG }} + tag: ${{ env.VERSION }} + token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/reviewdog.yml b/.github/workflows/reviewdog.yml new file mode 100644 index 0000000..fc88763 --- /dev/null +++ b/.github/workflows/reviewdog.yml @@ -0,0 +1,30 @@ +name: reviewdog +on: + pull_request: + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref }} + cancel-in-progress: true + +jobs: + reviewdog: + name: 🐶 Reviewdog + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - uses: reviewdog/action-setup@v1 + with: + reviewdog_version: latest + + - uses: actions/setup-python@v3 + with: + python-version: "3.10" + + - name: Install deps + run: pip install ".[ci]" + + - name: Reviewdog + env: + REVIEWDOG_GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: make reviewdog diff --git a/.gitignore b/.gitignore index 844f569..1716919 100644 --- a/.gitignore +++ b/.gitignore @@ -53,6 +53,8 @@ coverage.xml # Sphinx documentation docs/_build/ + +.python-version secrets.*sh .idea .vscode/ diff --git a/.versionrc.js b/.versionrc.js new file mode 100644 index 0000000..6131ae6 --- /dev/null +++ b/.versionrc.js @@ -0,0 +1,16 @@ +const pkgUpdater = { + VERSION_REGEX: /__version__ = "(.+)"/, + + readVersion: function (contents) { + const version = this.VERSION_REGEX.exec(contents)[1]; + return version; + }, + + writeVersion: function (contents, version) { + return contents.replace(this.VERSION_REGEX.exec(contents)[0], `__version__ = "${version}"`); + } +} + +module.exports = { + bumpFiles: [{ filename: './stream/__init__.py', updater: pkgUpdater }], +} diff --git a/Makefile b/Makefile index 37be744..808d80f 100644 --- a/Makefile +++ b/Makefile @@ -17,6 +17,10 @@ lint-fix: black stream test: ## Run tests - STREAM_KEY=$(STREAM_KEY) STREAM_SECRET=$(STREAM_SECRET) python setup.py test + STREAM_KEY=$(STREAM_KEY) STREAM_SECRET=$(STREAM_SECRET) pytest stream/tests check: lint test ## Run linters + tests + +reviewdog: + black --check --diff --quiet stream | reviewdog -f=diff -f.diff.strip=0 -filter-mode="diff_context" -name=black -reporter=github-pr-review + flake8 --ignore=E501,W503 stream | reviewdog -f=flake8 -name=flake8 -reporter=github-pr-review diff --git 
a/README.md b/README.md index a94f6ed..32a052d 100644 --- a/README.md +++ b/README.md @@ -1,27 +1,43 @@ -stream-python -============= +# Official Python SDK for [Stream Feeds](https://getstream.io/activity-feeds/) [![build](https://github.com/GetStream/stream-python/workflows/build/badge.svg)](https://github.com/GetStream/stream-python/actions) [![PyPI version](https://badge.fury.io/py/stream-python.svg)](http://badge.fury.io/py/stream-python) ![PyPI - Python Version](https://img.shields.io/pypi/pyversions/stream-python.svg) -[stream-python](https://github.com/GetStream/stream-python) is the official Python client for [Stream](https://getstream.io/), a web service for building scalable newsfeeds and activity streams. +

+    Official Python API client for Stream Feeds, a web service for building scalable newsfeeds and activity streams.
+    Explore the docs »
+    Django Code Sample · Report Bug · Request Feature

-Note there is also a higher level [Django - Stream integration](https://github.com/getstream/stream-django) library which hooks into the Django ORM. +## 📝 About Stream -You can sign up for a Stream account at https://getstream.io/get_started. +You can sign up for a Stream account at our [Get Started](https://getstream.io/get_started/) page. -### Installation +You can use this library to access feeds API endpoints server-side. + +For the client-side integrations (web and mobile) have a look at the JavaScript, iOS and Android SDK libraries ([docs](https://getstream.io/activity-feeds/)). + +## ⚙️ Installation -#### Install from Pypi ```bash -pip install stream-python +$ pip install stream-python ``` -### Full documentation +## 📚 Full documentation Documentation for this Python client are available at the [Stream website](https://getstream.io/docs/?language=python). -### Usage +## ✨ Getting started ```python import datetime @@ -149,44 +165,11 @@ redirect_url = client.create_redirect_url('http://google.com/', 'user_id', event [JS client](http://github.com/getstream/stream-js). -### Contributing - -First, make sure you can run the test suite. Tests are run via py.test - -```bash -py.test -# with coverage -py.test --cov stream --cov-report html -# against a local API backend -LOCAL=true py.test -``` - -Install black and flake8 - -``` -pip install .[ci] -``` - -Install git hooks to avoid pushing invalid code (git commit will run `black` and `flake8`) - -### Releasing a new version - -In order to release new version you need to be a maintainer on Pypi. - -- Update CHANGELOG -- Update the version on setup.py -- Commit and push to Github -- Create a new tag for the version (eg. `v2.9.0`) -- Create a new dist with python `python setup.py sdist` -- Upload the new distributable with twine `twine upload dist/stream-python-VERSION-NAME.tar.gz` - -If unsure you can also test using the Pypi test servers `twine upload --repository-url https://test.pypi.org/legacy/ dist/stream-python-VERSION-NAME.tar.gz` - -### Copyright and License Information +## ✍️ Contributing -Project is licensed under the [BSD 3-Clause](LICENSE). +We welcome code changes that improve this library or fix a problem, please make sure to follow all best practices and add tests if applicable before submitting a Pull Request on Github. We are very happy to merge your code in the official repository. Make sure to sign our [Contributor License Agreement (CLA)](https://docs.google.com/forms/d/e/1FAIpQLScFKsKkAJI7mhCr7K9rEIOpqIDThrWxuvxnwUq2XkHyG154vQ/viewform) first. See our [license file](./LICENSE) for more details. -## We are hiring! +## 🧑‍💻 We are hiring! We've recently closed a [$38 million Series B funding round](https://techcrunch.com/2021/03/04/stream-raises-38m-as-its-chat-and-activity-feed-apis-power-communications-for-1b-users/) and we keep actively growing. Our APIs are used by more than a billion end-users, and you'll have a chance to make a huge impact on the product within a team of the strongest engineers all over the world. diff --git a/assets/logo.svg b/assets/logo.svg new file mode 100644 index 0000000..1c68c5c --- /dev/null +++ b/assets/logo.svg @@ -0,0 +1,16 @@ + + + + STREAM MARK + Created with Sketch. + + + + + + + + + + + \ No newline at end of file diff --git a/scripts/get_changelog_diff.js b/scripts/get_changelog_diff.js new file mode 100644 index 0000000..ce03438 --- /dev/null +++ b/scripts/get_changelog_diff.js @@ -0,0 +1,26 @@ +/* +Here we're trying to parse the latest changes from CHANGELOG.md file. 
+The changelog looks like this: + +## 0.0.3 +- Something #3 +## 0.0.2 +- Something #2 +## 0.0.1 +- Something #1 + +In this case we're trying to extract "- Something #3" since that's the latest change. +*/ +module.exports = () => { + const fs = require('fs') + + changelog = fs.readFileSync('CHANGELOG.md', 'utf8') + releases = changelog.match(/## [?[0-9](.+)/g) + + current_release = changelog.indexOf(releases[0]) + previous_release = changelog.indexOf(releases[1]) + + latest_changes = changelog.substr(current_release, previous_release - current_release) + + return latest_changes +} diff --git a/setup.py b/setup.py index a0835e3..7d906da 100644 --- a/setup.py +++ b/setup.py @@ -2,36 +2,17 @@ from setuptools import setup, find_packages -from setuptools.command.test import test as TestCommand from stream import __version__, __maintainer__, __email__, __license__ -import sys - -tests_require = ["pytest", "unittest2", "pytest-cov", "python-dateutil"] -ci_require = ["black", "flake8", "pytest-cov"] - -long_description = open("README.md", "r").read() install_requires = [ - "pycryptodomex>=3.8.1,<4", "requests>=2.3.0,<3", "pyjwt>=2.0.0,<3", "pytz>=2019.3", ] +tests_require = ["pytest", "pytest-cov", "python-dateutil"] +ci_require = ["black", "flake8", "pytest-cov"] - -class PyTest(TestCommand): - def finalize_options(self): - TestCommand.finalize_options(self) - self.test_args = [] - self.test_suite = True - - def run_tests(self): - # import here, cause outside the eggs aren't loaded - import pytest - - errno = pytest.main(["-v", "--cov=./"]) - sys.exit(errno) - +long_description = open("README.md", "r").read() setup( name="stream-python", @@ -42,14 +23,21 @@ def run_tests(self): description="Client for getstream.io. Build scalable newsfeeds & activity streams in a few hours instead of weeks.", long_description=long_description, long_description_content_type="text/markdown", + project_urls={ + "Bug Tracker": "https://github.com/GetStream/stream-python/issues", + "Documentation": "https://getstream.io/activity-feeds/docs/python/?language=python", + "Release Notes": "https://github.com/GetStream/stream-python/releases/tag/v{}".format( + __version__ + ), + }, license=__license__, - packages=find_packages(), + packages=find_packages(exclude=["*tests*"]), zip_safe=False, install_requires=install_requires, extras_require={"test": tests_require, "ci": ci_require}, - cmdclass={"test": PyTest}, tests_require=tests_require, include_package_data=True, + python_requires=">=3.7", classifiers=[ "Intended Audience :: Developers", "Intended Audience :: System Administrators", @@ -59,10 +47,10 @@ def run_tests(self): "License :: OSI Approved :: BSD License", "Natural Language :: English", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", "Topic :: Software Development :: Libraries :: Python Modules", ], ) diff --git a/stream/__init__.py b/stream/__init__.py index 87ad6f1..908c192 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -2,7 +2,7 @@ import re __author__ = "Thierry Schellenbach" -__copyright__ = "Copyright 2014, Stream.io, Inc" +__copyright__ = "Copyright 2022, Stream.io, Inc" __credits__ = ["Thierry Schellenbach, mellowmorning.com, @tschellenbach"] __license__ = "BSD-3-Clause" __version__ = "5.1.1" From e5b33d6b34d53523d369f58a3bb118b9ffbf228e Mon Sep 17 00:00:00 2001 From: Roman Zavadski Date: Fri, 12 
Nov 2021 21:09:05 +0300 Subject: [PATCH 196/208] feat: added async support --- .coveragerc | 2 + .github/workflows/ci.yml | 5 +- .gitignore | 1 + Makefile | 1 + README.md | 126 ++ setup.py | 3 +- stream/__init__.py | 15 +- stream/client/__init__.py | 2 + stream/client/async_client.py | 275 ++++ stream/{client.py => client/base.py} | 601 ++++----- stream/client/client.py | 292 +++++ stream/collections.py | 128 -- stream/collections/__init__.py | 1 + stream/collections/base.py | 100 ++ stream/collections/collections.py | 148 +++ stream/exceptions.py | 4 +- stream/feed.py | 233 ---- stream/feed/__init__.py | 1 + stream/feed/base.py | 172 +++ stream/feed/feeds.py | 238 ++++ stream/personalization.py | 66 - stream/personalization/__init__.py | 1 + stream/personalization/base.py | 30 + stream/personalization/personalizations.py | 117 ++ stream/reactions.py | 89 -- stream/reactions/__init__.py | 1 + stream/reactions/base.py | 75 ++ stream/reactions/reaction.py | 163 +++ stream/tests/conftest.py | 73 ++ stream/tests/test_async_client.py | 1342 ++++++++++++++++++++ stream/tests/test_client.py | 17 +- stream/users.py | 36 - stream/users/__init__.py | 1 + stream/users/base.py | 39 + stream/users/user.py | 73 ++ stream/utils.py | 15 +- 36 files changed, 3553 insertions(+), 933 deletions(-) create mode 100644 .coveragerc create mode 100644 stream/client/__init__.py create mode 100644 stream/client/async_client.py rename stream/{client.py => client/base.py} (56%) create mode 100644 stream/client/client.py delete mode 100644 stream/collections.py create mode 100644 stream/collections/__init__.py create mode 100644 stream/collections/base.py create mode 100644 stream/collections/collections.py delete mode 100644 stream/feed.py create mode 100644 stream/feed/__init__.py create mode 100644 stream/feed/base.py create mode 100644 stream/feed/feeds.py delete mode 100644 stream/personalization.py create mode 100644 stream/personalization/__init__.py create mode 100644 stream/personalization/base.py create mode 100644 stream/personalization/personalizations.py delete mode 100644 stream/reactions.py create mode 100644 stream/reactions/__init__.py create mode 100644 stream/reactions/base.py create mode 100644 stream/reactions/reaction.py create mode 100644 stream/tests/conftest.py create mode 100644 stream/tests/test_async_client.py delete mode 100644 stream/users.py create mode 100644 stream/users/__init__.py create mode 100644 stream/users/base.py create mode 100644 stream/users/user.py diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..241e4cc --- /dev/null +++ b/.coveragerc @@ -0,0 +1,2 @@ +[run] +omit = stream/tests/* \ No newline at end of file diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5b44d1a..c83a6af 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -14,8 +14,9 @@ jobs: name: 🧪 Test & lint runs-on: ubuntu-latest strategy: + max-parallel: 1 matrix: - python: ["3.7", "3.8", "3.9", "3.10"] + python: ['3.7', '3.8', '3.9', '3.10'] steps: - uses: actions/checkout@v3 with: @@ -30,7 +31,7 @@ jobs: python-version: ${{ matrix.python }} - name: Install deps with ${{ matrix.python }} - run: pip install ".[test, ci]" + run: pip install -q ".[test, ci]" - name: Lint with ${{ matrix.python }} if: ${{ matrix.python == '3.7' }} diff --git a/.gitignore b/.gitignore index 1716919..4c239ea 100644 --- a/.gitignore +++ b/.gitignore @@ -58,6 +58,7 @@ docs/_build/ secrets.*sh .idea .vscode/ +.python-version .venv .envrc diff --git a/Makefile b/Makefile 
index 808d80f..4c2098a 100644 --- a/Makefile +++ b/Makefile @@ -19,6 +19,7 @@ lint-fix: test: ## Run tests STREAM_KEY=$(STREAM_KEY) STREAM_SECRET=$(STREAM_SECRET) pytest stream/tests + check: lint test ## Run linters + tests reviewdog: diff --git a/README.md b/README.md index 32a052d..8be6d5d 100644 --- a/README.md +++ b/README.md @@ -163,9 +163,135 @@ events = [impression, engagement] redirect_url = client.create_redirect_url('http://google.com/', 'user_id', events) ``` +### Async code usage +```python +import datetime +import stream +client = stream.connect('YOUR_API_KEY', 'API_KEY_SECRET', use_async=True) + + +# Create a new client specifying data center location +client = stream.connect('YOUR_API_KEY', 'API_KEY_SECRET', location='us-east', use_async=True) +# Find your API keys here https://getstream.io/dashboard/ + +# Create a feed object +user_feed_1 = client.feed('user', '1') + +# Get activities from 5 to 10 (slow pagination) +result = await user_feed_1.get(limit=5, offset=5) +# (Recommended & faster) Filter on an id less than the given UUID +result = await user_feed_1.get(limit=5, id_lt="e561de8f-00f1-11e4-b400-0cc47a024be0") + +# Create a new activity +activity_data = {'actor': 1, 'verb': 'tweet', 'object': 1, 'foreign_id': 'tweet:1'} +activity_response = await user_feed_1.add_activity(activity_data) +# Create a bit more complex activity +activity_data = {'actor': 1, 'verb': 'run', 'object': 1, 'foreign_id': 'run:1', + 'course': {'name': 'Golden Gate park', 'distance': 10}, + 'participants': ['Thierry', 'Tommaso'], + 'started_at': datetime.datetime.now() +} +await user_feed_1.add_activity(activity_data) + +# Remove an activity by its id +await user_feed_1.remove_activity("e561de8f-00f1-11e4-b400-0cc47a024be0") +# or by foreign id +await user_feed_1.remove_activity(foreign_id='tweet:1') + +# Follow another feed +await user_feed_1.follow('flat', '42') + +# Stop following another feed +await user_feed_1.unfollow('flat', '42') + +# List followers/following +following = await user_feed_1.following(offset=0, limit=2) +followers = await user_feed_1.followers(offset=0, limit=10) + +# Creates many follow relationships in one request +follows = [ + {'source': 'flat:1', 'target': 'user:1'}, + {'source': 'flat:1', 'target': 'user:2'}, + {'source': 'flat:1', 'target': 'user:3'} +] +await client.follow_many(follows) + +# Batch adding activities +activities = [ + {'actor': 1, 'verb': 'tweet', 'object': 1}, + {'actor': 2, 'verb': 'watch', 'object': 3} +] +await user_feed_1.add_activities(activities) + +# Add an activity and push it to other feeds too using the `to` field +activity = { + "actor":"1", + "verb":"like", + "object":"3", + "to":["user:44", "user:45"] +} +await user_feed_1.add_activity(activity) + +# Retrieve an activity by its ID +await client.get_activities(ids=[activity_id]) + +# Retrieve an activity by the combination of foreign_id and time +await client.get_activities(foreign_id_times=[ + (foreign_id, activity_time), +]) + +# Enrich while getting activities +await client.get_activities(ids=[activity_id], enrich=True, reactions={"counts": True}) + +# Update some parts of an activity with activity_partial_update +set = { + 'product.name': 'boots', + 'colors': { + 'red': '0xFF0000', + 'green': '0x00FF00' + } +} +unset = [ 'popularity', 'details.info' ] +# ...by ID +await client.activity_partial_update(id=activity_id, set=set, unset=unset) +# ...or by combination of foreign_id and time +await client.activity_partial_update(foreign_id=foreign_id, time=activity_time, set=set, unset=unset) 
+ +# Generating user token for client side usage (JS client) +user_token = client.create_user_token("user-42") + +# Javascript client side feed initialization +# client = stream.connect(apiKey, userToken, appId); + +# Generate a redirect url for the Stream Analytics platform to track +# events/impressions on url clicks +impression = { + 'content_list': ['tweet:1', 'tweet:2', 'tweet:3'], + 'user_data': 'tommaso', + 'location': 'email', + 'feed_id': 'user:global' +} + +engagement = { + 'content': 'tweet:2', + 'label': 'click', + 'position': 1, + 'user_data': 'tommaso', + 'location': 'email', + 'feed_id': + 'user:global' +} + +events = [impression, engagement] + +redirect_url = client.create_redirect_url('http://google.com/', 'user_id', events) + +``` + [JS client](http://github.com/getstream/stream-js). ## ✍️ Contributing +======= We welcome code changes that improve this library or fix a problem, please make sure to follow all best practices and add tests if applicable before submitting a Pull Request on Github. We are very happy to merge your code in the official repository. Make sure to sign our [Contributor License Agreement (CLA)](https://docs.google.com/forms/d/e/1FAIpQLScFKsKkAJI7mhCr7K9rEIOpqIDThrWxuvxnwUq2XkHyG154vQ/viewform) first. See our [license file](./LICENSE) for more details. diff --git a/setup.py b/setup.py index 7d906da..b404722 100644 --- a/setup.py +++ b/setup.py @@ -8,8 +8,9 @@ "requests>=2.3.0,<3", "pyjwt>=2.0.0,<3", "pytz>=2019.3", + "aiohttp>=3.6.0", ] -tests_require = ["pytest", "pytest-cov", "python-dateutil"] +tests_require = ["pytest", "pytest-cov", "python-dateutil", "pytest-asyncio"] ci_require = ["black", "flake8", "pytest-cov"] long_description = open("README.md", "r").read() diff --git a/stream/__init__.py b/stream/__init__.py index 908c192..e5de2db 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -19,6 +19,7 @@ def connect( timeout=3.0, location=None, base_url=None, + use_async=False, ): """ Returns a Client object @@ -26,8 +27,9 @@ def connect( :param api_key: your api key or heroku url :param api_secret: the api secret :param app_id: the app id (used for listening to feed changes) + :param use_async: flag to set AsyncClient """ - from stream.client import StreamClient + from stream.client import AsyncStreamClient, StreamClient stream_url = os.environ.get("STREAM_URL") # support for the heroku STREAM_URL syntax @@ -42,6 +44,17 @@ def connect( else: raise ValueError("Invalid api key or heroku url") + if use_async: + return AsyncStreamClient( + api_key, + api_secret, + app_id, + version, + timeout, + location=location, + base_url=base_url, + ) + return StreamClient( api_key, api_secret, diff --git a/stream/client/__init__.py b/stream/client/__init__.py new file mode 100644 index 0000000..5d8511e --- /dev/null +++ b/stream/client/__init__.py @@ -0,0 +1,2 @@ +from .async_client import AsyncStreamClient +from .client import StreamClient diff --git a/stream/client/async_client.py b/stream/client/async_client.py new file mode 100644 index 0000000..02eacc4 --- /dev/null +++ b/stream/client/async_client.py @@ -0,0 +1,275 @@ +import logging + +import aiohttp +from aiohttp import ClientConnectionError + +from stream import serializer +from stream.client.base import BaseStreamClient +from stream.collections import AsyncCollections +from stream.feed.feeds import AsyncFeed +from stream.personalization import AsyncPersonalization +from stream.reactions import AsyncReactions +from stream.serializer import _datetime_encoder +from stream.users import AsyncUsers 
+from stream.utils import ( + get_reaction_params, + validate_feed_slug, + validate_foreign_id_time, + validate_user_id, +) + +logger = logging.getLogger(__name__) + + +class AsyncStreamClient(BaseStreamClient): + def __init__( + self, + api_key, + api_secret, + app_id, + version="v1.0", + timeout=6.0, + base_url=None, + location=None, + ): + super().__init__( + api_key, + api_secret, + app_id, + version=version, + timeout=timeout, + base_url=base_url, + location=location, + ) + token = self.create_jwt_token("collections", "*", feed_id="*", user_id="*") + self.collections = AsyncCollections(self, token) + + token = self.create_jwt_token("personalization", "*", feed_id="*", user_id="*") + self.personalization = AsyncPersonalization(self, token) + + token = self.create_jwt_token("reactions", "*", feed_id="*") + self.reactions = AsyncReactions(self, token) + + token = self.create_jwt_token("users", "*", feed_id="*") + self.users = AsyncUsers(self, token) + + def feed(self, feed_slug, user_id): + feed_slug = validate_feed_slug(feed_slug) + user_id = validate_user_id(user_id) + token = self.create_jwt_token("feed", "*", feed_id="*") + return AsyncFeed(self, feed_slug, user_id, token) + + async def put(self, *args, **kwargs): + return await self._make_request("PUT", *args, **kwargs) + + async def post(self, *args, **kwargs): + return await self._make_request("POST", *args, **kwargs) + + async def get(self, *args, **kwargs): + return await self._make_request("GET", *args, **kwargs) + + async def delete(self, *args, **kwargs): + return await self._make_request("DELETE", *args, **kwargs) + + async def add_to_many(self, activity, feeds): + data = {"activity": activity, "feeds": feeds} + token = self.create_jwt_token("feed", "*", feed_id="*") + return await self.post("feed/add_to_many/", token, data=data) + + async def follow_many(self, follows, activity_copy_limit=None): + params = None + + if activity_copy_limit is not None: + params = dict(activity_copy_limit=activity_copy_limit) + token = self.create_jwt_token("follower", "*", feed_id="*") + return await self.post("follow_many/", token, params=params, data=follows) + + async def unfollow_many(self, unfollows): + params = None + + token = self.create_jwt_token("follower", "*", feed_id="*") + return await self.post("unfollow_many/", token, params=params, data=unfollows) + + async def update_activities(self, activities): + if not isinstance(activities, (list, tuple, set)): + raise TypeError("Activities parameter should be of type list") + + auth_token = self.create_jwt_token("activities", "*", feed_id="*") + data = dict(activities=activities) + return await self.post("activities/", auth_token, data=data) + + async def update_activity(self, activity): + return await self.update_activities([activity]) + + async def get_activities( + self, ids=None, foreign_id_times=None, enrich=False, reactions=None, **params + ): + auth_token = self.create_jwt_token("activities", "*", feed_id="*") + + if ids is None and foreign_id_times is None: + raise TypeError( + "One the parameters ids or foreign_id_time must be provided and not None" + ) + + if ids is not None and foreign_id_times is not None: + raise TypeError( + "At most one of the parameters ids or foreign_id_time must be provided" + ) + + endpoint = "activities/" + if enrich or reactions is not None: + endpoint = "enrich/" + endpoint + + query_params = {**params} + + if ids is not None: + query_params["ids"] = ",".join(ids) + + if foreign_id_times is not None: + validate_foreign_id_time(foreign_id_times) + 
foreign_ids, timestamps = zip(*foreign_id_times) + timestamps = map(_datetime_encoder, timestamps) + query_params["foreign_ids"] = ",".join(foreign_ids) + query_params["timestamps"] = ",".join(timestamps) + + query_params.update(get_reaction_params(reactions)) + + return await self.get(endpoint, auth_token, params=query_params) + + async def activity_partial_update( + self, id=None, foreign_id=None, time=None, set=None, unset=None + ): + if id is None and (foreign_id is None or time is None): + raise TypeError( + "The id or foreign_id+time parameters must be provided and not be None" + ) + if id is not None and (foreign_id is not None or time is not None): + raise TypeError( + "Only one of the id or the foreign_id+time parameters can be provided" + ) + + data = {"set": set or {}, "unset": unset or []} + + if id is not None: + data["id"] = id + else: + data["foreign_id"] = foreign_id + data["time"] = time + + return await self.activities_partial_update(updates=[data]) + + async def activities_partial_update(self, updates=None): + auth_token = self.create_jwt_token("activities", "*", feed_id="*") + + data = {"changes": updates or []} + + return await self.post("activity/", auth_token, data=data) + + async def track_engagements(self, engagements): + auth_token = self.create_jwt_token("*", "*", feed_id="*") + await self.post( + "engagement/", + auth_token, + data={"content_list": engagements}, + service_name="analytics", + ) + + async def track_impressions(self, impressions): + auth_token = self.create_jwt_token("*", "*", feed_id="*") + await self.post( + "impression/", auth_token, data=impressions, service_name="analytics" + ) + + async def og(self, target_url): + auth_token = self.create_jwt_token("*", "*", feed_id="*") + params = {"url": target_url} + return await self.get("og/", auth_token, params=params) + + async def follow_stats(self, feed_id, followers_slugs=None, following_slugs=None): + auth_token = self.create_jwt_token("*", "*", feed_id="*") + params = {"followers": feed_id, "following": feed_id} + + if followers_slugs: + params["followers_slugs"] = ( + ",".join(followers_slugs) + if isinstance(followers_slugs, list) + else followers_slugs + ) + + if following_slugs: + params["following_slugs"] = ( + ",".join(following_slugs) + if isinstance(following_slugs, list) + else following_slugs + ) + + return await self.get("stats/follow/", auth_token, params=params) + + async def _make_request( + self, + method, + relative_url, + signature, + service_name="api", + params=None, + data=None, + ): + params = params or {} + data = data or {} + serialized = None + default_params = self.get_default_params() + params = self._check_params(params) + default_params.update(params) + headers = self.get_default_header() + headers["Authorization"] = signature + headers["stream-auth-type"] = "jwt" + + if not relative_url.endswith("/"): + relative_url += "/" + + url = self.get_full_url(service_name, relative_url) + + if method.lower() in ["post", "put", "delete"]: + serialized = serializer.dumps(data) + + async with aiohttp.ClientSession() as session: + async with session.request( + method, + url, + data=serialized, + headers=headers, + params=default_params, + timeout=self.timeout, + ) as response: + # remove JWT from logs + headers_to_log = headers.copy() + headers_to_log.pop("Authorization", None) + logger.debug( + f"stream api call {response}, headers {headers_to_log} data {data}", + ) + return await self._parse_response(response) + + async def _parse_response(self, response): + try: + parsed_result 
= serializer.loads(await response.text()) + except (ValueError, ClientConnectionError): + parsed_result = None + if ( + parsed_result is None + or parsed_result.get("exception") + or response.status >= 500 + ): + self.raise_exception(parsed_result, status_code=response.status) + + return parsed_result + + def _check_params(self, params): + """There is no standard for boolean representation of boolean values in YARL""" + if not isinstance(params, dict): + raise TypeError("Invalid params type") + + for key, value in params.items(): + if isinstance(value, bool): + params[key] = str(value) + + return params diff --git a/stream/client.py b/stream/client/base.py similarity index 56% rename from stream/client.py rename to stream/client/base.py index 7eb4be8..e6a45ab 100644 --- a/stream/client.py +++ b/stream/client/base.py @@ -1,110 +1,21 @@ import json -import logging import os +from abc import ABC, abstractmethod -import jwt import requests -from requests import Request - -from stream import exceptions, serializer -from stream.collections import Collections -from stream.feed import Feed -from stream.personalization import Personalization -from stream.reactions import Reactions -from stream.serializer import _datetime_encoder -from stream.users import Users -from stream.utils import ( - validate_feed_slug, - validate_foreign_id_time, - validate_user_id, - get_reaction_params, -) + +from stream import exceptions try: from urllib.parse import urlparse except ImportError: from urlparse import urlparse -logger = logging.getLogger(__name__) - - -class StreamClient: - def __init__( - self, - api_key, - api_secret, - app_id, - version="v1.0", - timeout=6.0, - base_url=None, - location=None, - ): - """ - Initialize the client with the given api key and secret - - :param api_key: the api key - :param api_secret: the api secret - :param app_id: the app id - - **Example usage**:: - - import stream - # initialize the client - client = stream.connect('key', 'secret') - # get a feed object - feed = client.feed('aggregated:1') - # write data to the feed - activity_data = {'actor': 1, 'verb': 'tweet', 'object': 1} - activity_id = feed.add_activity(activity_data)['id'] - activities = feed.get() - - feed.follow('flat:3') - activities = feed.get() - feed.unfollow('flat:3') - feed.remove_activity(activity_id) - """ - self.api_key = api_key - self.api_secret = api_secret - self.app_id = app_id - self.version = version - self.timeout = timeout - self.location = location - - self.base_domain_name = "stream-io-api.com" - self.api_location = location - self.custom_api_port = None - self.protocol = "https" - - if os.environ.get("LOCAL"): - self.base_domain_name = "localhost" - self.protocol = "http" - self.custom_api_port = 8000 - self.timeout = 20 - elif base_url is not None: - parsed_url = urlparse(base_url) - self.base_domain_name = parsed_url.hostname - self.protocol = parsed_url.scheme - self.custom_api_port = parsed_url.port - self.api_location = "" - elif location is not None: - self.location = location - - self.base_analytics_url = "https://analytics.stream-io-api.com/analytics/" - - self.session = requests.Session() - - token = self.create_jwt_token("personalization", "*", feed_id="*", user_id="*") - self.personalization = Personalization(self, token) - - token = self.create_jwt_token("collections", "*", feed_id="*", user_id="*") - self.collections = Collections(self, token) - - token = self.create_jwt_token("reactions", "*", feed_id="*") - self.reactions = Reactions(self, token) +import jwt - token = 
self.create_jwt_token("users", "*", feed_id="*") - self.users = Users(self, token) +class AbstractStreamClient(ABC): + @abstractmethod def feed(self, feed_slug, user_id): """ Returns a Feed object @@ -112,193 +23,81 @@ def feed(self, feed_slug, user_id): :param feed_slug: the slug of the feed :param user_id: the user id """ - feed_slug = validate_feed_slug(feed_slug) - user_id = validate_user_id(user_id) - token = self.create_jwt_token("feed", "*", feed_id="*") - return Feed(self, feed_slug, user_id, token) + pass + @abstractmethod def get_default_params(self): """ Returns the params with the API key present """ - params = dict(api_key=self.api_key) - return params + pass + @abstractmethod def get_default_header(self): - base_headers = { - "Content-type": "application/json", - "X-Stream-Client": self.get_user_agent(), - } - return base_headers + pass + @abstractmethod def get_full_url(self, service_name, relative_url): - if self.api_location: - hostname = "%s%s.%s" % ( - self.api_location, - "" if service_name == "analytics" else f"-{service_name}", - self.base_domain_name, - ) - elif service_name: - hostname = "%s.%s" % (service_name, self.base_domain_name) - else: - hostname = self.base_domain_name - - if self.base_domain_name == "localhost": - hostname = "localhost" - - base_url = "%s://%s" % (self.protocol, hostname) - - if self.custom_api_port: - base_url = "%s:%s" % (base_url, self.custom_api_port) - - url = base_url + "/" + service_name + "/" + self.version + "/" + relative_url - return url + pass + @abstractmethod def get_user_agent(self): - from stream import __version__ - - agent = "stream-python-client-%s" % __version__ - return agent - - def _parse_response(self, response): - try: - parsed_result = serializer.loads(response.text) - except ValueError: - parsed_result = None - if ( - parsed_result is None - or parsed_result.get("exception") - or response.status_code >= 500 - ): - self.raise_exception(parsed_result, status_code=response.status_code) - return parsed_result + pass + @abstractmethod def create_user_token(self, user_id, **extra_data): """ Setup the payload for the given user_id with optional extra data (key, value pairs) and encode it using jwt """ - payload = {"user_id": user_id} - for k, v in extra_data.items(): - payload[k] = v - return jwt.encode(payload, self.api_secret, algorithm="HS256") + pass + @abstractmethod def create_jwt_token(self, resource, action, feed_id=None, user_id=None, **params): """ - Setup the payload for the given resource, action, feed or user + Set up the payload for the given resource, action, feed or user and encode it using jwt """ - payload = {**params, "action": action, "resource": resource} - if feed_id is not None: - payload["feed_id"] = feed_id - if user_id is not None: - payload["user_id"] = user_id - return jwt.encode(payload, self.api_secret, algorithm="HS256") - - def _make_request( - self, - method, - relative_url, - signature, - service_name="api", - params=None, - data=None, - ): - params = params or {} - data = data or {} - serialized = None - default_params = self.get_default_params() - default_params.update(params) - headers = self.get_default_header() - headers["Authorization"] = signature - headers["stream-auth-type"] = "jwt" - - if not relative_url.endswith("/"): - relative_url += "/" - - url = self.get_full_url(service_name, relative_url) - - if method.__name__ in ["post", "put", "delete"]: - serialized = serializer.dumps(data) - response = method( - url, - data=serialized, - headers=headers, - params=default_params, - 
timeout=self.timeout, - ) - logger.debug( - "stream api call %s, headers %s data %s", response.url, headers, data - ) - return self._parse_response(response) + pass + @abstractmethod def raise_exception(self, result, status_code): """ Map the exception code to an exception class and raise it If result.exception and result.detail are available use that Otherwise just raise a generic error """ - from stream.exceptions import get_exception_dict - - exception_class = exceptions.StreamApiException - - def errors_from_fields(exception_fields): - result = [] - if not isinstance(exception_fields, dict): - return exception_fields - - for field, errors in exception_fields.items(): - result.append('Field "%s" errors: %s' % (field, repr(errors))) - return result - - if result is not None: - error_message = result["detail"] - exception_fields = result.get("exception_fields") - if exception_fields is not None: - if isinstance(exception_fields, list): - errors = [ - errors_from_fields(exception_dict) - for exception_dict in exception_fields - ] - errors = [item for sublist in errors for item in sublist] - else: - errors = errors_from_fields(exception_fields) - - error_message = "\n".join(errors) - error_code = result.get("code") - exception_dict = get_exception_dict() - exception_class = exception_dict.get( - error_code, exceptions.StreamApiException - ) - else: - error_message = "GetStreamAPI%s" % status_code - exception = exception_class(error_message, status_code=status_code) - raise exception + pass + @abstractmethod def put(self, *args, **kwargs): """ Shortcut for make request """ - return self._make_request(self.session.put, *args, **kwargs) + pass + @abstractmethod def post(self, *args, **kwargs): """ Shortcut for make request """ - return self._make_request(self.session.post, *args, **kwargs) + pass + @abstractmethod def get(self, *args, **kwargs): """ Shortcut for make request """ - return self._make_request(self.session.get, *args, **kwargs) + pass + @abstractmethod def delete(self, *args, **kwargs): """ Shortcut for make request """ - return self._make_request(self.session.delete, *args, **kwargs) + pass + @abstractmethod def add_to_many(self, activity, feeds): """ Adds an activity to many feeds @@ -307,10 +106,9 @@ def add_to_many(self, activity, feeds): :param feeds: the list of follows (eg. ['feed:1', 'feed:2']) """ - data = {"activity": activity, "feeds": feeds} - token = self.create_jwt_token("feed", "*", feed_id="*") - return self.post("feed/add_to_many/", token, data=data) + pass + @abstractmethod def follow_many(self, follows, activity_copy_limit=None): """ Creates many follows @@ -319,13 +117,9 @@ def follow_many(self, follows, activity_copy_limit=None): eg. [{'source': source, 'target': target}] """ - params = None - - if activity_copy_limit is not None: - params = dict(activity_copy_limit=activity_copy_limit) - token = self.create_jwt_token("follower", "*", feed_id="*") - return self.post("follow_many/", token, params=params, data=follows) + pass + @abstractmethod def unfollow_many(self, unfollows): """ Unfollows many feeds at batch @@ -333,28 +127,23 @@ def unfollow_many(self, unfollows): eg. 
[{'source': source, 'target': target, 'keep_history': keep_history}] """ - params = None - - token = self.create_jwt_token("follower", "*", feed_id="*") - return self.post("unfollow_many/", token, params=params, data=unfollows) + pass + @abstractmethod def update_activities(self, activities): """ Update or create activities """ - if not isinstance(activities, (list, tuple, set)): - raise TypeError("Activities parameter should be of type list") - - auth_token = self.create_jwt_token("activities", "*", feed_id="*") - data = dict(activities=activities) - return self.post("activities/", auth_token, data=data) + pass + @abstractmethod def update_activity(self, activity): """ Update a single activity """ - return self.update_activities([activity]) + pass + @abstractmethod def get_activities( self, ids=None, foreign_id_times=None, enrich=False, reactions=None, **params ): @@ -366,38 +155,9 @@ def get_activities( ids: list of activity IDs foreign_id_time: list of tuples (foreign_id, time) """ - auth_token = self.create_jwt_token("activities", "*", feed_id="*") - - if ids is None and foreign_id_times is None: - raise TypeError( - "One the parameters ids or foreign_id_time must be provided and not None" - ) - - if ids is not None and foreign_id_times is not None: - raise TypeError( - "At most one of the parameters ids or foreign_id_time must be provided" - ) - - endpoint = "activities/" - if enrich or reactions is not None: - endpoint = "enrich/" + endpoint - - query_params = {**params} - - if ids is not None: - query_params["ids"] = ",".join(ids) - - if foreign_id_times is not None: - validate_foreign_id_time(foreign_id_times) - foreign_ids, timestamps = zip(*foreign_id_times) - timestamps = map(_datetime_encoder, timestamps) - query_params["foreign_ids"] = ",".join(foreign_ids) - query_params["timestamps"] = ",".join(timestamps) - - query_params.update(get_reaction_params(reactions)) - - return self.get(endpoint, auth_token, params=query_params) + pass + @abstractmethod def activity_partial_update( self, id=None, foreign_id=None, time=None, set=None, unset=None ): @@ -410,26 +170,9 @@ def activity_partial_update( set: object containing the set operations unset: list of unset operations """ + pass - if id is None and (foreign_id is None or time is None): - raise TypeError( - "The id or foreign_id+time parameters must be provided and not be None" - ) - if id is not None and (foreign_id is not None or time is not None): - raise TypeError( - "Only one of the id or the foreign_id+time parameters can be provided" - ) - - data = {"set": set or {}, "unset": unset or []} - - if id is not None: - data["id"] = id - else: - data["foreign_id"] = foreign_id - data["time"] = time - - return self.activities_partial_update(updates=[data]) - + @abstractmethod def activities_partial_update(self, updates=None): """ Partial update activity, via activity ID or Foreign ID + timestamp @@ -451,36 +194,18 @@ def activities_partial_update(self, updates=None): } ] """ + pass - auth_token = self.create_jwt_token("activities", "*", feed_id="*") - - data = {"changes": updates or []} - - return self.post("activity/", auth_token, data=data) - + @abstractmethod def create_redirect_url(self, target_url, user_id, events): """ Creates a redirect url for tracking the given events in the context of an email using Stream's analytics platform. 
Learn more at getstream.io/personalization """ - # generate the JWT token - auth_token = self.create_jwt_token( - "redirect_and_track", "*", "*", user_id=user_id - ) - # setup the params - params = dict(auth_type="jwt", authorization=auth_token, url=target_url) - params["api_key"] = self.api_key - params["events"] = json.dumps(events) - url = f"{self.base_analytics_url}redirect/" - # we get the url from the prepare request, this skips issues with - # python's urlencode implementation - request = Request("GET", url, params=params) - prepared_request = request.prepare() - # validate the target url is valid - Request("GET", target_url).prepare() - return prepared_request.url + pass + @abstractmethod def track_engagements(self, engagements): """ Creates a list of engagements @@ -515,15 +240,9 @@ def track_engagements(self, engagements): }, ] """ + pass - auth_token = self.create_jwt_token("*", "*", feed_id="*") - self.post( - "engagement/", - auth_token, - data={"content_list": engagements}, - service_name="analytics", - ) - + @abstractmethod def track_impressions(self, impressions): """ Creates a list of impressions @@ -547,46 +266,216 @@ def track_impressions(self, impressions): }, ] """ + pass - auth_token = self.create_jwt_token("*", "*", feed_id="*") - self.post("impression/", auth_token, data=impressions, service_name="analytics") - + @abstractmethod def og(self, target_url): """ Retrieve open graph information from a URL which you can then use to add images and a description to activities. """ - auth_token = self.create_jwt_token("*", "*", feed_id="*") - params = {"url": target_url} - return self.get("og/", auth_token, params=params) + pass + @abstractmethod def follow_stats(self, feed_id, followers_slugs=None, following_slugs=None): """ Retrieve the number of follower and following feed stats of a given feed. For each count, feed slugs can be provided to filter counts accordingly. eg. 
- client.follow_stats(me, followers_slugs=['user'], following_slugs=['commodities']) - this means to find counts of users following me and count of commodities I am following + client.follow_stats( + me, followers_slugs=['user'], following_slugs=['commodities'] + ) + this means to find counts of users following me and count + of commodities I am following """ - auth_token = self.create_jwt_token("*", "*", feed_id="*") - params = { - "followers": feed_id, - "following": feed_id, - } + pass + + @abstractmethod + def _make_request( + self, + method, + relative_url, + signature, + service_name="api", + params=None, + data=None, + ): + pass + + @abstractmethod + def _parse_response(self, response): + pass + + +class BaseStreamClient(AbstractStreamClient, ABC): + """ + Initialize the client with the given api key and secret + + :param api_key: the api key + :param api_secret: the api secret + :param app_id: the app id + + **Example usage**:: + + import stream + # initialize the client + client = stream.connect('key', 'secret') + # get a feed object + feed = client.feed('aggregated:1') + # write data to the feed + activity_data = {'actor': 1, 'verb': 'tweet', 'object': 1} + activity_id = feed.add_activity(activity_data)['id'] + activities = feed.get() + + feed.follow('flat:3') + activities = feed.get() + feed.unfollow('flat:3') + feed.remove_activity(activity_id) + """ + + def __init__( + self, + api_key, + api_secret, + app_id, + version="v1.0", + timeout=6.0, + base_url=None, + location=None, + ): + self.api_key = api_key + self.api_secret = api_secret + self.app_id = app_id + self.version = version + self.timeout = timeout + self.location = location + self.base_domain_name = "stream-io-api.com" + self.api_location = location + self.custom_api_port = None + self.protocol = "https" + + if os.environ.get("LOCAL"): + self.base_domain_name = "localhost" + self.protocol = "http" + self.custom_api_port = 8000 + self.timeout = 20 + elif base_url is not None: + parsed_url = urlparse(base_url) + self.base_domain_name = parsed_url.hostname + self.protocol = parsed_url.scheme + self.custom_api_port = parsed_url.port + self.api_location = "" + elif location is not None: + self.location = location + + self.base_analytics_url = "https://analytics.stream-io-api.com/analytics/" + + def create_user_token(self, user_id, **extra_data): + payload = {"user_id": user_id} + for k, v in extra_data.items(): + payload[k] = v + return jwt.encode(payload, self.api_secret, algorithm="HS256") + + def create_jwt_token(self, resource, action, feed_id=None, user_id=None, **params): + payload = {**params, "action": action, "resource": resource} + if feed_id is not None: + payload["feed_id"] = feed_id + if user_id is not None: + payload["user_id"] = user_id + return jwt.encode(payload, self.api_secret, algorithm="HS256") + + def raise_exception(self, result, status_code): + from stream.exceptions import get_exception_dict + + exception_class = exceptions.StreamApiException + + def errors_from_fields(exception_fields): + result = [] + if not isinstance(exception_fields, dict): + return exception_fields - if followers_slugs: - params["followers_slugs"] = ( - ",".join(followers_slugs) - if isinstance(followers_slugs, list) - else followers_slugs + for field, errors in exception_fields.items(): + result.append(f'Field "{field}" errors: {repr(errors)}') + return result + + if result is not None: + error_message = result["detail"] + exception_fields = result.get("exception_fields") + if exception_fields is not None: + if 
isinstance(exception_fields, list): + errors = [ + errors_from_fields(exception_dict) + for exception_dict in exception_fields + ] + errors = [item for sublist in errors for item in sublist] + else: + errors = errors_from_fields(exception_fields) + + error_message = "\n".join(errors) + error_code = result.get("code") + exception_dict = get_exception_dict() + exception_class = exception_dict.get( + error_code, exceptions.StreamApiException ) + else: + error_message = f"GetStreamAPI{status_code}" + exception = exception_class(error_message, status_code=status_code) + raise exception + + def create_redirect_url(self, target_url, user_id, events): + # generate the JWT token + auth_token = self.create_jwt_token( + "redirect_and_track", "*", "*", user_id=user_id + ) + # setup the params + params = dict(auth_type="jwt", authorization=auth_token, url=target_url) + params["api_key"] = self.api_key + params["events"] = json.dumps(events) + url = f"{self.base_analytics_url}redirect/" + # we get the url from the prepare request, this skips issues with + # python's urlencode implementation + request = requests.Request("GET", url, params=params) + prepared_request = request.prepare() + # validate the target url is valid + requests.Request("GET", target_url).prepare() + return prepared_request.url - if following_slugs: - params["following_slugs"] = ( - ",".join(following_slugs) - if isinstance(following_slugs, list) - else following_slugs + def get_full_url(self, service_name, relative_url): + if self.api_location: + hostname = "{}{}.{}".format( + self.api_location, + "" if service_name == "analytics" else f"-{service_name}", + self.base_domain_name, ) + elif service_name: + hostname = f"{service_name}.{self.base_domain_name}" + else: + hostname = self.base_domain_name + + if self.base_domain_name == "localhost": + hostname = "localhost" + + base_url = f"{self.protocol}://{hostname}" + + if self.custom_api_port: + base_url = f"{base_url}:{self.custom_api_port}" + + url = base_url + "/" + service_name + "/" + self.version + "/" + relative_url + return url + + def get_default_params(self): + params = dict(api_key=self.api_key) + return params + + def get_default_header(self): + base_headers = { + "Content-type": "application/json", + "X-Stream-Client": self.get_user_agent(), + } + return base_headers + + def get_user_agent(self): + from stream import __version__ - return self.get("stats/follow/", auth_token, params=params) + return f"stream-python-client-{__version__}" diff --git a/stream/client/client.py b/stream/client/client.py new file mode 100644 index 0000000..eef7ee6 --- /dev/null +++ b/stream/client/client.py @@ -0,0 +1,292 @@ +import json +import logging + +import requests +from requests import Request + +from stream import serializer +from stream.client.base import BaseStreamClient +from stream.collections.collections import Collections +from stream.feed import Feed +from stream.personalization import Personalization +from stream.reactions import Reactions +from stream.serializer import _datetime_encoder +from stream.users import Users +from stream.utils import ( + get_reaction_params, + validate_feed_slug, + validate_foreign_id_time, + validate_user_id, +) + +try: + from urllib.parse import urlparse +except ImportError: + pass + # from urlparse import urlparse + +logger = logging.getLogger(__name__) + + +class StreamClient(BaseStreamClient): + def __init__( + self, + api_key, + api_secret, + app_id, + version="v1.0", + timeout=6.0, + base_url=None, + location=None, + ): + super().__init__( + 
api_key, + api_secret, + app_id, + version=version, + timeout=timeout, + base_url=base_url, + location=location, + ) + + self.session = requests.Session() + + token = self.create_jwt_token("personalization", "*", feed_id="*", user_id="*") + self.personalization = Personalization(self, token) + + token = self.create_jwt_token("collections", "*", feed_id="*", user_id="*") + self.collections = Collections(self, token) + + token = self.create_jwt_token("reactions", "*", feed_id="*") + self.reactions = Reactions(self, token) + + token = self.create_jwt_token("users", "*", feed_id="*") + self.users = Users(self, token) + + def feed(self, feed_slug, user_id): + feed_slug = validate_feed_slug(feed_slug) + user_id = validate_user_id(user_id) + token = self.create_jwt_token("feed", "*", feed_id="*") + return Feed(self, feed_slug, user_id, token) + + def put(self, *args, **kwargs): + return self._make_request(self.session.put, *args, **kwargs) + + def post(self, *args, **kwargs): + return self._make_request(self.session.post, *args, **kwargs) + + def get(self, *args, **kwargs): + return self._make_request(self.session.get, *args, **kwargs) + + def delete(self, *args, **kwargs): + return self._make_request(self.session.delete, *args, **kwargs) + + def add_to_many(self, activity, feeds): + data = {"activity": activity, "feeds": feeds} + token = self.create_jwt_token("feed", "*", feed_id="*") + return self.post("feed/add_to_many/", token, data=data) + + def follow_many(self, follows, activity_copy_limit=None): + params = None + + if activity_copy_limit is not None: + params = dict(activity_copy_limit=activity_copy_limit) + token = self.create_jwt_token("follower", "*", feed_id="*") + return self.post("follow_many/", token, params=params, data=follows) + + def unfollow_many(self, unfollows): + params = None + + token = self.create_jwt_token("follower", "*", feed_id="*") + return self.post("unfollow_many/", token, params=params, data=unfollows) + + def update_activities(self, activities): + if not isinstance(activities, (list, tuple, set)): + raise TypeError("Activities parameter should be of type list") + + auth_token = self.create_jwt_token("activities", "*", feed_id="*") + data = dict(activities=activities) + return self.post("activities/", auth_token, data=data) + + def update_activity(self, activity): + return self.update_activities([activity]) + + def get_activities( + self, ids=None, foreign_id_times=None, enrich=False, reactions=None, **params + ): + auth_token = self.create_jwt_token("activities", "*", feed_id="*") + + if ids is None and foreign_id_times is None: + raise TypeError( + "One the parameters ids or foreign_id_time must be provided and not None" + ) + + if ids is not None and foreign_id_times is not None: + raise TypeError( + "At most one of the parameters ids or foreign_id_time must be provided" + ) + + endpoint = "activities/" + if enrich or reactions is not None: + endpoint = "enrich/" + endpoint + + query_params = {**params} + + if ids is not None: + query_params["ids"] = ",".join(ids) + + if foreign_id_times is not None: + validate_foreign_id_time(foreign_id_times) + foreign_ids, timestamps = zip(*foreign_id_times) + timestamps = map(_datetime_encoder, timestamps) + query_params["foreign_ids"] = ",".join(foreign_ids) + query_params["timestamps"] = ",".join(timestamps) + + query_params.update(get_reaction_params(reactions)) + + return self.get(endpoint, auth_token, params=query_params) + + def activity_partial_update( + self, id=None, foreign_id=None, time=None, set=None, 
unset=None + ): + if id is None and (foreign_id is None or time is None): + raise TypeError( + "The id or foreign_id+time parameters must be provided and not be None" + ) + if id is not None and (foreign_id is not None or time is not None): + raise TypeError( + "Only one of the id or the foreign_id+time parameters can be provided" + ) + + data = {"set": set or {}, "unset": unset or []} + + if id is not None: + data["id"] = id + else: + data["foreign_id"] = foreign_id + data["time"] = time + + return self.activities_partial_update(updates=[data]) + + def activities_partial_update(self, updates=None): + + auth_token = self.create_jwt_token("activities", "*", feed_id="*") + + data = {"changes": updates or []} + + return self.post("activity/", auth_token, data=data) + + def create_redirect_url(self, target_url, user_id, events): + # generate the JWT token + auth_token = self.create_jwt_token( + "redirect_and_track", "*", "*", user_id=user_id + ) + # setup the params + params = dict(auth_type="jwt", authorization=auth_token, url=target_url) + params["api_key"] = self.api_key + params["events"] = json.dumps(events) + url = f"{self.base_analytics_url}redirect/" + # we get the url from the prepare request, this skips issues with + # python's urlencode implementation + request = Request("GET", url, params=params) + prepared_request = request.prepare() + # validate the target url is valid + Request("GET", target_url).prepare() + return prepared_request.url + + def track_engagements(self, engagements): + + auth_token = self.create_jwt_token("*", "*", feed_id="*") + self.post( + "engagement/", + auth_token, + data={"content_list": engagements}, + service_name="analytics", + ) + + def track_impressions(self, impressions): + + auth_token = self.create_jwt_token("*", "*", feed_id="*") + self.post("impression/", auth_token, data=impressions, service_name="analytics") + + def og(self, target_url): + auth_token = self.create_jwt_token("*", "*", feed_id="*") + params = {"url": target_url} + return self.get("og/", auth_token, params=params) + + def follow_stats(self, feed_id, followers_slugs=None, following_slugs=None): + auth_token = self.create_jwt_token("*", "*", feed_id="*") + params = { + "followers": feed_id, + "following": feed_id, + } + + if followers_slugs: + params["followers_slugs"] = ( + ",".join(followers_slugs) + if isinstance(followers_slugs, list) + else followers_slugs + ) + + if following_slugs: + params["following_slugs"] = ( + ",".join(following_slugs) + if isinstance(following_slugs, list) + else following_slugs + ) + + return self.get("stats/follow/", auth_token, params=params) + + def _make_request( + self, + method, + relative_url, + signature, + service_name="api", + params=None, + data=None, + ): + params = params or {} + data = data or {} + serialized = None + default_params = self.get_default_params() + default_params.update(params) + headers = self.get_default_header() + headers["Authorization"] = signature + headers["stream-auth-type"] = "jwt" + + if not relative_url.endswith("/"): + relative_url += "/" + + url = self.get_full_url(service_name, relative_url) + + if method.__name__ in ["post", "put", "delete"]: + serialized = serializer.dumps(data) + response = method( + url, + data=serialized, + headers=headers, + params=default_params, + timeout=self.timeout, + ) + # remove JWT from logs + headers_to_log = headers.copy() + headers_to_log.pop("Authorization", None) + logger.debug( + f"stream api call {response.url}, headers {headers_to_log} data {data}" + ) + return 
self._parse_response(response) + + def _parse_response(self, response): + try: + parsed_result = serializer.loads(response.text) + except ValueError: + parsed_result = None + if ( + parsed_result is None + or parsed_result.get("exception") + or response.status_code >= 500 + ): + self.raise_exception(parsed_result, status_code=response.status_code) + + return parsed_result diff --git a/stream/collections.py b/stream/collections.py deleted file mode 100644 index 967d5c6..0000000 --- a/stream/collections.py +++ /dev/null @@ -1,128 +0,0 @@ -class Collections: - def __init__(self, client, token): - """ - Used to manipulate data at the 'meta' endpoint - :param client: the api client - :param token: the token - """ - - self.client = client - self.token = token - - def create_reference(self, collection_name=None, id=None, entry=None): - if isinstance(entry, (dict,)): - _collection = entry["collection"] - _id = entry["id"] - elif collection_name is not None and id is not None: - _collection = collection_name - _id = id - else: - raise ValueError( - "must call with collection_name and id or with entry arguments" - ) - return "SO:%s:%s" % (_collection, _id) - - def upsert(self, collection_name, data): - """ - "Insert new or update existing data. - :param collection_name: Collection Name i.e 'user' - :param data: list of dictionaries - :return: http response, 201 if successful along with data posted. - - **Example**:: - client.collections.upsert('user', [{"id": '1', "name": "Juniper", "hobbies": ["Playing", "Sleeping", "Eating"]}, - {"id": '2', "name": "Ruby", "interests": ["Sunbeams", "Surprise Attacks"]}]) - """ - - if not isinstance(data, list): - data = [data] - - data_json = {collection_name: data} - - return self.client.post( - "collections/", - service_name="api", - signature=self.token, - data={"data": data_json}, - ) - - def select(self, collection_name, ids): - """ - Retrieve data from meta endpoint, can include data you've uploaded or personalization/analytic data - created by the stream team. - :param collection_name: Collection Name i.e 'user' - :param ids: list of ids of feed group i.e [123,456] - :return: meta data as json blob - - **Example**:: - client.collections.select('user', 1) - client.collections.select('user', [1,2,3]) - """ - - if not isinstance(ids, list): - ids = [ids] - - foreign_ids = ",".join( - "%s:%s" % (collection_name, k) for i, k in enumerate(ids) - ) - - return self.client.get( - "collections/", - service_name="api", - params={"foreign_ids": foreign_ids}, - signature=self.token, - ) - - def delete_many(self, collection_name, ids): - """ - Delete data from meta. - :param collection_name: Collection Name i.e 'user' - :param ids: list of ids to delete i.e [123,456] - :return: data that was deleted if successful or not. 
- - **Example**:: - client.collections.delete('user', '1') - client.collections.delete('user', ['1','2','3']) - """ - - if not isinstance(ids, list): - ids = [ids] - ids = [str(i) for i in ids] - - params = {"collection_name": collection_name, "ids": ids} - - return self.client.delete( - "collections/", service_name="api", params=params, signature=self.token - ) - - def add(self, collection_name, data, id=None, user_id=None): - payload = dict(id=id, data=data, user_id=user_id) - return self.client.post( - "collections/%s" % collection_name, - service_name="api", - signature=self.token, - data=payload, - ) - - def get(self, collection_name, id): - return self.client.get( - "collections/%s/%s" % (collection_name, id), - service_name="api", - signature=self.token, - ) - - def update(self, collection_name, id, data=None): - payload = dict(data=data) - return self.client.put( - "collections/%s/%s" % (collection_name, id), - service_name="api", - signature=self.token, - data=payload, - ) - - def delete(self, collection_name, id): - return self.client.delete( - "collections/%s/%s" % (collection_name, id), - service_name="api", - signature=self.token, - ) diff --git a/stream/collections/__init__.py b/stream/collections/__init__.py new file mode 100644 index 0000000..8264c83 --- /dev/null +++ b/stream/collections/__init__.py @@ -0,0 +1 @@ +from .collections import AsyncCollections, Collections diff --git a/stream/collections/base.py b/stream/collections/base.py new file mode 100644 index 0000000..44e091b --- /dev/null +++ b/stream/collections/base.py @@ -0,0 +1,100 @@ +from abc import ABC, abstractmethod + + +class AbstractCollection(ABC): + @abstractmethod + def create_reference(self, collection_name=None, id=None, entry=None): + pass + + @abstractmethod + def upsert(self, collection_name, data): + """ + "Insert new or update existing data. + :param collection_name: Collection Name i.e 'user' + :param data: list of dictionaries + :return: http response, 201 if successful along with data posted. + + **Example**:: + client.collections.upsert( + 'user', [ + {"id": '1', "name": "Juniper", "hobbies": ["Playing", "Sleeping", "Eating"]}, + {"id": '2', "name": "Ruby", "interests": ["Sunbeams", "Surprise Attacks"]} + ] + ) + """ + pass + + @abstractmethod + def select(self, collection_name, ids): + """ + Retrieve data from meta endpoint, can include data you've uploaded or + personalization/analytic data + created by the stream team. + :param collection_name: Collection Name i.e 'user' + :param ids: list of ids of feed group i.e [123,456] + :return: meta data as json blob + + **Example**:: + client.collections.select('user', 1) + client.collections.select('user', [1,2,3]) + """ + pass + + @abstractmethod + def delete_many(self, collection_name, ids): + """ + Delete data from meta. + :param collection_name: Collection Name i.e 'user' + :param ids: list of ids to delete i.e [123,456] + :return: data that was deleted if successful or not. 
+ + **Example**:: + client.collections.delete('user', '1') + client.collections.delete('user', ['1','2','3']) + """ + pass + + @abstractmethod + def add(self, collection_name, data, id=None, user_id=None): + pass + + @abstractmethod + def get(self, collection_name, id): + pass + + @abstractmethod + def update(self, collection_name, id, data=None): + pass + + @abstractmethod + def delete(self, collection_name, id): + pass + + +class BaseCollection(AbstractCollection, ABC): + + URL = "collections/" + SERVICE_NAME = "api" + + def __init__(self, client, token): + """ + Used to manipulate data at the 'meta' endpoint + :param client: the api client + :param token: the token + """ + + self.client = client + self.token = token + + def create_reference(self, collection_name=None, id=None, entry=None): + if isinstance(entry, dict): + _collection = entry["collection"] + _id = entry["id"] + elif collection_name is not None and id is not None: + _collection = collection_name + _id = id + else: + raise ValueError( + "must call with collection_name and id or with entry arguments" + ) + return f"SO:{_collection}:{_id}" diff --git a/stream/collections/collections.py b/stream/collections/collections.py new file mode 100644 index 0000000..eebc730 --- /dev/null +++ b/stream/collections/collections.py @@ -0,0 +1,148 @@ +from stream.collections.base import BaseCollection + + +class Collections(BaseCollection): + def upsert(self, collection_name, data): + if not isinstance(data, list): + data = [data] + + data_json = {collection_name: data} + + return self.client.post( + self.URL, + service_name=self.SERVICE_NAME, + signature=self.token, + data={"data": data_json}, + ) + + def select(self, collection_name, ids): + if not isinstance(ids, list): + ids = [ids] + + foreign_ids = ",".join(f"{collection_name}:{k}" for i, k in enumerate(ids)) + + return self.client.get( + self.URL, + service_name=self.SERVICE_NAME, + params={"foreign_ids": foreign_ids}, + signature=self.token, + ) + + def delete_many(self, collection_name, ids): + if not isinstance(ids, list): + ids = [ids] + ids = [str(i) for i in ids] + + params = {"collection_name": collection_name, "ids": ids} + + return self.client.delete( + self.URL, + service_name=self.SERVICE_NAME, + params=params, + signature=self.token, + ) + + def add(self, collection_name, data, id=None, user_id=None): + payload = dict(id=id, data=data, user_id=user_id) + return self.client.post( + f"{self.URL}/{collection_name}", + service_name=self.SERVICE_NAME, + signature=self.token, + data=payload, + ) + + def get(self, collection_name, id): + return self.client.get( + f"{self.URL}/{collection_name}/{id}", + service_name=self.SERVICE_NAME, + signature=self.token, + ) + + def update(self, collection_name, id, data=None): + payload = dict(data=data) + return self.client.put( + f"{self.URL}/{collection_name}/{id}", + service_name=self.SERVICE_NAME, + signature=self.token, + data=payload, + ) + + def delete(self, collection_name, id): + return self.client.delete( + f"{self.URL}/{collection_name}/{id}", + service_name=self.SERVICE_NAME, + signature=self.token, + ) + + +class AsyncCollections(BaseCollection): + async def upsert(self, collection_name, data): + if not isinstance(data, list): + data = [data] + + data_json = {collection_name: data} + + return await self.client.post( + self.URL, + service_name=self.SERVICE_NAME, + signature=self.token, + data={"data": data_json}, + ) + + async def select(self, collection_name, ids): + if not isinstance(ids, list): + ids = [ids] + + foreign_ids = 
",".join(f"{collection_name}:{k}" for i, k in enumerate(ids)) + + return await self.client.get( + self.URL, + service_name=self.SERVICE_NAME, + params={"foreign_ids": foreign_ids}, + signature=self.token, + ) + + async def delete_many(self, collection_name, ids): + if not isinstance(ids, list): + ids = [ids] + ids = [str(i) for i in ids] + + params = {"collection_name": collection_name, "ids": ids} + return await self.client.delete( + self.URL, + service_name=self.SERVICE_NAME, + params=params, + signature=self.token, + ) + + async def get(self, collection_name, id): + return await self.client.get( + f"{self.URL}/{collection_name}/{id}", + service_name=self.SERVICE_NAME, + signature=self.token, + ) + + async def add(self, collection_name, data, id=None, user_id=None): + payload = dict(id=id, data=data, user_id=user_id) + return await self.client.post( + f"{self.URL}/{collection_name}", + service_name=self.SERVICE_NAME, + signature=self.token, + data=payload, + ) + + async def update(self, collection_name, id, data=None): + payload = dict(data=data) + return await self.client.put( + f"{self.URL}/{collection_name}/{id}", + service_name=self.SERVICE_NAME, + signature=self.token, + data=payload, + ) + + async def delete(self, collection_name, id): + return await self.client.delete( + f"{self.URL}/{collection_name}/{id}", + service_name=self.SERVICE_NAME, + signature=self.token, + ) diff --git a/stream/exceptions.py b/stream/exceptions.py index cd35f2c..6b1f70a 100644 --- a/stream/exceptions.py +++ b/stream/exceptions.py @@ -8,10 +8,10 @@ def __init__(self, error_message, status_code=None): code = 1 def __repr__(self): - return "%s (%s)" % (self.__class__.__name__, self.detail) + return f"{self.__class__.__name__} ({self.detail})" def __unicode__(self): - return "%s (%s)" % (self.__class__.__name__, self.detail) + return f"{self.__class__.__name__} ({self.detail})" class ApiKeyException(StreamApiException): diff --git a/stream/feed.py b/stream/feed.py deleted file mode 100644 index 3ef9317..0000000 --- a/stream/feed.py +++ /dev/null @@ -1,233 +0,0 @@ -from stream.utils import ( - validate_feed_id, - validate_feed_slug, - validate_user_id, - get_reaction_params, -) - - -class Feed: - def __init__(self, client, feed_slug, user_id, token): - """ - Initializes the Feed class - - :param client: the api client - :param slug: the slug of the feed, ie user, flat, notification - :param user_id: the id of the user - :param token: the token - """ - self.client = client - self.slug = feed_slug - self.user_id = str(user_id) - self.id = "%s:%s" % (feed_slug, user_id) - self.token = token.decode("utf-8") if isinstance(token, bytes) else token - - self.feed_url = "feed/%s/" % self.id.replace(":", "/") - self.enriched_feed_url = "enrich/feed/%s/" % self.id.replace(":", "/") - self.feed_targets_url = "feed_targets/%s/" % self.id.replace(":", "/") - self.feed_together = self.id.replace(":", "") - self.signature = self.feed_together + " " + self.token - - def create_scope_token(self, resource, action): - """ - creates the JWT token to perform an action on a owned resource - """ - return self.client.create_jwt_token( - resource, action, feed_id=self.feed_together - ) - - def get_readonly_token(self): - """ - creates the JWT token to perform readonly operations - """ - return self.create_scope_token("*", "read") - - def add_activity(self, activity_data): - """ - Adds an activity to the feed, this will also trigger an update - to all the feeds which follow this feed - - :param activity_data: a dict with the activity 
data - - **Example**:: - - activity_data = {'actor': 1, 'verb': 'tweet', 'object': 1} - activity_id = feed.add_activity(activity_data) - """ - if activity_data.get("to") and not isinstance( - activity_data.get("to"), (list, tuple, set) - ): - raise TypeError( - "please provide the activity's to field as a list not a string" - ) - - if activity_data.get("to"): - activity_data = activity_data.copy() - activity_data["to"] = self.add_to_signature(activity_data["to"]) - - token = self.create_scope_token("feed", "write") - return self.client.post(self.feed_url, data=activity_data, signature=token) - - def add_activities(self, activity_list): - """ - Adds a list of activities to the feed - - :param activity_list: a list with the activity data dicts - - **Example**:: - - activity_data = [ - {'actor': 1, 'verb': 'tweet', 'object': 1}, - {'actor': 2, 'verb': 'watch', 'object': 2}, - ] - result = feed.add_activities(activity_data) - """ - activities = [] - for activity_data in activity_list: - activity_data = activity_data.copy() - activities.append(activity_data) - if activity_data.get("to"): - activity_data["to"] = self.add_to_signature(activity_data["to"]) - token = self.create_scope_token("feed", "write") - data = dict(activities=activities) - if activities: - return self.client.post(self.feed_url, data=data, signature=token) - return None - - def remove_activity(self, activity_id=None, foreign_id=None): - """ - Removes an activity from the feed - - :param activity_id: the activity id to remove from this feed - (note this will also remove the activity from feeds which follow this feed) - :param foreign_id: the foreign id you provided when adding the activity - """ - identifier = activity_id or foreign_id - if not identifier: - raise ValueError("please either provide activity_id or foreign_id") - url = self.feed_url + "%s/" % identifier - params = dict() - token = self.create_scope_token("feed", "delete") - if foreign_id is not None: - params["foreign_id"] = "1" - return self.client.delete(url, signature=token, params=params) - - def get(self, enrich=False, reactions=None, **params): - """ - Get the activities in this feed - - **Example**:: - - # fast pagination using id filtering - feed.get(limit=10, id_lte=100292310) - - # slow pagination using offset - feed.get(limit=10, offset=10) - """ - for field in ["mark_read", "mark_seen"]: - value = params.get(field) - if isinstance(value, (list, tuple)): - params[field] = ",".join(value) - token = self.create_scope_token("feed", "read") - - if enrich or reactions is not None: - feed_url = self.enriched_feed_url - else: - feed_url = self.feed_url - - params.update(get_reaction_params(reactions)) - return self.client.get(feed_url, params=params, signature=token) - - def follow( - self, target_feed_slug, target_user_id, activity_copy_limit=None, **extra_data - ): - """ - Follows the given feed - - :param activity_copy_limit: how many activities should be copied from target feed - :param target_feed_slug: the slug of the target feed - :param target_user_id: the user id - """ - target_feed_slug = validate_feed_slug(target_feed_slug) - target_user_id = validate_user_id(target_user_id) - target_feed_id = "%s:%s" % (target_feed_slug, target_user_id) - url = self.feed_url + "follows/" - data = { - "target": target_feed_id, - "target_token": self.client.feed(target_feed_slug, target_user_id).token, - } - if activity_copy_limit is not None: - data["activity_copy_limit"] = activity_copy_limit - token = self.create_scope_token("follower", "write") - 
data.update(extra_data) - return self.client.post(url, data=data, signature=token) - - def unfollow(self, target_feed_slug, target_user_id, keep_history=False): - """ - Unfollow the given feed - """ - target_feed_slug = validate_feed_slug(target_feed_slug) - target_user_id = validate_user_id(target_user_id) - target_feed_id = "%s:%s" % (target_feed_slug, target_user_id) - token = self.create_scope_token("follower", "delete") - url = self.feed_url + "follows/%s/" % target_feed_id - params = {} - if keep_history: - params["keep_history"] = True - return self.client.delete(url, signature=token, params=params) - - def followers(self, offset=0, limit=25, feeds=None): - """ - Lists the followers for the given feed - """ - feeds = ",".join(feeds) if feeds is not None else "" - params = {"limit": limit, "offset": offset, "filter": feeds} - url = self.feed_url + "followers/" - token = self.create_scope_token("follower", "read") - return self.client.get(url, params=params, signature=token) - - def following(self, offset=0, limit=25, feeds=None): - """ - List the feeds which this feed is following - """ - feeds = ",".join(feeds) if feeds is not None else "" - params = {"offset": offset, "limit": limit, "filter": feeds} - url = self.feed_url + "follows/" - token = self.create_scope_token("follower", "read") - return self.client.get(url, params=params, signature=token) - - def add_to_signature(self, recipients): - """ - Takes a list of recipients such as ['user:1', 'user:2'] - and turns it into a list with the tokens included - ['user:1 token', 'user:2 token'] - """ - data = [] - for recipient in recipients: - validate_feed_id(recipient) - feed_slug, user_id = recipient.split(":") - feed = self.client.feed(feed_slug, user_id) - data.append("%s %s" % (recipient, feed.token)) - return data - - def update_activity_to_targets( - self, - foreign_id, - time, - new_targets=None, - added_targets=None, - removed_targets=None, - ): - data = {"foreign_id": foreign_id, "time": time} - - if new_targets is not None: - data["new_targets"] = new_targets - if added_targets is not None: - data["added_targets"] = added_targets - if removed_targets is not None: - data["removed_targets"] = removed_targets - - url = self.feed_targets_url + "activity_to_targets/" - - token = self.create_scope_token("feed_targets", "write") - return self.client.post(url, data=data, signature=token) diff --git a/stream/feed/__init__.py b/stream/feed/__init__.py new file mode 100644 index 0000000..1f3c784 --- /dev/null +++ b/stream/feed/__init__.py @@ -0,0 +1 @@ +from .feeds import AsyncFeed, Feed diff --git a/stream/feed/base.py b/stream/feed/base.py new file mode 100644 index 0000000..dc76fce --- /dev/null +++ b/stream/feed/base.py @@ -0,0 +1,172 @@ +from abc import ABC, abstractmethod + +from stream.utils import validate_feed_id + + +class AbstractFeed(ABC): + @abstractmethod + def create_scope_token(self, resource, action): + """ + creates the JWT token to perform an action on a owned resource + """ + pass + + @abstractmethod + def get_readonly_token(self): + """ + creates the JWT token to perform readonly operations + """ + pass + + @abstractmethod + def add_activity(self, activity_data): + """ + Adds an activity to the feed, this will also trigger an update + to all the feeds which follow this feed + + :param activity_data: a dict with the activity data + + **Example**:: + + activity_data = {'actor': 1, 'verb': 'tweet', 'object': 1} + activity_id = feed.add_activity(activity_data) + """ + pass + + @abstractmethod + def 
add_activities(self, activity_list): + """ + Adds a list of activities to the feed + + :param activity_list: a list with the activity data dicts + + **Example**:: + + activity_data = [ + {'actor': 1, 'verb': 'tweet', 'object': 1}, + {'actor': 2, 'verb': 'watch', 'object': 2}, + ] + result = feed.add_activities(activity_data) + """ + pass + + @abstractmethod + def remove_activity(self, activity_id=None, foreign_id=None): + """ + Removes an activity from the feed + + :param activity_id: the activity id to remove from this feed + (note this will also remove the activity from feeds which follow this feed) + :param foreign_id: the foreign id you provided when adding the activity + """ + pass + + @abstractmethod + def get(self, enrich=False, reactions=None, **params): + """ + Get the activities in this feed + + **Example**:: + + # fast pagination using id filtering + feed.get(limit=10, id_lte=100292310) + + # slow pagination using offset + feed.get(limit=10, offset=10) + """ + pass + + @abstractmethod + def follow( + self, target_feed_slug, target_user_id, activity_copy_limit=None, **extra_data + ): + """ + Follows the given feed + + :param activity_copy_limit: how many activities should be copied from target + feed + :param target_feed_slug: the slug of the target feed + :param target_user_id: the user id + """ + pass + + @abstractmethod + def unfollow(self, target_feed_slug, target_user_id, keep_history=False): + """ + Unfollow the given feed + """ + pass + + @abstractmethod + def followers(self, offset=0, limit=25, feeds=None): + """ + Lists the followers for the given feed + """ + pass + + @abstractmethod + def following(self, offset=0, limit=25, feeds=None): + """ + List the feeds which this feed is following + """ + pass + + @abstractmethod + def add_to_signature(self, recipients): + """ + Takes a list of recipients such as ['user:1', 'user:2'] + and turns it into a list with the tokens included + ['user:1 token', 'user:2 token'] + """ + pass + + @abstractmethod + def update_activity_to_targets( + self, + foreign_id, + time, + new_targets=None, + added_targets=None, + removed_targets=None, + ): + pass + + +class BaseFeed(AbstractFeed, ABC): + def __init__(self, client, feed_slug, user_id, token): + """ + Initializes the Feed class + + :param client: the api client + :param feed_slug: the slug of the feed, ie user, flat, notification + :param user_id: the id of the user + :param token: the token + """ + self.client = client + self.slug = feed_slug + self.user_id = f"{user_id}" + self.id = f"{feed_slug}:{user_id}" + self.token = token.decode("utf-8") if isinstance(token, bytes) else token + _id = self.id.replace(":", "/") + self.feed_url = f"feed/{_id}/" + self.enriched_feed_url = f"enrich/feed/{_id}/" + self.feed_targets_url = f"feed_targets/{_id}/" + self.feed_together = self.id.replace(":", "") + self.signature = f"{self.feed_together} {self.token}" + + def create_scope_token(self, resource, action): + return self.client.create_jwt_token( + resource, action, feed_id=self.feed_together + ) + + def get_readonly_token(self): + return self.create_scope_token("*", "read") + + def add_to_signature(self, recipients): + data = [] + for recipient in recipients: + validate_feed_id(recipient) + feed_slug, user_id = recipient.split(":") + feed = self.client.feed(feed_slug, user_id) + data.append(f"{recipient} {feed.token}") + return data diff --git a/stream/feed/feeds.py b/stream/feed/feeds.py new file mode 100644 index 0000000..5305427 --- /dev/null +++ b/stream/feed/feeds.py @@ -0,0 +1,238 @@ +from 
stream.feed.base import BaseFeed +from stream.utils import get_reaction_params, validate_feed_slug, validate_user_id + + +class Feed(BaseFeed): + def add_activity(self, activity_data): + if activity_data.get("to") and not isinstance( + activity_data.get("to"), (list, tuple, set) + ): + raise TypeError( + "please provide the activity's to field as a list not a string" + ) + + if activity_data.get("to"): + activity_data = activity_data.copy() + activity_data["to"] = self.add_to_signature(activity_data["to"]) + + token = self.create_scope_token("feed", "write") + return self.client.post(self.feed_url, data=activity_data, signature=token) + + def add_activities(self, activity_list): + activities = [] + for activity_data in activity_list: + activity_data = activity_data.copy() + activities.append(activity_data) + if activity_data.get("to"): + activity_data["to"] = self.add_to_signature(activity_data["to"]) + token = self.create_scope_token("feed", "write") + data = dict(activities=activities) + if activities: + return self.client.post(self.feed_url, data=data, signature=token) + return None + + def remove_activity(self, activity_id=None, foreign_id=None): + identifier = activity_id or foreign_id + if not identifier: + raise ValueError("please either provide activity_id or foreign_id") + url = f"{self.feed_url}{identifier}/" + params = dict() + token = self.create_scope_token("feed", "delete") + if foreign_id is not None: + params["foreign_id"] = "1" + return self.client.delete(url, signature=token, params=params) + + def get(self, enrich=False, reactions=None, **params): + for field in ["mark_read", "mark_seen"]: + value = params.get(field) + if isinstance(value, (list, tuple)): + params[field] = ",".join(value) + token = self.create_scope_token("feed", "read") + + if enrich or reactions is not None: + feed_url = self.enriched_feed_url + else: + feed_url = self.feed_url + + params.update(get_reaction_params(reactions)) + return self.client.get(feed_url, params=params, signature=token) + + def follow( + self, target_feed_slug, target_user_id, activity_copy_limit=None, **extra_data + ): + target_feed_slug = validate_feed_slug(target_feed_slug) + target_user_id = validate_user_id(target_user_id) + target_feed_id = f"{target_feed_slug}:{target_user_id}" + url = f"{self.feed_url}follows/" + target_token = self.client.feed(target_feed_slug, target_user_id).token + data = {"target": target_feed_id, "target_token": target_token} + if activity_copy_limit is not None: + data["activity_copy_limit"] = activity_copy_limit + token = self.create_scope_token("follower", "write") + data.update(extra_data) + return self.client.post(url, data=data, signature=token) + + def unfollow(self, target_feed_slug, target_user_id, keep_history=False): + target_feed_slug = validate_feed_slug(target_feed_slug) + target_user_id = validate_user_id(target_user_id) + target_feed_id = f"{target_feed_slug}:{target_user_id}" + token = self.create_scope_token("follower", "delete") + url = f"{self.feed_url}follows/{target_feed_id}/" + params = {} + if keep_history: + params["keep_history"] = True + return self.client.delete(url, signature=token, params=params) + + def followers(self, offset=0, limit=25, feeds=None): + feeds = ",".join(feeds) if feeds is not None else "" + params = {"limit": limit, "offset": offset, "filter": feeds} + url = f"{self.feed_url}followers/" + token = self.create_scope_token("follower", "read") + return self.client.get(url, params=params, signature=token) + + def following(self, offset=0, limit=25, 
feeds=None): + feeds = ",".join(feeds) if feeds is not None else "" + params = {"offset": offset, "limit": limit, "filter": feeds} + url = f"{self.feed_url}follows/" + token = self.create_scope_token("follower", "read") + return self.client.get(url, params=params, signature=token) + + def update_activity_to_targets( + self, + foreign_id, + time, + new_targets=None, + added_targets=None, + removed_targets=None, + ): + data = {"foreign_id": foreign_id, "time": time} + + if new_targets is not None: + data["new_targets"] = new_targets + if added_targets is not None: + data["added_targets"] = added_targets + if removed_targets is not None: + data["removed_targets"] = removed_targets + + url = f"{self.feed_targets_url}activity_to_targets/" + token = self.create_scope_token("feed_targets", "write") + return self.client.post(url, data=data, signature=token) + + +class AsyncFeed(BaseFeed): + async def add_activity(self, activity_data): + if activity_data.get("to") and not isinstance( + activity_data.get("to"), (list, tuple, set) + ): + raise TypeError( + "please provide the activity's to field as a list not a string" + ) + + if activity_data.get("to"): + activity_data = activity_data.copy() + activity_data["to"] = self.add_to_signature(activity_data["to"]) + + token = self.create_scope_token("feed", "write") + return await self.client.post( + self.feed_url, data=activity_data, signature=token + ) + + async def add_activities(self, activity_list): + activities = [] + for activity_data in activity_list: + activity_data = activity_data.copy() + activities.append(activity_data) + if activity_data.get("to"): + activity_data["to"] = self.add_to_signature(activity_data["to"]) + token = self.create_scope_token("feed", "write") + data = dict(activities=activities) + if not activities: + return + + return await self.client.post(self.feed_url, data=data, signature=token) + + async def remove_activity(self, activity_id=None, foreign_id=None): + identifier = activity_id or foreign_id + if not identifier: + raise ValueError("please either provide activity_id or foreign_id") + url = f"{self.feed_url}{identifier}/" + params = dict() + token = self.create_scope_token("feed", "delete") + if foreign_id is not None: + params["foreign_id"] = "1" + return await self.client.delete(url, signature=token, params=params) + + async def get(self, enrich=False, reactions=None, **params): + for field in ["mark_read", "mark_seen"]: + value = params.get(field) + if isinstance(value, (list, tuple)): + params[field] = ",".join(value) + + token = self.create_scope_token("feed", "read") + if enrich or reactions is not None: + feed_url = self.enriched_feed_url + else: + feed_url = self.feed_url + + params.update(get_reaction_params(reactions)) + return await self.client.get(feed_url, params=params, signature=token) + + async def follow( + self, target_feed_slug, target_user_id, activity_copy_limit=None, **extra_data + ): + target_feed_slug = validate_feed_slug(target_feed_slug) + target_user_id = validate_user_id(target_user_id) + target_feed_id = f"{target_feed_slug}:{target_user_id}" + url = f"{self.feed_url}follows/" + target_token = self.client.feed(target_feed_slug, target_user_id).token + data = {"target": target_feed_id, "target_token": target_token} + if activity_copy_limit is not None: + data["activity_copy_limit"] = activity_copy_limit + token = self.create_scope_token("follower", "write") + data.update(extra_data) + return await self.client.post(url, data=data, signature=token) + + async def unfollow(self, target_feed_slug, 
target_user_id, keep_history=False): + target_feed_slug = validate_feed_slug(target_feed_slug) + target_user_id = validate_user_id(target_user_id) + target_feed_id = f"{target_feed_slug}:{target_user_id}" + token = self.create_scope_token("follower", "delete") + url = f"{self.feed_url}follows/{target_feed_id}/" + params = {} + if keep_history: + params["keep_history"] = True + return await self.client.delete(url, signature=token, params=params) + + async def followers(self, offset=0, limit=25, feeds=None): + feeds = ",".join(feeds) if feeds is not None else "" + params = {"limit": limit, "offset": offset, "filter": feeds} + url = f"{self.feed_url}followers/" + token = self.create_scope_token("follower", "read") + return await self.client.get(url, params=params, signature=token) + + async def following(self, offset=0, limit=25, feeds=None): + feeds = ",".join(feeds) if feeds is not None else "" + params = {"offset": offset, "limit": limit, "filter": feeds} + url = f"{self.feed_url}follows/" + token = self.create_scope_token("follower", "read") + return await self.client.get(url, params=params, signature=token) + + async def update_activity_to_targets( + self, + foreign_id, + time, + new_targets=None, + added_targets=None, + removed_targets=None, + ): + data = {"foreign_id": foreign_id, "time": time} + if new_targets is not None: + data["new_targets"] = new_targets + if added_targets is not None: + data["added_targets"] = added_targets + if removed_targets is not None: + data["removed_targets"] = removed_targets + + url = f"{self.feed_targets_url}activity_to_targets/" + token = self.create_scope_token("feed_targets", "write") + return await self.client.post(url, data=data, signature=token) diff --git a/stream/personalization.py b/stream/personalization.py deleted file mode 100644 index 77f3174..0000000 --- a/stream/personalization.py +++ /dev/null @@ -1,66 +0,0 @@ -class Personalization: - def __init__(self, client, token): - """ - Methods to interact with personalized feeds. - :param client: the api client - :param token: the token - """ - - self.client = client - self.token = token - - def get(self, resource, **params): - """ - Get personalized activities for this feed - :param resource: personalized resource endpoint i.e "follow_recommendations" - :param params: params to pass to url i.e user_id = "user:123" - :return: personalized feed - - **Example**:: - personalization.get('follow_recommendations', user_id=123, limit=10, offset=10) - """ - - return self.client.get( - resource, - service_name="personalization", - params=params, - signature=self.token, - ) - - def post(self, resource, **params): - """ - Generic function to post data to personalization endpoint - :param resource: personalized resource endpoint i.e "follow_recommendations" - :param params: params to pass to url (data is a reserved keyword to post to body) - - - **Example**:: - #Accept or reject recommendations. - personalization.post('follow_recommendations', user_id=123, accepted=[123,345], - rejected=[456]) - """ - - data = params["data"] or None - - return self.client.post( - resource, - service_name="personalization", - params=params, - signature=self.token, - data=data, - ) - - def delete(self, resource, **params): - """ - shortcut to delete metadata or activities - :param resource: personalized url endpoint typical "meta" - :param params: params to pass to url i.e user_id = "user:123" - :return: data that was deleted if successful or not. 
- """ - - return self.client.delete( - resource, - service_name="personalization", - params=params, - signature=self.token, - ) diff --git a/stream/personalization/__init__.py b/stream/personalization/__init__.py new file mode 100644 index 0000000..99dfa3b --- /dev/null +++ b/stream/personalization/__init__.py @@ -0,0 +1 @@ +from .personalizations import AsyncPersonalization, Personalization diff --git a/stream/personalization/base.py b/stream/personalization/base.py new file mode 100644 index 0000000..730d78f --- /dev/null +++ b/stream/personalization/base.py @@ -0,0 +1,30 @@ +from abc import ABC, abstractmethod + + +class AbstractPersonalization(ABC): + @abstractmethod + def get(self, resource, **params): + pass + + @abstractmethod + def post(self, resource, **params): + pass + + @abstractmethod + def delete(self, resource, **params): + pass + + +class BasePersonalization(AbstractPersonalization, ABC): + + SERVICE_NAME = "personalization" + + def __init__(self, client, token): + """ + Methods to interact with personalized feeds. + :param client: the api client + :param token: the token + """ + + self.client = client + self.token = token diff --git a/stream/personalization/personalizations.py b/stream/personalization/personalizations.py new file mode 100644 index 0000000..61d9909 --- /dev/null +++ b/stream/personalization/personalizations.py @@ -0,0 +1,117 @@ +from stream.personalization.base import BasePersonalization + + +class Personalization(BasePersonalization): + def get(self, resource, **params): + """ + Get personalized activities for this feed + :param resource: personalized resource endpoint i.e "follow_recommendations" + :param params: params to pass to url i.e user_id = "user:123" + :return: personalized feed + + **Example**:: + personalization.get('follow_recommendations', user_id=123, limit=10, offset=10) + """ + + return self.client.get( + resource, + service_name=self.SERVICE_NAME, + params=params, + signature=self.token, + ) + + def post(self, resource, **params): + """ + Generic function to post data to personalization endpoint + :param resource: personalized resource endpoint i.e "follow_recommendations" + :param params: params to pass to url (data is a reserved keyword to post to body) + + + **Example**:: + #Accept or reject recommendations. + personalization.post('follow_recommendations', user_id=123, accepted=[123,345], + rejected=[456]) + """ + + data = params["data"] or None + + return self.client.post( + resource, + service_name=self.SERVICE_NAME, + params=params, + signature=self.token, + data=data, + ) + + def delete(self, resource, **params): + """ + shortcut to delete metadata or activities + :param resource: personalized url endpoint typical "meta" + :param params: params to pass to url i.e user_id = "user:123" + :return: data that was deleted if successful or not. 
+ """ + + return self.client.delete( + resource, + service_name=self.SERVICE_NAME, + params=params, + signature=self.token, + ) + + +class AsyncPersonalization(BasePersonalization): + async def get(self, resource, **params): + """ + Get personalized activities for this feed + :param resource: personalized resource endpoint i.e "follow_recommendations" + :param params: params to pass to url i.e user_id = "user:123" + :return: personalized feed + + **Example**:: + personalization.get('follow_recommendations', user_id=123, limit=10, offset=10) + """ + + return await self.client.get( + resource, + service_name=self.SERVICE_NAME, + params=params, + signature=self.token, + ) + + async def post(self, resource, **params): + """ + Generic function to post data to personalization endpoint + :param resource: personalized resource endpoint i.e "follow_recommendations" + :param params: params to pass to url (data is a reserved keyword to post to body) + + + **Example**:: + #Accept or reject recommendations. + personalization.post('follow_recommendations', user_id=123, accepted=[123,345], + rejected=[456]) + """ + + data = params["data"] or None + + return await self.client.post( + resource, + service_name=self.SERVICE_NAME, + params=params, + signature=self.token, + data=data, + ) + + async def delete(self, resource, **params): + """ + shortcut to delete metadata or activities + :param resource: personalized url endpoint typical "meta" + :param params: params to pass to url i.e user_id = "user:123" + :return: data that was deleted if successful or not. + """ + + return await self.client.delete( + resource, + service_name=self.SERVICE_NAME, + params=params, + signature=self.token, + ) diff --git a/stream/reactions.py b/stream/reactions.py deleted file mode 100644 index 88a60a4..0000000 --- a/stream/reactions.py +++ /dev/null @@ -1,89 +0,0 @@ -class Reactions: - def __init__(self, client, token): - self.client = client - self.token = token - - def add( - self, - kind, - activity_id, - user_id, - data=None, - target_feeds=None, - target_feeds_extra_data=None, - ): - payload = dict( - kind=kind, - activity_id=activity_id, - data=data, - target_feeds=target_feeds, - target_feeds_extra_data=target_feeds_extra_data, - user_id=user_id, - ) - return self.client.post( - "reaction/", service_name="api", signature=self.token, data=payload - ) - - def get(self, reaction_id): - return self.client.get( - "reaction/%s" % reaction_id, service_name="api", signature=self.token - ) - - def update(self, reaction_id, data=None, target_feeds=None): - payload = dict(data=data, target_feeds=target_feeds) - return self.client.put( - "reaction/%s" % reaction_id, - service_name="api", - signature=self.token, - data=payload, - ) - - def delete(self, reaction_id): - return self.client.delete( - "reaction/%s" % reaction_id, service_name="api", signature=self.token - ) - - def add_child( - self, - kind, - parent_id, - user_id, - data=None, - target_feeds=None, - target_feeds_extra_data=None, - ): - payload = dict( - kind=kind, - parent=parent_id, - data=data, - target_feeds=target_feeds, - target_feeds_extra_data=target_feeds_extra_data, - user_id=user_id, - ) - return self.client.post( - "reaction/", service_name="api", signature=self.token, data=payload - ) - - def filter(self, **params): - lookup_field = "" - lookup_value = "" - - kind = params.pop("kind", None) - - if "reaction_id" in params: - lookup_field = "reaction_id" - lookup_value = params.pop("reaction_id") - elif "activity_id" in params: - lookup_field = "activity_id" - 
lookup_value = params.pop("activity_id") - elif "user_id" in params: - lookup_field = "user_id" - lookup_value = params.pop("user_id") - - endpoint = "reaction/%s/%s/" % (lookup_field, lookup_value) - if kind is not None: - endpoint = "reaction/%s/%s/%s/" % (lookup_field, lookup_value, kind) - - return self.client.get( - endpoint, service_name="api", signature=self.token, params=params - ) diff --git a/stream/reactions/__init__.py b/stream/reactions/__init__.py new file mode 100644 index 0000000..e550051 --- /dev/null +++ b/stream/reactions/__init__.py @@ -0,0 +1 @@ +from .reaction import AsyncReactions, Reactions diff --git a/stream/reactions/base.py b/stream/reactions/base.py new file mode 100644 index 0000000..b83794e --- /dev/null +++ b/stream/reactions/base.py @@ -0,0 +1,75 @@ +from abc import ABC, abstractmethod + + +class AbstractReactions(ABC): + @abstractmethod + def add( + self, + kind, + activity_id, + user_id, + data=None, + target_feeds=None, + target_feeds_extra_data=None, + ): + pass + + @abstractmethod + def get(self, reaction_id): + pass + + @abstractmethod + def update(self, reaction_id, data=None, target_feeds=None): + pass + + @abstractmethod + def delete(self, reaction_id): + pass + + @abstractmethod + def add_child( + self, + kind, + parent_id, + user_id, + data=None, + target_feeds=None, + target_feeds_extra_data=None, + ): + pass + + @abstractmethod + def filter(self, **params): + pass + + +class BaseReactions(AbstractReactions, ABC): + + API_ENDPOINT = "reaction/" + SERVICE_NAME = "api" + + def __init__(self, client, token): + self.client = client + self.token = token + + def _prepare_endpoint_for_filter(self, **params): + lookup_field = "" + lookup_value = "" + + kind = params.pop("kind", None) + + if params.get("reaction_id"): + lookup_field = "reaction_id" + lookup_value = params.pop("reaction_id") + elif params.get("activity_id"): + lookup_field = "activity_id" + lookup_value = params.pop("activity_id") + elif params.get("user_id"): + lookup_field = "user_id" + lookup_value = params.pop("user_id") + + endpoint = f"{self.API_ENDPOINT}{lookup_field}/{lookup_value}/" + if kind is not None: + endpoint += f"{kind}/" + + return endpoint diff --git a/stream/reactions/reaction.py b/stream/reactions/reaction.py new file mode 100644 index 0000000..0466b21 --- /dev/null +++ b/stream/reactions/reaction.py @@ -0,0 +1,163 @@ +from stream.reactions.base import BaseReactions + + +class Reactions(BaseReactions): + def add( + self, + kind, + activity_id, + user_id, + data=None, + target_feeds=None, + target_feeds_extra_data=None, + ): + payload = dict( + kind=kind, + activity_id=activity_id, + data=data, + target_feeds=target_feeds, + target_feeds_extra_data=target_feeds_extra_data, + user_id=user_id, + ) + return self.client.post( + self.API_ENDPOINT, + service_name=self.SERVICE_NAME, + signature=self.token, + data=payload, + ) + + def get(self, reaction_id): + url = f"{self.API_ENDPOINT}{reaction_id}" + return self.client.get( + url, service_name=self.SERVICE_NAME, signature=self.token + ) + + def update(self, reaction_id, data=None, target_feeds=None): + payload = dict(data=data, target_feeds=target_feeds) + url = f"{self.API_ENDPOINT}{reaction_id}" + return self.client.put( + url, + service_name=self.SERVICE_NAME, + signature=self.token, + data=payload, + ) + + def delete(self, reaction_id): + url = f"{self.API_ENDPOINT}{reaction_id}" + return self.client.delete( + url, service_name=self.SERVICE_NAME, signature=self.token + ) + + def add_child( + self, + kind, + parent_id, + 
user_id, + data=None, + target_feeds=None, + target_feeds_extra_data=None, + ): + payload = dict( + kind=kind, + parent=parent_id, + data=data, + target_feeds=target_feeds, + target_feeds_extra_data=target_feeds_extra_data, + user_id=user_id, + ) + return self.client.post( + self.API_ENDPOINT, + service_name=self.SERVICE_NAME, + signature=self.token, + data=payload, + ) + + def filter(self, **params): + endpoint = self._prepare_endpoint_for_filter(**params) + return self.client.get( + endpoint, + service_name=self.SERVICE_NAME, + signature=self.token, + params=params, + ) + + +class AsyncReactions(BaseReactions): + async def add( + self, + kind, + activity_id, + user_id, + data=None, + target_feeds=None, + target_feeds_extra_data=None, + ): + payload = dict( + kind=kind, + activity_id=activity_id, + data=data, + target_feeds=target_feeds, + target_feeds_extra_data=target_feeds_extra_data, + user_id=user_id, + ) + return await self.client.post( + self.API_ENDPOINT, + service_name=self.SERVICE_NAME, + signature=self.token, + data=payload, + ) + + async def get(self, reaction_id): + url = f"{self.API_ENDPOINT}{reaction_id}" + return await self.client.get( + url, service_name=self.SERVICE_NAME, signature=self.token + ) + + async def update(self, reaction_id, data=None, target_feeds=None): + payload = dict(data=data, target_feeds=target_feeds) + url = f"{self.API_ENDPOINT}{reaction_id}" + return await self.client.put( + url, + service_name=self.SERVICE_NAME, + signature=self.token, + data=payload, + ) + + async def delete(self, reaction_id): + url = f"{self.API_ENDPOINT}{reaction_id}" + return await self.client.delete( + url, service_name=self.SERVICE_NAME, signature=self.token + ) + + async def add_child( + self, + kind, + parent_id, + user_id, + data=None, + target_feeds=None, + target_feeds_extra_data=None, + ): + payload = dict( + kind=kind, + parent=parent_id, + data=data, + target_feeds=target_feeds, + target_feeds_extra_data=target_feeds_extra_data, + user_id=user_id, + ) + return await self.client.post( + self.API_ENDPOINT, + service_name=self.SERVICE_NAME, + signature=self.token, + data=payload, + ) + + async def filter(self, **params): + endpoint = self._prepare_endpoint_for_filter(**params) + return await self.client.get( + endpoint, + service_name=self.SERVICE_NAME, + signature=self.token, + params=params, + ) diff --git a/stream/tests/conftest.py b/stream/tests/conftest.py new file mode 100644 index 0000000..88df1e2 --- /dev/null +++ b/stream/tests/conftest.py @@ -0,0 +1,73 @@ +import asyncio +import os +import sys + +import pytest + +from stream import connect + + +def wrapper(meth): + async def _parse_response(*args, **kwargs): + response = await meth(*args, **kwargs) + assert "duration" in response + return response + + return _parse_response + + +@pytest.fixture(scope="module") +def event_loop(): + """Create an instance of the default event loop for each test case.""" + loop = asyncio.get_event_loop_policy().new_event_loop() + yield loop + loop.close() + + +@pytest.fixture +async def async_client(): + key = os.getenv("STREAM_KEY") + secret = os.getenv("STREAM_SECRET") + if not key or not secret: + print( + "To run the tests the STREAM_KEY and STREAM_SECRET variables " + "need to be available. \n" + "Please create a pull request if you are an external " + "contributor, because these variables are automatically added " + "by Travis." 
+ ) + sys.exit(1) + + client = connect(key, secret, location="qa", timeout=30, use_async=True) + wrapper(client._parse_response) + yield client + + +@pytest.fixture +def user1(async_client): + return async_client.feed("user", "1") + + +@pytest.fixture +def user2(async_client): + return async_client.feed("user", "2") + + +@pytest.fixture +def aggregated2(async_client): + return async_client.feed("aggregated", "2") + + +@pytest.fixture +def aggregated3(async_client): + return async_client.feed("aggregated", "3") + + +@pytest.fixture +def topic(async_client): + return async_client.feed("topic", "1") + + +@pytest.fixture +def flat3(async_client): + return async_client.feed("flat", "3") diff --git a/stream/tests/test_async_client.py b/stream/tests/test_async_client.py new file mode 100644 index 0000000..c1dfdf4 --- /dev/null +++ b/stream/tests/test_async_client.py @@ -0,0 +1,1342 @@ +import asyncio +import random +from datetime import datetime, timedelta +from uuid import uuid1, uuid4 + +import pytest +import pytz +from dateutil.tz import tzlocal + +import stream +from stream.exceptions import ApiKeyException, InputException +from stream.tests.test_client import get_unique_postfix + + +def assert_first_activity_id_equal(activities, correct_activity_id): + activity_id = None + if activities: + activity_id = activities[0]["id"] + assert activity_id == correct_activity_id + + +def assert_first_activity_id_not_equal(activities, correct_activity_id): + activity_id = None + if activities: + activity_id = activities[0]["id"] + assert activity_id != correct_activity_id + + +def _get_first_aggregated_activity(activities): + try: + return activities[0]["activities"][0] + except IndexError: + pass + + +def _get_first_activity(activities): + try: + return activities[0] + except IndexError: + pass + + +def assert_datetime_almost_equal(a, b): + difference = abs(a - b) + if difference > timedelta(milliseconds=1): + assert a == b + + +def assert_clearly_not_equal(a, b): + difference = abs(a - b) + if difference < timedelta(milliseconds=1): + raise ValueError("the dates are too close") + + +async def _test_sleep(production_wait): + """ + when testing against a live API, sometimes we need a small sleep to + ensure data stability, however when testing locally the wait does + not need to be as long + :param production_wait: float, number of seconds to sleep when hitting real API + :return: None + """ + sleep_time = production_wait + await asyncio.sleep(sleep_time) + + +@pytest.mark.asyncio +async def test_update_activities_create(async_client): + activities = [ + { + "actor": "user:1", + "verb": "do", + "object": "object:1", + "foreign_id": "object:1", + "time": datetime.utcnow().isoformat(), + } + ] + + response = await async_client.update_activities(activities) + assert response + + +@pytest.mark.asyncio +async def test_add_activity(async_client): + feed = async_client.feed("user", "py1") + activity_data = {"actor": 1, "verb": "tweet", "object": 1} + response = await feed.add_activity(activity_data) + activity_id = response["id"] + response = await feed.get(limit=1) + activities = response["results"] + assert activities[0]["id"] == activity_id + + +@pytest.mark.asyncio +async def test_add_activity_to_inplace_change(async_client): + feed = async_client.feed("user", "py1") + team_feed = async_client.feed("user", "teamy") + activity_data = {"actor": 1, "verb": "tweet", "object": 1} + activity_data["to"] = [team_feed.id] + await feed.add_activity(activity_data) + assert activity_data["to"] == [team_feed.id] + + 
+@pytest.mark.asyncio +async def test_add_activities_to_inplace_change(async_client): + feed = async_client.feed("user", "py1") + team_feed = async_client.feed("user", "teamy") + activity_data = {"actor": 1, "verb": "tweet", "object": 1} + activity_data["to"] = [team_feed.id] + await feed.add_activities([activity_data]) + assert activity_data["to"] == [team_feed.id] + + +@pytest.mark.asyncio +async def test_add_activity_to(async_client): + # test for sending an activities to the team feed using to + feeds = ["user", "teamy", "team_follower"] + user_feed, team_feed, team_follower_feed = map( + lambda x: async_client.feed("user", x), feeds + ) + await team_follower_feed.follow(team_feed.slug, team_feed.user_id) + activity_data = {"actor": 1, "verb": "tweet", "object": 1, "to": [team_feed.id]} + activity = await user_feed.add_activity(activity_data) + activity_id = activity["id"] + + # see if the new activity is also in the team feed + response = await team_feed.get(limit=1) + activities = response["results"] + assert activities[0]["id"] == activity_id + assert activities[0]["origin"] is None + # see if the fanout process also works + response = await team_follower_feed.get(limit=1) + activities = response["results"] + assert activities[0]["id"] == activity_id + assert activities[0]["origin"] == team_feed.id + # and validate removing also works + await user_feed.remove_activity(activity["id"]) + # check the user pyto feed + response = await team_feed.get(limit=1) + activities = response["results"] + assert_first_activity_id_not_equal(activities, activity_id) + # and the flat feed + response = await team_follower_feed.get(limit=1) + activities = response["results"] + assert_first_activity_id_not_equal(activities, activity_id) + + +@pytest.mark.asyncio +async def test_remove_activity(user1): + activity_data = {"actor": 1, "verb": "tweet", "object": 1} + activity = await user1.add_activity(activity_data) + activity_id = activity["id"] + response = await user1.get(limit=8) + activities = response["results"] + assert len(activities) == 1 + + await user1.remove_activity(activity_id) + # verify that no activities were returned + response = await user1.get(limit=8) + activities = response["results"] + assert len(activities) == 0 + + +@pytest.mark.asyncio +async def test_remove_activity_by_foreign_id(user1): + activity_data = { + "actor": 1, + "verb": "tweet", + "object": 1, + "foreign_id": "tweet:10", + } + + await user1.add_activity(activity_data) + response = await user1.get(limit=8) + activities = response["results"] + assert len(activities) == 1 + assert activities[0]["id"] != "" + assert activities[0]["foreign_id"] == "tweet:10" + + await user1.remove_activity(foreign_id="tweet:10") + # verify that no activities were returned + response = await user1.get(limit=8) + activities = response["results"] + assert len(activities) == 0 + # verify this doesn't raise an error, but fails silently + await user1.remove_activity(foreign_id="tweet:unknownandmissing") + + +@pytest.mark.asyncio +async def test_add_activities(user1): + activity_data = [ + {"actor": 1, "verb": "tweet", "object": 1}, + {"actor": 2, "verb": "watch", "object": 2}, + ] + response = await user1.add_activities(activity_data) + activity_ids = [a["id"] for a in response["activities"]] + response = await user1.get(limit=2) + activities = response["results"] + get_activity_ids = [a["id"] for a in activities] + assert get_activity_ids == activity_ids[::-1] + + +@pytest.mark.asyncio +async def test_add_activities_to(async_client, user1): + 
pyto2 = async_client.feed("user", "pyto2") + pyto3 = async_client.feed("user", "pyto3") + + to = [pyto2.id, pyto3.id] + activity_data = [ + {"actor": 1, "verb": "tweet", "object": 1, "to": to}, + {"actor": 2, "verb": "watch", "object": 2, "to": to}, + ] + response = await user1.add_activities(activity_data) + activity_ids = [a["id"] for a in response["activities"]] + response = await user1.get(limit=2) + activities = response["results"] + get_activity_ids = [a["id"] for a in activities] + assert get_activity_ids == activity_ids[::-1] + # test first target + response = await pyto2.get(limit=2) + activities = response["results"] + get_activity_ids = [a["id"] for a in activities] + assert get_activity_ids == activity_ids[::-1] + # test second target + response = await pyto3.get(limit=2) + activities = response["results"] + get_activity_ids = [a["id"] for a in activities] + assert get_activity_ids == activity_ids[::-1] + + +@pytest.mark.asyncio +async def test_follow_and_source(async_client): + feed = async_client.feed("user", "test_follow") + agg_feed = async_client.feed("aggregated", "test_follow") + actor_id = random.randint(10, 100000) + activity_data = {"actor": actor_id, "verb": "tweet", "object": 1} + response = await feed.add_activity(activity_data) + activity_id = response["id"] + await agg_feed.follow(feed.slug, feed.user_id) + + response = await agg_feed.get(limit=3) + activities = response["results"] + activity = _get_first_aggregated_activity(activities) + activity_id_found = activity["id"] if activity is not None else None + assert activity["origin"] == feed.id + assert activity_id_found == activity_id + + +@pytest.mark.asyncio +async def test_empty_followings(async_client): + asocial = async_client.feed("user", "asocialpython") + followings = await asocial.following() + assert followings["results"] == [] + + +@pytest.mark.asyncio +async def test_get_followings(async_client): + social = async_client.feed("user", "psocial") + await social.follow("user", "apy") + await social.follow("user", "bpy") + await social.follow("user", "cpy") + followings = await social.following(offset=0, limit=2) + assert len(followings["results"]) == 2 + assert followings["results"][0]["feed_id"] == social.id + assert followings["results"][0]["target_id"] == "user:cpy" + followings = await social.following(offset=1, limit=2) + assert len(followings["results"]) == 2 + assert followings["results"][0]["feed_id"] == social.id + assert followings["results"][0]["target_id"] == "user:bpy" + + +@pytest.mark.asyncio +async def test_empty_followers(async_client): + asocial = async_client.feed("user", "asocialpython") + followers = await asocial.followers() + assert followers["results"] == [] + + +@pytest.mark.asyncio +async def test_get_followers(async_client): + social = async_client.feed("user", "psocial") + spammy1 = async_client.feed("user", "spammy1") + spammy2 = async_client.feed("user", "spammy2") + spammy3 = async_client.feed("user", "spammy3") + for feed in [spammy1, spammy2, spammy3]: + await feed.follow("user", social.user_id) + followers = await social.followers(offset=0, limit=2) + assert len(followers["results"]) == 2 + assert followers["results"][0]["feed_id"] == spammy3.id + assert followers["results"][0]["target_id"] == social.id + followers = await social.followers(offset=1, limit=2) + assert len(followers["results"]) == 2 + assert followers["results"][0]["feed_id"] == spammy2.id + assert followers["results"][0]["target_id"] == social.id + + +@pytest.mark.asyncio +async def 
test_empty_do_i_follow(async_client): + social = async_client.feed("user", "psocial") + await social.follow("user", "apy") + await social.follow("user", "bpy") + followings = await social.following(feeds=["user:missingpy"]) + assert followings["results"] == [] + + +@pytest.mark.asyncio +async def test_do_i_follow(async_client): + social = async_client.feed("user", "psocial") + await social.follow("user", "apy") + await social.follow("user", "bpy") + followings = await social.following(feeds=["user:apy"]) + assert len(followings["results"]) == 1 + assert followings["results"][0]["feed_id"] == social.id + assert followings["results"][0]["target_id"] == "user:apy" + + +@pytest.mark.asyncio +async def test_update_activity_to_targets(user1): + now = datetime.utcnow().isoformat() + foreign_id = "user:1" + activity_data = { + "actor": 1, + "verb": "tweet", + "object": 1, + "foreign_id": foreign_id, + "time": now, + "to": ["user:1", "user:2"], + } + await user1.add_activity(activity_data) + + ret = await user1.update_activity_to_targets( + foreign_id, now, new_targets=["user:3", "user:2"] + ) + assert len(ret["activity"]["to"]) == 2 + assert "user:2" in ret["activity"]["to"] + assert "user:3" in ret["activity"]["to"] + + ret = await user1.update_activity_to_targets( + foreign_id, + now, + added_targets=["user:4", "user:5"], + removed_targets=["user:3"], + ) + assert len(ret["activity"]["to"]) == 3 + assert "user:2" in ret["activity"]["to"] + assert "user:4" in ret["activity"]["to"] + assert "user:5" in ret["activity"]["to"] + + +@pytest.mark.asyncio +async def test_get(user1): + activity_data = {"actor": 1, "verb": "tweet", "object": 1} + response1 = await user1.add_activity(activity_data) + activity_id = response1["id"] + activity_data = {"actor": 2, "verb": "add", "object": 2} + response2 = await user1.add_activity(activity_data) + activity_id_two = response2["id"] + activity_data = {"actor": 3, "verb": "watch", "object": 2} + response3 = await user1.add_activity(activity_data) + activity_id_three = response3["id"] + response = await user1.get(limit=2) + activities = response["results"] + # verify the first two results + assert len(activities) == 2 + assert activities[0]["id"] == activity_id_three + assert activities[1]["id"] == activity_id_two + # try offset based + response = await user1.get(limit=2, offset=1) + activities = response["results"] + assert activities[0]["id"] == activity_id_two + # try id_lt based + response = await user1.get(limit=2, id_lt=activity_id_two) + activities = response["results"] + assert activities[0]["id"] == activity_id + + +@pytest.mark.asyncio +async def test_get_not_marked_seen(async_client): + notification_feed = async_client.feed("notification", "test_mark_seen") + response = await notification_feed.get(limit=3) + activities = response["results"] + for activity in activities: + assert not activity["is_seen"] + + +@pytest.mark.asyncio +async def test_mark_seen_on_get(async_client): + notification_feed = async_client.feed("notification", "test_mark_seen") + response = await notification_feed.get(limit=100) + activities = response["results"] + for activity in activities: + await notification_feed.remove_activity(activity["id"]) + + old_activities = [ + await notification_feed.add_activity( + {"actor": 1, "verb": "tweet", "object": 1} + ), + await notification_feed.add_activity( + {"actor": 2, "verb": "add", "object": 2} + ), + await notification_feed.add_activity( + {"actor": 3, "verb": "watch", "object": 3} + ), + ] + + await notification_feed.get( + 
mark_seen=[old_activities[0]["id"], old_activities[1]["id"]] + ) + + response = await notification_feed.get(limit=3) + activities = response["results"] + + # is the seen state correct + for activity in activities: + # using a loop in case we're retrieving activities in a different + # order than old_activities + if old_activities[0]["id"] == activity["id"]: + assert activity["is_seen"] + if old_activities[1]["id"] == activity["id"]: + assert activity["is_seen"] + if old_activities[2]["id"] == activity["id"]: + assert not activity["is_seen"] + + # see if the state properly resets after we add another activity + await notification_feed.add_activity( + {"actor": 3, "verb": "watch", "object": 3} + ) # ['id'] + response = await notification_feed.get(limit=3) + activities = response["results"] + assert not activities[0]["is_seen"] + assert len(activities[0]["activities"]) == 2 + + +@pytest.mark.asyncio +async def test_mark_read_by_id(async_client): + notification_feed = async_client.feed("notification", "py2") + response = await notification_feed.get(limit=3) + activities = response["results"] + ids = [] + for activity in activities: + ids.append(activity["id"]) + assert not activity["is_read"] + ids = ids[:2] + await notification_feed.get(mark_read=ids) + response = await notification_feed.get(limit=3) + activities = response["results"] + for activity in activities: + if activity["id"] in ids: + assert activity["is_read"] + assert not activity["is_seen"] + + +@pytest.mark.asyncio +async def test_api_key_exception(): + client = stream.connect( + "5crf3bhfzesnMISSING", + "tfq2sdqpj9g446sbv653x3aqmgn33hsn8uzdc9jpskaw8mj6vsnhzswuwptuj9su", + use_async=True, + ) + user1 = client.feed("user", "1") + activity_data = { + "actor": 1, + "verb": "tweet", + "object": 1, + "debug_example_undefined": "test", + } + with pytest.raises(ApiKeyException): + await user1.add_activity(activity_data) + + +@pytest.mark.asyncio +async def test_complex_field(user1): + activity_data = { + "actor": 1, + "verb": "tweet", + "object": 1, + "participants": ["Tommaso", "Thierry"], + } + response = await user1.add_activity(activity_data) + activity_id = response["id"] + response = await user1.get(limit=1) + activities = response["results"] + assert activities[0]["id"] == activity_id + assert activities[0]["participants"] == ["Tommaso", "Thierry"] + + +@pytest.mark.asyncio +async def test_uniqueness(user1): + """ + In order for things to be considere unique they need: + a.) The same time and activity data + b.) The same time and foreign id + """ + + utcnow = datetime.now(tz=pytz.UTC) + activity_data = {"actor": 1, "verb": "tweet", "object": 1, "time": utcnow} + await user1.add_activity(activity_data) + await user1.add_activity(activity_data) + response = await user1.get(limit=2) + activities = response["results"] + assert_datetime_almost_equal(activities[0]["time"], utcnow) + if len(activities) > 1: + assert_clearly_not_equal(activities[1]["time"], utcnow) + + +@pytest.mark.asyncio +async def test_uniqueness_topic(flat3, topic, user1): + """ + In order for things to be considere unique they need: + a.) The same time and activity data, or + b.) 
The same time and foreign id + """ + # follow both the topic and the user + await flat3.follow("topic", topic.user_id) + await flat3.follow("user", user1.user_id) + # add the same activity twice + now = datetime.now(tzlocal()) + tweet = f"My Way {get_unique_postfix()}" + activity_data = { + "actor": 1, + "verb": "tweet", + "object": 1, + "time": now, + "tweet": tweet, + } + await topic.add_activity(activity_data) + await user1.add_activity(activity_data) + # verify that flat3 contains the activity exactly once + response = await flat3.get(limit=3) + activity_tweets = [a.get("tweet") for a in response["results"]] + assert activity_tweets.count(tweet) == 1 + + +@pytest.mark.asyncio +async def test_uniqueness_foreign_id(user1): + now = datetime.now(tzlocal()) + utcnow = now.astimezone(pytz.utc) + + activity_data = { + "actor": 1, + "verb": "tweet", + "object": 1, + "foreign_id": "tweet:11", + "time": utcnow, + } + await user1.add_activity(activity_data) + + activity_data = { + "actor": 2, + "verb": "tweet", + "object": 3, + "foreign_id": "tweet:11", + "time": utcnow, + } + await user1.add_activity(activity_data) + response = await user1.get(limit=10) + activities = response["results"] + # the second post should have overwritten the first one (because they + # had same id) + + assert len(activities) == 1 + assert activities[0]["object"] == "3" + assert activities[0]["foreign_id"] == "tweet:11" + assert_datetime_almost_equal(activities[0]["time"], utcnow) + + +@pytest.mark.asyncio +async def test_time_ordering(user2): + """ + datetime.datetime.now(tz=pytz.utc) is our recommended approach + so if we add an activity + add one using time + add another activity it should be in the right spot + """ + + # timedelta is used to "make sure" that ordering is known even though + # server time is not + custom_time = datetime.now(tz=pytz.utc) - timedelta(days=1) + + feed = user2 + for index, activity_time in enumerate([None, custom_time, None]): + await _test_sleep(1) # so times are a bit different + activity_data = { + "actor": 1, + "verb": "tweet", + "object": 1, + "foreign_id": f"tweet:{index}", + "time": activity_time, + } + await feed.add_activity(activity_data) + + response = await feed.get(limit=3) + activities = response["results"] + # the second post should have overwritten the first one (because they + # had same id) + assert activities[0]["foreign_id"] == "tweet:2" + assert activities[1]["foreign_id"] == "tweet:0" + assert activities[2]["foreign_id"] == "tweet:1" + assert_datetime_almost_equal(activities[2]["time"], custom_time) + + +@pytest.mark.asyncio +async def test_missing_actor(user1): + activity_data = { + "verb": "tweet", + "object": 1, + "debug_example_undefined": "test", + } + try: + await user1.add_activity(activity_data) + raise ValueError("should have raised InputException") + except InputException: + pass + + +@pytest.mark.asyncio +async def test_follow_many(async_client): + sources = [async_client.feed("user", str(i)).id for i in range(10)] + targets = [async_client.feed("flat", str(i)).id for i in range(10)] + feeds = [{"source": s, "target": t} for s, t in zip(sources, targets)] + await async_client.follow_many(feeds) + + for target in targets: + response = await async_client.feed(*target.split(":")).followers() + follows = response["results"] + assert len(follows) == 1 + assert follows[0]["feed_id"] in sources + assert follows[0]["target_id"] == target + + for source in sources: + response = await async_client.feed(*source.split(":")).following() + follows = response["results"] 
+ assert len(follows) == 1 + assert follows[0]["feed_id"] in sources + assert follows[0]["target_id"] == source + + +@pytest.mark.asyncio +async def test_follow_many_acl(async_client): + sources = [async_client.feed("user", str(i)) for i in range(10)] + # ensure every source is empty first + for feed in sources: + response = await feed.get(limit=100) + activities = response["results"] + for activity in activities: + await feed.remove_activity(activity["id"]) + + targets = [async_client.feed("flat", str(i)) for i in range(10)] + # ensure every source is empty first + for feed in targets: + response = await feed.get(limit=100) + activities = response["results"] + for activity in activities: + await feed.remove_activity(activity["id"]) + # add activity to each target feed + activity = { + "actor": "barry", + "object": "09", + "verb": "tweet", + "time": datetime.utcnow().isoformat(), + } + for feed in targets: + await feed.add_activity(activity) + response = await feed.get(limit=5) + assert len(response["results"]) == 1 + + sources_id = [feed.id for feed in sources] + targets_id = [target.id for target in targets] + feeds = [{"source": s, "target": t} for s, t in zip(sources_id, targets_id)] + + await async_client.follow_many(feeds, activity_copy_limit=0) + + for feed in sources: + response = await feed.get(limit=5) + activities = response["results"] + assert len(activities) == 0 + + +@pytest.mark.asyncio +async def test_unfollow_many(async_client): + unfollows = [ + {"source": "user:1", "target": "timeline:1"}, + {"source": "user:2", "target": "timeline:2", "keep_history": False}, + ] + + await async_client.unfollow_many(unfollows) + unfollows.append({"source": "user:1", "target": 42}) + + async def failing_unfollow(): + await async_client.unfollow_many(unfollows) + + with pytest.raises(InputException): + await failing_unfollow() + + +@pytest.mark.asyncio +async def test_add_to_many(async_client): + activity = {"actor": 1, "verb": "tweet", "object": 1, "custom": "data"} + feeds = [async_client.feed("flat", str(i)).id for i in range(10, 20)] + await async_client.add_to_many(activity, feeds) + + for feed in feeds: + feed = async_client.feed(*feed.split(":")) + response = await feed.get() + assert response["results"][0]["custom"] == "data" + + +@pytest.mark.asyncio +async def test_get_activities_empty_ids(async_client): + response = await async_client.get_activities(ids=[str(uuid1())]) + assert len(response["results"]) == 0 + + +@pytest.mark.asyncio +async def test_get_activities_empty_foreign_ids(async_client): + response = await async_client.get_activities( + foreign_id_times=[("fid-x", datetime.utcnow())] + ) + assert len(response["results"]) == 0 + + +@pytest.mark.asyncio +async def test_get_activities_full(async_client): + dt = datetime.utcnow() + fid = "awesome-test" + + activity = { + "actor": "barry", + "object": "09", + "verb": "tweet", + "time": dt, + "foreign_id": fid, + } + + feed = async_client.feed("user", "test_get_activity") + response = await feed.add_activity(activity) + + response = await async_client.get_activities(ids=[response["id"]]) + assert len(response["results"]) == 1 + foreign_id = response["results"][0]["foreign_id"] + assert activity["foreign_id"] == foreign_id + + response = await async_client.get_activities(foreign_id_times=[(fid, dt)]) + assert len(response["results"]) == 1 + foreign_id = response["results"][0]["foreign_id"] + assert activity["foreign_id"] == foreign_id + + +@pytest.mark.asyncio +async def test_get_activities_full_with_enrichment(async_client): + 
dt = datetime.utcnow() + fid = "awesome-test" + + actor = await async_client.users.add(str(uuid1()), data={"name": "barry"}) + activity = { + "actor": async_client.users.create_reference(actor["id"]), + "object": "09", + "verb": "tweet", + "time": dt, + "foreign_id": fid, + } + + feed = async_client.feed("user", "test_get_activity") + activity = await feed.add_activity(activity) + + reaction1 = await async_client.reactions.add("like", activity["id"], "liker") + reaction2 = await async_client.reactions.add("reshare", activity["id"], "sharer") + + def validate(response): + assert len(response["results"]) == 1 + assert response["results"][0]["id"] == activity["id"] + assert response["results"][0]["foreign_id"] == activity["foreign_id"] + assert response["results"][0]["actor"]["data"]["name"] == "barry" + latest_reactions = response["results"][0]["latest_reactions"] + assert len(latest_reactions) == 2 + assert latest_reactions["like"][0]["id"] == reaction1["id"] + assert latest_reactions["reshare"][0]["id"] == reaction2["id"] + assert response["results"][0]["reaction_counts"] == {"like": 1, "reshare": 1} + + reactions = {"recent": True, "counts": True} + validate( + await async_client.get_activities(ids=[activity["id"]], reactions=reactions) + ) + validate( + await async_client.get_activities( + foreign_id_times=[(fid, dt)], reactions=reactions + ) + ) + + +@pytest.mark.asyncio +async def test_get_activities_full_with_enrichment_and_reaction_kinds(async_client): + dt = datetime.utcnow() + fid = "awesome-test" + + actor = await async_client.users.add(str(uuid1()), data={"name": "barry"}) + activity = { + "actor": async_client.users.create_reference(actor["id"]), + "object": "09", + "verb": "tweet", + "time": dt, + "foreign_id": fid, + } + + feed = async_client.feed("user", "test_get_activity") + activity = await feed.add_activity(activity) + + await async_client.reactions.add("like", activity["id"], "liker") + await async_client.reactions.add("reshare", activity["id"], "sharer") + await async_client.reactions.add("comment", activity["id"], "commenter") + + reactions = {"recent": True, "counts": True, "kinds": "like,comment"} + response = await async_client.get_activities( + ids=[activity["id"]], reactions=reactions + ) + assert len(response["results"]) == 1 + assert response["results"][0]["id"] == activity["id"] + assert sorted(response["results"][0]["latest_reactions"].keys()) == [ + "comment", + "like", + ] + + assert response["results"][0]["reaction_counts"] == {"like": 1, "comment": 1} + + reactions = { + "recent": True, + "counts": True, + "kinds": ["", "reshare ", "comment\n"], + } + response = await async_client.get_activities( + foreign_id_times=[(fid, dt)], reactions=reactions + ) + assert len(response["results"]) == 1 + assert response["results"][0]["id"] == activity["id"] + assert sorted(response["results"][0]["latest_reactions"].keys()) == [ + "comment", + "reshare", + ] + assert response["results"][0]["reaction_counts"] == {"comment": 1, "reshare": 1} + + +@pytest.mark.asyncio +async def test_activity_partial_update(async_client): + now = datetime.utcnow() + feed = async_client.feed("user", uuid4()) + await feed.add_activity( + { + "actor": "barry", + "object": "09", + "verb": "tweet", + "time": now, + "foreign_id": "fid:123", + "product": {"name": "shoes", "price": 9.99, "color": "blue"}, + } + ) + response = await feed.get() + activity = response["results"][0] + + to_set = { + "product.name": "boots", + "product.price": 7.99, + "popularity": 1000, + "foo": {"bar": {"baz": 
"qux"}}, + } + to_unset = ["product.color"] + + # partial update by ID + await async_client.activity_partial_update( + id=activity["id"], set=to_set, unset=to_unset + ) + response = await feed.get() + updated = response["results"][0] + expected = activity + expected["product"] = {"name": "boots", "price": 7.99} + expected["popularity"] = 1000 + expected["foo"] = {"bar": {"baz": "qux"}} + assert updated == expected + + # partial update by foreign ID + time + to_set = {"foo.bar.baz": 42, "popularity": 9000} + to_unset = ["product.price"] + await async_client.activity_partial_update( + foreign_id=activity["foreign_id"], + time=activity["time"], + set=to_set, + unset=to_unset, + ) + response = await feed.get() + updated = response["results"][0] + expected["product"] = {"name": "boots"} + expected["foo"] = {"bar": {"baz": 42}} + expected["popularity"] = 9000 + assert updated == expected + + +@pytest.mark.asyncio +async def test_activities_partial_update(async_client): + feed = async_client.feed("user", uuid4()) + await feed.add_activities( + [ + { + "actor": "barry", + "object": "09", + "verb": "tweet", + "time": datetime.utcnow(), + "foreign_id": "fid:123", + "product": {"name": "shoes", "price": 9.99, "color": "blue"}, + }, + { + "actor": "jerry", + "object": "10", + "verb": "tweet", + "time": datetime.utcnow(), + "foreign_id": "fid:456", + "product": {"name": "shoes", "price": 9.99, "color": "blue"}, + }, + { + "actor": "tommy", + "object": "09", + "verb": "tweet", + "time": datetime.utcnow(), + "foreign_id": "fid:789", + "product": {"name": "shoes", "price": 9.99, "color": "blue"}, + }, + ] + ) + response = await feed.get() + activities = response["results"] + + batch = [ + { + "id": activities[0]["id"], + "set": {"product.color": "purple", "custom": {"some": "extra data"}}, + "unset": ["product.price"], + }, + { + "id": activities[2]["id"], + "set": {"product.price": 9001, "on_sale": True}, + }, + ] + + # partial update by ID + await async_client.activities_partial_update(batch) + response = await feed.get() + updated = response["results"] + expected = activities + expected[0]["product"] = {"name": "shoes", "color": "purple"} + expected[0]["custom"] = {"some": "extra data"} + expected[2]["product"] = {"name": "shoes", "price": 9001, "color": "blue"} + expected[2]["on_sale"] = True + assert updated == expected + + # partial update by foreign ID + time + batch = [ + { + "foreign_id": activities[1]["foreign_id"], + "time": activities[1]["time"], + "set": {"product.color": "beeeeeeige", "custom": {"modified_by": "me"}}, + "unset": ["product.name"], + }, + { + "foreign_id": activities[2]["foreign_id"], + "time": activities[2]["time"], + "unset": ["on_sale"], + }, + ] + await async_client.activities_partial_update(batch) + response = await feed.get() + updated = response["results"] + + expected[1]["product"] = {"price": 9.99, "color": "beeeeeeige"} + expected[1]["custom"] = {"modified_by": "me"} + del expected[2]["on_sale"] + assert updated == expected + + +@pytest.mark.asyncio +async def test_reaction_add(async_client): + await async_client.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + + +@pytest.mark.asyncio +async def test_reaction_add_to_target_feeds(async_client): + r = await async_client.reactions.add( + "superlike", + "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", + "mike", + data={"popularity": 50}, + target_feeds=["user:michelle"], + target_feeds_extra_data={"popularity": 100}, + ) + assert r["data"]["popularity"] == 50 + response = await 
async_client.feed("user", "michelle").get(limit=1) + a = response["results"][0] + assert r["id"] in a["reaction"] + assert a["verb"] == "superlike" + assert a["popularity"] == 100 + + child = await async_client.reactions.add_child( + "superlike", + r["id"], + "rob", + data={"popularity": 60}, + target_feeds=["user:michelle"], + target_feeds_extra_data={"popularity": 200}, + ) + + assert child["data"]["popularity"] == 60 + response = await async_client.feed("user", "michelle").get(limit=1) + a = response["results"][0] + assert child["id"] in a["reaction"] + assert a["verb"] == "superlike" + assert a["popularity"] == 200 + + +@pytest.mark.asyncio +async def test_reaction_get(async_client): + response = await async_client.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + reaction = await async_client.reactions.get(response["id"]) + assert reaction["parent"] == "" + assert reaction["data"] == {} + assert reaction["latest_children"] == {} + assert reaction["children_counts"] == {} + assert reaction["activity_id"] == "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4" + assert reaction["kind"] == "like" + assert "created_at" in reaction + assert "updated_at" in reaction + assert "id" in reaction + + +@pytest.mark.asyncio +async def test_reaction_update(async_client): + response = await async_client.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + await async_client.reactions.update(response["id"], {"changed": True}) + + +@pytest.mark.asyncio +async def test_reaction_delete(async_client): + response = await async_client.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + await async_client.reactions.delete(response["id"]) + + +@pytest.mark.asyncio +async def test_reaction_add_child(async_client): + response = await async_client.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + await async_client.reactions.add_child("like", response["id"], "rob") + + +@pytest.mark.asyncio +async def test_reaction_filter_random(async_client): + await async_client.reactions.filter( + kind="like", + reaction_id="87a9eec0-fd5f-11e8-8080-80013fed2f5b", + id_lte="87a9eec0-fd5f-11e8-8080-80013fed2f5b", + ) + await async_client.reactions.filter( + activity_id="87a9eec0-fd5f-11e8-8080-80013fed2f5b", + id_lte="87a9eec0-fd5f-11e8-8080-80013fed2f5b", + ) + await async_client.reactions.filter( + user_id="mike", id_lte="87a9eec0-fd5f-11e8-8080-80013fed2f5b" + ) + + +def _first_result_should_be(response, element): + el = element.copy() + el.pop("duration") + assert len(response["results"]) == 1 + assert response["results"][0] == el + + +@pytest.mark.asyncio +async def test_reaction_filter(async_client): + activity_id = str(uuid1()) + user = str(uuid1()) + + response = await async_client.reactions.add("like", activity_id, user) + child = await async_client.reactions.add_child("like", response["id"], user) + reaction = await async_client.reactions.get(response["id"]) + + response = await async_client.reactions.add("comment", activity_id, user) + reaction_comment = await async_client.reactions.get(response["id"]) + + r = await async_client.reactions.filter(reaction_id=reaction["id"]) + _first_result_should_be(r, child) + + r = await async_client.reactions.filter( + kind="like", activity_id=activity_id, id_lte=reaction["id"] + ) + _first_result_should_be(r, reaction) + + r = await async_client.reactions.filter( + kind="like", user_id=user, id_lte=reaction["id"] + ) + _first_result_should_be(r, reaction) + + r = await 
async_client.reactions.filter(kind="comment", activity_id=activity_id) + _first_result_should_be(r, reaction_comment) + + +@pytest.mark.asyncio +async def test_user_add(async_client): + await async_client.users.add(str(uuid1())) + + +@pytest.mark.asyncio +async def test_user_add_get_or_create(async_client): + user_id = str(uuid1()) + r1 = await async_client.users.add(user_id) + r2 = await async_client.users.add(user_id, get_or_create=True) + assert r1["id"] == r2["id"] + assert r1["created_at"] == r2["created_at"] + assert r1["updated_at"] == r2["updated_at"] + + +@pytest.mark.asyncio +async def test_user_get(async_client): + response = await async_client.users.add(str(uuid1())) + user = await async_client.users.get(response["id"]) + assert user["data"] == {} + assert "created_at" in user + assert "updated_at" in user + assert "id" in user + + +@pytest.mark.asyncio +async def test_user_get_with_follow_counts(async_client): + response = await async_client.users.add(str(uuid1())) + user = await async_client.users.get(response["id"], with_follow_counts=True) + assert user["id"] == response["id"] + assert "followers_count" in user + assert "following_count" in user + + +@pytest.mark.asyncio +async def test_user_update(async_client): + response = await async_client.users.add(str(uuid1())) + await async_client.users.update(response["id"], {"changed": True}) + + +@pytest.mark.asyncio +async def test_user_delete(async_client): + response = await async_client.users.add(str(uuid1())) + await async_client.users.delete(response["id"]) + + +@pytest.mark.asyncio +async def test_collections_add(async_client): + await async_client.collections.add( + "items", {"data": 1}, id=str(uuid1()), user_id="tom" + ) + + +@pytest.mark.asyncio +async def test_collections_add_no_id(async_client): + await async_client.collections.add("items", {"data": 1}) + + +@pytest.mark.asyncio +async def test_collections_get(async_client): + response = await async_client.collections.add("items", {"data": 1}, id=str(uuid1())) + entry = await async_client.collections.get("items", response["id"]) + assert entry["data"] == {"data": 1} + assert "created_at" in entry + assert "updated_at" in entry + assert "id" in entry + + +@pytest.mark.asyncio +async def test_collections_update(async_client): + response = await async_client.collections.add("items", {"data": 1}, str(uuid1())) + await async_client.collections.update( + "items", response["id"], data={"changed": True} + ) + entry = await async_client.collections.get("items", response["id"]) + assert entry["data"] == {"changed": True} + + +@pytest.mark.asyncio +async def test_collections_delete(async_client): + response = await async_client.collections.add("items", {"data": 1}, str(uuid1())) + await async_client.collections.delete("items", response["id"]) + + +@pytest.mark.asyncio +async def test_feed_enrichment_collection(async_client): + entry = await async_client.collections.add("items", {"name": "time machine"}) + entry.pop("duration") + f = async_client.feed("user", "mike") + activity_data = { + "actor": "mike", + "verb": "buy", + "object": async_client.collections.create_reference(entry=entry), + } + await f.add_activity(activity_data) + response = await f.get() + assert set(activity_data.items()).issubset(set(response["results"][0].items())) + enriched_response = await f.get(enrich=True) + assert enriched_response["results"][0]["object"] == entry + + +@pytest.mark.asyncio +async def test_feed_enrichment_user(async_client): + user = await async_client.users.add(str(uuid1()), {"name": 
"Mike"}) + user.pop("duration") + f = async_client.feed("user", "mike") + activity_data = { + "actor": async_client.users.create_reference(user), + "verb": "buy", + "object": "time machine", + } + await f.add_activity(activity_data) + response = await f.get() + assert set(activity_data.items()).issubset(set(response["results"][0].items())) + enriched_response = await f.get(enrich=True) + assert enriched_response["results"][0]["actor"] == user + + +@pytest.mark.asyncio +async def test_feed_enrichment_own_reaction(async_client): + f = async_client.feed("user", "mike") + activity_data = {"actor": "mike", "verb": "buy", "object": "object"} + response = await f.add_activity(activity_data) + reaction = await async_client.reactions.add("like", response["id"], "mike") + reaction.pop("duration") + enriched_response = await f.get(reactions={"own": True}, user_id="mike") + assert enriched_response["results"][0]["own_reactions"]["like"][0] == reaction + + +@pytest.mark.asyncio +async def test_feed_enrichment_recent_reaction(async_client): + f = async_client.feed("user", "mike") + activity_data = {"actor": "mike", "verb": "buy", "object": "object"} + response = await f.add_activity(activity_data) + reaction = await async_client.reactions.add("like", response["id"], "mike") + reaction.pop("duration") + enriched_response = await f.get(reactions={"recent": True}) + assert enriched_response["results"][0]["latest_reactions"]["like"][0] == reaction + + +@pytest.mark.asyncio +async def test_feed_enrichment_reaction_counts(async_client): + f = async_client.feed("user", "mike") + activity_data = {"actor": "mike", "verb": "buy", "object": "object"} + response = await f.add_activity(activity_data) + reaction = await async_client.reactions.add("like", response["id"], "mike") + reaction.pop("duration") + enriched_response = await f.get(reactions={"counts": True}) + assert enriched_response["results"][0]["reaction_counts"]["like"] == 1 + + +@pytest.mark.asyncio +async def test_track_engagements(async_client): + engagements = [ + { + "content": "1", + "label": "click", + "features": [ + {"group": "topic", "value": "js"}, + {"group": "user", "value": "tommaso"}, + ], + "user_data": "tommaso", + }, + { + "content": "2", + "label": "click", + "features": [ + {"group": "topic", "value": "go"}, + {"group": "user", "value": "tommaso"}, + ], + "user_data": {"id": "486892", "alias": "Julian"}, + }, + { + "content": "3", + "label": "click", + "features": [{"group": "topic", "value": "go"}], + "user_data": {"id": "tommaso", "alias": "tommaso"}, + }, + ] + await async_client.track_engagements(engagements) + + +@pytest.mark.asyncio +async def test_track_impressions(async_client): + impressions = [ + { + "content_list": ["1", "2", "3"], + "features": [ + {"group": "topic", "value": "js"}, + {"group": "user", "value": "tommaso"}, + ], + "user_data": {"id": "tommaso", "alias": "tommaso"}, + }, + { + "content_list": ["2", "3", "5"], + "features": [{"group": "topic", "value": "js"}], + "user_data": {"id": "486892", "alias": "Julian"}, + }, + ] + await async_client.track_impressions(impressions) + + +@pytest.mark.asyncio +async def test_og(async_client): + response = await async_client.og("https://google.com") + assert "title" in response + assert "description" in response + + +@pytest.mark.asyncio +async def test_follow_stats(async_client): + uniq = uuid4() + f = async_client.feed("user", uniq) + await f.follow("user", uuid4()) + await f.follow("user", uuid4()) + await f.follow("user", uuid4()) + + await async_client.feed("user", 
uuid4()).follow("user", uniq) + await async_client.feed("timeline", uuid4()).follow("user", uniq) + + feed_id = "user:" + str(uniq) + response = await async_client.follow_stats(feed_id) + result = response["results"] + assert result["following"]["count"] == 3 + assert result["followers"]["count"] == 2 + + response = await async_client.follow_stats( + feed_id, followers_slugs=["timeline"], following_slugs=["timeline"] + ) + result = response["results"] + assert result["following"]["count"] == 0 + assert result["followers"]["count"] == 1 diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index a06af67..1dcbf27 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -13,23 +13,14 @@ import requests from dateutil.tz import tzlocal from requests.exceptions import MissingSchema +from urllib.parse import parse_qs, urlparse +from unittest import TestCase import stream from stream import serializer from stream.exceptions import ApiKeyException, InputException from stream.feed import Feed -try: - from unittest.case import TestCase -except ImportError: - from unittest import TestCase - - -try: - from urlparse import urlparse, parse_qs -except ImportError: - from urllib.parse import urlparse, parse_qs - def connect_debug(): try: @@ -45,7 +36,7 @@ def connect_debug(): ) sys.exit(1) - return stream.connect(key, secret, location="qa", timeout=30) + return stream.connect(key, secret, location="qa", timeout=30, use_async=False) client = connect_debug() @@ -703,8 +694,8 @@ def test_update_activity_to_targets(self): "object": 1, "foreign_id": foreign_id, "time": now, + "to": ["user:1", "user:2"], } - activity_data["to"] = ["user:1", "user:2"] self.user1.add_activity(activity_data) ret = self.user1.update_activity_to_targets( diff --git a/stream/users.py b/stream/users.py deleted file mode 100644 index 776e577..0000000 --- a/stream/users.py +++ /dev/null @@ -1,36 +0,0 @@ -class Users: - def __init__(self, client, token): - self.client = client - self.token = token - - def create_reference(self, id): - _id = id - if isinstance(id, (dict,)) and id.get("id") is not None: - _id = id.get("id") - return "SU:%s" % _id - - def add(self, user_id, data=None, get_or_create=False): - payload = dict(id=user_id, data=data) - return self.client.post( - "user/", - service_name="api", - signature=self.token, - data=payload, - params={"get_or_create": get_or_create}, - ) - - def get(self, user_id, **params): - return self.client.get( - "user/%s" % user_id, service_name="api", params=params, signature=self.token - ) - - def update(self, user_id, data=None): - payload = dict(data=data) - return self.client.put( - "user/%s" % user_id, service_name="api", signature=self.token, data=payload - ) - - def delete(self, user_id): - return self.client.delete( - "user/%s" % user_id, service_name="api", signature=self.token - ) diff --git a/stream/users/__init__.py b/stream/users/__init__.py new file mode 100644 index 0000000..f1cfbef --- /dev/null +++ b/stream/users/__init__.py @@ -0,0 +1 @@ +from .user import AsyncUsers, Users diff --git a/stream/users/base.py b/stream/users/base.py new file mode 100644 index 0000000..b17dead --- /dev/null +++ b/stream/users/base.py @@ -0,0 +1,39 @@ +from abc import ABC, abstractmethod + + +class AbstractUsers(ABC): + @abstractmethod + def create_reference(self, id): + pass + + @abstractmethod + def add(self, user_id, data=None, get_or_create=False): + pass + + @abstractmethod + def get(self, user_id, **params): + pass + + @abstractmethod + def update(self, 
user_id, data=None): + pass + + @abstractmethod + def delete(self, user_id): + pass + + +class BaseUsers(AbstractUsers, ABC): + + API_ENDPOINT = "user/" + SERVICE_NAME = "api" + + def __init__(self, client, token): + self.client = client + self.token = token + + def create_reference(self, id): + _id = id + if isinstance(id, (dict,)) and id.get("id") is not None: + _id = id.get("id") + return f"SU:{_id}" diff --git a/stream/users/user.py b/stream/users/user.py new file mode 100644 index 0000000..b96048d --- /dev/null +++ b/stream/users/user.py @@ -0,0 +1,73 @@ +from stream.users.base import BaseUsers + + +class Users(BaseUsers): + def add(self, user_id, data=None, get_or_create=False): + payload = dict(id=user_id, data=data) + return self.client.post( + self.API_ENDPOINT, + service_name=self.SERVICE_NAME, + signature=self.token, + data=payload, + params={"get_or_create": get_or_create}, + ) + + def get(self, user_id, **params): + return self.client.get( + f"{self.API_ENDPOINT}/{user_id}", + service_name=self.SERVICE_NAME, + params=params, + signature=self.token, + ) + + def update(self, user_id, data=None): + payload = dict(data=data) + return self.client.put( + f"{self.API_ENDPOINT}/{user_id}", + service_name=self.SERVICE_NAME, + signature=self.token, + data=payload, + ) + + def delete(self, user_id): + return self.client.delete( + f"{self.API_ENDPOINT}/{user_id}", + service_name=self.SERVICE_NAME, + signature=self.token, + ) + + +class AsyncUsers(BaseUsers): + async def add(self, user_id, data=None, get_or_create=False): + payload = dict(id=user_id, data=data) + return await self.client.post( + self.API_ENDPOINT, + service_name=self.SERVICE_NAME, + signature=self.token, + data=payload, + params={"get_or_create": str(get_or_create)}, + ) + + async def get(self, user_id, **params): + return await self.client.get( + f"{self.API_ENDPOINT}/{user_id}", + service_name=self.SERVICE_NAME, + params=params, + signature=self.token, + ) + + async def update(self, user_id, data=None): + payload = dict(data=data) + return await self.client.put( + f"{self.API_ENDPOINT}/{user_id}", + service_name=self.SERVICE_NAME, + signature=self.token, + data=payload, + ) + + async def delete(self, user_id): + return await self.client.delete( + f"{self.API_ENDPOINT}/{user_id}", + service_name=self.SERVICE_NAME, + signature=self.token, + ) diff --git a/stream/utils.py b/stream/utils.py index ea50635..bd0dbc5 100644 --- a/stream/utils.py +++ b/stream/utils.py @@ -13,8 +13,11 @@ def validate_feed_id(feed_id): """ feed_id = str(feed_id) if len(feed_id.split(":")) != 2: - msg = "Invalid feed_id spec %s, please specify the feed_id as feed_slug:feed_id" - raise ValueError(msg % feed_id) + msg = ( + f"Invalid feed_id spec {feed_id}, " + f"please specify the feed_id as feed_slug:feed_id" + ) + raise ValueError(msg) feed_slug, user_id = feed_id.split(":") validate_feed_slug(feed_slug) @@ -28,8 +31,8 @@ def validate_feed_slug(feed_slug): """ feed_slug = str(feed_slug) if not valid_re.match(feed_slug): - msg = "Invalid feed slug %s, please only use letters, numbers and _" - raise ValueError(msg % feed_slug) + msg = f"Invalid feed slug {feed_slug}, please only use letters, numbers and _" + raise ValueError(msg) return feed_slug @@ -39,8 +42,8 @@ def validate_user_id(user_id): """ user_id = str(user_id) if not valid_re.match(user_id): - msg = "Invalid user id %s, please only use letters, numbers and _" - raise ValueError(msg % user_id) + msg = f"Invalid user id {user_id}, please only use letters, numbers and _" + raise 
ValueError(msg) return user_id From aefdcd39ff8a41a443455f1a41cc819039015cdb Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Thu, 21 Apr 2022 08:01:07 +0200 Subject: [PATCH 197/208] fix: redirect, uniqueness and deprecations --- stream/client/base.py | 13 ++++- stream/tests/conftest.py | 13 ++--- stream/tests/test_async_client.py | 83 ++++++++++++++++--------------- stream/tests/test_client.py | 12 +---- 4 files changed, 63 insertions(+), 58 deletions(-) diff --git a/stream/client/base.py b/stream/client/base.py index e6a45ab..ee7100f 100644 --- a/stream/client/base.py +++ b/stream/client/base.py @@ -461,7 +461,18 @@ def get_full_url(self, service_name, relative_url): if self.custom_api_port: base_url = f"{base_url}:{self.custom_api_port}" - url = base_url + "/" + service_name + "/" + self.version + "/" + relative_url + url = ( + base_url + + "/" + + service_name + + "/" + + self.version + + "/" + + relative_url.replace( + "//", "/" + ) # non-standard url will cause redirect and so can lose its body + ) + return url def get_default_params(self): diff --git a/stream/tests/conftest.py b/stream/tests/conftest.py index 88df1e2..ac5df29 100644 --- a/stream/tests/conftest.py +++ b/stream/tests/conftest.py @@ -1,6 +1,7 @@ import asyncio import os import sys +from uuid import uuid4 import pytest @@ -45,29 +46,29 @@ async def async_client(): @pytest.fixture def user1(async_client): - return async_client.feed("user", "1") + return async_client.feed("user", f"1-{uuid4()}") @pytest.fixture def user2(async_client): - return async_client.feed("user", "2") + return async_client.feed("user", f"2-{uuid4()}") @pytest.fixture def aggregated2(async_client): - return async_client.feed("aggregated", "2") + return async_client.feed("aggregated", f"2-{uuid4()}") @pytest.fixture def aggregated3(async_client): - return async_client.feed("aggregated", "3") + return async_client.feed("aggregated", f"3-{uuid4()}") @pytest.fixture def topic(async_client): - return async_client.feed("topic", "1") + return async_client.feed("topic", f"1-{uuid4()}") @pytest.fixture def flat3(async_client): - return async_client.feed("flat", "3") + return async_client.feed("flat", f"3-{uuid4()}") diff --git a/stream/tests/test_async_client.py b/stream/tests/test_async_client.py index c1dfdf4..bb1bc27 100644 --- a/stream/tests/test_async_client.py +++ b/stream/tests/test_async_client.py @@ -9,7 +9,6 @@ import stream from stream.exceptions import ApiKeyException, InputException -from stream.tests.test_client import get_unique_postfix def assert_first_activity_id_equal(activities, correct_activity_id): @@ -82,7 +81,7 @@ async def test_update_activities_create(async_client): @pytest.mark.asyncio async def test_add_activity(async_client): - feed = async_client.feed("user", "py1") + feed = async_client.feed("user", f"py1-{uuid4()}") activity_data = {"actor": 1, "verb": "tweet", "object": 1} response = await feed.add_activity(activity_data) activity_id = response["id"] @@ -93,7 +92,7 @@ async def test_add_activity(async_client): @pytest.mark.asyncio async def test_add_activity_to_inplace_change(async_client): - feed = async_client.feed("user", "py1") + feed = async_client.feed("user", f"py1-{uuid4()}") team_feed = async_client.feed("user", "teamy") activity_data = {"actor": 1, "verb": "tweet", "object": 1} activity_data["to"] = [team_feed.id] @@ -103,8 +102,8 @@ async def test_add_activity_to_inplace_change(async_client): @pytest.mark.asyncio async def test_add_activities_to_inplace_change(async_client): - feed = async_client.feed("user", 
"py1") - team_feed = async_client.feed("user", "teamy") + feed = async_client.feed("user", f"py1-{uuid4()}") + team_feed = async_client.feed("user", f"teamy-{uuid4()}") activity_data = {"actor": 1, "verb": "tweet", "object": 1} activity_data["to"] = [team_feed.id] await feed.add_activities([activity_data]) @@ -116,7 +115,7 @@ async def test_add_activity_to(async_client): # test for sending an activities to the team feed using to feeds = ["user", "teamy", "team_follower"] user_feed, team_feed, team_follower_feed = map( - lambda x: async_client.feed("user", x), feeds + lambda x: async_client.feed("user", f"{x}-{uuid4()}"), feeds ) await team_follower_feed.follow(team_feed.slug, team_feed.user_id) activity_data = {"actor": 1, "verb": "tweet", "object": 1, "to": [team_feed.id]} @@ -202,8 +201,8 @@ async def test_add_activities(user1): @pytest.mark.asyncio async def test_add_activities_to(async_client, user1): - pyto2 = async_client.feed("user", "pyto2") - pyto3 = async_client.feed("user", "pyto3") + pyto2 = async_client.feed("user", f"pyto2-{uuid4()}") + pyto3 = async_client.feed("user", f"pyto3-{uuid4()}") to = [pyto2.id, pyto3.id] activity_data = [ @@ -230,7 +229,7 @@ async def test_add_activities_to(async_client, user1): @pytest.mark.asyncio async def test_follow_and_source(async_client): - feed = async_client.feed("user", "test_follow") + feed = async_client.feed("user", f"test_follow-{uuid4()}") agg_feed = async_client.feed("aggregated", "test_follow") actor_id = random.randint(10, 100000) activity_data = {"actor": actor_id, "verb": "tweet", "object": 1} @@ -248,14 +247,14 @@ async def test_follow_and_source(async_client): @pytest.mark.asyncio async def test_empty_followings(async_client): - asocial = async_client.feed("user", "asocialpython") + asocial = async_client.feed("user", f"asocialpython-{uuid4()}") followings = await asocial.following() assert followings["results"] == [] @pytest.mark.asyncio async def test_get_followings(async_client): - social = async_client.feed("user", "psocial") + social = async_client.feed("user", f"psocial-{uuid4()}") await social.follow("user", "apy") await social.follow("user", "bpy") await social.follow("user", "cpy") @@ -271,17 +270,17 @@ async def test_get_followings(async_client): @pytest.mark.asyncio async def test_empty_followers(async_client): - asocial = async_client.feed("user", "asocialpython") + asocial = async_client.feed("user", f"asocialpython-{uuid4()}") followers = await asocial.followers() assert followers["results"] == [] @pytest.mark.asyncio async def test_get_followers(async_client): - social = async_client.feed("user", "psocial") - spammy1 = async_client.feed("user", "spammy1") - spammy2 = async_client.feed("user", "spammy2") - spammy3 = async_client.feed("user", "spammy3") + social = async_client.feed("user", f"psocial-{uuid4()}") + spammy1 = async_client.feed("user", f"spammy1-{uuid4()}") + spammy2 = async_client.feed("user", f"spammy2-{uuid4()}") + spammy3 = async_client.feed("user", f"spammy3-{uuid4()}") for feed in [spammy1, spammy2, spammy3]: await feed.follow("user", social.user_id) followers = await social.followers(offset=0, limit=2) @@ -296,7 +295,7 @@ async def test_get_followers(async_client): @pytest.mark.asyncio async def test_empty_do_i_follow(async_client): - social = async_client.feed("user", "psocial") + social = async_client.feed("user", f"psocial-{uuid4()}") await social.follow("user", "apy") await social.follow("user", "bpy") followings = await social.following(feeds=["user:missingpy"]) @@ -305,7 +304,7 @@ async 
def test_empty_do_i_follow(async_client): @pytest.mark.asyncio async def test_do_i_follow(async_client): - social = async_client.feed("user", "psocial") + social = async_client.feed("user", f"psocial-{uuid4()}") await social.follow("user", "apy") await social.follow("user", "bpy") followings = await social.following(feeds=["user:apy"]) @@ -376,7 +375,7 @@ async def test_get(user1): @pytest.mark.asyncio async def test_get_not_marked_seen(async_client): - notification_feed = async_client.feed("notification", "test_mark_seen") + notification_feed = async_client.feed("notification", f"test_mark_seen-{uuid4()}") response = await notification_feed.get(limit=3) activities = response["results"] for activity in activities: @@ -385,7 +384,7 @@ async def test_get_not_marked_seen(async_client): @pytest.mark.asyncio async def test_mark_seen_on_get(async_client): - notification_feed = async_client.feed("notification", "test_mark_seen") + notification_feed = async_client.feed("notification", f"test_mark_seen-{uuid4()}") response = await notification_feed.get(limit=100) activities = response["results"] for activity in activities: @@ -433,7 +432,7 @@ async def test_mark_seen_on_get(async_client): @pytest.mark.asyncio async def test_mark_read_by_id(async_client): - notification_feed = async_client.feed("notification", "py2") + notification_feed = async_client.feed("notification", f"py2-{uuid4()}") response = await notification_feed.get(limit=3) activities = response["results"] ids = [] @@ -515,7 +514,7 @@ async def test_uniqueness_topic(flat3, topic, user1): await flat3.follow("user", user1.user_id) # add the same activity twice now = datetime.now(tzlocal()) - tweet = f"My Way {get_unique_postfix()}" + tweet = f"My Way {uuid4()}" activity_data = { "actor": 1, "verb": "tweet", @@ -615,8 +614,8 @@ async def test_missing_actor(user1): @pytest.mark.asyncio async def test_follow_many(async_client): - sources = [async_client.feed("user", str(i)).id for i in range(10)] - targets = [async_client.feed("flat", str(i)).id for i in range(10)] + sources = [async_client.feed("user", f"{i}-{uuid4()}").id for i in range(10)] + targets = [async_client.feed("flat", f"{i}-{uuid4()}").id for i in range(10)] feeds = [{"source": s, "target": t} for s, t in zip(sources, targets)] await async_client.follow_many(feeds) @@ -631,13 +630,13 @@ async def test_follow_many(async_client): response = await async_client.feed(*source.split(":")).following() follows = response["results"] assert len(follows) == 1 - assert follows[0]["feed_id"] in sources - assert follows[0]["target_id"] == source + assert follows[0]["feed_id"] == source + assert follows[0]["target_id"] in targets @pytest.mark.asyncio async def test_follow_many_acl(async_client): - sources = [async_client.feed("user", str(i)) for i in range(10)] + sources = [async_client.feed("user", f"{i}-{uuid4()}") for i in range(10)] # ensure every source is empty first for feed in sources: response = await feed.get(limit=100) @@ -645,7 +644,7 @@ async def test_follow_many_acl(async_client): for activity in activities: await feed.remove_activity(activity["id"]) - targets = [async_client.feed("flat", str(i)) for i in range(10)] + targets = [async_client.feed("flat", f"{i}-{uuid4()}") for i in range(10)] # ensure every source is empty first for feed in targets: response = await feed.get(limit=100) @@ -696,7 +695,7 @@ async def failing_unfollow(): @pytest.mark.asyncio async def test_add_to_many(async_client): activity = {"actor": 1, "verb": "tweet", "object": 1, "custom": "data"} - feeds = 
[async_client.feed("flat", str(i)).id for i in range(10, 20)] + feeds = [async_client.feed("flat", f"{i}-{uuid4()}").id for i in range(10, 20)] await async_client.add_to_many(activity, feeds) for feed in feeds: @@ -732,7 +731,7 @@ async def test_get_activities_full(async_client): "foreign_id": fid, } - feed = async_client.feed("user", "test_get_activity") + feed = async_client.feed("user", f"test_get_activity-{uuid4()}") response = await feed.add_activity(activity) response = await async_client.get_activities(ids=[response["id"]]) @@ -760,7 +759,7 @@ async def test_get_activities_full_with_enrichment(async_client): "foreign_id": fid, } - feed = async_client.feed("user", "test_get_activity") + feed = async_client.feed("user", f"test_get_activity-{uuid4()}") activity = await feed.add_activity(activity) reaction1 = await async_client.reactions.add("like", activity["id"], "liker") @@ -802,7 +801,7 @@ async def test_get_activities_full_with_enrichment_and_reaction_kinds(async_clie "foreign_id": fid, } - feed = async_client.feed("user", "test_get_activity") + feed = async_client.feed("user", f"test_get_activity-{uuid4()}") activity = await feed.add_activity(activity) await async_client.reactions.add("like", activity["id"], "liker") @@ -983,16 +982,18 @@ async def test_reaction_add(async_client): @pytest.mark.asyncio async def test_reaction_add_to_target_feeds(async_client): + feed_id = f"user:michelle-{uuid4()}" r = await async_client.reactions.add( "superlike", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike", data={"popularity": 50}, - target_feeds=["user:michelle"], + target_feeds=[feed_id], target_feeds_extra_data={"popularity": 100}, ) assert r["data"]["popularity"] == 50 - response = await async_client.feed("user", "michelle").get(limit=1) + feed = async_client.feed(*feed_id.split(":")) + response = await feed.get(limit=1) a = response["results"][0] assert r["id"] in a["reaction"] assert a["verb"] == "superlike" @@ -1003,12 +1004,12 @@ async def test_reaction_add_to_target_feeds(async_client): r["id"], "rob", data={"popularity": 60}, - target_feeds=["user:michelle"], + target_feeds=[feed_id], target_feeds_extra_data={"popularity": 200}, ) assert child["data"]["popularity"] == 60 - response = await async_client.feed("user", "michelle").get(limit=1) + response = await feed.get(limit=1) a = response["results"][0] assert child["id"] in a["reaction"] assert a["verb"] == "superlike" @@ -1196,7 +1197,7 @@ async def test_collections_delete(async_client): async def test_feed_enrichment_collection(async_client): entry = await async_client.collections.add("items", {"name": "time machine"}) entry.pop("duration") - f = async_client.feed("user", "mike") + f = async_client.feed("user", f"mike-{uuid4()}") activity_data = { "actor": "mike", "verb": "buy", @@ -1213,7 +1214,7 @@ async def test_feed_enrichment_collection(async_client): async def test_feed_enrichment_user(async_client): user = await async_client.users.add(str(uuid1()), {"name": "Mike"}) user.pop("duration") - f = async_client.feed("user", "mike") + f = async_client.feed("user", f"mike-{uuid4()}") activity_data = { "actor": async_client.users.create_reference(user), "verb": "buy", @@ -1228,7 +1229,7 @@ async def test_feed_enrichment_user(async_client): @pytest.mark.asyncio async def test_feed_enrichment_own_reaction(async_client): - f = async_client.feed("user", "mike") + f = async_client.feed("user", f"mike-{uuid4()}") activity_data = {"actor": "mike", "verb": "buy", "object": "object"} response = await f.add_activity(activity_data) reaction = 
await async_client.reactions.add("like", response["id"], "mike") @@ -1239,7 +1240,7 @@ async def test_feed_enrichment_own_reaction(async_client): @pytest.mark.asyncio async def test_feed_enrichment_recent_reaction(async_client): - f = async_client.feed("user", "mike") + f = async_client.feed("user", f"mike-{uuid4()}") activity_data = {"actor": "mike", "verb": "buy", "object": "object"} response = await f.add_activity(activity_data) reaction = await async_client.reactions.add("like", response["id"], "mike") @@ -1250,7 +1251,7 @@ async def test_feed_enrichment_recent_reaction(async_client): @pytest.mark.asyncio async def test_feed_enrichment_reaction_counts(async_client): - f = async_client.feed("user", "mike") + f = async_client.feed("user", f"mike-{uuid4()}") activity_data = {"actor": "mike", "verb": "buy", "object": "object"} response = await f.add_activity(activity_data) reaction = await async_client.reactions.add("like", response["id"], "mike") diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index 1dcbf27..33dfe17 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -5,7 +5,6 @@ import random import sys import time -from itertools import count from uuid import uuid1, uuid4 import jwt @@ -41,19 +40,12 @@ def connect_debug(): client = connect_debug() -counter = count() -test_identifier = uuid4() - - -def get_unique_postfix(): - return "---test_%s-feed_%s" % (test_identifier, next(counter)) - def getfeed(feed_slug, user_id): """ Adds the random postfix to the user id """ - return client.feed(feed_slug, user_id + get_unique_postfix()) + return client.feed(feed_slug, f"user_id-{uuid4()}") def api_request_parse_validator(test): @@ -860,7 +852,7 @@ def test_uniqueness_topic(self): self.flat3.follow("user", self.user1.user_id) # add the same activity twice now = datetime.datetime.now(tzlocal()) - tweet = "My Way %s" % get_unique_postfix() + tweet = f"My Way {uuid4()}" activity_data = { "actor": 1, "verb": "tweet", From 7d9c8f6faf486cce159357d033ea944ed9709b0d Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Thu, 21 Apr 2022 08:33:15 +0200 Subject: [PATCH 198/208] chore: old review config --- Makefile | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 4c2098a..276b1f0 100644 --- a/Makefile +++ b/Makefile @@ -19,9 +19,8 @@ lint-fix: test: ## Run tests STREAM_KEY=$(STREAM_KEY) STREAM_SECRET=$(STREAM_SECRET) pytest stream/tests - check: lint test ## Run linters + tests reviewdog: black --check --diff --quiet stream | reviewdog -f=diff -f.diff.strip=0 -filter-mode="diff_context" -name=black -reporter=github-pr-review - flake8 --ignore=E501,W503 stream | reviewdog -f=flake8 -name=flake8 -reporter=github-pr-review + flake8 --ignore=E501,W503,E225,W293,F401 stream | reviewdog -f=flake8 -name=flake8 -reporter=github-pr-review From 3a3d5224236a49eafc2adc5005327c6e1f6c9f9a Mon Sep 17 00:00:00 2001 From: Peter Deme Date: Mon, 30 May 2022 09:45:30 +0200 Subject: [PATCH 199/208] docs: update readme (#139) --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 8be6d5d..52bfb6d 100644 --- a/README.md +++ b/README.md @@ -26,6 +26,8 @@ You can use this library to access feeds API endpoints server-side. For the client-side integrations (web and mobile) have a look at the JavaScript, iOS and Android SDK libraries ([docs](https://getstream.io/activity-feeds/)). +> 💡 Note: this is a library for the **Feeds** product. The Chat SDKs can be found [here](https://getstream.io/chat/docs/). 
+ ## ⚙️ Installation From cfa51bba73c36d864a8a45711afddf2cc310d9aa Mon Sep 17 00:00:00 2001 From: Peter Deme Date: Tue, 31 May 2022 14:27:12 +0200 Subject: [PATCH 200/208] docs: django notice (#140) --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 52bfb6d..1265bae 100644 --- a/README.md +++ b/README.md @@ -20,13 +20,15 @@ ## 📝 About Stream +> 💡 Note: this is a library for the **Feeds** product. The Chat SDKs can be found [here](https://getstream.io/chat/docs/). + You can sign up for a Stream account at our [Get Started](https://getstream.io/get_started/) page. You can use this library to access feeds API endpoints server-side. For the client-side integrations (web and mobile) have a look at the JavaScript, iOS and Android SDK libraries ([docs](https://getstream.io/activity-feeds/)). -> 💡 Note: this is a library for the **Feeds** product. The Chat SDKs can be found [here](https://getstream.io/chat/docs/). +> 💡 We have a Django integration available [here](https://github.com/GetStream/stream-django). ## ⚙️ Installation From c4948a2dc954695c42cd2f41ebe84029b0b3e3e6 Mon Sep 17 00:00:00 2001 From: ferhat elmas Date: Thu, 2 Jun 2022 08:54:09 +0200 Subject: [PATCH 201/208] chore: remove peter --- .github/CODEOWNERS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 8c2b60b..02b87e1 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1 +1 @@ -* @ferhatelmas @peterdeme +* @ferhatelmas From bf45d6d981b8489aa43838aa5f06ff890bddb718 Mon Sep 17 00:00:00 2001 From: Jimmy Pettersson <953852+JimmyPettersson85@users.noreply.github.com> Date: Thu, 16 Feb 2023 12:18:15 +0100 Subject: [PATCH 202/208] PBE-111 (#141) * fix: tests and linting * chore: add more venvs * chore: same ignores as Makefile * chore: update deps * fix: add support for 3.11 * ci: add support for 3.11 * chore: update CODEOWNERS --- .github/CODEOWNERS | 2 +- .github/workflows/ci.yml | 2 +- .gitignore | 5 +++++ dotgit/hooks/pre-commit-format.sh | 2 +- setup.py | 9 +++++---- stream/client/client.py | 3 --- stream/collections/base.py | 1 - stream/personalization/base.py | 1 - stream/reactions/base.py | 1 - stream/tests/conftest.py | 18 +++++++++--------- stream/tests/test_client.py | 1 - stream/users/base.py | 1 - 12 files changed, 22 insertions(+), 24 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 02b87e1..10fd19e 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1 +1 @@ -* @ferhatelmas +* @JimmyPettersson85 @xernobyl @yaziine diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c83a6af..d057329 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -16,7 +16,7 @@ jobs: strategy: max-parallel: 1 matrix: - python: ['3.7', '3.8', '3.9', '3.10'] + python: ['3.7', '3.8', '3.9', '3.10', '3.11'] steps: - uses: actions/checkout@v3 with: diff --git a/.gitignore b/.gitignore index 4c239ea..615fdd5 100644 --- a/.gitignore +++ b/.gitignore @@ -61,4 +61,9 @@ secrets.*sh .python-version .venv +.venv3.7 +.venv3.8 +.venv3.9 +.venv3.10 +.venv3.11 .envrc diff --git a/dotgit/hooks/pre-commit-format.sh b/dotgit/hooks/pre-commit-format.sh index a1758b8..bf0d444 100755 --- a/dotgit/hooks/pre-commit-format.sh +++ b/dotgit/hooks/pre-commit-format.sh @@ -10,7 +10,7 @@ if ! black stream --check -q; then exit 1 fi -if ! flake8 --ignore=E501,E225,W293,W503 stream; then +if ! 
flake8 --ignore=E501,E225,W293,W503,F401 stream; then echo echo "commit is aborted because there are some error prone issues in your changes as printed above" echo "your changes are still staged, you can accept formatting changes with git add or ignore them by adding --no-verify to git commit" diff --git a/setup.py b/setup.py index b404722..487ba99 100644 --- a/setup.py +++ b/setup.py @@ -5,10 +5,10 @@ from stream import __version__, __maintainer__, __email__, __license__ install_requires = [ - "requests>=2.3.0,<3", - "pyjwt>=2.0.0,<3", - "pytz>=2019.3", - "aiohttp>=3.6.0", + "requests>=2.28.0,<3", + "pyjwt>=2.6.0,<3", + "pytz>=2022.7.1", + "aiohttp>=3.8.4", ] tests_require = ["pytest", "pytest-cov", "python-dateutil", "pytest-asyncio"] ci_require = ["black", "flake8", "pytest-cov"] @@ -52,6 +52,7 @@ "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Topic :: Software Development :: Libraries :: Python Modules", ], ) diff --git a/stream/client/client.py b/stream/client/client.py index eef7ee6..0345360 100644 --- a/stream/client/client.py +++ b/stream/client/client.py @@ -169,7 +169,6 @@ def activity_partial_update( return self.activities_partial_update(updates=[data]) def activities_partial_update(self, updates=None): - auth_token = self.create_jwt_token("activities", "*", feed_id="*") data = {"changes": updates or []} @@ -195,7 +194,6 @@ def create_redirect_url(self, target_url, user_id, events): return prepared_request.url def track_engagements(self, engagements): - auth_token = self.create_jwt_token("*", "*", feed_id="*") self.post( "engagement/", @@ -205,7 +203,6 @@ def track_engagements(self, engagements): ) def track_impressions(self, impressions): - auth_token = self.create_jwt_token("*", "*", feed_id="*") self.post("impression/", auth_token, data=impressions, service_name="analytics") diff --git a/stream/collections/base.py b/stream/collections/base.py index 44e091b..10c0805 100644 --- a/stream/collections/base.py +++ b/stream/collections/base.py @@ -72,7 +72,6 @@ def delete(self, collection_name, id): class BaseCollection(AbstractCollection, ABC): - URL = "collections/" SERVICE_NAME = "api" diff --git a/stream/personalization/base.py b/stream/personalization/base.py index 730d78f..04f823f 100644 --- a/stream/personalization/base.py +++ b/stream/personalization/base.py @@ -16,7 +16,6 @@ def delete(self, resource, **params): class BasePersonalization(AbstractPersonalization, ABC): - SERVICE_NAME = "personalization" def __init__(self, client, token): diff --git a/stream/reactions/base.py b/stream/reactions/base.py index b83794e..31078d0 100644 --- a/stream/reactions/base.py +++ b/stream/reactions/base.py @@ -44,7 +44,6 @@ def filter(self, **params): class BaseReactions(AbstractReactions, ABC): - API_ENDPOINT = "reaction/" SERVICE_NAME = "api" diff --git a/stream/tests/conftest.py b/stream/tests/conftest.py index ac5df29..c700997 100644 --- a/stream/tests/conftest.py +++ b/stream/tests/conftest.py @@ -1,9 +1,9 @@ import asyncio import os import sys +import pytest_asyncio from uuid import uuid4 -import pytest from stream import connect @@ -17,7 +17,7 @@ async def _parse_response(*args, **kwargs): return _parse_response -@pytest.fixture(scope="module") +@pytest_asyncio.fixture(scope="module") def event_loop(): """Create an instance of the default event loop for each test case.""" loop = asyncio.get_event_loop_policy().new_event_loop() @@ -25,7 +25,7 @@ def event_loop(): 
loop.close() -@pytest.fixture +@pytest_asyncio.fixture async def async_client(): key = os.getenv("STREAM_KEY") secret = os.getenv("STREAM_SECRET") @@ -44,31 +44,31 @@ async def async_client(): yield client -@pytest.fixture +@pytest_asyncio.fixture def user1(async_client): return async_client.feed("user", f"1-{uuid4()}") -@pytest.fixture +@pytest_asyncio.fixture def user2(async_client): return async_client.feed("user", f"2-{uuid4()}") -@pytest.fixture +@pytest_asyncio.fixture def aggregated2(async_client): return async_client.feed("aggregated", f"2-{uuid4()}") -@pytest.fixture +@pytest_asyncio.fixture def aggregated3(async_client): return async_client.feed("aggregated", f"3-{uuid4()}") -@pytest.fixture +@pytest_asyncio.fixture def topic(async_client): return async_client.feed("topic", f"1-{uuid4()}") -@pytest.fixture +@pytest_asyncio.fixture def flat3(async_client): return async_client.feed("flat", f"3-{uuid4()}") diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index 33dfe17..aced337 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -1294,7 +1294,6 @@ def test_activity_partial_update(self): self.assertEqual(updated, expected) def test_activities_partial_update(self): - feed = self.c.feed("user", uuid4()) feed.add_activities( [ diff --git a/stream/users/base.py b/stream/users/base.py index b17dead..21d3d8d 100644 --- a/stream/users/base.py +++ b/stream/users/base.py @@ -24,7 +24,6 @@ def delete(self, user_id): class BaseUsers(AbstractUsers, ABC): - API_ENDPOINT = "user/" SERVICE_NAME = "api" From 8a0e88a6cf115a34c2d6d39a54527398f3fa5a90 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 16 Feb 2023 15:15:13 +0100 Subject: [PATCH 203/208] chore(release): v5.2.0 (#142) * chore(release): v5.2.0 * chore: update changelog * chore: update changelog --------- Co-authored-by: github-actions Co-authored-by: Yassine Ennebati <4570448+yaziine@users.noreply.github.com> Co-authored-by: Jimmy Pettersson --- CHANGELOG.md | 18 ++++++++++++++++++ stream/__init__.py | 2 +- 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 97e250e..9dd1ab5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,21 @@ +# Changelog + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+ +## [5.2.0](https://github.com/GetStream/stream-python/compare/v5.1.1...v5.2.0) (2023-02-16) + + +### Features + +* add support for 3.11 ([2eae7d7](https://github.com/GetStream/stream-python/commit/2eae7d7958f3b869982701188fc0d04a5b8ab021)) +* added async support ([b4515d3](https://github.com/GetStream/stream-python/commit/b4515d337be88ff50ba1cbad8645b1fbc8862ce0)) + + +### Bug Fixes + +* tests and linting ([cfacbbc](https://github.com/GetStream/stream-python/commit/cfacbbcadf45ca91d3e6c2a310dfd6fea1a03146)) +* redirect, uniqueness and deprecations ([aefdcd3](https://github.com/GetStream/stream-python/commit/aefdcd39ff8a41a443455f1a41cc819039015cdb)) + ## 5.1.1 - 2022-01-18 * Handle backward compatible pyjwt 1.x support for token generation diff --git a/stream/__init__.py b/stream/__init__.py index e5de2db..dc56b4b 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = "Copyright 2022, Stream.io, Inc" __credits__ = ["Thierry Schellenbach, mellowmorning.com, @tschellenbach"] __license__ = "BSD-3-Clause" -__version__ = "5.1.1" +__version__ = "5.2.0" __maintainer__ = "Thierry Schellenbach" __email__ = "support@getstream.io" __status__ = "Production" From e04f4cf4c36032b8e141a63d84ffa4f9cd758194 Mon Sep 17 00:00:00 2001 From: Jimmy Pettersson Date: Fri, 17 Feb 2023 10:16:25 +0100 Subject: [PATCH 204/208] chore: fix logo link logo is not working in PyPi since it's a relative reference, changed to use the full GH URL --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 1265bae..2b986e1 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,7 @@ [![build](https://github.com/GetStream/stream-python/workflows/build/badge.svg)](https://github.com/GetStream/stream-python/actions) [![PyPI version](https://badge.fury.io/py/stream-python.svg)](http://badge.fury.io/py/stream-python) ![PyPI - Python Version](https://img.shields.io/pypi/pyversions/stream-python.svg)

[-/+ logo <img> lines stripped during extraction: the relative logo reference was replaced with the full GitHub URL, per the commit message above]

Official Python API client for Stream Feeds, a web service for building scalable newsfeeds and activity streams. From 88db1b197f9a0e4ea16ed255c7e0581058c1d4ca Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 27 Feb 2023 20:57:53 +0100 Subject: [PATCH 205/208] chore(release): v5.2.1 (#143) Co-authored-by: github-actions --- CHANGELOG.md | 2 ++ stream/__init__.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9dd1ab5..f66fbe3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,8 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. +### [5.2.1](https://github.com/GetStream/stream-python/compare/v5.2.0...v5.2.1) (2023-02-27) + ## [5.2.0](https://github.com/GetStream/stream-python/compare/v5.1.1...v5.2.0) (2023-02-16) diff --git a/stream/__init__.py b/stream/__init__.py index dc56b4b..1129601 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = "Copyright 2022, Stream.io, Inc" __credits__ = ["Thierry Schellenbach, mellowmorning.com, @tschellenbach"] __license__ = "BSD-3-Clause" -__version__ = "5.2.0" +__version__ = "5.2.1" __maintainer__ = "Thierry Schellenbach" __email__ = "support@getstream.io" __status__ = "Production" From 53ed9569e2417d925e6b0f378eb98caa0b4d30a1 Mon Sep 17 00:00:00 2001 From: Marco Ulgelmo <54494803+marco-ulge@users.noreply.github.com> Date: Thu, 25 May 2023 15:56:34 +0200 Subject: [PATCH 206/208] Create SECURITY.md --- SECURITY.md | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 SECURITY.md diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000..4094801 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,16 @@ +# Reporting a Vulnerability +At Stream we are committed to the security of our Software. We appreciate your efforts in disclosing vulnerabilities responsibly and we will make every effort to acknowledge your contributions. + +Report security vulnerabilities at the following email address: +``` +[security@getstream.io](mailto:security@getstream.io) +``` +Alternatively it is also possible to open a new issue in the affected repository, tagging it with the `security` tag. + +A team member will acknowledge the vulnerability and will follow-up with more detailed information. A representative of the security team will be in touch if more information is needed. + +# Information to include in a report +While we appreciate any information that you are willing to provide, please make sure to include the following: +* Which repository is affected +* Which branch, if relevant +* Be as descriptive as possible, the team will replicate the vulnerability before working on a fix. 
From 3fcfb59e81f3152a3e5655f992dedbc13d80fdef Mon Sep 17 00:00:00 2001 From: Jimmy Pettersson <953852+JimmyPettersson85@users.noreply.github.com> Date: Wed, 25 Oct 2023 17:22:45 +0100 Subject: [PATCH 207/208] Add support for soft deletions for reacitons (#145) * implement support for soft deletion of reactions * add tests for soft deletion * drop support for 3.7, add support for 3.12, drop lint commit msg * fix missing ' * add support for STREAM_REGION * make tests work with region * bump deps for python 3.12 * remove yassine from CODEOWNERS --- .github/CODEOWNERS | 2 +- .github/workflows/ci.yml | 8 ++----- setup.py | 8 +++---- stream/__init__.py | 3 +++ stream/reactions/base.py | 6 ++++- stream/reactions/reaction.py | 22 +++++++++++++++-- stream/tests/test_async_client.py | 40 ++++++++++++++++++++++++++++++- stream/tests/test_client.py | 39 ++++++++++++++++++++++++++---- 8 files changed, 109 insertions(+), 19 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 10fd19e..5e8b594 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1 +1 @@ -* @JimmyPettersson85 @xernobyl @yaziine +* @JimmyPettersson85 @xernobyl diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d057329..2545a09 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -16,16 +16,12 @@ jobs: strategy: max-parallel: 1 matrix: - python: ['3.7', '3.8', '3.9', '3.10', '3.11'] + python: ['3.8', '3.9', '3.10', '3.11', '3.12'] steps: - uses: actions/checkout@v3 with: fetch-depth: 0 # gives the commit linter access to previous commits - - name: Commit message linter - if: ${{ matrix.python == '3.7' }} - uses: wagoid/commitlint-github-action@v4 - - uses: actions/setup-python@v3 with: python-version: ${{ matrix.python }} @@ -34,7 +30,7 @@ jobs: run: pip install -q ".[test, ci]" - name: Lint with ${{ matrix.python }} - if: ${{ matrix.python == '3.7' }} + if: ${{ matrix.python == '3.8' }} run: make lint - name: Install, test and code coverage with ${{ matrix.python }} diff --git a/setup.py b/setup.py index 487ba99..49b41fb 100644 --- a/setup.py +++ b/setup.py @@ -5,10 +5,10 @@ from stream import __version__, __maintainer__, __email__, __license__ install_requires = [ - "requests>=2.28.0,<3", - "pyjwt>=2.6.0,<3", - "pytz>=2022.7.1", - "aiohttp>=3.8.4", + "requests>=2.31.0,<3", + "pyjwt>=2.8.0,<3", + "pytz>=2023.3.post1", + "aiohttp>=3.9.0b0", ] tests_require = ["pytest", "pytest-cov", "python-dateutil", "pytest-asyncio"] ci_require = ["black", "flake8", "pytest-cov"] diff --git a/stream/__init__.py b/stream/__init__.py index 1129601..d09380b 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -31,6 +31,9 @@ def connect( """ from stream.client import AsyncStreamClient, StreamClient + if location is None: + location = os.environ.get("STREAM_REGION") + stream_url = os.environ.get("STREAM_URL") # support for the heroku STREAM_URL syntax if stream_url and not api_key: diff --git a/stream/reactions/base.py b/stream/reactions/base.py index 31078d0..31e2842 100644 --- a/stream/reactions/base.py +++ b/stream/reactions/base.py @@ -23,7 +23,11 @@ def update(self, reaction_id, data=None, target_feeds=None): pass @abstractmethod - def delete(self, reaction_id): + def delete(self, reaction_id, soft=False): + pass + + @abstractmethod + def restore(self, reaction_id): pass @abstractmethod diff --git a/stream/reactions/reaction.py b/stream/reactions/reaction.py index 0466b21..f65403c 100644 --- a/stream/reactions/reaction.py +++ b/stream/reactions/reaction.py @@ -42,9 +42,18 @@ def 
update(self, reaction_id, data=None, target_feeds=None): data=payload, ) - def delete(self, reaction_id): + def delete(self, reaction_id, soft=False): url = f"{self.API_ENDPOINT}{reaction_id}" return self.client.delete( + url, + service_name=self.SERVICE_NAME, + signature=self.token, + params={"soft": soft}, + ) + + def restore(self, reaction_id): + url = f"{self.API_ENDPOINT}{reaction_id}/restore" + return self.client.put( url, service_name=self.SERVICE_NAME, signature=self.token ) @@ -123,9 +132,18 @@ async def update(self, reaction_id, data=None, target_feeds=None): data=payload, ) - async def delete(self, reaction_id): + async def delete(self, reaction_id, soft=False): url = f"{self.API_ENDPOINT}{reaction_id}" return await self.client.delete( + url, + service_name=self.SERVICE_NAME, + signature=self.token, + params={"soft": soft}, + ) + + async def restore(self, reaction_id): + url = f"{self.API_ENDPOINT}{reaction_id}/restore" + return await self.client.put( url, service_name=self.SERVICE_NAME, signature=self.token ) diff --git a/stream/tests/test_async_client.py b/stream/tests/test_async_client.py index bb1bc27..d4b0c0f 100644 --- a/stream/tests/test_async_client.py +++ b/stream/tests/test_async_client.py @@ -8,7 +8,7 @@ from dateutil.tz import tzlocal import stream -from stream.exceptions import ApiKeyException, InputException +from stream.exceptions import ApiKeyException, InputException, DoesNotExistException def assert_first_activity_id_equal(activities, correct_activity_id): @@ -1049,6 +1049,44 @@ async def test_reaction_delete(async_client): await async_client.reactions.delete(response["id"]) +@pytest.mark.asyncio +async def test_reaction_hard_delete(async_client): + response = await async_client.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + await async_client.reactions.delete(response["id"], soft=False) + + +@pytest.mark.asyncio +async def test_reaction_soft_delete(async_client): + response = await async_client.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + await async_client.reactions.delete(response["id"], soft=True) + + +@pytest.mark.asyncio +async def test_reaction_soft_delete_and_restore(async_client): + response = await async_client.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + await async_client.reactions.delete(response["id"], soft=True) + r1 = await async_client.reactions.get(response["id"]) + assert r1.get("deleted_at", None) is not None + await async_client.reactions.restore(response["id"]) + r1 = await async_client.reactions.get(response["id"]) + assert "deleted_at" not in r1 + + +@pytest.mark.asyncio +async def test_reaction_invalid_restore(async_client): + response = await async_client.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + with pytest.raises(DoesNotExistException): + await async_client.reactions.restore(response["id"]) + + @pytest.mark.asyncio async def test_reaction_add_child(async_client): response = await async_client.reactions.add( diff --git a/stream/tests/test_client.py b/stream/tests/test_client.py index aced337..3ccb8cc 100644 --- a/stream/tests/test_client.py +++ b/stream/tests/test_client.py @@ -17,7 +17,7 @@ import stream from stream import serializer -from stream.exceptions import ApiKeyException, InputException +from stream.exceptions import ApiKeyException, InputException, DoesNotExistException from stream.feed import Feed @@ -150,14 +150,14 @@ def test_api_url(self): ) def test_collections_url_default(self): - c = 
stream.connect("key", "secret") + c = stream.connect("key", "secret", location="") feed_url = c.get_full_url(relative_url="meta/", service_name="api") if not self.local_tests: self.assertEqual(feed_url, "https://api.stream-io-api.com/api/v1.0/meta/") def test_personalization_url_default(self): - c = stream.connect("key", "secret") + c = stream.connect("key", "secret", location="") feed_url = c.get_full_url( relative_url="recommended", service_name="personalization" ) @@ -169,7 +169,7 @@ def test_personalization_url_default(self): ) def test_api_url_default(self): - c = stream.connect("key", "secret") + c = stream.connect("key", "secret", location="") feed_url = c.get_full_url(service_name="api", relative_url="feed/") if not self.local_tests: @@ -1439,6 +1439,37 @@ def test_reaction_delete(self): ) self.c.reactions.delete(response["id"]) + def test_reaction_hard_delete(self): + response = self.c.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + self.c.reactions.delete(response["id"], soft=False) + + def test_reaction_soft_delete(self): + response = self.c.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + self.c.reactions.delete(response["id"], soft=True) + + def test_reaction_soft_delete_and_restore(self): + response = self.c.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + self.c.reactions.delete(response["id"], soft=True) + r1 = self.c.reactions.get(response["id"]) + self.assertIsNot(r1["deleted_at"], None) + self.c.reactions.restore(response["id"]) + r1 = self.c.reactions.get(response["id"]) + self.assertTrue("deleted_at" not in r1) + + def test_reaction_invalid_restore(self): + response = self.c.reactions.add( + "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" + ) + self.assertRaises( + DoesNotExistException, lambda: self.c.reactions.restore(response["id"]) + ) + def test_reaction_add_child(self): response = self.c.reactions.add( "like", "54a60c1e-4ee3-494b-a1e3-50c06acb5ed4", "mike" From 9c8b9cc9c3f9a3134532e63c383dab1e0718fc6c Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 25 Oct 2023 18:58:34 +0200 Subject: [PATCH 208/208] chore(release): v5.3.1 (#147) * chore(release): v5.3.1 * empty to kickstart CI --------- Co-authored-by: github-actions Co-authored-by: Jimmy Pettersson --- CHANGELOG.md | 2 ++ stream/__init__.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f66fbe3..d158ba3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,8 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. +### [5.3.1](https://github.com/GetStream/stream-python/compare/v5.2.1...v5.3.1) (2023-10-25) + ### [5.2.1](https://github.com/GetStream/stream-python/compare/v5.2.0...v5.2.1) (2023-02-27) ## [5.2.0](https://github.com/GetStream/stream-python/compare/v5.1.1...v5.2.0) (2023-02-16) diff --git a/stream/__init__.py b/stream/__init__.py index d09380b..d769388 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -5,7 +5,7 @@ __copyright__ = "Copyright 2022, Stream.io, Inc" __credits__ = ["Thierry Schellenbach, mellowmorning.com, @tschellenbach"] __license__ = "BSD-3-Clause" -__version__ = "5.2.1" +__version__ = "5.3.1" __maintainer__ = "Thierry Schellenbach" __email__ = "support@getstream.io" __status__ = "Production"