diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000000000000000000000000000000000000..f7236dbfe5e60b2afbf6e62299fab4d56eaca534 --- /dev/null +++ b/.flake8 @@ -0,0 +1,3 @@ +[flake8] +max-line-length = 88 +max-doc-length = 88 diff --git a/postgrestutils/__init__.py b/postgrestutils/__init__.py index 35dc1c4ad604f5e7d1684c230dafc16e332cdd6d..7ee8e39de7b95475163e8b11ed867575ca2af942 100644 --- a/postgrestutils/__init__.py +++ b/postgrestutils/__init__.py @@ -16,13 +16,13 @@ default_app_config = "postgrestutils.apps.PostgrestUtilsConfig" REPR_OUTPUT_SIZE = 20 Count = enum.Enum( - 'Count', + "Count", ( - ('NONE', None), - ('EXACT', 'exact'), - ('PLANNED', 'planned'), - ('ESTIMATED', 'estimated') - ) + ("NONE", None), + ("EXACT", "exact"), + ("PLANNED", "planned"), + ("ESTIMATED", "estimated"), + ), ) DEFAULT_SCHEMA = object() @@ -43,13 +43,14 @@ class Session: some of these may be overridden on a per-request basis by using the `.get()` or `.filter()` methods. """ + def __init__( self, base_uri: Optional[str] = None, token: Optional[str] = None, schema: Optional[str] = None, parse_dt: bool = True, - count: Count = Count.NONE + count: Count = Count.NONE, ): """ :param base_uri: base uri of the PostgREST instance to use @@ -104,7 +105,7 @@ class Session: True, parse_dt if parse_dt is not None else self.parse_dt, count if count is not None else self.count, - **kwargs + **kwargs, ) # populate the cache # will raise ObjectDoesNotExist/MultipleObjectsReturned if no or @@ -120,7 +121,7 @@ class Session: count: Optional[Count] = None, schema: Optional[str] = None, **kwargs - ) -> 'JsonResultSet': + ) -> "JsonResultSet": """ :param endpoint: specifies which endpoint to request :param parse_dt: whether to parse datetime strings as returned by @@ -137,20 +138,20 @@ class Session: False, parse_dt if parse_dt is not None else self.parse_dt, count if count is not None else self.count, - **kwargs + **kwargs, ) def _configure_session_defaults(self): - self.session.headers['Accept'] = 'application/json' + self.session.headers["Accept"] = "application/json" if self.token: - self.session.headers['Authorization'] = 'Bearer {}'.format(self.token) + self.session.headers["Authorization"] = "Bearer {}".format(self.token) if self.schema is not None: - self.session.headers['Accept-Profile'] = self.schema + self.session.headers["Accept-Profile"] = self.schema def _set_schema_header(self, schema, kwargs: dict): if schema is DEFAULT_SCHEMA: schema = None - kwargs.setdefault('headers', dict())['Accept-Profile'] = schema + kwargs.setdefault("headers", dict())["Accept-Profile"] = schema class JsonResultSet: @@ -161,7 +162,16 @@ class JsonResultSet: to ensure pythonic behavior. Check the README for more detailed information. """ - def __init__(self, client: Session, endpoint: str, singular: bool, parse_dt: bool, count: Count, **kwargs): + + def __init__( + self, + client: Session, + endpoint: str, + singular: bool, + parse_dt: bool, + count: Count, + **kwargs + ): self._len_cache = None # type: Optional[int] self._result_cache = None # type: Optional[list] @@ -173,10 +183,10 @@ class JsonResultSet: self.request_kwargs = kwargs def __repr__(self): - data = list(self[:REPR_OUTPUT_SIZE + 1]) + data = list(self[: REPR_OUTPUT_SIZE + 1]) if len(data) > REPR_OUTPUT_SIZE: data[-1] = "...(remaining elements truncated)..." 
- return '<{} {}>'.format(self.__class__.__name__, data) + return "<{} {}>".format(self.__class__.__name__, data) def __iter__(self): self._fetch_all() @@ -201,19 +211,30 @@ class JsonResultSet: if not isinstance(key, (int, slice)): raise TypeError( "{self.__class__.__name__} indices must be integers or slices, not {key.__class__.__name__}".format( - self=self, - key=key + self=self, key=key + ) + ) + if (isinstance(key, int) and key < 0) or ( + isinstance(key, slice) + and ( + (key.start is not None and key.start < 0) + or (key.stop is not None and key.stop < 0) + ) + ): + raise ValueError( + "{self.__class__.__name__} does not support negative indexing".format( + self=self ) ) - if ((isinstance(key, int) and key < 0) or - (isinstance(key, slice) and ((key.start is not None and key.start < 0) or - (key.stop is not None and key.stop < 0)))): - raise ValueError("{self.__class__.__name__} does not support negative indexing".format(self=self)) if isinstance(key, slice) and key.step is not None: - raise ValueError("{self.__class__.__name__} does not support stepping".format(self=self)) + raise ValueError( + "{self.__class__.__name__} does not support stepping".format(self=self) + ) # cache is not populated and unbounded slice is requested, i.e. res[:] - if isinstance(key, slice) and all(e is None for e in (self._result_cache, key.start, key.stop)): + if isinstance(key, slice) and all( + e is None for e in (self._result_cache, key.start, key.stop) + ): self._fetch_all() if self._result_cache is not None: @@ -223,9 +244,13 @@ class JsonResultSet: start = key.start if key.start is not None else 0 if key.stop is not None and key.stop <= start: return list() - range = '{start}-{stop}'.format(start=start, stop=key.stop - 1 if key.stop is not None else '') + range = "{start}-{stop}".format( + start=start, stop=key.stop - 1 if key.stop is not None else "" + ) return self._fetch_range(range) - return self._fetch_range('{0}-{0}'.format(key))[0] # single element requested, return dict + return self._fetch_range("{0}-{0}".format(key))[ + 0 + ] # single element requested, return dict def refresh_from_pgrest(self): """Lazily refresh data from PostgREST.""" @@ -240,14 +265,18 @@ class JsonResultSet: """ if self._len_cache is None: request_kwargs = copy.deepcopy(self.request_kwargs) - request_kwargs.setdefault('headers', dict())['Prefer'] = 'count={}'.format(self.count.value) - request_kwargs['headers']['Range-Unit'] = 'items' + request_kwargs.setdefault("headers", dict())["Prefer"] = "count={}".format( + self.count.value + ) + request_kwargs["headers"]["Range-Unit"] = "items" # Have to request something so just fetch the first item - request_kwargs['headers']['Range'] = '0-0' + request_kwargs["headers"]["Range"] = "0-0" - resp = self.client.session.get(urljoin(self.client.base_uri, self.endpoint), **request_kwargs) + resp = self.client.session.get( + urljoin(self.client.base_uri, self.endpoint), **request_kwargs + ) - count = int(resp.headers['Content-Range'].split('/')[-1]) + count = int(resp.headers["Content-Range"].split("/")[-1]) self._len_cache = count # If the request yields only one element anyway, might as well cache @@ -266,9 +295,13 @@ class JsonResultSet: request_kwargs = copy.deepcopy(self.request_kwargs) if self.singular: - request_kwargs.setdefault('headers', dict())['Accept'] = 'application/vnd.pgrst.object+json' + request_kwargs.setdefault("headers", dict())[ + "Accept" + ] = "application/vnd.pgrst.object+json" - resp = self.client.session.get(urljoin(self.client.base_uri, self.endpoint), 
**request_kwargs) + resp = self.client.session.get( + urljoin(self.client.base_uri, self.endpoint), **request_kwargs + ) self._result_cache = self._parse_response(resp) # fetched all elements anyway, caching their length is very cheap @@ -281,10 +314,12 @@ class JsonResultSet: more information. """ request_kwargs = copy.deepcopy(self.request_kwargs) - request_kwargs.setdefault('headers', dict())['Range-Unit'] = 'items' - request_kwargs['headers']['Range'] = range + request_kwargs.setdefault("headers", dict())["Range-Unit"] = "items" + request_kwargs["headers"]["Range"] = range - resp = self.client.session.get(urljoin(self.client.base_uri, self.endpoint), **request_kwargs) + resp = self.client.session.get( + urljoin(self.client.base_uri, self.endpoint), **request_kwargs + ) return self._parse_response(resp) def _parse_response(self, resp): @@ -308,7 +343,13 @@ class JsonResultSet: raise detailed from e # fall back to raising a generic HTTPError exception - raise type(e)(resp.status_code, resp.reason, resp.text, response=resp, request=e.request) + raise type(e)( + resp.status_code, + resp.reason, + resp.text, + response=resp, + request=e.request, + ) if self.parse_dt: json_result = resp.json(object_hook=datetime_parser) @@ -325,7 +366,7 @@ class JsonResultSet: observed row count. """ detail_regex = re.compile( - r'Results contain (?P<row_count>\d+) rows, application/vnd\.pgrst\.object\+json requires 1 row' + r"Results contain (?P<row_count>\d+) rows, application/vnd\.pgrst\.object\+json requires 1 row" ) try: json = resp.json() @@ -335,9 +376,9 @@ class JsonResultSet: # more insights. logger.warning("Unparsable 406: {}".format(resp.text)) else: - result = re.match(detail_regex, json['details']) + result = re.match(detail_regex, json["details"]) if result is not None: - row_count = int(result.group('row_count')) + row_count = int(result.group("row_count")) if row_count == 0: raise ObjectDoesNotExist(json) else: diff --git a/postgrestutils/_django_utils.py b/postgrestutils/_django_utils.py index b552a1c6f2dcdf810c29f17b29481f4224ae171d..8d57cb6a465bc4d95d59e9a3495e7770cb5de900 100644 --- a/postgrestutils/_django_utils.py +++ b/postgrestutils/_django_utils.py @@ -9,8 +9,7 @@ from datetime import datetime from typing import Union from django.conf import settings -from django.utils import dateparse -from django.utils import timezone as django_tz +from django.utils import dateparse, timezone as django_tz import postgrestutils @@ -20,19 +19,17 @@ from .signals import user_account_fetched def autofetch(sender, **kwargs): """Fetch user account on login based on the AUTOFETCH configuration""" - payload = { - 'select': app_settings.AUTOFETCH - } + payload = {"select": app_settings.AUTOFETCH} if settings.DEBUG: # prod uuids != dev uuids, fall back on matching accounts by username - payload['username'] = 'eq.{}'.format(kwargs['user'].get_username()) + payload["username"] = "eq.{}".format(kwargs["user"].get_username()) else: - payload['auth_provider_uid'] = 'eq.{}'.format(kwargs['user'].sso_mapping.uuid) + payload["auth_provider_uid"] = "eq.{}".format(kwargs["user"].sso_mapping.uuid) with postgrestutils.Session() as s: - account = s.get('account', params=payload) - user_account_fetched.send(sender=None, request=kwargs['request'], account=account) + account = s.get("account", params=payload) + user_account_fetched.send(sender=None, request=kwargs["request"], account=account) def _try_django_parse_dt(value: str) -> Union[datetime, str]: diff --git a/postgrestutils/app_settings.py 
b/postgrestutils/app_settings.py index 7b8c8a2a7c0641283cbb0078843ec15b1368dc91..a5be7d65a23176ca84add19c89bf3e66a305a7be 100644 --- a/postgrestutils/app_settings.py +++ b/postgrestutils/app_settings.py @@ -16,6 +16,7 @@ for k in list(globals().keys()): # list prevents errors on changes if _DJANGO: from django.conf import settings as django_settings from django.core.exceptions import ImproperlyConfigured + try: new_value = getattr(django_settings, "POSTGREST_UTILS_" + k) globals()[k] = getattr(django_settings, "POSTGREST_UTILS_" + k) @@ -25,6 +26,7 @@ for k in list(globals().keys()): # list prevents errors on changes pass # django is installed and used but the setting is not present else: import os + try: globals()[k] = os.environ["POSTGREST_UTILS_" + k] except KeyError: diff --git a/postgrestutils/signals.py b/postgrestutils/signals.py index 0221d10e2b9ae3d54128f54328f3eedcd3a2b363..76d5000f1f6ae7614953e11a722ef9f35b7d4e75 100644 --- a/postgrestutils/signals.py +++ b/postgrestutils/signals.py @@ -1,3 +1,3 @@ import django.dispatch -user_account_fetched = django.dispatch.Signal(providing_args=['request', 'account']) +user_account_fetched = django.dispatch.Signal(providing_args=["request", "account"]) diff --git a/postgrestutils/utils.py b/postgrestutils/utils.py index 6eab7a3e9484e5ceb26f04ca7e2542aa1b2cde50..cf21a3fca42b2e96933464be13d075069ae16be9 100644 --- a/postgrestutils/utils.py +++ b/postgrestutils/utils.py @@ -12,16 +12,18 @@ if _DJANGO: logger = logging.getLogger("postgrestutils") # this regex matches postgres' JSON formatting for timestamps -JSON_TIMESTAMP_REGEX = re.compile(r"(?P<year>\d{4})-" - r"(?P<month>\d{2})-" - r"(?P<day>\d{2})" - r"T(?P<hour>\d{2}):" - r"(?P<minute>\d{2}):" - r"(?P<second>\d{2})\." - r"(?P<microsecond>\d{1,6})" - r"((?P<offsetsign>[+-])" - r"(?P<offsethours>\d{2}):" - r"(?P<offsetminutes>\d{2}))?$") +JSON_TIMESTAMP_REGEX = re.compile( + r"(?P<year>\d{4})-" + r"(?P<month>\d{2})-" + r"(?P<day>\d{2})" + r"T(?P<hour>\d{2}):" + r"(?P<minute>\d{2}):" + r"(?P<second>\d{2})\." 
+ r"(?P<microsecond>\d{1,6})" + r"((?P<offsetsign>[+-])" + r"(?P<offsethours>\d{2}):" + r"(?P<offsetminutes>\d{2}))?$" +) def _clean_parts(parts: Dict[str, str]): @@ -44,12 +46,17 @@ def _try_python_parse_dt(value: str) -> Union[datetime, str]: match = JSON_TIMESTAMP_REGEX.match(value) if match: parts = _clean_parts(match.groupdict()) - if parts.get('offsetsign') and parts.get('offsethours') and parts.get('offsetminutes'): - sign = -1 if parts.pop('offsetsign', '+') == '-' else 1 + if ( + parts.get("offsetsign") + and parts.get("offsethours") + and parts.get("offsetminutes") + ): + sign = -1 if parts.pop("offsetsign", "+") == "-" else 1 tz = timezone( - offset=sign * timedelta( - hours=int(parts.pop('offsethours')), - minutes=int(parts.pop('offsetminutes')) + offset=sign + * timedelta( + hours=int(parts.pop("offsethours")), + minutes=int(parts.pop("offsetminutes")), ) ) parsed_dt = datetime(**parts).replace(tzinfo=tz).astimezone() diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000000000000000000000000000000000000..8e790bf46af6d8c1541a7becf53614793bccaf18 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,11 @@ +[tool.black] +line-length = 88 +target-version = ["py35"] + +[tool.isort] +profile = "black" +combine_as_imports = true +default_section = "THIRDPARTY" +known_first_party = "postgrestutils" +known_django = "django" +sections = "FUTURE,STDLIB,DJANGO,THIRDPARTY,FIRSTPARTY,LOCALFOLDER" diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 2048ccfdbac9527ec0b53b4cd67cd5807cfb6ac4..0000000000000000000000000000000000000000 --- a/setup.cfg +++ /dev/null @@ -1,14 +0,0 @@ -[flake8] -max-line-length = 119 -exclude = **/settings/* - -[isort] -combine_as_import = true -default_section = THIRDPARTY -include_trailing_comma = true -known_first_party = postgrestutils -known_django = django -sections = FUTURE,STDLIB,DJANGO,THIRDPARTY,FIRSTPARTY,LOCALFOLDER -line_length = 79 -multi_line_output = 5 -not_skip = __init__.py diff --git a/setup.py b/setup.py index b05b89daa9fb516d65a5b6486ddbbc75e276a5bc..a6fe38fc7358f7425a838202610e7f39c651abcd 100644 --- a/setup.py +++ b/setup.py @@ -9,33 +9,27 @@ os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( - name='postgrestutils', - version='1.0.0', + name="postgrestutils", + version="1.0.0", packages=find_packages(), include_package_data=True, - license='BSD', - description='Some helpers to use our postgREST API(s)', + license="BSD", + description="Some helpers to use our postgREST API(s)", long_description=README, - url='https://lab.it.hs-hannover.de/tools/postgrestutils', - author='Fynn Becker', - author_email='fynn.becker@hs-hannover.de', + url="https://lab.it.hs-hannover.de/tools/postgrestutils", + author="Fynn Becker", + author_email="fynn.becker@hs-hannover.de", zip_safe=False, - install_requires=[ - 'requests>=2.19.1,<3.0.0' - ], - extras_require={ - 'dev': [ - 'requests-mock' - ] - }, + install_requires=["requests>=2.19.1,<3.0.0"], + extras_require={"dev": ["requests-mock"]}, classifiers=[ - 'Environment :: Web Environment', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: BSD License', - 'Operating System :: OS Independent', - 'Programming Language :: Python', - 'Programming Language :: Python :: 3.4', - 'Topic :: Internet :: WWW/HTTP', - 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', + "Environment :: Web Environment", + "Intended Audience :: Developers", + "License :: OSI Approved :: BSD License", + "Operating System :: OS Independent", + "Programming 
Language :: Python", + "Programming Language :: Python :: 3.4", + "Topic :: Internet :: WWW/HTTP", + "Topic :: Internet :: WWW/HTTP :: Dynamic Content", ], ) diff --git a/tests/test_postgrestclient.py b/tests/test_postgrestclient.py index 33eeed59629bd62d06d3e0f9e24fccfca9ea38b9..099f4fc0912e62b5cdc073d875ad6d10d902e44d 100644 --- a/tests/test_postgrestclient.py +++ b/tests/test_postgrestclient.py @@ -6,76 +6,82 @@ from requests_mock import Mocker import postgrestutils -TOKEN = 'JWT_token' +TOKEN = "JWT_token" -default_session = functools.partial(postgrestutils.Session, base_uri='http://example.com/', token=TOKEN) +default_session = functools.partial( + postgrestutils.Session, base_uri="http://example.com/", token=TOKEN +) DEFAULT_HEADERS = { - 'Authorization': 'Bearer {}'.format(TOKEN), - 'Accept': 'application/json' + "Authorization": "Bearer {}".format(TOKEN), + "Accept": "application/json", } SUPERHERO_TEST_DATA = [ { - 'id': 68, - 'name': 'Batman', - 'gender': 'Male', - 'eye_color': 'blue', - 'race': 'Human', - 'hair_color': 'black', - 'height': 188, - 'publisher': 'DC Comics', - 'skin_color': None, - 'alignment': 'good', - 'weight': 95 - }, { - 'id': 212, - 'name': 'Deadpool', - 'gender': 'Male', - 'eye_color': 'brown', - 'race': 'Mutant', - 'hair_color': 'No Hair', - 'height': 188, - 'publisher': 'Marvel Comics', - 'skin_color': None, - 'alignment': 'neutral', - 'weight': 95 - }, { - 'id': 345, - 'name': 'Iron Man', - 'gender': 'Male', - 'eye_color': 'blue', - 'race': 'Human', - 'hair_color': 'Black', - 'height': 198, - 'publisher': 'Marvel Comics', - 'skin_color': None, - 'alignment': 'good', - 'weight': 191 - }, { - 'id': 369, - 'name': 'Joker', - 'gender': 'Male', - 'eye_color': 'green', - 'race': 'Human', - 'hair_color': 'Green', - 'height': 196, - 'publisher': 'DC Comics', - 'skin_color': 'white', - 'alignment': 'bad', - 'weight': 86 - }, { - 'id': 423, - 'name': 'Magneto', - 'gender': 'Male', - 'eye_color': 'grey', - 'race': 'Mutant', - 'hair_color': 'White', - 'height': 188, - 'publisher': 'Marvel Comics', - 'skin_color': None, - 'alignment': 'bad', - 'weight': 86 - } + "id": 68, + "name": "Batman", + "gender": "Male", + "eye_color": "blue", + "race": "Human", + "hair_color": "black", + "height": 188, + "publisher": "DC Comics", + "skin_color": None, + "alignment": "good", + "weight": 95, + }, + { + "id": 212, + "name": "Deadpool", + "gender": "Male", + "eye_color": "brown", + "race": "Mutant", + "hair_color": "No Hair", + "height": 188, + "publisher": "Marvel Comics", + "skin_color": None, + "alignment": "neutral", + "weight": 95, + }, + { + "id": 345, + "name": "Iron Man", + "gender": "Male", + "eye_color": "blue", + "race": "Human", + "hair_color": "Black", + "height": 198, + "publisher": "Marvel Comics", + "skin_color": None, + "alignment": "good", + "weight": 191, + }, + { + "id": 369, + "name": "Joker", + "gender": "Male", + "eye_color": "green", + "race": "Human", + "hair_color": "Green", + "height": 196, + "publisher": "DC Comics", + "skin_color": "white", + "alignment": "bad", + "weight": 86, + }, + { + "id": 423, + "name": "Magneto", + "gender": "Male", + "eye_color": "grey", + "race": "Mutant", + "hair_color": "White", + "height": 188, + "publisher": "Marvel Comics", + "skin_color": None, + "alignment": "bad", + "weight": 86, + }, ] @@ -87,84 +93,105 @@ class TestPgrestClientGet(TestCase): def test_single_object_returned(self, mock): mock.register_uri( - 'GET', - 'http://example.com/superhero?id=eq.1000000000', - request_headers={**DEFAULT_HEADERS, **{'Accept': 
'application/vnd.pgrst.object+json'}}, + "GET", + "http://example.com/superhero?id=eq.1000000000", + request_headers={ + **DEFAULT_HEADERS, + **{"Accept": "application/vnd.pgrst.object+json"}, + }, status_code=200, - reason='OK', - json=self.data + reason="OK", + json=self.data, ) with default_session() as s: - params = {'id': 'eq.1000000000'} - res = s.get('superhero', params=params) + params = {"id": "eq.1000000000"} + res = s.get("superhero", params=params) self.assertEqual(res, self.data) self.assertTrue(mock.called_once) def test_object_does_not_exist(self, mock): mock.register_uri( - 'GET', - 'http://example.com/superhero?id=eq.1337', - request_headers={**DEFAULT_HEADERS, **{'Accept': 'application/vnd.pgrst.object+json'}}, + "GET", + "http://example.com/superhero?id=eq.1337", + request_headers={ + **DEFAULT_HEADERS, + **{"Accept": "application/vnd.pgrst.object+json"}, + }, status_code=406, - reason='Not Acceptable', + reason="Not Acceptable", text="""{"details":"Results contain 0 rows, application/vnd.pgrst.object+json requires 1 row","message":""" - """"JSON object requested, multiple (or no) rows returned"}""" + """"JSON object requested, multiple (or no) rows returned"}""", ) - with default_session() as s, self.assertRaises(postgrestutils.ObjectDoesNotExist): - params = {'id': 'eq.1337'} - s.get('superhero', params=params) + with default_session() as s, self.assertRaises( + postgrestutils.ObjectDoesNotExist + ): + params = {"id": "eq.1337"} + s.get("superhero", params=params) self.assertTrue(mock.called_once) def test_multiple_objects_returned(self, mock): mock.register_uri( - 'GET', - 'http://example.com/superhero', - request_headers={**DEFAULT_HEADERS, **{'Accept': 'application/vnd.pgrst.object+json'}}, + "GET", + "http://example.com/superhero", + request_headers={ + **DEFAULT_HEADERS, + **{"Accept": "application/vnd.pgrst.object+json"}, + }, status_code=406, - reason='Not Acceptable', + reason="Not Acceptable", text="""{"details":"Results contain 5 rows, application/vnd.pgrst.object+json requires 1 row","message":""" - """"JSON object requested, multiple (or no) rows returned"}""" + """"JSON object requested, multiple (or no) rows returned"}""", ) - with default_session() as s, self.assertRaises(postgrestutils.MultipleObjectsReturned): - s.get('superhero') + with default_session() as s, self.assertRaises( + postgrestutils.MultipleObjectsReturned + ): + s.get("superhero") self.assertTrue(mock.called_once) def test_datetime_parser(self, mock): expected = { - 'id': 1337, - 'random': datetime.datetime(2020, 5, 20, 8, 35, 6, 659425, tzinfo=datetime.timezone.utc) + "id": 1337, + "random": datetime.datetime( + 2020, 5, 20, 8, 35, 6, 659425, tzinfo=datetime.timezone.utc + ), } mock.register_uri( - 'GET', - 'http://example.com/random_datetime', - request_headers={**DEFAULT_HEADERS, **{'Accept': 'application/vnd.pgrst.object+json'}}, + "GET", + "http://example.com/random_datetime", + request_headers={ + **DEFAULT_HEADERS, + **{"Accept": "application/vnd.pgrst.object+json"}, + }, status_code=200, - reason='OK', - json={'id': 1337, 'random': "2020-05-20T08:35:06.659425+00:00"} + reason="OK", + json={"id": 1337, "random": "2020-05-20T08:35:06.659425+00:00"}, ) with default_session() as s: - params = {'id': 'eq.1337'} - res = s.get('random_datetime', params=params) + params = {"id": "eq.1337"} + res = s.get("random_datetime", params=params) self.assertEqual(res, expected) self.assertTrue(mock.called_once) def test_without_datetime_parser(self, mock): - test_json = {'id': 1337, 'random': 
"2020-05-20T08:35:06.659425+00:00"} + test_json = {"id": 1337, "random": "2020-05-20T08:35:06.659425+00:00"} mock.register_uri( - 'GET', - 'http://example.com/random_datetime', - request_headers={**DEFAULT_HEADERS, **{'Accept': 'application/vnd.pgrst.object+json'}}, + "GET", + "http://example.com/random_datetime", + request_headers={ + **DEFAULT_HEADERS, + **{"Accept": "application/vnd.pgrst.object+json"}, + }, status_code=200, - reason='OK', - json=test_json + reason="OK", + json=test_json, ) with default_session() as s: - params = {'select': 'id,random', 'id': 'eq.1337'} - res = s.get('random_datetime', params=params, parse_dt=False) + params = {"select": "id,random", "id": "eq.1337"} + res = s.get("random_datetime", params=params, parse_dt=False) self.assertEqual(res, test_json) self.assertTrue(mock.called_once) @@ -178,23 +205,27 @@ class TestPgrestClientFilterStrategyNone(TestCase): def test_fetch_all_first(self, mock): mock.register_uri( - 'GET', - 'http://example.com/superhero', + "GET", + "http://example.com/superhero", request_headers=DEFAULT_HEADERS, status_code=200, - reason='OK', - json=self.data + reason="OK", + json=self.data, ) with default_session() as s: - res = s.filter('superhero') + res = s.filter("superhero") - self.assertIsInstance(res, postgrestutils.JsonResultSet) # should return lazy object + self.assertIsInstance( + res, postgrestutils.JsonResultSet + ) # should return lazy object self.assertFalse(mock.called) # no request should have been made yet self.assertEqual(list(res), self.data) # fetch data self.assertTrue(mock.called_once) # should have been called once - self.assertEqual(res._result_cache, self.data) # fetched data should be cached - self.assertEqual(res._len_cache, len(self.data)) # len of fetched data should be cached + # fetched data should be cached + self.assertEqual(res._result_cache, self.data) + # len of fetched data should be cached + self.assertEqual(res._len_cache, len(self.data)) self.assertEqual(list(res), self.data) # should utilize cache self.assertEqual(res[:1], self.data[:1]) # should utilize cache self.assertEqual(res[:0], self.data[:0]) # should return empty list @@ -205,23 +236,26 @@ class TestPgrestClientFilterStrategyNone(TestCase): def test_fetch_len_first(self, mock): mock.register_uri( - 'GET', - 'http://example.com/superhero', + "GET", + "http://example.com/superhero", request_headers=DEFAULT_HEADERS, status_code=200, - reason='OK', - json=self.data + reason="OK", + json=self.data, ) with default_session() as s: - res = s.filter('superhero') + res = s.filter("superhero") - self.assertIsInstance(res, postgrestutils.JsonResultSet) # should return lazy object + # should return lazy object + self.assertIsInstance(res, postgrestutils.JsonResultSet) self.assertFalse(mock.called) # no request should have been made yet self.assertEqual(len(res), len(self.data)) # should fetch len self.assertTrue(mock.called_once) # should have been called once - self.assertEqual(res._len_cache, len(self.data)) # len of fetched data should be cached - self.assertEqual(res._result_cache, self.data) # results should be cached (counting strategy none) + # len of fetched data should be cached + self.assertEqual(res._len_cache, len(self.data)) + # results should be cached (counting strategy none) + self.assertEqual(res._result_cache, self.data) self.assertEqual(res[:1], self.data[:1]) # should utilize cache self.assertEqual(res[:0], self.data[:0]) # should return empty list self.assertEqual(res[4:2], self.data[4:2]) # should return empty list @@ -232,23 
+266,26 @@ class TestPgrestClientFilterStrategyNone(TestCase): def test_cache_fetching_unbounded_slice(self, mock): mock.register_uri( - 'GET', - 'http://example.com/superhero', + "GET", + "http://example.com/superhero", request_headers=DEFAULT_HEADERS, status_code=200, - reason='OK', - json=self.data + reason="OK", + json=self.data, ) with default_session() as s: - res = s.filter('superhero') + res = s.filter("superhero") - self.assertIsInstance(res, postgrestutils.JsonResultSet) # should return lazy object + # should return lazy object + self.assertIsInstance(res, postgrestutils.JsonResultSet) self.assertFalse(mock.called) # no request should have been made yet self.assertEqual(res[:], self.data) # fetch data self.assertTrue(mock.called_once) # should have been called once - self.assertEqual(res._result_cache, self.data) # fetched data should be cached - self.assertEqual(res._len_cache, len(self.data)) # len of fetched data should be cached + # fetched data should be cached + self.assertEqual(res._result_cache, self.data) + # len of fetched data should be cached + self.assertEqual(res._len_cache, len(self.data)) self.assertEqual(res[:], self.data) # should utilize cache self.assertEqual(res[:0], self.data[:0]) # should return empty list self.assertEqual(res[4:2], self.data[4:2]) # should return empty list @@ -265,31 +302,34 @@ class TestPgrestClientFilterCountingStrategies(TestCase): self.counting_strategies = ( postgrestutils.Count.EXACT, postgrestutils.Count.PLANNED, - postgrestutils.Count.ESTIMATED + postgrestutils.Count.ESTIMATED, ) def test_fetch_all_first(self, mock): # in order to fetch all mock.register_uri( - 'GET', - 'http://example.com/superhero', + "GET", + "http://example.com/superhero", request_headers=DEFAULT_HEADERS, status_code=200, - reason='OK', - json=self.data + reason="OK", + json=self.data, ) for strategy in self.counting_strategies: mock.reset() with default_session(count=strategy) as s: - res = s.filter('superhero') + res = s.filter("superhero") - self.assertIsInstance(res, postgrestutils.JsonResultSet) # should return lazy object + # should return lazy object + self.assertIsInstance(res, postgrestutils.JsonResultSet) self.assertFalse(mock.called) # no request should have been made yet self.assertEqual(list(res), self.data) # fetch data self.assertTrue(mock.called_once) # should have been called once - self.assertEqual(res._result_cache, self.data) # fetched data should be cached - self.assertEqual(res._len_cache, len(self.data)) # len of fetched data should be cached + # fetched data should be cached + self.assertEqual(res._result_cache, self.data) + # len of fetched data should be cached + self.assertEqual(res._len_cache, len(self.data)) self.assertEqual(list(res), self.data) # should utilize cache self.assertEqual(res[:1], self.data[:1]) # should utilize cache @@ -302,67 +342,84 @@ class TestPgrestClientFilterCountingStrategies(TestCase): def test_fetch_len_first(self, mock): # in order to fetch all mock.register_uri( - 'GET', - 'http://example.com/superhero', + "GET", + "http://example.com/superhero", request_headers=DEFAULT_HEADERS, status_code=200, - reason='OK', - json=self.data + reason="OK", + json=self.data, ) # in order to fetch first mock.register_uri( - 'GET', - 'http://example.com/superhero', - request_headers={**DEFAULT_HEADERS, **{'Range-Unit': 'items', 'Range': '0-0'}}, + "GET", + "http://example.com/superhero", + request_headers={ + **DEFAULT_HEADERS, + **{"Range-Unit": "items", "Range": "0-0"}, + }, status_code=200, - reason='OK', - 
headers={'Content-Range': '0-0/*'}, - json=[self.data[0]] + reason="OK", + headers={"Content-Range": "0-0/*"}, + json=[self.data[0]], ) # in order to fetch range since index 2 mock.register_uri( - 'GET', - 'http://example.com/superhero', - request_headers={**DEFAULT_HEADERS, **{'Range-Unit': 'items', 'Range': '2-'}}, + "GET", + "http://example.com/superhero", + request_headers={ + **DEFAULT_HEADERS, + **{"Range-Unit": "items", "Range": "2-"}, + }, status_code=200, - reason='OK', - headers={'Content-Range': '2-4/*'}, - json=self.data[2:] + reason="OK", + headers={"Content-Range": "2-4/*"}, + json=self.data[2:], ) # in order to fetch length using different strategies for strategy in self.counting_strategies: mock.register_uri( - 'GET', - 'http://example.com/superhero', + "GET", + "http://example.com/superhero", request_headers={ **DEFAULT_HEADERS, - **{'Range-Unit': 'items', 'Range': '0-0', 'Prefer': 'count={}'.format(strategy.value)} + **{ + "Range-Unit": "items", + "Range": "0-0", + "Prefer": "count={}".format(strategy.value), + }, }, status_code=206, - reason='Partial Content', - headers={'Content-Range': '0-0/5'}, - json=self.data[0] + reason="Partial Content", + headers={"Content-Range": "0-0/5"}, + json=self.data[0], ) mock.reset() with default_session(count=strategy) as s: - res = s.filter('superhero') + res = s.filter("superhero") - self.assertIsInstance(res, postgrestutils.JsonResultSet) # should return lazy object + # should return lazy object + self.assertIsInstance(res, postgrestutils.JsonResultSet) self.assertFalse(mock.called) # no request should have been made yet self.assertEqual(len(res), len(self.data)) # should fetch len self.assertTrue(mock.called_once) # should have been called once - self.assertEqual(res._len_cache, len(self.data)) # len of fetched data should be cached + # len of fetched data should be cached + self.assertEqual(res._len_cache, len(self.data)) - self.assertEqual(res[:1], self.data[:1]) # should fetch first element as range + # should fetch first element as range + self.assertEqual(res[:1], self.data[:1]) self.assertEqual(res[:0], self.data[:0]) # should return empty list self.assertEqual(res[4:2], self.data[4:2]) # should return empty list - self.assertEqual(res[2:], self.data[2:]) # should fetch range starting at index 2 - self.assertEqual(res[0], self.data[0]) # should fetch first element as range but return dict + # should fetch range starting at index 2 + self.assertEqual(res[2:], self.data[2:]) + # should fetch first element as range but return dict + self.assertEqual(res[0], self.data[0]) self.assertEqual(list(res), self.data) # should fetch all elements - self.assertEqual(res._result_cache, self.data) # should fetch all elements - self.assertEqual(res._result_cache, self.data) # should cache all elements + # should fetch all elements + self.assertEqual(res._result_cache, self.data) + # should cache all elements + self.assertEqual(res._result_cache, self.data) self.assertTrue(mock.called) # should have been called at least once # should have been called 5 times (fetch len, fetch 2 ranges, # fetch first and fetch all) @@ -376,27 +433,32 @@ class TestPgrestClientSessionDefaults(TestCase): self.data = SUPERHERO_TEST_DATA def test_override_parse_dt_session_option(self, mock): - test_json = {'id': 1337, 'random': "2020-05-20T08:35:06.659425+00:00"} + test_json = {"id": 1337, "random": "2020-05-20T08:35:06.659425+00:00"} mock.register_uri( - 'GET', - 'http://example.com/random_datetime', - request_headers={**DEFAULT_HEADERS, **{'Accept': 
'application/vnd.pgrst.object+json'}}, + "GET", + "http://example.com/random_datetime", + request_headers={ + **DEFAULT_HEADERS, + **{"Accept": "application/vnd.pgrst.object+json"}, + }, status_code=200, - reason='OK', - json=test_json + reason="OK", + json=test_json, ) with default_session(parse_dt=False) as s: - params = {'select': 'id,random', 'id': 'eq.1337'} - res = s.get('random_datetime', params=params) + params = {"select": "id,random", "id": "eq.1337"} + res = s.get("random_datetime", params=params) self.assertEqual(res, test_json) self.assertTrue(mock.called_once) mock.reset() - res2 = s.get('random_datetime', params=params, parse_dt=True) + res2 = s.get("random_datetime", params=params, parse_dt=True) expected = { - 'id': 1337, - 'random': datetime.datetime(2020, 5, 20, 8, 35, 6, 659425, tzinfo=datetime.timezone.utc) + "id": 1337, + "random": datetime.datetime( + 2020, 5, 20, 8, 35, 6, 659425, tzinfo=datetime.timezone.utc + ), } self.assertEqual(res2, expected) self.assertTrue(mock.called_once) @@ -404,83 +466,99 @@ class TestPgrestClientSessionDefaults(TestCase): def test_override_count_session_option(self, mock): # in order to fetch all mock.register_uri( - 'GET', - 'http://example.com/superhero', + "GET", + "http://example.com/superhero", request_headers=DEFAULT_HEADERS, status_code=200, - reason='OK', - json=self.data + reason="OK", + json=self.data, ) # in order to fetch length mock.register_uri( - 'GET', - 'http://example.com/superhero', - request_headers={**DEFAULT_HEADERS, **{'Range-Unit': 'items', 'Range': '0-0', 'Prefer': 'count=exact'}}, + "GET", + "http://example.com/superhero", + request_headers={ + **DEFAULT_HEADERS, + **{"Range-Unit": "items", "Range": "0-0", "Prefer": "count=exact"}, + }, status_code=206, - reason='Partial Content', - headers={'Content-Range': '0-0/5'}, - json=self.data[0] + reason="Partial Content", + headers={"Content-Range": "0-0/5"}, + json=self.data[0], ) with default_session(count=postgrestutils.Count.EXACT) as s: - res = s.filter('superhero') + res = s.filter("superhero") - self.assertIsInstance(res, postgrestutils.JsonResultSet) # should return lazy object + # should return lazy object + self.assertIsInstance(res, postgrestutils.JsonResultSet) self.assertFalse(mock.called) # no request should have been made yet self.assertEqual(len(res), len(self.data)) # should fetch len self.assertTrue(mock.called_once) # should have been called once - self.assertEqual(res._len_cache, len(self.data)) # len of fetched data should be cached - self.assertNotEqual(res._result_cache, self.data) # should not have cached all elements + # len of fetched data should be cached + self.assertEqual(res._len_cache, len(self.data)) + # should not have cached all elements + self.assertNotEqual(res._result_cache, self.data) mock.reset() # reset mock # override the count session option in this specific request - res2 = s.filter('superhero', count=postgrestutils.Count.NONE) + res2 = s.filter("superhero", count=postgrestutils.Count.NONE) - self.assertIsInstance(res2, postgrestutils.JsonResultSet) # should return lazy object + # should return lazy object + self.assertIsInstance(res2, postgrestutils.JsonResultSet) self.assertFalse(mock.called) # no request should have been made yet - self.assertEqual(len(res2), len(self.data)) # should fetch all elements to get len + # should fetch all elements to get len + self.assertEqual(len(res2), len(self.data)) self.assertTrue(mock.called_once) # should have been called once - self.assertEqual(res2._len_cache, len(self.data)) # len of 
fetched data should be cached - self.assertEqual(res2._result_cache, self.data) # should have cached all elements + # len of fetched data should be cached + self.assertEqual(res2._len_cache, len(self.data)) + # should have cached all elements + self.assertEqual(res2._result_cache, self.data) def test_override_schema_session_option(self, mock): # in order to fetch all mock.register_uri( - 'GET', - 'http://example.com/superhero', + "GET", + "http://example.com/superhero", request_headers=DEFAULT_HEADERS, status_code=200, - reason='OK', - json=self.data + reason="OK", + json=self.data, ) # in order to fetch all (other schema) mock.register_uri( - 'GET', - 'http://example.com/superhero', - request_headers={**DEFAULT_HEADERS, **{'Accept-Profile': 'other_schema'}}, + "GET", + "http://example.com/superhero", + request_headers={**DEFAULT_HEADERS, **{"Accept-Profile": "other_schema"}}, status_code=200, - reason='OK', - json=self.data + reason="OK", + json=self.data, ) - with default_session(schema='other_schema') as s: - res = s.filter('superhero') + with default_session(schema="other_schema") as s: + res = s.filter("superhero") - self.assertIsInstance(res, postgrestutils.JsonResultSet) # should return lazy object + # should return lazy object + self.assertIsInstance(res, postgrestutils.JsonResultSet) self.assertFalse(mock.called) # no request should have been made yet self.assertEqual(list(res), self.data) # should fetch all elements self.assertTrue(mock.called_once) # should have been called once - self.assertEqual(res._result_cache, self.data) # should have cached all elements - self.assertEqual(res._len_cache, len(self.data)) # should have cached the length + # should have cached all elements + self.assertEqual(res._result_cache, self.data) + # should have cached the length + self.assertEqual(res._len_cache, len(self.data)) mock.reset() - res2 = s.filter('superhero', schema=postgrestutils.DEFAULT_SCHEMA) + res2 = s.filter("superhero", schema=postgrestutils.DEFAULT_SCHEMA) - self.assertIsInstance(res2, postgrestutils.JsonResultSet) # should return lazy object + # should return lazy object + self.assertIsInstance(res2, postgrestutils.JsonResultSet) self.assertFalse(mock.called) # no request.should have been made yet self.assertEqual(list(res2), self.data) # should fetch all elements self.assertTrue(mock.called_once) # should have been called once - self.assertEqual(res2._result_cache, self.data) # should have cached all elements - self.assertEqual(res2._len_cache, len(self.data)) # should have cached the length + # should have cached all elements + self.assertEqual(res2._result_cache, self.data) + # should have cached the length + self.assertEqual(res2._len_cache, len(self.data))
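
Note (not part of the patch): a minimal sketch of the lazy JsonResultSet API that the reformatted tests above exercise. The base URI, token and endpoint come from the test fixtures; the `publisher` filter value is an illustrative PostgREST query parameter, not something asserted in this diff.

    import postgrestutils

    with postgrestutils.Session(
        base_uri="http://example.com/",
        token="JWT_token",
        count=postgrestutils.Count.EXACT,
    ) as s:
        # filter() returns a lazy JsonResultSet; no request is made yet
        heroes = s.filter("superhero", params={"publisher": "eq.DC Comics"})

        n = len(heroes)            # with Count.EXACT: requests items 0-0 with
                                   # "Prefer: count=exact" and reads the total
                                   # from the Content-Range header
        first = heroes[0]          # fetches items 0-0, returns a dict
        a_few = heroes[2:4]        # fetches items 2-3 as a list
        everything = list(heroes)  # fetches and caches the full result set

        # the session-wide counting strategy can be overridden per request;
        # with Count.NONE, len() falls back to fetching (and caching) all rows
        cheap = s.filter("superhero", count=postgrestutils.Count.NONE)

Negative indices and slice steps raise ValueError, mirroring the checks in JsonResultSet.__getitem__.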
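
Note (not part of the patch): a sketch of the singular-object path through Session.get(), based on the behaviour mocked in TestPgrestClientGet above. The `id=eq.68` filter is illustrative; the exception types are the ones exported by the package.

    import postgrestutils

    with postgrestutils.Session(base_uri="http://example.com/", token="JWT_token") as s:
        try:
            # get() sends "Accept: application/vnd.pgrst.object+json" and
            # returns a single row as a dict
            hero = s.get("superhero", params={"id": "eq.68"})
        except postgrestutils.ObjectDoesNotExist:
            pass  # PostgREST answered 406 reporting 0 matching rows
        except postgrestutils.MultipleObjectsReturned:
            pass  # PostgREST answered 406 reporting more than 1 matching row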
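
Note (not part of the patch): a sketch of the datetime-parsing option, matching the test_datetime_parser / test_without_datetime_parser cases above. Endpoint and filter values are taken from those tests.

    import postgrestutils

    with postgrestutils.Session(base_uri="http://example.com/", token="JWT_token") as s:
        # parse_dt defaults to True: strings matching JSON_TIMESTAMP_REGEX are
        # converted to timezone-aware datetimes, e.g.
        # "2020-05-20T08:35:06.659425+00:00" ->
        # datetime.datetime(2020, 5, 20, 8, 35, 6, 659425, tzinfo=datetime.timezone.utc)
        row = s.get("random_datetime", params={"id": "eq.1337"})

        # opt out per request (or per session) to keep the raw strings
        raw = s.get("random_datetime", params={"id": "eq.1337"}, parse_dt=False)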