diff --git a/pfunk/__init__.py b/pfunk/__init__.py index 8568372..56ad1d9 100644 --- a/pfunk/__init__.py +++ b/pfunk/__init__.py @@ -5,8 +5,9 @@ .. include:: ../CONTRIBUTE.md """ __docformat__ = "google" + +from .client import FaunaClient from .collection import Collection, Enum from .fields import (StringField, IntegerField, DateField, DateTimeField, BooleanField, FloatField, EmailField, EnumField, ReferenceField, ManyToManyField, SlugField) from .project import Project -from .client import FaunaClient diff --git a/pfunk/cli.py b/pfunk/cli.py index 0b234c8..7337cae 100644 --- a/pfunk/cli.py +++ b/pfunk/cli.py @@ -1,20 +1,22 @@ -import click import json import os import sys -import datetime -from jinja2 import TemplateNotFound +import click +from envs import env from valley.utils import import_util from werkzeug.serving import run_simple -from pfunk.client import FaunaClient, q -from pfunk.contrib.auth.collections import Group, PermissionGroup +from pfunk.client import FaunaClient, q +from pfunk.contrib.auth.collections import PermissionGroup from pfunk.exceptions import DocNotFound from pfunk.template import wsgi_template, project_template, collections_templates, key_template from pfunk.utils.deploy import Deploy +Group = import_util(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.group.Group')) + + @click.group() def pfunk(): pass @@ -25,6 +27,7 @@ def load_config_file(filename): config = json.load(f) return config + @pfunk.command() @click.option('--generate_local_key', prompt=True, help='Specifies whether to generate a local database and key', default=False) @@ -36,17 +39,19 @@ def load_config_file(filename): @click.option('--description', prompt=True, help='Project Description') @click.option('--api_type', type=click.Choice(['web', 'rest', 'none']), prompt=True, help='API Type (web, rest, none)') @click.argument('name') -def init(name: str, api_type: str, fauna_key: str, bucket: str, email: str, stage_name: str, description: str, host: str): - +def init(name: str, api_type: str, fauna_key: str, bucket: str, email: str, stage_name: str, description: str, host: str, generate_local_key: bool): """ Creates a PFunk project Args: name: Project name api_type: API Gateway type (web, rest, none) + description: Project Description + host: Host fauna_key: Fauna secret key bucket: S3 Bucket email: Default from Email stage_name: Application stage + generate_local_key: Specifies whether to generate a local database and key Returns: @@ -69,21 +74,25 @@ def init(name: str, api_type: str, fauna_key: str, bucket: str, email: str, stag }, f, indent=4, sort_keys=True) open(f'{name}/__init__.py', 'x').close() with open(f'{name}/wsgi.py', 'x') as f: - f.write(wsgi_template.render(PFUNK_PROJECT=f'{name}.project.project')) + f.write(wsgi_template.render( + PFUNK_PROJECT=f'{name}.project.project')) with open(f'{name}/project.py', 'x') as f: f.write(project_template.render()) with open(f'{name}/collections.py', 'x') as f: f.write(collections_templates.render()) if generate_local_key: - client = FaunaClient(secret='secret') + domain = click.prompt('Please enter your local Fauna Docker hostname.', default='fauna') + client = FaunaClient(secret='secret', domain=domain, scheme='http') db_name = f'{name}-local' client.query( q.create_database({'name': db_name}) ) key = client.query( - q.create_key({'database': q.database(db_name), 'role': 'admin'}) + q.create_key( + {'database': q.database(db_name), 'role': 'admin'}) ) - click.secho(f'Fauna Local Secret (copy into your .env or pipenv file): {key}', fg='green') +
click.secho( + f'Fauna Local Secret (copy into your .env or pipenv file): {key}', fg='green') else: click.echo('There is already a project file in this directory.') @@ -113,6 +122,7 @@ def add_stage(stage_name: str, fauna_key: str, filename: str): else: click.echo('You have not run the init command yet.') + @pfunk.command() @click.option('--use_reloader', default=True) @click.option('--use_debugger', default=True) @@ -138,7 +148,8 @@ def local(hostname: str, port: int, wsgi: str, config_file: str, use_debugger: b sys.path.insert(0, os.getcwd()) wsgi_path = wsgi or f'{config.get("name")}.wsgi.app' app = import_util(wsgi_path) - run_simple(hostname, port, app, use_debugger=use_debugger, use_reloader=use_reloader) + run_simple(hostname, port, app, use_debugger=use_debugger, + use_reloader=use_reloader) @pfunk.command() @@ -163,7 +174,6 @@ def publish(stage_name: str, project_path: str, config_path: str, publish_locall project_path = f'{config.get("name")}.project.project' project = import_util(project_path) if not publish_locally: - secret = config['stages'][stage_name]['fauna_secret'] os.environ['FAUNA_SECRET'] = secret project.publish() @@ -191,6 +201,7 @@ def seed_keys(stage_name: str, config_path: str): f.write(key_template.render(keys=keys)) return keys_path + @pfunk.command() @click.option('--local_user', help='Specifies whether the user is local.', prompt=True, default=False) @click.option('--config_path', help='Configuration file path', default='pfunk.json') @@ -202,7 +213,8 @@ def seed_keys(stage_name: str, config_path: str): @click.option('--last_name', prompt=True, help='Last Name') @click.option('--group_slug', prompt=True, help='User Group Slug', default=None) @click.argument('stage_name') -def create_admin_user(stage_name: str, group_slug: str, last_name: str, first_name: str, email: str, password: str, username: str, +def create_admin_user(stage_name: str, group_slug: str, last_name: str, first_name: str, email: str, password: str, + username: str, project_path: str, config_path: str, local_user: bool): """ Create an admin user in the project's Fauna user collection. 
@@ -223,7 +235,7 @@ def create_admin_user(stage_name: str, group_slug: str, last_name: str, first_na """ config = load_config_file(config_path) secret = config['stages'][stage_name]['fauna_secret'] - User = import_util('pfunk.contrib.auth.collections.User') + User = import_util('pfunk.contrib.auth.collections.user.User') if not local_user: os.environ['FAUNA_SECRET'] = secret @@ -247,9 +259,11 @@ def create_admin_user(stage_name: str, group_slug: str, last_name: str, first_na project = import_util(project_path) perm_list = [] for i in project.collections: - perm_list.append(PermissionGroup(collection=i, permissions=['create', 'write', 'read', 'delete'])) + perm_list.append(PermissionGroup(collection=i, permissions=[ + 'create', 'write', 'read', 'delete'])) user.add_permissions(group, perm_list) + @pfunk.command() @click.option('--config_path', help='Configuration file path') @click.argument('stage_name') @@ -271,6 +285,28 @@ def deploy(stage_name: str, config_path: str): return d.deploy(stage_name) + +@pfunk.command() +@click.option('--config_path', help='Configuration file path', default='pfunk.json') +@click.option('--yaml_path', help='Dir to create yaml swagger file to', default='') +def generate_swagger(config_path: str, yaml_path: str): + """ Generates the swagger file of the project from a config json file + + Args: + config_path (str, optional): + dir of the json config file to use + yaml_path (str, optional): + dir to put the generated swagger file + + Returns: + + """ + config = load_config_file(config_path) + sys.path.insert(0, os.getcwd()) + project_path = f'{config.get("name")}.project.project' + project = import_util(project_path) + project.generate_swagger(yaml_dir=yaml_path, config_file=config_path) + + if __name__ == '__main__': pfunk() - diff --git a/pfunk/contrib/auth/collections/__init__.py b/pfunk/contrib/auth/collections/__init__.py new file mode 100644 index 0000000..394c5f4 --- /dev/null +++ b/pfunk/contrib/auth/collections/__init__.py @@ -0,0 +1,103 @@ +import datetime +import json +import random +import uuid + +import jwt +from cryptography.fernet import Fernet +from dateutil import tz +from envs import env +from jwt import ExpiredSignatureError +from valley.utils import import_util +from werkzeug.utils import cached_property + +from pfunk import Collection +from pfunk.exceptions import Unauthorized + + +class Key(object): + + @classmethod + def create_keys(cls): + c = cls() + keys = {} + for i in range(10): + kid = str(uuid.uuid4()) + k = {'signature_key': Fernet.generate_key().decode(), 'payload_key': Fernet.generate_key().decode(), + 'kid': kid} + keys[kid] = k + return keys + + @classmethod + def import_keys(cls): + try: + keys = import_util(env('KEY_MODULE', 'bad.import')) + except ImportError: + keys = {} + return keys + + @classmethod + def get_keys(cls): + keys = cls.import_keys() + return list(keys.values()) + + @classmethod + def get_key(cls): + + return random.choice(cls.get_keys()) + + @classmethod + def create_jwt(cls, secret_claims): + + key = cls.get_key() + pay_f = Fernet(key.get('payload_key')) + gmt = tz.gettz('GMT') + now = datetime.datetime.now(tz=gmt) + exp = now + datetime.timedelta(days=1) + payload = { + 'iat': now.timestamp(), + 'exp': exp.timestamp(), + 'nbf': now.timestamp(), + 'iss': env('PROJECT_NAME', 'pfunk'), + 'til': pay_f.encrypt(json.dumps(secret_claims).encode()).decode() + } + return jwt.encode(payload, key.get('signature_key'), algorithm="HS256", headers={'kid': key.get('kid')}), exp + + @classmethod + def decrypt_jwt(cls, 
encoded): + headers = jwt.get_unverified_header(encoded) + keys = cls.import_keys() + key = keys.get(headers.get('kid')) + try: + decoded = jwt.decode(encoded, key.get('signature_key'), algorithms="HS256", verify=True, + options={"require": ["iat", "exp", "nbf", 'iss', 'til']}) + except ExpiredSignatureError: + raise Unauthorized('Unauthorized') + pay_f = Fernet(key.get('payload_key').encode()) + k = pay_f.decrypt(decoded.get('til').encode()) + return json.loads(k.decode()) + + +class PermissionGroup(object): + """ List of permissions that a user/object has + + Attributes: + collection (`pfunk.collection.Collection`, required): + Collection to allow permissions + permissions (list, required): + What operations should be allowed `['create', 'read', 'delete', 'write']` + """ + valid_actions: list = ['create', 'read', 'delete', 'write'] + + def __init__(self, collection: Collection, permissions: list): + if not issubclass(collection, Collection): + raise ValueError( + 'Permission class requires a Collection class as the first argument.') + self.collection = collection + self._permissions = permissions + self.collection_name = self.collection.get_class_name() + + @cached_property + def permissions(self): + """ Lists the granted permissions, prefixed with the collection name """ + return [f'{self.collection_name}-{i}'.lower() for i in self._permissions if i in self.valid_actions] diff --git a/pfunk/contrib/auth/collections/common.py b/pfunk/contrib/auth/collections/common.py new file mode 100644 index 0000000..2aa07df --- /dev/null +++ b/pfunk/contrib/auth/collections/common.py @@ -0,0 +1,34 @@ +from envs import env + +from pfunk import ReferenceField, Collection +from pfunk.fields import ListField + + +class UserGroups(Collection): + """ Many-to-many collection of the user-group relationship + + The native Fauna way of holding a many-to-many relationship + is to store only the IDs of the two objects. Here in pfunk, we + leverage the flexibility of the collection to add another + field, `permissions`, which holds the capabilities + of a user, allowing easier permission handling + instead of manually going into roles and adding individual + collections, which can be painful in the long term.
+ + Attributes: + collection_name (str): + Name of the collection in Fauna + userID (str): + Fauna ref of user that is tied to the group + groupID (str): + Fauna ref of a collection that is tied with the user + permissions (str[]): + List of permissions, `['create', 'read', 'delete', 'write']` + """ + collection_name = 'users_groups' + userID = ReferenceField(env('USER_COLLECTION', 'pfunk.contrib.auth.collections.user.User')) + groupID = ReferenceField(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.group.Group')) + permissions = ListField() + + def __unicode__(self): + return f"{self.userID}, {self.groupID}, {self.permissions}" diff --git a/pfunk/contrib/auth/collections/group.py b/pfunk/contrib/auth/collections/group.py new file mode 100644 index 0000000..4ae5a96 --- /dev/null +++ b/pfunk/contrib/auth/collections/group.py @@ -0,0 +1,16 @@ +from envs import env + +from pfunk.collection import Collection +from pfunk.fields import SlugField, ManyToManyField, StringField + + +class Group(Collection): + """ Group collection that the user belongs to """ + name = StringField(required=True) + slug = SlugField(unique=True, required=False) + users = ManyToManyField( + env('USER_COLLECTION', 'pfunk.contrib.auth.collections.user.User'), + relation_name='users_groups') + + def __unicode__(self): + return self.name # pragma: no cover diff --git a/pfunk/contrib/auth/collections/group_user.py b/pfunk/contrib/auth/collections/group_user.py new file mode 100644 index 0000000..e69de29 diff --git a/pfunk/contrib/auth/collections.py b/pfunk/contrib/auth/collections/user.py similarity index 62% rename from pfunk/contrib/auth/collections.py rename to pfunk/contrib/auth/collections/user.py index 5a1bcb5..6fca5d3 100644 --- a/pfunk/contrib/auth/collections.py +++ b/pfunk/contrib/auth/collections/user.py @@ -1,106 +1,23 @@ -import datetime -import json -import random import uuid -import jwt -from cryptography.fernet import Fernet -from dateutil import tz from envs import env -from faunadb.errors import BadRequest, NotFound -from jwt import ExpiredSignatureError +from faunadb.errors import BadRequest from valley.exceptions import ValidationException from valley.utils import import_util -from werkzeug.utils import cached_property from pfunk.client import q from pfunk.collection import Collection, Enum +from pfunk.contrib.auth.collections import Key from pfunk.contrib.auth.resources import LoginUser, UpdatePassword, Public, UserRole, LogoutUser -from pfunk.contrib.auth.views import ForgotPasswordChangeView, LoginView, SignUpView, VerifyEmailView, LogoutView, UpdatePasswordView, ForgotPasswordView +from pfunk.contrib.auth.views import ForgotPasswordChangeView, LoginView, SignUpView, VerifyEmailView, LogoutView, \ + UpdatePasswordView, ForgotPasswordView from pfunk.contrib.email.base import send_email -from pfunk.exceptions import LoginFailed, DocNotFound, Unauthorized -from pfunk.fields import EmailField, SlugField, ManyToManyField, ListField, ReferenceField, StringField, EnumField +from pfunk.exceptions import LoginFailed, DocNotFound +from pfunk.fields import EmailField, ManyToManyField, StringField, EnumField AccountStatus = Enum(name='AccountStatus', choices=['ACTIVE', 'INACTIVE']) - - - -class Key(object): - - @classmethod - def create_keys(cls): - c = cls() - keys = {} - for i in range(10): - kid = str(uuid.uuid4()) - k = {'signature_key': Fernet.generate_key().decode(), 'payload_key': Fernet.generate_key().decode(), - 'kid': kid} - keys[kid] = k - return keys - - @classmethod - def import_keys(cls): - 
try: - keys = import_util(env('KEY_MODULE', 'bad.import')) - except ImportError: - keys = {} - return keys - - @classmethod - def get_keys(cls): - keys = cls.import_keys() - return list(keys.values()) - - @classmethod - def get_key(cls): - - return random.choice(cls.get_keys()) - - @classmethod - def create_jwt(cls, secret_claims): - - key = cls.get_key() - pay_f = Fernet(key.get('payload_key')) - gmt = tz.gettz('GMT') - now = datetime.datetime.now(tz=gmt) - exp = now + datetime.timedelta(days=1) - payload = { - 'iat': now.timestamp(), - 'exp': exp.timestamp(), - 'nbf': now.timestamp(), - 'iss': env('PROJECT_NAME', 'pfunk'), - 'til': pay_f.encrypt(json.dumps(secret_claims).encode()).decode() - } - return jwt.encode(payload, key.get('signature_key'), algorithm="HS256", headers={'kid': key.get('kid')}), exp - - @classmethod - def decrypt_jwt(cls, encoded): - headers = jwt.get_unverified_header(encoded) - keys = cls.import_keys() - key = keys.get(headers.get('kid')) - try: - decoded = jwt.decode(encoded, key.get('signature_key'), algorithms="HS256", verify=True, - options={"require": ["iat", "exp", "nbf", 'iss', 'til']}) - except ExpiredSignatureError: - raise Unauthorized('Unauthorized') - pay_f = Fernet(key.get('payload_key').encode()) - k = pay_f.decrypt(decoded.get('til').encode()) - return json.loads(k.decode()) - - -class Group(Collection): - """ Group collection that the user belongs to """ - name = StringField(required=True) - slug = SlugField(unique=True, required=False) - users = ManyToManyField( - 'pfunk.contrib.auth.collections.User', relation_name='users_groups') - - def __unicode__(self): - return self.name # pragma: no cover - - def attach_verification_key(doc): if not doc.ref and doc.use_email_verification: doc.attach_verification_key() @@ -122,8 +39,10 @@ class BaseUser(Collection): collection_roles = [Public, UserRole] non_public_fields = ['groups'] use_email_verification = True + group_class = import_util(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.group.Group')) # Views - collection_views = [LoginView, SignUpView, VerifyEmailView, LogoutView, UpdatePasswordView, ForgotPasswordView, ForgotPasswordChangeView] + collection_views = [LoginView, SignUpView, VerifyEmailView, LogoutView, UpdatePasswordView, ForgotPasswordView, + ForgotPasswordChangeView] # Signals pre_create_signals = [attach_verification_key] post_create_signals = [send_verification_email] @@ -150,7 +69,7 @@ def login(cls, username, password, _token=None): try: return c.client(_token=_token).query( q.call("login_user", { - "username": username, "password": password}) + "username": username, "password": password}) ) except BadRequest: raise LoginFailed( @@ -164,6 +83,7 @@ def logout(cls, _token=None): q.call("logout_user") ) + def permissions(self, _token=None): return [] @@ -247,7 +167,7 @@ def send_verification_email(self, from_email=None, verification_type='signup'): @classmethod def forgot_password(cls, email): - """ Sends forgot password email to let user + """ Sends forgot password email to let user use that link to reset their password """ user = cls.get_by('unique_User_email', email) @@ -323,64 +243,11 @@ def __unicode__(self): return self.username # pragma: no cover -class UserGroups(Collection): - """ Many-to-many collection of the user-group relationship - - The native fauna-way of holding many-to-many relationship - is to only have the ID of the 2 object. 
Here in pfunk, we - leverage the flexibility of the collection to have another - field, which is `permissions`, this field holds the capablities - of a user, allowing us to add easier permission handling. - Instead of manually going to roles and adding individual - collections which can be painful in long term. - - Attributes: - collection_name (str): - Name of the collection in Fauna - userID (str): - Fauna ref of user that is tied to the group - groupID (str): - Fauna ref of a collection that is tied with the user - permissions (str[]): - List of permissions, `['create', 'read', 'delete', 'write']` - """ - collection_name = 'users_groups' - userID = ReferenceField(env('USER_COLLECTION', 'pfunk.contrib.auth.collections.User')) - groupID = ReferenceField(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.Group')) - permissions = ListField() - - def __unicode__(self): - return f"{self.userID}, {self.groupID}, {self.permissions}" - - -class PermissionGroup(object): - """ List of permission that a user/object has - - Attributes: - collection (`pfunk.collection.Collection`, required): - Collection to allow permissions - permission (list, required): - What operations should be allowed `['create', 'read', 'delete', 'write']` - """ - valid_actions: list = ['create', 'read', 'delete', 'write'] - - def __init__(self, collection: Collection, permissions: list): - if not issubclass(collection, Collection): - raise ValueError( - 'Permission class requires a Collection class as the first argument.') - self.collection = collection - self._permissions = permissions - self.collection_name = self.collection.get_class_name() - - @cached_property - def permissions(self): - """ Lists all collections and its given permissions """ - return [f'{self.collection_name}-{i}'.lower() for i in self._permissions if i in self.valid_actions] - - class User(BaseUser): + user_group_class = import_util('pfunk.contrib.auth.collections.common.UserGroups') + group_class = import_util('pfunk.contrib.auth.collections.group.Group') """ User that has permission capabilities. Extension of `BaseUser` """ - groups = ManyToManyField(Group, 'users_groups') + groups = ManyToManyField(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.group.Group'), 'users_groups') @classmethod def get_permissions(cls, ref, _token=None): @@ -388,7 +255,7 @@ def get_permissions(cls, ref, _token=None): def get_groups(self, _token=None): """ Returns the groups (collections) that the user is bound with """ - return [Group.get(i.id(), _token=_token) for i in self.client(_token=_token).query( + return [self.group_class.get(i.id(), _token=_token) for i in self.client(_token=_token).query( q.paginate(q.match('users_groups_by_user', self.ref)) ).get('data')] @@ -408,8 +275,8 @@ def permissions(self, _token=None): """ perm_list = [] for i in self.get_groups(_token=_token): - ug = UserGroups.get_index('users_groups_by_group_and_user', [ - i.ref, self.ref], _token=_token) + ug = self.user_group_class.get_index('users_groups_by_group_and_user', [ + i.ref, self.ref], _token=_token) for user_group in ug: p = [] if isinstance(user_group.permissions, list): @@ -419,24 +286,24 @@ def permissions(self, _token=None): return perm_list def add_permissions(self, group, permissions: list, _token=None): - """ Adds permission for the user - - Adds permission by extending the list of permission - in the many-to-many collection of the user, i.e. 
in + """ Adds permission for the user + + Adds permission by extending the list of permission + in the many-to-many collection of the user, i.e. in the `UserGroup` collection. Args: - group (str, required): + group (str, required): Group collection of the User permissions (list, required): Permissions to give, `['create', 'read', 'delete', 'write']` Just add the operation you need _token (str, required): auth token of the user - + Returns: UserGroup (`contrib.auth.collections.UserGroup`): - `UserGroup` instance which has the added permissions + `UserGroup` instance which has the added permissions of the user """ perm_list = [] @@ -444,9 +311,9 @@ def add_permissions(self, group, permissions: list, _token=None): perm_list.extend(i.permissions) try: - user_group = UserGroups.get_by('users_groups_by_group_and_user', terms=[group.ref, self.ref]) + user_group = self.user_group_class.get_by('users_groups_by_group_and_user', terms=[group.ref, self.ref]) except DocNotFound: - user_group = UserGroups.create(userID=self.ref, groupID=group.ref, permissions=perm_list) + user_group = self.user_group_class.create(userID=self.ref, groupID=group.ref, permissions=perm_list) if user_group.permissions != perm_list: user_group.permissions = perm_list user_group.save() diff --git a/pfunk/contrib/auth/resources.py b/pfunk/contrib/auth/resources.py index 40a560f..ab0c65a 100644 --- a/pfunk/contrib/auth/resources.py +++ b/pfunk/contrib/auth/resources.py @@ -1,5 +1,5 @@ from pfunk.client import q -from pfunk.resources import Function, Role, Index +from pfunk.resources import Function, Role class AuthFunction(Function): diff --git a/pfunk/contrib/auth/views.py b/pfunk/contrib/auth/views.py index fbfea54..ee3de70 100644 --- a/pfunk/contrib/auth/views.py +++ b/pfunk/contrib/auth/views.py @@ -1,7 +1,6 @@ from abc import ABC from envs import env -from werkzeug.http import http_date from werkzeug.routing import Rule from pfunk.web.views.base import ActionMixin @@ -118,8 +117,8 @@ class ForgotPasswordChangeView(ActionMixin, JSONAuthView): def get_query(self): kwargs = self.get_query_kwargs() return self.collection.verify_email( - str(kwargs['verification_key']), - verify_type='forgot', + str(kwargs['verification_key']), + verify_type='forgot', password=kwargs['password']) diff --git a/pfunk/contrib/ecommerce/collections.py b/pfunk/contrib/ecommerce/collections.py index a507904..7e00515 100644 --- a/pfunk/contrib/ecommerce/collections.py +++ b/pfunk/contrib/ecommerce/collections.py @@ -1,19 +1,21 @@ import stripe from envs import env +from valley.utils import import_util from pfunk.collection import Collection -from pfunk.contrib.auth.collections import User, Group +from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole +from pfunk.contrib.ecommerce.views import ListStripePackage, DetailStripePackage, CheckoutSuccessView, BaseWebhookView from pfunk.exceptions import DocNotFound -from pfunk.fields import EmailField, SlugField, ManyToManyField, ListField, ReferenceField, StringField, EnumField, FloatField -from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole, Public, UserRole -from pfunk.contrib.ecommerce.resources import StripePublic -from pfunk.contrib.ecommerce.views import BaseWebhookView, ListStripePackage, DetailStripePackage, CheckoutSuccessView +from pfunk.fields import ReferenceField, StringField, FloatField from pfunk.web.views.json import CreateView, UpdateView, DeleteView - stripe.api_key = env('STRIPE_API_KEY') +User = 
import_util(env('USER_COLLECTION', 'pfunk.contrib.auth.collections.user.User')) +Group = import_util(env('GROUP_COLLECTION', 'pfunk.contrib.auth.collections.group.Group')) + + class StripePackage(Collection): """ Collection that has the essential info about a stripe package @@ -23,7 +25,7 @@ class StripePackage(Collection): Read and detail views are naturally public. Write operations requires authentication from admin group. While it grealty - depends on your app, it is recommended to have this only + depends on your app, it is recommended to have this only modified by the admins and use `StripeCustomer` model to attach a `stripe_id` to a model that is bound for payment. """ @@ -42,7 +44,7 @@ def __unicode__(self): @property def stripe_price(self): - return int(self.price*100) + return int(self.price * 100) class StripeCustomer(Collection): @@ -52,10 +54,10 @@ class StripeCustomer(Collection): can you structure your collections. Override the fields and functions to match your system. """ - user = ReferenceField(User) collection_roles = [GenericUserBasedRole] - stripe_id = StringField(required=True, unique=True) collection_views = [BaseWebhookView] + user = ReferenceField(User) + stripe_id = StringField(required=True, unique=True) def __unicode__(self): return self.customer_id diff --git a/pfunk/contrib/ecommerce/resources.py b/pfunk/contrib/ecommerce/resources.py index da8460a..ebd0729 100644 --- a/pfunk/contrib/ecommerce/resources.py +++ b/pfunk/contrib/ecommerce/resources.py @@ -1,5 +1,5 @@ from pfunk.client import q -from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole, Public, UserRole +from pfunk.contrib.auth.resources import Public class StripePublic(Public): diff --git a/pfunk/contrib/ecommerce/views.py b/pfunk/contrib/ecommerce/views.py index a9ab3c6..9d74e0f 100644 --- a/pfunk/contrib/ecommerce/views.py +++ b/pfunk/contrib/ecommerce/views.py @@ -1,21 +1,18 @@ import collections import json +from json import JSONDecodeError + +import bleach import requests import stripe -import bleach from envs import env -from datetime import datetime -from json import JSONDecodeError -from werkzeug.routing import Rule from jinja2 import Environment, BaseLoader +from werkzeug.routing import Rule -from pfunk.contrib.email import ses -from pfunk.exceptions import DocNotFound -from pfunk.web.views.json import JSONView, ListView, DetailView, CreateView from pfunk.contrib.email.ses import SESBackend -from pfunk.contrib.auth.collections import Group, User +from pfunk.exceptions import DocNotFound from pfunk.web.views.base import ActionMixin - +from pfunk.web.views.json import ListView, DetailView, CreateView stripe.api_key = env('STRIPE_API_KEY') STRIPE_PUBLISHABLE_KEY = env('STRIPE_PUBLISHABLE_KEY') @@ -46,7 +43,7 @@ class CheckoutView(DetailView): def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) customer = self.collection.objects.get_or_create_customer( - self.request.user) # `StripeCustomer` collection + self.request.user) # `StripeCustomer` collection session = stripe.checkout.Session.create( payment_method_types=['card'], customer=customer.customer_id, @@ -181,9 +178,9 @@ def send_html_email(self, subject, from_email: str, to_email_list: list, templat def check_signing_secret(self): """ - Make sure the request's Stripe signature to make sure it matches our signing secret - then returns the event - + Make sure the request's Stripe signature to make sure it matches our signing secret + then returns the event + :return: Stripe Event 
Object """ # If we are running tests we can't verify the signature but we need the event objects @@ -198,7 +195,7 @@ def get_transfer_data(self): def checkout_session_completed(self): """ A method to override to implement custom actions - after successful Stripe checkout. + after successful Stripe checkout. This is a Stripe event. Use this method by subclassing this class in your diff --git a/pfunk/contrib/email/base.py b/pfunk/contrib/email/base.py index 452da44..a5c87a8 100644 --- a/pfunk/contrib/email/base.py +++ b/pfunk/contrib/email/base.py @@ -10,7 +10,8 @@ class EmailBackend(object): """ Base email backend class """ - def get_template(self, template:str): + + def get_template(self, template: str): """ Get the template based on the template location string Args: @@ -81,5 +82,5 @@ def send_email(subject: str, to_emails: list, html_template: str = None, txt_tem with warnings.catch_warnings(): warnings.simplefilter('ignore', category=ResourceWarning) email_backend().send_email(subject=subject, to_emails=to_emails, html_template=html_template, - txt_template=txt_template, from_email=from_email, cc_emails=cc_emails, - bcc_emails=bcc_emails, fail_silently=fail_silently, **kwargs) \ No newline at end of file + txt_template=txt_template, from_email=from_email, cc_emails=cc_emails, + bcc_emails=bcc_emails, fail_silently=fail_silently, **kwargs) diff --git a/pfunk/contrib/email/ses.py b/pfunk/contrib/email/ses.py index 20077e5..fd181d2 100644 --- a/pfunk/contrib/email/ses.py +++ b/pfunk/contrib/email/ses.py @@ -1,5 +1,6 @@ import boto3 from envs import env + from pfunk.contrib.email.base import EmailBackend @@ -47,5 +48,5 @@ def send_email(self, subject: str, to_emails: list, html_template: str = None, t 'Body': self.get_body_kwargs(html_template=html_template, txt_template=txt_template, **kwargs) } ) - - return res \ No newline at end of file + + return res diff --git a/pfunk/contrib/generic.py b/pfunk/contrib/generic.py index a0b00b9..204b541 100644 --- a/pfunk/contrib/generic.py +++ b/pfunk/contrib/generic.py @@ -1,13 +1,12 @@ -from pfunk.resources import Function, Index from pfunk.client import q - +from pfunk.resources import Function class GenericFunction(Function): action = 'create' def get_role(self): - return None # pragma: no cover + return None # pragma: no cover def get_name(self): return f"{self.action}_{self.collection.get_class_name()}" @@ -24,19 +23,19 @@ def get_name(self): def get_body(self): return q.query( q.lambda_(["input"], - q.map_( - q.lambda_(['ref'], - q.get(q.var('ref')) - ), - q.paginate( - q.match(q.index(self.collection.all_index_name())), - q.select('size', q.var('input')) - ) - ) - ) + q.map_( + q.lambda_(['ref'], + q.get(q.var('ref')) + ), + q.paginate( + q.match(q.index(self.collection.all_index_name())), + q.select('size', q.var('input')) + ) + ) + ) ) - - + + class GenericCreate(GenericFunction): def get_body(self): @@ -68,13 +67,13 @@ def get_body(self): )) - class GenericDelete(GenericFunction): action = 'delete' def get_body(self): return q.query( q.lambda_(["input"], - q.delete(q.ref(q.collection(self.collection.get_collection_name()), q.select('id', q.var("input")))) + q.delete( + q.ref(q.collection(self.collection.get_collection_name()), q.select('id', q.var("input")))) ) - ) \ No newline at end of file + ) diff --git a/pfunk/contrib/templates.py b/pfunk/contrib/templates.py index e86940a..cbb4c9a 100644 --- a/pfunk/contrib/templates.py +++ b/pfunk/contrib/templates.py @@ -2,5 +2,4 @@ from jinja2 import Environment from jinja2.loaders import 
FileSystemLoader - -temp_env = Environment(loader=FileSystemLoader(env('TEMPLATE_ROOT_DIR'))) \ No newline at end of file +temp_env = Environment(loader=FileSystemLoader(env('TEMPLATE_ROOT_DIR'))) diff --git a/pfunk/exceptions.py b/pfunk/exceptions.py index fc128e1..9625b75 100644 --- a/pfunk/exceptions.py +++ b/pfunk/exceptions.py @@ -1,5 +1,3 @@ - - class LoginFailed(Exception): """Exception raised when an attempt to login fails.""" pass @@ -22,4 +20,4 @@ class Unauthorized(Exception): class GraphQLError(Exception): """Graphql SyntaxError""" - pass \ No newline at end of file + pass diff --git a/pfunk/fields.py b/pfunk/fields.py index d0e06e3..e376333 100644 --- a/pfunk/fields.py +++ b/pfunk/fields.py @@ -5,11 +5,10 @@ from valley.properties import CharProperty, IntegerProperty, DateTimeProperty, DateProperty, FloatProperty, \ BooleanProperty, EmailProperty, SlugProperty, BaseProperty, ForeignProperty, ForeignListProperty, ListProperty from valley.utils import import_util +from valley.validators import ChoiceValidator, ForeignValidator -from valley.validators import Validator, ChoiceValidator, ForeignValidator - -from pfunk.collection import Enum from pfunk.client import Ref +from pfunk.collection import Enum class ChoiceListValidator(ChoiceValidator): @@ -113,6 +112,7 @@ def validate(self, value, key): raise ValidationException('{0}: This value ({1}) should be an instance of {2}.'.format( key, value, self.foreign_class.__name__)) + class ReferenceField(GraphQLMixin, ForeignProperty): def get_validators(self): @@ -153,7 +153,7 @@ def validate(self, value, key): self.foreign_class = import_util(self.foreign_class) if value: for obj in value: - if not isinstance(obj,self.foreign_class): + if not isinstance(obj, self.foreign_class): raise ValidationException( '{0}: This value ({1}) should be an instance of {2}.'.format( key, obj, self.foreign_class.__name__)) @@ -162,7 +162,8 @@ def validate(self, value, key): class ManyToManyField(GraphQLMixin, ForeignListProperty): relation_field = True - def __init__(self, foreign_class, relation_name, return_type=None,return_prop=None,**kwargs): + def __init__(self, foreign_class, relation_name, return_type=None, return_prop=None, **kwargs): + self.foreign_class = foreign_class self.relation_name = relation_name super(ManyToManyField, self).__init__(foreign_class, return_type=return_type, return_prop=return_prop, **kwargs) @@ -187,8 +188,13 @@ def get_python_value(self, value): c.ref = i c._lazied = True ra(c) - if isinstance(i, self.foreign_class): - ra(i) + + try: + if isinstance(i, self.foreign_class): + ra(i) + except TypeError: + if f'{i.__class__.__module__}.{i.__class__.__name__}' == self.foreign_class: + ra(i) return ref_list def get_db_value(self, value): diff --git a/pfunk/project.py b/pfunk/project.py index 321f6ed..ce635d1 100644 --- a/pfunk/project.py +++ b/pfunk/project.py @@ -1,8 +1,9 @@ import logging -import requests from io import BytesIO + +import requests from envs import env -import swaggyp as sw + from faunadb.client import FaunaClient from jinja2 import Template from valley.contrib import Schema @@ -189,9 +190,7 @@ def publish(self, mode: str = 'merge') -> int: Returns: int """ - gql_io = BytesIO(self.render().encode()) - if self.client: secret = self.client.secret else: @@ -207,9 +206,10 @@ def publish(self, mode: str = 'merge') -> int: test_mode = env('PFUNK_TEST_MODE', False, var_type='boolean') if not test_mode: print('GraphQL Schema Imported Successfully!!') # pragma: no cover - for col in set(self.collections): - 
col.publish() + for col in set(self.collections): + col.publish() if resp.status_code != 200: + print(resp.text) print(resp.content) return resp.status_code @@ -294,9 +294,21 @@ def wsgi_app(self, environ, start_response): start_response(status_str, response.wsgi_headers) return [str.encode(response.body)] - def generate_swagger(self): + def generate_swagger(self, yaml_dir='', config_file='pfunk.json'): + """ Generates a swagger file that houses all endpoints + + Args: + yaml_dir (str, optional): + which directory to create the swagger yaml file + config_file (str, optional): + which directory to look for the config file + + Returns: + swagger file + """ swag = SwaggerDoc( collections=self.collections, - rules=[GraphQLView.url()]) - swag_file = swag.generate_swagger() + rules=[GraphQLView.url()], + config_file=config_file) + swag_file = swag.generate_swagger(dir=yaml_dir) return swag_file diff --git a/pfunk/queryset.py b/pfunk/queryset.py index e9195cc..5c49e96 100644 --- a/pfunk/queryset.py +++ b/pfunk/queryset.py @@ -25,4 +25,4 @@ def __len__(self): return len(self.data) def __getitem__(self, x): - return self.data[x] \ No newline at end of file + return self.data[x] diff --git a/pfunk/resources.py b/pfunk/resources.py index a4e9058..c31f98e 100644 --- a/pfunk/resources.py +++ b/pfunk/resources.py @@ -2,8 +2,8 @@ from faunadb.query import query -from pfunk.utils.publishing import create_or_update_function, create_or_update_role, create_or_pass_index from pfunk.client import q +from pfunk.utils.publishing import create_or_update_function, create_or_update_role, create_or_pass_index class Resource(object): @@ -52,20 +52,20 @@ def get_payload(self) -> dict: return payload_dict def publish(self): - raise NotImplementedError # pragma: no cover + raise NotImplementedError # pragma: no cover def unpublish(self): - raise NotImplementedError # pragma: no cover + raise NotImplementedError # pragma: no cover def get_body(self): - raise NotImplementedError # pragma: no cover + raise NotImplementedError # pragma: no cover class Function(Resource): def get_role(self): """Gets the role to use when calling the function.""" - return None # pragma: no cover + return None # pragma: no cover def publish(self): """ @@ -88,7 +88,7 @@ class Role(Resource): user_table: str = None def get_lambda(self, resource_type): - return # pragma: no cover + return # pragma: no cover def get_payload(self) -> dict: """ @@ -98,12 +98,14 @@ def get_payload(self) -> dict: """ payload_dict = { "name": self.get_name(), - "membership": self.get_membership(), "privileges": self.get_privileges(), } data = self.get_data() + membership = self.get_membership() if data: payload_dict['data'] = data + if membership: + payload_dict['membership'] = membership return payload_dict def get_data(self) -> dict: @@ -112,10 +114,10 @@ def get_data(self) -> dict: Returns: dict """ - return None # pragma: no cover + return None # pragma: no cover def get_privileges(self): - raise NotImplementedError # pragma: no cover + raise NotImplementedError # pragma: no cover def get_membership_lambda(self): """ @@ -125,10 +127,10 @@ def get_membership_lambda(self): """ return q.query( q.lambda_(['object_ref'], - q.equals( - q.select('account_status', q.select('data', q.get(q.var('object_ref')))), - "ACTIVE" - ) + q.equals( + q.select('account_status', q.select('data', q.get(q.var('object_ref')))), + "ACTIVE" + ) )) def get_membership(self) -> dict: @@ -137,10 +139,13 @@ def get_membership(self) -> dict: Returns: dict """ - return { + membership = 
self.get_membership_lambda() + payload_dict = { 'resource': q.collection(self.user_table or self.collection.get_collection_name()), - 'predicate': self.get_membership_lambda() } + if membership: + payload_dict['predicate'] = self.get_membership_lambda() + return payload_dict def publish(self): """ @@ -189,7 +194,6 @@ def get_kwargs(self) -> dict: kwargs = {'name': self.name, 'source': q.collection(self.source), } if self.terms: - kwargs['terms'] = self.terms if self.values: kwargs['values'] = self.values @@ -246,4 +250,3 @@ def get_body(self): ) ) ) - diff --git a/pfunk/template.py b/pfunk/template.py index a61f68f..71fdd27 100644 --- a/pfunk/template.py +++ b/pfunk/template.py @@ -9,7 +9,7 @@ } {% endfor %} {% for t in collection_list %} -type {{t.get_class_name()|capitalize}} { +type {{t.__name__}} { {% for k,v in t._base_properties.items() %} {{k}}:{{v.get_graphql_type()}} {% endfor %} @@ -19,7 +19,7 @@ type Query { {% for t in collection_list %} {% if t.all_index %} - all{{t.get_verbose_plural_name()|capitalize}}: [{{t.get_class_name()|capitalize}}] @index(name: "all_{{t.get_verbose_plural_name()}}") + all{{t.get_verbose_plural_name()|capitalize}}: [{{t.__name__}}] @index(name: "all_{{t.get_verbose_plural_name()}}") {% endif %} {% endfor %} {{extra_graphql_queries}} @@ -56,4 +56,4 @@ key_template = Template(""" KEYS = {{keys}} -""") \ No newline at end of file +""") diff --git a/pfunk/testcase.py b/pfunk/testcase.py index eb6d022..054bbab 100644 --- a/pfunk/testcase.py +++ b/pfunk/testcase.py @@ -1,10 +1,10 @@ +import os import unittest - import uuid -import os from valley.utils import import_util from werkzeug.test import Client + from pfunk import Project from pfunk.client import FaunaClient, q from pfunk.template import key_template @@ -41,8 +41,13 @@ class CollectionTestCase(PFunkTestCase): def setUp(self) -> None: super(CollectionTestCase, self).setUp() self.project = Project() - - self.project.add_resources(self.collections) + coll = [] + for i in self.collections: + if isinstance(i, str): + coll.append(import_util(i)) + else: + coll.append(i) + self.project.add_resources(coll) self.project.publish() @@ -59,7 +64,6 @@ def setUp(self) -> None: with open(self.keys_path, 'w+') as f: f.write(key_template.render(keys=keys)) - def tearDown(self) -> None: super(APITestCase, self).tearDown() if os.path.exists(self.keys_path): diff --git a/pfunk/tests/__init__.py b/pfunk/tests/__init__.py index 74257c0..40083f0 100644 --- a/pfunk/tests/__init__.py +++ b/pfunk/tests/__init__.py @@ -1,6 +1,6 @@ +from ast import Del from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField from pfunk.resources import Index -from pfunk.contrib.auth.collections import User, Group from pfunk.contrib.auth.resources import GenericGroupBasedRole, GenericUserBasedRole GENDER_PRONOUN = Enum(name='gender_pronouns', choices=['he', 'her', 'they']) @@ -32,7 +32,7 @@ class Person(Collection): last_name = StringField(required=True) gender_pronoun = EnumField(GENDER_PRONOUN) sport = ReferenceField(Sport) - group = ReferenceField(Group) + group = ReferenceField('pfunk.contrib.auth.collections.group.Group') def __unicode__(self): return f"{self.first_name} {self.last_name}" @@ -41,7 +41,7 @@ def __unicode__(self): class House(Collection): collection_roles = [GenericUserBasedRole] address = StringField(required=True) - user = ReferenceField(User) + user = ReferenceField('pfunk.contrib.auth.collections.user.User') def __unicode__(self): - return self.address \ No newline at end of file + return 
self.address diff --git a/pfunk/tests/init_digitalocean.py b/pfunk/tests/init_digitalocean.py new file mode 100644 index 0000000..e3d9ac5 --- /dev/null +++ b/pfunk/tests/init_digitalocean.py @@ -0,0 +1,78 @@ +import os +from valley.utils import import_util + +from pfunk.contrib.auth.collections.user import BaseUser, User +from pfunk.contrib.auth.collections.group import Group +from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField +from pfunk.web.request import DigitalOCeanRequest +from pfunk.contrib.auth.views import ForgotPasswordChangeView, LoginView, SignUpView, VerifyEmailView, LogoutView, UpdatePasswordView, ForgotPasswordView +from pfunk.web.views.json import DetailView, CreateView, UpdateView, DeleteView, ListView + + +class DOLoginView(LoginView): + request_class = DigitalOCeanRequest + + +class DOSignUpView(SignUpView): + request_class = DigitalOCeanRequest + + +class DOVerifyEmailView(VerifyEmailView): + request_class = DigitalOCeanRequest + + +class DOLogoutView(LogoutView): + request_class = DigitalOCeanRequest + + +class DOUpdatePasswordView(UpdatePasswordView): + request_class = DigitalOCeanRequest + + +class DOForgotPasswordView(ForgotPasswordView): + request_class = DigitalOCeanRequest + + +class DOForgotPasswordChangeView(ForgotPasswordChangeView): + request_class = DigitalOCeanRequest + + +class DOUser(User): + collection_views = [DOLoginView, DOSignUpView, DOVerifyEmailView, DOLogoutView, + DOUpdatePasswordView, DOForgotPasswordView, DOForgotPasswordChangeView] + group_class = import_util('pfunk.tests.init_digitalocean.DOGroup') + +class DOGroup(Group): + pass + + +class DODetailView(DetailView): + request_class = DigitalOCeanRequest + + +class DOCreateView(CreateView): + request_class = DigitalOCeanRequest + + +class DOUpdateView(UpdateView): + request_class = DigitalOCeanRequest + + +class DOListView(ListView): + request_class = DigitalOCeanRequest + + +class DODeleteView(DeleteView): + request_class = DigitalOCeanRequest + + +class Blog(Collection): + """ Collection for DigitalOcean-Type request """ + title = StringField(required=True) + content = StringField(required=True) + user = ReferenceField(DOUser) + crud_views = [DODetailView, DOCreateView, + DOUpdateView, DOListView, DODeleteView] + + def __unicode__(self): + return self.title \ No newline at end of file diff --git a/pfunk/tests/test_auth.py b/pfunk/tests/test_auth.py index 33ec70a..afc8204 100644 --- a/pfunk/tests/test_auth.py +++ b/pfunk/tests/test_auth.py @@ -1,13 +1,16 @@ from faunadb.errors import PermissionDenied from pfunk.contrib.auth.collections import PermissionGroup -from pfunk.tests import User, Group, Sport, Person, House +from pfunk.contrib.auth.collections.group import Group +from pfunk.contrib.auth.collections.user import User from pfunk.exceptions import LoginFailed from pfunk.testcase import CollectionTestCase +from pfunk.tests import Sport, Person, House class AuthTestCase(CollectionTestCase): - collections = [User, Group, Sport, Person, House] + collections = [User, Group, + Sport, Person, House] def setUp(self) -> None: super(AuthTestCase, self).setUp() diff --git a/pfunk/tests/test_aws.py b/pfunk/tests/test_aws.py new file mode 100644 index 0000000..d28c852 --- /dev/null +++ b/pfunk/tests/test_aws.py @@ -0,0 +1,68 @@ +import os +import unittest +import tempfile +from unittest import mock + +from pfunk.utils.aws import ApiGateway +from pfunk.tests import User, Group, Person, Sport +from pfunk.project import Project + + +class ApiGatewayTests(unittest.TestCase): + """ Unit tests for creation of an API from a Swagger file + + Note that these unit tests use a mocked boto3 client by default.
If + you want to test against a real endpoint, remove the + patch decorator at `setUpClass` and the `mocked` + param. Also make sure you have the required + env vars for AWS credentials and you have + the json config in the current env. + """ + + @classmethod + @mock.patch('boto3.client') + def setUpClass(cls, mocked) -> None: + cls.project = Project() + cls.aws_client = ApiGateway() + cls.project.add_resources([Person, Sport, Group, User]) + + swagger = cls.project.generate_swagger() + cls.swagger_dir = swagger['dir'] + cls.swagger_file = swagger['swagger_file'] + + def test_validate_yaml(self): + result = self.aws_client.validate_yaml(self.swagger_dir) + self.assertIsNone(result) # if there are no errors, then spec is valid + + def test_validate_wrong_yaml(self): + result = self.aws_client.validate_yaml('wrong yaml...33::39') + # if there are returned objs, there is an error + self.assertIsNotNone(result) + + @mock.patch('boto3.client') + def test_create_api_from_yaml(self, mocked): + result = self.aws_client.create_api_from_yaml( + yaml_file=self.swagger_dir) + self.assertTrue(result['success']) + + @mock.patch('boto3.client') + def test_create_api_from_wrong_yaml(self, mocked): + with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w') as tmp: + tmp.seek(0) + tmp.write('test wrong yaml') + result = self.aws_client.create_api_from_yaml(tmp.name) + self.assertEqual(result['error'], 'Bad Request. YAML is not valid.') + + @mock.patch('boto3.client') + def test_update_api_from_yaml(self, mocked): + result = self.aws_client.update_api_from_yaml( + yaml_file=self.swagger_dir, mode='merge') + self.assertTrue(result['success']) + + @mock.patch('boto3.client') + def test_update_api_from_wrong_yaml(self, mocked): + with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w') as tmp: + tmp.seek(0) + tmp.write('test wrong yaml') + result = self.aws_client.update_api_from_yaml(tmp.name, mode='merge') + self.assertEqual(result['error'], 'Bad Request. 
YAML is not valid.') diff --git a/pfunk/tests/test_collection.py b/pfunk/tests/test_collection.py index 717b164..2692497 100644 --- a/pfunk/tests/test_collection.py +++ b/pfunk/tests/test_collection.py @@ -50,7 +50,3 @@ def test_get_unique_together(self): sport = Sport() sport.get_unique_together() self.assertEqual(len(sport.collection_indexes), 1) - - - - diff --git a/pfunk/tests/test_crud.py b/pfunk/tests/test_crud.py index c09c17e..b7abc64 100644 --- a/pfunk/tests/test_crud.py +++ b/pfunk/tests/test_crud.py @@ -1,6 +1,5 @@ -from faunadb.errors import PermissionDenied - -from pfunk.tests import User, Group +from pfunk.contrib.auth.collections.group import Group +from pfunk.contrib.auth.collections.user import User from pfunk.testcase import CollectionTestCase @@ -12,8 +11,8 @@ def setUp(self) -> None: self.managers = Group.create(name='Managers', slug='managers') self.power_users = Group.create(name='Power Users', slug='power-users') self.user = User.create(username='test', email='tlasso@example.org', first_name='Ted', - last_name='Lasso', _credentials='abc123', account_status='ACTIVE', groups=[self.managers]) - + last_name='Lasso', _credentials='abc123', account_status='ACTIVE', + groups=[self.managers]) def test_create_user(self): self.assertEqual(2, len(Group.all())) @@ -33,7 +32,3 @@ def test_update(self): self.user.save() u = User.get(self.user.ref.id()) self.assertEqual(u.username, 'test-c') - - - - diff --git a/pfunk/tests/test_deployment.py b/pfunk/tests/test_deployment.py index 587f9f0..c938b29 100644 --- a/pfunk/tests/test_deployment.py +++ b/pfunk/tests/test_deployment.py @@ -1,8 +1,9 @@ -from pfunk.contrib.auth.collections import Group, User -from pfunk.testcase import PFunkTestCase -from pfunk.project import Project from pfunk.client import q -from pfunk.tests import Sport, Person, User, Group +from pfunk.contrib.auth.collections.group import Group +from pfunk.contrib.auth.collections.user import User +from pfunk.project import Project +from pfunk.testcase import PFunkTestCase +from pfunk.tests import Sport, Person class DeploymentTestCase(PFunkTestCase): @@ -42,6 +43,3 @@ def test_project_publish(self): # functions self.project.publish() self.project.publish() - - - diff --git a/pfunk/tests/test_email.py b/pfunk/tests/test_email.py index 491a3a2..af42f6d 100644 --- a/pfunk/tests/test_email.py +++ b/pfunk/tests/test_email.py @@ -1,13 +1,14 @@ import tempfile -from werkzeug.test import Client from unittest import mock -import os + from jinja2.exceptions import TemplateNotFound +from werkzeug.test import Client -from pfunk.tests import User, Group -from pfunk.testcase import APITestCase -from pfunk.contrib.email.ses import SESBackend +from pfunk.contrib.auth.collections.group import Group +from pfunk.contrib.auth.collections.user import User from pfunk.contrib.email.base import EmailBackend +from pfunk.contrib.email.ses import SESBackend +from pfunk.testcase import APITestCase class TestEmailBackend(APITestCase): @@ -22,7 +23,6 @@ def setUp(self) -> None: self.backend = EmailBackend() def test_get_template(self): - template = self.backend.get_template('email/email_template.html') # test jinja render if no exceptions template.render(unittest_value="random value") @@ -58,7 +58,6 @@ def setUp(self) -> None: @mock.patch('boto3.client') def test_send_email(self, mocked): - res = self.SES.send_email( subject="test", to_emails=["testemail@email.com"], diff --git a/pfunk/tests/test_jwt.py b/pfunk/tests/test_jwt.py index a9ff9b3..571a907 100644 --- a/pfunk/tests/test_jwt.py +++ 
b/pfunk/tests/test_jwt.py @@ -1,10 +1,8 @@ -from faunadb.errors import PermissionDenied - from pfunk.contrib.auth.collections import Key -from pfunk.tests import User, Group +from pfunk.contrib.auth.collections.group import Group +from pfunk.contrib.auth.collections.user import User from pfunk.exceptions import LoginFailed from pfunk.testcase import APITestCase -from pfunk.contrib.auth.collections import Key class AuthToken(APITestCase): diff --git a/pfunk/tests/test_project.py b/pfunk/tests/test_project.py index d96985f..858dcc3 100644 --- a/pfunk/tests/test_project.py +++ b/pfunk/tests/test_project.py @@ -1,12 +1,38 @@ +import json +import os import unittest + +from pfunk.cli import init from pfunk.project import Project -from pfunk.tests import Person, Sport, GENDER_PRONOUN, Group, User +from pfunk.tests import Person, Sport, GENDER_PRONOUN +from pfunk.contrib.auth.collections.user import User +from pfunk.contrib.auth.collections.group import Group class ProjectTestCase(unittest.TestCase): def setUp(self) -> None: self.project = Project() + with open(f'pfunk.json', 'x') as f: + json.dump({ + 'name': 'test', + 'api_type': 'rest', + 'description': 'test project', + 'host': 'localhost', + 'stages': {'dev': { + 'key_module': f'test.dev_keys.KEYS', + 'fauna_secret': 'test-key', + 'bucket': 'test-bucket', + 'default_from_email': 'test@example.org' + }} + }, f, indent=4, sort_keys=True) + + def tearDown(self) -> None: + os.remove("pfunk.json") + try: + os.remove('swagger.yaml') + except FileNotFoundError: + pass def test_add_resource(self): self.project.add_resource(Person) diff --git a/pfunk/tests/test_resources.py b/pfunk/tests/test_resources.py index 05a7117..faf2f02 100644 --- a/pfunk/tests/test_resources.py +++ b/pfunk/tests/test_resources.py @@ -1,6 +1,8 @@ import unittest -from pfunk.tests import SimpleIndex + from pfunk.client import q +from pfunk.tests import SimpleIndex + class IndexTestCase(unittest.TestCase): @@ -20,9 +22,9 @@ def test_get_kwargs(self): self.assertEqual( self.index.get_kwargs(), { - 'name':'simple-index', + 'name': 'simple-index', 'source': q.collection('Project'), 'terms': ['name', 'slug'], 'unique': True } - ) \ No newline at end of file + ) diff --git a/pfunk/tests/test_web_change_password.py b/pfunk/tests/test_web_change_password.py index 85e6fc7..bdc2d7a 100644 --- a/pfunk/tests/test_web_change_password.py +++ b/pfunk/tests/test_web_change_password.py @@ -1,4 +1,5 @@ -from pfunk.tests import User, Group +from pfunk.contrib.auth.collections.group import Group +from pfunk.contrib.auth.collections.user import User from pfunk.testcase import APITestCase @@ -25,12 +26,12 @@ def test_update_password(self): headers={ "Authorization": self.token }) - + new_token, new_exp = User.api_login("test", "updated_password") self.assertIsNotNone(new_token) self.assertTrue(res.json['success']) - + def test_update_pass_wrong_current(self): """ Tests `pfunk.contrib.auth.views.UpdatePasswordView` throw an error if the current password given was wrong """ res = self.c.post('/user/update-password/', @@ -43,6 +44,6 @@ def test_update_pass_wrong_current(self): "Authorization": self.token }) expected = {'success': False, 'data': {'validation_errors': {'current_password': ' Password update failed.'}}} - + self.assertDictEqual(res.json, expected) self.assertFalse(res.json['success']) diff --git a/pfunk/tests/test_web_crud.py b/pfunk/tests/test_web_crud.py index fe0fd2e..71123e6 100644 --- a/pfunk/tests/test_web_crud.py +++ b/pfunk/tests/test_web_crud.py @@ -1,7 +1,7 @@ from werkzeug.test 
import Client -from pfunk.tests import User, Group -from pfunk.exceptions import LoginFailed +from pfunk.contrib.auth.collections.group import Group +from pfunk.contrib.auth.collections.user import User from pfunk.testcase import APITestCase from pfunk.tests import House @@ -45,7 +45,7 @@ def test_create(self): self.assertTrue(res.json['success']) self.assertIn("the street somewhere", [ - house.address for house in House.all()]) + house.address for house in House.all()]) def test_update(self): self.assertNotIn("the updated street somewhere", [ @@ -59,7 +59,7 @@ def test_update(self): self.assertTrue(res.json['success']) self.assertIn("the updated street somewhere", [ - house.address for house in House.all()]) + house.address for house in House.all()]) def test_delete(self): res = self.c.delete(f'/house/delete/{self.house.ref.id()}/', diff --git a/pfunk/tests/test_web_digitalocean.py b/pfunk/tests/test_web_digitalocean.py new file mode 100644 index 0000000..3744805 --- /dev/null +++ b/pfunk/tests/test_web_digitalocean.py @@ -0,0 +1,84 @@ +import os +from valley.utils import import_util + +from pfunk import Collection, StringField, EnumField, Enum, ReferenceField, SlugField, ManyToManyField, IntegerField, BooleanField, DateTimeField +from pfunk.testcase import APITestCase +from pfunk.web.request import DigitalOCeanRequest +from pfunk.web.views.json import DetailView, CreateView, UpdateView, DeleteView, ListView +from pfunk.contrib.auth.views import ForgotPasswordChangeView, LoginView, SignUpView, VerifyEmailView, LogoutView, UpdatePasswordView, ForgotPasswordView +from pfunk.tests.init_digitalocean import DOUser, DOGroup, Blog + + +# TODO: Mock digitalocean environment functions here to emulate working proj in digitalocean ecosystem +# TODO: find a way to override requestclass for the whole pfunk app +class TestWebDigitalOcean(APITestCase): + collections = [DOUser, DOGroup, Blog] + + def setUp(self) -> None: + super().setUp() + self.group = DOGroup.create(name='Power Users', slug='power-users') + self.user = DOUser.create(username='test', email='tlasso@example.org', first_name='Ted', + last_name='Lasso', _credentials='abc123', account_status='ACTIVE', + groups=[self.group]) + self.blog = Blog.create( + title='test_blog', content='test content', user=self.user) + + self.token, self.exp = DOUser.api_login("test", "abc123") + print(f'\n\nTOKEN: {self.token}') + print(f'\n\nEXP: {self.exp}') + + def test_mock(self): + assert True + + # def test_read(self): + # res = self.c.get(f'/blog/detail/{self.blog.ref.id()}/', + # headers={ + # "Authorization": self.token}) + # print(f'RESPONSE:\n{res.json}') + # self.assertTrue(res.json['success']) + # self.assertEqual("test content", res.json['data']['data']['content']) + + # def test_read_all(self): + # res = self.c.get(f'/blog/list/', + # headers={ + # "Authorization": self.token}) + # self.assertTrue(res.json['success']) + + # def test_create(self): + # self.assertNotIn("the created blog", [ + # blog.content for blog in Blog.all()]) + # res = self.c.post('/blog/create/', + # json={ + # "title": "test_create_blog", + # "content": "the created blog", + # "user": self.user.ref.id()}, + # headers={ + # "Authorization": self.token}) + + # self.assertTrue(res.json['success']) + # self.assertIn("test_create_blog", [ + # blog.title for blog in Blog.all()]) + + # def test_update(self): + # self.assertNotIn("the updated blog", [ + # house.address for house in Blog.all()]) + # res = self.c.put(f'/blog/update/{self.blog.ref.id()}/', + # json={ + # "title": 
"test_updated_blog", + # "content": "the updated blog", + # "user": self.user.ref.id()}, + # headers={ + # "Authorization": self.token}) + + # self.assertTrue(res.json['success']) + # self.assertIn("test_updated_blog", [ + # blog.title for blog in Blog.all()]) + + # def test_delete(self): + # res = self.c.delete(f'/blog/delete/{self.blog.ref.id()}/', + # headers={ + # "Authorization": self.token, + # "Content-Type": "application/json" + # }) + + # self.assertTrue(res.json['success']) diff --git a/pfunk/tests/test_web_forgot_password.py b/pfunk/tests/test_web_forgot_password.py index 4b81492..83c3e32 100644 --- a/pfunk/tests/test_web_forgot_password.py +++ b/pfunk/tests/test_web_forgot_password.py @@ -1,6 +1,7 @@ from werkzeug.test import Client -from pfunk.tests import User, Group +from pfunk.contrib.auth.collections.group import Group +from pfunk.contrib.auth.collections.user import User from pfunk.testcase import APITestCase @@ -26,16 +27,16 @@ def test_send_forgot_req(self): "Content-Type": "application/json"}) self.assertTrue(res.json['success']) - + def test_submit_key_for_forgot_pass(self): """ Submits the key from the forgot password email to initiate password reset """ - + res = self.c.put(f'/user/forgot-password/', - json={ - "verification_key": self.key, - "password": "new_updated_pass"}, - headers={ - "Content-Type": "application/json"}) + json={ + "verification_key": self.key, + "password": "new_updated_pass"}, + headers={ + "Content-Type": "application/json"}) new_login = User.api_login("test", "new_updated_pass") self.assertTrue(res.json['success']) @@ -45,11 +46,11 @@ def test_submit_wrong_key_for_forgot_pass(self): """ Submit a wrong key for verification of reset password. Should return `Not Found` """ key = 'wrong-key' res = self.c.put(f'/user/forgot-password/', - json={ - "verification_key": key, - "password": "forgotten_password"}, - headers={ - "Content-Type": "application/json"}) + json={ + "verification_key": key, + "password": "forgotten_password"}, + headers={ + "Content-Type": "application/json"}) expected = {'data': 'Not Found', 'success': False} self.assertFalse(res.json['success']) diff --git a/pfunk/tests/test_web_login.py b/pfunk/tests/test_web_login.py index b08cc27..4a895c6 100644 --- a/pfunk/tests/test_web_login.py +++ b/pfunk/tests/test_web_login.py @@ -1,6 +1,7 @@ from werkzeug.test import Client -from pfunk.tests import User, Group +from pfunk.contrib.auth.collections.group import Group +from pfunk.contrib.auth.collections.user import User from pfunk.exceptions import LoginFailed from pfunk.testcase import APITestCase @@ -24,6 +25,7 @@ def test_login(self): # check if response has cookies self.assertIsNotNone(res.headers['Set-Cookie']) + self.assertTrue(res.json['success']) def test_wrong_login(self): @@ -36,11 +38,11 @@ def test_wrong_login(self): def test_logout(self): """ Tests `pfunk.contrib.auth.views.LogoutView` invalidate token login and remove cookie """ token, exp = User.api_login("test", "abc123") + res = self.c.post('/user/logout/', headers={ "Authorization": token, "Content-Type": "application/json" }) - self.assertTrue(res.json['success']) def test_wrong_logout(self): diff --git a/pfunk/tests/test_web_signup.py b/pfunk/tests/test_web_signup.py index f1c5fa4..c651084 100644 --- a/pfunk/tests/test_web_signup.py +++ b/pfunk/tests/test_web_signup.py @@ -1,6 +1,7 @@ from werkzeug.test import Client -from pfunk.tests import User, Group +from pfunk.contrib.auth.collections.group import Group +from pfunk.contrib.auth.collections.user import User 
from pfunk.testcase import APITestCase @@ -27,7 +28,7 @@ def test_signup(self): "email": "testemail@email.com", "first_name": "Forest", "last_name": "Gump", - "_credential_field": "password" + "_credential_field": "password" }) # token = User.login(username="new_user", password="password") @@ -41,7 +42,7 @@ def test_signup_not_unique(self): "email": "testemail@email.com", "first_name": "Forest", "last_name": "Gump", - "_credential_field": "password" + "_credential_field": "password" }) self.assertFalse(res.json['success']) diff --git a/pfunk/tests/test_web_stripe.py b/pfunk/tests/test_web_stripe.py index 5d41a66..23d833a 100644 --- a/pfunk/tests/test_web_stripe.py +++ b/pfunk/tests/test_web_stripe.py @@ -1,15 +1,15 @@ -import json -from lib2to3.pytree import Base import tempfile -from werkzeug.test import Client from types import SimpleNamespace from unittest import mock -from pfunk.tests import User, Group +from werkzeug.test import Client + from pfunk.contrib.auth.collections import PermissionGroup +from pfunk.contrib.auth.collections.group import Group +from pfunk.contrib.auth.collections.user import User from pfunk.contrib.ecommerce.collections import StripePackage, StripeCustomer -from pfunk.testcase import APITestCase from pfunk.contrib.ecommerce.views import BaseWebhookView +from pfunk.testcase import APITestCase from pfunk.web.request import HTTPRequest @@ -23,7 +23,8 @@ def setUp(self) -> None: last_name='Lasso', _credentials='abc123', account_status='ACTIVE', groups=[self.group]) self.stripe_pkg = StripePackage.create(group=self.group, - stripe_id='100', price='10', description='unit testing...', name='unit test package') + stripe_id='100', price='10', description='unit testing...', + name='unit test package') self.stripe_cus = StripeCustomer.create( user=self.user, stripe_id='100') @@ -68,7 +69,7 @@ def test_create_package(self): self.assertTrue(res.json['success']) self.assertIn("new stripe pkg", [ - pkg.name for pkg in StripePackage.all()]) + pkg.name for pkg in StripePackage.all()]) def test_update_package(self): self.assertNotIn("updated pkg", [ @@ -120,7 +121,7 @@ def test_create_customer(self): self.assertTrue(res.json['success']) self.assertIn(stripe_id, [ - cus.stripe_id for cus in StripeCustomer.all()]) + cus.stripe_id for cus in StripeCustomer.all()]) def test_list_customers(self): res = self.c.get('/stripecustomer/list/', headers={ @@ -232,14 +233,13 @@ def test_check_ip(self): @mock.patch('boto3.client') def test_send_html_email(self, mocked): # Requires to have `TEMPLATE_ROOT_DIR=/tmp` in your .env file - with tempfile.NamedTemporaryFile(prefix='/tmp/', suffix='.html') as tmp: - res = self.view.send_html_email( - subject='Test Subject', - from_email='unittesting@email.com', - to_email_list=['recipient@email.com'], - template_name=(tmp.name.split("/")[-1]) - ) - self.assertTrue(True) # if there are no exceptions, then it passed + res = self.view.send_html_email( + subject='Test Subject', + from_email='unittesting@email.com', + to_email_list=['recipient@email.com'], + template_name=('email/email_template.html') + ) + self.assertTrue(True) # if there are no exceptions, then it passed @mock.patch('stripe.Webhook') def test_check_signing_secret(self, mocked): @@ -273,7 +273,8 @@ def setUp(self) -> None: groups=[self.group]) self.token, self.exp = User.api_login("test", "abc123") self.stripe_pkg = StripePackage.create(group=self.group, - stripe_id='100', price='10', description='unit testing...', name='unit test package') + stripe_id='100', price='10', description='unit 
testing...', + name='unit test package') self.app = self.project.wsgi_app self.c = Client(self.app) diff --git a/pfunk/tests/unittest_keys.py b/pfunk/tests/unittest_keys.py new file mode 100644 index 0000000..9828618 --- /dev/null +++ b/pfunk/tests/unittest_keys.py @@ -0,0 +1,2 @@ + +KEYS = {'616a6228-8ba7-4362-b9f3-ddecb0bac7c0': {'signature_key': 'leh61jqbIYVpLoak3A_MOm70ji07IdCnLKa2WdVleUA=', 'payload_key': 'P2toF_UI9CnllUE-HSNPUwvMOj5cNOXJ4J8k3q__kkg=', 'kid': '616a6228-8ba7-4362-b9f3-ddecb0bac7c0'}, '08b59582-a09e-4f0d-bde2-2b0c41c3a326': {'signature_key': '-hga4fKEio4kKx6Moh0VBgzb9HWnN7czj8LCs87j-C8=', 'payload_key': 'OLW5EDA27EpD1bD-EM6-vak5PM3PE6ICh5_JmgXOMps=', 'kid': '08b59582-a09e-4f0d-bde2-2b0c41c3a326'}, '8bcbb6e6-b0bc-432a-aabe-8937b5ff4019': {'signature_key': 'T6I7cjew-cgl6gAt9OrdeKIqaAhw5ETnS7Y4RJ0gwvg=', 'payload_key': 'vRnQ5qTbr9a_9bQMRZiIOEmSJZ85uihbrBh7cp2MXIU=', 'kid': '8bcbb6e6-b0bc-432a-aabe-8937b5ff4019'}, '4b6327dc-7a12-4c8d-a064-4d577dd5a0c3': {'signature_key': 'WaQVfGyAcn9iAHrtD1eKGgtogytpq5NxPfHTCfu0QS8=', 'payload_key': 'OkxihknI3MOhwe3-RdGR4YygNOFj7qouwRRCz3DqWXg=', 'kid': '4b6327dc-7a12-4c8d-a064-4d577dd5a0c3'}, 'afde7f68-7233-4816-a5ba-47a0763beca5': {'signature_key': 'JS8xjJJ5RRe2j9ZwT3rEaCOLArYlE475kcG98vt1WaQ=', 'payload_key': 'boWQE9dXlDEZ_45QgbBntEKeb6l3E9V0ajYXzv3VG8s=', 'kid': 'afde7f68-7233-4816-a5ba-47a0763beca5'}, '16201efc-0241-4fb6-bbfd-98cb51df53e8': {'signature_key': 'bPicMmxL_xdQw4Xdxb9_hlWv_NBDS-bm1xWfp2IP0io=', 'payload_key': 'N8m1cdkZNRrF9LF4aeyJIgSUgx9vIsMj6wtArEIcEpc=', 'kid': '16201efc-0241-4fb6-bbfd-98cb51df53e8'}, '19fabb2b-0591-4f51-98e6-eb78ed984d20': {'signature_key': 'XaANCYidONcbmxSKKhLjlfBgjDxi6modj_Cz-E9jq-E=', 'payload_key': 'WhL7Or38TOJwdOPVFaxnBruIc0QkhsSEvA9DgNqQ5bM=', 'kid': '19fabb2b-0591-4f51-98e6-eb78ed984d20'}, '4111f87e-e234-4fd8-b2fc-a520de57b57c': {'signature_key': 'SBmcfg5Ig_1fnQbyWMjx2rR0V1CH21IwYQr_84KmQmI=', 'payload_key': 'Qx_iftIj3tLtjbMgyLaIHWE11lwAQZvBMHWBMg4uMeM=', 'kid': '4111f87e-e234-4fd8-b2fc-a520de57b57c'}, 'a4d60e83-8adc-4c45-bc60-860bfba558e6': {'signature_key': 'ugKbY1AMvucSZHZUvvFD7JmicKuJBplmy_fnTtv2khA=', 'payload_key': 'PSu7ZQQyJaOjiiX6klGWB-OPGW_4kqjZHsixmID1FBk=', 'kid': 'a4d60e83-8adc-4c45-bc60-860bfba558e6'}, '92b77c10-51a2-46c5-af30-9b1ff0e9c2c1': {'signature_key': '2gcQrSKkdvxno3qLeeMIDMgFi0vEzrcpU9amyJ0LuDw=', 'payload_key': 'MsFGA7gzlXjBg79B6sZpOSfNmkz8W3hBxbsrx4zFLkw=', 'kid': '92b77c10-51a2-46c5-af30-9b1ff0e9c2c1'}} \ No newline at end of file diff --git a/pfunk/utils/aws.py b/pfunk/utils/aws.py new file mode 100644 index 0000000..28449eb --- /dev/null +++ b/pfunk/utils/aws.py @@ -0,0 +1,171 @@ +import datetime +import boto3 +import json +import swaggyp as sw +from botocore.exceptions import ClientError, NoCredentialsError +from envs import env +from openapi_spec_validator import validate_v2_spec, openapi_v2_spec_validator +from openapi_spec_validator.readers import read_from_filename +from openapi_spec_validator.exceptions import OpenAPIValidationError + +AWS_ACCESS_KEY = env('AWS_ACCESS_KEY') +AWS_SECRET_ACCESS_KEY = env('AWS_SECRET_ACCESS_KEY') +AWS_DEFAULT_REGION = env('AWS_DEFAULT_REGION') + + +def _json_dt_helper(o): + """ Helps serializing `datetime` objects to a readable string """ + if isinstance(o, (datetime.date, datetime.datetime)): + return o.isoformat() + + +def write_to_config(obj, config_file_dir='pfunk.json'): + """ Appends object to pfunk config file + + Args: + obj (dict, required): + key, value pairs to write to json file + config_file_dir (str, optional): + 
            directory of the config json file, default='pfunk.json'
+    Returns:
+        config_file (dict, required):
+            the updated contents of the config file (pfunk.json)
+    """
+    with open(config_file_dir, 'r+') as f:
+        data = json.load(f)
+        data.update(obj)
+        f.seek(0)
+        f.truncate()
+        json.dump(data, f, indent=4, sort_keys=True, default=_json_dt_helper)
+    return data
+
+
+def read_from_config_file(config_file_dir='pfunk.json'):
+    """ Returns data from config file in dict form """
+    with open(config_file_dir, 'r') as f:
+        data = json.load(f)
+    return data
+
+
+class ApiGateway(object):
+    region_name = env('SES_REGION_NAME', 'us-east-1')
+
+    def __init__(self):
+        self.client = boto3.client(
+            'apigateway',
+            aws_access_key_id=AWS_ACCESS_KEY,
+            aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
+            region_name=AWS_DEFAULT_REGION)
+
+    def validate_yaml(self, yaml_file):
+        """ Validates that the YAML file is a valid OpenAPI Spec v2 document (returns None if valid, otherwise the errors) """
+        try:
+            spec_dict, spec_url = read_from_filename(yaml_file)
+            validate_v2_spec(spec_dict)
+        except (OSError, AttributeError) as err:
+            return {'errors': str(err)}
+        except OpenAPIValidationError as err:
+            return self._iterate_validator_errors(spec_dict)
+        return None
+
+    def _iterate_validator_errors(self, spec_dict):
+        """ Iterates through the list of errors that `openapi_spec_validator` returned
+
+        This method was implemented because `openapi_spec_validator` raises a
+        single Python error when validation of the YAML file fails, so the
+        individual errors are collected here by iterating over them instead.
+
+        Args:
+            spec_dict (dict, required):
+                `spec_dict` generated from `openapi_spec_validator.readers.read_from_filename`
+        Returns:
+            list of errors
+        """
+        try:
+            errors = [{err.message: err.json_path}
+                      for err in openapi_v2_spec_validator.iter_errors(spec_dict)]
+            return errors
+        except (OSError, AttributeError) as err:
+            return str(err)
+
+    def create_api_from_yaml(self, yaml_file, fail_on_warnings=True):
+        """ Creates an API for AWS API Gateway from a YAML swagger file
+
+        Args:
+            yaml_file (str, required):
+                Path to the OpenAPI swagger file to create the API from
+            fail_on_warnings (bool, optional):
+                Specifies if the method will error on warnings. Default: `True`
+        """
+        _yaml_valid = self.validate_yaml(yaml_file)
+        if _yaml_valid:
+            return {
+                "error": 'Bad Request. YAML is not valid.',
+                "yaml_err": _yaml_valid
+            }
+
+        try:
+            # `yaml_file` is a file path at this point, so read the spec contents for the import call
+            with open(yaml_file, 'r') as file:
+                yaml_file = file.read()
+            response = self.client.import_rest_api(
+                failOnWarnings=fail_on_warnings,
+                body=yaml_file)
+            if response:
+                write_to_config({'api': response})
+                return {
+                    'success': True,
+                    'response': response
+                }
+        except (ClientError, NoCredentialsError) as err:
+            return {
+                'error': str(err)
+            }
+
+    def update_api_from_yaml(self, yaml_file, mode, rest_api_id=None, fail_on_warnings=True):
+        """ Updates a REST API using a yaml file
+
+        Args:
+            rest_api_id (str, optional):
+                ID of the API to update; if not provided, the API ID stored in `pfunk.json` is used
+            yaml_file (str, required):
+                Path to the OpenAPI swagger file to update the API from
+            mode (str, required):
+                Mode of update, choice=['merge', 'overwrite']
+            fail_on_warnings (bool, optional):
+                Specifies if the method will error on warnings. Default: `True`
+        """
+        _yaml_valid = self.validate_yaml(yaml_file)
+        if _yaml_valid:
+            return {
+                "error": 'Bad Request. YAML is not valid.',
+                "yaml_err": _yaml_valid
+            }
+
+        try:
+            # `yaml_file` is a file path at this point, so read the spec contents for the update call
+            with open(yaml_file, 'r') as file:
+                yaml_file = file.read()
+            # Acquire REST API ID from config file if not provided
+            if not rest_api_id:
+                data = read_from_config_file()
+                if data.get('api'):
+                    rest_api_id = (data.get('api')
+                                   .get('id'))
+
+            response = self.client.put_rest_api(
+                restApiId=rest_api_id,
+                mode=mode,
+                failOnWarnings=fail_on_warnings,
+                body=yaml_file
+            )
+
+            if response:
+                return {
+                    'success': True,
+                    'response': response
+                }
+        except (ClientError, NoCredentialsError) as err:
+            return {
+                'error': str(err)
+            }
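# --- Usage sketch (illustrative aside, not part of the patch) ---
# Importing a generated swagger.yaml into AWS API Gateway with the new
# ApiGateway helper. Assumes AWS credentials are exposed through the
# environment variables read at the top of pfunk/utils/aws.py and that a
# swagger.yaml produced by pfunk.utils.swagger.SwaggerDoc sits next to
# pfunk.json in the working directory.
from pfunk.utils.aws import ApiGateway, read_from_config_file

gateway = ApiGateway()
errors = gateway.validate_yaml('swagger.yaml')
if errors:
    print(errors)
else:
    gateway.create_api_from_yaml('swagger.yaml')
    # import_rest_api's response is written to pfunk.json under the 'api' key,
    # so a later update can omit rest_api_id:
    # gateway.update_api_from_yaml('swagger.yaml', mode='merge')
    print(read_from_config_file().get('api'))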
diff --git a/pfunk/utils/deploy.py b/pfunk/utils/deploy.py
index f07ec12..a78cae2 100644
--- a/pfunk/utils/deploy.py
+++ b/pfunk/utils/deploy.py
@@ -1,9 +1,10 @@
-import boto3
 import datetime
 import json
 import os
-import pip
 import shutil
+
+import boto3
+import pip
 import sammy as sm
 
 s3 = boto3.client('s3')
diff --git a/pfunk/utils/digitalocean.py b/pfunk/utils/digitalocean.py
new file mode 100644
index 0000000..e69de29
diff --git a/pfunk/utils/json_utils.py b/pfunk/utils/json_utils.py
index ee7342f..15de3a0 100644
--- a/pfunk/utils/json_utils.py
+++ b/pfunk/utils/json_utils.py
@@ -20,4 +20,4 @@ def default(self, obj):
         try:
             return super(PFunkEncoder, self).default(obj)
         except AttributeError:
-            return str(obj)
\ No newline at end of file
+            return str(obj)
diff --git a/pfunk/utils/publishing.py b/pfunk/utils/publishing.py
index 4d08373..60633bd 100644
--- a/pfunk/utils/publishing.py
+++ b/pfunk/utils/publishing.py
@@ -7,6 +7,7 @@
 class BearerAuth(requests.auth.AuthBase):
     """ Bearer Token Auth class for the requests library.
     """
+
     def __init__(self, token):
         """
 
@@ -19,7 +20,8 @@ def __call__(self, r):
         r.headers["authorization"] = "Bearer " + self.token
         return r
 
-def create_or_update_role(client, payload:dict={}):
+
+def create_or_update_role(client, payload: dict = {}):
     """ Utility that attempts to create a role and if that fails it attempts to update it.
 
     Args:
@@ -96,4 +98,4 @@ def create_or_update_function(client, payload):
             )
         )
 
-    return response
\ No newline at end of file
+    return response
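# --- Usage sketch (illustrative aside, not part of the patch) ---
# BearerAuth plugs a token into the requests auth hook by setting the
# "authorization: Bearer <token>" header; the GraphQL view uses it the same
# way against FAUNA_GRAPHQL_URL. The URL, query, and token below are
# placeholders.
import requests

from pfunk.utils.publishing import BearerAuth

resp = requests.post(
    'https://graphql.fauna.com/graphql',
    json={'query': '{ __schema { queryType { name } } }'},
    auth=BearerAuth('fnAbc123placeholder'))
print(resp.status_code)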
diff --git a/pfunk/utils/swagger.py b/pfunk/utils/swagger.py
index 9711904..2820efe 100644
--- a/pfunk/utils/swagger.py
+++ b/pfunk/utils/swagger.py
@@ -38,10 +38,11 @@ class SwaggerDoc(object):
 
-    def __init__(self, collections, rules=[]):
+    def __init__(self, collections, rules=[], config_file='pfunk.json'):
         """ Generates swagger doc.
         Details are going to be acquired from the collections
         The acquisition of the information needed for docs are as follows:
+        ```
         Response:
             Description (str): View's `get_query` docstrings
             Status Code (int):
@@ -56,15 +57,26 @@ def __init__(self, collections, rules=[]):
             Model:
                 Name (str): The class name of the `collection`
                 Properties (str): The fields of the collection and their type
-
+        ```
+
+        Args:
+            collections ([`pfunk.collection.Collection`]):
+                list of the project's collections to generate models from
+            rules ([`werkzeug.routing.Rule`]):
+                list of additional URL rules that the given collections don't already provide
+            config_file (str, optional):
+                path of the config file
+
         Returns:
-            Generated YAML file
+            swagger.yaml (yaml, required):
+                Generated YAML file
         """
         self.collections = collections
         self.rules = rules
         self.paths = []
         self.definitions = []
         self.responses = []
+        self.config_file = config_file
         self._response_classes = [
             'response_class',
             'not_found_class',
@@ -77,16 +89,25 @@ def __init__(self, collections, rules=[]):
 
     def _convert_url_to_swagger(self, replacement: str, to_replace: str) -> str:
         return re.sub('<\w+:\w+>', f'{{{replacement}}}', to_replace)
 
-    def write_to_yaml(self):
+    def write_to_yaml(self, dir=''):
         """ Using the class' variables, write it to a swagger (yaml) file
         It will create `swagger.yaml` file in current directory,
         if there is already one, it will print the yaml file instead.
+
+        Args:
+            dir (str, optional):
+                custom directory for the swagger file. If none is provided, the file is created in the current directory.
+        Returns:
+            dir (str, required):
+                directory of the created swagger file
+            swagger_file (str, required):
+                the contents of the swagger yaml file
         """
-        if not os.path.exists(f'pfunk.json'):
+        if not os.path.exists(self.config_file):
             raise Exception('Missing JSON Config file.')
         else:
-            with open(f'pfunk.json', 'r') as f:
+            with open(self.config_file, 'r') as f:
                 data = json.loads(f.read())
                 proj_title = data.get('name')
                 proj_desc = data.get('description', 'A Pfunk project')
@@ -95,6 +116,10 @@ def write_to_yaml(self):
                 basePath = data.get('basePath', '/')
                 schemes = ['https']
 
+                if dir:
+                    if not dir.endswith('/'):
+                        dir = dir + "/"
+
                 info = sw.Info(
                     title=proj_title,
                     description=proj_desc,
@@ -107,13 +132,17 @@ def write_to_yaml(self):
                     schemes=schemes,
                     definitions=self.definitions)
 
-                if not os.path.exists(f'swagger.yaml'):
-                    with open(f'swagger.yaml', 'x') as swag_doc:
+                if not os.path.exists(f'{dir}swagger.yaml'):
+                    with open(f'{dir}swagger.yaml', 'x') as swag_doc:
                         swag_doc.write(t.to_yaml())
                 else:
-                    print('There is an existing swagger file. Kindly move/delete it to generate a new one. Printing instead...')
-                    print(t.to_yaml())
-                    return t.to_yaml()
+                    print(
+                        'There is an existing swagger file.
Kindly move/delete it to generate a new one.') + # print(t.to_yaml()) + return { + "dir": f'{dir}swagger.yaml', + "swagger_file": t.to_yaml() + } def get_operations(self, col: Collection): """ Acquires all of the endpoint in the collections and make it @@ -131,6 +160,7 @@ def get_operations(self, col: Collection): An array of `Path` that can be consumed using `swaggyp.SwaggerTemplate` to show available paths + ``` """ for view in col.collection_views: route = view.url(col) @@ -159,6 +189,7 @@ def get_operations(self, col: Collection): # Skip HEAD operations continue + # Acquire path parameters of URL if args is None or len(args) == 0: # if `defaults` weren't used in URL building, use the argument defined in the URL string for converter, arguments, variable in parse_rule(rule): @@ -167,10 +198,10 @@ def get_operations(self, col: Collection): args = variable arg_type = converter - # Replace werkzeug params () to swagger-style params ({id}) - swagger_rule = self._convert_url_to_swagger(args, rule) + params = [] + # Construct path parameters for swagger generation if arg_type: - params = sw.Parameter( + path_params = sw.Parameter( name=args, _type=WERKZEUG_URL_TO_YAML_TYPES.get(arg_type), _in='path', @@ -178,18 +209,59 @@ def get_operations(self, col: Collection): required=True, allowEmptyValue=False ) + params.append(path_params) + + # Acquire payload of the view from the View's `_payload_docs` + view_payload = view(col)._payload_docs() + + # Construct payload for swagger generation + if view_payload: + for field in view_payload.get('data'): + if field.get('schema'): + schema = sw.SwagSchema(ref=field.get('schema')) + param = sw.Parameter( + name=field.get('name'), + _in=field.get('in'), + description=field.get('description'), + required=field.get('required'), + schema=schema + ) + else: + param = sw.Parameter( + name=field.get('name'), + _type=field.get('type'), + _in=field.get('in'), + description=field.get('description'), + required=field.get('required'), + allowEmptyValue=False + ) + params.append(param) + + consumes = ['application/json', + 'application/x-www-form-urlencoded'] + produces = ['application/json', + 'application/x-www-form-urlencoded'] + view_docs = view.__doc__ + if params: op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', - description=view.__doc__, + description=view_docs, responses=responses, - parameters=[params]) + consumes=consumes, + produces=produces, + parameters=params) else: op = sw.Operation( http_method=method.lower(), summary=f'({method}) -> {col.__class__.__name__}', - description=view.__doc__, - responses=responses) + description=view_docs, + responses=responses, + consumes=consumes, + produces=produces) + + # Replace werkzeug params () to swagger-style params ({id}) + swagger_rule = self._convert_url_to_swagger(args, rule) p = sw.Path(endpoint=swagger_rule, operations=[op]) self.paths.append(p) return self.paths @@ -211,6 +283,9 @@ def get_model_definitions(self, col: Collection): An array of `Definition` that can be consumed using `swaggyp.SwaggerTemplate` to show available models + + Payload: + """ # Define model definitions by iterating through collection's fields for its properties @@ -233,10 +308,15 @@ def get_model_definitions(self, col: Collection): self.definitions.append(model) return self.definitions - def generate_swagger(self): - """ One-function-to-call needed function to generate a swagger documentation """ + def generate_swagger(self, dir=''): + """ One-function-to-call needed function to 
generate a swagger documentation + + Args: + dir (str, optional): + directory to create the yaml file + """ for i in self.collections: col = i() self.get_operations(col) self.get_model_definitions(col) - return self.write_to_yaml() \ No newline at end of file + return self.write_to_yaml(dir) diff --git a/pfunk/web/request.py b/pfunk/web/request.py index c19a0b6..a96d386 100644 --- a/pfunk/web/request.py +++ b/pfunk/web/request.py @@ -20,7 +20,7 @@ def __init__(self, event, kwargs): self.user = None self.token: str = None self.jwt: str = None - + def get_cookies(self, raw_cookies): """ Returns dict of cookies @@ -59,6 +59,7 @@ class WSGIRequest(Request): """ WSGI Request """ + def __init__(self, event, kwargs=None): super(WSGIRequest, self).__init__(event, kwargs=kwargs) self.method = event.method @@ -98,6 +99,7 @@ class HTTPRequest(BaseAPIGatewayRequest): """ HTTP Request: For HTTP API Gateway """ + def __init__(self, event, kwargs=None): super(HTTPRequest, self).__init__(event, kwargs=kwargs) self.raw_event = event @@ -116,3 +118,26 @@ def get_cookies(self, raw_cookies): return parse_cookie(';'.join(raw_cookies)) +class DigitalOCeanRequest(Request): + """ API Request for digitalocean functions """ + + def __init__(self, args): + self.raw_event = args + self.body = args + self.query_params = args + self.headers = args.get('__ow_headers') + self.method = args.get('__ow_method') + self.path = args.get('__ow_path') + + if args.get('__ow_query'): + self.query_params = args.get('__ow_query') # only shows up if web:raw in project.yml + if args.get('__ow_body'): + self.body = args.get('__ow_body') # only shows up if web:raw in project.yml + + try: + self.cookies = self.get_cookies(self.headers.pop('Cookie')) + except KeyError: + self.cookies = {} + + def get_cookies(self, raw_cookies): + return parse_cookie(raw_cookies) diff --git a/pfunk/web/response.py b/pfunk/web/response.py index 0feef25..b81e471 100644 --- a/pfunk/web/response.py +++ b/pfunk/web/response.py @@ -33,7 +33,7 @@ def response(self): 'statusCode': self.status_code, 'body': self.body, 'headers': self.headers - } + } class NotFoundResponseMixin(object): @@ -122,4 +122,4 @@ class HttpBadRequestResponse(BadRequestResponseMixin, Response): class JSONBadRequestResponse(BadRequestResponseMixin, JSONResponse): - pass \ No newline at end of file + pass diff --git a/pfunk/web/views/base.py b/pfunk/web/views/base.py index 0637609..34ea049 100644 --- a/pfunk/web/views/base.py +++ b/pfunk/web/views/base.py @@ -1,13 +1,14 @@ from envs import env -from faunadb.errors import NotFound as FaunaNotFound, PermissionDenied, BadRequest, ErrorData +from faunadb.errors import NotFound as FaunaNotFound, PermissionDenied, BadRequest from jwt import InvalidSignatureError from valley.exceptions import ValidationException +from valley.utils import import_util from werkzeug.exceptions import NotFound, MethodNotAllowed from werkzeug.http import dump_cookie from werkzeug.routing import Rule from pfunk.exceptions import TokenValidationFailed, LoginFailed, Unauthorized, DocNotFound, GraphQLError -from pfunk.web.request import Request, RESTRequest, HTTPRequest +from pfunk.web.request import Request, RESTRequest, HTTPRequest, DigitalOCeanRequest from pfunk.web.response import (Response, HttpNotFoundResponse, HttpForbiddenResponse, HttpBadRequestResponse, HttpMethodNotAllowedResponse, HttpUnauthorizedResponse) @@ -140,12 +141,50 @@ def process_wsgi_request(self): response = self.unauthorized_class() return response + def process_digitalocean_request(self): + """ 
Processes the DigitalOcean Request. + Returns response if it returned a successful + query otherwise, a json error response. + + Returns: + response (`web.Response`, required): + Response object with differing status_code to represent + stauts of the request + """ + + try: + if self.login_required: + self.token_check() + response = getattr(self, self.request.method.lower())() + except (FaunaNotFound, NotFound, DocNotFound): + response = self.not_found_class() + except PermissionDenied: + response = self.forbidden_class() + except (BadRequest, GraphQLError) as e: + if isinstance(e, BadRequest): + payload = e._get_description() + else: + payload = str(e) + response = self.bad_request_class(payload=payload) + except (ValidationException,) as e: + key, value = str(e).split(':') + response = self.bad_request_class(payload={'validation_errors': {key: value}}) + except (MethodNotAllowed,): + response = self.method_not_allowed_class() + except (LoginFailed,) as e: + response = self.unauthorized_class(payload=str(e)) + except (Unauthorized, InvalidSignatureError, TokenValidationFailed): + response = self.unauthorized_class() + return response + def process_request(self): """ Calls the handler for varying `request` and leave the handling to it. """ if isinstance(self.request, (HTTPRequest, RESTRequest)): return self.process_lambda_request() + elif isinstance(self.request, (DigitalOCeanRequest)): + return self.process_digitalocean_request() return self.process_wsgi_request() def get_token(self): @@ -358,6 +397,8 @@ def get_query_kwargs(self): for k, v in fields.items(): current_value = data.get(k) col = v.get('foreign_class') + if isinstance(col, str): + col = import_util(col) if current_value: obj = col.get(current_value) data[k] = obj diff --git a/pfunk/web/views/graphql.py b/pfunk/web/views/graphql.py index cbd6065..f0842d7 100644 --- a/pfunk/web/views/graphql.py +++ b/pfunk/web/views/graphql.py @@ -1,13 +1,13 @@ import requests from envs import env +from graphql.exceptions import SyntaxError as GQLSyntaxError +from graphql.parser import GraphQLParser from werkzeug.routing import Rule from pfunk.exceptions import GraphQLError from pfunk.utils.publishing import BearerAuth from pfunk.web.response import GraphQLResponse from pfunk.web.views.json import JSONView -from graphql.parser import GraphQLParser -from graphql.exceptions import SyntaxError as GQLSyntaxError parser = GraphQLParser() @@ -55,12 +55,12 @@ class GraphQLView(JSONView): def get_query(self): gql = self.process_graphql() resp = requests.request( - method='post', - url=env('FAUNA_GRAPHQL_URL', 'https://graphql.fauna.com/graphql'), - json=self.request.get_json(), - auth=BearerAuth(self.request.token), - allow_redirects=False - ) + method='post', + url=env('FAUNA_GRAPHQL_URL', 'https://graphql.fauna.com/graphql'), + json=self.request.get_json(), + auth=BearerAuth(self.request.token), + allow_redirects=False + ) return resp.json() def process_graphql(self): @@ -76,4 +76,4 @@ def process_graphql(self): @classmethod def url(cls, collection=None): return Rule(f'/graphql/', endpoint=cls.as_view(), - methods=cls.http_methods) \ No newline at end of file + methods=cls.http_methods) diff --git a/pfunk/web/views/json.py b/pfunk/web/views/json.py index cfbe739..791af54 100644 --- a/pfunk/web/views/json.py +++ b/pfunk/web/views/json.py @@ -1,6 +1,6 @@ +from pfunk.client import q from pfunk.web.response import JSONResponse, JSONNotFoundResponse, JSONBadRequestResponse, \ JSONMethodNotAllowedResponse, JSONUnauthorizedResponse, JSONForbiddenResponse 
-from pfunk.client import q from pfunk.web.views.base import ActionMixin, HTTPView, IDMixin, ObjectMixin, QuerysetMixin, UpdateMixin @@ -25,6 +25,46 @@ def get_response(self): headers=self.get_headers() ) + def _payload_docs(self): + """ Used in custom defining payload parameters for the view in Swagger generation. + + Should return a dict that has the fields of a swagger parameter. + If there is an error in the swagger, it will not be raised. + Usage of `https://editor.swagger.io` to validate is recommended + e.g. + ``` + # Defining formdata + {"data": [ + { + "name":"name", + "in":"formData", + "description":"name of the pet", + "required": true, + "type": "string" + }, + { + "name": "status", + "in": "formData", + "description": "status of the pet", + "required":true, + "type":"string" + } + ]} + + # Defining a payload that references a model + {"data": [ + { + "name": "body", + "in": "body", + "description": "Collection object to add", + "required": True, + "schema": "#/definitions/Person" + } + ]} + ``` + """ + return {} + class CreateView(UpdateMixin, ActionMixin, JSONView): """ Define a `Create` view that allows `creation` of an entity in the collection """ @@ -34,7 +74,8 @@ class CreateView(UpdateMixin, ActionMixin, JSONView): def get_query(self): """ Entity created in a collection """ - obj = self.collection.create(**self.get_query_kwargs(), _token=self.request.token) + obj = self.collection.create( + **self.get_query_kwargs(), _token=self.request.token) return obj def get_m2m_kwargs(self, obj): @@ -50,7 +91,8 @@ def get_m2m_kwargs(self, obj): """ data = self.request.get_json() - fields = self.collection.get_foreign_fields_by_type('pfunk.fields.ManyToManyField') + fields = self.collection.get_foreign_fields_by_type( + 'pfunk.fields.ManyToManyField') for k, v in fields.items(): current_value = data.get(k) col = v.get('foreign_class')() @@ -61,6 +103,19 @@ def get_m2m_kwargs(self, obj): ) ) + def _payload_docs(self): + # Reference the collection by default + if self.collection: + return {"data": [ + { + "name": "body", + "in": "body", + "description": "Collection object to add", + "required": True, + "schema": f"#/definitions/{self.collection.__class__.__name__}" + } + ]} + class UpdateView(UpdateMixin, IDMixin, JSONView): """ Define a view to allow `Update` operations """ @@ -70,11 +125,25 @@ class UpdateView(UpdateMixin, IDMixin, JSONView): def get_query(self): """ Entity in collection updated by an ID """ - obj = self.collection.get(self.request.kwargs.get('id'), _token=self.request.token) + obj = self.collection.get(self.request.kwargs.get( + 'id'), _token=self.request.token) obj._data.update(self.get_query_kwargs()) obj.save() return obj + def _payload_docs(self): + # Reference the collection by default + if self.collection: + return {"data": [ + { + "name": "body", + "in": "body", + "description": "Collection object to add", + "required": True, + "schema": f"#/definitions/{self.collection.__class__.__name__}" + } + ]} + class DetailView(ObjectMixin, IDMixin, JSONView): """ Define a view to allow single entity operations """ @@ -102,4 +171,4 @@ class ListView(QuerysetMixin, ActionMixin, JSONView): class GraphQLView(HTTPView): - pass \ No newline at end of file + pass diff --git a/poetry.lock b/poetry.lock index 67a8d8a..43564c8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,6 +1,6 @@ [[package]] name = "appnope" -version = "0.1.2" +version = "0.1.3" description = "Disable App Nap on macOS >= 10.9" category = "dev" optional = false @@ -66,7 +66,7 @@ six = ">=1.6.1,<2.0" name = 
"attrs" version = "21.4.0" description = "Classes Without Boilerplate" -category = "dev" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" @@ -86,11 +86,11 @@ python-versions = "*" [[package]] name = "beautifulsoup4" -version = "4.10.0" +version = "4.11.1" description = "Screen-scraping library" category = "dev" optional = false -python-versions = ">3.0.0" +python-versions = ">=3.6.0" [package.dependencies] soupsieve = ">1.2" @@ -114,14 +114,14 @@ webencodings = "*" [[package]] name = "boto3" -version = "1.21.31" +version = "1.23.8" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 3.6" [package.dependencies] -botocore = ">=1.24.31,<1.25.0" +botocore = ">=1.26.8,<1.27.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.5.0,<0.6.0" @@ -130,7 +130,7 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.24.31" +version = "1.26.8" description = "Low-level, data-driven core of boto 3." category = "main" optional = false @@ -142,7 +142,7 @@ python-dateutil = ">=2.1,<3.0.0" urllib3 = ">=1.25.4,<1.27" [package.extras] -crt = ["awscrt (==0.13.5)"] +crt = ["awscrt (==0.13.8)"] [[package]] name = "cachetools" @@ -154,11 +154,11 @@ python-versions = "~=3.5" [[package]] name = "certifi" -version = "2021.10.8" +version = "2022.5.18.1" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6" [[package]] name = "cffi" @@ -184,7 +184,7 @@ unicode_backport = ["unicodedata2"] [[package]] name = "click" -version = "8.1.2" +version = "8.1.3" description = "Composable command line interface toolkit" category = "main" optional = false @@ -282,6 +282,17 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "fastjsonschema" +version = "2.15.3" +description = "Fastest Python implementation of JSON schema" +category = "dev" +optional = false +python-versions = "*" + +[package.extras] +devel = ["colorama", "jsonschema", "json-spec", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] + [[package]] name = "faunadb" version = "4.2.0" @@ -372,9 +383,9 @@ python-versions = ">=3.5" [[package]] name = "importlib-resources" -version = "5.6.0" +version = "5.7.1" description = "Read resources from Python packages" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" @@ -387,7 +398,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [[package]] name = "ipykernel" -version = "6.11.0" +version = "6.13.0" description = "IPython Kernel for Jupyter" category = "dev" optional = false @@ -400,6 +411,7 @@ ipython = ">=7.23.1" jupyter-client = ">=6.1.12" matplotlib-inline = ">=0.1" nest-asyncio = "*" +packaging = "*" psutil = "*" tornado = ">=6.1" traitlets = ">=5.1.0" @@ -409,7 +421,7 @@ test = ["pytest (>=6.0)", "pytest-cov", "flaky", "ipyparallel", "pre-commit", "p [[package]] name = "ipython" -version = "8.2.0" +version = "8.3.0" description = "IPython: Productive Interactive Computing" category = "dev" optional = false @@ -517,9 +529,9 @@ python-versions = ">=3.7" [[package]] name = "jsonschema" -version = "4.4.0" +version = "4.5.1" description = "An implementation of JSON Schema validation for Python" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" @@ -550,7 +562,7 @@ qtconsole = "*" [[package]] name = "jupyter-client" -version = "7.2.1" +version = "7.3.1" description = 
"Jupyter protocol implementation and client libraries" category = "dev" optional = false @@ -589,26 +601,26 @@ test = ["pexpect"] [[package]] name = "jupyter-core" -version = "4.9.2" +version = "4.10.0" description = "Jupyter core package. A base package on which Jupyter projects rely." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] pywin32 = {version = ">=1.0", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} traitlets = "*" +[package.extras] +test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] + [[package]] name = "jupyterlab-pygments" -version = "0.1.2" +version = "0.2.2" description = "Pygments theme using JupyterLab CSS variables" category = "dev" optional = false -python-versions = "*" - -[package.dependencies] -pygments = ">=2.4.1,<3" +python-versions = ">=3.7" [[package]] name = "jupyterlab-widgets" @@ -647,7 +659,7 @@ python-versions = "*" [[package]] name = "nbclient" -version = "0.5.13" +version = "0.6.3" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." category = "dev" optional = false @@ -660,12 +672,12 @@ nest-asyncio = "*" traitlets = ">=5.0.0" [package.extras] -sphinx = ["Sphinx (>=1.7)", "sphinx-book-theme", "mock", "moto", "myst-parser"] -test = ["ipython (<8.0.0)", "ipykernel", "ipywidgets (<8.0.0)", "pytest (>=4.1)", "pytest-asyncio", "pytest-cov (>=2.6.1)", "check-manifest", "flake8", "mypy", "xmltodict", "black", "pip (>=18.1)", "wheel (>=0.31.0)", "setuptools (>=38.6.0)", "twine (>=1.11.0)"] +sphinx = ["autodoc-traits", "mock", "moto", "myst-parser", "Sphinx (>=1.7)", "sphinx-book-theme"] +test = ["black", "check-manifest", "flake8", "ipykernel", "ipython (<8.0.0)", "ipywidgets (<8.0.0)", "mypy", "pip (>=18.1)", "pre-commit", "pytest (>=4.1)", "pytest-asyncio", "pytest-cov (>=2.6.1)", "setuptools (>=60.0)", "testpath", "twine (>=1.11.0)", "xmltodict"] [[package]] name = "nbconvert" -version = "6.4.5" +version = "6.5.0" description = "Converting Jupyter Notebooks" category = "dev" optional = false @@ -676,45 +688,46 @@ beautifulsoup4 = "*" bleach = "*" defusedxml = "*" entrypoints = ">=0.2.2" -jinja2 = ">=2.4" -jupyter-core = "*" +jinja2 = ">=3.0" +jupyter-core = ">=4.7" jupyterlab-pygments = "*" MarkupSafe = ">=2.0" mistune = ">=0.8.1,<2" -nbclient = ">=0.5.0,<0.6.0" -nbformat = ">=4.4" +nbclient = ">=0.5.0" +nbformat = ">=5.1" +packaging = "*" pandocfilters = ">=1.4.1" pygments = ">=2.4.1" -testpath = "*" +tinycss2 = "*" traitlets = ">=5.0" [package.extras] -all = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pyppeteer (>=1,<1.1)", "tornado (>=4.0)", "sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] +all = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pre-commit", "pyppeteer (>=1,<1.1)", "tornado (>=6.1)", "sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] docs = ["sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] -serve = ["tornado (>=4.0)"] -test = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pyppeteer (>=1,<1.1)"] +serve = ["tornado (>=6.1)"] +test = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pre-commit", "pyppeteer (>=1,<1.1)"] webpdf = ["pyppeteer (>=1,<1.1)"] [[package]] name = "nbformat" -version = "5.2.0" +version = "5.4.0" description = "The Jupyter Notebook format" category = 
"dev" optional = false python-versions = ">=3.7" [package.dependencies] -jsonschema = ">=2.4,<2.5.0 || >2.5.0" +fastjsonschema = "*" +jsonschema = ">=2.6" jupyter-core = "*" -traitlets = ">=4.1" +traitlets = ">=5.1" [package.extras] -fast = ["fastjsonschema"] -test = ["check-manifest", "fastjsonschema", "testpath", "pytest"] +test = ["check-manifest", "testpath", "pytest", "pre-commit"] [[package]] name = "nest-asyncio" -version = "1.5.4" +version = "1.5.5" description = "Patch asyncio to allow nested event loops" category = "dev" optional = false @@ -722,11 +735,11 @@ python-versions = ">=3.5" [[package]] name = "notebook" -version = "6.4.10" +version = "6.4.11" description = "A web-based notebook environment for interactive computing" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] argon2-cffi = "*" @@ -748,7 +761,103 @@ traitlets = ">=4.2.1" [package.extras] docs = ["sphinx", "nbsphinx", "sphinxcontrib-github-alt", "sphinx-rtd-theme", "myst-parser"] json-logging = ["json-logging"] -test = ["pytest", "coverage", "requests", "nbval", "selenium", "pytest-cov", "requests-unixsocket"] +test = ["pytest", "coverage", "requests", "testpath", "nbval", "selenium", "pytest-cov", "requests-unixsocket"] + +[[package]] +name = "openapi-schema-validator" +version = "0.2.3" +description = "OpenAPI schema validation for Python" +category = "main" +optional = false +python-versions = ">=3.7.0,<4.0.0" + +[package.dependencies] +jsonschema = ">=3.0.0,<5.0.0" + +[package.extras] +rfc3339-validator = ["rfc3339-validator"] +strict-rfc3339 = ["strict-rfc3339"] +isodate = ["isodate"] + +[[package]] +name = "openapi-spec-validator" +version = "0.4.0" +description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3.0 spec validator" +category = "main" +optional = false +python-versions = ">=3.7.0,<4.0.0" + +[package.dependencies] +jsonschema = ">=3.2.0,<5.0.0" +openapi-schema-validator = ">=0.2.0,<0.3.0" +PyYAML = ">=5.1" + +[package.extras] +requests = ["requests"] + +[[package]] +name = "openapi-schema-validator" +version = "0.2.3" +description = "OpenAPI schema validation for Python" +category = "main" +optional = false +python-versions = ">=3.7.0,<4.0.0" + +[package.dependencies] +jsonschema = ">=3.0.0,<5.0.0" + +[package.extras] +rfc3339-validator = ["rfc3339-validator"] +strict-rfc3339 = ["strict-rfc3339"] +isodate = ["isodate"] + +[[package]] +name = "openapi-spec-validator" +version = "0.4.0" +description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3.0 spec validator" +category = "main" +optional = false +python-versions = ">=3.7.0,<4.0.0" + +[package.dependencies] +jsonschema = ">=3.2.0,<5.0.0" +openapi-schema-validator = ">=0.2.0,<0.3.0" +PyYAML = ">=5.1" + +[package.extras] +requests = ["requests"] + +[[package]] +name = "openapi-schema-validator" +version = "0.2.3" +description = "OpenAPI schema validation for Python" +category = "main" +optional = false +python-versions = ">=3.7.0,<4.0.0" + +[package.dependencies] +jsonschema = ">=3.0.0,<5.0.0" + +[package.extras] +rfc3339-validator = ["rfc3339-validator"] +strict-rfc3339 = ["strict-rfc3339"] +isodate = ["isodate"] + +[[package]] +name = "openapi-spec-validator" +version = "0.4.0" +description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3.0 spec validator" +category = "main" +optional = false +python-versions = ">=3.7.0,<4.0.0" + +[package.dependencies] +jsonschema = ">=3.2.0,<5.0.0" +openapi-schema-validator = ">=0.2.0,<0.3.0" +PyYAML = ">=5.1" + +[package.extras] +requests = ["requests"] [[package]] 
name = "packaging" @@ -827,7 +936,7 @@ python-versions = "*" [[package]] name = "prometheus-client" -version = "0.13.1" +version = "0.14.1" description = "Python client for the Prometheus monitoring system." category = "dev" optional = false @@ -838,7 +947,7 @@ twisted = ["twisted"] [[package]] name = "prompt-toolkit" -version = "3.0.28" +version = "3.0.29" description = "Library for building powerful interactive command lines in Python" category = "dev" optional = false @@ -849,14 +958,14 @@ wcwidth = "*" [[package]] name = "psutil" -version = "5.9.0" +version = "5.9.1" description = "Cross-platform lib for process and system monitoring in Python." category = "dev" optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [package.extras] -test = ["ipaddress", "mock", "unittest2", "enum34", "pywin32", "wmi"] +test = ["ipaddress", "mock", "enum34", "pywin32", "wmi"] [[package]] name = "ptyprocess" @@ -895,15 +1004,15 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pygments" -version = "2.11.2" +version = "2.12.0" description = "Pygments is a syntax highlighting package written in Python." category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" [[package]] name = "pyjwt" -version = "2.3.0" +version = "2.4.0" description = "JSON Web Token implementation in Python" category = "main" optional = false @@ -917,20 +1026,20 @@ tests = ["pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)"] [[package]] name = "pyparsing" -version = "3.0.7" -description = "Python parsing module" +version = "3.0.9" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.6.8" [package.extras] -diagrams = ["jinja2", "railroad-diagrams"] +diagrams = ["railroad-diagrams", "jinja2"] [[package]] name = "pyrsistent" version = "0.18.1" description = "Persistent/Functional/Immutable data structures" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" @@ -955,7 +1064,7 @@ python-versions = "*" [[package]] name = "pywin32" -version = "303" +version = "304" description = "Python for Window Extensions" category = "dev" optional = false @@ -979,7 +1088,7 @@ python-versions = ">=3.6" [[package]] name = "pyzmq" -version = "22.3.0" +version = "23.0.0" description = "Python bindings for 0MQ" category = "dev" optional = false @@ -1013,17 +1122,17 @@ test = ["flaky", "pytest", "pytest-qt"] [[package]] name = "qtpy" -version = "2.0.1" +version = "2.1.0" description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] packaging = "*" [package.extras] -test = ["pytest (>=6.0.0)", "pytest-cov (>=3.0.0)", "pytest-qt"] +test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"] [[package]] name = "requests" @@ -1093,7 +1202,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "soupsieve" -version = "2.3.1" +version = "2.3.2.post1" description = "A modern CSS selector implementation for Beautiful Soup." 
category = "dev" optional = false @@ -1117,7 +1226,7 @@ tests = ["pytest", "typeguard", "pygments", "littleutils", "cython"] [[package]] name = "stripe" -version = "2.70.0" +version = "2.76.0" description = "Python bindings for the Stripe API" category = "main" optional = false @@ -1128,7 +1237,7 @@ requests = {version = ">=2.20", markers = "python_version >= \"3.0\""} [[package]] name = "swaggyp" -version = "0.2.0" +version = "0.3.0" description = "Python library for generating Swagger templates based on valley" category = "main" optional = false @@ -1140,7 +1249,7 @@ valley = ">=1.5.6,<2.0.0" [[package]] name = "terminado" -version = "0.13.3" +version = "0.15.0" description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." category = "dev" optional = false @@ -1149,21 +1258,25 @@ python-versions = ">=3.7" [package.dependencies] ptyprocess = {version = "*", markers = "os_name != \"nt\""} pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""} -tornado = ">=4" +tornado = ">=6.1.0" [package.extras] -test = ["pytest"] +test = ["pre-commit", "pytest-timeout", "pytest (>=6.0)"] [[package]] -name = "testpath" -version = "0.6.0" -description = "Test utilities for code working with files and commands" +name = "tinycss2" +version = "1.1.1" +description = "A tiny CSS parser" category = "dev" optional = false -python-versions = ">= 3.5" +python-versions = ">=3.6" + +[package.dependencies] +webencodings = ">=0.4" [package.extras] -test = ["pytest"] +doc = ["sphinx", "sphinx-rtd-theme"] +test = ["pytest", "pytest-cov", "pytest-flake8", "pytest-isort", "coverage"] [[package]] name = "tornado" @@ -1175,14 +1288,14 @@ python-versions = ">= 3.5" [[package]] name = "traitlets" -version = "5.1.1" -description = "Traitlets Python configuration system" +version = "5.2.1.post0" +description = "" category = "dev" optional = false python-versions = ">=3.7" [package.extras] -test = ["pytest"] +test = ["pre-commit", "pytest"] [[package]] name = "urllib3" @@ -1226,7 +1339,7 @@ python-versions = "*" [[package]] name = "werkzeug" -version = "2.1.0" +version = "2.1.2" description = "The comprehensive WSGI web application library." 
category = "main" optional = false @@ -1248,25 +1361,25 @@ notebook = ">=4.4.1" [[package]] name = "zipp" -version = "3.7.0" +version = "3.8.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "45c80cdba27ede0a7a28b611355294d4166ccfd7d4209b7fb6b75582d854b5a7" +content-hash = "b98fdaeac6227f48d169512de02fc0155103f626a8e7c5d074c25fef2bef9cd5" [metadata.files] appnope = [ - {file = "appnope-0.1.2-py2.py3-none-any.whl", hash = "sha256:93aa393e9d6c54c5cd570ccadd8edad61ea0c4b9ea7a01409020c9aa019eb442"}, - {file = "appnope-0.1.2.tar.gz", hash = "sha256:dd83cd4b5b460958838f6eb3000c660b1f9caf2a5b1de4264e941512f603258a"}, + {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, + {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, ] argon2-cffi = [ {file = "argon2-cffi-21.3.0.tar.gz", hash = "sha256:d384164d944190a7dd7ef22c6aa3ff197da12962bd04b17f64d4e93d934dba5b"}, @@ -1312,28 +1425,28 @@ backcall = [ {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, ] beautifulsoup4 = [ - {file = "beautifulsoup4-4.10.0-py3-none-any.whl", hash = "sha256:9a315ce70049920ea4572a4055bc4bd700c940521d36fc858205ad4fcde149bf"}, - {file = "beautifulsoup4-4.10.0.tar.gz", hash = "sha256:c23ad23c521d818955a4151a67d81580319d4bf548d3d49f4223ae041ff98891"}, + {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, + {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, ] bleach = [ {file = "bleach-4.1.0-py2.py3-none-any.whl", hash = "sha256:4d2651ab93271d1129ac9cbc679f524565cc8a1b791909c4a51eac4446a15994"}, {file = "bleach-4.1.0.tar.gz", hash = "sha256:0900d8b37eba61a802ee40ac0061f8c2b5dee29c1927dd1d233e075ebf5a71da"}, ] boto3 = [ - {file = "boto3-1.21.31-py3-none-any.whl", hash = "sha256:35f68b60652bff50e7bc926238443cb578f29f120908bb945e5640e90c6dd53e"}, - {file = "boto3-1.21.31.tar.gz", hash = "sha256:7f3f93ee97215862ccd1a216f37deb7d64055c71f826b821805904df7b84ee6a"}, + {file = "boto3-1.23.8-py3-none-any.whl", hash = "sha256:15733c2bbedce7a36fcf1749560c72c3ee90785aa6302a98658c7bffdcbe1f2a"}, + {file = "boto3-1.23.8.tar.gz", hash = "sha256:ea8ebcea4ccb70d1cf57526d9eec6012c76796f28ada3e9cc1d89178683d8107"}, ] botocore = [ - {file = "botocore-1.24.31-py3-none-any.whl", hash = "sha256:424fd94bef86a11f5340dc15eb50602dedec2ecc01c3a25c4fea23a2c8195500"}, - {file = "botocore-1.24.31.tar.gz", hash = "sha256:3bb21e3ee5e4de3ed76bb99b4496a46e9b5c82e7b7fdb62702f11dda1b57b769"}, + {file = "botocore-1.26.8-py3-none-any.whl", hash = 
"sha256:620851daf1245af5bc28137aa821375bac964aa0eddc482437c783fe01e298fc"}, + {file = "botocore-1.26.8.tar.gz", hash = "sha256:e786722cb14de7319331cc55e9092174de66a768559700ef656d05ff41b3e24f"}, ] cachetools = [ {file = "cachetools-4.2.4-py3-none-any.whl", hash = "sha256:92971d3cb7d2a97efff7c7bb1657f21a8f5fb309a37530537c71b1774189f2d1"}, {file = "cachetools-4.2.4.tar.gz", hash = "sha256:89ea6f1b638d5a73a4f9226be57ac5e4f399d22770b92355f92dcb0f7f001693"}, ] certifi = [ - {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, - {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, + {file = "certifi-2022.5.18.1-py3-none-any.whl", hash = "sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a"}, + {file = "certifi-2022.5.18.1.tar.gz", hash = "sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7"}, ] cffi = [ {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"}, @@ -1392,8 +1505,8 @@ charset-normalizer = [ {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, ] click = [ - {file = "click-8.1.2-py3-none-any.whl", hash = "sha256:24e1a4a9ec5bf6299411369b208c1df2188d9eb8d916302fe6bf03faed227f1e"}, - {file = "click-8.1.2.tar.gz", hash = "sha256:479707fe14d9ec9a0757618b7a100a0ae4c4e236fac5b7f80ca68028141a1a72"}, + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, ] colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, @@ -1514,6 +1627,10 @@ executing = [ {file = "executing-0.8.3-py2.py3-none-any.whl", hash = "sha256:d1eef132db1b83649a3905ca6dd8897f71ac6f8cac79a7e58a1a09cf137546c9"}, {file = "executing-0.8.3.tar.gz", hash = "sha256:c6554e21c6b060590a6d3be4b82fb78f8f0194d809de5ea7df1c093763311501"}, ] +fastjsonschema = [ + {file = "fastjsonschema-2.15.3-py3-none-any.whl", hash = "sha256:ddb0b1d8243e6e3abb822bd14e447a89f4ab7439342912d590444831fa00b6a0"}, + {file = "fastjsonschema-2.15.3.tar.gz", hash = "sha256:0a572f0836962d844c1fc435e200b2e4f4677e4e6611a2e3bdd01ba697c275ec"}, +] faunadb = [ {file = "faunadb-4.2.0-py2.py3-none-any.whl", hash = "sha256:73d5f560bddb7fc45f9201d526f97fcd0a7b0ef74ce3d4d46b8e116f4ce6e219"}, ] @@ -1544,16 +1661,16 @@ idna = [ {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, ] importlib-resources = [ - {file = "importlib_resources-5.6.0-py3-none-any.whl", hash = "sha256:a9dd72f6cc106aeb50f6e66b86b69b454766dd6e39b69ac68450253058706bcc"}, - {file = "importlib_resources-5.6.0.tar.gz", hash = "sha256:1b93238cbf23b4cde34240dd8321d99e9bf2eb4bc91c0c99b2886283e7baad85"}, + {file = "importlib_resources-5.7.1-py3-none-any.whl", hash = "sha256:e447dc01619b1e951286f3929be820029d48c75eb25d265c28b92a16548212b8"}, + {file = "importlib_resources-5.7.1.tar.gz", hash = "sha256:b6062987dfc51f0fcb809187cffbd60f35df7acb4589091f154214af6d0d49d3"}, ] ipykernel = [ - {file = "ipykernel-6.11.0-py3-none-any.whl", hash = "sha256:62ec17caff6e4fa1dc87ef0a6f9eff5a5d6588bb585ab1e06897e7bec9eb2819"}, - {file = "ipykernel-6.11.0.tar.gz", 
hash = "sha256:6712604531c96100f326440c11cb023da26819f2f34ba9d1ca0fb163401834e8"}, + {file = "ipykernel-6.13.0-py3-none-any.whl", hash = "sha256:2b0987af43c0d4b62cecb13c592755f599f96f29aafe36c01731aaa96df30d39"}, + {file = "ipykernel-6.13.0.tar.gz", hash = "sha256:0e28273e290858393e86e152b104e5506a79c13d25b951ac6eca220051b4be60"}, ] ipython = [ - {file = "ipython-8.2.0-py3-none-any.whl", hash = "sha256:1b672bfd7a48d87ab203d9af8727a3b0174a4566b4091e9447c22fb63ea32857"}, - {file = "ipython-8.2.0.tar.gz", hash = "sha256:70e5eb132cac594a34b5f799bd252589009905f05104728aea6a403ec2519dc1"}, + {file = "ipython-8.3.0-py3-none-any.whl", hash = "sha256:341456643a764c28f670409bbd5d2518f9b82c013441084ff2c2fc999698f83b"}, + {file = "ipython-8.3.0.tar.gz", hash = "sha256:807ae3cf43b84693c9272f70368440a9a7eaa2e7e6882dad943c32fbf7e51402"}, ] ipython-genutils = [ {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, @@ -1580,8 +1697,8 @@ jmespath = [ {file = "jmespath-1.0.0.tar.gz", hash = "sha256:a490e280edd1f57d6de88636992d05b71e97d69a26a19f058ecf7d304474bf5e"}, ] jsonschema = [ - {file = "jsonschema-4.4.0-py3-none-any.whl", hash = "sha256:77281a1f71684953ee8b3d488371b162419767973789272434bbc3f29d9c8823"}, - {file = "jsonschema-4.4.0.tar.gz", hash = "sha256:636694eb41b3535ed608fe04129f26542b59ed99808b4f688aa32dcf55317a83"}, + {file = "jsonschema-4.5.1-py3-none-any.whl", hash = "sha256:71b5e39324422543546572954ce71c67728922c104902cb7ce252e522235b33f"}, + {file = "jsonschema-4.5.1.tar.gz", hash = "sha256:7c6d882619340c3347a1bf7315e147e6d3dae439033ae6383d6acb908c101dfc"}, ] jupyter = [ {file = "jupyter-1.0.0-py2.py3-none-any.whl", hash = "sha256:5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78"}, @@ -1589,20 +1706,20 @@ jupyter = [ {file = "jupyter-1.0.0.zip", hash = "sha256:3e1f86076bbb7c8c207829390305a2b1fe836d471ed54be66a3b8c41e7f46cc7"}, ] jupyter-client = [ - {file = "jupyter_client-7.2.1-py3-none-any.whl", hash = "sha256:d10e31ac4b8364d1cb30ebcee9e5cc7b7eb5d23b76912be9ef3d4c75167fbc68"}, - {file = "jupyter_client-7.2.1.tar.gz", hash = "sha256:aa177279e93205d0681ec0e2e210da01b22c5a1464a56abd455adcac64f0de91"}, + {file = "jupyter_client-7.3.1-py3-none-any.whl", hash = "sha256:404abe552540aff3527e66e16beb114b6b4ff58479d51a301f4eb9701e4f52ef"}, + {file = "jupyter_client-7.3.1.tar.gz", hash = "sha256:05d4ff6a0ade25138c6bb0fbeac7ddc26b5fe835e7dd816b64b4a45b931bdc0b"}, ] jupyter-console = [ {file = "jupyter_console-6.4.3-py3-none-any.whl", hash = "sha256:e630bcb682c0088dda45688ad7c2424d4a825c8acf494cb036ced03ed0424841"}, {file = "jupyter_console-6.4.3.tar.gz", hash = "sha256:55f32626b0be647a85e3217ddcdb22db69efc79e8b403b9771eb9ecc696019b5"}, ] jupyter-core = [ - {file = "jupyter_core-4.9.2-py3-none-any.whl", hash = "sha256:f875e4d27e202590311d468fa55f90c575f201490bd0c18acabe4e318db4a46d"}, - {file = "jupyter_core-4.9.2.tar.gz", hash = "sha256:d69baeb9ffb128b8cd2657fcf2703f89c769d1673c851812119e3a2a0e93ad9a"}, + {file = "jupyter_core-4.10.0-py3-none-any.whl", hash = "sha256:e7f5212177af7ab34179690140f188aa9bf3d322d8155ed972cbded19f55b6f3"}, + {file = "jupyter_core-4.10.0.tar.gz", hash = "sha256:a6de44b16b7b31d7271130c71a6792c4040f077011961138afed5e5e73181aec"}, ] jupyterlab-pygments = [ - {file = "jupyterlab_pygments-0.1.2-py2.py3-none-any.whl", hash = "sha256:abfb880fd1561987efaefcb2d2ac75145d2a5d0139b1876d5be806e32f630008"}, - {file = "jupyterlab_pygments-0.1.2.tar.gz", hash = 
"sha256:cfcda0873626150932f438eccf0f8bf22bfa92345b814890ab360d666b254146"}, + {file = "jupyterlab_pygments-0.2.2-py2.py3-none-any.whl", hash = "sha256:2405800db07c9f770863bcf8049a529c3dd4d3e28536638bd7c1c01d2748309f"}, + {file = "jupyterlab_pygments-0.2.2.tar.gz", hash = "sha256:7405d7fde60819d905a9fa8ce89e4cd830e318cdad22a0030f7a901da705585d"}, ] jupyterlab-widgets = [ {file = "jupyterlab_widgets-1.1.0-py3-none-any.whl", hash = "sha256:c2a9bd3789f120f64d73268c066ed3b000c56bc1dda217be5cdc43e7b4ebad3f"}, @@ -1659,24 +1776,48 @@ mistune = [ {file = "mistune-0.8.4.tar.gz", hash = "sha256:59a3429db53c50b5c6bcc8a07f8848cb00d7dc8bdb431a4ab41920d201d4756e"}, ] nbclient = [ - {file = "nbclient-0.5.13-py3-none-any.whl", hash = "sha256:47ac905af59379913c1f8f541098d2550153cf8dc58553cbe18c702b181518b0"}, - {file = "nbclient-0.5.13.tar.gz", hash = "sha256:40c52c9b5e3c31faecaee69f202b3f53e38d7c1c563de0fadde9d7eda0fdafe8"}, + {file = "nbclient-0.6.3-py3-none-any.whl", hash = "sha256:2747ac9b385720d8a6c34f2f71e72cbe64aec6cadaadcc064a4df0b0e99c5874"}, + {file = "nbclient-0.6.3.tar.gz", hash = "sha256:b80726fc1fb89a0e8f8be1e77e28d0026b1e8ed90bc143c8a0c7622e4f8cdd9e"}, ] nbconvert = [ - {file = "nbconvert-6.4.5-py3-none-any.whl", hash = "sha256:e01d219f55cc79f9701c834d605e8aa3acf35725345d3942e3983937f368ce14"}, - {file = "nbconvert-6.4.5.tar.gz", hash = "sha256:21163a8e2073c07109ca8f398836e45efdba2aacea68d6f75a8a545fef070d4e"}, + {file = "nbconvert-6.5.0-py3-none-any.whl", hash = "sha256:c56dd0b8978a1811a5654f74c727ff16ca87dd5a43abd435a1c49b840fcd8360"}, + {file = "nbconvert-6.5.0.tar.gz", hash = "sha256:223e46e27abe8596b8aed54301fadbba433b7ffea8196a68fd7b1ff509eee99d"}, ] nbformat = [ - {file = "nbformat-5.2.0-py3-none-any.whl", hash = "sha256:3e30424e8291b2188347f5c3ba5273ed3766f12f8c5137c2e456a0815f36e785"}, - {file = "nbformat-5.2.0.tar.gz", hash = "sha256:93df0b9c67221d38fb970c48f6d361819a6c388299a0ef3171bbb912edfe1324"}, + {file = "nbformat-5.4.0-py3-none-any.whl", hash = "sha256:0d6072aaec95dddc39735c144ee8bbc6589c383fb462e4058abc855348152dad"}, + {file = "nbformat-5.4.0.tar.gz", hash = "sha256:44ba5ca6acb80c5d5a500f1e5b83ede8cbe364d5a495c4c8cf60aaf1ba656501"}, ] nest-asyncio = [ - {file = "nest_asyncio-1.5.4-py3-none-any.whl", hash = "sha256:3fdd0d6061a2bb16f21fe8a9c6a7945be83521d81a0d15cff52e9edee50101d6"}, - {file = "nest_asyncio-1.5.4.tar.gz", hash = "sha256:f969f6013a16fadb4adcf09d11a68a4f617c6049d7af7ac2c676110169a63abd"}, + {file = "nest_asyncio-1.5.5-py3-none-any.whl", hash = "sha256:b98e3ec1b246135e4642eceffa5a6c23a3ab12c82ff816a92c612d68205813b2"}, + {file = "nest_asyncio-1.5.5.tar.gz", hash = "sha256:e442291cd942698be619823a17a86a5759eabe1f8613084790de189fe9e16d65"}, ] notebook = [ - {file = "notebook-6.4.10-py3-none-any.whl", hash = "sha256:49cead814bff0945fcb2ee07579259418672ac175d3dc3d8102a4b0a656ed4df"}, - {file = "notebook-6.4.10.tar.gz", hash = "sha256:2408a76bc6289283a8eecfca67e298ec83c67db51a4c2e1b713dd180bb39e90e"}, + {file = "notebook-6.4.11-py3-none-any.whl", hash = "sha256:b4a6baf2eba21ce67a0ca11a793d1781b06b8078f34d06c710742e55f3eee505"}, + {file = "notebook-6.4.11.tar.gz", hash = "sha256:709b1856a564fe53054796c80e17a67262071c86bfbdfa6b96aaa346113c555a"}, +] +openapi-schema-validator = [ + {file = "openapi-schema-validator-0.2.3.tar.gz", hash = "sha256:2c64907728c3ef78e23711c8840a423f0b241588c9ed929855e4b2d1bb0cf5f2"}, + {file = "openapi_schema_validator-0.2.3-py3-none-any.whl", hash = "sha256:9bae709212a19222892cabcc60cafd903cbf4b220223f48583afa3c0e3cc6fc4"}, +] 
+openapi-spec-validator = [ + {file = "openapi-spec-validator-0.4.0.tar.gz", hash = "sha256:97f258850afc97b048f7c2653855e0f88fa66ac103c2be5077c7960aca2ad49a"}, + {file = "openapi_spec_validator-0.4.0-py3-none-any.whl", hash = "sha256:06900ac4d546a1df3642a779da0055be58869c598e3042a2fef067cfd99d04d0"}, +] +openapi-schema-validator = [ + {file = "openapi-schema-validator-0.2.3.tar.gz", hash = "sha256:2c64907728c3ef78e23711c8840a423f0b241588c9ed929855e4b2d1bb0cf5f2"}, + {file = "openapi_schema_validator-0.2.3-py3-none-any.whl", hash = "sha256:9bae709212a19222892cabcc60cafd903cbf4b220223f48583afa3c0e3cc6fc4"}, +] +openapi-spec-validator = [ + {file = "openapi-spec-validator-0.4.0.tar.gz", hash = "sha256:97f258850afc97b048f7c2653855e0f88fa66ac103c2be5077c7960aca2ad49a"}, + {file = "openapi_spec_validator-0.4.0-py3-none-any.whl", hash = "sha256:06900ac4d546a1df3642a779da0055be58869c598e3042a2fef067cfd99d04d0"}, +] +openapi-schema-validator = [ + {file = "openapi-schema-validator-0.2.3.tar.gz", hash = "sha256:2c64907728c3ef78e23711c8840a423f0b241588c9ed929855e4b2d1bb0cf5f2"}, + {file = "openapi_schema_validator-0.2.3-py3-none-any.whl", hash = "sha256:9bae709212a19222892cabcc60cafd903cbf4b220223f48583afa3c0e3cc6fc4"}, +] +openapi-spec-validator = [ + {file = "openapi-spec-validator-0.4.0.tar.gz", hash = "sha256:97f258850afc97b048f7c2653855e0f88fa66ac103c2be5077c7960aca2ad49a"}, + {file = "openapi_spec_validator-0.4.0-py3-none-any.whl", hash = "sha256:06900ac4d546a1df3642a779da0055be58869c598e3042a2fef067cfd99d04d0"}, ] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, @@ -1706,46 +1847,46 @@ ply = [ {file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"}, ] prometheus-client = [ - {file = "prometheus_client-0.13.1-py3-none-any.whl", hash = "sha256:357a447fd2359b0a1d2e9b311a0c5778c330cfbe186d880ad5a6b39884652316"}, - {file = "prometheus_client-0.13.1.tar.gz", hash = "sha256:ada41b891b79fca5638bd5cfe149efa86512eaa55987893becd2c6d8d0a5dfc5"}, + {file = "prometheus_client-0.14.1-py3-none-any.whl", hash = "sha256:522fded625282822a89e2773452f42df14b5a8e84a86433e3f8a189c1d54dc01"}, + {file = "prometheus_client-0.14.1.tar.gz", hash = "sha256:5459c427624961076277fdc6dc50540e2bacb98eebde99886e59ec55ed92093a"}, ] prompt-toolkit = [ - {file = "prompt_toolkit-3.0.28-py3-none-any.whl", hash = "sha256:30129d870dcb0b3b6a53efdc9d0a83ea96162ffd28ffe077e94215b233dc670c"}, - {file = "prompt_toolkit-3.0.28.tar.gz", hash = "sha256:9f1cd16b1e86c2968f2519d7fb31dd9d669916f515612c269d14e9ed52b51650"}, + {file = "prompt_toolkit-3.0.29-py3-none-any.whl", hash = "sha256:62291dad495e665fca0bda814e342c69952086afb0f4094d0893d357e5c78752"}, + {file = "prompt_toolkit-3.0.29.tar.gz", hash = "sha256:bd640f60e8cecd74f0dc249713d433ace2ddc62b65ee07f96d358e0b152b6ea7"}, ] psutil = [ - {file = "psutil-5.9.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:55ce319452e3d139e25d6c3f85a1acf12d1607ddedea5e35fb47a552c051161b"}, - {file = "psutil-5.9.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:7336292a13a80eb93c21f36bde4328aa748a04b68c13d01dfddd67fc13fd0618"}, - {file = "psutil-5.9.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:cb8d10461c1ceee0c25a64f2dd54872b70b89c26419e147a05a10b753ad36ec2"}, - {file = "psutil-5.9.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:7641300de73e4909e5d148e90cc3142fb890079e1525a840cf0dfd39195239fd"}, - {file = "psutil-5.9.0-cp27-none-win32.whl", 
hash = "sha256:ea42d747c5f71b5ccaa6897b216a7dadb9f52c72a0fe2b872ef7d3e1eacf3ba3"}, - {file = "psutil-5.9.0-cp27-none-win_amd64.whl", hash = "sha256:ef216cc9feb60634bda2f341a9559ac594e2eeaadd0ba187a4c2eb5b5d40b91c"}, - {file = "psutil-5.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90a58b9fcae2dbfe4ba852b57bd4a1dded6b990a33d6428c7614b7d48eccb492"}, - {file = "psutil-5.9.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff0d41f8b3e9ebb6b6110057e40019a432e96aae2008951121ba4e56040b84f3"}, - {file = "psutil-5.9.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:742c34fff804f34f62659279ed5c5b723bb0195e9d7bd9907591de9f8f6558e2"}, - {file = "psutil-5.9.0-cp310-cp310-win32.whl", hash = "sha256:8293942e4ce0c5689821f65ce6522ce4786d02af57f13c0195b40e1edb1db61d"}, - {file = "psutil-5.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:9b51917c1af3fa35a3f2dabd7ba96a2a4f19df3dec911da73875e1edaf22a40b"}, - {file = "psutil-5.9.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e9805fed4f2a81de98ae5fe38b75a74c6e6ad2df8a5c479594c7629a1fe35f56"}, - {file = "psutil-5.9.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c51f1af02334e4b516ec221ee26b8fdf105032418ca5a5ab9737e8c87dafe203"}, - {file = "psutil-5.9.0-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32acf55cb9a8cbfb29167cd005951df81b567099295291bcfd1027365b36591d"}, - {file = "psutil-5.9.0-cp36-cp36m-win32.whl", hash = "sha256:e5c783d0b1ad6ca8a5d3e7b680468c9c926b804be83a3a8e95141b05c39c9f64"}, - {file = "psutil-5.9.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d62a2796e08dd024b8179bd441cb714e0f81226c352c802fca0fd3f89eeacd94"}, - {file = "psutil-5.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3d00a664e31921009a84367266b35ba0aac04a2a6cad09c550a89041034d19a0"}, - {file = "psutil-5.9.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7779be4025c540d1d65a2de3f30caeacc49ae7a2152108adeaf42c7534a115ce"}, - {file = "psutil-5.9.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:072664401ae6e7c1bfb878c65d7282d4b4391f1bc9a56d5e03b5a490403271b5"}, - {file = "psutil-5.9.0-cp37-cp37m-win32.whl", hash = "sha256:df2c8bd48fb83a8408c8390b143c6a6fa10cb1a674ca664954de193fdcab36a9"}, - {file = "psutil-5.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1d7b433519b9a38192dfda962dd8f44446668c009833e1429a52424624f408b4"}, - {file = "psutil-5.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c3400cae15bdb449d518545cbd5b649117de54e3596ded84aacabfbb3297ead2"}, - {file = "psutil-5.9.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2237f35c4bbae932ee98902a08050a27821f8f6dfa880a47195e5993af4702d"}, - {file = "psutil-5.9.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1070a9b287846a21a5d572d6dddd369517510b68710fca56b0e9e02fd24bed9a"}, - {file = "psutil-5.9.0-cp38-cp38-win32.whl", hash = "sha256:76cebf84aac1d6da5b63df11fe0d377b46b7b500d892284068bacccf12f20666"}, - {file = "psutil-5.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:3151a58f0fbd8942ba94f7c31c7e6b310d2989f4da74fcbf28b934374e9bf841"}, - {file = "psutil-5.9.0-cp39-cp39-macosx_10_9_x86_64.whl", 
hash = "sha256:539e429da49c5d27d5a58e3563886057f8fc3868a5547b4f1876d9c0f007bccf"}, - {file = "psutil-5.9.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58c7d923dc209225600aec73aa2c4ae8ea33b1ab31bc11ef8a5933b027476f07"}, - {file = "psutil-5.9.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3611e87eea393f779a35b192b46a164b1d01167c9d323dda9b1e527ea69d697d"}, - {file = "psutil-5.9.0-cp39-cp39-win32.whl", hash = "sha256:4e2fb92e3aeae3ec3b7b66c528981fd327fb93fd906a77215200404444ec1845"}, - {file = "psutil-5.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:7d190ee2eaef7831163f254dc58f6d2e2a22e27382b936aab51c835fc080c3d3"}, - {file = "psutil-5.9.0.tar.gz", hash = "sha256:869842dbd66bb80c3217158e629d6fceaecc3a3166d3d1faee515b05dd26ca25"}, + {file = "psutil-5.9.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:799759d809c31aab5fe4579e50addf84565e71c1dc9f1c31258f159ff70d3f87"}, + {file = "psutil-5.9.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9272167b5f5fbfe16945be3db475b3ce8d792386907e673a209da686176552af"}, + {file = "psutil-5.9.1-cp27-cp27m-win32.whl", hash = "sha256:0904727e0b0a038830b019551cf3204dd48ef5c6868adc776e06e93d615fc5fc"}, + {file = "psutil-5.9.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e7e10454cb1ab62cc6ce776e1c135a64045a11ec4c6d254d3f7689c16eb3efd2"}, + {file = "psutil-5.9.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:56960b9e8edcca1456f8c86a196f0c3d8e3e361320071c93378d41445ffd28b0"}, + {file = "psutil-5.9.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:44d1826150d49ffd62035785a9e2c56afcea66e55b43b8b630d7706276e87f22"}, + {file = "psutil-5.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7be9d7f5b0d206f0bbc3794b8e16fb7dbc53ec9e40bbe8787c6f2d38efcf6c9"}, + {file = "psutil-5.9.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd9246e4cdd5b554a2ddd97c157e292ac11ef3e7af25ac56b08b455c829dca8"}, + {file = "psutil-5.9.1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29a442e25fab1f4d05e2655bb1b8ab6887981838d22effa2396d584b740194de"}, + {file = "psutil-5.9.1-cp310-cp310-win32.whl", hash = "sha256:20b27771b077dcaa0de1de3ad52d22538fe101f9946d6dc7869e6f694f079329"}, + {file = "psutil-5.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:58678bbadae12e0db55186dc58f2888839228ac9f41cc7848853539b70490021"}, + {file = "psutil-5.9.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3a76ad658641172d9c6e593de6fe248ddde825b5866464c3b2ee26c35da9d237"}, + {file = "psutil-5.9.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6a11e48cb93a5fa606306493f439b4aa7c56cb03fc9ace7f6bfa21aaf07c453"}, + {file = "psutil-5.9.1-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:068935df39055bf27a29824b95c801c7a5130f118b806eee663cad28dca97685"}, + {file = "psutil-5.9.1-cp36-cp36m-win32.whl", hash = "sha256:0f15a19a05f39a09327345bc279c1ba4a8cfb0172cc0d3c7f7d16c813b2e7d36"}, + {file = "psutil-5.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:db417f0865f90bdc07fa30e1aadc69b6f4cad7f86324b02aa842034efe8d8c4d"}, + {file = "psutil-5.9.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:91c7ff2a40c373d0cc9121d54bc5f31c4fa09c346528e6a08d1845bce5771ffc"}, + {file = 
"psutil-5.9.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fea896b54f3a4ae6f790ac1d017101252c93f6fe075d0e7571543510f11d2676"}, + {file = "psutil-5.9.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3054e923204b8e9c23a55b23b6df73a8089ae1d075cb0bf711d3e9da1724ded4"}, + {file = "psutil-5.9.1-cp37-cp37m-win32.whl", hash = "sha256:d2d006286fbcb60f0b391741f520862e9b69f4019b4d738a2a45728c7e952f1b"}, + {file = "psutil-5.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:b14ee12da9338f5e5b3a3ef7ca58b3cba30f5b66f7662159762932e6d0b8f680"}, + {file = "psutil-5.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:19f36c16012ba9cfc742604df189f2f28d2720e23ff7d1e81602dbe066be9fd1"}, + {file = "psutil-5.9.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:944c4b4b82dc4a1b805329c980f270f170fdc9945464223f2ec8e57563139cf4"}, + {file = "psutil-5.9.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b6750a73a9c4a4e689490ccb862d53c7b976a2a35c4e1846d049dcc3f17d83b"}, + {file = "psutil-5.9.1-cp38-cp38-win32.whl", hash = "sha256:a8746bfe4e8f659528c5c7e9af5090c5a7d252f32b2e859c584ef7d8efb1e689"}, + {file = "psutil-5.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:79c9108d9aa7fa6fba6e668b61b82facc067a6b81517cab34d07a84aa89f3df0"}, + {file = "psutil-5.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:28976df6c64ddd6320d281128817f32c29b539a52bdae5e192537bc338a9ec81"}, + {file = "psutil-5.9.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b88f75005586131276634027f4219d06e0561292be8bd6bc7f2f00bdabd63c4e"}, + {file = "psutil-5.9.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:645bd4f7bb5b8633803e0b6746ff1628724668681a434482546887d22c7a9537"}, + {file = "psutil-5.9.1-cp39-cp39-win32.whl", hash = "sha256:32c52611756096ae91f5d1499fe6c53b86f4a9ada147ee42db4991ba1520e574"}, + {file = "psutil-5.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:f65f9a46d984b8cd9b3750c2bdb419b2996895b005aefa6cbaba9a143b1ce2c5"}, + {file = "psutil-5.9.1.tar.gz", hash = "sha256:57f1819b5d9e95cdfb0c881a8a5b7d542ed0b7c522d575706a80bedc848c8954"}, ] ptyprocess = [ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, @@ -1764,16 +1905,16 @@ pycparser = [ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] pygments = [ - {file = "Pygments-2.11.2-py3-none-any.whl", hash = "sha256:44238f1b60a76d78fc8ca0528ee429702aae011c265fe6a8dd8b63049ae41c65"}, - {file = "Pygments-2.11.2.tar.gz", hash = "sha256:4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a"}, + {file = "Pygments-2.12.0-py3-none-any.whl", hash = "sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519"}, + {file = "Pygments-2.12.0.tar.gz", hash = "sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb"}, ] pyjwt = [ - {file = "PyJWT-2.3.0-py3-none-any.whl", hash = "sha256:e0c4bb8d9f0af0c7f5b1ec4c5036309617d03d56932877f2f7a0beeb5318322f"}, - {file = "PyJWT-2.3.0.tar.gz", hash = "sha256:b888b4d56f06f6dcd777210c334e69c737be74755d3e5e9ee3fe67dc18a0ee41"}, + {file = "PyJWT-2.4.0-py3-none-any.whl", hash = 
"sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf"}, + {file = "PyJWT-2.4.0.tar.gz", hash = "sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba"}, ] pyparsing = [ - {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"}, - {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"}, + {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, + {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, ] pyrsistent = [ {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"}, @@ -1807,18 +1948,20 @@ pytz = [ {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, ] pywin32 = [ - {file = "pywin32-303-cp310-cp310-win32.whl", hash = "sha256:6fed4af057039f309263fd3285d7b8042d41507343cd5fa781d98fcc5b90e8bb"}, - {file = "pywin32-303-cp310-cp310-win_amd64.whl", hash = "sha256:51cb52c5ec6709f96c3f26e7795b0bf169ee0d8395b2c1d7eb2c029a5008ed51"}, - {file = "pywin32-303-cp311-cp311-win32.whl", hash = "sha256:d9b5d87ca944eb3aa4cd45516203ead4b37ab06b8b777c54aedc35975dec0dee"}, - {file = "pywin32-303-cp311-cp311-win_amd64.whl", hash = "sha256:fcf44032f5b14fcda86028cdf49b6ebdaea091230eb0a757282aa656e4732439"}, - {file = "pywin32-303-cp36-cp36m-win32.whl", hash = "sha256:aad484d52ec58008ca36bd4ad14a71d7dd0a99db1a4ca71072213f63bf49c7d9"}, - {file = "pywin32-303-cp36-cp36m-win_amd64.whl", hash = "sha256:2a09632916b6bb231ba49983fe989f2f625cea237219530e81a69239cd0c4559"}, - {file = "pywin32-303-cp37-cp37m-win32.whl", hash = "sha256:b1675d82bcf6dbc96363fca747bac8bff6f6e4a447a4287ac652aa4b9adc796e"}, - {file = "pywin32-303-cp37-cp37m-win_amd64.whl", hash = "sha256:c268040769b48a13367221fced6d4232ed52f044ffafeda247bd9d2c6bdc29ca"}, - {file = "pywin32-303-cp38-cp38-win32.whl", hash = "sha256:5f9ec054f5a46a0f4dfd72af2ce1372f3d5a6e4052af20b858aa7df2df7d355b"}, - {file = "pywin32-303-cp38-cp38-win_amd64.whl", hash = "sha256:793bf74fce164bcffd9d57bb13c2c15d56e43c9542a7b9687b4fccf8f8a41aba"}, - {file = "pywin32-303-cp39-cp39-win32.whl", hash = "sha256:7d3271c98434617a11921c5ccf74615794d97b079e22ed7773790822735cc352"}, - {file = "pywin32-303-cp39-cp39-win_amd64.whl", hash = "sha256:79cbb862c11b9af19bcb682891c1b91942ec2ff7de8151e2aea2e175899cda34"}, + {file = "pywin32-304-cp310-cp310-win32.whl", hash = "sha256:3c7bacf5e24298c86314f03fa20e16558a4e4138fc34615d7de4070c23e65af3"}, + {file = "pywin32-304-cp310-cp310-win_amd64.whl", hash = "sha256:4f32145913a2447736dad62495199a8e280a77a0ca662daa2332acf849f0be48"}, + {file = "pywin32-304-cp310-cp310-win_arm64.whl", hash = "sha256:d3ee45adff48e0551d1aa60d2ec066fec006083b791f5c3527c40cd8aefac71f"}, + {file = "pywin32-304-cp311-cp311-win32.whl", hash = "sha256:30c53d6ce44c12a316a06c153ea74152d3b1342610f1b99d40ba2795e5af0269"}, + {file = "pywin32-304-cp311-cp311-win_amd64.whl", hash = "sha256:7ffa0c0fa4ae4077e8b8aa73800540ef8c24530057768c3ac57c609f99a14fd4"}, + {file = "pywin32-304-cp311-cp311-win_arm64.whl", hash = "sha256:cbbe34dad39bdbaa2889a424d28752f1b4971939b14b1bb48cbf0182a3bcfc43"}, + {file = "pywin32-304-cp36-cp36m-win32.whl", hash = "sha256:be253e7b14bc601718f014d2832e4c18a5b023cbe72db826da63df76b77507a1"}, + {file = 
"pywin32-304-cp36-cp36m-win_amd64.whl", hash = "sha256:de9827c23321dcf43d2f288f09f3b6d772fee11e809015bdae9e69fe13213988"}, + {file = "pywin32-304-cp37-cp37m-win32.whl", hash = "sha256:f64c0377cf01b61bd5e76c25e1480ca8ab3b73f0c4add50538d332afdf8f69c5"}, + {file = "pywin32-304-cp37-cp37m-win_amd64.whl", hash = "sha256:bb2ea2aa81e96eee6a6b79d87e1d1648d3f8b87f9a64499e0b92b30d141e76df"}, + {file = "pywin32-304-cp38-cp38-win32.whl", hash = "sha256:94037b5259701988954931333aafd39cf897e990852115656b014ce72e052e96"}, + {file = "pywin32-304-cp38-cp38-win_amd64.whl", hash = "sha256:ead865a2e179b30fb717831f73cf4373401fc62fbc3455a0889a7ddac848f83e"}, + {file = "pywin32-304-cp39-cp39-win32.whl", hash = "sha256:25746d841201fd9f96b648a248f731c1dec851c9a08b8e33da8b56148e4c65cc"}, + {file = "pywin32-304-cp39-cp39-win_amd64.whl", hash = "sha256:d24a3382f013b21aa24a5cfbfad5a2cd9926610c0affde3e8ab5b3d7dbcf4ac9"}, ] pywinpty = [ {file = "pywinpty-2.0.5-cp310-none-win_amd64.whl", hash = "sha256:f86c76e2881c37e69678cbbf178109f8da1fa8584db24d58e1b9369b0276cfcb"}, @@ -1863,61 +2006,71 @@ pyyaml = [ {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] pyzmq = [ - {file = "pyzmq-22.3.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:6b217b8f9dfb6628f74b94bdaf9f7408708cb02167d644edca33f38746ca12dd"}, - {file = "pyzmq-22.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2841997a0d85b998cbafecb4183caf51fd19c4357075dfd33eb7efea57e4c149"}, - {file = "pyzmq-22.3.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f89468059ebc519a7acde1ee50b779019535db8dcf9b8c162ef669257fef7a93"}, - {file = "pyzmq-22.3.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea12133df25e3a6918718fbb9a510c6ee5d3fdd5a346320421aac3882f4feeea"}, - {file = "pyzmq-22.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c532fd68b93998aab92356be280deec5de8f8fe59cd28763d2cc8a58747b7f"}, - {file = "pyzmq-22.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f907c7359ce8bf7f7e63c82f75ad0223384105f5126f313400b7e8004d9b33c3"}, - {file = "pyzmq-22.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:902319cfe23366595d3fa769b5b751e6ee6750a0a64c5d9f757d624b2ac3519e"}, - {file = "pyzmq-22.3.0-cp310-cp310-win32.whl", hash = "sha256:67db33bea0a29d03e6eeec55a8190e033318cee3cbc732ba8fd939617cbf762d"}, - {file = "pyzmq-22.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:7661fc1d5cb73481cf710a1418a4e1e301ed7d5d924f91c67ba84b2a1b89defd"}, - {file = "pyzmq-22.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79244b9e97948eaf38695f4b8e6fc63b14b78cc37f403c6642ba555517ac1268"}, - {file = "pyzmq-22.3.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab888624ed68930442a3f3b0b921ad7439c51ba122dbc8c386e6487a658e4a4e"}, - {file = "pyzmq-22.3.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18cd854b423fce44951c3a4d3e686bac8f1243d954f579e120a1714096637cc0"}, - {file = "pyzmq-22.3.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:de8df0684398bd74ad160afdc2a118ca28384ac6f5e234eb0508858d8d2d9364"}, - {file = "pyzmq-22.3.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:62bcade20813796c426409a3e7423862d50ff0639f5a2a95be4b85b09a618666"}, - {file = "pyzmq-22.3.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ea5a79e808baef98c48c884effce05c31a0698c1057de8fc1c688891043c1ce1"}, - {file = "pyzmq-22.3.0-cp36-cp36m-win32.whl", hash = 
"sha256:3c1895c95be92600233e476fe283f042e71cf8f0b938aabf21b7aafa62a8dac9"}, - {file = "pyzmq-22.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:851977788b9caa8ed011f5f643d3ee8653af02c5fc723fa350db5125abf2be7b"}, - {file = "pyzmq-22.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b4ebed0977f92320f6686c96e9e8dd29eed199eb8d066936bac991afc37cbb70"}, - {file = "pyzmq-22.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42abddebe2c6a35180ca549fadc7228d23c1e1f76167c5ebc8a936b5804ea2df"}, - {file = "pyzmq-22.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1e41b32d6f7f9c26bc731a8b529ff592f31fc8b6ef2be9fa74abd05c8a342d7"}, - {file = "pyzmq-22.3.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:be4e0f229cf3a71f9ecd633566bd6f80d9fa6afaaff5489492be63fe459ef98c"}, - {file = "pyzmq-22.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:08c4e315a76ef26eb833511ebf3fa87d182152adf43dedee8d79f998a2162a0b"}, - {file = "pyzmq-22.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:badb868fff14cfd0e200eaa845887b1011146a7d26d579aaa7f966c203736b92"}, - {file = "pyzmq-22.3.0-cp37-cp37m-win32.whl", hash = "sha256:7c58f598d9fcc52772b89a92d72bf8829c12d09746a6d2c724c5b30076c1f11d"}, - {file = "pyzmq-22.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2b97502c16a5ec611cd52410bdfaab264997c627a46b0f98d3f666227fd1ea2d"}, - {file = "pyzmq-22.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d728b08448e5ac3e4d886b165385a262883c34b84a7fe1166277fe675e1c197a"}, - {file = "pyzmq-22.3.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:480b9931bfb08bf8b094edd4836271d4d6b44150da051547d8c7113bf947a8b0"}, - {file = "pyzmq-22.3.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7dc09198e4073e6015d9a8ea093fc348d4e59de49382476940c3dd9ae156fba8"}, - {file = "pyzmq-22.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ca6cd58f62a2751728016d40082008d3b3412a7f28ddfb4a2f0d3c130f69e74"}, - {file = "pyzmq-22.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:468bd59a588e276961a918a3060948ae68f6ff5a7fa10bb2f9160c18fe341067"}, - {file = "pyzmq-22.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c88fa7410e9fc471e0858638f403739ee869924dd8e4ae26748496466e27ac59"}, - {file = "pyzmq-22.3.0-cp38-cp38-win32.whl", hash = "sha256:c0f84360dcca3481e8674393bdf931f9f10470988f87311b19d23cda869bb6b7"}, - {file = "pyzmq-22.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:f762442bab706fd874064ca218b33a1d8e40d4938e96c24dafd9b12e28017f45"}, - {file = "pyzmq-22.3.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:954e73c9cd4d6ae319f1c936ad159072b6d356a92dcbbabfd6e6204b9a79d356"}, - {file = "pyzmq-22.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f43b4a2e6218371dd4f41e547bd919ceeb6ebf4abf31a7a0669cd11cd91ea973"}, - {file = "pyzmq-22.3.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:acebba1a23fb9d72b42471c3771b6f2f18dcd46df77482612054bd45c07dfa36"}, - {file = "pyzmq-22.3.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cf98fd7a6c8aaa08dbc699ffae33fd71175696d78028281bc7b832b26f00ca57"}, - {file = "pyzmq-22.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d072f7dfbdb184f0786d63bda26e8a0882041b1e393fbe98940395f7fab4c5e2"}, - {file = "pyzmq-22.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:53f4fd13976789ffafedd4d46f954c7bb01146121812b72b4ddca286034df966"}, - {file = 
"pyzmq-22.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d1b5d457acbadcf8b27561deeaa386b0217f47626b29672fa7bd31deb6e91e1b"}, - {file = "pyzmq-22.3.0-cp39-cp39-win32.whl", hash = "sha256:e6a02cf7271ee94674a44f4e62aa061d2d049001c844657740e156596298b70b"}, - {file = "pyzmq-22.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d3dcb5548ead4f1123851a5ced467791f6986d68c656bc63bfff1bf9e36671e2"}, - {file = "pyzmq-22.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3a4c9886d61d386b2b493377d980f502186cd71d501fffdba52bd2a0880cef4f"}, - {file = "pyzmq-22.3.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:80e043a89c6cadefd3a0712f8a1322038e819ebe9dbac7eca3bce1721bcb63bf"}, - {file = "pyzmq-22.3.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1621e7a2af72cced1f6ec8ca8ca91d0f76ac236ab2e8828ac8fe909512d566cb"}, - {file = "pyzmq-22.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:d6157793719de168b199194f6b6173f0ccd3bf3499e6870fac17086072e39115"}, - {file = "pyzmq-22.3.0.tar.gz", hash = "sha256:8eddc033e716f8c91c6a2112f0a8ebc5e00532b4a6ae1eb0ccc48e027f9c671c"}, + {file = "pyzmq-23.0.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:176be6c348dbec04e8e0d41e810743b7084b73e50954a6fedeeafc65d7fa9290"}, + {file = "pyzmq-23.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ef2d1476cea927ba33a29f59aa128ce3b174e81083cbd091dd3149af741c85d"}, + {file = "pyzmq-23.0.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2394bb857607494c3750b5040f852a1ad7831d7a7907b6106f0af2c70860cef"}, + {file = "pyzmq-23.0.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fe8807d67456e7cf0e9a33b85e0d05bb9d2977dbdb23977e4cc2b843633618fd"}, + {file = "pyzmq-23.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be3425dfdb9c46dc62d490fc1a6142a5f3dc6605ebb9048ae675056ef621413c"}, + {file = "pyzmq-23.0.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cda55ff0a7566405fb29ca38db1829fecb4c041b8dc3f91754f337bb7b27cbd8"}, + {file = "pyzmq-23.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2e4d70d34112997a32c8193fae2579aec854745f8730031e5d84cc579fd98ff"}, + {file = "pyzmq-23.0.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f3daabbe42ca31712e29d906dfa4bf1890341d2fd5178de118bc9977a8d2b23b"}, + {file = "pyzmq-23.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e7ae3e520bd182a0cbfff3cc69dda3a2c26f69847e81bd3f090ed04471fc1282"}, + {file = "pyzmq-23.0.0-cp310-cp310-win32.whl", hash = "sha256:1d480d48253f61ff90115b8069ed32f51a0907eb19101c4a5ae0b9a5973e40ad"}, + {file = "pyzmq-23.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:7eca5902ff41575d9a26f91fc750018b7eb129600ea600fe69ce852fbdfab4e2"}, + {file = "pyzmq-23.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b2a4af5e6fa85ee1743c725b46579f8de0b97024eb5ae1a0b5c5711adc436665"}, + {file = "pyzmq-23.0.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:591b455546d34bb96aa453dd9666bddb8c81314e23dbf2606f9614acf7e73d9f"}, + {file = "pyzmq-23.0.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdd008629293a0d4f00b516841ac0df89f17a64bc2d83bcfa48212d3f3b3ca1a"}, + {file = "pyzmq-23.0.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:df0b05fa4321b090abe5601dea9b1c8933c06f496588ccb397a0b1f9dfe32ebe"}, + {file = "pyzmq-23.0.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = 
"sha256:12a53f5c13edf12547ce495afebdd5ab11c1b67ea078a941b21e13161783741a"}, + {file = "pyzmq-23.0.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:cb45b7ea577283b547b907a3389d62ca2eaddaf725fbb79cd360802440fa9c91"}, + {file = "pyzmq-23.0.0-cp36-cp36m-win32.whl", hash = "sha256:0a787f7870cba38d655c68ea7ae14bb6c3e9e19bb618d0c2412513321eeaeb80"}, + {file = "pyzmq-23.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:536491ad640448f14d8aa2dc497c354a348f216eb23513bf5aa0ac40e2b02577"}, + {file = "pyzmq-23.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5eaf7e0841d3d8d1d92838c8b56f98cb9bf35b14bcbe4efa281e4812ef4be728"}, + {file = "pyzmq-23.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21792f4d0fcc5040978ee211c033e915d8b6608ea8a5b33fe197a04f0d43e991"}, + {file = "pyzmq-23.0.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a37f0ec88e220326803084208d80229218b309d728954ab747ab21cca33424aa"}, + {file = "pyzmq-23.0.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9622d9560a6fd8d589816cdcec6946642cb4e070b3f68be1d3779b52cf240f73"}, + {file = "pyzmq-23.0.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:434044eec7f9df08fc5ca5c9bdd1a4bb08663679d43ebe7b9849713956f4d85f"}, + {file = "pyzmq-23.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12eac2294d48ee27d1eaef7e214acedb394e4c95e3a1f6e4467099b82d58ef73"}, + {file = "pyzmq-23.0.0-cp37-cp37m-win32.whl", hash = "sha256:07d2008e51718fba60641e5d1a0646b222b7929f16f6e7cf0834b8439f42c9e8"}, + {file = "pyzmq-23.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b8528aefceb787f41ad429f3210a3c6b52e99f85413416e3d0c9e6d035f8ac"}, + {file = "pyzmq-23.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3f3807e81bf51d4c63eb12a21920614e0e840645418e9f2e3b5ffdd5991b3415"}, + {file = "pyzmq-23.0.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:011a45c846ec69a3671ed15893b74b6ad608800c89ac6d0f0411e2137c6b313d"}, + {file = "pyzmq-23.0.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b97dc1273f16f85a38cff6668a07b636ef14e30591039efbfd21f5f91efae964"}, + {file = "pyzmq-23.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8951830d6a00636b3af478091f9668ecc486f1dad01b975527957fd1d8c31bfd"}, + {file = "pyzmq-23.0.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5619f6598d6fd30778053ae2daa48a7c54029816648b908270b751411fd52e74"}, + {file = "pyzmq-23.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0a89b9860d2171bcf674648dc8186db9cf3b773ad3c0610a2c7bf189cf3560b6"}, + {file = "pyzmq-23.0.0-cp38-cp38-win32.whl", hash = "sha256:0258563bf69f6ca305204354f171e0627a9bf8fe78c9d4f63a5e2447035cbb4b"}, + {file = "pyzmq-23.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:9feb7ccd426ff2158ce79f4c87a8a1600ed4f77e65e2fffda2b42638b2bc73e4"}, + {file = "pyzmq-23.0.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:e9631c6a339843e4f95efb80ff9a1bfaaf3d611ba9677a7a5cc61ffb923b4e06"}, + {file = "pyzmq-23.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:34b143751e9b2b89cf9b656081f1b2842a563c4c9ffc8465531875daf546e772"}, + {file = "pyzmq-23.0.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2f227150148e7c3db7ecd8a58500439979f556e15455841a30b6d121755b14bc"}, + {file = "pyzmq-23.0.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277b3ebc684b369a57a186a9acf629c1b01247eb04d1105536ef2dae5f61168a"}, + {file = 
"pyzmq-23.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e2093a97bf3f6008a4be6b5bae8ae3fc409f18373593bef19dd7b381ab8030c"}, + {file = "pyzmq-23.0.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6c09e6e5c4baf0959287943dc8170624d739ae555d334e896a94d9de01c7bb21"}, + {file = "pyzmq-23.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8c234aefeef034c5d6de452e2af5173a95ea06315b685db703091e6f937a6e60"}, + {file = "pyzmq-23.0.0-cp39-cp39-win32.whl", hash = "sha256:7b518ad9cdbaaeb1a9da3444797698871ae2eeae34ff9a656d5150d37e1e42a1"}, + {file = "pyzmq-23.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:011f26841dd56ed87e464c98023dbbd4c0b3ab8802a045de3ea83e0187eb8145"}, + {file = "pyzmq-23.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a89285fedbeca483a855a77285453e21e4fc86ef0944bc018ef4b3033aa04ad2"}, + {file = "pyzmq-23.0.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5a13171268f05d127e31b4c369b753733f67dbb0d765901ef625a115feb5c7de"}, + {file = "pyzmq-23.0.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cd3f563b98e2a8730c93bdc550f119ae766b2d3da1f0d6a3c7735b59adfa1642"}, + {file = "pyzmq-23.0.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:e730d490b1421e52b43b1b9f5e1f8c3973499206e188f29b582577531e11033b"}, + {file = "pyzmq-23.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0de8a7e13ffacfe33c89acc0d7bfa2f5bde94e3f74b7f1e4d43c97ce17864d77"}, + {file = "pyzmq-23.0.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a64b9cce166396df5f33894074d6515778d48c63aae5ee1abd86d8bbc5a711d8"}, + {file = "pyzmq-23.0.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e464e7b1be2216eba54b47256c15bf307ae4a656aa0f73becea7b3e7283c5ac2"}, + {file = "pyzmq-23.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3fa7126d532effee452c0ab395ab3cbef1c06fd6870ab7e681f812ba9e685cfa"}, + {file = "pyzmq-23.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9273f6d1da1018822f41630fb0f3fe208e8e70e5d5e780795326900cfa22d8b6"}, + {file = "pyzmq-23.0.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca7d77f24644298cbe53bc279eb7ca05f3b8637473d392f0c9f34b37f08b49a"}, + {file = "pyzmq-23.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8f40604437ec8010f77f7053fd135ccb202d6ca18329903831087cab8dbdab1"}, + {file = "pyzmq-23.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4d861ae20040afc17adef33053c328667da78d4d3676b2936788fd031665e3a8"}, + {file = "pyzmq-23.0.0.tar.gz", hash = "sha256:a45f5c0477d12df05ef2e2922b49b7c0ae9d0f4ff9b6bb0d666558df0ef37122"}, ] qtconsole = [ {file = "qtconsole-5.3.0-py3-none-any.whl", hash = "sha256:75f2ded876444454edcb5a53262149e33b53db3a4a53116b7c3df52830905b0f"}, {file = "qtconsole-5.3.0.tar.gz", hash = "sha256:8e3520fdc75e46abc4cc6cffeca16fa2652754109b8ae839fa28e27d1eba5625"}, ] qtpy = [ - {file = "QtPy-2.0.1-py3-none-any.whl", hash = "sha256:d93f2c98e97387fcc9d623d509772af5b6c15ab9d8f9f4c5dfbad9a73ad34812"}, - {file = "QtPy-2.0.1.tar.gz", hash = "sha256:adfd073ffbd2de81dc7aaa0b983499ef5c59c96adcfdcc9dea60d42ca885eb8f"}, + {file = "QtPy-2.1.0-py3-none-any.whl", hash = "sha256:aee0586081f943029312becece9f63977b0a9e3788f77a6ac8cc74802bb173d6"}, + {file = "QtPy-2.1.0.tar.gz", hash = "sha256:ca8cd4217175186344299ee4c0f7e7adcf362c70852ba35b255a534077025c06"}, ] requests = [ {file = "requests-2.27.1-py2.py3-none-any.whl", hash = 
"sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, @@ -1940,28 +2093,28 @@ six = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] soupsieve = [ - {file = "soupsieve-2.3.1-py3-none-any.whl", hash = "sha256:1a3cca2617c6b38c0343ed661b1fa5de5637f257d4fe22bd9f1338010a1efefb"}, - {file = "soupsieve-2.3.1.tar.gz", hash = "sha256:b8d49b1cd4f037c7082a9683dfa1801aa2597fb11c3a1155b7a5b94829b4f1f9"}, + {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, + {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, ] stack-data = [ {file = "stack_data-0.2.0-py3-none-any.whl", hash = "sha256:999762f9c3132308789affa03e9271bbbe947bf78311851f4d485d8402ed858e"}, {file = "stack_data-0.2.0.tar.gz", hash = "sha256:45692d41bd633a9503a5195552df22b583caf16f0b27c4e58c98d88c8b648e12"}, ] stripe = [ - {file = "stripe-2.70.0-py2.py3-none-any.whl", hash = "sha256:7e3f50e844913e036f5419d73274315ae0d72315d36d3f791fdb572b1e84660c"}, - {file = "stripe-2.70.0.tar.gz", hash = "sha256:ed8897f68e6bac3398cc998eb5634551840630d6504c0026fcfd0ad91c9a74a4"}, + {file = "stripe-2.76.0-py2.py3-none-any.whl", hash = "sha256:756bf6c1206f438d1fa23bb90cdf1233c9383478f854f2720a8a3e1eaf1f715b"}, + {file = "stripe-2.76.0.tar.gz", hash = "sha256:fd3fc6935c3b6189967191607b6f38ebe490005a590b4d0d43fbe3aba45deca8"}, ] swaggyp = [ - {file = "swaggyp-0.2.0-py3-none-any.whl", hash = "sha256:030dca0f0a24469abcf7e1f047eaa01c84c206a8d891cfb1a4a24f40e2a6a146"}, - {file = "swaggyp-0.2.0.tar.gz", hash = "sha256:b39ab7f8a49fdb17af7862c5ac81b872a5f7f95c3b0989a6a285fde3f4ee4885"}, + {file = "swaggyp-0.3.0-py3-none-any.whl", hash = "sha256:d678daa026f374c09690bfae93670e8824c616fe7186375cca531dd9530d2c41"}, + {file = "swaggyp-0.3.0.tar.gz", hash = "sha256:2af8b74e014ba83287d4b2fcbe355636ca8ec09ca70bffba4290b3f833c8ad46"}, ] terminado = [ - {file = "terminado-0.13.3-py3-none-any.whl", hash = "sha256:874d4ea3183536c1782d13c7c91342ef0cf4e5ee1d53633029cbc972c8760bd8"}, - {file = "terminado-0.13.3.tar.gz", hash = "sha256:94d1cfab63525993f7d5c9b469a50a18d0cdf39435b59785715539dd41e36c0d"}, + {file = "terminado-0.15.0-py3-none-any.whl", hash = "sha256:0d5f126fbfdb5887b25ae7d9d07b0d716b1cc0ccaacc71c1f3c14d228e065197"}, + {file = "terminado-0.15.0.tar.gz", hash = "sha256:ab4eeedccfcc1e6134bfee86106af90852c69d602884ea3a1e8ca6d4486e9bfe"}, ] -testpath = [ - {file = "testpath-0.6.0-py3-none-any.whl", hash = "sha256:8ada9f80a2ac6fb0391aa7cdb1a7d11cfa8429f693eda83f74dde570fe6fa639"}, - {file = "testpath-0.6.0.tar.gz", hash = "sha256:2f1b97e6442c02681ebe01bd84f531028a7caea1af3825000f52345c30285e0f"}, +tinycss2 = [ + {file = "tinycss2-1.1.1-py3-none-any.whl", hash = "sha256:fe794ceaadfe3cf3e686b22155d0da5780dd0e273471a51846d0a02bc204fec8"}, + {file = "tinycss2-1.1.1.tar.gz", hash = "sha256:b2e44dd8883c360c35dd0d1b5aad0b610e5156c2cb3b33434634e539ead9d8bf"}, ] tornado = [ {file = "tornado-6.1-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:d371e811d6b156d82aa5f9a4e08b58debf97c302a35714f6f45e35139c332e32"}, @@ -2007,8 +2160,8 @@ tornado = [ {file = "tornado-6.1.tar.gz", hash = "sha256:33c6e81d7bd55b468d2e793517c909b139960b6c790a60b7991b9b6b76fb9791"}, ] traitlets = [ - {file = "traitlets-5.1.1-py3-none-any.whl", hash = "sha256:2d313cc50a42cd6c277e7d7dc8d4d7fedd06a2c215f78766ae7b1a66277e0033"}, - {file = "traitlets-5.1.1.tar.gz", hash = 
"sha256:059f456c5a7c1c82b98c2e8c799f39c9b8128f6d0d46941ee118daace9eb70c7"}, + {file = "traitlets-5.2.1.post0-py3-none-any.whl", hash = "sha256:f44b708d33d98b0addb40c29d148a761f44af740603a8fd0e2f8b5b27cf0f087"}, + {file = "traitlets-5.2.1.post0.tar.gz", hash = "sha256:70815ecb20ec619d1af28910ade523383be13754283aef90528eb3d47b77c5db"}, ] urllib3 = [ {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"}, @@ -2027,14 +2180,14 @@ webencodings = [ {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, ] werkzeug = [ - {file = "Werkzeug-2.1.0-py3-none-any.whl", hash = "sha256:094ecfc981948f228b30ee09dbfe250e474823b69b9b1292658301b5894bbf08"}, - {file = "Werkzeug-2.1.0.tar.gz", hash = "sha256:9b55466a3e99e13b1f0686a66117d39bda85a992166e0a79aedfcf3586328f7a"}, + {file = "Werkzeug-2.1.2-py3-none-any.whl", hash = "sha256:72a4b735692dd3135217911cbeaa1be5fa3f62bffb8745c5215420a03dc55255"}, + {file = "Werkzeug-2.1.2.tar.gz", hash = "sha256:1ce08e8093ed67d638d63879fd1ba3735817f7a80de3674d293f5984f25fb6e6"}, ] widgetsnbextension = [ {file = "widgetsnbextension-3.6.0-py2.py3-none-any.whl", hash = "sha256:4fd321cad39fdcf8a8e248a657202d42917ada8e8ed5dd3f60f073e0d54ceabd"}, {file = "widgetsnbextension-3.6.0.tar.gz", hash = "sha256:e84a7a9fcb9baf3d57106e184a7389a8f8eb935bf741a5eb9d60aa18cc029a80"}, ] zipp = [ - {file = "zipp-3.7.0-py3-none-any.whl", hash = "sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375"}, - {file = "zipp-3.7.0.tar.gz", hash = "sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d"}, + {file = "zipp-3.8.0-py3-none-any.whl", hash = "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"}, + {file = "zipp-3.8.0.tar.gz", hash = "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad"}, ] diff --git a/pyproject.toml b/pyproject.toml index 25bd35e..5aaa2ab 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,7 +25,8 @@ graphql-py = "^0.8.1" sammy = "^0.4.3" stripe = "^2.61.0" bleach = "^4.1.0" -swaggyp = "^0.2.0" +openapi-spec-validator = "^0.4.0" +swaggyp = "^0.3.0" [tool.poetry.dev-dependencies] jupyter = "^1.0.0"