Compare commits

...

27 Commits

Author SHA1 Message Date
2ad7700f72 Merge pull request #30 from creyD/renovate/stripe-11.x
feat(deps): update dependency stripe to v11
2025-01-24 18:39:30 +01:00
1d7b767623 Merge pull request #31 from vikynoah/pagination_fix 2025-01-24 15:29:52 +01:00
vikynoah
f1f29e84c2 fix: Pagination Fix 2025-01-24 15:23:08 +01:00
dcb9afb8f2 fix: added btree_gist extension option 2025-01-24 11:01:48 +01:00
creyD
8c98e001f9 Adjusted files for isort & autopep 2025-01-24 07:49:05 +00:00
959a746e4f fix: fixed issue with new date format 2025-01-24 08:48:35 +01:00
4f6c066242 feat: added unittest basecase 2025-01-23 11:12:25 +01:00
creyD
da66e116c3 Adjusted files for isort & autopep 2025-01-21 21:50:45 +00:00
c09df1341f fix: fixed migration issue 2025-01-21 22:50:15 +01:00
88000f9cf4 fix: updated mail 2025-01-21 22:20:16 +01:00
92a33489ac fix: Updated author_email 2025-01-21 22:19:51 +01:00
9da4cbcb8e feat: added auth0 testing helpers 2025-01-21 22:17:29 +01:00
52307f6028 fix: fixed deprecation warning 2025-01-21 22:17:08 +01:00
8019b566f2 fix: added string method for base model 2025-01-21 22:16:07 +01:00
renovate[bot]
10c1ea5411 feat(deps): update dependency stripe to v11 2025-01-21 21:13:13 +00:00
83726f517c feat: added stripe service 2025-01-21 22:12:03 +01:00
abe84bcfcb Merge pull request #22 from creyD/dev
Major Version 3.0.0
2025-01-21 12:15:43 +01:00
vikynoah
2d6de99585 fix: post_file method change for testing (#29)
* fix: post_file method change for testing

* changes
2025-01-16 09:35:23 +01:00
vikynoah
573f59349f fix: changes to post method in testing_async (#28) 2025-01-08 19:37:10 +01:00
creyD
32bf089456 Adjusted files for isort & autopep 2025-01-02 22:20:49 +00:00
vikynoah
d75fede3d1 fix: Force postgresql SSL mode (#27)
* fix: Force postgresql SSL mode

* changes
2025-01-02 23:20:17 +01:00
creyD
f8b781b3e7 Adjusted files for isort & autopep 2024-12-11 16:15:33 +00:00
vikynoah
93c7f6f6cb fix: Async Testing (#26)
* fix: httpx fix as per latest version

* fix: Fix Async Testing client
2024-12-11 17:14:59 +01:00
creyD
2e44453915 Adjusted files for isort & autopep 2024-12-09 15:29:15 +00:00
vikynoah
2a22471de9 fix: httpx fix as per latest version (#25) 2024-12-09 16:28:44 +01:00
2176b1a37d fix: bumped security risks and enabled newer packages installed 2024-12-04 20:05:19 +01:00
5daddf260e fix: added timeouts to the requests to fix Bandit issue 2024-11-25 13:20:17 +01:00
19 changed files with 382 additions and 40 deletions

View File

@@ -5,7 +5,9 @@ from sqlalchemy.orm import sessionmaker
from .common import SQLALCHEMY_DATABASE_URL, name from .common import SQLALCHEMY_DATABASE_URL, name
async_engine = create_async_engine(SQLALCHEMY_DATABASE_URL + name, pool_pre_ping=True) async_engine = create_async_engine(
SQLALCHEMY_DATABASE_URL + name, pool_pre_ping=True, connect_args={"sslmode": "require"}
)
AsyncSessionLocal = sessionmaker( AsyncSessionLocal = sessionmaker(
bind=async_engine, bind=async_engine,

View File

@@ -6,7 +6,9 @@ from sqlalchemy.orm.session import Session
from .common import SQLALCHEMY_DATABASE_URL, name from .common import SQLALCHEMY_DATABASE_URL, name
engine = create_engine(SQLALCHEMY_DATABASE_URL + name, pool_pre_ping=True) engine = create_engine(
SQLALCHEMY_DATABASE_URL + name, pool_pre_ping=True, connect_args={"sslmode": "require"}
)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

View File

@@ -19,10 +19,21 @@ class Base:
__name__: str __name__: str
# TODO: Add default representation string
# TODO: Add automated foreign key resolution # TODO: Add automated foreign key resolution
# Generate __tablename__ automatically # Generate __tablename__ automatically
@declared_attr @declared_attr
def __tablename__(cls) -> str: def __tablename__(cls) -> str:
return cls.__name__.lower() return cls.__name__.lower()
def __str__(self) -> str:
# if the object has a name, title or similar attribute, return it
if hasattr(self, "name"):
return str(self.name) # type: ignore
# if the object has a title attribute, return it
if hasattr(self, "title"):
return str(self.title) # type: ignore
# otherwise return the object's id
return str(self.id)

View File

@@ -1,27 +1,26 @@
from contextlib import suppress
from math import ceil from math import ceil
from typing import Any, Generic, Optional, Self, Sequence, TypeVar, Union, overload from typing import Any, Generic, Optional, Self, Sequence, TypeVar, Union, overload
from contextlib import suppress
from pydantic import BaseModel from fastapi import Query
from fastapi_pagination import Params from fastapi_pagination.api import apply_items_transformer, create_page
from fastapi_pagination.bases import AbstractPage, AbstractParams from fastapi_pagination.bases import AbstractPage, AbstractParams, RawParams
from fastapi_pagination.ext.sqlalchemy import create_paginate_query
from fastapi_pagination.types import ( from fastapi_pagination.types import (
AdditionalData,
AsyncItemsTransformer,
GreaterEqualOne, GreaterEqualOne,
GreaterEqualZero, GreaterEqualZero,
AdditionalData,
SyncItemsTransformer,
AsyncItemsTransformer,
ItemsTransformer, ItemsTransformer,
SyncItemsTransformer,
) )
from fastapi_pagination.api import create_page, apply_items_transformer
from fastapi_pagination.utils import verify_params from fastapi_pagination.utils import verify_params
from fastapi_pagination.ext.sqlalchemy import create_paginate_query from pydantic import BaseModel
from fastapi_pagination.bases import AbstractParams, RawParams
from pydantic.json_schema import SkipJsonSchema from pydantic.json_schema import SkipJsonSchema
from sqlalchemy.sql.selectable import Select from sqlalchemy import func, select
from sqlalchemy.orm.session import Session
from sqlalchemy import select, func
from sqlalchemy.ext.asyncio import AsyncSession, async_scoped_session from sqlalchemy.ext.asyncio import AsyncSession, async_scoped_session
from fastapi import Query from sqlalchemy.orm.session import Session
from sqlalchemy.sql.selectable import Select
from sqlalchemy.util import await_only, greenlet_spawn from sqlalchemy.util import await_only, greenlet_spawn
T = TypeVar("T") T = TypeVar("T")
@@ -29,7 +28,7 @@ T = TypeVar("T")
class PaginationParams(BaseModel, AbstractParams): class PaginationParams(BaseModel, AbstractParams):
page: int = Query(1, ge=1, description="Page number") page: int = Query(1, ge=1, description="Page number")
size: int = Query(50, ge=1, le=100, description="Page size") size: int = Query(50, ge=1, description="Page size")
pagination: bool = Query(True, description="Toggle pagination") pagination: bool = Query(True, description="Toggle pagination")
def to_raw_params(self) -> RawParams: def to_raw_params(self) -> RawParams:
@@ -62,7 +61,7 @@ class Page(AbstractPage[T], Generic[T]):
total: Optional[int] = None, total: Optional[int] = None,
**kwargs: Any, **kwargs: Any,
) -> Self: ) -> Self:
if not isinstance(params, Params): if not isinstance(params, PaginationParams):
raise TypeError("Page should be used with Params") raise TypeError("Page should be used with Params")
size = params.size or total or len(items) size = params.size or total or len(items)
@@ -170,9 +169,9 @@ def _paginate(
total = connection.scalar(count_query) total = connection.scalar(count_query)
if params.pagination is False and total > 0: if params.pagination is False and total > 0:
params = Params(page=1, size=total) params = PaginationParams(page=1, size=total)
else: else:
params = Params(page=params.page, size=params.size) params = PaginationParams(page=params.page, size=params.size)
query = create_paginate_query(query, params) query = create_paginate_query(query, params)
items = connection.execute(query).all() items = connection.execute(query).all()

View File

@@ -41,7 +41,7 @@ class GenericClient(TestClient):
re = self.c.post( re = self.c.post(
url, url,
files={"file": file}, files={"file": file},
headers=self.default_headers | {"Content-Type": "application/json"}, headers=self.default_headers,
*args, *args,
**kwargs, **kwargs,
) )

View File

@@ -1,11 +1,14 @@
import json import json
from httpx import AsyncClient
from httpx import ASGITransport, AsyncClient
class AsyncGenericClient: class AsyncGenericClient:
def __init__(self, app): def __init__(self, app, headers={}):
self.c = AsyncClient(app=app, base_url="http://testserver", follow_redirects=True) self.c = AsyncClient(
self.default_headers = {} transport=ASGITransport(app=app), base_url="http://testserver", follow_redirects=True
)
self.default_headers = headers
async def get(self, url: str, r_code: int = 200, parse_json=True): async def get(self, url: str, r_code: int = 200, parse_json=True):
re = await self.c.get(url, headers=self.default_headers) re = await self.c.get(url, headers=self.default_headers)
@@ -33,7 +36,8 @@ class AsyncGenericClient:
) )
if re.status_code != r_code: if re.status_code != r_code:
print(re.content) print(re.content)
assert r_code == re.status_code if not raw_response:
assert r_code == re.status_code
return re.json() if not raw_response else re return re.json() if not raw_response else re
async def post_file( async def post_file(
@@ -42,7 +46,7 @@ class AsyncGenericClient:
re = await self.c.post( re = await self.c.post(
url, url,
files={"file": file}, files={"file": file},
headers=self.default_headers | {"Content-Type": "application/json"}, headers=self.default_headers,
*args, *args,
**kwargs, **kwargs,
) )

View File

@@ -0,0 +1,183 @@
import json
import unittest
from typing import Type
from httpx import ASGITransport, AsyncClient, Response
from sqlalchemy import create_engine, text
from sqlalchemy.ext.asyncio import create_async_engine
from sqlalchemy_utils import create_database, database_exists, drop_database
from creyPY.fastapi.models.base import Base
class AbstractTestAPI(unittest.IsolatedAsyncioTestCase):
    """Base class for async API tests.

    Wraps an in-process httpx client around an ASGI app and provides request
    helpers (``get``/``post``/``patch``/``put``/``delete``/``post_file``) that
    assert on the response status code, plus :meth:`obj_lifecycle` for a
    standard CRUD round-trip against a REST endpoint.
    """

    client: AsyncClient
    default_headers: dict = {}

    @classmethod
    def setUpClass(cls, app, headers: dict | None = None) -> None:
        """Create the shared in-process test client for *app*.

        ``headers`` (e.g. auth tokens) are sent with every request. The
        default is ``None`` instead of ``{}`` so the default dict is not
        shared (and mutated) across test classes — mutable-default pitfall.
        """
        cls.client = AsyncClient(
            transport=ASGITransport(app=app), base_url="http://testserver", follow_redirects=True
        )
        cls.default_headers = {} if headers is None else headers

    @classmethod
    def setup_database(
        cls, sync_db_url: str, async_db_url: str, base: Type[Base], btree_gist: bool = False
    ):
        """Recreate the test database and create all tables of *base*.

        Drops any pre-existing database first, optionally installs the
        PostgreSQL ``btree_gist`` extension, then keeps a sync engine
        (``engine_s``) and an async engine (``engine``) on the class; both
        force ``sslmode=require``.
        """
        cls.engine_s = create_engine(
            sync_db_url,
            echo=False,
            pool_pre_ping=True,
            connect_args={"sslmode": "require"},
        )
        # Start every test run from a clean slate.
        if database_exists(cls.engine_s.url):
            drop_database(cls.engine_s.url)
        create_database(cls.engine_s.url)
        if btree_gist:
            with cls.engine_s.begin() as conn:
                conn.execute(text("CREATE EXTENSION IF NOT EXISTS btree_gist"))
        # Migrate
        base.metadata.create_all(cls.engine_s)
        cls.engine = create_async_engine(
            async_db_url,
            echo=False,
            pool_pre_ping=True,
            connect_args={"sslmode": "require"},
        )

    async def get(self, url: str, r_code: int = 200, parse_json=True) -> dict | bytes:
        """GET *url*, assert status ``r_code``, return parsed JSON or raw bytes."""
        response = await self.client.get(url, headers=self.default_headers)
        if response.status_code != r_code:
            # Dump the body so the failing assertion below is debuggable.
            print(response.content)
        self.assertEqual(r_code, response.status_code)
        return response.json() if parse_json else response.content

    async def delete(self, url: str, r_code: int = 204) -> dict | None:
        """DELETE *url* and assert status ``r_code``.

        The body is parsed only when a non-204 status was *expected*:
        a 204 response carries no content.
        """
        response = await self.client.delete(url, headers=self.default_headers)
        if response.status_code != r_code:
            print(response.content)
        self.assertEqual(r_code, response.status_code)
        return response.json() if r_code != 204 else None

    async def post(
        self,
        url: str,
        obj: dict | str | None = None,
        r_code: int = 201,
        raw_response=False,
        *args,
        **kwargs,
    ) -> dict | bytes | Response:
        """POST *obj* as JSON to *url*.

        With ``raw_response=True`` the status assertion is skipped and the
        raw ``Response`` is returned, so callers can inspect error replies.
        """
        if obj is None:  # mutable-default fix; behaves like the old ``obj={}``
            obj = {}
        response = await self.client.post(
            url,
            data=json.dumps(obj) if isinstance(obj, dict) else obj,
            headers=self.default_headers | {"Content-Type": "application/json"},
            *args,
            **kwargs,
        )
        if response.status_code != r_code:
            print(response.content)
        if not raw_response:
            self.assertEqual(r_code, response.status_code)
        return response.json() if not raw_response else response

    async def post_file(
        self, url: str, file, r_code: int = 201, raw_response=False, *args, **kwargs
    ) -> dict | bytes | Response:
        """POST a multipart file upload; the Content-Type is left to httpx."""
        response = await self.client.post(
            url,
            files={"file": file},
            headers=self.default_headers,
            *args,
            **kwargs,
        )
        if response.status_code != r_code:
            print(response.content)
        # NOTE(review): unlike ``post``, the status is asserted even when
        # raw_response=True — confirm whether this asymmetry is intended.
        self.assertEqual(r_code, response.status_code)
        return response.json() if not raw_response else response

    async def patch(
        self,
        url: str,
        obj: dict | str | None = None,
        r_code: int = 200,
        raw_response=False,
        *args,
        **kwargs,
    ) -> dict | bytes | Response:
        """PATCH *obj* as JSON to *url* and assert status ``r_code``."""
        if obj is None:
            obj = {}
        response = await self.client.patch(
            url,
            data=json.dumps(obj) if isinstance(obj, dict) else obj,
            headers=self.default_headers | {"Content-Type": "application/json"},
            *args,
            **kwargs,
        )
        if response.status_code != r_code:
            print(response.content)
        self.assertEqual(r_code, response.status_code)
        return response.json() if not raw_response else response

    async def put(
        self,
        url: str,
        obj: dict | str | None = None,
        r_code: int = 200,
        raw_response=False,
        *args,
        **kwargs,
    ) -> dict | bytes | Response:
        """PUT *obj* as JSON to *url* and assert status ``r_code``."""
        if obj is None:
            obj = {}
        response = await self.client.put(
            url,
            data=json.dumps(obj) if isinstance(obj, dict) else obj,
            headers=self.default_headers
            | {
                "Content-Type": "application/json",
                "accept": "application/json",
            },
            *args,
            **kwargs,
        )
        if response.status_code != r_code:
            print(response.content)
        self.assertEqual(r_code, response.status_code)
        return response.json() if not raw_response else response

    async def obj_lifecycle(
        self,
        input_obj: dict,
        url: str,
        pagination: bool = True,
        id_field: str = "id",
        created_at_check: bool = True,
    ):
        """Run a full create/read/delete round-trip against a CRUD endpoint.

        Asserts the listing starts empty, creates *input_obj*, reads it back,
        sees it in the listing, deletes it, and confirms it is gone (404).
        ``pagination`` toggles between paginated (``total``/``results``)
        and plain-list responses.
        """
        # GET LIST — must start empty
        listing = await self.get(url)
        if pagination:
            self.assertEqual(listing["total"], 0)
            self.assertEqual(len(listing["results"]), 0)
        else:
            self.assertEqual(len(listing), 0)
        # CREATE
        created = await self.post(url, obj=input_obj)
        self.assertIn(id_field, created)
        self.assertIsNotNone(created[id_field])
        if created_at_check:
            self.assertIn("created_at", created)
            self.assertIsNotNone(created["created_at"])
        obj_id = str(created[id_field])
        # GET the single object
        fetched = await self.get(f"{url}{obj_id}/")
        self.assertEqual(fetched[id_field], obj_id)
        # GET LIST — now exactly one entry
        listing = await self.get(url)
        if pagination:
            self.assertEqual(listing["total"], 1)
            self.assertEqual(len(listing["results"]), 1)
        else:
            self.assertEqual(len(listing), 1)
        # DELETE (no trailing slash — relies on follow_redirects=True)
        await self.delete(f"{url}{obj_id}")
        # GET LIST — empty again
        listing = await self.get(url)
        if pagination:
            self.assertEqual(listing["total"], 0)
            self.assertEqual(len(listing["results"]), 0)
        else:
            self.assertEqual(len(listing), 0)
        # GET — the deleted object must 404
        await self.get(f"{url}{obj_id}", parse_json=False, r_code=404)

View File

@@ -1 +1,2 @@
from .auth0 import * # noqa from .auth0 import * # noqa
from .stripe import * # noqa

View File

@@ -1,3 +1,4 @@
from .exceptions import * # noqa from .exceptions import * # noqa
from .manage import * # noqa from .manage import * # noqa
from .testing import * # noqa
from .utils import * # noqa from .utils import * # noqa

View File

@@ -8,7 +8,7 @@ cache = TTLCache(maxsize=100, ttl=600)
@cached(cache) @cached(cache)
def get_management_token() -> str: def get_management_token() -> str:
re = requests.post( response = requests.post(
f"https://{AUTH0_DOMAIN}/oauth/token", f"https://{AUTH0_DOMAIN}/oauth/token",
json={ json={
"client_id": AUTH0_CLIENT_ID, "client_id": AUTH0_CLIENT_ID,
@@ -16,5 +16,6 @@ def get_management_token() -> str:
"audience": f"https://{AUTH0_DOMAIN}/api/v2/", # This should be the management audience "audience": f"https://{AUTH0_DOMAIN}/api/v2/", # This should be the management audience
"grant_type": "client_credentials", "grant_type": "client_credentials",
}, },
timeout=5, # Add a timeout parameter to avoid hanging requests
).json() ).json()
return re["access_token"] return response["access_token"]

View File

@@ -0,0 +1,93 @@
# In-memory stand-ins for the Auth0 management API, keyed by the user's
# ``sub`` claim.  The state is module-level and mutable on purpose so tests
# can observe updates across calls.
USER_OBJ = {
    "auth0|testing": {
        "created_at": "2023-08-15T13:25:31.507Z",
        "email": "test@test.org",
        "email_verified": True,
        "identities": [
            {
                "connection": "Username-Password-Authentication",
                "provider": "auth0",
                "user_id": "testing",
                "isSocial": False,
            }
        ],
        "name": "Test Tester",
        "nickname": "testing",
        "picture": "https://avatars.githubusercontent.com/u/15138480?v=4",
        "updated_at": "2024-01-17T12:36:37.300Z",
        "user_id": "auth0|testing",
        "user_metadata": {},
        "last_password_reset": "2024-01-17T11:42:08.761Z",
        "last_ip": "127.0.0.1",
        "last_login": "2024-01-17T11:43:09.620Z",
        "logins_count": 1,
    },
    "auth0|new_user": {
        "created_at": "2023-08-15T13:25:31.507Z",
        "email": "test2@test.org",
        "email_verified": True,
        "identities": [
            {
                "connection": "Username-Password-Authentication",
                "provider": "auth0",
                "user_id": "testing",
                "isSocial": False,
            }
        ],
        "name": "Test Tester 2",
        "nickname": "testing 2",
        "picture": "https://avatars.githubusercontent.com/u/15138481?v=4",
        "updated_at": "2024-01-17T12:36:37.303Z",
        "user_id": "auth0|new_user",
        "user_metadata": {},
        "last_password_reset": "2024-01-17T11:42:08.759Z",
        "last_ip": "127.0.0.1",
        "last_login": "2024-01-17T11:43:09.618Z",
        "logins_count": 1,
    },
}


def get_user_auth0(sub, *args, **kwargs) -> dict:
    """Return the stored user record for *sub* (KeyError if unknown)."""
    return USER_OBJ[sub]


def get_user_auth0_metadata(sub, *args, **kwargs) -> dict:
    """Return only the ``user_metadata`` portion of the record."""
    return USER_OBJ[sub]["user_metadata"]


def patch_user_auth0(input_obj: dict, sub, *args, **kwargs) -> dict:
    """Merge *input_obj* into the stored record and return the record."""
    record = USER_OBJ[sub]
    record.update(input_obj)
    return record


def patch_user_auth0_metadata(input_obj: dict, sub, *args, **kwargs) -> dict:
    """Merge *input_obj* into the stored ``user_metadata`` and return it."""
    metadata = USER_OBJ[sub]["user_metadata"]
    metadata.update(input_obj)
    return metadata


def check_company_auth0(*args, **kwargs) -> bool:
    """Company membership checks always pass in tests."""
    return True


def auth0_sub_to_public(sub: str) -> dict:
    """Public view of a user: email, name and picture."""
    user = USER_OBJ[sub]
    return {key: user[key] for key in ("email", "name", "picture")}


def auth0_sub_to_profile(sub: str) -> dict:
    """Public view plus the user's company ids.

    Raises KeyError unless ``company_ids`` has been populated for *sub*
    (e.g. via :func:`set_company_id`).
    """
    profile = auth0_sub_to_public(sub)
    profile["company_ids"] = USER_OBJ[sub]["user_metadata"]["company_ids"]
    return profile


def set_company_id(sub: str, company_id: str):
    """Make *company_id* the sole company of *sub*, creating the user if needed."""
    USER_OBJ.setdefault(sub, {})["user_metadata"] = {"company_ids": [company_id]}

View File

@@ -54,6 +54,7 @@ def get_user(sub) -> dict:
re = requests.get( re = requests.get(
f"https://{AUTH0_DOMAIN}/api/v2/users/{sub}", f"https://{AUTH0_DOMAIN}/api/v2/users/{sub}",
headers={"Authorization": f"Bearer {get_management_token()}"}, headers={"Authorization": f"Bearer {get_management_token()}"},
timeout=5,
) )
if re.status_code != 200: if re.status_code != 200:
raise HTTPException(re.status_code, re.json()) raise HTTPException(re.status_code, re.json())
@@ -65,6 +66,7 @@ def patch_user(input_obj: dict, sub) -> dict:
f"https://{AUTH0_DOMAIN}/api/v2/users/{sub}", f"https://{AUTH0_DOMAIN}/api/v2/users/{sub}",
headers={"Authorization": f"Bearer {get_management_token()}"}, headers={"Authorization": f"Bearer {get_management_token()}"},
json=input_obj, json=input_obj,
timeout=5,
) )
if re.status_code != 200: if re.status_code != 200:
raise HTTPException(re.status_code, re.json()) raise HTTPException(re.status_code, re.json())
@@ -92,6 +94,7 @@ def request_verification_mail(sub: str) -> None:
f"https://{AUTH0_DOMAIN}/api/v2/jobs/verification-email", f"https://{AUTH0_DOMAIN}/api/v2/jobs/verification-email",
headers={"Authorization": f"Bearer {get_management_token()}"}, headers={"Authorization": f"Bearer {get_management_token()}"},
json={"user_id": sub}, json={"user_id": sub},
timeout=5,
) )
if re.status_code != 201: if re.status_code != 201:
raise HTTPException(re.status_code, re.json()) raise HTTPException(re.status_code, re.json())
@@ -109,6 +112,7 @@ def create_user_invite(email: str) -> dict:
"verify_email": False, "verify_email": False,
"app_metadata": {"invitedToMyApp": True}, "app_metadata": {"invitedToMyApp": True},
}, },
timeout=5,
) )
if re.status_code != 201: if re.status_code != 201:
raise HTTPException(re.status_code, re.json()) raise HTTPException(re.status_code, re.json())
@@ -124,6 +128,7 @@ def password_change_mail(email: str) -> bool:
"email": email, "email": email,
"connection": "Username-Password-Authentication", "connection": "Username-Password-Authentication",
}, },
timeout=5,
) )
if re.status_code != 200: if re.status_code != 200:

View File

@@ -0,0 +1 @@
from .testing import * # noqa

View File

@@ -0,0 +1,23 @@
class ItemReturn:
    """Stub subscription-item result; tests only ever read ``quantity``."""
    quantity = 1
class SubscriptionItem:
    """Stub of Stripe's SubscriptionItem API surface used in tests."""
    def retrieve(self, id: str = ""):
        # Returns the ItemReturn class itself (not an instance); callers only
        # read its class attribute ``quantity``.
        return ItemReturn
    def modify(self, id: str, quantity: int):
        # ``quantity`` is ignored — the stub always reports quantity == 1.
        return ItemReturn
class StripeAPI:
    """Stub replacement for the configured ``stripe`` module object."""
    def __init__(self, key: str):
        # The API key is irrelevant for the stub; accepted for signature parity.
        pass
    @property
    def SubscriptionItem(self):
        # Mirrors ``stripe.SubscriptionItem`` by exposing the stub class.
        return SubscriptionItem
def get_stripe_api():
    """Return a stubbed Stripe API object (testing counterpart of the real factory)."""
    return StripeAPI("test")

View File

@@ -0,0 +1,11 @@
import os
import stripe
from dotenv import load_dotenv
load_dotenv()
def get_stripe_api():
    """Configure the ``stripe`` module with ``STRIPE_API_KEY`` from the environment and return it."""
    stripe.api_key = os.environ.get("STRIPE_API_KEY", "")
    return stripe

View File

@@ -1,7 +1,7 @@
cachetools==5.5.0 # for caching cachetools>=5.5.0 # for caching
charset-normalizer==3.4.0 # Auth0 API interactions charset-normalizer>=3.4.0 # Auth0 API interactions
requests==2.32.3 # Auth0 API interactions requests>=2.32.3 # Auth0 API interactions
pyjwt==2.10.0 # Auth0 API interactions pyjwt>=2.10.1 # Auth0 API interactions
cffi==1.17.1 # Auth0 API interactions cffi>=1.17.1 # Auth0 API interactions
cryptography==43.0.3 # Auth0 API interactions cryptography>=43.0.3 # Auth0 API interactions
pycparser==2.22 # Auth0 API interactions pycparser>=2.22 # Auth0 API interactions

1
requirements.stripe.txt Normal file
View File

@@ -0,0 +1 @@
stripe==11.4.1 # Stripe

View File

@@ -11,7 +11,7 @@ starlette>=0.37.2 # FastAPI
fastapi-pagination>=0.12.26 # Pagination fastapi-pagination>=0.12.26 # Pagination
sqlalchemy>=2.0.31 # SQLAlchemy sqlalchemy>=2.0.31 # SQLAlchemy
sqlalchemy-utils==0.41.2 # For managing databases sqlalchemy-utils>=0.41.2 # For managing databases
python-dotenv>=1.0.1 # Environment variables python-dotenv>=1.0.1 # Environment variables

View File

@@ -14,6 +14,9 @@ with open("requirements.pg.txt") as f:
with open("requirements.auth0.txt") as f: with open("requirements.auth0.txt") as f:
auth0_requirements = f.read().splitlines() auth0_requirements = f.read().splitlines()
with open("requirements.stripe.txt") as f:
stripe_requirements = f.read().splitlines()
def get_latest_git_tag() -> str: def get_latest_git_tag() -> str:
try: try:
@@ -36,7 +39,7 @@ setup(
long_description=open("README.md").read(), long_description=open("README.md").read(),
long_description_content_type="text/markdown", long_description_content_type="text/markdown",
author="Conrad Großer", author="Conrad Großer",
author_email="conrad@noah.tech", author_email="code@grosser.group",
packages=find_packages(), packages=find_packages(),
url="https://github.com/creyD/creyPY", url="https://github.com/creyD/creyPY",
license="MIT", license="MIT",
@@ -46,7 +49,8 @@ setup(
"build": build_requirements, "build": build_requirements,
"postgres": pg_requirements, "postgres": pg_requirements,
"auth0": auth0_requirements, "auth0": auth0_requirements,
"all": build_requirements + pg_requirements + auth0_requirements, "stripe": stripe_requirements,
"all": build_requirements + pg_requirements + auth0_requirements + stripe_requirements,
}, },
keywords=[ keywords=[
"creyPY", "creyPY",