mirror of
https://github.com/creyD/creyPY.git
synced 2026-04-12 19:30:30 +02:00
Compare commits
103 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1f5ba9210f | ||
|
|
f805b3f508 | ||
|
|
8a882cdaae | ||
|
|
40176aa3e9 | ||
|
|
be66bbebbf | ||
|
|
79dde8008a | ||
|
|
adb017d6ce | ||
|
|
e160cc5fea | ||
|
|
7afb8e2fd8 | ||
|
|
badf2b157f | ||
|
|
c903266ec4 | ||
|
|
910638e3a6 | ||
|
|
83dca59817 | ||
|
|
b80d26586d | ||
|
|
c2e2469027 | ||
|
|
8363055984 | ||
|
|
f586ce5c03 | ||
|
|
0af8f05edf | ||
|
|
b73c524e8d | ||
|
|
be260b0ee6 | ||
|
|
b0f2815568 | ||
|
|
6ac609f3f4 | ||
|
|
53ed939451 | ||
| c56d14c2fd | |||
|
|
1e9bcb92b6 | ||
| 5e16bd5cbc | |||
|
|
50b444be89 | ||
| e12c86e352 | |||
|
|
0708a48301 | ||
| 34595d52f2 | |||
|
|
421725ad10 | ||
| 31c4cbb055 | |||
| 410ae12f8e | |||
| 1f224c44bc | |||
| 5b0cc0d87d | |||
| ecfc0fc167 | |||
| eb62c87679 | |||
| 2ad7700f72 | |||
| 1d7b767623 | |||
|
|
f1f29e84c2 | ||
| dcb9afb8f2 | |||
|
|
8c98e001f9 | ||
| 959a746e4f | |||
| 4f6c066242 | |||
|
|
da66e116c3 | ||
| c09df1341f | |||
| 88000f9cf4 | |||
| 92a33489ac | |||
| 9da4cbcb8e | |||
| 52307f6028 | |||
| 8019b566f2 | |||
|
|
10c1ea5411 | ||
| 83726f517c | |||
| abe84bcfcb | |||
|
|
2d6de99585 | ||
|
|
573f59349f | ||
|
|
32bf089456 | ||
|
|
d75fede3d1 | ||
|
|
f8b781b3e7 | ||
|
|
93c7f6f6cb | ||
|
|
2e44453915 | ||
|
|
2a22471de9 | ||
| 2176b1a37d | |||
| 5daddf260e | |||
| 364e07daa1 | |||
| 5daf6eb8c5 | |||
| dfb0588d1c | |||
| 3251afdb90 | |||
| 85fe263da4 | |||
| 0be70deb00 | |||
| 0418c75e19 | |||
| 2444269486 | |||
|
|
33bdeb12a0 | ||
| 5efed5399b | |||
| 7dbce117c8 | |||
| 481bfcfffd | |||
| 90c9d2dc09 | |||
|
|
8b037fbeb5 | ||
| b86b58f3e4 | |||
|
|
17f96c920d | ||
|
|
523241ac4b | ||
|
|
6f09c2ef4c | ||
|
|
9bba5b0a4e | ||
|
|
50031556f9 | ||
|
|
2940ddbdcd | ||
| 807af12fa1 | |||
|
|
dce897c247 | ||
|
|
89997372ef | ||
| c8c5977978 | |||
| 974bc591d6 | |||
| eb895398ab | |||
| 867abd7054 | |||
| 26e18f6b31 | |||
| 8a3a60dbb0 | |||
| e52a5f421b | |||
| a6ded91185 | |||
| eb64874c47 | |||
| b7200852a4 | |||
| 3d18205205 | |||
| 99c84b676c | |||
| 6806de23b3 | |||
| 6a93ab05a3 | |||
|
|
c5b2ab9932 |
17
.github/workflows/ci.yml
vendored
17
.github/workflows/ci.yml
vendored
@@ -12,7 +12,6 @@ on:
|
||||
- "**/CHANGELOG.md"
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
- dev
|
||||
workflow_dispatch:
|
||||
|
||||
@@ -39,14 +38,17 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.12'
|
||||
python-version: '3.13'
|
||||
- run: python -m pip install --upgrade pip
|
||||
- run: python -m pip install -r requirements.txt
|
||||
- run: |
|
||||
python -m pip install -r requirements.txt
|
||||
python -m pip install -r requirements.pg.txt
|
||||
python -m pip install -r requirements.auth0.txt
|
||||
- run: python test.py
|
||||
|
||||
tag_and_publish:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.head_ref == 'master' || github.head_ref == 'dev'
|
||||
if: (github.ref_name == 'master' || github.ref_name == 'dev') && github.event_name == 'push'
|
||||
needs: test
|
||||
permissions:
|
||||
id-token: write # IMPORTANT: this permission is mandatory for trusted publishing
|
||||
@@ -56,7 +58,7 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-tags: true
|
||||
ref: ${{ github.head_ref }}
|
||||
ref: ${{ github.ref_name }}
|
||||
fetch-depth: 0
|
||||
|
||||
- name: setup git
|
||||
@@ -67,7 +69,7 @@ jobs:
|
||||
- name: set version format
|
||||
id: version_format
|
||||
run: |
|
||||
if [[ ${{ github.head_ref }} == 'master' ]]; then
|
||||
if [[ ${{ github.ref_name }} == 'master' ]]; then
|
||||
echo "version_format=\${major}.\${minor}.\${patch}" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "version_format=\${major}.\${minor}.\${patch}rc\${increment}" >> $GITHUB_OUTPUT
|
||||
@@ -84,7 +86,6 @@ jobs:
|
||||
version_format: ${{ steps.version_format.outputs.version_format }}
|
||||
|
||||
- name: Create & Push Tag
|
||||
if: github.head_ref == 'master' || github.head_ref == 'dev'
|
||||
run: |
|
||||
git tag ${{ steps.git_version.outputs.version }}
|
||||
git push origin ${{ steps.git_version.outputs.version }}
|
||||
@@ -92,7 +93,7 @@ jobs:
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.12'
|
||||
python-version: '3.13'
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
|
||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -158,3 +158,5 @@ cython_debug/
|
||||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||
#.idea/
|
||||
|
||||
.DS_*
|
||||
|
||||
@@ -55,8 +55,3 @@ from creyPY.const import LanguageEnum
|
||||
print(LanguageEnum.EN) # Output: LanguageEnum.EN
|
||||
print(LanguageEnum.EN.value) # Output: English
|
||||
```
|
||||
|
||||
## TODO
|
||||
|
||||
- Add async support for database connection
|
||||
- Add version without postgresql dependency
|
||||
|
||||
@@ -1,8 +1,11 @@
|
||||
from typing import Type, TypeVar
|
||||
import asyncio
|
||||
from typing import List, Type, TypeVar, overload
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import HTTPException
|
||||
from pydantic import BaseModel
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.future import select
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from .models.base import Base
|
||||
@@ -10,54 +13,233 @@ from .models.base import Base
|
||||
T = TypeVar("T", bound=Base)
|
||||
|
||||
|
||||
@overload
|
||||
async def get_object_or_404(
|
||||
db_class: Type[T],
|
||||
id: UUID | str,
|
||||
db: AsyncSession,
|
||||
expunge: bool = False,
|
||||
lookup_column: str = "id",
|
||||
response_fields: List[str] = [],
|
||||
) -> T:
|
||||
pass
|
||||
|
||||
|
||||
@overload
|
||||
def get_object_or_404(
|
||||
db_class: Type[T], id: UUID | str, db: Session, expunge: bool = False, lookup_column: str = "id"
|
||||
db_class: Type[T],
|
||||
id: UUID | str,
|
||||
db: Session,
|
||||
expunge: bool = False,
|
||||
lookup_column: str = "id",
|
||||
response_fields: List[str] = [],
|
||||
) -> T:
|
||||
obj = db.query(db_class).filter(getattr(db_class, lookup_column) == id).one_or_none()
|
||||
if obj is None:
|
||||
raise HTTPException(status_code=404, detail="The object does not exist.")
|
||||
if expunge:
|
||||
db.expunge(obj)
|
||||
return obj
|
||||
pass
|
||||
|
||||
|
||||
def get_object_or_404(
|
||||
db_class: Type[T],
|
||||
id: UUID | str,
|
||||
db: Session | AsyncSession,
|
||||
expunge: bool = False,
|
||||
lookup_column: str = "id",
|
||||
response_fields: List[str] = [],
|
||||
) -> T:
|
||||
|
||||
async def _get_async_object() -> T:
|
||||
if response_fields:
|
||||
selected_columns = [
|
||||
getattr(db_class, field) for field in response_fields if hasattr(db_class, field)
|
||||
]
|
||||
query = select(*selected_columns).where(getattr(db_class, lookup_column) == id)
|
||||
result = await db.execute(query)
|
||||
row = result.first()
|
||||
|
||||
if row is None:
|
||||
raise HTTPException(status_code=404, detail="The object does not exist.")
|
||||
if hasattr(row, "_mapping"):
|
||||
obj_dict = dict(row._mapping)
|
||||
else:
|
||||
obj_dict = {column.key: getattr(row, column.key) for column in selected_columns}
|
||||
else:
|
||||
query = select(db_class).where(getattr(db_class, lookup_column) == id)
|
||||
result = await db.execute(query)
|
||||
row = result.scalar_one_or_none()
|
||||
if row is None:
|
||||
raise HTTPException(status_code=404, detail="The object does not exist.")
|
||||
obj_dict = row
|
||||
if expunge:
|
||||
await db.expunge(obj_dict)
|
||||
return obj_dict
|
||||
|
||||
def _get_sync_object() -> T:
|
||||
if response_fields:
|
||||
selected_columns = [
|
||||
getattr(db_class, field) for field in response_fields if hasattr(db_class, field)
|
||||
]
|
||||
query = db.query(*selected_columns).filter(getattr(db_class, lookup_column) == id)
|
||||
else:
|
||||
query = db.query(db_class).filter(getattr(db_class, lookup_column) == id)
|
||||
obj = query.one_or_none()
|
||||
if obj is None:
|
||||
raise HTTPException(status_code=404, detail="The object does not exist.") # type: ignore
|
||||
if expunge:
|
||||
db.expunge(obj)
|
||||
return obj
|
||||
|
||||
if isinstance(db, AsyncSession):
|
||||
return asyncio.ensure_future(_get_async_object()) # type: ignore
|
||||
elif isinstance(db, Session):
|
||||
return _get_sync_object()
|
||||
else:
|
||||
raise HTTPException(status_code=404, detail="Invalid session type. Expected Session or AsyncSession.") # type: ignore
|
||||
|
||||
|
||||
# TODO: Add testing
|
||||
@overload
|
||||
async def create_obj_from_data(
|
||||
data: BaseModel,
|
||||
model: Type[T],
|
||||
db: AsyncSession,
|
||||
additional_data: dict = {},
|
||||
exclude: dict = {},
|
||||
) -> T:
|
||||
pass
|
||||
|
||||
|
||||
@overload
|
||||
def create_obj_from_data(
|
||||
data: BaseModel, model: Type[T], db: Session, additional_data={}, exclude={}
|
||||
data: BaseModel, model: Type[T], db: Session, additional_data: dict = {}, exclude: dict = {}
|
||||
) -> T:
|
||||
obj = model(**data.model_dump(exclude=exclude) | additional_data)
|
||||
db.add(obj)
|
||||
db.commit()
|
||||
db.refresh(obj)
|
||||
return obj
|
||||
pass
|
||||
|
||||
|
||||
def create_obj_from_data(
|
||||
data: BaseModel, model: Type[T], db: Session | AsyncSession, additional_data={}, exclude={}
|
||||
) -> T:
|
||||
obj_data = data.model_dump(exclude=exclude) | additional_data
|
||||
obj = model(**obj_data)
|
||||
|
||||
async def _create_async_obj():
|
||||
db.add(obj)
|
||||
await db.commit()
|
||||
await db.refresh(obj)
|
||||
return obj
|
||||
|
||||
def _create_sync_obj():
|
||||
db.add(obj)
|
||||
db.commit()
|
||||
db.refresh(obj)
|
||||
return obj
|
||||
|
||||
if isinstance(db, AsyncSession):
|
||||
return asyncio.ensure_future(_create_async_obj()) # type: ignore
|
||||
elif isinstance(db, Session):
|
||||
return _create_sync_obj()
|
||||
else:
|
||||
raise HTTPException(status_code=404, detail="Invalid session type. Expected Session or AsyncSession.") # type: ignore
|
||||
|
||||
|
||||
# TODO: Add testing
|
||||
@overload
|
||||
async def update_obj_from_data(
|
||||
data: BaseModel,
|
||||
model: Type[T],
|
||||
id: UUID | str,
|
||||
db: AsyncSession,
|
||||
partial: bool = True,
|
||||
ignore_fields: list = [],
|
||||
additional_data: dict = {},
|
||||
exclude: dict = {},
|
||||
) -> T:
|
||||
pass
|
||||
|
||||
|
||||
@overload
|
||||
def update_obj_from_data(
|
||||
data: BaseModel,
|
||||
model: Type[T],
|
||||
id: UUID | str,
|
||||
db: Session,
|
||||
partial: bool = True,
|
||||
ignore_fields: list = [],
|
||||
additional_data: dict = {},
|
||||
exclude: dict = {},
|
||||
) -> T:
|
||||
pass
|
||||
|
||||
|
||||
def update_obj_from_data(
|
||||
data: BaseModel,
|
||||
model: Type[T],
|
||||
id: UUID | str,
|
||||
db: Session | AsyncSession,
|
||||
partial: bool = True,
|
||||
ignore_fields=[],
|
||||
additional_data={},
|
||||
exclude={},
|
||||
) -> T:
|
||||
obj = get_object_or_404(model, id, db)
|
||||
data_dict = data.model_dump(exclude_unset=partial, exclude=exclude)
|
||||
data_dict.update(additional_data) # merge additional_data into data_dict
|
||||
for field in data_dict:
|
||||
if field not in ignore_fields:
|
||||
setattr(obj, field, data_dict[field])
|
||||
db.commit()
|
||||
db.refresh(obj)
|
||||
return obj
|
||||
def _update_fields(obj: T):
|
||||
data_dict = data.model_dump(exclude_unset=partial, exclude=exclude)
|
||||
data_dict.update(additional_data)
|
||||
|
||||
for field in data_dict:
|
||||
if field not in ignore_fields:
|
||||
setattr(obj, field, data_dict[field])
|
||||
|
||||
async def _update_async_obj() -> T:
|
||||
obj = await get_object_or_404(model, id, db)
|
||||
_update_fields(obj)
|
||||
await db.commit()
|
||||
await db.refresh(obj)
|
||||
return obj
|
||||
|
||||
def _update_sync_obj() -> T:
|
||||
obj = get_object_or_404(model, id, db)
|
||||
_update_fields(obj)
|
||||
db.commit()
|
||||
db.refresh(obj)
|
||||
return obj
|
||||
|
||||
if isinstance(db, AsyncSession):
|
||||
return asyncio.ensure_future(_update_async_obj()) # type: ignore
|
||||
elif isinstance(db, Session):
|
||||
return _update_sync_obj()
|
||||
else:
|
||||
raise HTTPException(status_code=404, detail="Invalid session type. Expected Session or AsyncSession.") # type: ignore
|
||||
|
||||
|
||||
# TODO: Add testing
|
||||
@overload
|
||||
async def delete_object(db_class: Type[T], id: UUID | str, db: AsyncSession) -> None:
|
||||
pass
|
||||
|
||||
|
||||
@overload
|
||||
def delete_object(db_class: Type[T], id: UUID | str, db: Session) -> None:
|
||||
obj = db.query(db_class).filter(db_class.id == id).one_or_none()
|
||||
if obj is None:
|
||||
raise HTTPException(status_code=404, detail="The object does not exist.")
|
||||
db.delete(obj)
|
||||
db.commit()
|
||||
pass
|
||||
|
||||
|
||||
def delete_object(db_class: Type[T], id: UUID | str, db: Session | AsyncSession) -> None:
|
||||
async def _delete_async_obj() -> None:
|
||||
query = select(db_class).filter(db_class.id == id)
|
||||
result = await db.execute(query)
|
||||
obj = result.scalar_one_or_none()
|
||||
if obj is None:
|
||||
raise HTTPException(status_code=404, detail="The object does not exist.")
|
||||
await db.delete(obj)
|
||||
await db.commit()
|
||||
|
||||
def _delete_sync_obj() -> None:
|
||||
obj = db.query(db_class).filter(db_class.id == id).one_or_none()
|
||||
if obj is None:
|
||||
raise HTTPException(status_code=404, detail="The object does not exist.")
|
||||
db.delete(obj)
|
||||
db.commit()
|
||||
|
||||
if isinstance(db, AsyncSession):
|
||||
return asyncio.ensure_future(_delete_async_obj()) # type: ignore
|
||||
elif isinstance(db, Session):
|
||||
return _delete_sync_obj()
|
||||
else:
|
||||
raise HTTPException(status_code=404, detail="Invalid session type. Expected Session or AsyncSession.") # type: ignore
|
||||
|
||||
@@ -1 +1,8 @@
|
||||
from .session import * # noqa
|
||||
try:
|
||||
import sqlalchemy
|
||||
|
||||
from .async_session import *
|
||||
from .helpers import *
|
||||
from .session import *
|
||||
except ImportError:
|
||||
print("SQLAlchemy not installed. Database functionality will be disabled.")
|
||||
|
||||
22
creyPY/fastapi/db/async_session.py
Normal file
22
creyPY/fastapi/db/async_session.py
Normal file
@@ -0,0 +1,22 @@
|
||||
from typing import AsyncGenerator
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
|
||||
|
||||
from .common import SQLALCHEMY_DATABASE_URL, name, ssl_mode
|
||||
|
||||
async_engine = create_async_engine(
|
||||
SQLALCHEMY_DATABASE_URL + name, pool_pre_ping=True, connect_args={"sslmode": ssl_mode}
|
||||
)
|
||||
|
||||
AsyncSessionLocal = async_sessionmaker(
|
||||
bind=async_engine,
|
||||
class_=AsyncSession,
|
||||
expire_on_commit=False,
|
||||
autoflush=False,
|
||||
autocommit=False,
|
||||
)
|
||||
|
||||
|
||||
async def get_async_db() -> AsyncGenerator[AsyncSession, None]:
|
||||
async with AsyncSessionLocal() as db:
|
||||
yield db
|
||||
15
creyPY/fastapi/db/common.py
Normal file
15
creyPY/fastapi/db/common.py
Normal file
@@ -0,0 +1,15 @@
|
||||
import os
|
||||
|
||||
from dotenv import load_dotenv
|
||||
|
||||
load_dotenv()
|
||||
|
||||
host = os.getenv("POSTGRES_HOST", "localhost")
|
||||
user = os.getenv("POSTGRES_USER", "postgres")
|
||||
password = os.getenv("POSTGRES_PASSWORD", "root")
|
||||
port = os.getenv("POSTGRES_PORT", "5432")
|
||||
name = os.getenv("POSTGRES_DB", "fastapi")
|
||||
|
||||
ssl_mode = os.getenv("SSL_MODE", "require")
|
||||
|
||||
SQLALCHEMY_DATABASE_URL = f"postgresql+psycopg://{user}:{password}@{host}:{port}/"
|
||||
8
creyPY/fastapi/db/helpers.py
Normal file
8
creyPY/fastapi/db/helpers.py
Normal file
@@ -0,0 +1,8 @@
|
||||
from sqlalchemy_utils import create_database, database_exists
|
||||
|
||||
|
||||
def create_if_not_exists(db_name: str):
|
||||
from .common import SQLALCHEMY_DATABASE_URL
|
||||
|
||||
if not database_exists(SQLALCHEMY_DATABASE_URL + db_name):
|
||||
create_database(SQLALCHEMY_DATABASE_URL + db_name)
|
||||
@@ -1,23 +1,14 @@
|
||||
import os
|
||||
from typing import Generator
|
||||
|
||||
from dotenv import load_dotenv
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from sqlalchemy.orm.session import Session
|
||||
|
||||
load_dotenv()
|
||||
from .common import SQLALCHEMY_DATABASE_URL, name, ssl_mode
|
||||
|
||||
host = os.getenv("POSTGRES_HOST", "localhost")
|
||||
user = os.getenv("POSTGRES_USER", "postgres")
|
||||
password = os.getenv("POSTGRES_PASSWORD", "root")
|
||||
port = os.getenv("POSTGRES_PORT", "5432")
|
||||
name = os.getenv("POSTGRES_DB", "fastapi")
|
||||
|
||||
SQLALCHEMY_DATABASE_URL = f"postgresql+psycopg://{user}:{password}@{host}:{port}/"
|
||||
|
||||
|
||||
engine = create_engine(SQLALCHEMY_DATABASE_URL + name, pool_pre_ping=True)
|
||||
engine = create_engine(
|
||||
SQLALCHEMY_DATABASE_URL + name, pool_pre_ping=True, connect_args={"sslmode": ssl_mode}
|
||||
)
|
||||
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
||||
|
||||
|
||||
|
||||
@@ -1 +1,2 @@
|
||||
from .base import * # noqa
|
||||
from .mixins import * # noqa
|
||||
|
||||
@@ -7,9 +7,11 @@ from sqlalchemy.ext.declarative import declared_attr
|
||||
from sqlalchemy.orm import as_declarative
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from .mixins import AutoAnnotateMixin, AutoInitMixin
|
||||
|
||||
|
||||
@as_declarative()
|
||||
class Base:
|
||||
class Base(AutoAnnotateMixin, AutoInitMixin):
|
||||
__abstract__ = True
|
||||
# Primary key as uuid
|
||||
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
|
||||
@@ -19,10 +21,21 @@ class Base:
|
||||
|
||||
__name__: str
|
||||
|
||||
# TODO: Add default representation string
|
||||
# TODO: Add automated foreign key resolution
|
||||
|
||||
# Generate __tablename__ automatically
|
||||
@declared_attr
|
||||
def __tablename__(cls) -> str:
|
||||
return cls.__name__.lower()
|
||||
|
||||
def __str__(self) -> str:
|
||||
# if the object has a name, title or similar attribute, return it
|
||||
if hasattr(self, "name"):
|
||||
return str(self.name) # type: ignore
|
||||
|
||||
# if the object has a title attribute, return it
|
||||
if hasattr(self, "title"):
|
||||
return str(self.title) # type: ignore
|
||||
|
||||
# otherwise return the object's id
|
||||
return str(self.id)
|
||||
|
||||
36
creyPY/fastapi/models/mixins.py
Normal file
36
creyPY/fastapi/models/mixins.py
Normal file
@@ -0,0 +1,36 @@
|
||||
from sqlalchemy import Column
|
||||
from sqlalchemy.orm import Mapped
|
||||
|
||||
|
||||
class AutoAnnotateMixin:
|
||||
@classmethod
|
||||
def __init_subclass__(cls) -> None:
|
||||
super().__init_subclass__()
|
||||
annotations = {}
|
||||
for key, value in cls.__dict__.items():
|
||||
if isinstance(value, Column):
|
||||
annotations[key] = Mapped[value.type.python_type]
|
||||
cls.__annotations__ = annotations
|
||||
|
||||
|
||||
class AutoInitMixin:
|
||||
@classmethod
|
||||
def __init_subclass__(cls) -> None:
|
||||
super().__init_subclass__()
|
||||
init_params = []
|
||||
for key, value in cls.__dict__.items():
|
||||
if isinstance(value, Column):
|
||||
if not value.nullable and not value.default and not value.server_default:
|
||||
init_params.append((key, value.type.python_type))
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super(cls, self).__init__()
|
||||
for key, _ in init_params:
|
||||
if key not in kwargs:
|
||||
raise TypeError(f"Missing required argument: {key}")
|
||||
setattr(self, key, kwargs[key])
|
||||
for key, value in kwargs.items():
|
||||
if key not in init_params and hasattr(self.__class__, key):
|
||||
setattr(self, key, value)
|
||||
|
||||
cls.__init__ = __init__
|
||||
@@ -1,25 +1,43 @@
|
||||
from contextlib import suppress
|
||||
from math import ceil
|
||||
from typing import Any, Generic, Optional, Self, Sequence, TypeVar, Union
|
||||
from typing import Any, Generic, Optional, Self, Sequence, TypeVar, Union, overload
|
||||
|
||||
from fastapi_pagination import Params
|
||||
from fastapi_pagination.bases import AbstractPage, AbstractParams
|
||||
from fastapi import Query
|
||||
from fastapi_pagination.api import apply_items_transformer, create_page
|
||||
from fastapi_pagination.bases import AbstractPage, AbstractParams, RawParams
|
||||
from fastapi_pagination.ext.sqlalchemy import create_paginate_query
|
||||
from fastapi_pagination.types import (
|
||||
AdditionalData,
|
||||
AsyncItemsTransformer,
|
||||
GreaterEqualOne,
|
||||
GreaterEqualZero,
|
||||
AdditionalData,
|
||||
ItemsTransformer,
|
||||
SyncItemsTransformer,
|
||||
)
|
||||
from fastapi_pagination.api import create_page, apply_items_transformer
|
||||
from fastapi_pagination.utils import verify_params
|
||||
from fastapi_pagination.ext.sqlalchemy import create_paginate_query
|
||||
from pydantic import BaseModel
|
||||
from pydantic.json_schema import SkipJsonSchema
|
||||
from sqlalchemy.sql.selectable import Select
|
||||
from sqlalchemy import func, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession, async_scoped_session
|
||||
from sqlalchemy.orm.session import Session
|
||||
from sqlalchemy import select, func
|
||||
from sqlalchemy.sql.selectable import Select
|
||||
from sqlalchemy.util import await_only, greenlet_spawn
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
|
||||
class PaginationParams(BaseModel, AbstractParams):
|
||||
page: int = Query(1, ge=1, description="Page number")
|
||||
size: int = Query(50, ge=1, description="Page size")
|
||||
pagination: bool = Query(True, description="Toggle pagination")
|
||||
|
||||
def to_raw_params(self) -> RawParams:
|
||||
if not self.pagination:
|
||||
return RawParams(limit=None, offset=None)
|
||||
|
||||
return RawParams(limit=self.size, offset=(self.page - 1) * self.size)
|
||||
|
||||
|
||||
# TODO: Add complete fastapi-pagination proxy here
|
||||
# TODO: Add pagination off functionality
|
||||
# SkipJsonSchema is used to avoid generating invalid JSON schema in FastAPI
|
||||
@@ -32,7 +50,7 @@ class Page(AbstractPage[T], Generic[T]):
|
||||
has_next: bool | SkipJsonSchema[None] = None
|
||||
has_prev: bool | SkipJsonSchema[None] = None
|
||||
|
||||
__params_type__ = Params
|
||||
__params_type__ = PaginationParams
|
||||
|
||||
@classmethod
|
||||
def create(
|
||||
@@ -43,7 +61,7 @@ class Page(AbstractPage[T], Generic[T]):
|
||||
total: Optional[int] = None,
|
||||
**kwargs: Any,
|
||||
) -> Self:
|
||||
if not isinstance(params, Params):
|
||||
if not isinstance(params, PaginationParams):
|
||||
raise TypeError("Page should be used with Params")
|
||||
|
||||
size = params.size or total or len(items)
|
||||
@@ -94,27 +112,72 @@ def unwrap_scalars(
|
||||
return [item[0] if force_unwrap else item for item in items]
|
||||
|
||||
|
||||
def _get_sync_conn_from_async(conn: Any) -> Session: # pragma: no cover
|
||||
if isinstance(conn, async_scoped_session):
|
||||
conn = conn()
|
||||
|
||||
with suppress(AttributeError):
|
||||
return conn.sync_session # type: ignore
|
||||
|
||||
with suppress(AttributeError):
|
||||
return conn.sync_connection # type: ignore
|
||||
|
||||
raise TypeError("conn must be an AsyncConnection or AsyncSession")
|
||||
|
||||
|
||||
@overload
|
||||
def paginate(
|
||||
connection: Session,
|
||||
query: Select,
|
||||
paginationFlag: bool = True,
|
||||
params: Optional[AbstractParams] = None,
|
||||
transformer: Optional[SyncItemsTransformer] = None,
|
||||
additional_data: Optional[AdditionalData] = None,
|
||||
):
|
||||
params, _ = verify_params(params, "limit-offset", "cursor")
|
||||
) -> Any:
|
||||
pass
|
||||
|
||||
|
||||
@overload
|
||||
async def paginate(
|
||||
connection: AsyncSession,
|
||||
query: Select,
|
||||
params: Optional[AbstractParams] = None,
|
||||
transformer: Optional[AsyncItemsTransformer] = None,
|
||||
additional_data: Optional[AdditionalData] = None,
|
||||
) -> Any:
|
||||
pass
|
||||
|
||||
|
||||
def _paginate(
|
||||
connection: Session,
|
||||
query: Select,
|
||||
params: Optional[AbstractParams] = None,
|
||||
transformer: Optional[ItemsTransformer] = None,
|
||||
additional_data: Optional[AdditionalData] = None,
|
||||
async_: bool = False,
|
||||
):
|
||||
|
||||
if async_:
|
||||
|
||||
def _apply_items_transformer(*args: Any, **kwargs: Any) -> Any:
|
||||
return await_only(apply_items_transformer(*args, **kwargs, async_=True))
|
||||
|
||||
else:
|
||||
_apply_items_transformer = apply_items_transformer
|
||||
|
||||
params, raw_params = verify_params(params, "limit-offset", "cursor")
|
||||
count_query = create_count_query(query)
|
||||
total = connection.scalar(count_query)
|
||||
|
||||
if paginationFlag is False:
|
||||
params = Params(page=1, size=total)
|
||||
if params.pagination is False and total > 0:
|
||||
params = PaginationParams(page=1, size=total)
|
||||
else:
|
||||
params = PaginationParams(page=params.page, size=params.size)
|
||||
|
||||
query = create_paginate_query(query, params)
|
||||
items = connection.execute(query).all()
|
||||
|
||||
items = unwrap_scalars(items)
|
||||
t_items = apply_items_transformer(items, transformer)
|
||||
t_items = _apply_items_transformer(items, transformer)
|
||||
|
||||
return create_page(
|
||||
t_items,
|
||||
@@ -122,3 +185,19 @@ def paginate(
|
||||
total=total,
|
||||
**(additional_data or {}),
|
||||
)
|
||||
|
||||
|
||||
def paginate(
|
||||
connection: Session,
|
||||
query: Select,
|
||||
params: Optional[AbstractParams] = None,
|
||||
transformer: Optional[ItemsTransformer] = None,
|
||||
additional_data: Optional[AdditionalData] = None,
|
||||
):
|
||||
if isinstance(connection, AsyncSession):
|
||||
connection = _get_sync_conn_from_async(connection)
|
||||
return greenlet_spawn(
|
||||
_paginate, connection, query, params, transformer, additional_data, async_=True
|
||||
)
|
||||
|
||||
return _paginate(connection, query, params, transformer, additional_data, async_=False)
|
||||
|
||||
@@ -1 +1,2 @@
|
||||
from .base import * # noqa
|
||||
from .schema_optional import * #noqa
|
||||
|
||||
19
creyPY/fastapi/schemas/schema_optional.py
Normal file
19
creyPY/fastapi/schemas/schema_optional.py
Normal file
@@ -0,0 +1,19 @@
|
||||
from typing import Optional, Type, Union, get_args, get_origin, get_type_hints
|
||||
|
||||
from pydantic import BaseModel, create_model
|
||||
|
||||
|
||||
def optional_fields(cls: Type[BaseModel]) -> Type[BaseModel]:
|
||||
fields = {}
|
||||
for name, hint in get_type_hints(cls).items():
|
||||
if name.startswith("_"):
|
||||
continue
|
||||
|
||||
if get_origin(hint) is not Union or type(None) not in get_args(hint):
|
||||
hint = Optional[hint]
|
||||
|
||||
fields[name] = (hint, None)
|
||||
|
||||
new_model = create_model(cls.__name__, __base__=cls, **fields)
|
||||
|
||||
return new_model
|
||||
@@ -41,7 +41,7 @@ class GenericClient(TestClient):
|
||||
re = self.c.post(
|
||||
url,
|
||||
files={"file": file},
|
||||
headers=self.default_headers | {"Content-Type": "application/json"},
|
||||
headers=self.default_headers,
|
||||
*args,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
143
creyPY/fastapi/testing_async.py
Normal file
143
creyPY/fastapi/testing_async.py
Normal file
@@ -0,0 +1,143 @@
|
||||
import json
|
||||
|
||||
from httpx import ASGITransport, AsyncClient
|
||||
|
||||
|
||||
class AsyncGenericClient:
|
||||
def __init__(self, app, headers={}):
|
||||
self.c = AsyncClient(
|
||||
transport=ASGITransport(app=app), base_url="http://testserver", follow_redirects=True
|
||||
)
|
||||
self.default_headers = headers
|
||||
|
||||
async def get(self, url: str, r_code: int = 200, parse_json=True):
|
||||
re = await self.c.get(url, headers=self.default_headers)
|
||||
if re.status_code != r_code:
|
||||
print(re.content)
|
||||
assert r_code == re.status_code
|
||||
return re.json() if parse_json else re.content
|
||||
|
||||
async def delete(self, url: str, r_code: int = 204):
|
||||
re = await self.c.delete(url, headers=self.default_headers)
|
||||
if re.status_code != r_code:
|
||||
print(re.content)
|
||||
assert r_code == re.status_code
|
||||
return re.json() if r_code != 204 else None
|
||||
|
||||
async def post(
|
||||
self, url: str, obj: dict | str = {}, r_code: int = 201, raw_response=False, *args, **kwargs
|
||||
):
|
||||
re = await self.c.post(
|
||||
url,
|
||||
data=json.dumps(obj) if isinstance(obj, dict) else obj,
|
||||
headers=self.default_headers | {"Content-Type": "application/json"},
|
||||
*args,
|
||||
**kwargs,
|
||||
)
|
||||
if re.status_code != r_code:
|
||||
print(re.content)
|
||||
if not raw_response:
|
||||
assert r_code == re.status_code
|
||||
return re.json() if not raw_response else re
|
||||
|
||||
async def post_file(
|
||||
self, url: str, file, r_code: int = 201, raw_response=False, *args, **kwargs
|
||||
):
|
||||
re = await self.c.post(
|
||||
url,
|
||||
files={"file": file},
|
||||
headers=self.default_headers,
|
||||
*args,
|
||||
**kwargs,
|
||||
)
|
||||
if re.status_code != r_code:
|
||||
print(re.content)
|
||||
assert r_code == re.status_code
|
||||
return re.json() if not raw_response else re
|
||||
|
||||
async def patch(
|
||||
self, url: str, obj: dict | str = {}, r_code: int = 200, raw_response=False, *args, **kwargs
|
||||
):
|
||||
re = await self.c.patch(
|
||||
url,
|
||||
data=json.dumps(obj) if isinstance(obj, dict) else obj,
|
||||
headers=self.default_headers | {"Content-Type": "application/json"},
|
||||
*args,
|
||||
**kwargs,
|
||||
)
|
||||
if re.status_code != r_code:
|
||||
print(re.content)
|
||||
assert r_code == re.status_code
|
||||
return re.json() if not raw_response else re
|
||||
|
||||
async def put(
|
||||
self, url: str, obj: dict | str = {}, r_code: int = 200, raw_response=False, *args, **kwargs
|
||||
):
|
||||
re = await self.c.put(
|
||||
url,
|
||||
data=json.dumps(obj) if isinstance(obj, dict) else obj,
|
||||
headers=self.default_headers
|
||||
| {
|
||||
"Content-Type": "application/json",
|
||||
"accept": "application/json",
|
||||
},
|
||||
*args,
|
||||
**kwargs,
|
||||
)
|
||||
if re.status_code != r_code:
|
||||
print(re.content)
|
||||
assert r_code == re.status_code
|
||||
return re.json() if not raw_response else re
|
||||
|
||||
async def obj_lifecycle(
|
||||
self,
|
||||
input_obj: dict,
|
||||
url: str,
|
||||
pagination: bool = True,
|
||||
id_field: str = "id",
|
||||
created_at_check: bool = True,
|
||||
):
|
||||
# GET LIST
|
||||
re = await self.get(url)
|
||||
if pagination:
|
||||
assert re["total"] == 0
|
||||
assert len(re["results"]) == 0
|
||||
else:
|
||||
assert len(re) == 0
|
||||
|
||||
# CREATE
|
||||
re = await self.post(url, obj=input_obj)
|
||||
assert id_field in re
|
||||
assert re[id_field] is not None
|
||||
|
||||
if created_at_check:
|
||||
assert "created_at" in re
|
||||
assert re["created_at"] is not None
|
||||
|
||||
obj_id = str(re[id_field])
|
||||
|
||||
# GET
|
||||
re = await self.get(f"{url}{obj_id}/")
|
||||
assert re[id_field] == obj_id
|
||||
|
||||
# GET LIST
|
||||
re = await self.get(url)
|
||||
if pagination:
|
||||
assert re["total"] == 1
|
||||
assert len(re["results"]) == 1
|
||||
else:
|
||||
assert len(re) == 1
|
||||
|
||||
# DELETE
|
||||
await self.delete(f"{url}{obj_id}")
|
||||
|
||||
# GET LIST
|
||||
re = await self.get(url)
|
||||
if pagination:
|
||||
assert re["total"] == 0
|
||||
assert len(re["results"]) == 0
|
||||
else:
|
||||
assert len(re) == 0
|
||||
|
||||
# GET
|
||||
await self.get(f"{url}{obj_id}", parse_json=False, r_code=404)
|
||||
196
creyPY/fastapi/testing_unit.py
Normal file
196
creyPY/fastapi/testing_unit.py
Normal file
@@ -0,0 +1,196 @@
|
||||
import json
|
||||
import unittest
|
||||
from typing import Type
|
||||
|
||||
from httpx import ASGITransport, AsyncClient, Response
|
||||
from sqlalchemy import create_engine, text
|
||||
from sqlalchemy.ext.asyncio import create_async_engine
|
||||
from sqlalchemy_utils import create_database, database_exists, drop_database
|
||||
|
||||
from creyPY.fastapi.models.base import Base
|
||||
|
||||
|
||||
class AbstractTestAPI(unittest.IsolatedAsyncioTestCase):
    """Async unittest base class that drives a FastAPI/ASGI app in-process.

    Subclasses call :meth:`setUpClass` with the application under test (and
    optional default headers) and may call :meth:`setup_database` to recreate
    a clean PostgreSQL schema.  The request helpers assert the expected status
    code and return the decoded JSON body (or the raw response on request).
    """

    client: AsyncClient
    default_headers: dict = {}

    @classmethod
    def setUpClass(cls, app, headers=None) -> None:
        """Create the shared in-process httpx client for *app*.

        app: the ASGI application under test.
        headers: headers to send with every request (default: none).
            ``None`` replaces the previous mutable ``{}`` default so one
            dict cannot be shared and mutated across test classes.
        """
        cls.client = AsyncClient(
            transport=ASGITransport(app=app), base_url="http://testserver", follow_redirects=True
        )
        cls.default_headers = headers if headers is not None else {}

    @classmethod
    def setup_database(
        cls,
        sync_db_url: str,
        async_db_url: str,
        base: Type[Base],
        btree_gist: bool = False,
        ssl_mode: str = "require",
    ):
        """Drop, recreate and migrate the test database.

        sync_db_url: DSN for the synchronous engine used for schema setup.
        async_db_url: DSN for the async engine used by the application.
        base: declarative base whose metadata is created.
        btree_gist: install the PostgreSQL ``btree_gist`` extension first.
        ssl_mode: passed through as libpq ``sslmode``.
        """
        cls.engine_s = create_engine(
            sync_db_url,
            echo=False,
            pool_pre_ping=True,
            connect_args={"sslmode": ssl_mode},
        )
        # Always start each test run from a pristine database.
        if database_exists(cls.engine_s.url):
            drop_database(cls.engine_s.url)
        create_database(cls.engine_s.url)

        if btree_gist:
            with cls.engine_s.begin() as conn:
                conn.execute(text("CREATE EXTENSION IF NOT EXISTS btree_gist"))

        # Migrate: create all tables from the declarative metadata.
        base.metadata.create_all(cls.engine_s)

        cls.engine = create_async_engine(
            async_db_url,
            echo=False,
            pool_pre_ping=True,
            connect_args={"sslmode": ssl_mode},
        )

    async def get(self, url: str, r_code: int = 200, parse_json=True) -> dict | bytes:
        """GET *url*; assert status *r_code*; return JSON (or raw bytes)."""
        re = await self.client.get(url, headers=self.default_headers)
        if re.status_code != r_code:
            print(re.content)  # surface the body before the assertion fires
        self.assertEqual(r_code, re.status_code)
        return re.json() if parse_json else re.content

    async def delete(self, url: str, r_code: int = 204) -> dict | None:
        """DELETE *url*; assert status *r_code*; return JSON unless 204."""
        re = await self.client.delete(url, headers=self.default_headers)
        if re.status_code != r_code:
            print(re.content)
        self.assertEqual(r_code, re.status_code)
        return re.json() if r_code != 204 else None

    async def post(
        self,
        url: str,
        obj: dict | str | None = None,
        r_code: int = 201,
        raw_response=False,
        *args,
        **kwargs,
    ) -> dict | bytes | Response:
        """POST *obj* as JSON to *url*.

        obj: dict (serialized here) or a pre-serialized JSON string;
            ``None`` (instead of a mutable ``{}`` default) means empty object.
        raw_response: skip the status assertion and return the Response.
        """
        if obj is None:
            obj = {}
        re = await self.client.post(
            url,
            data=json.dumps(obj) if isinstance(obj, dict) else obj,
            headers=self.default_headers | {"Content-Type": "application/json"},
            *args,
            **kwargs,
        )
        if re.status_code != r_code:
            print(re.content)
        if not raw_response:
            self.assertEqual(r_code, re.status_code)
        return re.json() if not raw_response else re

    async def post_file(
        self, url: str, file, r_code: int = 201, raw_response=False, *args, **kwargs
    ) -> dict | bytes | Response:
        """POST *file* as a multipart upload under the field name "file"."""
        re = await self.client.post(
            url,
            files={"file": file},
            headers=self.default_headers,
            *args,
            **kwargs,
        )
        if re.status_code != r_code:
            print(re.content)
        self.assertEqual(r_code, re.status_code)
        return re.json() if not raw_response else re

    async def patch(
        self,
        url: str,
        obj: dict | str | None = None,
        r_code: int = 200,
        raw_response=False,
        *args,
        **kwargs,
    ) -> dict | bytes | Response:
        """PATCH *obj* as JSON to *url*; see :meth:`post` for semantics."""
        if obj is None:
            obj = {}
        re = await self.client.patch(
            url,
            data=json.dumps(obj) if isinstance(obj, dict) else obj,
            headers=self.default_headers | {"Content-Type": "application/json"},
            *args,
            **kwargs,
        )
        if re.status_code != r_code:
            print(re.content)
        self.assertEqual(r_code, re.status_code)
        return re.json() if not raw_response else re

    async def put(
        self,
        url: str,
        obj: dict | str | None = None,
        r_code: int = 200,
        raw_response=False,
        *args,
        **kwargs,
    ) -> dict | bytes | Response:
        """PUT *obj* as JSON to *url*; see :meth:`post` for semantics."""
        if obj is None:
            obj = {}
        re = await self.client.put(
            url,
            data=json.dumps(obj) if isinstance(obj, dict) else obj,
            # NOTE(review): put also sends "accept" while post/patch do not;
            # preserved as-is — confirm whether this asymmetry is intended.
            headers=self.default_headers
            | {
                "Content-Type": "application/json",
                "accept": "application/json",
            },
            *args,
            **kwargs,
        )
        if re.status_code != r_code:
            print(re.content)
        self.assertEqual(r_code, re.status_code)
        return re.json() if not raw_response else re

    async def obj_lifecycle(
        self,
        input_obj: dict,
        url: str,
        pagination: bool = True,
        id_field: str = "id",
        created_at_check: bool = True,
        patch: dict | None = None,
    ):
        """Exercise the full CRUD lifecycle of a REST collection.

        input_obj: payload used to create the object.
        url: collection URL; single objects are addressed as ``f"{url}{id}/"``.
        pagination: whether the list endpoint wraps results in
            {"total", "results"}.
        id_field: name of the identifier field in responses.
        created_at_check: also assert a non-null "created_at" field.
        patch: optional partial update, merged into *input_obj*, applied and
            verified between creation and deletion.
        """
        # GET LIST: the collection must start out empty.
        re = await self.get(url)
        if pagination:
            self.assertEqual(re["total"], 0)
            self.assertEqual(len(re["results"]), 0)
        else:
            self.assertEqual(len(re), 0)

        # CREATE
        re = await self.post(url, obj=input_obj)
        self.assertIn(id_field, re)
        self.assertIsNotNone(re[id_field])

        if created_at_check:
            self.assertIn("created_at", re)
            self.assertIsNotNone(re["created_at"])

        obj_id = str(re[id_field])

        # GET: the new object is retrievable by id.
        re = await self.get(f"{url}{obj_id}/")
        self.assertEqual(re[id_field], obj_id)

        # PATCH: merge the changes into the payload and verify the response.
        if patch:
            for key, value in patch.items():
                input_obj[key] = value
            re = await self.patch(f"{url}{obj_id}/", obj=input_obj)
            for key, value in patch.items():
                self.assertEqual(re[key], value)

        # GET LIST: exactly one object now.
        re = await self.get(url)
        if pagination:
            self.assertEqual(re["total"], 1)
            self.assertEqual(len(re["results"]), 1)
        else:
            self.assertEqual(len(re), 1)

        # DELETE
        await self.delete(f"{url}{obj_id}")

        # GET LIST: empty again.
        re = await self.get(url)
        if pagination:
            self.assertEqual(re["total"], 0)
            self.assertEqual(len(re["results"]), 0)
        else:
            self.assertEqual(len(re), 0)

        # GET: the deleted object must 404.
        await self.get(f"{url}{obj_id}", parse_json=False, r_code=404)
|
||||
26
creyPY/helpers.py
Normal file
26
creyPY/helpers.py
Normal file
@@ -0,0 +1,26 @@
|
||||
import secrets
|
||||
import string
|
||||
import csv
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def create_random_password(length: int = 12) -> str:
    """Return a cryptographically random password of *length* characters.

    The result is guaranteed to contain at least one lowercase letter, one
    uppercase letter, one digit and one punctuation character.

    Raises:
        ValueError: if *length* is smaller than 4 — previously the function
            silently returned a 4-character password regardless, violating
            the requested length.
    """
    if length < 4:
        raise ValueError("length must be at least 4 to fit all character classes")

    all_characters = string.ascii_letters + string.digits + string.punctuation

    # One guaranteed character per required class.
    password = [
        secrets.choice(string.ascii_lowercase),
        secrets.choice(string.ascii_uppercase),
        secrets.choice(string.digits),
        secrets.choice(string.punctuation),
    ]
    # Fill the rest from the full alphabet.
    password += [secrets.choice(all_characters) for _ in range(length - 4)]
    # Shuffle so the guaranteed characters are not always in the same slots.
    secrets.SystemRandom().shuffle(password)
    return "".join(password)
|
||||
|
||||
|
||||
def data_to_csv(file: Path, data: list) -> None:
    """Write a list of dicts to *file* as a semicolon-delimited CSV.

    The header row is taken from the keys of the first dict; all dicts are
    assumed to share those keys.  An empty *data* list produces an empty
    file — previously this crashed with IndexError on ``data[0]``.
    """
    with file.open(mode="w", newline="", encoding="utf-8") as f:
        if not data:
            return  # nothing to write, and no keys to build a header from
        writer = csv.DictWriter(f, fieldnames=data[0].keys(), delimiter=";")
        writer.writeheader()
        writer.writerows(data)
|
||||
3
creyPY/services/__init__.py
Normal file
3
creyPY/services/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
from .auth0 import * # noqa
|
||||
from .stripe import * # noqa
|
||||
from .aws import * # noqa
|
||||
4
creyPY/services/auth0/__init__.py
Normal file
4
creyPY/services/auth0/__init__.py
Normal file
@@ -0,0 +1,4 @@
|
||||
from .exceptions import * # noqa
|
||||
from .manage import * # noqa
|
||||
from .testing import * # noqa
|
||||
from .utils import * # noqa
|
||||
13
creyPY/services/auth0/common.py
Normal file
13
creyPY/services/auth0/common.py
Normal file
@@ -0,0 +1,13 @@
|
||||
import os
|
||||
|
||||
from dotenv import load_dotenv
|
||||
|
||||
load_dotenv()
|
||||
|
||||
AUTH0_DOMAIN = os.getenv("AUTH0_DOMAIN")
|
||||
AUTH0_CLIENT_ID = os.getenv("AUTH0_CLIENT_ID")
|
||||
AUTH0_CLIENT_SECRET = os.getenv("AUTH0_CLIENT_SECRET")
|
||||
AUTH0_ALGORIGHM = os.getenv("AUTH0_ALGORIGHM", "RS256")
|
||||
|
||||
AUTH0_AUDIENCE = os.getenv("AUTH0_AUDIENCE")
|
||||
AUTH0_ISSUER = os.getenv("AUTH0_ISSUER")
|
||||
12
creyPY/services/auth0/exceptions.py
Normal file
12
creyPY/services/auth0/exceptions.py
Normal file
@@ -0,0 +1,12 @@
|
||||
from fastapi import HTTPException, status
|
||||
|
||||
|
||||
class UnauthorizedException(HTTPException):
    """Raised when the caller is authenticated but not permitted: HTTP 403."""

    def __init__(self, detail: str, **kwargs):
        """Returns HTTP 403"""
        # Forward **kwargs (e.g. `headers`) to HTTPException — previously
        # they were accepted but silently dropped.
        super().__init__(status.HTTP_403_FORBIDDEN, detail=detail, **kwargs)
|
||||
|
||||
|
||||
class UnauthenticatedException(HTTPException):
    """Raised when no credentials were supplied at all: HTTP 401."""

    def __init__(self):
        super().__init__(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Requires authentication",
        )
|
||||
21
creyPY/services/auth0/manage.py
Normal file
21
creyPY/services/auth0/manage.py
Normal file
@@ -0,0 +1,21 @@
|
||||
import requests
|
||||
from cachetools import TTLCache, cached
|
||||
|
||||
from .common import AUTH0_CLIENT_ID, AUTH0_CLIENT_SECRET, AUTH0_DOMAIN
|
||||
|
||||
# Management tokens stay valid for a while; cache them for 10 minutes so we
# do not request a fresh token on every Management API call.
cache = TTLCache(maxsize=100, ttl=600)


@cached(cache)
def get_management_token() -> str:
    """Fetch (and cache) an Auth0 Management API access token.

    Raises:
        requests.HTTPError: if the token endpoint returns an error status —
            previously an error response surfaced as a confusing KeyError
            on the missing "access_token" field.
    """
    response = requests.post(
        f"https://{AUTH0_DOMAIN}/oauth/token",
        json={
            "client_id": AUTH0_CLIENT_ID,
            "client_secret": AUTH0_CLIENT_SECRET,
            "audience": f"https://{AUTH0_DOMAIN}/api/v2/",  # This should be the management audience
            "grant_type": "client_credentials",
        },
        timeout=5,  # Add a timeout parameter to avoid hanging requests
    )
    response.raise_for_status()
    return response.json()["access_token"]
|
||||
93
creyPY/services/auth0/testing.py
Normal file
93
creyPY/services/auth0/testing.py
Normal file
@@ -0,0 +1,93 @@
|
||||
# In-memory stand-ins for Auth0 user records, keyed by token `sub`.
# Used to fake the Auth0 Management API in tests.
USER_OBJ = {
    "auth0|testing": {
        "created_at": "2023-08-15T13:25:31.507Z",
        "email": "test@test.org",
        "email_verified": True,
        "identities": [
            {
                "connection": "Username-Password-Authentication",
                "provider": "auth0",
                "user_id": "testing",
                "isSocial": False,
            }
        ],
        "name": "Test Tester",
        "nickname": "testing",
        "picture": "https://avatars.githubusercontent.com/u/15138480?v=4",
        "updated_at": "2024-01-17T12:36:37.300Z",
        "user_id": "auth0|testing",
        "user_metadata": {},
        "last_password_reset": "2024-01-17T11:42:08.761Z",
        "last_ip": "127.0.0.1",
        "last_login": "2024-01-17T11:43:09.620Z",
        "logins_count": 1,
    },
    "auth0|new_user": {
        "created_at": "2023-08-15T13:25:31.507Z",
        "email": "test2@test.org",
        "email_verified": True,
        "identities": [
            {
                "connection": "Username-Password-Authentication",
                "provider": "auth0",
                "user_id": "testing",
                "isSocial": False,
            }
        ],
        "name": "Test Tester 2",
        "nickname": "testing 2",
        "picture": "https://avatars.githubusercontent.com/u/15138481?v=4",
        "updated_at": "2024-01-17T12:36:37.303Z",
        "user_id": "auth0|new_user",
        "user_metadata": {},
        "last_password_reset": "2024-01-17T11:42:08.759Z",
        "last_ip": "127.0.0.1",
        "last_login": "2024-01-17T11:43:09.618Z",
        "logins_count": 1,
    },
}


def get_user_auth0(sub, *args, **kwargs) -> dict:
    """Return the fake user record for *sub* (KeyError if unknown)."""
    return USER_OBJ[sub]


def patch_user_auth0(input_obj: dict, sub, *args, **kwargs) -> dict:
    """Merge *input_obj* into the stored record and return the record."""
    user = USER_OBJ[sub]
    user.update(input_obj)
    return user


def get_user_auth0_metadata(sub, *args, **kwargs) -> dict:
    """Return the fake user's metadata dict."""
    return get_user_auth0(sub)["user_metadata"]


def check_company_auth0(*args, **kwargs) -> bool:
    """Company membership check stub: always passes."""
    return True


def auth0_sub_to_profile(sub: str) -> dict:
    """Return the private profile view (includes company ids)."""
    user = USER_OBJ[sub]
    return {
        "email": user["email"],
        "name": user["name"],
        "picture": user["picture"],
        "company_ids": user["user_metadata"]["company_ids"],
    }


def auth0_sub_to_public(sub: str) -> dict:
    """Return the public profile view (no metadata)."""
    return {field: USER_OBJ[sub][field] for field in ("email", "name", "picture")}


def patch_user_auth0_metadata(input_obj: dict, sub, *args, **kwargs) -> dict:
    """Merge *input_obj* into the user's metadata and return the metadata."""
    metadata = USER_OBJ[sub]["user_metadata"]
    metadata.update(input_obj)
    return metadata


def set_company_id(sub: str, company_id: str):
    """Overwrite the user's metadata with the single given company id."""
    USER_OBJ.setdefault(sub, {})["user_metadata"] = {"company_ids": [company_id]}
||||
147
creyPY/services/auth0/utils.py
Normal file
147
creyPY/services/auth0/utils.py
Normal file
@@ -0,0 +1,147 @@
|
||||
from typing import Optional
|
||||
|
||||
import jwt
|
||||
import requests
|
||||
from fastapi import HTTPException, Request, Security
|
||||
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
|
||||
|
||||
from creyPY.helpers import create_random_password
|
||||
|
||||
from .common import (
|
||||
AUTH0_ALGORIGHM,
|
||||
AUTH0_AUDIENCE,
|
||||
AUTH0_CLIENT_ID,
|
||||
AUTH0_DOMAIN,
|
||||
AUTH0_ISSUER,
|
||||
)
|
||||
from .exceptions import UnauthenticatedException, UnauthorizedException
|
||||
from .manage import get_management_token
|
||||
|
||||
# Shared JWKS client: fetches and caches the tenant's token signing keys.
JWKS_CLIENT = jwt.PyJWKClient(f"https://{AUTH0_DOMAIN}/.well-known/jwks.json")


async def verify(
    request: Request,
    token: Optional[HTTPAuthorizationCredentials] = Security(HTTPBearer(auto_error=False)),
) -> str:
    """FastAPI dependency: validate the bearer token and return its `sub`.

    Raises:
        UnauthenticatedException: no Authorization header was sent.
        UnauthorizedException: the token is malformed, has an unknown key id,
            or fails signature/audience/issuer validation.
    """
    if token is None:
        raise UnauthenticatedException

    # Resolve the signing key via the 'kid' from the passed token.
    try:
        signing_key = JWKS_CLIENT.get_signing_key_from_jwt(token.credentials).key
    except (jwt.exceptions.PyJWKClientError, jwt.exceptions.DecodeError) as error:
        # Chain the cause so the underlying JWT failure is not lost;
        # the two previous duplicate except branches are merged.
        raise UnauthorizedException(str(error)) from error

    try:
        payload = jwt.decode(
            token.credentials,
            signing_key,
            algorithms=[AUTH0_ALGORIGHM],
            audience=AUTH0_AUDIENCE,
            issuer=AUTH0_ISSUER,
        )
    except Exception as error:
        raise UnauthorizedException(str(error)) from error

    return payload["sub"]
|
||||
|
||||
|
||||
### GENERIC AUTH0 CALLS ###
def get_user(sub) -> dict:
    """Fetch the full Auth0 user record for *sub* via the Management API."""
    response = requests.get(
        f"https://{AUTH0_DOMAIN}/api/v2/users/{sub}",
        headers={"Authorization": f"Bearer {get_management_token()}"},
        timeout=5,
    )
    # Propagate Auth0 errors to the API caller unchanged.
    if response.status_code != 200:
        raise HTTPException(response.status_code, response.json())
    return response.json()
|
||||
|
||||
|
||||
def patch_user(input_obj: dict, sub) -> dict:
    """Partially update the Auth0 user *sub* and return the updated record."""
    response = requests.patch(
        f"https://{AUTH0_DOMAIN}/api/v2/users/{sub}",
        headers={"Authorization": f"Bearer {get_management_token()}"},
        json=input_obj,
        timeout=5,
    )
    # Propagate Auth0 errors to the API caller unchanged.
    if response.status_code != 200:
        raise HTTPException(response.status_code, response.json())
    return response.json()
|
||||
|
||||
|
||||
### USER METADATA CALLS ###
def get_user_metadata(sub) -> dict:
    """Return the user's metadata, or {} if the user cannot be fetched.

    Deliberately best-effort: lookup failures are swallowed and reported
    as empty metadata.
    """
    try:
        return get_user(sub).get("user_metadata", {})
    except Exception:
        # Narrowed from a bare `except:`, which would also swallow
        # SystemExit/KeyboardInterrupt.
        return {}
|
||||
|
||||
|
||||
def patch_user_metadata(input_obj: dict, sub) -> dict:
    """Merge *input_obj* into the user's metadata via the generic patch call."""
    payload = {"user_metadata": input_obj}
    return patch_user(payload, sub)
|
||||
|
||||
|
||||
def clear_user_metadata(sub) -> dict:
    """Reset the user's metadata to an empty object."""
    empty = {"user_metadata": {}}
    return patch_user(empty, sub)
|
||||
|
||||
|
||||
def request_verification_mail(sub: str) -> dict:
    """Ask Auth0 to queue a verification e-mail job for *sub*.

    Returns the created job object.  (Annotation fixed: the function has
    always returned the job JSON, not None.)

    Raises:
        HTTPException: with Auth0's status and body on any non-201 response.
    """
    re = requests.post(
        f"https://{AUTH0_DOMAIN}/api/v2/jobs/verification-email",
        headers={"Authorization": f"Bearer {get_management_token()}"},
        json={"user_id": sub},
        timeout=5,
    )
    if re.status_code != 201:
        raise HTTPException(re.status_code, re.json())
    return re.json()
|
||||
|
||||
|
||||
def create_user_invite(email: str, company_id: str) -> dict:
    """Create a placeholder Auth0 account as an invite to *company_id*.

    The account is created with a random password, marked as invited via
    app_metadata, and assigned the company in user_metadata.
    """
    payload = {
        "email": email,
        "connection": "Username-Password-Authentication",
        "password": create_random_password(),
        "verify_email": False,
        "app_metadata": {"invitedToMyApp": True},
        "user_metadata": {"company_ids": [company_id]},
    }
    response = requests.post(
        f"https://{AUTH0_DOMAIN}/api/v2/users",
        headers={"Authorization": f"Bearer {get_management_token()}"},
        json=payload,
        timeout=5,
    )
    if response.status_code != 201:
        raise HTTPException(response.status_code, response.json())
    return response.json()
|
||||
|
||||
|
||||
def delete_user_invite(user_id: str) -> None:
    """Delete the given Auth0 user (used to revoke a pending invite)."""
    response = requests.delete(
        f"https://{AUTH0_DOMAIN}/api/v2/users/{user_id}",
        headers={"Authorization": f"Bearer {get_management_token()}"},
        timeout=5,
    )
    if response.status_code != 204:
        raise HTTPException(response.status_code, response.json())
|
||||
|
||||
|
||||
def password_change_mail(email: str) -> bool:
    """Trigger Auth0's change-password e-mail for *email*.

    Returns True on success; raises HTTPException with Auth0's status and
    body otherwise.
    """
    response = requests.post(
        f"https://{AUTH0_DOMAIN}/dbconnections/change_password",
        headers={"Authorization": f"Bearer {get_management_token()}"},
        json={
            "client_id": AUTH0_CLIENT_ID,
            "email": email,
            "connection": "Username-Password-Authentication",
        },
        timeout=5,
    )

    if response.status_code != 200:
        raise HTTPException(response.status_code, response.json())
    return True
|
||||
1
creyPY/services/aws/__init__.py
Normal file
1
creyPY/services/aws/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
from .email import * # noqa
|
||||
32
creyPY/services/aws/email.py
Normal file
32
creyPY/services/aws/email.py
Normal file
@@ -0,0 +1,32 @@
|
||||
import os
|
||||
|
||||
import boto3
|
||||
from botocore.exceptions import ClientError
|
||||
|
||||
# SES credentials and sender address, read from the environment.
AWS_CLIENT_ID = os.getenv("AWS_CLIENT_ID")
AWS_CLIENT_SECRET = os.getenv("AWS_CLIENT_SECRET")
AWS_SENDER_EMAIL = os.getenv("AWS_SENDER_EMAIL")
AWS_REGION = os.getenv("AWS_REGION", "eu-central-1")


async def send_email_ses(recipient_email, subject, html_body):
    """Send a single HTML e-mail via AWS SES.

    Returns the SES message id on success, or None when SES rejects the
    request (delivery is deliberately best-effort).
    """
    ses_client = boto3.client(
        "ses",
        aws_access_key_id=AWS_CLIENT_ID,
        aws_secret_access_key=AWS_CLIENT_SECRET,
        region_name=AWS_REGION,
    )
    email_message = {
        "Source": AWS_SENDER_EMAIL,
        "Destination": {"ToAddresses": [recipient_email]},
        "Message": {
            "Subject": {"Data": subject, "Charset": "UTF-8"},
            "Body": {"Html": {"Data": html_body, "Charset": "UTF-8"}},
        },
    }

    try:
        response = ses_client.send_email(**email_message)
        return response["MessageId"]
    except ClientError:
        # Best-effort: signal failure via None instead of raising.
        # (The exception was previously bound to an unused variable `e`.)
        return None
|
||||
1
creyPY/services/stripe/__init__.py
Normal file
1
creyPY/services/stripe/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
from .testing import * # noqa
|
||||
23
creyPY/services/stripe/testing.py
Normal file
23
creyPY/services/stripe/testing.py
Normal file
@@ -0,0 +1,23 @@
|
||||
class ItemReturn:
    """Canned Stripe subscription item: always reports quantity 1."""

    quantity = 1


class SubscriptionItem:
    """Fake of stripe's SubscriptionItem resource for tests."""

    def retrieve(self, id: str = ""):
        """Pretend to look up an item; always return the canned one."""
        return ItemReturn

    def modify(self, id: str, quantity: int):
        """Pretend to update an item; always return the canned one."""
        return ItemReturn


class StripeAPI:
    """Drop-in fake for the `stripe` module used by get_stripe_api()."""

    def __init__(self, key: str):
        # The API key is irrelevant for the fake; accepted only so the
        # constructor signature matches real usage.
        pass

    @property
    def SubscriptionItem(self):
        """Mirror `stripe.SubscriptionItem` by exposing the fake resource."""
        return SubscriptionItem


def get_stripe_api():
    """Return a fake Stripe API object wired with a dummy key."""
    return StripeAPI("test")
|
||||
11
creyPY/services/stripe/utils.py
Normal file
11
creyPY/services/stripe/utils.py
Normal file
@@ -0,0 +1,11 @@
|
||||
import os
|
||||
|
||||
import stripe
|
||||
from dotenv import load_dotenv
|
||||
|
||||
load_dotenv()
|
||||
|
||||
|
||||
def get_stripe_api():
    """Return the stripe module configured with STRIPE_API_KEY (or "")."""
    api_key = os.getenv("STRIPE_API_KEY", "")
    stripe.api_key = api_key
    return stripe
|
||||
7
renovate.json
Normal file
7
renovate.json
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
|
||||
"extends": [
|
||||
"config:recommended",
|
||||
":semanticCommitTypeAll(feat)"
|
||||
]
|
||||
}
|
||||
7
requirements.auth0.txt
Normal file
7
requirements.auth0.txt
Normal file
@@ -0,0 +1,7 @@
|
||||
cachetools>=5.5.0 # for caching
|
||||
charset-normalizer>=3.4.0 # Auth0 API interactions
|
||||
requests>=2.32.3 # Auth0 API interactions
|
||||
pyjwt>=2.10.1 # Auth0 API interactions
|
||||
cffi>=1.17.1 # Auth0 API interactions
|
||||
cryptography>=43.0.3 # Auth0 API interactions
|
||||
pycparser>=2.22 # Auth0 API interactions
|
||||
@@ -23,5 +23,3 @@ twine>=5.0.0
|
||||
urllib3>=2.2.1
|
||||
wheel>=0.43.0
|
||||
zipp>=3.18.1
|
||||
|
||||
-r requirements.txt
|
||||
|
||||
5
requirements.pg.txt
Normal file
5
requirements.pg.txt
Normal file
@@ -0,0 +1,5 @@
|
||||
psycopg>=3.2.1 # PostgreSQL
|
||||
psycopg-binary>=3.2.1 # PostgreSQL
|
||||
psycopg-pool>=3.2.2 # PostgreSQL
|
||||
asyncpg>=0.30.0 # SQLAlchemy
|
||||
greenlet>=3.1.1 # Async
|
||||
1
requirements.stripe.txt
Normal file
1
requirements.stripe.txt
Normal file
@@ -0,0 +1 @@
|
||||
stripe==12.2.0 # Stripe
|
||||
@@ -11,13 +11,10 @@ starlette>=0.37.2 # FastAPI
|
||||
|
||||
fastapi-pagination>=0.12.26 # Pagination
|
||||
sqlalchemy>=2.0.31 # SQLAlchemy
|
||||
sqlalchemy-utils>=0.41.2 # For managing databases
|
||||
|
||||
python-dotenv>=1.0.1 # Environment variables
|
||||
|
||||
psycopg>=3.2.1 # PostgreSQL
|
||||
psycopg-binary>=3.2.1 # PostgreSQL
|
||||
psycopg-pool>=3.2.2 # PostgreSQL
|
||||
|
||||
h11>=0.14.0 # Testing
|
||||
httpcore>=1.0.5 # Testing
|
||||
httpx>=0.27.0 # Testing
|
||||
|
||||
22
setup.py
22
setup.py
@@ -5,6 +5,18 @@ from setuptools import find_packages, setup
|
||||
with open("requirements.txt") as f:
|
||||
requirements = f.read().splitlines()
|
||||
|
||||
with open("requirements.build.txt") as f:
|
||||
build_requirements = f.read().splitlines()
|
||||
|
||||
with open("requirements.pg.txt") as f:
|
||||
pg_requirements = f.read().splitlines()
|
||||
|
||||
with open("requirements.auth0.txt") as f:
|
||||
auth0_requirements = f.read().splitlines()
|
||||
|
||||
with open("requirements.stripe.txt") as f:
|
||||
stripe_requirements = f.read().splitlines()
|
||||
|
||||
|
||||
def get_latest_git_tag() -> str:
|
||||
try:
|
||||
@@ -27,12 +39,19 @@ setup(
|
||||
long_description=open("README.md").read(),
|
||||
long_description_content_type="text/markdown",
|
||||
author="Conrad Großer",
|
||||
author_email="conrad@noah.tech",
|
||||
author_email="code@grosser.group",
|
||||
packages=find_packages(),
|
||||
url="https://github.com/creyD/creyPY",
|
||||
license="MIT",
|
||||
python_requires=">=3.12",
|
||||
install_requires=requirements,
|
||||
extras_require={
|
||||
"build": build_requirements,
|
||||
"postgres": pg_requirements,
|
||||
"auth0": auth0_requirements,
|
||||
"stripe": stripe_requirements,
|
||||
"all": build_requirements + pg_requirements + auth0_requirements + stripe_requirements,
|
||||
},
|
||||
keywords=[
|
||||
"creyPY",
|
||||
"Python",
|
||||
@@ -40,7 +59,6 @@ setup(
|
||||
"shortcuts",
|
||||
"snippets",
|
||||
"utils",
|
||||
"personal library",
|
||||
],
|
||||
platforms="any",
|
||||
)
|
||||
|
||||
6
test.py
6
test.py
@@ -7,9 +7,7 @@ from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
from creyPY.fastapi.app import generate_unique_id
|
||||
from creyPY.fastapi.crud import (
|
||||
get_object_or_404,
|
||||
)
|
||||
from creyPY.fastapi.crud import get_object_or_404
|
||||
from creyPY.fastapi.models.base import Base
|
||||
|
||||
|
||||
@@ -65,7 +63,7 @@ class TestMyFunction(unittest.TestCase):
|
||||
def test_get_object_or_404_existing_object(self):
|
||||
# Arrange
|
||||
obj_id = UUID("123e4567-e89b-12d3-a456-426614174000")
|
||||
obj = MockDBClass(obj_id)
|
||||
obj = MockDBClass(id=obj_id)
|
||||
self.db.add(obj)
|
||||
self.db.commit()
|
||||
|
||||
|
||||
Reference in New Issue
Block a user