mirror of
https://github.com/creyD/creyPY.git
synced 2026-04-12 19:30:30 +02:00
Compare commits
116 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
2d6de99585 | ||
|
|
573f59349f | ||
|
|
32bf089456 | ||
|
|
d75fede3d1 | ||
|
|
f8b781b3e7 | ||
|
|
93c7f6f6cb | ||
|
|
2e44453915 | ||
|
|
2a22471de9 | ||
| 2176b1a37d | |||
| 5daddf260e | |||
| 364e07daa1 | |||
| 5daf6eb8c5 | |||
| dfb0588d1c | |||
| 3251afdb90 | |||
| 85fe263da4 | |||
| 0be70deb00 | |||
| 0418c75e19 | |||
| 2444269486 | |||
|
|
33bdeb12a0 | ||
| 5efed5399b | |||
| 7dbce117c8 | |||
| 481bfcfffd | |||
| 90c9d2dc09 | |||
|
|
8b037fbeb5 | ||
| b86b58f3e4 | |||
|
|
17f96c920d | ||
|
|
523241ac4b | ||
|
|
6f09c2ef4c | ||
|
|
9bba5b0a4e | ||
|
|
50031556f9 | ||
|
|
2940ddbdcd | ||
| 807af12fa1 | |||
|
|
dce897c247 | ||
|
|
89997372ef | ||
| c8c5977978 | |||
| 974bc591d6 | |||
| eb895398ab | |||
| 867abd7054 | |||
| 26e18f6b31 | |||
| 8a3a60dbb0 | |||
| e52a5f421b | |||
| a6ded91185 | |||
| eb64874c47 | |||
| b7200852a4 | |||
| 3d18205205 | |||
| 99c84b676c | |||
| 6806de23b3 | |||
| 6a93ab05a3 | |||
|
|
c5b2ab9932 | ||
| 5a32a5908b | |||
| b7df0bfdcd | |||
| 378d1d60f1 | |||
| e381992f8e | |||
| 6d5411a8ae | |||
| 89351d714b | |||
| c24f8933fb | |||
| 0bed0e0da4 | |||
| 8463eef907 | |||
| 5903de2aad | |||
| 0bf89fe14d | |||
| d54146e05b | |||
| d6f79c3ed8 | |||
| 3f4a0ee00d | |||
| 714178d68f | |||
| c7e205f14b | |||
| 39ae74becb | |||
| 5f39966223 | |||
| c91e684f08 | |||
| f11b8b8864 | |||
| 983553e97a | |||
| 8740eafce2 | |||
| aa44b9ebe9 | |||
| 851573d964 | |||
| cfa1da08d3 | |||
| 4a5a777ef5 | |||
| c9a9b1bc0a | |||
| d9f6e82736 | |||
| 65e93a023b | |||
|
|
6ce0cfbd14 | ||
|
|
da7ec0b28e | ||
| 2727c452b6 | |||
| be7d7ddb22 | |||
| 3f0379290d | |||
|
|
fa7a1c8a61 | ||
| 5a7e1776db | |||
| 4c25d91e18 | |||
| f24db62781 | |||
| 4d997a375e | |||
| ee11d86235 | |||
| e47f5f2b07 | |||
| 754a951dc3 | |||
| 8eb04f4b17 | |||
| 140c6e4678 | |||
| 6fc0d01189 | |||
| 10eaa2c0e0 | |||
| b549fd941c | |||
| 2f4e3e7dba | |||
| 38d9a0c177 | |||
| f4c9b7b6b6 | |||
| 5b3389e939 | |||
| 7eabeb3e04 | |||
| 51d4e7e6b8 | |||
| 3fc6ae51c3 | |||
| 525af5b34d | |||
| 2daa8e5a22 | |||
| 4fef25f898 | |||
|
|
351642b0f1 | ||
| 80dfe98a1d | |||
| b2b726ed9a | |||
| 246eccd606 | |||
| fc13dad076 | |||
| 172f47221c | |||
| 65c8203348 | |||
| 515c3372c6 | |||
| 43055dde1b | |||
| 0aaa1dc6a1 |
108
.github/workflows/ci.yml
vendored
Normal file
108
.github/workflows/ci.yml
vendored
Normal file
@@ -0,0 +1,108 @@
|
||||
name: Lint, Test, Tag & Publish
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
- dev
|
||||
paths-ignore:
|
||||
- "**/.gitignore"
|
||||
- "**/.vscode/**"
|
||||
- "**/README.md"
|
||||
- "**/CHANGELOG.md"
|
||||
pull_request:
|
||||
branches:
|
||||
- dev
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: psf/black@stable
|
||||
with:
|
||||
options: "-l 100 --exclude '/.venv/|/__init__.py'"
|
||||
- uses: creyD/autoflake_action@master
|
||||
with:
|
||||
no_commit: True
|
||||
options: --in-place --remove-all-unused-imports -r --exclude **/__init__.py,**/db/models.py,
|
||||
- uses: stefanzweifel/git-auto-commit-action@v5
|
||||
with:
|
||||
commit_message: Adjusted files for isort & autopep
|
||||
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
needs: lint
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.12'
|
||||
- run: python -m pip install --upgrade pip
|
||||
- run: |
|
||||
python -m pip install -r requirements.txt
|
||||
python -m pip install -r requirements.pg.txt
|
||||
python -m pip install -r requirements.auth0.txt
|
||||
- run: python test.py
|
||||
|
||||
tag_and_publish:
|
||||
runs-on: ubuntu-latest
|
||||
if: (github.ref_name == 'master' || github.ref_name == 'dev') && github.event_name == 'push'
|
||||
needs: test
|
||||
permissions:
|
||||
id-token: write # IMPORTANT: this permission is mandatory for trusted publishing
|
||||
contents: write # for the tags
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-tags: true
|
||||
ref: ${{ github.ref_name }}
|
||||
fetch-depth: 0
|
||||
|
||||
- name: setup git
|
||||
run: |
|
||||
git config --local user.email "15138480+creyD@users.noreply.github.com"
|
||||
git config --local user.name "creyD"
|
||||
|
||||
- name: set version format
|
||||
id: version_format
|
||||
run: |
|
||||
if [[ ${{ github.ref_name }} == 'master' ]]; then
|
||||
echo "version_format=\${major}.\${minor}.\${patch}" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "version_format=\${major}.\${minor}.\${patch}rc\${increment}" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Git Version
|
||||
uses: PaulHatch/semantic-version@v5.4.0
|
||||
id: git_version
|
||||
with:
|
||||
tag_prefix: ""
|
||||
major_pattern: "breaking:"
|
||||
minor_pattern: "feat:"
|
||||
enable_prerelease_mode: false
|
||||
version_format: ${{ steps.version_format.outputs.version_format }}
|
||||
|
||||
- name: Create & Push Tag
|
||||
run: |
|
||||
git tag ${{ steps.git_version.outputs.version }}
|
||||
git push origin ${{ steps.git_version.outputs.version }}
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.12'
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install -r requirements.build.txt
|
||||
python setup.py sdist bdist_wheel
|
||||
|
||||
- name: Build and publish
|
||||
uses: pypa/gh-action-pypi-publish@release/v1
|
||||
with:
|
||||
user: __token__
|
||||
password: ${{ secrets.PYPI_API_TOKEN }}
|
||||
46
.github/workflows/lint.yml
vendored
46
.github/workflows/lint.yml
vendored
@@ -1,46 +0,0 @@
|
||||
name: Lint and tag
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
paths-ignore:
|
||||
- "**/.github/**"
|
||||
- "**/.gitignore"
|
||||
- "**/.vscode/**"
|
||||
- "**/README.md"
|
||||
- "**/CHANGELOG.md"
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: psf/black@stable
|
||||
with:
|
||||
options: "-l 100 --exclude '/.venv/|/__init__.py'"
|
||||
- uses: creyD/autoflake_action@master
|
||||
with:
|
||||
no_commit: True
|
||||
options: --in-place --remove-all-unused-imports -r --exclude **/__init__.py,**/db/models.py,
|
||||
- uses: stefanzweifel/git-auto-commit-action@v5
|
||||
with:
|
||||
commit_message: Adjusted files for isort & autopep
|
||||
|
||||
tag:
|
||||
runs-on: ubuntu-latest
|
||||
needs: lint
|
||||
steps:
|
||||
- name: Git Version
|
||||
uses: codacy/git-version@2.8.0
|
||||
id: git_version
|
||||
with:
|
||||
prefix: v
|
||||
minor-identifier: "feat:"
|
||||
major-identifier: "breaking:"
|
||||
|
||||
- name: Create Tag
|
||||
run: git tag -a v${{ steps.git_version.outputs.version }} -m "v${{ steps.git_version.outputs.version }}"
|
||||
|
||||
- name: Push Tag
|
||||
run: git push origin v${{ steps.git_version.outputs.version }}
|
||||
29
.github/workflows/publish.yml
vendored
29
.github/workflows/publish.yml
vendored
@@ -1,29 +0,0 @@
|
||||
name: Publish to pypi
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- '*'
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.12'
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install -r requirements.build.txt
|
||||
|
||||
- name: Build and publish
|
||||
uses: pypa/gh-action-pypi-publish@release/v1
|
||||
with:
|
||||
user: __token__
|
||||
password: ${{ secrets.PYPI_API_TOKEN }}
|
||||
8
.vscode/settings.json
vendored
8
.vscode/settings.json
vendored
@@ -26,10 +26,16 @@
|
||||
"**/db.sqlite3": true,
|
||||
"**/.DS_Store": true,
|
||||
"**/*.pyc": true,
|
||||
"**/__pycache__/": true
|
||||
"**/__pycache__/": true,
|
||||
"**/build": true,
|
||||
"**/dist": true,
|
||||
"**/*.egg-info": true,
|
||||
},
|
||||
"search.exclude": {
|
||||
"**/.git": true,
|
||||
"**/build": true,
|
||||
"**/*.egg-info": true,
|
||||
"**/dist": true,
|
||||
"**/.venv": true,
|
||||
"**/tmp": true,
|
||||
"htmlcov/*": true,
|
||||
|
||||
49
CHANGELOG.md
Normal file
49
CHANGELOG.md
Normal file
@@ -0,0 +1,49 @@
|
||||
# Changelog
|
||||
|
||||
All notable changes to this project will be documented in this file.
|
||||
|
||||
## 2.0.0
|
||||
|
||||
- Fixed #1 Rename misspelled additonal_data to additional_data on create_obj_from_data
|
||||
- Fixed #3 Inverse partial flag: bool = False because it was wrong on update_obj_from_data
|
||||
|
||||
Notes:
|
||||
|
||||
You will need to change calls to `create_obj_from_data` according to #1 (rename additonal_data to additional_data)
|
||||
|
||||
You will need to change calls to `update_obj_from_data` according to #3 (if you supplied `partial`, you will need to reverse it: `true` -> `false` and `false` -> `true`)
|
||||
|
||||
## 1.3.0
|
||||
|
||||
- Addition of pagination proxy and pagination=off functionality (Thanks to @vikbhas)
|
||||
|
||||
## 1.2.5
|
||||
|
||||
- Bumped dependencies
|
||||
|
||||
## 1.2.4
|
||||
|
||||
- Enabled newer versions for all dependencies
|
||||
|
||||
## 1.2.3
|
||||
|
||||
- Added option to specify lookup_column for get_object_or_404
|
||||
|
||||
## 1.2.2
|
||||
|
||||
- Added order_by method
|
||||
|
||||
## 1.1.0
|
||||
|
||||
- Added headers to testing
|
||||
|
||||
## 1.0.0
|
||||
|
||||
- Bumped dependencies
|
||||
- Added const documentation
|
||||
- Added installation instructions and examples to README
|
||||
- Added sqlalchemy session for db connection
|
||||
|
||||
## 0.2.0
|
||||
|
||||
- Added testing client
|
||||
2
LICENSE
2
LICENSE
@@ -1,6 +1,6 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2024 Conrad
|
||||
Copyright (c) 2024 Conrad Großer
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
|
||||
54
README.md
54
README.md
@@ -1,9 +1,57 @@
|
||||
# creyPY
|
||||
|
||||
My collection of Python and FastAPI shortcuts etc.
|
||||
|
||||
## Installation
|
||||
|
||||
# Release
|
||||
```bash
|
||||
pip install creyPY -U
|
||||
```
|
||||
|
||||
``` rm -rf dist build creyPY.egg-info && python setup.py sdist bdist_wheel ```
|
||||
## Versioning
|
||||
|
||||
``` twine upload dist/* ```
|
||||
This library uses [Semantic Versioning](https://semver.org/).
|
||||
|
||||
## FastAPI
|
||||
|
||||
This library installes fastapi and pydantic, as well as sqlalchemy for you. It also provides a sqlalchemy base class and companion pydantic schemas. Also there are some helper functions for FastAPI in `creyPY.fastapi.app` like `generate_unique_id` to generate unique operation IDs for the OpenAPI schema to work with code generators.
|
||||
|
||||
### Database connection
|
||||
|
||||
The `creyPY.fastapi.db` module provides a `Session` class that can be used as a context manager to connect to a database. It exposes the `SQLALCHEMY_DATABASE_URL` variable for you to use. It uses the following environment variables:
|
||||
|
||||
- `POSTGRES_HOST`: The host of the database
|
||||
- `POSTGRES_PORT`: The port of the database
|
||||
- `POSTGRES_USER`: The user of the database
|
||||
- `POSTGRES_PASSWORD`: The password of the database
|
||||
- `POSTGRES_DB`: The database name
|
||||
|
||||
Currently only PostgreSQL is supported. It creates a sync session, it is planned to add async support in the future. You can use this like this:
|
||||
|
||||
```python
|
||||
from creyPY.fastapi.db.session import get_db
|
||||
|
||||
async def test_endpoint(
|
||||
db: Session = Depends(get_db),
|
||||
) -> Any:
|
||||
pass
|
||||
```
|
||||
|
||||
## Constants
|
||||
|
||||
The constants module contains a few enums that I use in my projects. The best way to understand this library is to look at the code (it's not that much). However for simplicity, here is a brief overview:
|
||||
|
||||
- LanguageEnum: Contains all languages according to ISO 639
|
||||
- CountryEnum: Contains all countries according to ISO 3166
|
||||
- CurrencyEnum: Contains all accepted stripe currencies (Commented out are the Zero-decimal currencies, to avoid custom implementation)
|
||||
- StripeStatus: Contains all stripe payment statuses
|
||||
- GroupMode: Contains time group modes (e.g. day, week, month, year)
|
||||
|
||||
### Usage example
|
||||
|
||||
```python
|
||||
from creyPY.const import LanguageEnum
|
||||
|
||||
print(LanguageEnum.EN) # Output: LanguageEnum.EN
|
||||
print(LanguageEnum.EN.value) # Output: English
|
||||
```
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import enum
|
||||
|
||||
|
||||
# Source: https://en.wikipedia.org/wiki/List_of_ISO_3166_country_codes
|
||||
class CountryEnum(str, enum.Enum):
|
||||
AF = "Afghanistan"
|
||||
AX = "land Islands"
|
||||
@@ -248,6 +249,7 @@ class CountryEnum(str, enum.Enum):
|
||||
ZW = "Zimbabwe"
|
||||
|
||||
|
||||
# :: https://en.wikipedia.org/wiki/List_of_ISO_639_language_codes
|
||||
class LanguageEnum(str, enum.Enum):
|
||||
AA = "Afar"
|
||||
AB = "Abkhazian"
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
from .app import * # noqa
|
||||
from .crud import * # noqa
|
||||
from .db import * # noqa
|
||||
from .models import * # noqa
|
||||
from .pagination import * # noqa
|
||||
from .schemas import * # noqa
|
||||
from .testing import * # noqa
|
||||
|
||||
@@ -1,58 +1,214 @@
|
||||
from typing import Type, TypeVar
|
||||
from typing import Type, TypeVar, overload
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import HTTPException
|
||||
from pydantic import BaseModel
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.future import select
|
||||
import asyncio
|
||||
from .models.base import Base
|
||||
|
||||
T = TypeVar("T", bound=Base)
|
||||
|
||||
|
||||
def get_object_or_404(db_class: Type[T], id: UUID | str, db: Session, expunge: bool = False) -> T:
|
||||
obj = db.query(db_class).filter(db_class.id == id).one_or_none()
|
||||
if obj is None:
|
||||
raise HTTPException(status_code=404, detail="The object does not exist.")
|
||||
if expunge:
|
||||
db.expunge(obj)
|
||||
return obj
|
||||
@overload
|
||||
async def get_object_or_404(
|
||||
db_class: Type[T],
|
||||
id: UUID | str,
|
||||
db: AsyncSession,
|
||||
expunge: bool = False,
|
||||
lookup_column: str = "id",
|
||||
) -> T:
|
||||
pass
|
||||
|
||||
|
||||
@overload
|
||||
def get_object_or_404(
|
||||
db_class: Type[T], id: UUID | str, db: Session, expunge: bool = False, lookup_column: str = "id"
|
||||
) -> T:
|
||||
pass
|
||||
|
||||
|
||||
def get_object_or_404(
|
||||
db_class: Type[T],
|
||||
id: UUID | str,
|
||||
db: Session | AsyncSession,
|
||||
expunge: bool = False,
|
||||
lookup_column: str = "id",
|
||||
) -> T:
|
||||
|
||||
async def _get_async_object() -> T:
|
||||
query = select(db_class).filter(getattr(db_class, lookup_column) == id)
|
||||
result = await db.execute(query)
|
||||
obj = result.scalar_one_or_none()
|
||||
if obj is None:
|
||||
raise HTTPException(status_code=404, detail="The object does not exist.") # type: ignore
|
||||
if expunge:
|
||||
await db.expunge(obj)
|
||||
return obj
|
||||
|
||||
def _get_sync_object() -> T:
|
||||
obj = db.query(db_class).filter(getattr(db_class, lookup_column) == id).one_or_none()
|
||||
if obj is None:
|
||||
raise HTTPException(status_code=404, detail="The object does not exist.") # type: ignore
|
||||
if expunge:
|
||||
db.expunge(obj)
|
||||
return obj
|
||||
|
||||
if isinstance(db, AsyncSession):
|
||||
return asyncio.ensure_future(_get_async_object()) # type: ignore
|
||||
elif isinstance(db, Session):
|
||||
return _get_sync_object()
|
||||
else:
|
||||
raise HTTPException(status_code=404, detail="Invalid session type. Expected Session or AsyncSession.") # type: ignore
|
||||
|
||||
|
||||
# TODO: Add testing
|
||||
@overload
|
||||
async def create_obj_from_data(
|
||||
data: BaseModel,
|
||||
model: Type[T],
|
||||
db: AsyncSession,
|
||||
additional_data: dict = {},
|
||||
exclude: dict = {},
|
||||
) -> T:
|
||||
pass
|
||||
|
||||
|
||||
@overload
|
||||
def create_obj_from_data(
|
||||
data: BaseModel, model: Type[T], db: Session, additional_data: dict = {}, exclude: dict = {}
|
||||
) -> T:
|
||||
pass
|
||||
|
||||
|
||||
def create_obj_from_data(
|
||||
data: BaseModel, model: Type[T], db: Session, additonal_data={}, exclude={}
|
||||
data: BaseModel, model: Type[T], db: Session | AsyncSession, additional_data={}, exclude={}
|
||||
) -> T:
|
||||
obj = model(**data.model_dump(exclude=exclude) | additonal_data)
|
||||
db.add(obj)
|
||||
db.commit()
|
||||
db.refresh(obj)
|
||||
return obj
|
||||
obj_data = data.model_dump(exclude=exclude) | additional_data
|
||||
obj = model(**obj_data)
|
||||
|
||||
async def _create_async_obj():
|
||||
db.add(obj)
|
||||
await db.commit()
|
||||
await db.refresh(obj)
|
||||
return obj
|
||||
|
||||
def _create_sync_obj():
|
||||
db.add(obj)
|
||||
db.commit()
|
||||
db.refresh(obj)
|
||||
return obj
|
||||
|
||||
if isinstance(db, AsyncSession):
|
||||
return asyncio.ensure_future(_create_async_obj()) # type: ignore
|
||||
elif isinstance(db, Session):
|
||||
return _create_sync_obj()
|
||||
else:
|
||||
raise HTTPException(status_code=404, detail="Invalid session type. Expected Session or AsyncSession.") # type: ignore
|
||||
|
||||
|
||||
# TODO: Add testing
|
||||
@overload
|
||||
async def update_obj_from_data(
|
||||
data: BaseModel,
|
||||
model: Type[T],
|
||||
id: UUID | str,
|
||||
db: AsyncSession,
|
||||
partial: bool = True,
|
||||
ignore_fields: list = [],
|
||||
additional_data: dict = {},
|
||||
exclude: dict = {},
|
||||
) -> T:
|
||||
pass
|
||||
|
||||
|
||||
@overload
|
||||
def update_obj_from_data(
|
||||
data: BaseModel,
|
||||
model: Type[T],
|
||||
id: UUID | str,
|
||||
db: Session,
|
||||
partial: bool = True,
|
||||
ignore_fields: list = [],
|
||||
additional_data: dict = {},
|
||||
exclude: dict = {},
|
||||
) -> T:
|
||||
pass
|
||||
|
||||
|
||||
def update_obj_from_data(
|
||||
data: BaseModel,
|
||||
model: Type[T],
|
||||
id: UUID | str,
|
||||
db: Session,
|
||||
partial: bool = False,
|
||||
db: Session | AsyncSession,
|
||||
partial: bool = True,
|
||||
ignore_fields=[],
|
||||
additional_data={},
|
||||
exclude={},
|
||||
) -> T:
|
||||
obj = get_object_or_404(model, id, db)
|
||||
data_dict = data.model_dump(exclude_unset=not partial, exclude=exclude)
|
||||
data_dict.update(additional_data) # merge additional_data into data_dict
|
||||
for field in data_dict:
|
||||
if field not in ignore_fields:
|
||||
setattr(obj, field, data_dict[field])
|
||||
db.commit()
|
||||
db.refresh(obj)
|
||||
return obj
|
||||
def _update_fields(obj: T):
|
||||
data_dict = data.model_dump(exclude_unset=partial, exclude=exclude)
|
||||
data_dict.update(additional_data)
|
||||
|
||||
for field in data_dict:
|
||||
if field not in ignore_fields:
|
||||
setattr(obj, field, data_dict[field])
|
||||
|
||||
async def _update_async_obj() -> T:
|
||||
obj = await get_object_or_404(model, id, db)
|
||||
_update_fields(obj)
|
||||
await db.commit()
|
||||
await db.refresh(obj)
|
||||
return obj
|
||||
|
||||
def _update_sync_obj() -> T:
|
||||
obj = get_object_or_404(model, id, db)
|
||||
_update_fields(obj)
|
||||
db.commit()
|
||||
db.refresh(obj)
|
||||
return obj
|
||||
|
||||
if isinstance(db, AsyncSession):
|
||||
return asyncio.ensure_future(_update_async_obj()) # type: ignore
|
||||
elif isinstance(db, Session):
|
||||
return _update_sync_obj()
|
||||
else:
|
||||
raise HTTPException(status_code=404, detail="Invalid session type. Expected Session or AsyncSession.") # type: ignore
|
||||
|
||||
|
||||
# TODO: Add testing
|
||||
@overload
|
||||
async def delete_object(db_class: Type[T], id: UUID | str, db: AsyncSession) -> None:
|
||||
pass
|
||||
|
||||
|
||||
@overload
|
||||
def delete_object(db_class: Type[T], id: UUID | str, db: Session) -> None:
|
||||
obj = db.query(db_class).filter(db_class.id == id).one_or_none()
|
||||
if obj is None:
|
||||
raise HTTPException(status_code=404, detail="The object does not exist.")
|
||||
db.delete(obj)
|
||||
db.commit()
|
||||
pass
|
||||
|
||||
|
||||
def delete_object(db_class: Type[T], id: UUID | str, db: Session | AsyncSession) -> None:
|
||||
async def _delete_async_obj() -> None:
|
||||
query = select(db_class).filter(db_class.id == id)
|
||||
result = await db.execute(query)
|
||||
obj = result.scalar_one_or_none()
|
||||
if obj is None:
|
||||
raise HTTPException(status_code=404, detail="The object does not exist.")
|
||||
await db.delete(obj)
|
||||
await db.commit()
|
||||
|
||||
def _delete_sync_obj() -> None:
|
||||
obj = db.query(db_class).filter(db_class.id == id).one_or_none()
|
||||
if obj is None:
|
||||
raise HTTPException(status_code=404, detail="The object does not exist.")
|
||||
db.delete(obj)
|
||||
db.commit()
|
||||
|
||||
if isinstance(db, AsyncSession):
|
||||
return asyncio.ensure_future(_delete_async_obj()) # type: ignore
|
||||
elif isinstance(db, Session):
|
||||
return _delete_sync_obj()
|
||||
else:
|
||||
raise HTTPException(status_code=404, detail="Invalid session type. Expected Session or AsyncSession.") # type: ignore
|
||||
|
||||
3
creyPY/fastapi/db/__init__.py
Normal file
3
creyPY/fastapi/db/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
from .async_session import * # noqa
|
||||
from .helpers import * # noqa
|
||||
from .session import * # noqa
|
||||
23
creyPY/fastapi/db/async_session.py
Normal file
23
creyPY/fastapi/db/async_session.py
Normal file
@@ -0,0 +1,23 @@
|
||||
from typing import AsyncGenerator
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
from .common import SQLALCHEMY_DATABASE_URL, name
|
||||
|
||||
async_engine = create_async_engine(
|
||||
SQLALCHEMY_DATABASE_URL + name, pool_pre_ping=True, connect_args={"sslmode": "require"}
|
||||
)
|
||||
|
||||
AsyncSessionLocal = sessionmaker(
|
||||
bind=async_engine,
|
||||
class_=AsyncSession,
|
||||
expire_on_commit=False,
|
||||
autoflush=False,
|
||||
autocommit=False,
|
||||
)
|
||||
|
||||
|
||||
async def get_async_db() -> AsyncGenerator[AsyncSession, None]:
|
||||
async with AsyncSessionLocal() as db:
|
||||
yield db
|
||||
13
creyPY/fastapi/db/common.py
Normal file
13
creyPY/fastapi/db/common.py
Normal file
@@ -0,0 +1,13 @@
|
||||
import os
|
||||
|
||||
from dotenv import load_dotenv
|
||||
|
||||
load_dotenv()
|
||||
|
||||
host = os.getenv("POSTGRES_HOST", "localhost")
|
||||
user = os.getenv("POSTGRES_USER", "postgres")
|
||||
password = os.getenv("POSTGRES_PASSWORD", "root")
|
||||
port = os.getenv("POSTGRES_PORT", "5432")
|
||||
name = os.getenv("POSTGRES_DB", "fastapi")
|
||||
|
||||
SQLALCHEMY_DATABASE_URL = f"postgresql+psycopg://{user}:{password}@{host}:{port}/"
|
||||
8
creyPY/fastapi/db/helpers.py
Normal file
8
creyPY/fastapi/db/helpers.py
Normal file
@@ -0,0 +1,8 @@
|
||||
from sqlalchemy_utils import create_database, database_exists
|
||||
|
||||
|
||||
def create_if_not_exists(db_name: str):
|
||||
from .common import SQLALCHEMY_DATABASE_URL
|
||||
|
||||
if not database_exists(SQLALCHEMY_DATABASE_URL + db_name):
|
||||
create_database(SQLALCHEMY_DATABASE_URL + db_name)
|
||||
17
creyPY/fastapi/db/session.py
Normal file
17
creyPY/fastapi/db/session.py
Normal file
@@ -0,0 +1,17 @@
|
||||
from typing import Generator
|
||||
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from sqlalchemy.orm.session import Session
|
||||
|
||||
from .common import SQLALCHEMY_DATABASE_URL, name
|
||||
|
||||
engine = create_engine(
|
||||
SQLALCHEMY_DATABASE_URL + name, pool_pre_ping=True, connect_args={"sslmode": "require"}
|
||||
)
|
||||
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
||||
|
||||
|
||||
def get_db() -> Generator[Session, None, None]:
|
||||
with SessionLocal() as db:
|
||||
yield db
|
||||
@@ -19,6 +19,9 @@ class Base:
|
||||
|
||||
__name__: str
|
||||
|
||||
# TODO: Add default representation string
|
||||
# TODO: Add automated foreign key resolution
|
||||
|
||||
# Generate __tablename__ automatically
|
||||
@declared_attr
|
||||
def __tablename__(cls) -> str:
|
||||
|
||||
25
creyPY/fastapi/order_by.py
Normal file
25
creyPY/fastapi/order_by.py
Normal file
@@ -0,0 +1,25 @@
|
||||
from typing import Callable
|
||||
|
||||
from pydantic.json_schema import SkipJsonSchema
|
||||
from sqlalchemy import String, asc, cast, desc
|
||||
from sqlalchemy.dialects.postgresql import UUID
|
||||
from sqlalchemy.sql.selectable import Select
|
||||
|
||||
|
||||
def order_by(order_by: str | SkipJsonSchema[None] = None) -> Callable[[Select], Select]:
|
||||
def _order_by(query: Select) -> Select:
|
||||
if order_by:
|
||||
direction = desc if order_by.startswith("-") else asc
|
||||
column_name = order_by.lstrip("-")
|
||||
|
||||
# Get the column from the query
|
||||
for column in query.inner_columns:
|
||||
if column.key == column_name:
|
||||
# If the column is a UUID, cast it to a string
|
||||
if isinstance(column.type, UUID):
|
||||
column = cast(column, String)
|
||||
query = query.order_by(direction(column))
|
||||
break
|
||||
return query
|
||||
|
||||
return _order_by
|
||||
@@ -1,14 +1,47 @@
|
||||
from math import ceil
|
||||
from typing import Any, Generic, Optional, Self, Sequence, TypeVar
|
||||
|
||||
from typing import Any, Generic, Optional, Self, Sequence, TypeVar, Union, overload
|
||||
from contextlib import suppress
|
||||
from pydantic import BaseModel
|
||||
from fastapi_pagination import Params
|
||||
from fastapi_pagination.bases import AbstractPage, AbstractParams
|
||||
from fastapi_pagination.types import GreaterEqualOne, GreaterEqualZero
|
||||
from fastapi_pagination.types import (
|
||||
GreaterEqualOne,
|
||||
GreaterEqualZero,
|
||||
AdditionalData,
|
||||
SyncItemsTransformer,
|
||||
AsyncItemsTransformer,
|
||||
ItemsTransformer,
|
||||
)
|
||||
from fastapi_pagination.api import create_page, apply_items_transformer
|
||||
from fastapi_pagination.utils import verify_params
|
||||
from fastapi_pagination.ext.sqlalchemy import create_paginate_query
|
||||
from fastapi_pagination.bases import AbstractParams, RawParams
|
||||
from pydantic.json_schema import SkipJsonSchema
|
||||
from sqlalchemy.sql.selectable import Select
|
||||
from sqlalchemy.orm.session import Session
|
||||
from sqlalchemy import select, func
|
||||
from sqlalchemy.ext.asyncio import AsyncSession, async_scoped_session
|
||||
from fastapi import Query
|
||||
from sqlalchemy.util import await_only, greenlet_spawn
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
|
||||
class PaginationParams(BaseModel, AbstractParams):
|
||||
page: int = Query(1, ge=1, description="Page number")
|
||||
size: int = Query(50, ge=1, le=100, description="Page size")
|
||||
pagination: bool = Query(True, description="Toggle pagination")
|
||||
|
||||
def to_raw_params(self) -> RawParams:
|
||||
if not self.pagination:
|
||||
return RawParams(limit=None, offset=None)
|
||||
|
||||
return RawParams(limit=self.size, offset=(self.page - 1) * self.size)
|
||||
|
||||
|
||||
# TODO: Add complete fastapi-pagination proxy here
|
||||
# TODO: Add pagination off functionality
|
||||
# SkipJsonSchema is used to avoid generating invalid JSON schema in FastAPI
|
||||
class Page(AbstractPage[T], Generic[T]):
|
||||
results: Sequence[T]
|
||||
page: GreaterEqualOne | SkipJsonSchema[None] = None
|
||||
@@ -18,7 +51,7 @@ class Page(AbstractPage[T], Generic[T]):
|
||||
has_next: bool | SkipJsonSchema[None] = None
|
||||
has_prev: bool | SkipJsonSchema[None] = None
|
||||
|
||||
__params_type__ = Params
|
||||
__params_type__ = PaginationParams
|
||||
|
||||
@classmethod
|
||||
def create(
|
||||
@@ -67,3 +100,105 @@ def parse_page(response, page: int, size: int) -> Page:
|
||||
has_next=response.has_next,
|
||||
has_prev=response.has_prev,
|
||||
)
|
||||
|
||||
|
||||
def create_count_query(query: Select) -> Select:
|
||||
return select(func.count()).select_from(query.subquery())
|
||||
|
||||
|
||||
def unwrap_scalars(
|
||||
items: Sequence[Sequence[T]],
|
||||
force_unwrap: bool = True,
|
||||
) -> Union[Sequence[T], Sequence[Sequence[T]]]:
|
||||
return [item[0] if force_unwrap else item for item in items]
|
||||
|
||||
|
||||
def _get_sync_conn_from_async(conn: Any) -> Session: # pragma: no cover
|
||||
if isinstance(conn, async_scoped_session):
|
||||
conn = conn()
|
||||
|
||||
with suppress(AttributeError):
|
||||
return conn.sync_session # type: ignore
|
||||
|
||||
with suppress(AttributeError):
|
||||
return conn.sync_connection # type: ignore
|
||||
|
||||
raise TypeError("conn must be an AsyncConnection or AsyncSession")
|
||||
|
||||
|
||||
@overload
|
||||
def paginate(
|
||||
connection: Session,
|
||||
query: Select,
|
||||
params: Optional[AbstractParams] = None,
|
||||
transformer: Optional[SyncItemsTransformer] = None,
|
||||
additional_data: Optional[AdditionalData] = None,
|
||||
) -> Any:
|
||||
pass
|
||||
|
||||
|
||||
@overload
|
||||
async def paginate(
|
||||
connection: AsyncSession,
|
||||
query: Select,
|
||||
params: Optional[AbstractParams] = None,
|
||||
transformer: Optional[AsyncItemsTransformer] = None,
|
||||
additional_data: Optional[AdditionalData] = None,
|
||||
) -> Any:
|
||||
pass
|
||||
|
||||
|
||||
def _paginate(
|
||||
connection: Session,
|
||||
query: Select,
|
||||
params: Optional[AbstractParams] = None,
|
||||
transformer: Optional[ItemsTransformer] = None,
|
||||
additional_data: Optional[AdditionalData] = None,
|
||||
async_: bool = False,
|
||||
):
|
||||
|
||||
if async_:
|
||||
|
||||
def _apply_items_transformer(*args: Any, **kwargs: Any) -> Any:
|
||||
return await_only(apply_items_transformer(*args, **kwargs, async_=True))
|
||||
|
||||
else:
|
||||
_apply_items_transformer = apply_items_transformer
|
||||
|
||||
params, raw_params = verify_params(params, "limit-offset", "cursor")
|
||||
count_query = create_count_query(query)
|
||||
total = connection.scalar(count_query)
|
||||
|
||||
if params.pagination is False and total > 0:
|
||||
params = Params(page=1, size=total)
|
||||
else:
|
||||
params = Params(page=params.page, size=params.size)
|
||||
|
||||
query = create_paginate_query(query, params)
|
||||
items = connection.execute(query).all()
|
||||
|
||||
items = unwrap_scalars(items)
|
||||
t_items = _apply_items_transformer(items, transformer)
|
||||
|
||||
return create_page(
|
||||
t_items,
|
||||
params=params,
|
||||
total=total,
|
||||
**(additional_data or {}),
|
||||
)
|
||||
|
||||
|
||||
def paginate(
|
||||
connection: Session,
|
||||
query: Select,
|
||||
params: Optional[AbstractParams] = None,
|
||||
transformer: Optional[ItemsTransformer] = None,
|
||||
additional_data: Optional[AdditionalData] = None,
|
||||
):
|
||||
if isinstance(connection, AsyncSession):
|
||||
connection = _get_sync_conn_from_async(connection)
|
||||
return greenlet_spawn(
|
||||
_paginate, connection, query, params, transformer, additional_data, async_=True
|
||||
)
|
||||
|
||||
return _paginate(connection, query, params, transformer, additional_data, async_=False)
|
||||
|
||||
@@ -4,6 +4,7 @@ from uuid import UUID
|
||||
from pydantic import BaseModel, ConfigDict
|
||||
|
||||
|
||||
# The created_by_id is a string because we use the sub from Auth0
|
||||
class BaseSchemaModelIN(BaseModel):
|
||||
created_by_id: str
|
||||
model_config = ConfigDict(from_attributes=True)
|
||||
|
||||
138
creyPY/fastapi/testing.py
Normal file
138
creyPY/fastapi/testing.py
Normal file
@@ -0,0 +1,138 @@
|
||||
import json
|
||||
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
|
||||
class GenericClient(TestClient):
    """Assertion-oriented wrapper around FastAPI's ``TestClient`` for tests.

    Every verb helper attaches ``default_headers``, asserts the expected
    status code (printing the response body on mismatch so the server error
    is visible in test output) and returns the parsed JSON payload by
    default.
    """

    def __init__(self, client):
        # NOTE(review): TestClient.__init__ is intentionally not called; the
        # wrapped client in ``self.c`` does all the work. Confirm whether
        # subclassing TestClient is still required by callers.
        self.c = TestClient(client)
        self.default_headers = {}

    @staticmethod
    def _assert_status(re, r_code: int) -> None:
        """Assert the response has status ``r_code``; dump the body on mismatch."""
        if re.status_code != r_code:
            print(re.content)
        assert r_code == re.status_code

    def _request_json(self, method: str, url: str, obj, r_code: int,
                      raw_response: bool, headers: dict, *args, **kwargs):
        """Shared JSON-body implementation for post/patch/put."""
        # Mutable-default fix: ``None`` stands in for the old ``{}`` default.
        if obj is None:
            obj = {}
        payload = json.dumps(obj) if isinstance(obj, dict) else obj
        re = getattr(self.c, method)(
            url,
            data=payload,
            headers=self.default_headers | headers,
            *args,
            **kwargs,
        )
        self._assert_status(re, r_code)
        return re.json() if not raw_response else re

    def get(self, url: str, r_code: int = 200, parse_json=True):
        """GET ``url``; return parsed JSON, or raw bytes when ``parse_json`` is False."""
        re = self.c.get(url, headers=self.default_headers)
        self._assert_status(re, r_code)
        return re.json() if parse_json else re.content

    def delete(self, url: str, r_code: int = 204):
        """DELETE ``url``; return JSON unless the expected code is 204 (no body)."""
        re = self.c.delete(url, headers=self.default_headers)
        self._assert_status(re, r_code)
        return re.json() if r_code != 204 else None

    def post(
        self, url: str, obj: dict | str | None = None, r_code: int = 201, raw_response=False, *args, **kwargs
    ):
        """POST ``obj`` as a JSON body (defaults to ``{}``)."""
        return self._request_json(
            "post", url, obj, r_code, raw_response,
            {"Content-Type": "application/json"}, *args, **kwargs
        )

    def post_file(self, url: str, file, r_code: int = 201, raw_response=False, *args, **kwargs):
        """POST a multipart upload under the ``file`` form field."""
        re = self.c.post(
            url,
            files={"file": file},
            headers=self.default_headers,
            *args,
            **kwargs,
        )
        self._assert_status(re, r_code)
        return re.json() if not raw_response else re

    def patch(
        self, url: str, obj: dict | str | None = None, r_code: int = 200, raw_response=False, *args, **kwargs
    ):
        """PATCH ``obj`` as a JSON body (defaults to ``{}``)."""
        return self._request_json(
            "patch", url, obj, r_code, raw_response,
            {"Content-Type": "application/json"}, *args, **kwargs
        )

    def put(
        self, url: str, obj: dict | str | None = None, r_code: int = 200, raw_response=False, *args, **kwargs
    ):
        """PUT ``obj`` as a JSON body (defaults to ``{}``) with an explicit accept header."""
        return self._request_json(
            "put", url, obj, r_code, raw_response,
            {"Content-Type": "application/json", "accept": "application/json"},
            *args, **kwargs
        )

    def obj_lifecycle(
        self,
        input_obj: dict,
        url: str,
        pagination: bool = True,
        id_field: str = "id",
        created_at_check: bool = True,
    ):
        """Exercise the full CRUD lifecycle of one object against ``url``.

        Asserts: empty list → create → fetch by id → list of one → delete →
        empty list → 404 on refetch.
        """

        def check_list(re, expected: int) -> None:
            # Paginated endpoints wrap items in {"total": n, "results": [...]}.
            if pagination:
                assert re["total"] == expected
                assert len(re["results"]) == expected
            else:
                assert len(re) == expected

        # GET LIST — collection starts empty.
        check_list(self.get(url), 0)

        # CREATE
        re = self.post(url, obj=input_obj)
        assert id_field in re
        assert re[id_field] is not None

        if created_at_check:
            assert "created_at" in re
            assert re["created_at"] is not None

        obj_id = str(re[id_field])

        # GET — single object by id.
        re = self.get(f"{url}{obj_id}/")
        assert re[id_field] == obj_id

        # GET LIST — now exactly one element.
        check_list(self.get(url), 1)

        # DELETE
        self.delete(f"{url}{obj_id}")

        # GET LIST — empty again.
        check_list(self.get(url), 0)

        # GET — object is gone.
        self.get(f"{url}{obj_id}", parse_json=False, r_code=404)
|
||||
143
creyPY/fastapi/testing_async.py
Normal file
143
creyPY/fastapi/testing_async.py
Normal file
@@ -0,0 +1,143 @@
|
||||
import json
|
||||
|
||||
from httpx import ASGITransport, AsyncClient
|
||||
|
||||
|
||||
class AsyncGenericClient:
    """Async counterpart of the sync test-client wrapper, built on httpx.

    Every verb helper attaches ``default_headers``, asserts the expected
    status code (printing the body on mismatch) and returns parsed JSON by
    default.
    """

    def __init__(self, app, headers=None):
        self.c = AsyncClient(
            transport=ASGITransport(app=app), base_url="http://testserver", follow_redirects=True
        )
        # Mutable-default fix: ``headers={}`` used to share one dict across
        # every instance created without explicit headers.
        self.default_headers = headers if headers is not None else {}

    @staticmethod
    def _assert_status(re, r_code: int) -> None:
        """Assert the response has status ``r_code``; dump the body on mismatch."""
        if re.status_code != r_code:
            print(re.content)
        assert r_code == re.status_code

    async def get(self, url: str, r_code: int = 200, parse_json=True):
        """GET ``url``; return parsed JSON, or raw bytes when ``parse_json`` is False."""
        re = await self.c.get(url, headers=self.default_headers)
        self._assert_status(re, r_code)
        return re.json() if parse_json else re.content

    async def delete(self, url: str, r_code: int = 204):
        """DELETE ``url``; return JSON unless the expected code is 204 (no body)."""
        re = await self.c.delete(url, headers=self.default_headers)
        self._assert_status(re, r_code)
        return re.json() if r_code != 204 else None

    async def post(
        self, url: str, obj: dict | str | None = None, r_code: int = 201, raw_response=False, *args, **kwargs
    ):
        """POST ``obj`` as a JSON body (defaults to ``{}``).

        NOTE(review): unlike patch/put, the status assertion is skipped when
        ``raw_response`` is True — preserved as-is; confirm the asymmetry is
        intentional.
        """
        obj = {} if obj is None else obj  # mutable-default fix
        re = await self.c.post(
            url,
            data=json.dumps(obj) if isinstance(obj, dict) else obj,
            headers=self.default_headers | {"Content-Type": "application/json"},
            *args,
            **kwargs,
        )
        if re.status_code != r_code:
            print(re.content)
        if not raw_response:
            assert r_code == re.status_code
        return re.json() if not raw_response else re

    async def post_file(
        self, url: str, file, r_code: int = 201, raw_response=False, *args, **kwargs
    ):
        """POST a multipart upload under the ``file`` form field."""
        re = await self.c.post(
            url,
            files={"file": file},
            headers=self.default_headers,
            *args,
            **kwargs,
        )
        self._assert_status(re, r_code)
        return re.json() if not raw_response else re

    async def _send_json(self, method: str, url: str, obj, r_code: int,
                         raw_response: bool, headers: dict, *args, **kwargs):
        """Shared JSON-body implementation for patch/put."""
        obj = {} if obj is None else obj  # mutable-default fix
        re = await getattr(self.c, method)(
            url,
            data=json.dumps(obj) if isinstance(obj, dict) else obj,
            headers=self.default_headers | headers,
            *args,
            **kwargs,
        )
        self._assert_status(re, r_code)
        return re.json() if not raw_response else re

    async def patch(
        self, url: str, obj: dict | str | None = None, r_code: int = 200, raw_response=False, *args, **kwargs
    ):
        """PATCH ``obj`` as a JSON body (defaults to ``{}``)."""
        return await self._send_json(
            "patch", url, obj, r_code, raw_response,
            {"Content-Type": "application/json"}, *args, **kwargs
        )

    async def put(
        self, url: str, obj: dict | str | None = None, r_code: int = 200, raw_response=False, *args, **kwargs
    ):
        """PUT ``obj`` as a JSON body (defaults to ``{}``) with an explicit accept header."""
        return await self._send_json(
            "put", url, obj, r_code, raw_response,
            {"Content-Type": "application/json", "accept": "application/json"},
            *args, **kwargs
        )

    async def obj_lifecycle(
        self,
        input_obj: dict,
        url: str,
        pagination: bool = True,
        id_field: str = "id",
        created_at_check: bool = True,
    ):
        """Exercise the full CRUD lifecycle of one object against ``url``.

        Asserts: empty list → create → fetch by id → list of one → delete →
        empty list → 404 on refetch.
        """

        def check_list(re, expected: int) -> None:
            # Paginated endpoints wrap items in {"total": n, "results": [...]}.
            if pagination:
                assert re["total"] == expected
                assert len(re["results"]) == expected
            else:
                assert len(re) == expected

        # GET LIST — collection starts empty.
        check_list(await self.get(url), 0)

        # CREATE
        re = await self.post(url, obj=input_obj)
        assert id_field in re
        assert re[id_field] is not None

        if created_at_check:
            assert "created_at" in re
            assert re["created_at"] is not None

        obj_id = str(re[id_field])

        # GET — single object by id.
        re = await self.get(f"{url}{obj_id}/")
        assert re[id_field] == obj_id

        # GET LIST — now exactly one element.
        check_list(await self.get(url), 1)

        # DELETE
        await self.delete(f"{url}{obj_id}")

        # GET LIST — empty again.
        check_list(await self.get(url), 0)

        # GET — object is gone.
        await self.get(f"{url}{obj_id}", parse_json=False, r_code=404)
|
||||
16
creyPY/helpers.py
Normal file
16
creyPY/helpers.py
Normal file
@@ -0,0 +1,16 @@
|
||||
import secrets
|
||||
import string
|
||||
|
||||
|
||||
def create_random_password(length: int = 12) -> str:
    """Return a cryptographically secure random password of ``length`` chars.

    Guarantees at least one lowercase letter, one uppercase letter, one
    digit, and one punctuation character; fills the remainder from the full
    pool and shuffles so the guaranteed characters are not in predictable
    positions.

    Raises:
        ValueError: if ``length`` is smaller than 4, since the four required
            character classes cannot all fit. (Previously the function
            silently returned a 4-character password regardless.)
    """
    if length < 4:
        raise ValueError("length must be at least 4 to fit all character classes")

    all_characters = string.ascii_letters + string.digits + string.punctuation

    # One guaranteed character from each class.
    password = [
        secrets.choice(string.ascii_lowercase),
        secrets.choice(string.ascii_uppercase),
        secrets.choice(string.digits),
        secrets.choice(string.punctuation),
    ]
    password += [secrets.choice(all_characters) for _ in range(length - 4)]
    # Shuffle with a CSPRNG so class positions leak no structure.
    secrets.SystemRandom().shuffle(password)
    return "".join(password)
|
||||
1
creyPY/services/__init__.py
Normal file
1
creyPY/services/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
from .auth0 import * # noqa
|
||||
3
creyPY/services/auth0/__init__.py
Normal file
3
creyPY/services/auth0/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
from .exceptions import * # noqa
|
||||
from .manage import * # noqa
|
||||
from .utils import * # noqa
|
||||
13
creyPY/services/auth0/common.py
Normal file
13
creyPY/services/auth0/common.py
Normal file
@@ -0,0 +1,13 @@
|
||||
import os

from dotenv import load_dotenv

# Pull AUTH0_* settings from a local .env file into the process environment.
load_dotenv()

# Machine-to-machine (management API) application credentials.
AUTH0_DOMAIN = os.getenv("AUTH0_DOMAIN")
AUTH0_CLIENT_ID = os.getenv("AUTH0_CLIENT_ID")
AUTH0_CLIENT_SECRET = os.getenv("AUTH0_CLIENT_SECRET")
# NOTE(review): name is misspelled ("ALGORIGHM") but is imported elsewhere
# under this exact name — renaming would break callers.
AUTH0_ALGORIGHM = os.getenv("AUTH0_ALGORIGHM", "RS256")

# Token-validation settings for incoming API JWTs.
AUTH0_AUDIENCE = os.getenv("AUTH0_AUDIENCE")
AUTH0_ISSUER = os.getenv("AUTH0_ISSUER")
|
||||
12
creyPY/services/auth0/exceptions.py
Normal file
12
creyPY/services/auth0/exceptions.py
Normal file
@@ -0,0 +1,12 @@
|
||||
from fastapi import HTTPException, status
|
||||
|
||||
|
||||
class UnauthorizedException(HTTPException):
    """Raised when an authenticated user lacks permission (HTTP 403)."""

    def __init__(self, detail: str, **kwargs):
        """Returns HTTP 403.

        Bug fix: ``**kwargs`` (e.g. ``headers=``) used to be accepted but
        silently discarded; they are now forwarded to ``HTTPException``.
        """
        super().__init__(status.HTTP_403_FORBIDDEN, detail=detail, **kwargs)
|
||||
|
||||
|
||||
class UnauthenticatedException(HTTPException):
    """Raised when a request carries no usable credentials (HTTP 401)."""

    def __init__(self):
        super().__init__(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Requires authentication",
        )
|
||||
21
creyPY/services/auth0/manage.py
Normal file
21
creyPY/services/auth0/manage.py
Normal file
@@ -0,0 +1,21 @@
|
||||
import requests
|
||||
from cachetools import TTLCache, cached
|
||||
|
||||
from .common import AUTH0_CLIENT_ID, AUTH0_CLIENT_SECRET, AUTH0_DOMAIN
|
||||
|
||||
cache = TTLCache(maxsize=100, ttl=600)
|
||||
|
||||
|
||||
@cached(cache)
def get_management_token() -> str:
    """Fetch (and TTL-cache) an Auth0 Management API access token.

    Uses the client-credentials grant against the tenant's ``/oauth/token``
    endpoint; the TTL cache avoids re-authenticating on every management
    call.

    Raises:
        requests.HTTPError: if the token endpoint returns an error status.
        KeyError: if the response unexpectedly lacks ``access_token``.
    """
    response = requests.post(
        f"https://{AUTH0_DOMAIN}/oauth/token",
        json={
            "client_id": AUTH0_CLIENT_ID,
            "client_secret": AUTH0_CLIENT_SECRET,
            "audience": f"https://{AUTH0_DOMAIN}/api/v2/",  # This should be the management audience
            "grant_type": "client_credentials",
        },
        timeout=5,  # Add a timeout parameter to avoid hanging requests
    )
    # Bug fix: surface HTTP errors here instead of caching a broken payload
    # and failing later with an opaque KeyError on "access_token".
    response.raise_for_status()
    return response.json()["access_token"]
|
||||
136
creyPY/services/auth0/utils.py
Normal file
136
creyPY/services/auth0/utils.py
Normal file
@@ -0,0 +1,136 @@
|
||||
from typing import Optional
|
||||
|
||||
import jwt
|
||||
import requests
|
||||
from fastapi import HTTPException, Request, Security
|
||||
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
|
||||
|
||||
from creyPY.helpers import create_random_password
|
||||
|
||||
from .common import (
|
||||
AUTH0_ALGORIGHM,
|
||||
AUTH0_AUDIENCE,
|
||||
AUTH0_CLIENT_ID,
|
||||
AUTH0_DOMAIN,
|
||||
AUTH0_ISSUER,
|
||||
)
|
||||
from .exceptions import UnauthenticatedException, UnauthorizedException
|
||||
from .manage import get_management_token
|
||||
|
||||
JWKS_CLIENT = jwt.PyJWKClient(f"https://{AUTH0_DOMAIN}/.well-known/jwks.json")
|
||||
|
||||
|
||||
async def verify(
    request: Request,
    token: Optional[HTTPAuthorizationCredentials] = Security(HTTPBearer(auto_error=False)),
) -> str:
    """FastAPI dependency: validate the bearer JWT and return its ``sub`` claim.

    Raises:
        UnauthenticatedException: when no bearer token is supplied.
        UnauthorizedException: when the signing key cannot be resolved or
            the token fails decoding/validation.
    """
    if token is None:
        raise UnauthenticatedException

    # Resolve the signing key via the token's 'kid' header against the JWKS.
    try:
        signing_key = JWKS_CLIENT.get_signing_key_from_jwt(token.credentials).key
    except (jwt.exceptions.PyJWKClientError, jwt.exceptions.DecodeError) as error:
        # Consistency fix: the two previously identical handlers are merged;
        # ``from error`` preserves the original traceback for debugging.
        raise UnauthorizedException(str(error)) from error

    try:
        payload = jwt.decode(
            token.credentials,
            signing_key,
            algorithms=[AUTH0_ALGORIGHM],
            audience=AUTH0_AUDIENCE,
            issuer=AUTH0_ISSUER,
        )
    except Exception as error:
        raise UnauthorizedException(str(error)) from error

    return payload["sub"]
|
||||
|
||||
|
||||
### GENERIC AUTH0 CALLS ###
|
||||
def get_user(sub) -> dict:
    """Fetch the full Auth0 profile for the user identified by ``sub``."""
    url = f"https://{AUTH0_DOMAIN}/api/v2/users/{sub}"
    auth_headers = {"Authorization": f"Bearer {get_management_token()}"}
    re = requests.get(url, headers=auth_headers, timeout=5)
    if re.status_code == 200:
        return re.json()
    # Mirror the Auth0 error back to the API caller.
    raise HTTPException(re.status_code, re.json())
|
||||
|
||||
|
||||
def patch_user(input_obj: dict, sub) -> dict:
    """PATCH the Auth0 user ``sub`` with ``input_obj`` and return the updated profile."""
    url = f"https://{AUTH0_DOMAIN}/api/v2/users/{sub}"
    auth_headers = {"Authorization": f"Bearer {get_management_token()}"}
    re = requests.patch(url, headers=auth_headers, json=input_obj, timeout=5)
    if re.status_code == 200:
        return re.json()
    # Mirror the Auth0 error back to the API caller.
    raise HTTPException(re.status_code, re.json())
|
||||
|
||||
|
||||
### USER METADATA CALLS ###
|
||||
def get_user_metadata(sub) -> dict:
    """Return the ``user_metadata`` dict for ``sub``; ``{}`` if unavailable.

    Best-effort by design: lookup failures yield an empty dict rather than
    propagating to the caller.
    """
    try:
        return get_user(sub).get("user_metadata", {})
    except Exception:
        # Bug fix: the previous bare ``except:`` also swallowed SystemExit
        # and KeyboardInterrupt; ``Exception`` keeps the best-effort
        # behaviour without trapping interpreter-control exceptions.
        return {}
|
||||
|
||||
|
||||
def patch_user_metadata(input_obj: dict, sub) -> dict:
    """Write ``input_obj`` as the ``user_metadata`` of user ``sub`` via the Auth0 API."""
    payload = {"user_metadata": input_obj}
    return patch_user(payload, sub)
|
||||
|
||||
|
||||
def clear_user_metadata(sub) -> dict:
    """Reset the ``user_metadata`` of user ``sub`` to an empty object."""
    empty_metadata: dict = {}
    return patch_user({"user_metadata": empty_metadata}, sub)
|
||||
|
||||
|
||||
def request_verification_mail(sub: str) -> dict:
    """Trigger Auth0 to send a verification e-mail job for user ``sub``.

    Returns the created job description from the Auth0 jobs endpoint.

    Raises:
        HTTPException: mirroring the Auth0 error when the job is not created.
    """
    re = requests.post(
        f"https://{AUTH0_DOMAIN}/api/v2/jobs/verification-email",
        headers={"Authorization": f"Bearer {get_management_token()}"},
        json={"user_id": sub},
        timeout=5,
    )
    if re.status_code != 201:
        raise HTTPException(re.status_code, re.json())
    # Bug fix: the annotation said ``-> None`` but the function has always
    # returned the job JSON; the annotation now matches the behaviour.
    return re.json()
|
||||
|
||||
|
||||
def create_user_invite(email: str) -> dict:
    """Create a DB-connection Auth0 user as an invite and return the profile.

    The account receives a throwaway random password (the invitee is expected
    to reset it); ``invitedToMyApp`` is stamped into ``app_metadata``.
    """
    payload = {
        "email": email,
        "connection": "Username-Password-Authentication",
        "password": create_random_password(),
        "verify_email": False,
        "app_metadata": {"invitedToMyApp": True},
    }
    re = requests.post(
        f"https://{AUTH0_DOMAIN}/api/v2/users",
        headers={"Authorization": f"Bearer {get_management_token()}"},
        json=payload,
        timeout=5,
    )
    if re.status_code == 201:
        return re.json()
    raise HTTPException(re.status_code, re.json())
|
||||
|
||||
|
||||
def password_change_mail(email: str) -> bool:
    """Ask Auth0 to send a password-change e-mail to ``email``; True on success."""
    payload = {
        "client_id": AUTH0_CLIENT_ID,
        "email": email,
        "connection": "Username-Password-Authentication",
    }
    re = requests.post(
        f"https://{AUTH0_DOMAIN}/dbconnections/change_password",
        headers={"Authorization": f"Bearer {get_management_token()}"},
        json=payload,
        timeout=5,
    )
    if re.status_code == 200:
        return True
    raise HTTPException(re.status_code, re.json())
|
||||
7
renovate.json
Normal file
7
renovate.json
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
|
||||
"extends": [
|
||||
"config:recommended",
|
||||
":semanticCommitTypeAll(feat)"
|
||||
]
|
||||
}
|
||||
7
requirements.auth0.txt
Normal file
7
requirements.auth0.txt
Normal file
@@ -0,0 +1,7 @@
|
||||
cachetools>=5.5.0 # for caching
|
||||
charset-normalizer>=3.4.0 # Auth0 API interactions
|
||||
requests>=2.32.3 # Auth0 API interactions
|
||||
pyjwt>=2.10.1 # Auth0 API interactions
|
||||
cffi>=1.17.1 # Auth0 API interactions
|
||||
cryptography>=43.0.3 # Auth0 API interactions
|
||||
pycparser>=2.22 # Auth0 API interactions
|
||||
@@ -1,25 +1,25 @@
|
||||
certifi==2024.2.2
|
||||
charset-normalizer==3.3.2
|
||||
docutils==0.20.1
|
||||
idna==3.6
|
||||
importlib_metadata==7.1.0
|
||||
jaraco.classes==3.4.0
|
||||
jaraco.context==4.3.0
|
||||
jaraco.functools==4.0.0
|
||||
keyring==25.0.0
|
||||
markdown-it-py==3.0.0
|
||||
mdurl==0.1.2
|
||||
more-itertools==10.2.0
|
||||
nh3==0.2.17
|
||||
pkginfo==1.10.0
|
||||
Pygments==2.17.2
|
||||
readme_renderer==43.0
|
||||
requests==2.31.0
|
||||
requests-toolbelt==1.0.0
|
||||
rfc3986==2.0.0
|
||||
rich==13.7.1
|
||||
setuptools==69.2.0
|
||||
twine==5.0.0
|
||||
urllib3==2.2.1
|
||||
wheel==0.43.0
|
||||
zipp==3.18.1
|
||||
certifi>=2024.2.2
|
||||
charset-normalizer>=3.3.2
|
||||
docutils>=0.20.1
|
||||
idna>=3.6
|
||||
importlib_metadata>=7.1.0
|
||||
jaraco.classes>=3.4.0
|
||||
jaraco.context>=4.3.0
|
||||
jaraco.functools>=4.0.0
|
||||
keyring>=25.0.0
|
||||
markdown-it-py>=3.0.0
|
||||
mdurl>=0.1.2
|
||||
more-itertools>=10.2.0
|
||||
nh3>=0.2.17
|
||||
pkginfo>=1.10.0
|
||||
Pygments>=2.17.2
|
||||
readme_renderer>=43.0
|
||||
requests>=2.31.0
|
||||
requests-toolbelt>=1.0.0
|
||||
rfc3986>=2.0.0
|
||||
rich>=13.7.1
|
||||
setuptools>=69.2.0
|
||||
twine>=5.0.0
|
||||
urllib3>=2.2.1
|
||||
wheel>=0.43.0
|
||||
zipp>=3.18.1
|
||||
|
||||
5
requirements.pg.txt
Normal file
5
requirements.pg.txt
Normal file
@@ -0,0 +1,5 @@
|
||||
psycopg>=3.2.1 # PostgreSQL
|
||||
psycopg-binary>=3.2.1 # PostgreSQL
|
||||
psycopg-pool>=3.2.2 # PostgreSQL
|
||||
asyncpg>=0.30.0 # SQLAlchemy
|
||||
greenlet>=3.1.1 # Async
|
||||
@@ -1,12 +1,20 @@
|
||||
annotated-types==0.6.0 # Pydantic
|
||||
pydantic==2.6.4 # Pydantic
|
||||
pydantic-core==2.16.3 # Pydantic
|
||||
typing-extensions==4.10.0 # Pydantic
|
||||
annotated-types>=0.7.0 # Pydantic
|
||||
pydantic>=2.8.2 # Pydantic
|
||||
pydantic-core>=2.20.1 # Pydantic
|
||||
typing-extensions>=4.12.2 # Pydantic
|
||||
|
||||
anyio==4.3.0 # Pagination
|
||||
fastapi==0.110.0 # Pagination
|
||||
fastapi-pagination==0.12.21 # Pagination
|
||||
sniffio==1.3.1 # Pagination
|
||||
starlette==0.36.3 # Pagination
|
||||
anyio>=4.4.0 # FastAPI
|
||||
fastapi>=0.111.0 # FastAPI
|
||||
idna>=3.7 # FastAPI
|
||||
sniffio>=1.3.1 # FastAPI
|
||||
starlette>=0.37.2 # FastAPI
|
||||
|
||||
sqlalchemy==2.0.29 # SQLAlchemy
|
||||
fastapi-pagination>=0.12.26 # Pagination
|
||||
sqlalchemy>=2.0.31 # SQLAlchemy
|
||||
sqlalchemy-utils>=0.41.2 # For managing databases
|
||||
|
||||
python-dotenv>=1.0.1 # Environment variables
|
||||
|
||||
h11>=0.14.0 # Testing
|
||||
httpcore>=1.0.5 # Testing
|
||||
httpx>=0.27.0 # Testing
|
||||
|
||||
28
setup.py
28
setup.py
@@ -5,6 +5,15 @@ from setuptools import find_packages, setup
|
||||
with open("requirements.txt") as f:
|
||||
requirements = f.read().splitlines()
|
||||
|
||||
with open("requirements.build.txt") as f:
|
||||
build_requirements = f.read().splitlines()
|
||||
|
||||
with open("requirements.pg.txt") as f:
|
||||
pg_requirements = f.read().splitlines()
|
||||
|
||||
with open("requirements.auth0.txt") as f:
|
||||
auth0_requirements = f.read().splitlines()
|
||||
|
||||
|
||||
def get_latest_git_tag() -> str:
|
||||
try:
|
||||
@@ -23,7 +32,9 @@ def get_latest_git_tag() -> str:
|
||||
setup(
|
||||
name="creyPY",
|
||||
version=get_latest_git_tag(),
|
||||
description="My collection of Python and FastAPI shortcuts etc.",
|
||||
description="Collection of my Python and FastAPI shortcuts, snippets etc.",
|
||||
long_description=open("README.md").read(),
|
||||
long_description_content_type="text/markdown",
|
||||
author="Conrad Großer",
|
||||
author_email="conrad@noah.tech",
|
||||
packages=find_packages(),
|
||||
@@ -31,4 +42,19 @@ setup(
|
||||
license="MIT",
|
||||
python_requires=">=3.12",
|
||||
install_requires=requirements,
|
||||
extras_require={
|
||||
"build": build_requirements,
|
||||
"postgres": pg_requirements,
|
||||
"auth0": auth0_requirements,
|
||||
"all": build_requirements + pg_requirements + auth0_requirements,
|
||||
},
|
||||
keywords=[
|
||||
"creyPY",
|
||||
"Python",
|
||||
"FastAPI",
|
||||
"shortcuts",
|
||||
"snippets",
|
||||
"utils",
|
||||
],
|
||||
platforms="any",
|
||||
)
|
||||
|
||||
90
test.py
Normal file
90
test.py
Normal file
@@ -0,0 +1,90 @@
|
||||
import unittest
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import HTTPException
|
||||
from fastapi.routing import APIRoute
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
from creyPY.fastapi.app import generate_unique_id
|
||||
from creyPY.fastapi.crud import (
|
||||
get_object_or_404,
|
||||
)
|
||||
from creyPY.fastapi.models.base import Base
|
||||
|
||||
|
||||
class MockDBClass(Base):
    """Minimal ORM model used as a fixture for the CRUD helper tests."""

    def __init__(self, id):
        # Only the primary key matters for get_object_or_404 lookups.
        self.id = id
|
||||
|
||||
|
||||
class TestMyFunction(unittest.TestCase):
    """Unit tests for ``generate_unique_id`` and ``get_object_or_404``."""

    def setUp(self):
        """Build a throwaway in-memory SQLite session with all tables created."""
        # Create a SQLite in-memory database for testing
        engine = create_engine("sqlite:///:memory:")

        # Create a sessionmaker bound to this engine
        Session = sessionmaker(bind=engine)

        # Now you can use Session() to get a session bound to the engine
        self.db = Session()

        # create the table
        Base.metadata.create_all(engine)

    def test_generate_unique_id(self):
        """Route paths map to deterministic operation ids (path segments + verb)."""
        # Test case 1: Route with no path parameters and GET method
        route1 = APIRoute(path="/users", methods={"GET"}, endpoint=lambda: None)
        assert generate_unique_id(route1) == "users_list"

        # Test case 2: Route with path parameters and POST method
        route2 = APIRoute(path="/users/{user_id}", methods={"POST"}, endpoint=lambda: None)
        assert generate_unique_id(route2) == "users_post"

        # Test case 3: Route with path parameters and multiple methods
        route3 = APIRoute(path="/users/{user_id}", methods={"GET", "PUT"}, endpoint=lambda: None)
        result = generate_unique_id(route3)
        # Method sets are unordered, so either verb may be picked.
        assert result == "users_get" or result == "users_put"

        # Test case 4: Route with special characters in path
        route4 = APIRoute(
            path="/users/{user_id}/posts/{post_id}", methods={"DELETE"}, endpoint=lambda: None
        )
        assert generate_unique_id(route4) == "users_posts_delete"

        # Test case 5: Route with multiple path parameters and PATCH method
        route5 = APIRoute(
            path="/users/{user_id}/posts/{post_id}", methods={"PATCH"}, endpoint=lambda: None
        )
        assert generate_unique_id(route5) == "users_posts_patch"

        # Test case 6: Route with no path parameters and PUT method
        route6 = APIRoute(path="/users", methods={"PUT"}, endpoint=lambda: None)
        assert generate_unique_id(route6) == "users_put"

    def test_get_object_or_404_existing_object(self):
        """A persisted object is returned as-is by get_object_or_404."""
        # Arrange
        obj_id = UUID("123e4567-e89b-12d3-a456-426614174000")
        obj = MockDBClass(obj_id)
        self.db.add(obj)
        self.db.commit()

        # Act
        result = get_object_or_404(MockDBClass, obj_id, self.db)

        # Assert
        assert result == obj

    def test_get_object_or_404_non_existing_object(self):
        """A missing id raises HTTPException 404 with the standard detail."""
        # Arrange
        obj_id = UUID("123e4567-e89b-12d3-a456-426614174000")

        # Act & Assert
        with self.assertRaises(HTTPException) as exc_info:
            get_object_or_404(MockDBClass, obj_id, self.db)
        assert exc_info.exception.status_code == 404
        assert exc_info.exception.detail == "The object does not exist."
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
Reference in New Issue
Block a user