Compare commits

...

118 Commits

Author SHA1 Message Date
83726f517c feat: added stripe service 2025-01-21 22:12:03 +01:00
abe84bcfcb Merge pull request #22 from creyD/dev
Major Version 3.0.0
2025-01-21 12:15:43 +01:00
vikynoah
2d6de99585 fix: post_file method change for testing (#29)
* fix: post_file method change for testing

* changes
2025-01-16 09:35:23 +01:00
vikynoah
573f59349f fix: changes to post method in testing_async (#28) 2025-01-08 19:37:10 +01:00
creyD
32bf089456 Adjusted files for isort & autopep 2025-01-02 22:20:49 +00:00
vikynoah
d75fede3d1 fix: Force postgresql SSL mode (#27)
* fix: Force postgresql SSL mode

* changes
2025-01-02 23:20:17 +01:00
creyD
f8b781b3e7 Adjusted files for isort & autopep 2024-12-11 16:15:33 +00:00
vikynoah
93c7f6f6cb fix: Async Testing (#26)
* fix: httpx fix as per latest version

* fix: Fix Async Testing client
2024-12-11 17:14:59 +01:00
creyD
2e44453915 Adjusted files for isort & autopep 2024-12-09 15:29:15 +00:00
vikynoah
2a22471de9 fix: httpx fix as per latest version (#25) 2024-12-09 16:28:44 +01:00
2176b1a37d fix: bumped security risks and enabled newer packages installed 2024-12-04 20:05:19 +01:00
5daddf260e fix: added timeouts to the requests to fix Bandit issue 2024-11-25 13:20:17 +01:00
364e07daa1 fix: fixed random issue (codacy) 2024-11-25 13:14:07 +01:00
5daf6eb8c5 fix: fixed missing import 2024-11-25 12:55:35 +01:00
dfb0588d1c fix: fixed pipeline 2024-11-24 18:27:45 +01:00
3251afdb90 fix: fixed pipeline 2024-11-24 18:25:59 +01:00
85fe263da4 fix: pipeline fix 2024-11-24 18:21:43 +01:00
0be70deb00 fix: fixed pipeline 2024-11-24 18:18:13 +01:00
0418c75e19 feat: added all install option for dependencies 2024-11-24 18:16:03 +01:00
2444269486 feat: added auth0 common module 2024-11-24 18:13:58 +01:00
creyD
33bdeb12a0 Adjusted files for isort & autopep 2024-11-24 17:03:03 +00:00
5efed5399b Update README.md 2024-11-24 18:02:00 +01:00
7dbce117c8 feat: added common database helper 2024-11-24 18:01:45 +01:00
481bfcfffd feat: unified configs for pg sessions 2024-11-24 17:57:53 +01:00
90c9d2dc09 breaking: default version no longer uses postgres 2024-11-24 17:57:49 +01:00
renovate[bot]
8b037fbeb5 chore: Configure Renovate (#20)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Conrad <grosserconrad@gmail.com>
2024-11-24 16:23:52 +01:00
b86b58f3e4 Merge pull request #19 from creyD/dev 2024-11-22 13:20:27 +01:00
creyD
17f96c920d Adjusted files for isort & autopep 2024-11-22 11:58:05 +00:00
vikynoah
523241ac4b feat: N-271 async db (#18) 2024-11-22 12:56:45 +01:00
creyD
6f09c2ef4c Adjusted files for isort & autopep 2024-11-15 11:39:59 +00:00
vikynoah
9bba5b0a4e fix: N 271 async db (#17) 2024-11-15 12:39:30 +01:00
creyD
50031556f9 Adjusted files for isort & autopep 2024-11-12 08:54:34 +00:00
vikynoah
2940ddbdcd feat: Introduce ASYNC DB as Plug and Play (#16)
Co-authored-by: vikbhas <waraa.vignesh@gmail.com>
2024-11-12 09:54:04 +01:00
807af12fa1 Merge pull request #13 from creyD/dev 2024-11-05 11:54:46 +01:00
creyD
dce897c247 Adjusted files for isort & autopep 2024-11-05 10:29:40 +00:00
vikynoah
89997372ef fix: Changes to accomodate pagination flag in Params (#14)
Co-authored-by: vikbhas <waraa.vignesh@gmail.com>
2024-11-05 11:29:06 +01:00
c8c5977978 fix: removed non-working backsync 2024-10-29 16:20:01 +01:00
974bc591d6 Merge pull request #11 from creyD/dev 2024-10-29 15:49:00 +01:00
eb895398ab fix: trying again to fix the pipeline 2024-10-29 15:46:52 +01:00
867abd7054 fix: fixed workflow again 2024-10-29 15:33:54 +01:00
26e18f6b31 Merge pull request #10 from creyD/dev 2024-10-29 15:32:28 +01:00
8a3a60dbb0 fix: fixed workflow again 2024-10-29 15:30:42 +01:00
e52a5f421b Merge pull request #9 from creyD/dev 2024-10-29 15:23:41 +01:00
a6ded91185 fix: syncing back tags to dev 2024-10-29 15:18:57 +01:00
eb64874c47 fix: minor adjustment to the pipeline 2024-10-29 15:13:36 +01:00
b7200852a4 Merge branch 'dev' of https://github.com/creyD/creyPY into dev 2024-10-29 15:13:30 +01:00
3d18205205 fix: fixed github pipeline 2024-10-29 15:12:41 +01:00
99c84b676c Merge pull request #8 from creyD/dev 2024-10-29 15:09:34 +01:00
6806de23b3 fix: fixed github pipeline 2024-10-29 15:06:31 +01:00
6a93ab05a3 Merge pull request #6 from creyD/dev 2024-10-29 14:56:14 +01:00
vikynoah
c5b2ab9932 fix: Add condition for total greater than zero (#7) 2024-10-28 15:37:14 +01:00
5a32a5908b Removed debug statement 2024-10-25 15:34:16 +02:00
b7df0bfdcd fix: added escape for variable names 2024-10-25 15:27:50 +02:00
378d1d60f1 fix: adjusting pipeline for prod as well 2024-10-25 15:22:35 +02:00
e381992f8e fix: fixing dev versioning 2024-10-25 15:09:15 +02:00
6d5411a8ae fix: debugging pipeline 2024-10-25 14:46:35 +02:00
89351d714b fix: debugging pipeline 2024-10-25 14:44:18 +02:00
c24f8933fb fix: attempt on fixing the versioning issue 2024-10-25 14:38:58 +02:00
0bed0e0da4 fix: attempt on fixing the versioning issue 2024-10-25 14:34:56 +02:00
8463eef907 fix: attempt on fixing the versioning issue 2024-10-25 14:25:48 +02:00
5903de2aad fix: fixed semantic versioning format selector 2024-10-25 14:19:11 +02:00
0bf89fe14d fix: switched to semantic versioning action 2024-10-25 14:12:04 +02:00
d54146e05b fix: fixed naming of pre-release commits 2024-10-24 12:41:04 +02:00
d6f79c3ed8 fix: fixed naming of pre-release commits 2024-10-24 12:35:13 +02:00
3f4a0ee00d fix: fixed naming of pre-release commits 2024-10-24 12:25:30 +02:00
714178d68f fix: fixed naming of pre-release commits 2024-10-24 12:22:45 +02:00
c7e205f14b fix: fixed naming of pre-release commits 2024-10-24 12:18:50 +02:00
39ae74becb fix: minor changelog adjustment 2024-10-24 12:15:23 +02:00
5f39966223 fix: Fixed tag pushing and changelog 2024-10-24 12:10:16 +02:00
c91e684f08 fix: fix attempt for the github pipeline 2024-10-24 12:10:16 +02:00
f11b8b8864 fix: alternative attempt on the fix 2024-10-24 12:10:16 +02:00
983553e97a fix: locked tag and publish to master and dev 2024-10-24 12:10:16 +02:00
8740eafce2 fix: fixed pipeline tagging 2024-10-24 12:10:16 +02:00
aa44b9ebe9 fix: fixed pipeline tagging 2024-10-24 12:10:16 +02:00
851573d964 fix: fixed pipeline tagging 2024-10-24 12:10:16 +02:00
cfa1da08d3 fix: pipeline now pushes pre-release versions 2024-10-24 12:10:16 +02:00
4a5a777ef5 breaking: Fixed #3 2024-10-24 12:10:16 +02:00
c9a9b1bc0a breaking: Fixed #1 2024-10-24 12:10:16 +02:00
d9f6e82736 Merge pull request #5 from creyD/dev
Co-authored-by: vikbhas <waraa.vignesh@gmail.com>
Co-authored-by: vikynoah <vigneshwaraa.sarangapani@noah.tech>
Co-authored-by: creyD <creyD@users.noreply.github.com>
2024-10-24 11:04:12 +02:00
65e93a023b fix: minor vscode adjustments 2024-10-24 09:32:11 +02:00
creyD
6ce0cfbd14 Adjusted files for isort & autopep 2024-10-24 07:27:26 +00:00
vikynoah
da7ec0b28e Feat: Addition of pagination proxy and Flag functionality (#4)
Co-authored-by: vikbhas <waraa.vignesh@gmail.com>
2024-10-24 09:26:57 +02:00
2727c452b6 fix: adjusted pipeline to dev branch and pull requests 2024-10-24 09:25:39 +02:00
be7d7ddb22 fix: bumped dependencies 2024-07-14 18:24:07 +02:00
3f0379290d fix: Dependencies can now be installed with newer versions 2024-05-14 14:40:12 +02:00
creyD
fa7a1c8a61 Adjusted files for isort & autopep 2024-05-13 09:22:06 +00:00
5a7e1776db fix: Added option to specify lookup_column for get_object_or_404 2024-05-13 11:21:25 +02:00
4c25d91e18 fix: fixed another minor bug with the order_by method 2024-04-25 19:43:12 +02:00
f24db62781 fix: fixed a bug with the jsonschema for order_by 2024-04-25 18:44:58 +02:00
4d997a375e feat: added order_by method 2024-04-25 18:19:26 +02:00
ee11d86235 feat: Added headers to testing 2024-04-02 13:18:41 +02:00
e47f5f2b07 breaking: Release of 1.0.0 2024-04-02 12:19:08 +02:00
754a951dc3 Fixed missing dependencies 2024-04-02 12:14:47 +02:00
8eb04f4b17 beaking: Version 1 release 2024-04-02 12:09:06 +02:00
140c6e4678 Minor bugfix 2024-04-02 11:40:26 +02:00
6fc0d01189 feat: Added generic testing client 2024-04-02 11:37:51 +02:00
10eaa2c0e0 Fixed workflow 2024-04-01 20:57:39 +02:00
b549fd941c Added content type for long description 2024-04-01 20:53:02 +02:00
2f4e3e7dba Added long_description 2024-04-01 20:46:27 +02:00
38d9a0c177 Added additional fetching for git checkout 2024-04-01 20:43:11 +02:00
f4c9b7b6b6 Update ci.yml 2024-04-01 20:38:16 +02:00
5b3389e939 Added manual triggering of the action 2024-04-01 20:33:34 +02:00
7eabeb3e04 Moved to tagging without prefix 2024-04-01 20:31:45 +02:00
51d4e7e6b8 Reworked CI 2024-04-01 20:28:16 +02:00
3fc6ae51c3 Added todos 2024-04-01 20:19:43 +02:00
525af5b34d Merged workflows 2024-04-01 20:19:02 +02:00
2daa8e5a22 feat: add todo 2024-04-01 20:11:51 +02:00
4fef25f898 feat: smaller bugfix for pipeline 2024-04-01 20:07:54 +02:00
creyD
351642b0f1 Adjusted files for isort & autopep 2024-04-01 18:05:56 +00:00
80dfe98a1d feat: added todos to trigger pipeline 2024-04-01 20:05:13 +02:00
b2b726ed9a Added testing to CI 2024-04-01 20:04:19 +02:00
246eccd606 feat: Added testing for CRUD 2024-04-01 20:01:15 +02:00
fc13dad076 feat: Added tests 2024-04-01 19:44:13 +02:00
172f47221c Fixed issue in pipeline 2024-04-01 19:29:58 +02:00
65c8203348 Fixed issue in pipeline 2024-04-01 19:26:14 +02:00
515c3372c6 Minor Changes 2024-04-01 19:22:08 +02:00
43055dde1b Triggering workflow 2024-04-01 19:19:35 +02:00
0aaa1dc6a1 Fixed workflow 2024-04-01 19:18:43 +02:00
39 changed files with 1343 additions and 151 deletions

108
.github/workflows/ci.yml vendored Normal file
View File

@@ -0,0 +1,108 @@
name: Lint, Test, Tag & Publish
on:
push:
branches:
- master
- dev
paths-ignore:
- "**/.gitignore"
- "**/.vscode/**"
- "**/README.md"
- "**/CHANGELOG.md"
pull_request:
branches:
- dev
workflow_dispatch:
jobs:
lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: psf/black@stable
with:
options: "-l 100 --exclude '/.venv/|/__init__.py'"
- uses: creyD/autoflake_action@master
with:
no_commit: True
options: --in-place --remove-all-unused-imports -r --exclude **/__init__.py,**/db/models.py,
- uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: Adjusted files for isort & autopep
test:
runs-on: ubuntu-latest
needs: lint
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: '3.12'
- run: python -m pip install --upgrade pip
- run: |
python -m pip install -r requirements.txt
python -m pip install -r requirements.pg.txt
python -m pip install -r requirements.auth0.txt
- run: python test.py
tag_and_publish:
runs-on: ubuntu-latest
if: (github.ref_name == 'master' || github.ref_name == 'dev') && github.event_name == 'push'
needs: test
permissions:
id-token: write # IMPORTANT: this permission is mandatory for trusted publishing
contents: write # for the tags
steps:
- uses: actions/checkout@v4
with:
fetch-tags: true
ref: ${{ github.ref_name }}
fetch-depth: 0
- name: setup git
run: |
git config --local user.email "15138480+creyD@users.noreply.github.com"
git config --local user.name "creyD"
- name: set version format
id: version_format
run: |
if [[ ${{ github.ref_name }} == 'master' ]]; then
echo "version_format=\${major}.\${minor}.\${patch}" >> $GITHUB_OUTPUT
else
echo "version_format=\${major}.\${minor}.\${patch}rc\${increment}" >> $GITHUB_OUTPUT
fi
- name: Git Version
uses: PaulHatch/semantic-version@v5.4.0
id: git_version
with:
tag_prefix: ""
major_pattern: "breaking:"
minor_pattern: "feat:"
enable_prerelease_mode: false
version_format: ${{ steps.version_format.outputs.version_format }}
- name: Create & Push Tag
run: |
git tag ${{ steps.git_version.outputs.version }}
git push origin ${{ steps.git_version.outputs.version }}
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.12'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements.build.txt
python setup.py sdist bdist_wheel
- name: Build and publish
uses: pypa/gh-action-pypi-publish@release/v1
with:
user: __token__
password: ${{ secrets.PYPI_API_TOKEN }}

View File

@@ -1,46 +0,0 @@
name: Lint and tag
on:
push:
branches:
- master
paths-ignore:
- "**/.github/**"
- "**/.gitignore"
- "**/.vscode/**"
- "**/README.md"
- "**/CHANGELOG.md"
jobs:
lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: psf/black@stable
with:
options: "-l 100 --exclude '/.venv/|/__init__.py'"
- uses: creyD/autoflake_action@master
with:
no_commit: True
options: --in-place --remove-all-unused-imports -r --exclude **/__init__.py,**/db/models.py,
- uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: Adjusted files for isort & autopep
tag:
runs-on: ubuntu-latest
needs: lint
steps:
- name: Git Version
uses: codacy/git-version@2.8.0
id: git_version
with:
prefix: v
minor-identifier: "feat:"
major-identifier: "breaking:"
- name: Create Tag
run: git tag -a v${{ steps.git_version.outputs.version }} -m "v${{ steps.git_version.outputs.version }}"
- name: Push Tag
run: git push origin v${{ steps.git_version.outputs.version }}

View File

@@ -1,29 +0,0 @@
name: Publish to pypi
on:
push:
tags:
- '*'
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.12'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements.build.txt
- name: Build and publish
uses: pypa/gh-action-pypi-publish@release/v1
with:
user: __token__
password: ${{ secrets.PYPI_API_TOKEN }}

View File

@@ -26,10 +26,16 @@
"**/db.sqlite3": true,
"**/.DS_Store": true,
"**/*.pyc": true,
"**/__pycache__/": true
"**/__pycache__/": true,
"**/build": true,
"**/dist": true,
"**/*.egg-info": true,
},
"search.exclude": {
"**/.git": true,
"**/build": true,
"**/*.egg-info": true,
"**/dist": true,
"**/.venv": true,
"**/tmp": true,
"htmlcov/*": true,

49
CHANGELOG.md Normal file
View File

@@ -0,0 +1,49 @@
# Changelog
All notable changes to this project will be documented in this file.
## 2.0.0
- Fixed #1 Rename misspelled additonal_data to additional_data on create_obj_from_data
- Fixed #3 Inverse partial flag: bool = False because it was wrong on update_obj_from_data
Notes:
You will need to change calls to `create_obj_from_data` according to #1 (rename additonal_data to additional_data)
You will need to change calls to `update_obj_from_data` according to #3 (if you supplied `partial`, you will need to reverse it: `true` -> `false` and `false` -> `true`)
## 1.3.0
- Addition of pagination proxy and pagination=off functionality (Thanks to @vikbhas)
## 1.2.5
- Bumped dependencies
## 1.2.4
- Enabled newer versions for all dependencies
## 1.2.3
- Added option to specify lookup_column for get_object_or_404
## 1.2.2
- Added order_by method
## 1.1.0
- Added headers to testing
## 1.0.0
- Bumped dependencies
- Added const documentation
- Added installation instructions and examples to README
- Added sqlalchemy session for db connection
## 0.2.0
- Added testing client

View File

@@ -1,6 +1,6 @@
MIT License
Copyright (c) 2024 Conrad
Copyright (c) 2024 Conrad Großer
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

View File

@@ -1,9 +1,57 @@
# creyPY
My collection of Python and FastAPI shortcuts etc.
## Installation
# Release
```bash
pip install creyPY -U
```
``` rm -rf dist build creyPY.egg-info && python setup.py sdist bdist_wheel ```
## Versioning
``` twine upload dist/* ```
This library uses [Semantic Versioning](https://semver.org/).
## FastAPI
This library installs fastapi and pydantic, as well as sqlalchemy for you. It also provides a sqlalchemy base class and companion pydantic schemas. Also there are some helper functions for FastAPI in `creyPY.fastapi.app` like `generate_unique_id` to generate unique operation IDs for the OpenAPI schema to work with code generators.
### Database connection
The `creyPY.fastapi.db` module provides a `Session` class that can be used as a context manager to connect to a database. It exposes the `SQLALCHEMY_DATABASE_URL` variable for you to use. It uses the following environment variables:
- `POSTGRES_HOST`: The host of the database
- `POSTGRES_PORT`: The port of the database
- `POSTGRES_USER`: The user of the database
- `POSTGRES_PASSWORD`: The password of the database
- `POSTGRES_DB`: The database name
Currently only PostgreSQL is supported. It creates a sync session, it is planned to add async support in the future. You can use this like this:
```python
from creyPY.fastapi.db.session import get_db
async def test_endpoint(
db: Session = Depends(get_db),
) -> Any:
pass
```
## Constants
The constants module contains a few enums that I use in my projects. The best way to understand this library is to look at the code (it's not that much). However for simplicity, here is a brief overview:
- LanguageEnum: Contains all languages according to ISO 639
- CountryEnum: Contains all countries according to ISO 3166
- CurrencyEnum: Contains all accepted stripe currencies (Commented out are the Zero-decimal currencies, to avoid custom implementation)
- StripeStatus: Contains all stripe payment statuses
- GroupMode: Contains time group modes (e.g. day, week, month, year)
### Usage example
```python
from creyPY.const import LanguageEnum
print(LanguageEnum.EN) # Output: LanguageEnum.EN
print(LanguageEnum.EN.value) # Output: English
```

View File

@@ -1,6 +1,7 @@
import enum
# Source: https://en.wikipedia.org/wiki/List_of_ISO_3166_country_codes
class CountryEnum(str, enum.Enum):
AF = "Afghanistan"
AX = "Åland Islands"
@@ -248,6 +249,7 @@ class CountryEnum(str, enum.Enum):
ZW = "Zimbabwe"
# :: https://en.wikipedia.org/wiki/List_of_ISO_639_language_codes
class LanguageEnum(str, enum.Enum):
AA = "Afar"
AB = "Abkhazian"

View File

@@ -1,5 +1,7 @@
from .app import * # noqa
from .crud import * # noqa
from .db import * # noqa
from .models import * # noqa
from .pagination import * # noqa
from .schemas import * # noqa
from .testing import * # noqa

View File

@@ -1,58 +1,214 @@
from typing import Type, TypeVar
from typing import Type, TypeVar, overload
from uuid import UUID
from fastapi import HTTPException
from pydantic import BaseModel
from sqlalchemy.orm import Session
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
import asyncio
from .models.base import Base
T = TypeVar("T", bound=Base)
def get_object_or_404(db_class: Type[T], id: UUID | str, db: Session, expunge: bool = False) -> T:
obj = db.query(db_class).filter(db_class.id == id).one_or_none()
if obj is None:
raise HTTPException(status_code=404, detail="The object does not exist.")
if expunge:
db.expunge(obj)
return obj
@overload
async def get_object_or_404(
db_class: Type[T],
id: UUID | str,
db: AsyncSession,
expunge: bool = False,
lookup_column: str = "id",
) -> T:
pass
@overload
def get_object_or_404(
db_class: Type[T], id: UUID | str, db: Session, expunge: bool = False, lookup_column: str = "id"
) -> T:
pass
def get_object_or_404(
db_class: Type[T],
id: UUID | str,
db: Session | AsyncSession,
expunge: bool = False,
lookup_column: str = "id",
) -> T:
async def _get_async_object() -> T:
query = select(db_class).filter(getattr(db_class, lookup_column) == id)
result = await db.execute(query)
obj = result.scalar_one_or_none()
if obj is None:
raise HTTPException(status_code=404, detail="The object does not exist.") # type: ignore
if expunge:
await db.expunge(obj)
return obj
def _get_sync_object() -> T:
obj = db.query(db_class).filter(getattr(db_class, lookup_column) == id).one_or_none()
if obj is None:
raise HTTPException(status_code=404, detail="The object does not exist.") # type: ignore
if expunge:
db.expunge(obj)
return obj
if isinstance(db, AsyncSession):
return asyncio.ensure_future(_get_async_object()) # type: ignore
elif isinstance(db, Session):
return _get_sync_object()
else:
raise HTTPException(status_code=404, detail="Invalid session type. Expected Session or AsyncSession.") # type: ignore
# TODO: Add testing
@overload
async def create_obj_from_data(
data: BaseModel,
model: Type[T],
db: AsyncSession,
additional_data: dict = {},
exclude: dict = {},
) -> T:
pass
@overload
def create_obj_from_data(
data: BaseModel, model: Type[T], db: Session, additional_data: dict = {}, exclude: dict = {}
) -> T:
pass
def create_obj_from_data(
data: BaseModel, model: Type[T], db: Session, additonal_data={}, exclude={}
data: BaseModel, model: Type[T], db: Session | AsyncSession, additional_data={}, exclude={}
) -> T:
obj = model(**data.model_dump(exclude=exclude) | additonal_data)
db.add(obj)
db.commit()
db.refresh(obj)
return obj
obj_data = data.model_dump(exclude=exclude) | additional_data
obj = model(**obj_data)
async def _create_async_obj():
db.add(obj)
await db.commit()
await db.refresh(obj)
return obj
def _create_sync_obj():
db.add(obj)
db.commit()
db.refresh(obj)
return obj
if isinstance(db, AsyncSession):
return asyncio.ensure_future(_create_async_obj()) # type: ignore
elif isinstance(db, Session):
return _create_sync_obj()
else:
raise HTTPException(status_code=404, detail="Invalid session type. Expected Session or AsyncSession.") # type: ignore
# TODO: Add testing
@overload
async def update_obj_from_data(
data: BaseModel,
model: Type[T],
id: UUID | str,
db: AsyncSession,
partial: bool = True,
ignore_fields: list = [],
additional_data: dict = {},
exclude: dict = {},
) -> T:
pass
@overload
def update_obj_from_data(
data: BaseModel,
model: Type[T],
id: UUID | str,
db: Session,
partial: bool = True,
ignore_fields: list = [],
additional_data: dict = {},
exclude: dict = {},
) -> T:
pass
def update_obj_from_data(
data: BaseModel,
model: Type[T],
id: UUID | str,
db: Session,
partial: bool = False,
db: Session | AsyncSession,
partial: bool = True,
ignore_fields=[],
additional_data={},
exclude={},
) -> T:
obj = get_object_or_404(model, id, db)
data_dict = data.model_dump(exclude_unset=not partial, exclude=exclude)
data_dict.update(additional_data) # merge additional_data into data_dict
for field in data_dict:
if field not in ignore_fields:
setattr(obj, field, data_dict[field])
db.commit()
db.refresh(obj)
return obj
def _update_fields(obj: T):
data_dict = data.model_dump(exclude_unset=partial, exclude=exclude)
data_dict.update(additional_data)
for field in data_dict:
if field not in ignore_fields:
setattr(obj, field, data_dict[field])
async def _update_async_obj() -> T:
obj = await get_object_or_404(model, id, db)
_update_fields(obj)
await db.commit()
await db.refresh(obj)
return obj
def _update_sync_obj() -> T:
obj = get_object_or_404(model, id, db)
_update_fields(obj)
db.commit()
db.refresh(obj)
return obj
if isinstance(db, AsyncSession):
return asyncio.ensure_future(_update_async_obj()) # type: ignore
elif isinstance(db, Session):
return _update_sync_obj()
else:
raise HTTPException(status_code=404, detail="Invalid session type. Expected Session or AsyncSession.") # type: ignore
# TODO: Add testing
@overload
async def delete_object(db_class: Type[T], id: UUID | str, db: AsyncSession) -> None:
pass
@overload
def delete_object(db_class: Type[T], id: UUID | str, db: Session) -> None:
obj = db.query(db_class).filter(db_class.id == id).one_or_none()
if obj is None:
raise HTTPException(status_code=404, detail="The object does not exist.")
db.delete(obj)
db.commit()
pass
def delete_object(db_class: Type[T], id: UUID | str, db: Session | AsyncSession) -> None:
async def _delete_async_obj() -> None:
query = select(db_class).filter(db_class.id == id)
result = await db.execute(query)
obj = result.scalar_one_or_none()
if obj is None:
raise HTTPException(status_code=404, detail="The object does not exist.")
await db.delete(obj)
await db.commit()
def _delete_sync_obj() -> None:
obj = db.query(db_class).filter(db_class.id == id).one_or_none()
if obj is None:
raise HTTPException(status_code=404, detail="The object does not exist.")
db.delete(obj)
db.commit()
if isinstance(db, AsyncSession):
return asyncio.ensure_future(_delete_async_obj()) # type: ignore
elif isinstance(db, Session):
return _delete_sync_obj()
else:
raise HTTPException(status_code=404, detail="Invalid session type. Expected Session or AsyncSession.") # type: ignore

View File

@@ -0,0 +1,3 @@
from .async_session import * # noqa
from .helpers import * # noqa
from .session import * # noqa

View File

@@ -0,0 +1,23 @@
from typing import AsyncGenerator
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker
from .common import SQLALCHEMY_DATABASE_URL, name
async_engine = create_async_engine(
SQLALCHEMY_DATABASE_URL + name, pool_pre_ping=True, connect_args={"sslmode": "require"}
)
AsyncSessionLocal = sessionmaker(
bind=async_engine,
class_=AsyncSession,
expire_on_commit=False,
autoflush=False,
autocommit=False,
)
async def get_async_db() -> AsyncGenerator[AsyncSession, None]:
async with AsyncSessionLocal() as db:
yield db

View File

@@ -0,0 +1,13 @@
import os
from dotenv import load_dotenv
load_dotenv()
host = os.getenv("POSTGRES_HOST", "localhost")
user = os.getenv("POSTGRES_USER", "postgres")
password = os.getenv("POSTGRES_PASSWORD", "root")
port = os.getenv("POSTGRES_PORT", "5432")
name = os.getenv("POSTGRES_DB", "fastapi")
SQLALCHEMY_DATABASE_URL = f"postgresql+psycopg://{user}:{password}@{host}:{port}/"

View File

@@ -0,0 +1,8 @@
from sqlalchemy_utils import create_database, database_exists
def create_if_not_exists(db_name: str):
from .common import SQLALCHEMY_DATABASE_URL
if not database_exists(SQLALCHEMY_DATABASE_URL + db_name):
create_database(SQLALCHEMY_DATABASE_URL + db_name)

View File

@@ -0,0 +1,17 @@
from typing import Generator
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm.session import Session
from .common import SQLALCHEMY_DATABASE_URL, name
engine = create_engine(
SQLALCHEMY_DATABASE_URL + name, pool_pre_ping=True, connect_args={"sslmode": "require"}
)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
def get_db() -> Generator[Session, None, None]:
with SessionLocal() as db:
yield db

View File

@@ -19,6 +19,9 @@ class Base:
__name__: str
# TODO: Add default representation string
# TODO: Add automated foreign key resolution
# Generate __tablename__ automatically
@declared_attr
def __tablename__(cls) -> str:

View File

@@ -0,0 +1,25 @@
from typing import Callable
from pydantic.json_schema import SkipJsonSchema
from sqlalchemy import String, asc, cast, desc
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.sql.selectable import Select
def order_by(order_by: str | SkipJsonSchema[None] = None) -> Callable[[Select], Select]:
def _order_by(query: Select) -> Select:
if order_by:
direction = desc if order_by.startswith("-") else asc
column_name = order_by.lstrip("-")
# Get the column from the query
for column in query.inner_columns:
if column.key == column_name:
# If the column is a UUID, cast it to a string
if isinstance(column.type, UUID):
column = cast(column, String)
query = query.order_by(direction(column))
break
return query
return _order_by

View File

@@ -1,14 +1,47 @@
from math import ceil
from typing import Any, Generic, Optional, Self, Sequence, TypeVar
from typing import Any, Generic, Optional, Self, Sequence, TypeVar, Union, overload
from contextlib import suppress
from pydantic import BaseModel
from fastapi_pagination import Params
from fastapi_pagination.bases import AbstractPage, AbstractParams
from fastapi_pagination.types import GreaterEqualOne, GreaterEqualZero
from fastapi_pagination.types import (
GreaterEqualOne,
GreaterEqualZero,
AdditionalData,
SyncItemsTransformer,
AsyncItemsTransformer,
ItemsTransformer,
)
from fastapi_pagination.api import create_page, apply_items_transformer
from fastapi_pagination.utils import verify_params
from fastapi_pagination.ext.sqlalchemy import create_paginate_query
from fastapi_pagination.bases import AbstractParams, RawParams
from pydantic.json_schema import SkipJsonSchema
from sqlalchemy.sql.selectable import Select
from sqlalchemy.orm.session import Session
from sqlalchemy import select, func
from sqlalchemy.ext.asyncio import AsyncSession, async_scoped_session
from fastapi import Query
from sqlalchemy.util import await_only, greenlet_spawn
T = TypeVar("T")
class PaginationParams(BaseModel, AbstractParams):
page: int = Query(1, ge=1, description="Page number")
size: int = Query(50, ge=1, le=100, description="Page size")
pagination: bool = Query(True, description="Toggle pagination")
def to_raw_params(self) -> RawParams:
if not self.pagination:
return RawParams(limit=None, offset=None)
return RawParams(limit=self.size, offset=(self.page - 1) * self.size)
# TODO: Add complete fastapi-pagination proxy here
# TODO: Add pagination off functionality
# SkipJsonSchema is used to avoid generating invalid JSON schema in FastAPI
class Page(AbstractPage[T], Generic[T]):
results: Sequence[T]
page: GreaterEqualOne | SkipJsonSchema[None] = None
@@ -18,7 +51,7 @@ class Page(AbstractPage[T], Generic[T]):
has_next: bool | SkipJsonSchema[None] = None
has_prev: bool | SkipJsonSchema[None] = None
__params_type__ = Params
__params_type__ = PaginationParams
@classmethod
def create(
@@ -67,3 +100,105 @@ def parse_page(response, page: int, size: int) -> Page:
has_next=response.has_next,
has_prev=response.has_prev,
)
def create_count_query(query: Select) -> Select:
return select(func.count()).select_from(query.subquery())
def unwrap_scalars(
items: Sequence[Sequence[T]],
force_unwrap: bool = True,
) -> Union[Sequence[T], Sequence[Sequence[T]]]:
return [item[0] if force_unwrap else item for item in items]
def _get_sync_conn_from_async(conn: Any) -> Session: # pragma: no cover
if isinstance(conn, async_scoped_session):
conn = conn()
with suppress(AttributeError):
return conn.sync_session # type: ignore
with suppress(AttributeError):
return conn.sync_connection # type: ignore
raise TypeError("conn must be an AsyncConnection or AsyncSession")
@overload
def paginate(
connection: Session,
query: Select,
params: Optional[AbstractParams] = None,
transformer: Optional[SyncItemsTransformer] = None,
additional_data: Optional[AdditionalData] = None,
) -> Any:
pass
@overload
async def paginate(
connection: AsyncSession,
query: Select,
params: Optional[AbstractParams] = None,
transformer: Optional[AsyncItemsTransformer] = None,
additional_data: Optional[AdditionalData] = None,
) -> Any:
pass
def _paginate(
connection: Session,
query: Select,
params: Optional[AbstractParams] = None,
transformer: Optional[ItemsTransformer] = None,
additional_data: Optional[AdditionalData] = None,
async_: bool = False,
):
if async_:
def _apply_items_transformer(*args: Any, **kwargs: Any) -> Any:
return await_only(apply_items_transformer(*args, **kwargs, async_=True))
else:
_apply_items_transformer = apply_items_transformer
params, raw_params = verify_params(params, "limit-offset", "cursor")
count_query = create_count_query(query)
total = connection.scalar(count_query)
if params.pagination is False and total > 0:
params = Params(page=1, size=total)
else:
params = Params(page=params.page, size=params.size)
query = create_paginate_query(query, params)
items = connection.execute(query).all()
items = unwrap_scalars(items)
t_items = _apply_items_transformer(items, transformer)
return create_page(
t_items,
params=params,
total=total,
**(additional_data or {}),
)
def paginate(
connection: Session,
query: Select,
params: Optional[AbstractParams] = None,
transformer: Optional[ItemsTransformer] = None,
additional_data: Optional[AdditionalData] = None,
):
if isinstance(connection, AsyncSession):
connection = _get_sync_conn_from_async(connection)
return greenlet_spawn(
_paginate, connection, query, params, transformer, additional_data, async_=True
)
return _paginate(connection, query, params, transformer, additional_data, async_=False)

View File

@@ -4,6 +4,7 @@ from uuid import UUID
from pydantic import BaseModel, ConfigDict
# The created_by_id is a string because we use the sub from Auth0
class BaseSchemaModelIN(BaseModel):
    """Base input schema carrying the creator's identity."""

    # A string (not UUID) because Auth0 "sub" claims are opaque strings.
    created_by_id: str
    # from_attributes=True allows populating the model from ORM objects.
    model_config = ConfigDict(from_attributes=True)

138
creyPY/fastapi/testing.py Normal file
View File

@@ -0,0 +1,138 @@
import json
from fastapi.testclient import TestClient
class GenericClient(TestClient):
    """Test helper wrapping ``fastapi.testclient.TestClient``.

    Every verb helper sends ``self.default_headers``, prints the response body
    when the status code is unexpected (to aid debugging before the assert
    fires), asserts the expected status code, and returns parsed JSON or the
    raw response on request.

    NOTE(review): this class subclasses TestClient but never calls
    ``super().__init__``, so inherited TestClient methods are unusable; all
    traffic goes through the wrapped ``self.c`` client. Confirm the
    subclassing is intentional.
    """

    def __init__(self, client):
        # ``client`` is the ASGI app under test.
        self.c = TestClient(client)
        # Extra headers (e.g. auth) merged into every request.
        self.default_headers = {}

    def get(self, url: str, r_code: int = 200, parse_json=True):
        """GET *url*; assert *r_code*; return parsed JSON or raw bytes."""
        re = self.c.get(url, headers=self.default_headers)
        if re.status_code != r_code:
            print(re.content)
        assert r_code == re.status_code
        return re.json() if parse_json else re.content

    def delete(self, url: str, r_code: int = 204):
        """DELETE *url*; assert *r_code*; 204 responses carry no body."""
        re = self.c.delete(url, headers=self.default_headers)
        if re.status_code != r_code:
            print(re.content)
        assert r_code == re.status_code
        return re.json() if r_code != 204 else None

    def post(
        self,
        url: str,
        obj: dict | str | None = None,
        r_code: int = 201,
        raw_response=False,
        *args,
        **kwargs,
    ):
        """POST *obj* as a JSON body; assert *r_code*."""
        if obj is None:  # fixed: avoid the shared mutable default ``{}``
            obj = {}
        re = self.c.post(
            url,
            # isinstance() instead of ``type(obj) == dict``; also accepts subclasses
            data=json.dumps(obj) if isinstance(obj, dict) else obj,
            headers=self.default_headers | {"Content-Type": "application/json"},
            *args,
            **kwargs,
        )
        if re.status_code != r_code:
            print(re.content)
        assert r_code == re.status_code
        return re.json() if not raw_response else re

    def post_file(self, url: str, file, r_code: int = 201, raw_response=False, *args, **kwargs):
        """POST *file* as multipart/form-data under the ``file`` field."""
        re = self.c.post(
            url,
            files={"file": file},
            headers=self.default_headers,  # no JSON content type for uploads
            *args,
            **kwargs,
        )
        if re.status_code != r_code:
            print(re.content)
        assert r_code == re.status_code
        return re.json() if not raw_response else re

    def patch(
        self,
        url: str,
        obj: dict | str | None = None,
        r_code: int = 200,
        raw_response=False,
        *args,
        **kwargs,
    ):
        """PATCH *obj* as a JSON body; assert *r_code*."""
        if obj is None:  # fixed: avoid the shared mutable default ``{}``
            obj = {}
        re = self.c.patch(
            url,
            data=json.dumps(obj) if isinstance(obj, dict) else obj,
            headers=self.default_headers | {"Content-Type": "application/json"},
            *args,
            **kwargs,
        )
        if re.status_code != r_code:
            print(re.content)
        assert r_code == re.status_code
        return re.json() if not raw_response else re

    def put(
        self,
        url: str,
        obj: dict | str | None = None,
        r_code: int = 200,
        raw_response=False,
        *args,
        **kwargs,
    ):
        """PUT *obj* as a JSON body; assert *r_code*."""
        if obj is None:  # fixed: avoid the shared mutable default ``{}``
            obj = {}
        re = self.c.put(
            url,
            data=json.dumps(obj) if isinstance(obj, dict) else obj,
            headers=self.default_headers
            | {
                "Content-Type": "application/json",
                "accept": "application/json",
            },
            *args,
            **kwargs,
        )
        if re.status_code != r_code:
            print(re.content)
        assert r_code == re.status_code
        return re.json() if not raw_response else re

    def obj_lifecycle(
        self,
        input_obj: dict,
        url: str,
        pagination: bool = True,
        id_field: str = "id",
        created_at_check: bool = True,
    ):
        """Smoke-test a CRUD resource: empty list → create → get → delete → 404."""
        # GET LIST (must start empty)
        re = self.get(url)
        if pagination:
            assert re["total"] == 0
            assert len(re["results"]) == 0
        else:
            assert len(re) == 0
        # CREATE
        re = self.post(url, obj=input_obj)
        assert id_field in re
        assert re[id_field] is not None
        if created_at_check:
            assert "created_at" in re
            assert re["created_at"] is not None
        obj_id = str(re[id_field])
        # GET single object
        re = self.get(f"{url}{obj_id}/")
        assert re[id_field] == obj_id
        # GET LIST (now one object)
        re = self.get(url)
        if pagination:
            assert re["total"] == 1
            assert len(re["results"]) == 1
        else:
            assert len(re) == 1
        # DELETE
        self.delete(f"{url}{obj_id}")
        # GET LIST (empty again)
        re = self.get(url)
        if pagination:
            assert re["total"] == 0
            assert len(re["results"]) == 0
        else:
            assert len(re) == 0
        # GET must now 404
        self.get(f"{url}{obj_id}", parse_json=False, r_code=404)

View File

@@ -0,0 +1,143 @@
import json
from httpx import ASGITransport, AsyncClient
class AsyncGenericClient:
    """Async counterpart of GenericClient built on httpx.AsyncClient + ASGITransport.

    Every verb helper sends ``self.default_headers``, prints the response body
    when the status code is unexpected, asserts the expected status code, and
    returns parsed JSON or the raw response on request.
    """

    def __init__(self, app, headers: dict | None = None):
        self.c = AsyncClient(
            transport=ASGITransport(app=app), base_url="http://testserver", follow_redirects=True
        )
        # fixed: avoid the shared mutable default ``{}`` while keeping
        # reference semantics for an explicitly passed dict
        self.default_headers = {} if headers is None else headers

    async def get(self, url: str, r_code: int = 200, parse_json=True):
        """GET *url*; assert *r_code*; return parsed JSON or raw bytes."""
        re = await self.c.get(url, headers=self.default_headers)
        if re.status_code != r_code:
            print(re.content)
        assert r_code == re.status_code
        return re.json() if parse_json else re.content

    async def delete(self, url: str, r_code: int = 204):
        """DELETE *url*; assert *r_code*; 204 responses carry no body."""
        re = await self.c.delete(url, headers=self.default_headers)
        if re.status_code != r_code:
            print(re.content)
        assert r_code == re.status_code
        return re.json() if r_code != 204 else None

    async def post(
        self,
        url: str,
        obj: dict | str | None = None,
        r_code: int = 201,
        raw_response=False,
        *args,
        **kwargs,
    ):
        """POST *obj* as a JSON body; assert *r_code* unless *raw_response*."""
        if obj is None:  # fixed: avoid the shared mutable default ``{}``
            obj = {}
        re = await self.c.post(
            url,
            data=json.dumps(obj) if isinstance(obj, dict) else obj,
            headers=self.default_headers | {"Content-Type": "application/json"},
            *args,
            **kwargs,
        )
        if re.status_code != r_code:
            print(re.content)
        # Deliberately skip the assert when the caller asked for the raw
        # response so failing responses can still be inspected.
        if not raw_response:
            assert r_code == re.status_code
        return re.json() if not raw_response else re

    async def post_file(
        self, url: str, file, r_code: int = 201, raw_response=False, *args, **kwargs
    ):
        """POST *file* as multipart/form-data under the ``file`` field."""
        re = await self.c.post(
            url,
            files={"file": file},
            headers=self.default_headers,  # no JSON content type for uploads
            *args,
            **kwargs,
        )
        if re.status_code != r_code:
            print(re.content)
        assert r_code == re.status_code
        return re.json() if not raw_response else re

    async def patch(
        self,
        url: str,
        obj: dict | str | None = None,
        r_code: int = 200,
        raw_response=False,
        *args,
        **kwargs,
    ):
        """PATCH *obj* as a JSON body; assert *r_code*."""
        if obj is None:  # fixed: avoid the shared mutable default ``{}``
            obj = {}
        re = await self.c.patch(
            url,
            data=json.dumps(obj) if isinstance(obj, dict) else obj,
            headers=self.default_headers | {"Content-Type": "application/json"},
            *args,
            **kwargs,
        )
        if re.status_code != r_code:
            print(re.content)
        assert r_code == re.status_code
        return re.json() if not raw_response else re

    async def put(
        self,
        url: str,
        obj: dict | str | None = None,
        r_code: int = 200,
        raw_response=False,
        *args,
        **kwargs,
    ):
        """PUT *obj* as a JSON body; assert *r_code*."""
        if obj is None:  # fixed: avoid the shared mutable default ``{}``
            obj = {}
        re = await self.c.put(
            url,
            data=json.dumps(obj) if isinstance(obj, dict) else obj,
            headers=self.default_headers
            | {
                "Content-Type": "application/json",
                "accept": "application/json",
            },
            *args,
            **kwargs,
        )
        if re.status_code != r_code:
            print(re.content)
        assert r_code == re.status_code
        return re.json() if not raw_response else re

    async def obj_lifecycle(
        self,
        input_obj: dict,
        url: str,
        pagination: bool = True,
        id_field: str = "id",
        created_at_check: bool = True,
    ):
        """Smoke-test a CRUD resource: empty list → create → get → delete → 404."""
        # GET LIST (must start empty)
        re = await self.get(url)
        if pagination:
            assert re["total"] == 0
            assert len(re["results"]) == 0
        else:
            assert len(re) == 0
        # CREATE
        re = await self.post(url, obj=input_obj)
        assert id_field in re
        assert re[id_field] is not None
        if created_at_check:
            assert "created_at" in re
            assert re["created_at"] is not None
        obj_id = str(re[id_field])
        # GET single object
        re = await self.get(f"{url}{obj_id}/")
        assert re[id_field] == obj_id
        # GET LIST (now one object)
        re = await self.get(url)
        if pagination:
            assert re["total"] == 1
            assert len(re["results"]) == 1
        else:
            assert len(re) == 1
        # DELETE
        await self.delete(f"{url}{obj_id}")
        # GET LIST (empty again)
        re = await self.get(url)
        if pagination:
            assert re["total"] == 0
            assert len(re["results"]) == 0
        else:
            assert len(re) == 0
        # GET must now 404
        await self.get(f"{url}{obj_id}", parse_json=False, r_code=404)

16
creyPY/helpers.py Normal file
View File

@@ -0,0 +1,16 @@
import secrets
import string
def create_random_password(length: int = 12) -> str:
    """Return a cryptographically secure random password of *length* characters.

    The password is guaranteed to contain at least one lowercase letter, one
    uppercase letter, one digit and one punctuation character; the remaining
    characters are drawn from the full pool and the result is shuffled.

    Args:
        length: total password length; must be at least 4 so that all four
            character classes can be represented.

    Raises:
        ValueError: if ``length`` < 4 (the original silently returned a
            4-character password longer than requested).
    """
    if length < 4:
        raise ValueError("length must be at least 4 to include all character classes")
    all_characters = string.ascii_letters + string.digits + string.punctuation
    # One guaranteed character per class.
    password = [
        secrets.choice(string.ascii_lowercase),
        secrets.choice(string.ascii_uppercase),
        secrets.choice(string.digits),
        secrets.choice(string.punctuation),
    ]
    password += [secrets.choice(all_characters) for _ in range(length - 4)]
    # SystemRandom is CSPRNG-backed, so the shuffle does not weaken the password.
    secrets.SystemRandom().shuffle(password)
    return "".join(password)

View File

@@ -0,0 +1,2 @@
from .auth0 import * # noqa
from .stripe import * # noqa

View File

@@ -0,0 +1,4 @@
from .exceptions import * # noqa
from .manage import * # noqa
from .testing import * # noqa
from .utils import * # noqa

View File

@@ -0,0 +1,13 @@
import os
from dotenv import load_dotenv
# Load variables from a local .env file, if present (no-op otherwise).
load_dotenv()

# Auth0 tenant configuration, read from the environment at import time.
AUTH0_DOMAIN = os.getenv("AUTH0_DOMAIN")
AUTH0_CLIENT_ID = os.getenv("AUTH0_CLIENT_ID")
AUTH0_CLIENT_SECRET = os.getenv("AUTH0_CLIENT_SECRET")
# NOTE(review): "ALGORIGHM" is a typo for "ALGORITHM", but the name is imported
# by sibling modules (public surface) — renaming it would break callers.
AUTH0_ALGORIGHM = os.getenv("AUTH0_ALGORIGHM", "RS256")
AUTH0_AUDIENCE = os.getenv("AUTH0_AUDIENCE")
AUTH0_ISSUER = os.getenv("AUTH0_ISSUER")

View File

@@ -0,0 +1,12 @@
from fastapi import HTTPException, status
class UnauthorizedException(HTTPException):
    """HTTP 403 Forbidden — the caller is authenticated but not permitted."""

    def __init__(self, detail: str, **kwargs):
        # Forward extra keyword arguments (e.g. headers=...) to HTTPException
        # instead of silently dropping them, as the original did.
        super().__init__(status.HTTP_403_FORBIDDEN, detail=detail, **kwargs)
class UnauthenticatedException(HTTPException):
    """HTTP 401 Unauthorized — no (valid) credentials were presented."""

    def __init__(self):
        super().__init__(status_code=status.HTTP_401_UNAUTHORIZED, detail="Requires authentication")

View File

@@ -0,0 +1,21 @@
import requests
from cachetools import TTLCache, cached
from .common import AUTH0_CLIENT_ID, AUTH0_CLIENT_SECRET, AUTH0_DOMAIN
# Cache management tokens for 10 minutes so every management-API helper
# doesn't hit the token endpoint on each call.
cache = TTLCache(maxsize=100, ttl=600)


@cached(cache)
def get_management_token() -> str:
    """Fetch (and cache) an Auth0 Management API token via client credentials.

    Raises:
        requests.HTTPError: if the token endpoint returns an error status
            (the original raised an opaque KeyError instead).
    """
    response = requests.post(
        f"https://{AUTH0_DOMAIN}/oauth/token",
        json={
            "client_id": AUTH0_CLIENT_ID,
            "client_secret": AUTH0_CLIENT_SECRET,
            "audience": f"https://{AUTH0_DOMAIN}/api/v2/",  # This should be the management audience
            "grant_type": "client_credentials",
        },
        timeout=5,  # Add a timeout parameter to avoid hanging requests
    )
    # Fail loudly on HTTP errors instead of KeyError'ing on a missing key below.
    response.raise_for_status()
    return response.json()["access_token"]

View File

@@ -0,0 +1,136 @@
from typing import Optional
import jwt
import requests
from fastapi import HTTPException, Request, Security
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
from creyPY.helpers import create_random_password
from .common import (
AUTH0_ALGORIGHM,
AUTH0_AUDIENCE,
AUTH0_CLIENT_ID,
AUTH0_DOMAIN,
AUTH0_ISSUER,
)
from .exceptions import UnauthenticatedException, UnauthorizedException
from .manage import get_management_token
# One JWKS client per process; PyJWKClient caches fetched signing keys.
JWKS_CLIENT = jwt.PyJWKClient(f"https://{AUTH0_DOMAIN}/.well-known/jwks.json")


async def verify(
    request: Request,
    token: Optional[HTTPAuthorizationCredentials] = Security(HTTPBearer(auto_error=False)),
) -> str:
    """FastAPI dependency: validate the bearer JWT and return its ``sub`` claim.

    Raises:
        UnauthenticatedException: no Authorization header was sent (401).
        UnauthorizedException: the token is malformed, signed by an unknown
            key, expired, or fails audience/issuer validation (403).
    """
    if token is None:
        raise UnauthenticatedException
    # Resolve the signing key via the 'kid' header of the passed token.
    try:
        signing_key = JWKS_CLIENT.get_signing_key_from_jwt(token.credentials).key
    except (jwt.exceptions.PyJWKClientError, jwt.exceptions.DecodeError) as error:
        # Chain the cause so the original JWKS/decode error stays in tracebacks.
        raise UnauthorizedException(str(error)) from error
    try:
        payload = jwt.decode(
            token.credentials,
            signing_key,
            algorithms=[AUTH0_ALGORIGHM],
            audience=AUTH0_AUDIENCE,
            issuer=AUTH0_ISSUER,
        )
    except Exception as error:  # PyJWT raises many subclasses; map them all to 403
        raise UnauthorizedException(str(error)) from error
    return payload["sub"]
### GENERIC AUTH0 CALLS ###
def get_user(sub) -> dict:
    """Fetch the full Auth0 user profile for *sub* via the Management API."""
    token = get_management_token()
    response = requests.get(
        f"https://{AUTH0_DOMAIN}/api/v2/users/{sub}",
        headers={"Authorization": f"Bearer {token}"},
        timeout=5,
    )
    if response.status_code != 200:
        raise HTTPException(response.status_code, response.json())
    return response.json()
def patch_user(input_obj: dict, sub) -> dict:
    """PATCH the Auth0 user *sub* with *input_obj* and return the updated profile."""
    token = get_management_token()
    response = requests.patch(
        f"https://{AUTH0_DOMAIN}/api/v2/users/{sub}",
        headers={"Authorization": f"Bearer {token}"},
        json=input_obj,
        timeout=5,
    )
    if response.status_code != 200:
        raise HTTPException(response.status_code, response.json())
    return response.json()
### USER METADATA CALLS ###
def get_user_metadata(sub) -> dict:
    """Return the user's ``user_metadata`` dict, or ``{}`` if the lookup fails."""
    try:
        return get_user(sub).get("user_metadata", {})
    # fixed: was a bare ``except:`` which also swallowed SystemExit and
    # KeyboardInterrupt; best-effort fallback behavior is preserved.
    except Exception:
        return {}
def patch_user_metadata(input_obj: dict, sub) -> dict:
    """PATCH the user's ``user_metadata`` with *input_obj* via ``patch_user``."""
    payload = {"user_metadata": input_obj}
    return patch_user(payload, sub)
def clear_user_metadata(sub) -> dict:
    """Attempt to clear the user's ``user_metadata`` by patching it with {}."""
    # NOTE(review): Auth0's PATCH merges metadata objects; sending an empty
    # object may leave existing keys untouched rather than removing them —
    # verify against the Management API docs before relying on this.
    return patch_user({"user_metadata": {}}, sub)
def request_verification_mail(sub: str) -> dict:
    """Queue an Auth0 verification-email job for user *sub* and return the job info.

    The original annotated the return type as ``None`` although it returns
    ``re.json()``; the annotation is corrected to ``dict``.

    Raises:
        HTTPException: with Auth0's status and body if the job is not created.
    """
    re = requests.post(
        f"https://{AUTH0_DOMAIN}/api/v2/jobs/verification-email",
        headers={"Authorization": f"Bearer {get_management_token()}"},
        json={"user_id": sub},
        timeout=5,
    )
    if re.status_code != 201:
        raise HTTPException(re.status_code, re.json())
    return re.json()
def create_user_invite(email: str) -> dict:
    """Create a placeholder Auth0 user for an invitation flow and return it."""
    endpoint = f"https://{AUTH0_DOMAIN}/api/v2/users"
    auth_header = {"Authorization": f"Bearer {get_management_token()}"}
    body = {
        "email": email,
        "connection": "Username-Password-Authentication",
        # Random throwaway password; the invitee resets it themselves.
        "password": create_random_password(),
        "verify_email": False,
        "app_metadata": {"invitedToMyApp": True},
    }
    response = requests.post(endpoint, headers=auth_header, json=body, timeout=5)
    if response.status_code != 201:
        raise HTTPException(response.status_code, response.json())
    return response.json()
def password_change_mail(email: str) -> bool:
    """Trigger Auth0's password-change email for *email*; return True on success."""
    # NOTE(review): /dbconnections/change_password is normally a public endpoint
    # that needs no management token — confirm the Authorization header is
    # intentional. On failure the endpoint may return plain text, in which case
    # the re.json() below would itself raise — verify.
    re = requests.post(
        f"https://{AUTH0_DOMAIN}/dbconnections/change_password",
        headers={"Authorization": f"Bearer {get_management_token()}"},
        json={
            "client_id": AUTH0_CLIENT_ID,
            "email": email,
            "connection": "Username-Password-Authentication",
        },
        timeout=5,
    )
    if re.status_code != 200:
        raise HTTPException(re.status_code, re.json())
    return True

View File

@@ -0,0 +1 @@
from .testing import * # noqa

View File

@@ -0,0 +1,23 @@
class ItemReturn:
    """Static stand-in for a Stripe subscription item used by the test mock."""

    # Fixed quantity returned by the mock API.
    quantity = 1
class SubscriptionItem:
    """Mock of ``stripe.SubscriptionItem`` for tests."""

    def retrieve(self, id: str = ""):
        # NOTE(review): returns the ItemReturn class itself, not an instance —
        # callers only read .quantity, which works either way. Confirm intended.
        return ItemReturn

    def modify(self, id: str, quantity: int):
        # ``quantity`` is accepted but ignored by the mock.
        return ItemReturn
class StripeAPI:
    """Minimal stand-in for the ``stripe`` module used in tests."""

    def __init__(self, key: str):
        # The API key is irrelevant for the mock.
        pass

    @property
    def SubscriptionItem(self):
        # Mirrors stripe's module-level SubscriptionItem attribute.
        return SubscriptionItem
def get_stripe_api():
    """Return the mock Stripe API (drop-in for the real ``get_stripe_api``)."""
    return StripeAPI("test")

View File

@@ -0,0 +1,11 @@
import os
import stripe
from dotenv import load_dotenv
# Load STRIPE_API_KEY (and friends) from a local .env file if present.
load_dotenv()


def get_stripe_api():
    """Configure the global ``stripe`` module with the API key and return it."""
    # NOTE(review): stripe.api_key is process-global state; an empty-string
    # default means misconfiguration surfaces as auth errors at call time
    # rather than at import time.
    stripe.api_key = os.getenv("STRIPE_API_KEY", "")
    return stripe

7
renovate.json Normal file
View File

@@ -0,0 +1,7 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": [
"config:recommended",
":semanticCommitTypeAll(feat)"
]
}

7
requirements.auth0.txt Normal file
View File

@@ -0,0 +1,7 @@
cachetools>=5.5.0 # for caching
charset-normalizer>=3.4.0 # Auth0 API interactions
requests>=2.32.3 # Auth0 API interactions
pyjwt>=2.10.1 # Auth0 API interactions
cffi>=1.17.1 # Auth0 API interactions
cryptography>=43.0.3 # Auth0 API interactions
pycparser>=2.22 # Auth0 API interactions

View File

@@ -1,25 +1,25 @@
certifi==2024.2.2
charset-normalizer==3.3.2
docutils==0.20.1
idna==3.6
importlib_metadata==7.1.0
jaraco.classes==3.4.0
jaraco.context==4.3.0
jaraco.functools==4.0.0
keyring==25.0.0
markdown-it-py==3.0.0
mdurl==0.1.2
more-itertools==10.2.0
nh3==0.2.17
pkginfo==1.10.0
Pygments==2.17.2
readme_renderer==43.0
requests==2.31.0
requests-toolbelt==1.0.0
rfc3986==2.0.0
rich==13.7.1
setuptools==69.2.0
twine==5.0.0
urllib3==2.2.1
wheel==0.43.0
zipp==3.18.1
certifi>=2024.2.2
charset-normalizer>=3.3.2
docutils>=0.20.1
idna>=3.6
importlib_metadata>=7.1.0
jaraco.classes>=3.4.0
jaraco.context>=4.3.0
jaraco.functools>=4.0.0
keyring>=25.0.0
markdown-it-py>=3.0.0
mdurl>=0.1.2
more-itertools>=10.2.0
nh3>=0.2.17
pkginfo>=1.10.0
Pygments>=2.17.2
readme_renderer>=43.0
requests>=2.31.0
requests-toolbelt>=1.0.0
rfc3986>=2.0.0
rich>=13.7.1
setuptools>=69.2.0
twine>=5.0.0
urllib3>=2.2.1
wheel>=0.43.0
zipp>=3.18.1

5
requirements.pg.txt Normal file
View File

@@ -0,0 +1,5 @@
psycopg>=3.2.1 # PostgreSQL
psycopg-binary>=3.2.1 # PostgreSQL
psycopg-pool>=3.2.2 # PostgreSQL
asyncpg>=0.30.0 # SQLAlchemy
greenlet>=3.1.1 # Async

1
requirements.stripe.txt Normal file
View File

@@ -0,0 +1 @@
stripe==10.12.0 # Stripe

View File

@@ -1,12 +1,20 @@
annotated-types==0.6.0 # Pydantic
pydantic==2.6.4 # Pydantic
pydantic-core==2.16.3 # Pydantic
typing-extensions==4.10.0 # Pydantic
annotated-types>=0.7.0 # Pydantic
pydantic>=2.8.2 # Pydantic
pydantic-core>=2.20.1 # Pydantic
typing-extensions>=4.12.2 # Pydantic
anyio==4.3.0 # Pagination
fastapi==0.110.0 # Pagination
fastapi-pagination==0.12.21 # Pagination
sniffio==1.3.1 # Pagination
starlette==0.36.3 # Pagination
anyio>=4.4.0 # FastAPI
fastapi>=0.111.0 # FastAPI
idna>=3.7 # FastAPI
sniffio>=1.3.1 # FastAPI
starlette>=0.37.2 # FastAPI
sqlalchemy==2.0.29 # SQLAlchemy
fastapi-pagination>=0.12.26 # Pagination
sqlalchemy>=2.0.31 # SQLAlchemy
sqlalchemy-utils>=0.41.2 # For managing databases
python-dotenv>=1.0.1 # Environment variables
h11>=0.14.0 # Testing
httpcore>=1.0.5 # Testing
httpx>=0.27.0 # Testing

View File

@@ -5,6 +5,18 @@ from setuptools import find_packages, setup
with open("requirements.txt") as f:
requirements = f.read().splitlines()
with open("requirements.build.txt") as f:
build_requirements = f.read().splitlines()
with open("requirements.pg.txt") as f:
pg_requirements = f.read().splitlines()
with open("requirements.auth0.txt") as f:
auth0_requirements = f.read().splitlines()
with open("requirements.stripe.txt") as f:
stripe_requirements = f.read().splitlines()
def get_latest_git_tag() -> str:
try:
@@ -23,7 +35,9 @@ def get_latest_git_tag() -> str:
setup(
name="creyPY",
version=get_latest_git_tag(),
description="My collection of Python and FastAPI shortcuts etc.",
description="Collection of my Python and FastAPI shortcuts, snippets etc.",
long_description=open("README.md").read(),
long_description_content_type="text/markdown",
author="Conrad Großer",
author_email="conrad@noah.tech",
packages=find_packages(),
@@ -31,4 +45,20 @@ setup(
license="MIT",
python_requires=">=3.12",
install_requires=requirements,
extras_require={
"build": build_requirements,
"postgres": pg_requirements,
"auth0": auth0_requirements,
"stripe": stripe_requirements,
"all": build_requirements + pg_requirements + auth0_requirements + stripe_requirements,
},
keywords=[
"creyPY",
"Python",
"FastAPI",
"shortcuts",
"snippets",
"utils",
],
platforms="any",
)

90
test.py Normal file
View File

@@ -0,0 +1,90 @@
import unittest
from uuid import UUID
from fastapi import HTTPException
from fastapi.routing import APIRoute
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from creyPY.fastapi.app import generate_unique_id
from creyPY.fastapi.crud import (
get_object_or_404,
)
from creyPY.fastapi.models.base import Base
class MockDBClass(Base):
    """Minimal ORM model for exercising ``get_object_or_404`` in tests."""

    def __init__(self, id):
        # Overrides the SQLAlchemy declarative constructor; only the primary
        # key is set. Assumes Base declares an ``id`` column — confirm.
        self.id = id
class TestMyFunction(unittest.TestCase):
    """Tests for ``generate_unique_id`` and ``get_object_or_404``.

    Bare ``assert`` statements are replaced with unittest's assertion methods:
    bare asserts are stripped under ``python -O`` and give poor failure messages.
    """

    def setUp(self):
        # Fresh SQLite in-memory database per test.
        engine = create_engine("sqlite:///:memory:")
        # Create a sessionmaker bound to this engine.
        Session = sessionmaker(bind=engine)
        # Session() yields a session bound to the in-memory engine.
        self.db = Session()
        # Create the tables declared on Base.
        Base.metadata.create_all(engine)

    def test_generate_unique_id(self):
        # Test case 1: Route with no path parameters and GET method
        route1 = APIRoute(path="/users", methods={"GET"}, endpoint=lambda: None)
        self.assertEqual(generate_unique_id(route1), "users_list")

        # Test case 2: Route with path parameters and POST method
        route2 = APIRoute(path="/users/{user_id}", methods={"POST"}, endpoint=lambda: None)
        self.assertEqual(generate_unique_id(route2), "users_post")

        # Test case 3: Route with multiple methods — either may be picked.
        route3 = APIRoute(path="/users/{user_id}", methods={"GET", "PUT"}, endpoint=lambda: None)
        self.assertIn(generate_unique_id(route3), ("users_get", "users_put"))

        # Test case 4: Route with several path segments and DELETE method
        route4 = APIRoute(
            path="/users/{user_id}/posts/{post_id}", methods={"DELETE"}, endpoint=lambda: None
        )
        self.assertEqual(generate_unique_id(route4), "users_posts_delete")

        # Test case 5: Route with multiple path parameters and PATCH method
        route5 = APIRoute(
            path="/users/{user_id}/posts/{post_id}", methods={"PATCH"}, endpoint=lambda: None
        )
        self.assertEqual(generate_unique_id(route5), "users_posts_patch")

        # Test case 6: Route with no path parameters and PUT method
        route6 = APIRoute(path="/users", methods={"PUT"}, endpoint=lambda: None)
        self.assertEqual(generate_unique_id(route6), "users_put")

    def test_get_object_or_404_existing_object(self):
        # Arrange: persist one object.
        obj_id = UUID("123e4567-e89b-12d3-a456-426614174000")
        obj = MockDBClass(obj_id)
        self.db.add(obj)
        self.db.commit()
        # Act
        result = get_object_or_404(MockDBClass, obj_id, self.db)
        # Assert
        self.assertEqual(result, obj)

    def test_get_object_or_404_non_existing_object(self):
        # Arrange: nothing persisted for this id.
        obj_id = UUID("123e4567-e89b-12d3-a456-426614174000")
        # Act & Assert
        with self.assertRaises(HTTPException) as exc_info:
            get_object_or_404(MockDBClass, obj_id, self.db)
        self.assertEqual(exc_info.exception.status_code, 404)
        self.assertEqual(exc_info.exception.detail, "The object does not exist.")
if __name__ == "__main__":
    # Allow running this test module directly: ``python test.py``.
    unittest.main()