Compare commits

...

55 Commits

Author SHA1 Message Date
d6f79c3ed8 fix: fixed naming of pre-release commits 2024-10-24 12:35:13 +02:00
3f4a0ee00d fix: fixed naming of pre-release commits 2024-10-24 12:25:30 +02:00
714178d68f fix: fixed naming of pre-release commits 2024-10-24 12:22:45 +02:00
c7e205f14b fix: fixed naming of pre-release commits 2024-10-24 12:18:50 +02:00
39ae74becb fix: minor changelog adjustment 2024-10-24 12:15:23 +02:00
5f39966223 fix: Fixed tag pushing and changelog 2024-10-24 12:10:16 +02:00
c91e684f08 fix: fix attempt for the github pipeline 2024-10-24 12:10:16 +02:00
f11b8b8864 fix: alternative attempt on the fix 2024-10-24 12:10:16 +02:00
983553e97a fix: locked tag and publish to master and dev 2024-10-24 12:10:16 +02:00
8740eafce2 fix: fixed pipeline tagging 2024-10-24 12:10:16 +02:00
aa44b9ebe9 fix: fixed pipeline tagging 2024-10-24 12:10:16 +02:00
851573d964 fix: fixed pipeline tagging 2024-10-24 12:10:16 +02:00
cfa1da08d3 fix: pipeline now pushes pre-release versions 2024-10-24 12:10:16 +02:00
4a5a777ef5 breaking: Fixed #3 2024-10-24 12:10:16 +02:00
c9a9b1bc0a breaking: Fixed #1 2024-10-24 12:10:16 +02:00
d9f6e82736 Merge pull request #5 from creyD/dev
Co-authored-by: vikbhas <waraa.vignesh@gmail.com>
Co-authored-by: vikynoah <vigneshwaraa.sarangapani@noah.tech>
Co-authored-by: creyD <creyD@users.noreply.github.com>
2024-10-24 11:04:12 +02:00
65e93a023b fix: minor vscode adjustments 2024-10-24 09:32:11 +02:00
creyD
6ce0cfbd14 Adjusted files for isort & autopep 2024-10-24 07:27:26 +00:00
vikynoah
da7ec0b28e Feat: Addition of pagination proxy and Flag functionality (#4)
Co-authored-by: vikbhas <waraa.vignesh@gmail.com>
2024-10-24 09:26:57 +02:00
2727c452b6 fix: adjusted pipeline to dev branch and pull requests 2024-10-24 09:25:39 +02:00
be7d7ddb22 fix: bumped dependencies 2024-07-14 18:24:07 +02:00
3f0379290d fix: Dependencies can now be installed with newer versions 2024-05-14 14:40:12 +02:00
creyD
fa7a1c8a61 Adjusted files for isort & autopep 2024-05-13 09:22:06 +00:00
5a7e1776db fix: Added option to specify lookup_column for get_object_or_404 2024-05-13 11:21:25 +02:00
4c25d91e18 fix: fixed another minor bug with the order_by method 2024-04-25 19:43:12 +02:00
f24db62781 fix: fixed a bug with the jsonschema for order_by 2024-04-25 18:44:58 +02:00
4d997a375e feat: added order_by method 2024-04-25 18:19:26 +02:00
ee11d86235 feat: Added headers to testing 2024-04-02 13:18:41 +02:00
e47f5f2b07 breaking: Release of 1.0.0 2024-04-02 12:19:08 +02:00
754a951dc3 Fixed missing dependencies 2024-04-02 12:14:47 +02:00
8eb04f4b17 beaking: Version 1 release 2024-04-02 12:09:06 +02:00
140c6e4678 Minor bugfix 2024-04-02 11:40:26 +02:00
6fc0d01189 feat: Added generic testing client 2024-04-02 11:37:51 +02:00
10eaa2c0e0 Fixed workflow 2024-04-01 20:57:39 +02:00
b549fd941c Added content type for long description 2024-04-01 20:53:02 +02:00
2f4e3e7dba Added long_description 2024-04-01 20:46:27 +02:00
38d9a0c177 Added additional fetching for git checkout 2024-04-01 20:43:11 +02:00
f4c9b7b6b6 Update ci.yml 2024-04-01 20:38:16 +02:00
5b3389e939 Added manual triggering of the action 2024-04-01 20:33:34 +02:00
7eabeb3e04 Moved to tagging without prefix 2024-04-01 20:31:45 +02:00
51d4e7e6b8 Reworked CI 2024-04-01 20:28:16 +02:00
3fc6ae51c3 Added todos 2024-04-01 20:19:43 +02:00
525af5b34d Merged workflows 2024-04-01 20:19:02 +02:00
2daa8e5a22 feat: add todo 2024-04-01 20:11:51 +02:00
4fef25f898 feat: smaller bugfix for pipeline 2024-04-01 20:07:54 +02:00
creyD
351642b0f1 Adjusted files for isort & autopep 2024-04-01 18:05:56 +00:00
80dfe98a1d feat: added todos to trigger pipeline 2024-04-01 20:05:13 +02:00
b2b726ed9a Added testing to CI 2024-04-01 20:04:19 +02:00
246eccd606 feat: Added testing for CRUD 2024-04-01 20:01:15 +02:00
fc13dad076 feat: Added tests 2024-04-01 19:44:13 +02:00
172f47221c Fixed issue in pipeline 2024-04-01 19:29:58 +02:00
65c8203348 Fixed issue in pipeline 2024-04-01 19:26:14 +02:00
515c3372c6 Minor Changes 2024-04-01 19:22:08 +02:00
43055dde1b Triggering workflow 2024-04-01 19:19:35 +02:00
0aaa1dc6a1 Fixed workflow 2024-04-01 19:18:43 +02:00
21 changed files with 633 additions and 124 deletions

103
.github/workflows/ci.yml vendored Normal file
View File

@@ -0,0 +1,103 @@
name: Lint, Test, Tag & Publish

on:
  push:
    branches:
      - master
      - dev
    paths-ignore:
      - "**/.github/**"
      - "**/.gitignore"
      - "**/.vscode/**"
      - "**/README.md"
      - "**/CHANGELOG.md"
  pull_request:
    branches:
      - master
      - dev
  workflow_dispatch:

jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: psf/black@stable
        with:
          options: "-l 100 --exclude '/.venv/|/__init__.py'"
      - uses: creyD/autoflake_action@master
        with:
          no_commit: True
          options: --in-place --remove-all-unused-imports -r --exclude **/__init__.py,**/db/models.py,
      - uses: stefanzweifel/git-auto-commit-action@v5
        with:
          commit_message: Adjusted files for isort & autopep

  test:
    runs-on: ubuntu-latest
    needs: lint
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: "3.12"
      - run: python -m pip install --upgrade pip
      - run: python -m pip install -r requirements.txt
      - run: python test.py

  tag_and_publish:
    runs-on: ubuntu-latest
    # FIX: github.head_ref is only defined for pull_request events; on a
    # branch push it is empty, so the original condition
    # (github.head_ref == 'master' || github.head_ref == 'dev') never matched
    # and this job was silently skipped on every push. github.ref_name holds
    # the branch name on push events; the event check keeps PR merge refs out.
    if: github.event_name == 'push' && (github.ref_name == 'master' || github.ref_name == 'dev')
    needs: test
    permissions:
      id-token: write # IMPORTANT: this permission is mandatory for trusted publishing
      contents: write # for the tags
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-tags: true
          ref: ${{ github.ref_name }}
          fetch-depth: 0
      - name: setup git
        run: |
          git config --local user.email "15138480+creyD@users.noreply.github.com"
          git config --local user.name "creyD"
      - name: Git Version
        uses: codacy/git-version@2.8.0
        id: git_version
        with:
          minor-identifier: "feat:"
          major-identifier: "breaking:"
          release-branch: ${{ github.ref_name }}
      # Job-level `if` already restricts this to pushes on master/dev, so the
      # original redundant step-level guard was dropped.
      - name: Create & Push Tag
        run: |
          if [ "${{ github.ref_name }}" == "master" ]; then
            git tag ${{ steps.git_version.outputs.version }}
            git push origin ${{ steps.git_version.outputs.version }}
          elif [ "${{ github.ref_name }}" == "dev" ]; then
            calculatedSha=$(git rev-parse --short ${{ github.sha }})
            # NOTE(review): the '+<sha>' local-version segment is rejected by
            # PyPI (PEP 440 forbids local versions on upload) — confirm the
            # rc tag format against setup.py's get_latest_git_tag.
            git tag ${{ steps.git_version.outputs.version }}-rc+${calculatedSha}
            git push origin ${{ steps.git_version.outputs.version }}-rc+${calculatedSha}
          fi
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.12"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.build.txt
          python setup.py sdist bdist_wheel
      - name: Build and publish
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          user: __token__
          password: ${{ secrets.PYPI_API_TOKEN }}

View File

@@ -1,46 +0,0 @@
name: Lint and tag
on:
push:
branches:
- master
paths-ignore:
- "**/.github/**"
- "**/.gitignore"
- "**/.vscode/**"
- "**/README.md"
- "**/CHANGELOG.md"
jobs:
lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: psf/black@stable
with:
options: "-l 100 --exclude '/.venv/|/__init__.py'"
- uses: creyD/autoflake_action@master
with:
no_commit: True
options: --in-place --remove-all-unused-imports -r --exclude **/__init__.py,**/db/models.py,
- uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: Adjusted files for isort & autopep
tag:
runs-on: ubuntu-latest
needs: lint
steps:
- name: Git Version
uses: codacy/git-version@2.8.0
id: git_version
with:
prefix: v
minor-identifier: "feat:"
major-identifier: "breaking:"
- name: Create Tag
run: git tag -a v${{ steps.git_version.outputs.version }} -m "v${{ steps.git_version.outputs.version }}"
- name: Push Tag
run: git push origin v${{ steps.git_version.outputs.version }}

View File

@@ -1,29 +0,0 @@
name: Publish to pypi
on:
push:
tags:
- '*'
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.12'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements.build.txt
- name: Build and publish
uses: pypa/gh-action-pypi-publish@release/v1
with:
user: __token__
password: ${{ secrets.PYPI_API_TOKEN }}

View File

@@ -26,10 +26,16 @@
"**/db.sqlite3": true,
"**/.DS_Store": true,
"**/*.pyc": true,
"**/__pycache__/": true
"**/__pycache__/": true,
"**/build": true,
"**/dist": true,
"**/*.egg-info": true,
},
"search.exclude": {
"**/.git": true,
"**/build": true,
"**/*.egg-info": true,
"**/dist": true,
"**/.venv": true,
"**/tmp": true,
"htmlcov/*": true,

49
CHANGELOG.md Normal file
View File

@@ -0,0 +1,49 @@
# Changelog
All notable changes to this project will be documented in this file.
## 2.0.0
- Fixed #1 Rename misspelled additonal_data to additional_data on create_obj_from_data
- Fixed #3 Inverse partial flag: bool = False because it was wrong on update_obj_from_data
Notes:
You will need to change calls to `create_obj_from_data` according to #1 (rename additonal_data to additional_data)
You will need to change calls to `update_obj_from_data` according to #3 (if you supplied `partial`, you will need to reverse it: `true` -> `false` and `false` -> `true`)
## 1.3.0
- Addition of pagination proxy and pagination=off functionality (Thanks to @vikbhas)
## 1.2.5
- Bumped dependencies
## 1.2.4
- Enabled newer versions for all dependencies
## 1.2.3
- Added option to specify lookup_column for get_object_or_404
## 1.2.2
- Added order_by method
## 1.1.0
- Added headers to testing
## 1.0.0
- Bumped dependencies
- Added const documentation
- Added installation instructions and examples to README
- Added sqlalchemy session for db connection
## 0.2.0
- Added testing client

View File

@@ -1,6 +1,6 @@
MIT License
Copyright (c) 2024 Conrad
Copyright (c) 2024 Conrad Großer
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

View File

@@ -1,9 +1,62 @@
# creyPY
My collection of Python and FastAPI shortcuts etc.
## Installation
# Release
```bash
pip install creyPY -U
```
``` rm -rf dist build creyPY.egg-info && python setup.py sdist bdist_wheel ```
## Versioning
``` twine upload dist/* ```
This library uses [Semantic Versioning](https://semver.org/).
## FastAPI
This library installs fastapi and pydantic, as well as sqlalchemy for you. It also provides a sqlalchemy base class and companion pydantic schemas. Also there are some helper functions for FastAPI in `creyPY.fastapi.app` like `generate_unique_id` to generate unique operation IDs for the OpenAPI schema to work with code generators.
### Database connection
The `creyPY.fastapi.db` module provides a `Session` class that can be used as a context manager to connect to a database. It exposes the `SQLALCHEMY_DATABASE_URL` variable for you to use. It uses the following environment variables:
- `POSTGRES_HOST`: The host of the database
- `POSTGRES_PORT`: The port of the database
- `POSTGRES_USER`: The user of the database
- `POSTGRES_PASSWORD`: The password of the database
- `POSTGRES_DB`: The database name
Currently only PostgreSQL is supported. It creates a sync session, it is planned to add async support in the future. You can use this like this:
```python
from creyPY.fastapi.db.session import get_db
async def test_endpoint(
db: Session = Depends(get_db),
) -> Any:
pass
```
## Constants
The constants module contains a few enums that I use in my projects. The best way to understand this library is to look at the code (it's not that much). However for simplicity, here is a brief overview:
- LanguageEnum: Contains all languages according to ISO 639
- CountryEnum: Contains all countries according to ISO 3166
- CurrencyEnum: Contains all accepted stripe currencies (Commented out are the Zero-decimal currencies, to avoid custom implementation)
- StripeStatus: Contains all stripe payment statuses
- GroupMode: Contains time group modes (e.g. day, week, month, year)
### Usage example
```python
from creyPY.const import LanguageEnum
print(LanguageEnum.EN) # Output: LanguageEnum.EN
print(LanguageEnum.EN.value) # Output: English
```
## TODO
- Add async support for database connection
- Add version without postgresql dependency

View File

@@ -1,6 +1,7 @@
import enum
# Source: https://en.wikipedia.org/wiki/List_of_ISO_3166_country_codes
class CountryEnum(str, enum.Enum):
AF = "Afghanistan"
AX = "Åland Islands"
@@ -248,6 +249,7 @@ class CountryEnum(str, enum.Enum):
ZW = "Zimbabwe"
# :: https://en.wikipedia.org/wiki/List_of_ISO_639_language_codes
class LanguageEnum(str, enum.Enum):
AA = "Afar"
AB = "Abkhazian"

View File

@@ -1,5 +1,7 @@
from .app import * # noqa
from .crud import * # noqa
from .db import * # noqa
from .models import * # noqa
from .pagination import * # noqa
from .schemas import * # noqa
from .testing import * # noqa

View File

@@ -10,8 +10,10 @@ from .models.base import Base
T = TypeVar("T", bound=Base)
def get_object_or_404(db_class: Type[T], id: UUID | str, db: Session, expunge: bool = False) -> T:
obj = db.query(db_class).filter(db_class.id == id).one_or_none()
def get_object_or_404(
db_class: Type[T], id: UUID | str, db: Session, expunge: bool = False, lookup_column: str = "id"
) -> T:
obj = db.query(db_class).filter(getattr(db_class, lookup_column) == id).one_or_none()
if obj is None:
raise HTTPException(status_code=404, detail="The object does not exist.")
if expunge:
@@ -19,28 +21,30 @@ def get_object_or_404(db_class: Type[T], id: UUID | str, db: Session, expunge: b
return obj
# TODO: Add testing
def create_obj_from_data(
data: BaseModel, model: Type[T], db: Session, additonal_data={}, exclude={}
data: BaseModel, model: Type[T], db: Session, additional_data={}, exclude={}
) -> T:
obj = model(**data.model_dump(exclude=exclude) | additonal_data)
obj = model(**data.model_dump(exclude=exclude) | additional_data)
db.add(obj)
db.commit()
db.refresh(obj)
return obj
# TODO: Add testing
def update_obj_from_data(
data: BaseModel,
model: Type[T],
id: UUID | str,
db: Session,
partial: bool = False,
partial: bool = True,
ignore_fields=[],
additional_data={},
exclude={},
) -> T:
obj = get_object_or_404(model, id, db)
data_dict = data.model_dump(exclude_unset=not partial, exclude=exclude)
data_dict = data.model_dump(exclude_unset=partial, exclude=exclude)
data_dict.update(additional_data) # merge additional_data into data_dict
for field in data_dict:
if field not in ignore_fields:
@@ -50,6 +54,7 @@ def update_obj_from_data(
return obj
# TODO: Add testing
def delete_object(db_class: Type[T], id: UUID | str, db: Session) -> None:
obj = db.query(db_class).filter(db_class.id == id).one_or_none()
if obj is None:

View File

@@ -0,0 +1 @@
from .session import * # noqa

View File

@@ -0,0 +1,26 @@
"""Synchronous PostgreSQL session setup for FastAPI apps.

Connection parameters come from POSTGRES_* environment variables (a local
.env file is honoured via python-dotenv); each value falls back to a local
development default.
"""
import os
from typing import Generator

from dotenv import load_dotenv
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm.session import Session

# Load variables from a .env file (if present) into the process environment.
load_dotenv()

host = os.getenv("POSTGRES_HOST", "localhost")
user = os.getenv("POSTGRES_USER", "postgres")
password = os.getenv("POSTGRES_PASSWORD", "root")
port = os.getenv("POSTGRES_PORT", "5432")
name = os.getenv("POSTGRES_DB", "fastapi")

# Exported base URL deliberately omits the database name so callers can
# append a different database; the engine below appends `name` itself.
SQLALCHEMY_DATABASE_URL = f"postgresql+psycopg://{user}:{password}@{host}:{port}/"

# pool_pre_ping=True checks connections before use so stale ones are recycled.
engine = create_engine(SQLALCHEMY_DATABASE_URL + name, pool_pre_ping=True)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)


def get_db() -> Generator[Session, None, None]:
    """FastAPI dependency: yield a session, closed when the request ends."""
    with SessionLocal() as db:
        yield db

View File

@@ -19,6 +19,9 @@ class Base:
__name__: str
# TODO: Add default representation string
# TODO: Add automated foreign key resolution
# Generate __tablename__ automatically
@declared_attr
def __tablename__(cls) -> str:

View File

@@ -0,0 +1,25 @@
from typing import Callable

from pydantic.json_schema import SkipJsonSchema
from sqlalchemy import String, asc, cast, desc
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.sql.selectable import Select


def order_by(order_by: str | SkipJsonSchema[None] = None) -> Callable[[Select], Select]:
    """Build a query transformer that orders ``query`` by the named column.

    A leading ``-`` selects descending order (e.g. ``-created_at``).
    If the name matches none of the query's selected columns, the query is
    returned unchanged (unknown columns are silently ignored).
    SkipJsonSchema keeps the optional parameter out of the generated
    OpenAPI/JSON schema.
    """

    def _order_by(query: Select) -> Select:
        if order_by:
            direction = desc if order_by.startswith("-") else asc
            # lstrip removes any number of leading '-' characters.
            column_name = order_by.lstrip("-")
            # Get the column from the query
            for column in query.inner_columns:
                if column.key == column_name:
                    # If the column is a UUID, cast it to a string —
                    # presumably to get text ordering from PostgreSQL;
                    # TODO(review): confirm this is the intended collation.
                    if isinstance(column.type, UUID):
                        column = cast(column, String)
                    query = query.order_by(direction(column))
                    break
        return query

    return _order_by

View File

@@ -1,14 +1,28 @@
from math import ceil
from typing import Any, Generic, Optional, Self, Sequence, TypeVar
from typing import Any, Generic, Optional, Self, Sequence, TypeVar, Union
from fastapi_pagination import Params
from fastapi_pagination.bases import AbstractPage, AbstractParams
from fastapi_pagination.types import GreaterEqualOne, GreaterEqualZero
from fastapi_pagination.types import (
GreaterEqualOne,
GreaterEqualZero,
AdditionalData,
SyncItemsTransformer,
)
from fastapi_pagination.api import create_page, apply_items_transformer
from fastapi_pagination.utils import verify_params
from fastapi_pagination.ext.sqlalchemy import create_paginate_query
from pydantic.json_schema import SkipJsonSchema
from sqlalchemy.sql.selectable import Select
from sqlalchemy.orm.session import Session
from sqlalchemy import select, func
T = TypeVar("T")
# TODO: Add complete fastapi-pagination proxy here
# TODO: Add pagination off functionality
# SkipJsonSchema is used to avoid generating invalid JSON schema in FastAPI
class Page(AbstractPage[T], Generic[T]):
results: Sequence[T]
page: GreaterEqualOne | SkipJsonSchema[None] = None
@@ -67,3 +81,44 @@ def parse_page(response, page: int, size: int) -> Page:
has_next=response.has_next,
has_prev=response.has_prev,
)
def create_count_query(query: Select) -> Select:
    """Wrap ``query`` in a subquery and SELECT COUNT(*) over it."""
    return select(func.count()).select_from(query.subquery())
def unwrap_scalars(
    items: Sequence[Sequence[T]],
    force_unwrap: bool = True,
) -> Union[Sequence[T], Sequence[Sequence[T]]]:
    """Collapse one-element result rows to their single value.

    With ``force_unwrap`` (the default) each row contributes its first
    element; otherwise the rows are returned as-is (in a fresh list).
    """
    if not force_unwrap:
        return [row for row in items]
    return [row[0] for row in items]
def paginate(
    connection: Session,
    query: Select,
    paginationFlag: bool = True,
    params: Optional[AbstractParams] = None,
    transformer: Optional[SyncItemsTransformer] = None,
    additional_data: Optional[AdditionalData] = None,
):
    """Execute ``query`` and return a fastapi-pagination page of the results.

    When ``paginationFlag`` is False, the entire result set is returned as a
    single page sized to the total row count.

    :param connection: open SQLAlchemy session used to run the queries
    :param query: select statement to count and paginate
    :param paginationFlag: False disables pagination (one page with everything)
    :param params: explicit pagination params; resolved via verify_params
    :param transformer: optional items transformer applied before page creation
    :param additional_data: extra keyword fields forwarded to create_page
    """
    params, _ = verify_params(params, "limit-offset", "cursor")
    count_query = create_count_query(query)
    total = connection.scalar(count_query)
    if paginationFlag is False:
        # FIX: Params requires size >= 1; with an empty result set `total` is
        # 0 (or None) and Params(page=1, size=0) raised a validation error.
        # NOTE(review): fastapi-pagination's default Params also caps size at
        # 100 — confirm pagination-off works for result sets larger than that.
        params = Params(page=1, size=max(total or 0, 1))
    query = create_paginate_query(query, params)
    items = unwrap_scalars(connection.execute(query).all())
    t_items = apply_items_transformer(items, transformer)
    return create_page(
        t_items,
        params=params,
        total=total,
        **(additional_data or {}),
    )

View File

@@ -4,6 +4,7 @@ from uuid import UUID
from pydantic import BaseModel, ConfigDict
# The created_by_id is a string because we use the sub from Auth0
class BaseSchemaModelIN(BaseModel):
created_by_id: str
model_config = ConfigDict(from_attributes=True)

138
creyPY/fastapi/testing.py Normal file
View File

@@ -0,0 +1,138 @@
import json
from fastapi.testclient import TestClient
class GenericClient(TestClient):
    """Convenience wrapper around fastapi.testclient.TestClient for API tests.

    Every verb helper sends ``self.default_headers``, asserts the expected
    status code (printing the response body first when it differs, to aid
    debugging) and returns the parsed JSON body unless told otherwise.
    The ``obj: dict | str = {}`` defaults are shared objects but are never
    mutated, so they are safe.
    """

    def __init__(self, client):
        # NOTE(review): TestClient.__init__ is deliberately NOT called
        # (matches the original) — all traffic goes through the wrapped
        # client ``self.c``; inherited TestClient state stays uninitialized.
        self.c = TestClient(client)
        self.default_headers = {}

    def _check(self, re, r_code, raw_response):
        """Assert the status code; return the raw response or its JSON body."""
        if re.status_code != r_code:
            print(re.content)
        assert r_code == re.status_code
        return re.json() if not raw_response else re

    def _json_request(self, method, url, obj, r_code, raw_response, extra_headers, args, kwargs):
        """Shared implementation of post/patch/put with a JSON payload."""
        re = getattr(self.c, method)(
            url,
            # dicts are serialized here; strings are assumed to be JSON already.
            # FIX: isinstance instead of `type(obj) == dict` so dict subclasses
            # are serialized too.
            data=json.dumps(obj) if isinstance(obj, dict) else obj,
            headers=self.default_headers | {"Content-Type": "application/json"} | extra_headers,
            *args,
            **kwargs,
        )
        return self._check(re, r_code, raw_response)

    def get(self, url: str, r_code: int = 200, parse_json=True):
        """GET ``url``; return parsed JSON, or raw bytes when parse_json=False."""
        re = self.c.get(url, headers=self.default_headers)
        if re.status_code != r_code:
            print(re.content)
        assert r_code == re.status_code
        return re.json() if parse_json else re.content

    def delete(self, url: str, r_code: int = 204):
        """DELETE ``url``; a 204 response has no body, so return None then."""
        re = self.c.delete(url, headers=self.default_headers)
        if re.status_code != r_code:
            print(re.content)
        assert r_code == re.status_code
        return re.json() if r_code != 204 else None

    def post(
        self, url: str, obj: dict | str = {}, r_code: int = 201, raw_response=False, *args, **kwargs
    ):
        """POST ``obj`` as JSON; expects 201 by default."""
        return self._json_request("post", url, obj, r_code, raw_response, {}, args, kwargs)

    def post_file(self, url: str, file, r_code: int = 201, raw_response=False, *args, **kwargs):
        """POST a multipart file upload under the ``file`` field name."""
        re = self.c.post(
            url,
            files={"file": file},
            # NOTE(review): forcing Content-Type application/json here
            # overrides the multipart boundary the client would set — kept
            # as-is to preserve behavior, but confirm uploads parse
            # server-side.
            headers=self.default_headers | {"Content-Type": "application/json"},
            *args,
            **kwargs,
        )
        return self._check(re, r_code, raw_response)

    def patch(
        self, url: str, obj: dict | str = {}, r_code: int = 200, raw_response=False, *args, **kwargs
    ):
        """PATCH ``obj`` as JSON; expects 200 by default."""
        return self._json_request("patch", url, obj, r_code, raw_response, {}, args, kwargs)

    def put(
        self, url: str, obj: dict | str = {}, r_code: int = 200, raw_response=False, *args, **kwargs
    ):
        """PUT ``obj`` as JSON; additionally advertises an accept header."""
        return self._json_request(
            "put", url, obj, r_code, raw_response, {"accept": "application/json"}, args, kwargs
        )

    def obj_lifecycle(
        self,
        input_obj: dict,
        url: str,
        pagination: bool = True,
        id_field: str = "id",
        created_at_check: bool = True,
    ):
        """Exercise the full CRUD lifecycle of the resource at ``url``.

        Asserts: empty list -> create -> fetch by id -> list of one ->
        delete -> empty list -> 404 on fetch.
        """
        # GET LIST (must start empty)
        re = self.get(url)
        if pagination:
            assert re["total"] == 0
            assert len(re["results"]) == 0
        else:
            assert len(re) == 0
        # CREATE
        re = self.post(url, obj=input_obj)
        assert id_field in re
        assert re[id_field] is not None
        if created_at_check:
            assert "created_at" in re
            assert re["created_at"] is not None
        obj_id = str(re[id_field])
        # GET
        # NOTE(review): the detail GET uses a trailing slash here but the
        # DELETE and the final 404 check below omit it — confirm the API
        # treats both forms identically.
        re = self.get(f"{url}{obj_id}/")
        assert re[id_field] == obj_id
        # GET LIST (now exactly one element)
        re = self.get(url)
        if pagination:
            assert re["total"] == 1
            assert len(re["results"]) == 1
        else:
            assert len(re) == 1
        # DELETE
        self.delete(f"{url}{obj_id}")
        # GET LIST (empty again)
        re = self.get(url)
        if pagination:
            assert re["total"] == 0
            assert len(re["results"]) == 0
        else:
            assert len(re) == 0
        # GET (must now 404)
        self.get(f"{url}{obj_id}", parse_json=False, r_code=404)

View File

@@ -1,25 +1,27 @@
certifi==2024.2.2
charset-normalizer==3.3.2
docutils==0.20.1
idna==3.6
importlib_metadata==7.1.0
jaraco.classes==3.4.0
jaraco.context==4.3.0
jaraco.functools==4.0.0
keyring==25.0.0
markdown-it-py==3.0.0
mdurl==0.1.2
more-itertools==10.2.0
nh3==0.2.17
pkginfo==1.10.0
Pygments==2.17.2
readme_renderer==43.0
requests==2.31.0
requests-toolbelt==1.0.0
rfc3986==2.0.0
rich==13.7.1
setuptools==69.2.0
twine==5.0.0
urllib3==2.2.1
wheel==0.43.0
zipp==3.18.1
certifi>=2024.2.2
charset-normalizer>=3.3.2
docutils>=0.20.1
idna>=3.6
importlib_metadata>=7.1.0
jaraco.classes>=3.4.0
jaraco.context>=4.3.0
jaraco.functools>=4.0.0
keyring>=25.0.0
markdown-it-py>=3.0.0
mdurl>=0.1.2
more-itertools>=10.2.0
nh3>=0.2.17
pkginfo>=1.10.0
Pygments>=2.17.2
readme_renderer>=43.0
requests>=2.31.0
requests-toolbelt>=1.0.0
rfc3986>=2.0.0
rich>=13.7.1
setuptools>=69.2.0
twine>=5.0.0
urllib3>=2.2.1
wheel>=0.43.0
zipp>=3.18.1
-r requirements.txt

View File

@@ -1,12 +1,23 @@
annotated-types==0.6.0 # Pydantic
pydantic==2.6.4 # Pydantic
pydantic-core==2.16.3 # Pydantic
typing-extensions==4.10.0 # Pydantic
annotated-types>=0.7.0 # Pydantic
pydantic>=2.8.2 # Pydantic
pydantic-core>=2.20.1 # Pydantic
typing-extensions>=4.12.2 # Pydantic
anyio==4.3.0 # Pagination
fastapi==0.110.0 # Pagination
fastapi-pagination==0.12.21 # Pagination
sniffio==1.3.1 # Pagination
starlette==0.36.3 # Pagination
anyio>=4.4.0 # FastAPI
fastapi>=0.111.0 # FastAPI
idna>=3.7 # FastAPI
sniffio>=1.3.1 # FastAPI
starlette>=0.37.2 # FastAPI
sqlalchemy==2.0.29 # SQLAlchemy
fastapi-pagination>=0.12.26 # Pagination
sqlalchemy>=2.0.31 # SQLAlchemy
python-dotenv>=1.0.1 # Environment variables
psycopg>=3.2.1 # PostgreSQL
psycopg-binary>=3.2.1 # PostgreSQL
psycopg-pool>=3.2.2 # PostgreSQL
h11>=0.14.0 # Testing
httpcore>=1.0.5 # Testing
httpx>=0.27.0 # Testing

View File

@@ -23,7 +23,9 @@ def get_latest_git_tag() -> str:
setup(
name="creyPY",
version=get_latest_git_tag(),
description="My collection of Python and FastAPI shortcuts etc.",
description="Collection of my Python and FastAPI shortcuts, snippets etc.",
long_description=open("README.md").read(),
long_description_content_type="text/markdown",
author="Conrad Großer",
author_email="conrad@noah.tech",
packages=find_packages(),
@@ -31,4 +33,14 @@ setup(
license="MIT",
python_requires=">=3.12",
install_requires=requirements,
keywords=[
"creyPY",
"Python",
"FastAPI",
"shortcuts",
"snippets",
"utils",
"personal library",
],
platforms="any",
)

90
test.py Normal file
View File

@@ -0,0 +1,90 @@
import unittest
from uuid import UUID
from fastapi import HTTPException
from fastapi.routing import APIRoute
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from creyPY.fastapi.app import generate_unique_id
from creyPY.fastapi.crud import (
get_object_or_404,
)
from creyPY.fastapi.models.base import Base
class MockDBClass(Base):
    # Minimal model used to exercise the CRUD helpers; table name and the
    # `id` column come from the declarative Base.
    def __init__(self, id):
        # NOTE(review): bypasses any declarative __init__ logic — assumes
        # direct attribute assignment is sufficient here; confirm.
        self.id = id
class TestMyFunction(unittest.TestCase):
    """Tests for generate_unique_id and get_object_or_404 against an
    in-memory SQLite database (fresh per test via setUp)."""

    def setUp(self):
        # Create a SQLite in-memory database for testing
        engine = create_engine("sqlite:///:memory:")
        # Create a sessionmaker bound to this engine
        Session = sessionmaker(bind=engine)
        # Now you can use Session() to get a session bound to the engine
        self.db = Session()
        # create the table
        Base.metadata.create_all(engine)

    def test_generate_unique_id(self):
        # Test case 1: Route with no path parameters and GET method
        route1 = APIRoute(path="/users", methods={"GET"}, endpoint=lambda: None)
        assert generate_unique_id(route1) == "users_list"
        # Test case 2: Route with path parameters and POST method
        route2 = APIRoute(path="/users/{user_id}", methods={"POST"}, endpoint=lambda: None)
        assert generate_unique_id(route2) == "users_post"
        # Test case 3: Route with path parameters and multiple methods
        # (methods is a set, so either suffix may be picked)
        route3 = APIRoute(path="/users/{user_id}", methods={"GET", "PUT"}, endpoint=lambda: None)
        result = generate_unique_id(route3)
        assert result == "users_get" or result == "users_put"
        # Test case 4: Route with special characters in path
        route4 = APIRoute(
            path="/users/{user_id}/posts/{post_id}", methods={"DELETE"}, endpoint=lambda: None
        )
        assert generate_unique_id(route4) == "users_posts_delete"
        # Test case 5: Route with multiple path parameters and PATCH method
        route5 = APIRoute(
            path="/users/{user_id}/posts/{post_id}", methods={"PATCH"}, endpoint=lambda: None
        )
        assert generate_unique_id(route5) == "users_posts_patch"
        # Test case 6: Route with no path parameters and PUT method
        route6 = APIRoute(path="/users", methods={"PUT"}, endpoint=lambda: None)
        assert generate_unique_id(route6) == "users_put"

    def test_get_object_or_404_existing_object(self):
        # Arrange
        obj_id = UUID("123e4567-e89b-12d3-a456-426614174000")
        obj = MockDBClass(obj_id)
        self.db.add(obj)
        self.db.commit()
        # Act
        result = get_object_or_404(MockDBClass, obj_id, self.db)
        # Assert
        assert result == obj

    def test_get_object_or_404_non_existing_object(self):
        # Arrange
        obj_id = UUID("123e4567-e89b-12d3-a456-426614174000")
        # Act & Assert: helper raises HTTPException(404) when the row is absent
        with self.assertRaises(HTTPException) as exc_info:
            get_object_or_404(MockDBClass, obj_id, self.db)
        assert exc_info.exception.status_code == 404
        assert exc_info.exception.detail == "The object does not exist."
# Allow running this file directly (the CI pipeline does `python test.py`).
if __name__ == "__main__":
    unittest.main()