Compare commits

...

5 Commits

Author SHA1 Message Date
creyD
50031556f9 Adjusted files for isort & autopep 2024-11-12 08:54:34 +00:00
vikynoah
2940ddbdcd feat: Introduce ASYNC DB as Plug and Play (#16)
Co-authored-by: vikbhas <waraa.vignesh@gmail.com>
2024-11-12 09:54:04 +01:00
807af12fa1 Merge pull request #13 from creyD/dev 2024-11-05 11:54:46 +01:00
creyD
dce897c247 Adjusted files for isort & autopep 2024-11-05 10:29:40 +00:00
vikynoah
89997372ef fix: Changes to accomodate pagination flag in Params (#14)
Co-authored-by: vikbhas <waraa.vignesh@gmail.com>
2024-11-05 11:29:06 +01:00
4 changed files with 123 additions and 8 deletions

View File

@@ -1 +1,2 @@
from .session import * # noqa from .session import * # noqa
from .async_session import * # noqa

View File

@@ -0,0 +1,31 @@
# Async database session plumbing for PostgreSQL via SQLAlchemy.
# Connection settings come from POSTGRES_* environment variables
# (optionally loaded from a .env file), with local-development defaults.
import os
from typing import AsyncGenerator
from dotenv import load_dotenv
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker
# Load .env (if present) before reading the POSTGRES_* variables below.
load_dotenv()
host = os.getenv("POSTGRES_HOST", "localhost")
user = os.getenv("POSTGRES_USER", "postgres")
password = os.getenv("POSTGRES_PASSWORD", "root")
port = os.getenv("POSTGRES_PORT", "5432")
name = os.getenv("POSTGRES_DB", "fastapi")
# Base URL without the database name; the name is appended at engine creation.
# NOTE(review): this pins the psycopg driver while the requirements diff adds
# asyncpg — confirm which async driver is actually intended.
SQLALCHEMY_DATABASE_URL = f"postgresql+psycopg://{user}:{password}@{host}:{port}/"
# pool_pre_ping=True validates pooled connections before each checkout.
async_engine = create_async_engine(SQLALCHEMY_DATABASE_URL + name, pool_pre_ping=True)
# Session factory producing AsyncSession objects; expire_on_commit=False keeps
# ORM objects readable after commit without an implicit refresh round-trip.
AsyncSessionLocal = sessionmaker(
    bind=async_engine,
    class_=AsyncSession,
    expire_on_commit=False,
    autoflush=False,
    autocommit=False,
)
async def get_async_db() -> AsyncGenerator[AsyncSession, None]:
    # FastAPI dependency: yields one session per request and closes it
    # when the request scope exits (via the async context manager).
    async with AsyncSessionLocal() as db:
        yield db

View File

@@ -1,6 +1,7 @@
from math import ceil from math import ceil
from typing import Any, Generic, Optional, Self, Sequence, TypeVar, Union from typing import Any, Generic, Optional, Self, Sequence, TypeVar, Union, overload
from contextlib import suppress
from pydantic import BaseModel
from fastapi_pagination import Params from fastapi_pagination import Params
from fastapi_pagination.bases import AbstractPage, AbstractParams from fastapi_pagination.bases import AbstractPage, AbstractParams
from fastapi_pagination.types import ( from fastapi_pagination.types import (
@@ -8,18 +9,36 @@ from fastapi_pagination.types import (
GreaterEqualZero, GreaterEqualZero,
AdditionalData, AdditionalData,
SyncItemsTransformer, SyncItemsTransformer,
AsyncItemsTransformer,
ItemsTransformer,
) )
from fastapi_pagination.api import create_page, apply_items_transformer from fastapi_pagination.api import create_page, apply_items_transformer
from fastapi_pagination.utils import verify_params from fastapi_pagination.utils import verify_params
from fastapi_pagination.ext.sqlalchemy import create_paginate_query from fastapi_pagination.ext.sqlalchemy import create_paginate_query
from fastapi_pagination.bases import AbstractParams, RawParams
from pydantic.json_schema import SkipJsonSchema from pydantic.json_schema import SkipJsonSchema
from sqlalchemy.sql.selectable import Select from sqlalchemy.sql.selectable import Select
from sqlalchemy.orm.session import Session from sqlalchemy.orm.session import Session
from sqlalchemy import select, func from sqlalchemy import select, func
from sqlalchemy.ext.asyncio import AsyncSession, async_scoped_session
from fastapi import Query
from sqlalchemy.util import await_only, greenlet_spawn
T = TypeVar("T")


class PaginationParams(BaseModel, AbstractParams):
    """Pagination query parameters with an on/off switch.

    When ``pagination`` is False, ``to_raw_params`` yields no limit and
    no offset, so the full result set is returned.
    """

    page: int = Query(1, ge=1, description="Page number")
    size: int = Query(50, ge=1, le=100, description="Page size")
    pagination: bool = Query(True, description="Toggle pagination")

    def to_raw_params(self) -> RawParams:
        """Translate page/size into fastapi-pagination raw limit/offset."""
        if self.pagination:
            return RawParams(limit=self.size, offset=(self.page - 1) * self.size)
        return RawParams(limit=None, offset=None)
# TODO: Add complete fastapi-pagination proxy here
# TODO: Add pagination off functionality
# SkipJsonSchema is used to avoid generating invalid JSON schema in FastAPI
@@ -32,7 +51,7 @@ class Page(AbstractPage[T], Generic[T]):
has_next: bool | SkipJsonSchema[None] = None has_next: bool | SkipJsonSchema[None] = None
has_prev: bool | SkipJsonSchema[None] = None has_prev: bool | SkipJsonSchema[None] = None
__params_type__ = Params __params_type__ = PaginationParams
@classmethod @classmethod
def create( def create(
@@ -94,27 +113,72 @@ def unwrap_scalars(
return [item[0] if force_unwrap else item for item in items] return [item[0] if force_unwrap else item for item in items]
def _get_sync_conn_from_async(conn: Any) -> Session:  # pragma: no cover
    """Unwrap the synchronous session/connection behind an async wrapper.

    A scoped session is first resolved to its current session; then the
    underlying sync object is looked up on the wrapper.

    Raises:
        TypeError: if *conn* exposes neither ``sync_session`` nor
            ``sync_connection``.
    """
    if isinstance(conn, async_scoped_session):
        conn = conn()
    for attr in ("sync_session", "sync_connection"):
        try:
            return getattr(conn, attr)  # type: ignore
        except AttributeError:
            continue
    raise TypeError("conn must be an AsyncConnection or AsyncSession")
@overload
def paginate(
    connection: Session,
    query: Select,
    params: Optional[AbstractParams] = None,
    transformer: Optional[SyncItemsTransformer] = None,
    additional_data: Optional[AdditionalData] = None,
) -> Any:
    # Typing-only overload for the synchronous Session path.
    ...
@overload
async def paginate(
    connection: AsyncSession,
    query: Select,
    params: Optional[AbstractParams] = None,
    transformer: Optional[AsyncItemsTransformer] = None,
    additional_data: Optional[AdditionalData] = None,
) -> Any:
    # Typing-only overload for the AsyncSession (awaitable) path.
    ...
def _paginate(
connection: Session,
query: Select,
params: Optional[AbstractParams] = None,
transformer: Optional[ItemsTransformer] = None,
additional_data: Optional[AdditionalData] = None,
async_: bool = False,
):
if async_:
def _apply_items_transformer(*args: Any, **kwargs: Any) -> Any:
return await_only(apply_items_transformer(*args, **kwargs, async_=True))
else:
_apply_items_transformer = apply_items_transformer
params, raw_params = verify_params(params, "limit-offset", "cursor")
count_query = create_count_query(query) count_query = create_count_query(query)
total = connection.scalar(count_query) total = connection.scalar(count_query)
if paginationFlag is False and total > 0: if params.pagination is False and total > 0:
params = Params(page=1, size=total) params = Params(page=1, size=total)
else:
params = Params(page=params.page, size=params.size)
query = create_paginate_query(query, params) query = create_paginate_query(query, params)
items = connection.execute(query).all() items = connection.execute(query).all()
items = unwrap_scalars(items) items = unwrap_scalars(items)
t_items = apply_items_transformer(items, transformer) t_items = _apply_items_transformer(items, transformer)
return create_page( return create_page(
t_items, t_items,
@@ -122,3 +186,19 @@ def paginate(
total=total, total=total,
**(additional_data or {}), **(additional_data or {}),
) )
def paginate(
    connection: Session,
    query: Select,
    params: Optional[AbstractParams] = None,
    transformer: Optional[ItemsTransformer] = None,
    additional_data: Optional[AdditionalData] = None,
):
    """Dispatch pagination to the sync or async implementation.

    A plain ``Session`` runs ``_paginate`` directly. An ``AsyncSession``
    is unwrapped to its sync counterpart and driven through
    ``greenlet_spawn``, producing an awaitable for the caller.
    """
    if not isinstance(connection, AsyncSession):
        return _paginate(connection, query, params, transformer, additional_data, async_=False)
    sync_conn = _get_sync_conn_from_async(connection)
    return greenlet_spawn(
        _paginate, sync_conn, query, params, transformer, additional_data, async_=True
    )

View File

@@ -21,3 +21,6 @@ psycopg-pool>=3.2.2 # PostgreSQL
h11>=0.14.0 # Testing
httpcore>=1.0.5 # Testing
httpx>=0.27.0 # Testing
asyncpg>=0.30.0 #SQLAlchemy
greenlet>=3.1.1 #Async