Compare commits

...

58 Commits

Author SHA1 Message Date
renovate[bot]
7e8cc8653c feat(deps): update dependency psycopg-binary to v3.2.6 2025-03-12 23:02:53 +00:00
renovate[bot]
015d5eb13d feat(deps): update dependency sqlalchemy to v2.0.39 2025-03-11 22:08:22 +00:00
renovate[bot]
608ec8de6f feat(deps): update dependency starlette to v0.46.1 2025-03-08 13:12:47 +00:00
renovate[bot]
a21947fe02 feat(deps): update dependency tzlocal to v5.3.1 2025-03-05 22:10:10 +00:00
renovate[bot]
11e5ff9646 feat(deps): update dependency alembic to v1.15.1 2025-03-05 02:49:17 +00:00
renovate[bot]
971d7743f1 feat(deps): update dependency pytest to v8.3.5 2025-03-02 13:36:00 +00:00
renovate[bot]
1d34b32179 feat(deps): update dependency fastapi to v0.115.11 2025-03-02 01:23:16 +00:00
renovate[bot]
59abb07721 feat(deps): update dependency starlette to v0.46.0 (#23)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-01 03:10:09 +00:00
renovate[bot]
7fc2a7a1a9 feat(deps): update dependency fastapi to v0.115.10 2025-02-28 18:34:23 +00:00
renovate[bot]
ae570acc6a feat(deps): update dependency fastapi to v0.115.9 2025-02-27 20:01:50 +00:00
renovate[bot]
24aa470d6d feat(deps): update dependency psycopg-pool to v3.2.6 2025-02-26 15:40:27 +00:00
renovate[bot]
112b2def80 feat(deps): update dependency psycopg-binary to v3.2.5 2025-02-23 02:45:53 +00:00
renovate[bot]
cc7be6e4ac feat(deps): update dependency psycopg to v3.2.5 2025-02-22 22:32:43 +00:00
renovate[bot]
8410fc886e feat(deps): update dependency psycopg-pool to v3.2.5 2025-02-21 22:55:59 +00:00
renovate[bot]
13496b0cbd feat(deps): update dependency tzlocal to v5.3 2025-02-13 19:07:57 +00:00
renovate[bot]
e88a617e30 feat(deps): update dependency sqlalchemy to v2.0.38 2025-02-07 17:42:48 +00:00
renovate[bot]
76d0866595 feat(deps): update dependency certifi to v2025 (#21)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-05 09:41:47 +01:00
renovate[bot]
b194e583fb feat(deps): update dependency mako to v1.3.9 2025-02-04 17:20:34 +00:00
renovate[bot]
5f45d3aeb8 feat(deps): update dependency fastapi to v0.115.8 2025-01-30 17:07:56 +00:00
renovate[bot]
20a107a49c feat(deps): update dependency starlette to v0.45.3 2025-01-24 13:00:43 +00:00
renovate[bot]
087ea50c4e feat(deps): update dependency starlette to v0.45.2 (#20)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-23 09:25:31 +00:00
renovate[bot]
f47b44ba95 feat(deps): update dependency fastapi to v0.115.7 2025-01-23 02:14:28 +00:00
9b1343b90d fix: bumped creyPY version to 3 2025-01-21 12:42:45 +01:00
renovate[bot]
15d754f68d feat(deps): update python docker tag to v3.13 2025-01-20 20:55:43 +00:00
renovate[bot]
9e0c8f0173 feat(deps): update dependency uvicorn to v0.34.0 2025-01-20 16:46:01 +00:00
renovate[bot]
0c769ba843 feat(deps): update dependency certifi to v2024.12.14 2025-01-20 12:47:02 +00:00
18cce99967 Update README.md 2025-01-20 13:12:15 +01:00
4e7f352a15 feat: added retention_days deletion 2025-01-20 13:04:18 +01:00
263d962912 feat: added retention_days for compliance 2025-01-20 11:37:23 +01:00
renovate[bot]
3d4e5e3f4b feat(deps): update dependency alembic to v1.14.1 (#16)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-20 09:31:02 +00:00
renovate[bot]
6cdae87f42 feat(deps): update dependency httpx to v0.28.1 2025-01-20 05:11:08 +00:00
renovate[bot]
de36e60710 feat(deps): update dependency anyio to v4.8.0 2025-01-20 01:16:18 +00:00
renovate[bot]
bcec3079d3 feat(deps): update dependency pytest to v8.3.4 2025-01-19 21:23:04 +00:00
renovate[bot]
cf033298ce feat(deps): update dependency psycopg-binary to v3.2.4 2025-01-19 17:13:09 +00:00
renovate[bot]
3738b6f0a7 feat(deps): update dependency psycopg to v3.2.4 2025-01-19 12:37:22 +00:00
renovate[bot]
b8ac7226be feat(deps): update dependency click to v8.1.8 2025-01-19 08:23:32 +00:00
dafdf34f71 feat: added automerge to renovate 2025-01-17 12:33:14 +01:00
e77fe115c6 fix: removed duplicate install 2024-11-24 17:16:23 +01:00
6ab1eafe1d fix: fixed security recommendation from codacy 2024-11-24 17:16:11 +01:00
256e2adbf7 fix: fixed a recommendation from codacy 2024-11-24 17:13:44 +01:00
7c0d0da511 fix: bumped security dependency 2024-11-24 17:13:33 +01:00
renovate[bot]
4f793585e5 feat(deps): update dependency anyio to v4.6.2.post1 (#2)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-11-24 16:19:31 +01:00
renovate[bot]
98df462b61 chore: Configure Renovate (#1)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Conrad <grosserconrad@gmail.com>
2024-11-24 16:05:51 +01:00
6db2b3e14e Update ci.yml 2024-11-24 16:05:40 +01:00
22eaed8a75 fix: fixed an issue with duplicate operation_id 2024-10-10 23:38:35 +02:00
creyD
4d0ecb2ee8 Adjusted files for isort & autopep 2024-10-10 21:35:18 +00:00
88e97faddb feat: added select delete 2024-10-10 23:34:45 +02:00
cefb48a4b2 feat: added environment for log entries 2024-10-10 20:32:46 +02:00
4f50f6bb7e fix: fixed issue with the uvicorn worker command 2024-10-10 17:59:38 +02:00
a43ec6abd8 breaking: release 1.0.0 2024-10-10 17:39:13 +02:00
f8c7c5e50e major: first test release 2024-10-10 17:34:26 +02:00
44e5448625 feat: added filters for logs 2024-10-10 17:33:24 +02:00
1a8e6e849e feat: added order by for logs 2024-10-10 17:21:59 +02:00
f0c93d7c7a feat: added search for logs 2024-10-10 16:57:42 +02:00
133e3cf2a9 feat: added search for apps 2024-10-10 16:55:33 +02:00
b95e40b40d feat: added logging API 2024-10-10 16:47:15 +02:00
c79b5e5c5b fix: fixed another issue with the dockerfile 2024-10-10 16:24:47 +02:00
bf732a7e0b fix: fixed dockerfile 2024-10-10 16:24:12 +02:00
17 changed files with 638 additions and 55 deletions

View File

@@ -5,6 +5,7 @@ on:
branches:
- dev
- master
- renovate/**
paths-ignore:
- "**/.github/**"
- "**/.gitignore"
@@ -13,6 +14,10 @@ on:
- "**/CHANGELOG.md"
- "**/docs/**"
workflow_dispatch:
pull_request:
branches:
- dev
- master
env:
REGISTRY: ghcr.io

View File

@@ -1,30 +1,36 @@
FROM python:3.12-slim AS builder
WORKDIR /build
COPY requirements.txt .
RUN pip wheel --no-cache-dir --wheel-dir=/build/wheels \
-r requirements.txt \
FROM python:3.13-slim
ARG VERSION=unknown
FROM python:3.12-slim
# Create a non-root user and group
RUN groupadd -r appuser && useradd -r -g appuser appuser
WORKDIR /app
COPY . .
RUN apt-get update && apt-get install -y curl git
# Change ownership of the application directory
RUN chown -R appuser:appuser /app
COPY --from=builder /build/wheels /wheels
RUN pip install --no-cache /wheels/*
# Remove the wheels directory after installation to save space
RUN rm -rf /wheels
# Python setup
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
ARG VERSION=unknown
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
ENV VERSION=${VERSION}
ENV ENV=DEV
# Install dependencies
RUN pip install --no-cache-dir --upgrade -r requirements.txt
# Switch to the non-root user
USER appuser
CMD ["gunicorn", "-k", "uvicorn.workers.UvicornWorker", "-w", "6" , "-b", "0.0.0.0:9000","app.main:app"]
EXPOSE 9000
CMD ["uvicorn", "app.main:app", "--workers", "6" , "--host", "0.0.0.0", "--port", "9000"]
# Install curl
USER root
RUN apt-get update && apt-get install -y --no-install-recommends curl && apt-get clean
# Switch back to the non-root user
USER appuser
HEALTHCHECK --interval=30s --timeout=10s --retries=5 \
CMD curl --fail http://localhost:9000/openapi.json || exit 1

View File

@@ -1,3 +1,8 @@
# apilog
Tiny logging API server for taking logs via HTTP POST requests.
## TODO
- [ ] Application Patch
- [ ] Team CRUD

View File

@@ -0,0 +1,29 @@
"""empty message
Revision ID: 1e695b024786
Revises: 21dc1dc045b8
Create Date: 2025-01-20 11:36:14.692849
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "1e695b024786"
down_revision: Union[str, None] = "21dc1dc045b8"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
with op.batch_alter_table("application", schema=None) as batch_op:
batch_op.add_column(sa.Column("retention_days", sa.Integer(), nullable=True))
def downgrade() -> None:
with op.batch_alter_table("application", schema=None) as batch_op:
batch_op.drop_column("retention_days")

View File

@@ -0,0 +1,29 @@
"""empty message
Revision ID: 21dc1dc045b8
Revises: 74c576cf9560
Create Date: 2024-10-10 20:32:12.579725
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = "21dc1dc045b8"
down_revision: Union[str, None] = "74c576cf9560"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
with op.batch_alter_table("logentry", schema=None) as batch_op:
batch_op.add_column(sa.Column("environment", sa.String(length=64), nullable=True))
def downgrade() -> None:
with op.batch_alter_table("logentry", schema=None) as batch_op:
batch_op.drop_column("environment")

View File

@@ -0,0 +1,54 @@
"""empty message
Revision ID: 74c576cf9560
Revises: 95201f00f6b9
Create Date: 2024-10-10 17:38:19.834168
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = "74c576cf9560"
down_revision: Union[str, None] = "95201f00f6b9"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
op.create_table(
"logentry",
sa.Column("application", sa.UUID(), nullable=False),
sa.Column(
"l_type",
sa.Enum("INFO", "WARNING", "ERROR", "CRITICAL", name="logtype"),
nullable=False,
),
sa.Column(
"t_type",
sa.Enum("CREATE", "UPDATE", "DELETE", "UNDEFINED", name="transactiontype"),
nullable=False,
),
sa.Column("message", sa.String(length=512), nullable=True),
sa.Column("author", sa.String(length=512), nullable=False),
sa.Column("object_reference", sa.String(length=512), nullable=True),
sa.Column("previous_object", sa.JSON(), nullable=True),
sa.Column("id", sa.UUID(), nullable=False),
sa.Column(
"created_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True
),
sa.Column("updated_at", sa.DateTime(), nullable=True),
sa.Column("created_by_id", sa.String(), nullable=True),
sa.ForeignKeyConstraint(
["application"], ["application.id"], ondelete="CASCADE", name="fk_logentry_application"
),
sa.PrimaryKeyConstraint("id", name="pk_logentry"),
)
def downgrade() -> None:
op.drop_table("logentry")

View File

@@ -64,6 +64,7 @@ origins = [
"http://localhost:3000",
"http://localhost:5173",
"http://localhost:4200",
"https://logs.grosser.group",
]
# CORS Setup
@@ -77,8 +78,10 @@ app.add_middleware(
# App Routers
from app.routes.app import router as app_router
from app.routes.entry import router as entry_router
app.include_router(app_router)
app.include_router(entry_router)
# Pagination

View File

@@ -1,6 +1,7 @@
from creyPY.fastapi.models.base import Base
from sqlalchemy import Column, String
from sqlalchemy import Column, Integer, String
class Application(Base):
name = Column(String(512), nullable=False, unique=True)
retention_days = Column(Integer, nullable=True)

View File

@@ -1,9 +1,9 @@
from creyPY.fastapi.models.base import Base
from sqlalchemy import Column, String, ForeignKey, Enum, JSON
from sqlalchemy.dialects.postgresql import UUID
from enum import Enum as pyenum
from creyPY.fastapi.models.base import Base
from sqlalchemy import JSON, Column, Enum, ForeignKey, String
from sqlalchemy.dialects.postgresql import UUID
class TransactionType(pyenum):
CREATE = "create"
@@ -23,11 +23,16 @@ class LogEntry(Base):
application = Column(
UUID(as_uuid=True), ForeignKey("application.id", ondelete="CASCADE"), nullable=False
)
t_type = Column(Enum(TransactionType), nullable=False, default=TransactionType.UNDEFINED)
environment = Column(String(64), nullable=True, default="prod")
# type of the log entry
l_type = Column(Enum(LogType), nullable=False, default=LogType.INFO)
# type of the transaction
t_type = Column(Enum(TransactionType), nullable=False, default=TransactionType.UNDEFINED)
# a custom logmessage
message = Column(String(512), nullable=True)
# author ID i.e. auth0 user sub
author = Column(String(512), nullable=False, default="system")
# optional reference to the object (like object ID)
object_reference = Column(String(512), nullable=True)
# for irreversible operations, store the object state before the operation
previous_object = Column(JSON, nullable=True)

View File

@@ -1,16 +1,16 @@
from creyPY.fastapi.crud import (
create_obj_from_data,
)
from uuid import UUID
from creyPY.fastapi.crud import create_obj_from_data
from creyPY.fastapi.db.session import get_db
from fastapi import APIRouter, Depends, Security, HTTPException
from creyPY.fastapi.pagination import Page, paginate
from fastapi import APIRouter, Depends, HTTPException, Security
from pydantic.json_schema import SkipJsonSchema
from sqlalchemy import select
from sqlalchemy.orm import Session
from app.services.auth import verify
from app.schema.app import AppIN, AppOUT
from app.models.app import Application
from fastapi_pagination.ext.sqlalchemy import paginate
from creyPY.fastapi.pagination import Page
from uuid import UUID
from app.schema.app import AppIN, AppOUT
from app.services.auth import verify
router = APIRouter(prefix="/app", tags=["apps"])
@@ -25,7 +25,7 @@ async def create_app(
data,
Application,
db,
additonal_data={"created_by_id": sub},
additional_data={"created_by_id": sub},
)
return AppOUT.model_validate(obj)
@@ -46,11 +46,14 @@ async def delete_app(
@router.get("/")
async def get_apps(
search: str | SkipJsonSchema[None] = None,
sub: str = Security(verify),
db: Session = Depends(get_db),
) -> Page[AppOUT]:
the_select = db.query(Application).filter_by(created_by_id=sub)
return paginate(the_select)
the_select = select(Application).filter(Application.created_by_id == sub)
if search:
the_select = the_select.filter(Application.name.ilike(f"%{search}%"))
return paginate(db, the_select)
@router.get("/{app_id}")

126
app/routes/entry.py Normal file
View File

@@ -0,0 +1,126 @@
from datetime import datetime
from typing import Callable
from uuid import UUID
from creyPY.fastapi.crud import create_obj_from_data
from creyPY.fastapi.db.session import get_db
from creyPY.fastapi.order_by import order_by
from creyPY.fastapi.pagination import Page, paginate
from fastapi import APIRouter, Depends, HTTPException, Security
from fastapi_filters import FilterValues, create_filters
from fastapi_filters.ext.sqlalchemy import apply_filters
from pydantic.json_schema import SkipJsonSchema
from sqlalchemy import select
from sqlalchemy.orm import Session
from sqlalchemy.sql.selectable import Select
from app.models.entry import LogEntry, LogType, TransactionType
from app.schema.entry import LogIN, LogOUT
from app.services.auth import verify
router = APIRouter(prefix="/log", tags=["logging"])
@router.post("/", status_code=201)
async def create_log(
data: LogIN,
sub: str = Security(verify),
db: Session = Depends(get_db),
) -> LogOUT:
obj = create_obj_from_data(
data,
LogEntry,
db,
additional_data={"created_by_id": sub},
)
return LogOUT.model_validate(obj)
@router.delete("/{log_id}", status_code=204)
async def delete_log(
log_id: UUID,
sub: str = Security(verify),
db: Session = Depends(get_db),
) -> None:
obj = db.query(LogEntry).filter_by(id=log_id, created_by_id=sub).one_or_none()
if obj is None:
raise HTTPException(status_code=404, detail="Item not found")
db.delete(obj)
db.commit()
return None
@router.get("/{log_id}")
async def get_log(
log_id: UUID,
sub: str = Security(verify),
db: Session = Depends(get_db),
) -> LogOUT:
obj = db.query(LogEntry).filter_by(id=log_id, created_by_id=sub).one_or_none()
if obj is None:
raise HTTPException(status_code=404, detail="Item not found")
return LogOUT.model_validate(obj)
@router.get("/")
async def get_logs(
search: str | SkipJsonSchema[None] = None,
order_by_query: Callable[[Select], Select] = Depends(order_by),
filters: FilterValues = Depends(
create_filters(
created_by_id=str,
environment=str,
l_type=LogType,
t_type=TransactionType,
application=UUID,
object_reference=str,
author=str,
created_at=datetime,
)
),
sub: str = Security(verify),
db: Session = Depends(get_db),
) -> Page[LogOUT]:
"""
Filter logs of your systems. Searching works only for author and message. Use filters for the rest.
"""
the_select = apply_filters(select(LogEntry).filter(LogEntry.created_by_id == sub), filters)
if search:
the_select = the_select.filter(
LogEntry.message.ilike(f"%{search}%") | LogEntry.author.ilike(f"%{search}%")
)
return paginate(db, order_by_query(the_select))
@router.delete("/", status_code=200, operation_id="log_delete_many")
async def delete_logs(
application: UUID,
environment: str | SkipJsonSchema[None] = None,
l_type: LogType | SkipJsonSchema[None] = None,
t_type: TransactionType | SkipJsonSchema[None] = None,
object_reference: str | SkipJsonSchema[None] = None,
author: str | SkipJsonSchema[None] = None,
sub: str = Security(verify),
db: Session = Depends(get_db),
) -> int:
filters = {
"application": application,
"created_by_id": sub,
}
if environment is not None:
filters["environment"] = environment
if l_type is not None:
filters["l_type"] = l_type
if t_type is not None:
filters["t_type"] = t_type
if object_reference is not None:
filters["object_reference"] = object_reference
if author is not None:
filters["author"] = author
query = db.query(LogEntry).filter_by(**filters)
the_impact = query.count()
query.delete(synchronize_session=False)
db.commit()
return the_impact

View File

@@ -1,8 +1,11 @@
from pydantic.json_schema import SkipJsonSchema
from app.schema.common import BaseSchemaModelIN, BaseSchemaModelOUT
class AppIN(BaseSchemaModelIN):
name: str
retention_days: int | SkipJsonSchema[None] = 30
class AppOUT(BaseSchemaModelOUT, AppIN):

20
app/schema/entry.py Normal file
View File

@@ -0,0 +1,20 @@
from app.schema.common import BaseSchemaModelIN, BaseSchemaModelOUT
from app.models.entry import TransactionType, LogType
from uuid import UUID
from pydantic.json_schema import SkipJsonSchema
class LogIN(BaseSchemaModelIN):
application: UUID
environment: str = "prod"
l_type: LogType = LogType.INFO
t_type: TransactionType = TransactionType.UNDEFINED
message: str | SkipJsonSchema[None] = None
author: str = "system"
object_reference: str | SkipJsonSchema[None] = None
previous_object: dict | SkipJsonSchema[None] = None
class LogOUT(BaseSchemaModelOUT, LogIN):
pass

View File

@@ -1,12 +1,32 @@
import os
from datetime import datetime, timedelta
from creyPY.fastapi.db.session import SQLALCHEMY_DATABASE_URL, name
from apscheduler.schedulers.background import BackgroundScheduler
from creyPY.fastapi.db.session import SQLALCHEMY_DATABASE_URL, get_db, name
from sqlalchemy.orm import Session
from alembic import command
from alembic.config import Config
from app.models.app import Application
from app.models.entry import LogEntry
from app.services.db.session import create_if_not_exists
def delete_old_logs(sess: Session | None = None):
session = sess or next(get_db())
for app in session.query(Application).filter(Application.retention_days.isnot(None)):
cutoff = datetime.now() - timedelta(days=app.retention_days)
print(
f"Deleting logs older than {app.retention_days} days (cutoff: {cutoff}) for {app.name}",
)
session.query(LogEntry).filter(
LogEntry.application == app.id, LogEntry.created_at < cutoff
).delete()
session.commit()
def setup(db_name=name):
# Create Database
create_if_not_exists(db_name)
@@ -18,3 +38,17 @@ def setup(db_name=name):
"script_location", os.path.join(os.path.dirname(os.path.dirname(__file__)), "alembic")
)
command.upgrade(config, "head")
# Start retention deletion
scheduler = BackgroundScheduler()
scheduler.add_job(
delete_old_logs,
"interval",
id="deletor",
days=1,
max_instances=1,
replace_existing=True,
next_run_time=datetime.now(),
)
scheduler.start()
print("Deletion scheduler started")

View File

@@ -1,3 +1,6 @@
import contextlib
from datetime import datetime, timedelta
from creyPY.fastapi.db.session import SQLALCHEMY_DATABASE_URL, get_db
from creyPY.fastapi.models.base import Base
from creyPY.fastapi.testing import GenericClient
@@ -5,11 +8,67 @@ from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy_utils import create_database, database_exists, drop_database
from app.models.entry import LogEntry
from app.services.auth import verify
from app.setup import delete_old_logs
from .main import app
CURRENT_USER = "api-key|testing"
ENTRY_EXAMPLES = [
{
"l_type": "info",
"t_type": "create",
"message": "User Max Mustermann created",
"environment": "dev",
},
{
"l_type": "info",
"t_type": "update",
"message": "User Max Mustermann updated",
"environment": "dev",
},
{
"l_type": "info",
"t_type": "create",
"author": "auth|max_muster",
"message": "User Max Mustermann created a Unit",
"object_reference": "1",
"environment": "dev",
},
{
"l_type": "info",
"t_type": "update",
"author": "auth|max_muster",
"message": "User Max Mustermann updated Unit 1",
"object_reference": "1",
"previous_object": {"name": "Unit 1"},
"environment": "prod",
},
{
"l_type": "warning",
"t_type": "delete",
"message": "User Max Mustermann deleted",
"environment": "prod",
},
]
@contextlib.contextmanager
def app_context(self, name: str = "Testing", retention_days: int | None = None):
app_id = self.create_app(name, retention_days)
try:
yield app_id
finally:
self.destroy_app(app_id)
@contextlib.contextmanager
def log_examples(self):
with app_context(self) as app_id:
for entry in ENTRY_EXAMPLES:
self.log_message({"application": app_id, **entry})
yield app_id
class TestAPI:
@@ -32,14 +91,27 @@ class TestAPI:
global CURRENT_USER
return CURRENT_USER
self.db_instance = get_db_test()
app.dependency_overrides[get_db] = get_db_test
app.dependency_overrides[verify] = get_test_sub
self.c = GenericClient(app)
def teardown_class(self):
drop_database(self.engine.url)
# HELPERS
def create_app(self, name: str = "Testing", retention_days: int | None = None):
re = self.c.post("/app/", {"name": name, "retention_days": retention_days})
return re["id"]
def destroy_app(self, app_id):
self.c.delete(f"/app/{app_id}")
def log_message(self, entry_obj):
re = self.c.post("/log/", entry_obj)
return re["id"]
# GENERIC TEST CASES
def test_swagger_gen(self):
re = self.c.get("/openapi.json")
assert re["info"]["title"] == "ApiLog API"
@@ -47,5 +119,176 @@ class TestAPI:
def test_health_check(self):
self.c.get("/", parse_json=False)
# TESTS for module application
def test_application_api(self):
self.c.obj_lifecycle({"name": "Testing"}, "/app/")
def test_application_search(self):
with app_context(self, "testing 1") as app_id1:
with app_context(self, "second app 2") as app_id2:
re = self.c.get("/app/")
assert re["total"] == 2
assert len(re["results"]) == 2
re = self.c.get("/app/?search=testing")
assert re["total"] == 1
assert len(re["results"]) == 1
re = self.c.get("/app/?search=2")
assert re["total"] == 1
assert len(re["results"]) == 1
# TESTS for module log
def test_log_api(self):
with app_context(self) as app_id:
self.c.obj_lifecycle({"application": app_id}, "/log/")
def test_logging_standards(self):
with app_context(self) as app_id:
re = self.c.post("/log/", {"application": app_id})
log_id = re["id"]
assert re["application"] == app_id
assert re["l_type"] == "info"
assert re["t_type"] == "undefined"
assert re["message"] == None
assert re["author"] == "system"
assert re["environment"] == "prod"
assert re["object_reference"] == None
assert re["previous_object"] == None
assert re["created_by_id"] == CURRENT_USER
self.c.delete(f"/log/{log_id}")
def test_logging_search(self):
with log_examples(self) as app_id:
re = self.c.get("/log/")
assert re["total"] == 5
assert len(re["results"]) == 5
re = self.c.get("/log/?search=auth|max_muster")
assert re["total"] == 2
assert len(re["results"]) == 2
re = self.c.get("/log/?search=system")
assert re["total"] == 3
assert len(re["results"]) == 3
re = self.c.get("/log/?search=created%20a%20Unit")
assert re["total"] == 1
assert len(re["results"]) == 1
def test_logging_order(self):
with log_examples(self) as app_id:
re = self.c.get("/log/?order_by=created_at")
assert re["total"] == 5
assert len(re["results"]) == 5
assert re["results"][0]["created_at"] < re["results"][1]["created_at"]
re = self.c.get("/log/?order_by=-created_at")
assert re["total"] == 5
assert len(re["results"]) == 5
assert re["results"][0]["created_at"] > re["results"][1]["created_at"]
def test_logging_filter(self):
with log_examples(self) as app_id:
# API KEY
re = self.c.get("/log/?created_by_id=" + CURRENT_USER)
assert re["total"] == 5
assert len(re["results"]) == 5
# LogType
re = self.c.get("/log/?l_type=info")
assert re["total"] == 4
assert len(re["results"]) == 4
# TransactionType
re = self.c.get("/log/?t_type=create")
assert re["total"] == 2
assert len(re["results"]) == 2
# TransactionType create and update
re = self.c.get("/log/?t_type%5Bin%5D=create,update")
assert re["total"] == 4
assert len(re["results"]) == 4
# Application
re = self.c.get("/log/?application=" + app_id)
assert re["total"] == 5
assert len(re["results"]) == 5
# Application not
re = self.c.get("/log/?application%5Bne%5D=" + app_id)
assert re["total"] == 0
assert len(re["results"]) == 0
# Object Reference
re = self.c.get("/log/?object_reference=1")
assert re["total"] == 2
assert len(re["results"]) == 2
# author
re = self.c.get("/log/?author=auth|max_muster")
assert re["total"] == 2
assert len(re["results"]) == 2
# not author
re = self.c.get("/log/?author%5Bne%5D=auth|max_muster")
assert re["total"] == 3
assert len(re["results"]) == 3
# environment
re = self.c.get("/log/?environment=dev")
assert re["total"] == 3
assert len(re["results"]) == 3
# application and environment
re = self.c.get("/log/?application=" + app_id + "&environment=prod")
assert re["total"] == 2
assert len(re["results"]) == 2
def test_logging_delete(self):
with log_examples(self) as app_id:
re = self.c.delete("/log/?application=" + str(app_id) + "&environment=prod", r_code=200)
assert re == 2
re = self.c.get("/log/?application=" + str(app_id) + "&environment=prod")
assert re["total"] == 0
re = self.c.get("/log/?application=" + str(app_id) + "&environment=dev")
assert re["total"] == 3
# clear complete application
re = self.c.get("/log/?application=" + str(app_id))
assert re["total"] == 3
re = self.c.delete("/log/?application=" + str(app_id), r_code=200)
assert re == 3
re = self.c.get("/log/?application=" + str(app_id))
assert re["total"] == 0
def test_retention_delete(self):
sess = next(self.db_instance)
with app_context(self, retention_days=2) as app_id:
for i in range(5):
sess.add(
LogEntry(
application=app_id,
created_at=datetime.now() - timedelta(days=i),
created_by_id=CURRENT_USER,
)
)
sess.commit()
assert sess.query(LogEntry).count() == 5
re = self.c.get("/log/?application=" + str(app_id))
assert re["total"] == 5
delete_old_logs(sess)
assert sess.query(LogEntry).count() == 2
# delete all logs
re = self.c.delete("/log/?application=" + str(app_id), r_code=200)

12
renovate.json Normal file
View File

@@ -0,0 +1,12 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": ["config:recommended", ":semanticCommitTypeAll(feat)"],
"packageRules": [
{
"automerge": true,
"description": "Automerge non-major updates",
"matchUpdateTypes": ["minor", "patch"],
"automergeType": "branch"
}
]
}

View File

@@ -1,34 +1,39 @@
annotated-types==0.7.0
anyio==4.6.0
certifi==2024.8.30
creyPY==1.2.5
fastapi==0.115.0
anyio==4.8.0
certifi==2025.1.31
creyPY[postgres]==3.0.0
fastapi==0.115.11
fastapi-pagination==0.12.31
h11==0.14.0
httpcore==1.0.6
httpx==0.27.2
httpx==0.28.1
idna==3.10
psycopg==3.2.3
psycopg-binary==3.2.3
psycopg-pool==3.2.3
psycopg==3.2.5
psycopg-binary==3.2.6
psycopg-pool==3.2.6
pydantic==2.9.2
pydantic_core==2.23.4
python-dotenv==1.0.1
sniffio==1.3.1
SQLAlchemy==2.0.35
starlette==0.38.6
SQLAlchemy==2.0.39
starlette==0.46.1
typing_extensions==4.12.2
Mako==1.3.5 # Alembic
Mako==1.3.9 # Alembic
MarkupSafe==3.0.1 # Alembic
alembic==1.13.3 # Alembic
alembic==1.15.1 # Alembic
SQLAlchemy-Utils==0.41.2 # SQLAlchemy
click==8.1.7 # Uvicorn
uvicorn==0.31.1 # Uvicorn
click==8.1.8 # Uvicorn
uvicorn==0.34.0 # Uvicorn
iniconfig==2.0.0 # pytest
packaging==24.1 # pytest
pluggy==1.5.0 # pytest
pytest==8.3.3 # pytest
pytest==8.3.5 # pytest
fastapi-filters==0.2.9 # Filters
APScheduler==3.11.0 # Scheduler for deletion
tzlocal==5.3.1 # Scheduler for deletion