Initial FastAPI + SQLite + Alembic

ShaBle · 2026-02-06 17:06:35 +01:00 · commit 465aa0f1c1
29 changed files with 826 additions and 0 deletions

.gitignore (vendored; new file, 20 lines)

@@ -0,0 +1,20 @@
# venv / python cache
.venv/
__pycache__/
*.pyc
.pytest_cache/
.ruff_cache/
# env / secrets
.env
# local data / sqlite
data/
*.db
# editor
.vscode/
# OS
.DS_Store
Thumbs.db

alembic.ini (new file, 149 lines)

@@ -0,0 +1,149 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts.
# this is typically a path given in POSIX (e.g. forward slashes)
# format, relative to the token %(here)s which refers to the location of this
# ini file
script_location = %(here)s/alembic
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# Or organize into date-based subdirectories (requires recursive_version_locations = true)
# file_template = %%(year)d/%%(month).2d/%%(day).2d_%%(hour).2d%%(minute).2d_%%(second).2d_%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory. for multiple paths, the path separator
# is defined by "path_separator" below.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the tzdata library which can be installed by adding
# `alembic[tz]` to the pip requirements.
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =
# max length of characters to apply to the "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to <script_location>/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "path_separator"
# below.
# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions
# path_separator; This indicates what character is used to split lists of file
# paths, including version_locations and prepend_sys_path within configparser
# files such as alembic.ini.
# The default rendered in new alembic.ini files is "os", which uses os.pathsep
# to provide os-dependent path splitting.
#
# Note that in order to support legacy alembic.ini files, this default does NOT
# take place if path_separator is not present in alembic.ini. If this
# option is omitted entirely, fallback logic is as follows:
#
# 1. Parsing of the version_locations option falls back to using the legacy
# "version_path_separator" key, which if absent then falls back to the legacy
# behavior of splitting on spaces and/or commas.
# 2. Parsing of the prepend_sys_path option falls back to the legacy
# behavior of splitting on spaces, commas, or colons.
#
# Valid values for path_separator are:
#
# path_separator = :
# path_separator = ;
# path_separator = space
# path_separator = newline
#
# Use os.pathsep. Default configuration used for new projects.
path_separator = os
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
# database URL. This is consumed by the user-maintained env.py script only.
# other means of configuring database URLs may be customized within the env.py
# file.
sqlalchemy.url = driver://user:pass@localhost/dbname
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module
# hooks = ruff
# ruff.type = module
# ruff.module = ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
# Alternatively, use the exec runner to execute a binary found on your PATH
# hooks = ruff
# ruff.type = exec
# ruff.executable = ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
# Logging configuration. This is also consumed by the user-maintained
# env.py script only.
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARNING
handlers = console
qualname =
[logger_sqlalchemy]
level = WARNING
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

alembic/README (new file, 1 line)

@@ -0,0 +1 @@
Generic single-database configuration.

alembic/env.py (new file, 86 lines)

@@ -0,0 +1,86 @@
import os
import sys
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
# So that "fastapi_demo...." imports resolve:
sys.path.append(os.path.abspath(os.getcwd()))
from fastapi_demo.app.core.config import settings
from fastapi_demo.app.db.base import Base
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
fileConfig(config.config_file_name)
# DB URL from .env/settings
config.set_main_option("sqlalchemy.url", settings.DATABASE_URL)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = Base.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
with connectable.connect() as connection:
context.configure(connection=connection, target_metadata=target_metadata)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
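
With env.py reading the database URL from settings and pointing target_metadata at Base.metadata, migrations are generated and applied with the standard Alembic CLI. A minimal sketch of the commands behind the two revisions in this commit (the -m messages mirror the revision docstrings):

alembic revision --autogenerate -m "init"
alembic upgrade head
alembic revision --autogenerate -m "add revision to assets"
alembic upgrade head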

alembic/script.py.mako (new file, 28 lines)

@@ -0,0 +1,28 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
"""Upgrade schema."""
${upgrades if upgrades else "pass"}
def downgrade() -> None:
"""Downgrade schema."""
${downgrades if downgrades else "pass"}

alembic/versions/54567ad268e8_init.py (new file, 32 lines)

@@ -0,0 +1,32 @@
"""init
Revision ID: 54567ad268e8
Revises:
Create Date: 2026-02-06 16:22:40.309011
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '54567ad268e8'
down_revision: Union[str, Sequence[str], None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###

alembic/versions/ffb0393ff471_add_revision_to_assets.py (new file, 32 lines)

@@ -0,0 +1,32 @@
"""add revision to assets
Revision ID: ffb0393ff471
Revises: 54567ad268e8
Create Date: 2026-02-06 17:02:28.538265
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'ffb0393ff471'
down_revision: Union[str, Sequence[str], None] = '54567ad268e8'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('assets', sa.Column('revision', sa.Integer(), server_default=sa.text('0'), nullable=False))
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('assets', 'revision')
# ### end Alembic commands ###

fastapi_demo/__init__.py (new file, empty)

fastapi_demo/app/__init__.py (new file, empty)

fastapi_demo/app/api/__init__.py (new file, empty)

fastapi_demo/app/api/routes/__init__.py (new file, empty)

fastapi_demo/app/api/routes/assets.py (new file, 45 lines)

@@ -0,0 +1,45 @@
from fastapi import APIRouter, Depends
from uuid import UUID
from sqlalchemy.orm import Session
from fastapi_demo.app.db.deps import get_db
from fastapi_demo.app.repositories.sql_assets_repo import SqlAssetsRepo
from fastapi_demo.app.services.assets_service import AssetsService
from fastapi_demo.app.schemas.asset import (
AssetCreate,
AssetOut,
AssetTransitionIn,
AssetEventOut,
)
router = APIRouter(prefix="/assets", tags=["assets"])
def get_service(db: Session = Depends(get_db)) -> AssetsService:
repo = SqlAssetsRepo(db)
return AssetsService(repo)
@router.post("", response_model=AssetOut, status_code=201)
def create_asset(payload: AssetCreate, svc: AssetsService = Depends(get_service)):
return svc.create_asset(payload)
@router.get("/{asset_id}", response_model=AssetOut)
def get_asset(asset_id: UUID, svc: AssetsService = Depends(get_service)):
return svc.get_asset(asset_id)
@router.post("/{asset_id}/transition", response_model=AssetOut)
def transition_asset(
asset_id: UUID,
payload: AssetTransitionIn,
svc: AssetsService = Depends(get_service),
):
asset, _event = svc.transition(asset_id, payload)
return asset
@router.get("/{asset_id}/events", response_model=list[AssetEventOut])
def list_events(asset_id: UUID, svc: AssetsService = Depends(get_service)):
return svc.list_events(asset_id)
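
A hedged usage sketch against these routes once the app is running (host, port, and payload values are illustrative and not part of this commit; the JSON fields follow AssetCreate and AssetTransitionIn defined in the schemas file later in this diff):

curl -X POST http://localhost:8000/assets \
     -H "Content-Type: application/json" \
     -d '{"name": "Pumpe 01", "serial": "SN-123"}'
curl http://localhost:8000/assets/<asset_id>
curl -X POST http://localhost:8000/assets/<asset_id>/transition \
     -H "Content-Type: application/json" \
     -d '{"to_status": "SICHTPRUEFUNG", "expected_revision": 0}'
curl http://localhost:8000/assets/<asset_id>/events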

fastapi_demo/app/api/routes/health.py (new file, 8 lines)

@@ -0,0 +1,8 @@
from fastapi import APIRouter
router = APIRouter(tags=["health"])
@router.get("/health")
def health():
return {"status": "ok"}

fastapi_demo/app/core/__init__.py (new file, empty)

fastapi_demo/app/core/config.py (new file, 12 lines)

@@ -0,0 +1,12 @@
from pydantic_settings import BaseSettings, SettingsConfigDict
class Settings(BaseSettings):
APP_NAME: str = "FASTAPI_DEMO"
ENV: str = "dev"
DATABASE_URL: str = "sqlite:///./data/app.db"
model_config = SettingsConfigDict(env_file=".env", extra="ignore")
settings = Settings()
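
Settings reads an optional .env file via SettingsConfigDict; a minimal sketch of that file, using only the variables defined above (the values shown are just the defaults, so the file can be omitted entirely):

APP_NAME=FASTAPI_DEMO
ENV=dev
DATABASE_URL=sqlite:///./data/app.db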

fastapi_demo/app/db/base.py (new file, 2 lines)

@@ -0,0 +1,2 @@
from fastapi_demo.app.db.session import Base # noqa: F401
from fastapi_demo.app.db import models # noqa: F401

fastapi_demo/app/db/deps.py (new file, 13 lines)

@@ -0,0 +1,13 @@
from __future__ import annotations
from typing import Generator
from fastapi_demo.app.db.session import SessionLocal
from sqlalchemy.orm import Session
def get_db() -> Generator[Session, None, None]:
db = SessionLocal()
try:
yield db
finally:
db.close()

fastapi_demo/app/db/models.py (new file, 53 lines)

@@ -0,0 +1,53 @@
from __future__ import annotations
from sqlalchemy import String, DateTime, Text, ForeignKey, Integer, text
from sqlalchemy.orm import Mapped, mapped_column, relationship
from datetime import datetime
from fastapi_demo.app.db.session import Base
class Asset(Base):
__tablename__ = "assets"
id: Mapped[str] = mapped_column(String, primary_key=True, index=True)
name: Mapped[str] = mapped_column(String, nullable=False)
serial: Mapped[str | None] = mapped_column(String, nullable=True)
status: Mapped[str] = mapped_column(String, nullable=False)
revision: Mapped[int] = mapped_column(
Integer,
nullable=False,
server_default=text("0"),
)
created_at: Mapped[datetime] = mapped_column(
DateTime(timezone=True), nullable=False
)
updated_at: Mapped[datetime] = mapped_column(
DateTime(timezone=True), nullable=False
)
events: Mapped[list["AssetEvent"]] = relationship(
"AssetEvent",
back_populates="asset",
cascade="all, delete-orphan",
passive_deletes=True,
)
class AssetEvent(Base):
__tablename__ = "asset_events"
id: Mapped[str] = mapped_column(String, primary_key=True, index=True)
asset_id: Mapped[str] = mapped_column(
String, ForeignKey("assets.id", ondelete="CASCADE"), index=True
)
from_status: Mapped[str] = mapped_column(String, nullable=False)
to_status: Mapped[str] = mapped_column(String, nullable=False)
at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
note: Mapped[str | None] = mapped_column(Text, nullable=True)
asset: Mapped["Asset"] = relationship("Asset", back_populates="events")

fastapi_demo/app/db/session.py (new file, 24 lines)

@@ -0,0 +1,24 @@
from __future__ import annotations
from pathlib import Path
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, declarative_base
from fastapi_demo.app.core.config import settings
# make sure ./data exists (for the SQLite database file)
Path("data").mkdir(exist_ok=True)
engine = create_engine(
settings.DATABASE_URL,
connect_args={"check_same_thread": False}
if settings.DATABASE_URL.startswith("sqlite")
else {},
)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()
def dispose_engine() -> None:
engine.dispose()

fastapi_demo/app/domain/status.py (new file, 10 lines)

@@ -0,0 +1,10 @@
from enum import Enum
class AssetStatus(str, Enum):
WARENEINGANG = "WARENEINGANG"
SICHTPRUEFUNG = "SICHTPRUEFUNG"
WARTESCHLANGE = "WARTESCHLANGE"
IN_BEARBEITUNG = "IN_BEARBEITUNG"
QUALITAETSKONTROLLE = "QUALITAETSKONTROLLE"
WARENAUSGANG = "WARENAUSGANG"

fastapi_demo/app/domain/transitions.py (new file, 24 lines)

@@ -0,0 +1,24 @@
from fastapi import HTTPException
from fastapi_demo.app.domain.status import AssetStatus
# Allowed transitions (transition matrix)
ALLOWED_TRANSITIONS: dict[AssetStatus, set[AssetStatus]] = {
AssetStatus.WARENEINGANG: {AssetStatus.SICHTPRUEFUNG},
AssetStatus.SICHTPRUEFUNG: {AssetStatus.WARTESCHLANGE},
AssetStatus.WARTESCHLANGE: {AssetStatus.IN_BEARBEITUNG},
AssetStatus.IN_BEARBEITUNG: {AssetStatus.QUALITAETSKONTROLLE},
AssetStatus.QUALITAETSKONTROLLE: {
AssetStatus.WARENAUSGANG,
AssetStatus.IN_BEARBEITUNG,
},  # e.g. rework
AssetStatus.WARENAUSGANG: set(),
}
def ensure_transition_allowed(from_status: AssetStatus, to_status: AssetStatus) -> None:
allowed = ALLOWED_TRANSITIONS.get(from_status, set())
if to_status not in allowed:
raise HTTPException(
status_code=409,
detail=f"Transition nicht erlaubt: {from_status} -> {to_status}",
)
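
A small illustrative sketch (not part of the commit) of how the matrix behaves:

from fastapi import HTTPException
from fastapi_demo.app.domain.status import AssetStatus
from fastapi_demo.app.domain.transitions import ensure_transition_allowed

# Allowed by the matrix: WARENEINGANG -> SICHTPRUEFUNG returns silently.
ensure_transition_allowed(AssetStatus.WARENEINGANG, AssetStatus.SICHTPRUEFUNG)

# Not allowed: skipping ahead to WARENAUSGANG raises a 409 HTTPException.
try:
    ensure_transition_allowed(AssetStatus.WARENEINGANG, AssetStatus.WARENAUSGANG)
except HTTPException as exc:
    print(exc.status_code)  # 409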

fastapi_demo/app/main.py (new file, 24 lines)

@@ -0,0 +1,24 @@
from contextlib import asynccontextmanager
from fastapi import FastAPI
from fastapi_demo.app.db.session import dispose_engine
from fastapi_demo.app.api.routes.health import router as health_router
from fastapi_demo.app.api.routes.assets import router as assets_router
@asynccontextmanager
async def lifespan(app: FastAPI):
# Startup
yield
# Shutdown
dispose_engine()
app = FastAPI(
title="FASTAPI_DEMO",
version="0.1.0",
lifespan=lifespan,
)
app.include_router(health_router)
app.include_router(assets_router)
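
Assuming the usual uvicorn setup (requirements.txt is not rendered in this diff, so the exact dependencies are an assumption), the service can be brought up locally with:

alembic upgrade head                          # creates data/app.db and applies both migrations
uvicorn fastapi_demo.app.main:app --reload    # serves the app defined above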

fastapi_demo/app/repositories/assets_repo.py (new file, 11 lines)

@@ -0,0 +1,11 @@
from typing import Protocol
from uuid import UUID
from fastapi_demo.app.schemas.asset import AssetOut, AssetEventOut
class AssetsRepo(Protocol):
def create(self, asset: AssetOut) -> None: ...
def get(self, asset_id: UUID) -> AssetOut | None: ...
def update(self, asset: AssetOut) -> None: ...
def add_event(self, event: AssetEventOut) -> None: ...
def list_events(self, asset_id: UUID) -> list[AssetEventOut]: ...

fastapi_demo/app/repositories/memory_assets_repo.py (new file, 25 lines)

@@ -0,0 +1,25 @@
from uuid import UUID
from fastapi_demo.app.repositories.assets_repo import AssetsRepo
from fastapi_demo.app.schemas.asset import AssetOut, AssetEventOut
class MemoryAssetsRepo(AssetsRepo):
def __init__(self) -> None:
self.assets: dict[UUID, AssetOut] = {}
self.events: dict[UUID, list[AssetEventOut]] = {}
def create(self, asset: AssetOut) -> None:
self.assets[asset.id] = asset
self.events.setdefault(asset.id, [])
def get(self, asset_id: UUID) -> AssetOut | None:
return self.assets.get(asset_id)
def update(self, asset: AssetOut) -> None:
self.assets[asset.id] = asset
def add_event(self, event: AssetEventOut) -> None:
self.events.setdefault(event.asset_id, []).append(event)
def list_events(self, asset_id: UUID) -> list[AssetEventOut]:
return list(self.events.get(asset_id, []))
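
AssetsService.transition() calls transition_with_revision(), which neither the AssetsRepo protocol nor this in-memory repo defines; only SqlAssetsRepo (next file) implements it. A hedged sketch of how the method could look here, mirroring the SQL repo's 404/409 semantics (assumptions: pydantic v2 model_copy is available, and `from fastapi import HTTPException` is added to the imports):

# method to add on MemoryAssetsRepo (sketch, not part of the commit)
def transition_with_revision(self, asset_id, expected_revision, to_status, at, note):
    current = self.assets.get(asset_id)
    if not current:
        raise HTTPException(status_code=404, detail="Asset nicht gefunden")
    if current.revision != expected_revision:
        # same conflict shape as the SQL repo
        raise HTTPException(status_code=409, detail={
            "message": "Revision-Konflikt",
            "expected_revision": expected_revision,
            "current_revision": current.revision,
        })
    updated = current.model_copy(update={
        "status": to_status,
        "revision": expected_revision + 1,
        "updated_at": at,
    })
    self.assets[asset_id] = updated
    event = AssetEventOut(
        asset_id=asset_id,
        from_status=current.status,
        to_status=to_status,
        at=at,
        note=note,
    )
    self.events.setdefault(asset_id, []).append(event)
    return updated, event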

fastapi_demo/app/repositories/sql_assets_repo.py (new file, 134 lines)

@@ -0,0 +1,134 @@
from __future__ import annotations
from datetime import datetime
from uuid import UUID, uuid4
from fastapi import HTTPException
from sqlalchemy import select, update
from sqlalchemy.orm import Session
from fastapi_demo.app.db.models import Asset as AssetORM, AssetEvent as AssetEventORM
from fastapi_demo.app.domain.status import AssetStatus
from fastapi_demo.app.schemas.asset import AssetOut, AssetEventOut
class SqlAssetsRepo:
def __init__(self, db: Session) -> None:
self.db = db
def create(self, asset: AssetOut) -> None:
row = AssetORM(
id=str(asset.id),
name=asset.name,
serial=asset.serial,
status=str(asset.status),
revision=asset.revision,  # <-- important
created_at=asset.updated_at, # MVP: created_at == updated_at
updated_at=asset.updated_at,
)
self.db.add(row)
self.db.commit()
def get(self, asset_id: UUID) -> AssetOut | None:
row = self.db.get(AssetORM, str(asset_id))
if not row:
return None
return AssetOut(
id=UUID(row.id),
name=row.name,
serial=row.serial,
status=AssetStatus(row.status),
revision=row.revision,  # <-- important
updated_at=row.updated_at,
)
def transition_with_revision(
self,
asset_id: UUID,
expected_revision: int,
to_status: AssetStatus,
at: datetime,
note: str | None,
) -> tuple[AssetOut, AssetEventOut]:
self.db.rollback()  # end any implicit read-only transaction (e.g. from the service's earlier get_asset) so begin() does not raise
with self.db.begin():  # <-- begin at the very top
current = self.db.get(AssetORM, str(asset_id))
if not current:
raise HTTPException(status_code=404, detail="Asset nicht gefunden")
from_status = AssetStatus(current.status)
stmt = (
update(AssetORM)
.where(
AssetORM.id == str(asset_id),
AssetORM.revision == expected_revision,
)
.values(
status=str(to_status),
updated_at=at,
revision=expected_revision + 1,
)
)
res = self.db.execute(stmt)
if res.rowcount != 1:
# re-read the current revision for a clean error message
latest = self.db.get(AssetORM, str(asset_id))
raise HTTPException(
status_code=409,
detail={
"message": "Revision-Konflikt",
"expected_revision": expected_revision,
"current_revision": latest.revision if latest else None,
},
)
event_row = AssetEventORM(
id=str(uuid4()),
asset_id=str(asset_id),
from_status=str(from_status),
to_status=str(to_status),
at=at,
note=note,
)
self.db.add(event_row)
# after the commit: load the updated asset
updated = self.db.get(AssetORM, str(asset_id))
assert updated is not None
asset_out = AssetOut(
id=UUID(updated.id),
name=updated.name,
serial=updated.serial,
status=AssetStatus(updated.status),
revision=updated.revision,
updated_at=updated.updated_at,
)
event_out = AssetEventOut(
asset_id=asset_out.id,
from_status=from_status,
to_status=to_status,
at=at,
note=note,
)
return asset_out, event_out
def list_events(self, asset_id: UUID) -> list[AssetEventOut]:
stmt = (
select(AssetEventORM)
.where(AssetEventORM.asset_id == str(asset_id))
.order_by(AssetEventORM.at.asc())
)
rows = self.db.execute(stmt).scalars().all()
return [
AssetEventOut(
asset_id=UUID(r.asset_id),
from_status=AssetStatus(r.from_status),
to_status=AssetStatus(r.to_status),
at=r.at,
note=r.note,
)
for r in rows
]
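
The heart of the optimistic locking above is the conditional UPDATE: it only matches a row whose stored revision still equals expected_revision, so a concurrent writer that already bumped the revision leaves rowcount at 0 and the request fails with 409 instead of silently overwriting. Rendered to SQL it looks roughly like this (parameter names are illustrative, not the exact markers SQLAlchemy generates):

UPDATE assets
SET status = :new_status, updated_at = :at, revision = :new_revision
WHERE assets.id = :asset_id AND assets.revision = :expected_revision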

fastapi_demo/app/schemas/__init__.py (new file, empty)

fastapi_demo/app/schemas/asset.py (new file, 32 lines)

@@ -0,0 +1,32 @@
from pydantic import BaseModel, Field
from uuid import UUID
from datetime import datetime
from fastapi_demo.app.domain.status import AssetStatus
class AssetCreate(BaseModel):
name: str = Field(min_length=1)
serial: str | None = None
class AssetOut(BaseModel):
id: UUID
name: str
serial: str | None
status: AssetStatus
revision: int
updated_at: datetime
class AssetTransitionIn(BaseModel):
to_status: AssetStatus
expected_revision: int
note: str | None = None
class AssetEventOut(BaseModel):
asset_id: UUID
from_status: AssetStatus
to_status: AssetStatus
at: datetime
note: str | None = None

fastapi_demo/app/services/assets_service.py (new file, 61 lines)

@@ -0,0 +1,61 @@
from datetime import datetime, timezone
from uuid import uuid4, UUID
from fastapi import HTTPException
from fastapi_demo.app.domain.status import AssetStatus
from fastapi_demo.app.domain.transitions import ensure_transition_allowed
from fastapi_demo.app.repositories.assets_repo import AssetsRepo
from fastapi_demo.app.schemas.asset import (
AssetCreate,
AssetOut,
AssetTransitionIn,
AssetEventOut,
)
class AssetsService:
def __init__(self, repo: AssetsRepo) -> None:
self.repo = repo
def create_asset(self, data: AssetCreate) -> AssetOut:
now = datetime.now(timezone.utc)
asset = AssetOut(
id=uuid4(),
name=data.name,
serial=data.serial,
status=AssetStatus.WARENEINGANG,
revision=0,  # <-- new
updated_at=now,
)
self.repo.create(asset)
return asset
def get_asset(self, asset_id: UUID) -> AssetOut:
asset = self.repo.get(asset_id)
if not asset:
raise HTTPException(status_code=404, detail="Asset nicht gefunden")
return asset
def transition(
self, asset_id: UUID, data: AssetTransitionIn
) -> tuple[AssetOut, AssetEventOut]:
asset = self.get_asset(asset_id)
# Domain rule: is this transition allowed?
ensure_transition_allowed(asset.status, data.to_status)
now = datetime.now(timezone.utc)
# The repo handles this atomically: revision check + status update + event insert
updated_asset, event = self.repo.transition_with_revision(
asset_id=asset_id,
expected_revision=data.expected_revision,
to_status=data.to_status,
at=now,
note=data.note,
)
return updated_asset, event
def list_events(self, asset_id: UUID) -> list[AssetEventOut]:
self.get_asset(asset_id)  # 404 if it does not exist
return self.repo.list_events(asset_id)
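
A hedged test sketch for the service layer using the in-memory repo (the file path and pytest runner are assumptions; only create/get are exercised because MemoryAssetsRepo, as committed, does not implement transition_with_revision):

# tests/test_assets_service.py (hypothetical path)
from fastapi_demo.app.domain.status import AssetStatus
from fastapi_demo.app.repositories.memory_assets_repo import MemoryAssetsRepo
from fastapi_demo.app.schemas.asset import AssetCreate
from fastapi_demo.app.services.assets_service import AssetsService


def test_create_and_get_asset():
    svc = AssetsService(MemoryAssetsRepo())
    created = svc.create_asset(AssetCreate(name="Pumpe 01", serial="SN-123"))
    assert created.status == AssetStatus.WARENEINGANG
    assert created.revision == 0
    assert svc.get_asset(created.id) == created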

requirements.txt (new file, binary; contents not shown)