feat: FASE 0 - Setup progetto TieMeasureFlow

Struttura monorepo completa con server FastAPI e client Flask:
- Server: FastAPI + SQLAlchemy 2.0 async + Alembic migrations
- Client: Flask + blueprints (auth, measure, maker, statistics)
- Database: docker-compose MySQL 8.0 + Alembic async config
- Config: pydantic-settings, TailwindCSS, Flask-Babel i18n
- Piano implementazione completo (18 sezioni, 1600 righe)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Adriano
2026-02-07 00:16:54 +01:00
commit dbdbb77daf
47 changed files with 2489 additions and 0 deletions
+51
View File
@@ -0,0 +1,51 @@
"""TieMeasureFlow Server Configuration."""
from pathlib import Path
from urllib.parse import quote_plus

from pydantic_settings import BaseSettings
class Settings(BaseSettings):
    """Application settings loaded from environment variables.

    Defaults are development values; production deployments override them
    through a ``.env`` file (see ``model_config``) or the process
    environment.
    """

    # Database
    db_host: str = "localhost"
    db_port: int = 3306
    db_name: str = "tiemeasureflow"
    db_user: str = "tmflow"
    db_password: str = "change_me_in_production"

    # Server
    server_host: str = "0.0.0.0"
    server_port: int = 8000
    server_secret_key: str = "change-this-to-a-random-secret-key"
    server_cors_origins: str = "http://localhost:5000"

    # File Storage
    upload_dir: str = "uploads"
    max_upload_size_mb: int = 50

    # SSL (Production)
    ssl_certfile: str | None = None
    ssl_keyfile: str | None = None

    @property
    def database_url(self) -> str:
        """Async MySQL connection string (asyncmy driver).

        User and password are URL-encoded so reserved characters in the
        credentials (e.g. '@', ':', '/') cannot corrupt the DSN.
        """
        user = quote_plus(self.db_user)
        password = quote_plus(self.db_password)
        return (
            f"mysql+asyncmy://{user}:{password}"
            f"@{self.db_host}:{self.db_port}/{self.db_name}"
        )

    @property
    def cors_origins(self) -> list[str]:
        """Parse CORS origins from a comma-separated string.

        Blank entries (e.g. produced by a trailing comma) are dropped so
        the CORS middleware never receives an empty origin.
        """
        return [
            origin.strip()
            for origin in self.server_cors_origins.split(",")
            if origin.strip()
        ]

    @property
    def upload_path(self) -> Path:
        """Path to the upload directory, resolved next to this module."""
        return Path(__file__).parent / self.upload_dir

    model_config = {"env_file": "../.env", "env_file_encoding": "utf-8"}


settings = Settings()
+51
View File
@@ -0,0 +1,51 @@
"""Async SQLAlchemy database engine and session management."""
from collections.abc import AsyncGenerator
from sqlalchemy.ext.asyncio import (
AsyncSession,
async_sessionmaker,
create_async_engine,
)
from sqlalchemy.orm import DeclarativeBase
from config import settings
# Create async engine
# pool_recycle=3600 replaces connections older than one hour, avoiding
# MySQL's idle-connection timeout ("server has gone away" errors).
engine = create_async_engine(
    settings.database_url,
    echo=False,       # set True to log emitted SQL while debugging
    pool_size=10,     # persistent connections kept in the pool
    max_overflow=20,  # additional connections allowed under burst load
    pool_recycle=3600,
)
# Session factory
# expire_on_commit=False keeps ORM objects readable after commit, which is
# needed when request handlers serialize results after the session commits.
async_session_factory = async_sessionmaker(
    engine,
    class_=AsyncSession,
    expire_on_commit=False,
)
class Base(DeclarativeBase):
    """Declarative base shared by every ORM model in the project."""
async def get_db() -> AsyncGenerator[AsyncSession, None]:
    """FastAPI dependency that yields an async database session.

    Commits the transaction when the request handler finishes without
    error; rolls back and re-raises otherwise.  The ``async with`` block
    already closes the session on every exit path, so no explicit
    ``close()`` is needed.
    """
    async with async_session_factory() as session:
        try:
            yield session
            await session.commit()
        except Exception:
            await session.rollback()
            raise
async def init_db() -> None:
    """Create all tables (dev only - use Alembic in production).

    Runs the synchronous ``Base.metadata.create_all`` on the async
    engine's connection; only models imported (and thus registered with
    ``Base.metadata``) at call time are created.
    """
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)
+45
View File
@@ -0,0 +1,45 @@
"""TieMeasureFlow Server - FastAPI Entry Point."""
from contextlib import asynccontextmanager
from collections.abc import AsyncGenerator
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from config import settings
from database import init_db
@asynccontextmanager
async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
    """Application lifespan: startup and shutdown events."""
    # Startup: make sure every upload subdirectory exists before serving.
    for name in ("images", "pdfs", "logos", "reports"):
        target = settings.upload_path / name
        target.mkdir(parents=True, exist_ok=True)
    yield
    # Shutdown: nothing to clean up yet.
# FastAPI application; `lifespan` creates upload directories on startup.
app = FastAPI(
    title="TieMeasureFlow API",
    description="API per gestione task misurazioni con calibro manuale",
    version="0.1.0",
    lifespan=lifespan,
)
# CORS middleware
# Allowed origins come from settings (comma-separated SERVER_CORS_ORIGINS);
# methods/headers are unrestricted because the API is consumed only by the
# configured client origins.
app.add_middleware(
    CORSMiddleware,
    allow_origins=settings.cors_origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
@app.get("/api/health")
async def health_check() -> dict:
    """Health check endpoint."""
    payload = {
        "status": "ok",
        "service": "TieMeasureFlow API",
        "version": "0.1.0",
    }
    return payload
View File
+36
View File
@@ -0,0 +1,36 @@
[alembic]
script_location = .
# NOTE: placeholder URL only — env.py overrides it at runtime with the
# value built from .env settings (config.set_main_option in env.py).
sqlalchemy.url = mysql+asyncmy://tmflow:change_me_in_production@localhost:3306/tiemeasureflow
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
+80
View File
@@ -0,0 +1,80 @@
"""Alembic environment configuration for async SQLAlchemy."""
import asyncio
from logging.config import fileConfig
from alembic import context
from sqlalchemy import pool
from sqlalchemy.ext.asyncio import create_async_engine
# Alembic Config object; provides access to values in alembic.ini.
config = context.config
# Configure Python logging from the alembic.ini [loggers]/[handlers]
# sections when a config file is in use.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)
# Make the server package importable so `config` and `database` resolve
# when Alembic runs from the migrations directory.
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
from config import settings
from database import Base
# Override the placeholder URL in alembic.ini with the value derived from
# .env so migrations and the app always target the same database.
config.set_main_option("sqlalchemy.url", settings.database_url)
# Import all models here so they register with Base.metadata
# (uncomment as each model module is implemented):
# from models.user import User
# from models.recipe import Recipe, RecipeVersion
# from models.task import RecipeTask, RecipeSubtask
# from models.measurement import Measurement
# from models.access_log import AccessLog
# from models.setting import SystemSetting
# Autogenerate diffs against this metadata.
target_metadata = Base.metadata
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode: emit SQL without a live DB."""
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
def do_run_migrations(connection) -> None:
    """Run migrations with connection.

    Synchronous body handed to ``AsyncConnection.run_sync`` by
    ``run_async_migrations``; `connection` is a live SQLAlchemy
    connection.
    """
    context.configure(connection=connection, target_metadata=target_metadata)
    with context.begin_transaction():
        context.run_migrations()
async def run_async_migrations() -> None:
    """Run migrations in 'online' mode with async engine."""
    # NullPool: one short-lived connection, nothing held open afterwards.
    engine = create_async_engine(
        config.get_main_option("sqlalchemy.url"),
        poolclass=pool.NullPool,
    )
    async with engine.connect() as conn:
        await conn.run_sync(do_run_migrations)
    await engine.dispose()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Drives the async migration coroutine to completion; assumes no event
    loop is already running (Alembic invokes this from a sync context).
    """
    asyncio.run(run_async_migrations())
# Entry point: Alembic sets offline mode via `alembic upgrade --sql`.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
+26
View File
@@ -0,0 +1,26 @@
## Mako template used by Alembic to render new migration scripts
## (`alembic revision`); ${...} placeholders are filled at generation time.
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
    ${upgrades if upgrades else "pass"}
def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
View File
View File
+28
View File
@@ -0,0 +1,28 @@
# FastAPI + ASGI
fastapi>=0.110.0
uvicorn[standard]>=0.30.0
# Database
sqlalchemy[asyncio]>=2.0.0
asyncmy>=0.2.0
alembic>=1.13.0
# Validation
pydantic>=2.0.0
pydantic-settings>=2.0.0
# Security
passlib[bcrypt]>=1.7.0
bcrypt>=4.0.0
# File handling
pillow>=10.0.0
python-multipart>=0.0.6
# Reports
plotly>=5.0.0
kaleido>=0.2.0
weasyprint>=62.0
# Utilities
python-dotenv>=1.0.0
View File
View File
View File
View File
View File
View File
View File
View File
View File