We have connected and queried with the new CRUDKit!
parent cf56baabe2
commit 49e6ab38b9
4 changed files with 219 additions and 15 deletions
@@ -6,19 +6,21 @@ from flask import Flask
 from crudkit.api.flask_api import generate_crud_blueprint
 from crudkit.core.service import CRUDService
 
+from .config import DevConfig
 from .db import init_db, create_all_tables, get_session
 
-def create_app() -> Flask:
+def create_app(config_cls=DevConfig) -> Flask:
     app = Flask(__name__)
+    app.config.from_object(config_cls)
 
-    app.config["DATABASE_URL"] = os.getenv("DATABASE_URL", "sqlite:///inventory.db")
-
-    init_db(app.config["DATABASE_URL"])
+    init_db(
+        app.config["DATABASE_URL"],
+        config_cls.engine_kwargs(),
+        config_cls.session_kwargs()
+    )
 
     from . import models as _models
 
     create_all_tables()
 
     session = get_session()
 
     area_service = CRUDService(_models.Area, session)
@@ -33,9 +35,18 @@ def create_app() -> Flask:
     work_note_service = CRUDService(_models.WorkNote, session)
 
+    app.register_blueprint(generate_crud_blueprint(_models.Area, area_service), url_prefix="/api/area")
+    app.register_blueprint(generate_crud_blueprint(_models.Brand, brand_service), url_prefix="/api/brand")
+    app.register_blueprint(generate_crud_blueprint(_models.DeviceType, device_type_service), url_prefix="/api/device_type")
+    app.register_blueprint(generate_crud_blueprint(_models.Image, image_service), url_prefix="/api/image")
+    app.register_blueprint(generate_crud_blueprint(_models.Inventory, inventory_service), url_prefix="/api/inventory")
+    app.register_blueprint(generate_crud_blueprint(_models.RoomFunction, room_function_service), url_prefix="/api/room_function")
+    app.register_blueprint(generate_crud_blueprint(_models.Room, room_service), url_prefix="/api/room")
+    app.register_blueprint(generate_crud_blueprint(_models.User, user_service), url_prefix="/api/user")
+    app.register_blueprint(generate_crud_blueprint(_models.WorkLog, work_log_service), url_prefix="/api/work_log")
+    app.register_blueprint(generate_crud_blueprint(_models.WorkNote, work_note_service), url_prefix="/api/work_note")
 
     @app.get("/")
     def index():
-        return {"status": "ok"}
+        return {"status": "ok", "db": app.config["DATABASE_URL"]}
 
     return app
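The factory now takes its settings from a config class instead of reading the environment directly. Below is a minimal sketch of how it could be driven from an entry point; the module paths inventory.app and inventory.config are assumptions (the diff does not name the app module), and since Config builds DATABASE_URL at import time, the backend env vars have to be set before the import.

# run.py (hypothetical entry point, not part of this commit)
import os

# Config.DATABASE_URL is evaluated when inventory.config is imported,
# so choose the backend before importing anything from the package.
os.environ.setdefault("DB_BACKEND", "sqlite")
os.environ.setdefault("DB_NAME", "inventory.db")

from inventory.config import DevConfig   # module paths assumed
from inventory.app import create_app

app = create_app(DevConfig)

if __name__ == "__main__":
    app.run(debug=True)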
inventory/config.py (new file, 184 lines)
@@ -0,0 +1,184 @@
from __future__ import annotations
import os
from urllib.parse import quote_plus
from typing import Dict, Any, Optional

from dotenv import load_dotenv
from pathlib import Path

env_path = Path(__file__).resolve().parent.parent / ".env"
load_dotenv(dotenv_path=env_path, override=False)


def build_database_url(
    *,
    backend: Optional[str] = None,
    url: Optional[str] = None,
    user: Optional[str] = None,
    password: Optional[str] = None,
    host: Optional[str] = None,
    port: Optional[str] = None,
    database: Optional[str] = None,
    driver: Optional[str] = None,
    dsn: Optional[str] = None,
    trusted: Optional[bool] = None,
    options: Optional[Dict[str, str]] = None,
) -> str:
    """
    Build a SQLAlchemy URI. If `url` is provided, it wins.
    Supported backends: sqlite, postgresql, mysql, mssql
    """
    if url:
        return url

    backend = (backend or "").lower().strip()
    options = options or {}

    if backend == 'sqlite':
        # Database can be a file path or ':memory:'
        db_path = database or 'inventory.db'
        if db_path == ':memory:':
            return 'sqlite:///:memory:'
        # Three slashes = relative file; four = absolute. We'll assume relative.
        return f"sqlite:///{db_path}"

    if backend in {'postgres', 'postgresql'}:
        driver = driver or 'psycopg'
        user = user or ""
        password = password or ""
        creds = f"{quote_plus(user)}:{quote_plus(password)}@" if user or password else ""
        host = host or "localhost"
        port = port or "5432"
        database = database or "inventory"
        qs = ""
        if options:
            qs = "?" + "&".join(f"{k}={quote_plus(v)}" for k, v in options.items())
        return f"postgresql+{driver}://{creds}{host}:{port}/{database}{qs}"

    if backend in {"mysql"}:
        driver = driver or "pymysql"
        user = user or ""
        password = password or ""
        creds = f"{quote_plus(user)}:{quote_plus(password)}@" if user or password else ""
        host = host or "localhost"
        port = port or "3306"
        database = database or "inventory"
        qs = ""
        if options:
            qs = "?" + "&".join(f"{k}={quote_plus(v)}" for k, v in options.items())
        return f"mysql+{driver}://{creds}{host}:{port}/{database}{qs}"

    if backend in {"mssql", "sqlserver", "sqlsrv"}:
        if dsn:
            qs = ""
            if options:
                qs = "?" + "&".join(f"{k}={quote_plus(v)}" for k, v in options.items())
            return f"mssql+pyodbc://@{quote_plus(dsn)}{qs}"

        driver = driver or "ODBC Driver 18 for SQL Server"
        host = host or "localhost"
        port = port or "1433"
        database = database or "inventory"

        if trusted:
            opts = {"driver": driver, "Trusted_Connection": "yes", **options}
            qs = "?" + "&".join(f"{k}={quote_plus(v)}" for k, v in opts.items())
            return f"mssql+pyodbc://{host}:{port}/{database}{qs}"

        user = user or ""
        password = password or ""
        creds = f"{quote_plus(user)}:{quote_plus(password)}@" if user or password else ""
        options = {"driver": driver, "TrustServerCertificate": "yes", **options}
        qs = "?" + "&".join(f"{k}={quote_plus(v)}" for k, v in options.items())
        return f"mssql+pyodbc://{creds}{host}:{port}/{database}{qs}"

    raise ValueError(f"Unsupported backend: {backend!r}")


class Config:
    """
    One config to feed both Flask and SQLAlchemy.
    Pulls sane defaults from env; override per-environment via subclasses.
    """

    # Flask bits
    DEBUG = False
    TESTING = False
    SECRET_KEY = os.getenv("SECRET_KEY", "dev-not-secret")

    DATABASE_URL = build_database_url(
        url=os.getenv("DATABASE_URL", None),
        backend=os.getenv("DB_BACKEND", None),
        user=os.getenv("DB_USER", None),
        password=os.getenv("DB_PASS", None),
        host=os.getenv("DB_HOST", None),
        port=os.getenv("DB_PORT", None),
        database=os.getenv("DB_NAME", None),
        driver=os.getenv("DB_DRIVER", None),
        dsn=os.getenv("DB_DSN", None),
        trusted=bool(int(os.getenv("DB_TRUSTED", "0"))),
        options=None,
    )

    SQLALCHEMY_ECHO = bool(int(os.getenv("DB_ECHO", "0")))
    POOL_SIZE = int(os.getenv("DB_POOL_SIZE", "5"))
    MAX_OVERFLOW = int(os.getenv("DB_MAX_OVERFLOW", "10"))
    POOL_TIMEOUT = int(os.getenv("DB_POOL_TIMEOUT", "30"))
    POOL_RECYCLE = int(os.getenv("DB_POOL_RECYCLE", "1000"))
    POOL_PRE_PING = True

    SQLITE_PRAGMAS = {
        "journal_mode": os.getenv("SQLITE_JOURNAL_MODE", "WAL"),
        "foreign_keys": os.getenv("SQLITE_FOREIGN_KEYS", "ON"),
        "synchronous": os.getenv("SQLITE_SYNCHRONOUS", "NORMAL"),
    }

    @classmethod
    def engine_kwargs(cls) -> Dict[str, Any]:
        """
        Create engine kwargs consistently for any backend.
        """
        url = cls.DATABASE_URL
        kwargs: Dict[str, Any] = {
            "echo": cls.SQLALCHEMY_ECHO,
            "pool_pre_ping": cls.POOL_PRE_PING,
            "future": True,
        }

        if url.startswith("sqlite://"):
            kwargs["connect_args"] = {"check_same_thread": False}
        elif url.startswith("mssql+pyodbc://"):
            pass

        kwargs.update(
            {
                "pool_size": cls.POOL_SIZE,
                "max_overflow": cls.MAX_OVERFLOW,
                "pool_timeout": cls.POOL_TIMEOUT,
                "pool_recycle": cls.POOL_RECYCLE,
            }
        )
        return kwargs

    @classmethod
    def session_kwargs(cls) -> Dict[str, Any]:
        """
        Sessionmaker settings that won't bite you during normal request lifecycles.
        """
        return {
            "autoflush": False,
            "autocommit": False,
            "expire_on_commit": False,
            "future": True,
        }


class DevConfig(Config):
    DEBUG = True
    SQLALCHEMY_ECHO = bool(int(os.getenv("DB_ECHO", "1")))


class TestConfig(Config):
    TESTING = True
    DATABASE_URL = build_database_url(backend="sqlite", database=":memory:")
    SQLALCHEMY_ECHO = False


class ProdConfig(Config):
    DEBUG = False
    SQLALCHEMY_ECHO = bool(int(os.getenv("DB_ECHO", "0")))
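For reference, here are a few URLs the new build_database_url() helper would produce with its defaults. These calls are illustrative only and do not appear in the commit; the import path is assumed.

from inventory.config import build_database_url  # import path assumed

# SQLite: three slashes, path relative to the working directory
build_database_url(backend="sqlite", database="inventory.db")
# -> "sqlite:///inventory.db"

# PostgreSQL: psycopg driver by default; quote_plus() escapes the credentials
build_database_url(backend="postgresql", user="app", password="p@ss word",
                   host="db.local", database="inventory")
# -> "postgresql+psycopg://app:p%40ss+word@db.local:5432/inventory"

# SQL Server through a pre-configured ODBC DSN
build_database_url(backend="mssql", dsn="InventoryDSN")
# -> "mssql+pyodbc://@InventoryDSN"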
@@ -1,6 +1,7 @@
 from __future__ import annotations
+from typing import Any, Dict
 
-from sqlalchemy import create_engine
+from sqlalchemy import create_engine, text, event
 from sqlalchemy.orm import sessionmaker, scoped_session
 
 from crudkit.core.base import Base
@@ -8,14 +9,22 @@ from crudkit.core.base import Base
 _engine = None
 SessionLocal = None
 
-def init_db(database_url: str) -> None:
+def init_db(database_url: str, engine_kwargs: Dict[str, Any], session_kwargs: Dict[str, Any]) -> None:
     global _engine, SessionLocal
-    connect_args = {}
-    if database_url.startswith("sqlite:///"):
-        connect_args["check_same_thread"] = False
+    print(database_url)
+    _engine = create_engine(database_url, **engine_kwargs)
+    SessionLocal = scoped_session(sessionmaker(bind=_engine, **session_kwargs))
 
-    _engine = create_engine(database_url, future=True, echo=False, connect_args=connect_args)
-    SessionLocal = scoped_session(sessionmaker(bind=_engine, autoflush=False, autocommit=False, future=True))
+    if database_url.startswith("sqlite:///"):
+        with _engine.connect() as conn:
+            conn.exec_driver_sql("PRAGMA journal_mode = WAL;")
+            conn.exec_driver_sql("PRAGMA foreign_keys = ON;")
+            conn.exec_driver_sql("PRAGMA synchronous = NORMAL;")
+    elif database_url.startswith("mssql+pyodbc://"):
+        @event.listens_for(_engine, "before_cursor_execute")
+        def _enable_fastexecutemany(conn, cursor, statement, parameters, context, executemany):
+            if executemany and hasattr(cursor, "fast_executemany"):
+                cursor.fast_executemany = True
 
 def get_session():
     if SessionLocal is None:
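One caveat on the SQLite branch above: the PRAGMAs are issued on a single pooled connection, so only journal_mode=WAL (which is persisted in the database file) carries over to other connections, while foreign_keys and synchronous are per-connection settings. A standalone sketch for checking what a fresh connection actually sees:

from sqlalchemy import create_engine

engine = create_engine("sqlite:///inventory.db")
with engine.connect() as conn:
    # "wal" persists once set; foreign_keys resets to 0 on new connections
    print(conn.exec_driver_sql("PRAGMA journal_mode;").scalar())
    print(conn.exec_driver_sql("PRAGMA foreign_keys;").scalar())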
@@ -22,7 +22,7 @@ class Inventory(Base, CRUDMixin):
     brand_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey('brand.id'), nullable=True, index=True)
 
     device_type: Mapped[Optional['DeviceType']] = relationship('DeviceType', back_populates='inventory')
-    device_type_id: Mapped[Optional[int]] = mapped_column('type_id', Integer, ForeignKey("item.id"), nullable=True, index=True)
+    type_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("item.id"), nullable=True, index=True)
 
     image: Mapped[Optional['Image']] = relationship('Image', back_populates='inventory', passive_deletes=True)
     image_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey('images.id', ondelete='SET NULL'), nullable=True, index=True)
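The one change to the Inventory model reads as a rename: previously the Python attribute device_type_id was mapped onto the existing type_id column, while now the attribute and the column share the name type_id, so call sites switch from Inventory.device_type_id to Inventory.type_id. An illustrative query, assuming Inventory and a session from get_session() are in scope:

from sqlalchemy import select

# was: select(Inventory).where(Inventory.device_type_id == 3)
stmt = select(Inventory).where(Inventory.type_id == 3)
rows = session.execute(stmt).scalars().all()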