From 3c07741500c9c79f1eee35a2b6a28436c1fff6c0 Mon Sep 17 00:00:00 2001 From: Yaro Kasear Date: Thu, 9 Oct 2025 09:27:54 -0500 Subject: [PATCH] Various bug fixes. Still trying to fix cartesian issue on search. --- crudkit/config.py | 5 + crudkit/core/service.py | 230 +++++++++++++++++++++++-- crudkit/engines.py | 25 ++- crudkit/ui/fragments.py | 27 ++- inventory/__init__.py | 4 +- inventory/routes/listing.py | 150 ++++++++++------ inventory/routes/search.py | 2 +- inventory/templates/listing.html | 46 ++++- inventory/templates/submit_button.html | 17 +- 9 files changed, 412 insertions(+), 94 deletions(-) diff --git a/crudkit/config.py b/crudkit/config.py index 0439a3e..fb87b51 100644 --- a/crudkit/config.py +++ b/crudkit/config.py @@ -187,6 +187,8 @@ class Config: "synchronous": os.getenv("SQLITE_SYNCHRONOUS", "NORMAL"), } + STRICT_NPLUS1 = bool(int(os.getenv("CRUDKIT_STRICT_NPLUS1", "1"))) + @classmethod def engine_kwargs(cls) -> Dict[str, Any]: url = cls.DATABASE_URL @@ -221,15 +223,18 @@ class Config: class DevConfig(Config): DEBUG = True SQLALCHEMY_ECHO = bool(int(os.getenv("DB_ECHO", "1"))) + STRICT_NPLUS1 = bool(int(os.getenv("CRUDKIT_STRICT_NPLUS1", "1"))) class TestConfig(Config): TESTING = True DATABASE_URL = build_database_url(backend="sqlite", database=":memory:") SQLALCHEMY_ECHO = False + STRICT_NPLUS1 = bool(int(os.getenv("CRUDKIT_STRICT_NPLUS1", "1"))) class ProdConfig(Config): DEBUG = False SQLALCHEMY_ECHO = bool(int(os.getenv("DB_ECHO", "0"))) + STRICT_NPLUS1 = bool(int(os.getenv("CRUDKIT_STRICT_NPLUS1", "0"))) def get_config(name: str | None) -> Type[Config]: """ diff --git a/crudkit/core/service.py b/crudkit/core/service.py index db510cd..c4a863f 100644 --- a/crudkit/core/service.py +++ b/crudkit/core/service.py @@ -8,7 +8,7 @@ from sqlalchemy import and_, func, inspect, or_, text from sqlalchemy.engine import Engine, Connection from sqlalchemy.orm import Load, Session, with_polymorphic, Mapper, selectinload, with_loader_criteria from sqlalchemy.orm.attributes import InstrumentedAttribute -from sqlalchemy.sql import operators +from sqlalchemy.sql import operators, visitors from sqlalchemy.sql.elements import UnaryExpression, ColumnElement from crudkit.core import to_jsonable, deep_diff, diff_to_patch, filter_to_columns, normalize_payload @@ -20,6 +20,18 @@ from crudkit.projection import compile_projection import logging log = logging.getLogger("crudkit.service") +# logging.getLogger("crudkit.service").setLevel(logging.DEBUG) +# Ensure our debug actually prints even if the app/root logger is WARNING+ +# if not log.handlers: +# _h = logging.StreamHandler() +# _h.setLevel(logging.DEBUG) +# _h.setFormatter(logging.Formatter( +# "%(asctime)s %(levelname)s %(name)s: %(message)s" +# )) +# log.addHandler(_h) +# +# log.setLevel(logging.DEBUG) +# log.propagate = False @runtime_checkable class _HasID(Protocol): @@ -230,7 +242,9 @@ class CRUDService(Generic[T]): # Make sure joins/filters match the real query query = self._apply_firsthop_strategies(query, root_alias, plan) if plan.filters: - query = query.filter(*plan.filters) + filters = self._final_filters(root_alias, plan) + if filters: + query = query.filter(*filters) order_spec = self._extract_order_spec(root_alias, plan.order_by) @@ -358,10 +372,11 @@ class CRUDService(Generic[T]): spec.parse_includes() join_paths = tuple(spec.get_join_paths()) filter_tables = _collect_tables_from_filters(filters) + fkeys = set() _, proj_opts = compile_projection(self.model, req_fields) if req_fields else ([], []) - filter_tables = () - fkeys = 
set() + # filter_tables = () + # fkeys = set() return self._Plan( spec=spec, filters=filters, order_by=order_by, limit=limit, offset=offset, @@ -377,6 +392,9 @@ class CRUDService(Generic[T]): def _apply_firsthop_strategies(self, query, root_alias, plan: _Plan): nested_first_hops = { p[0] for p in (plan.rel_field_names or {}).keys() if len(p) > 1 } + joined_rel_keys = set() + + # Existing behavior: join everything in join_paths (to-one), selectinload collections for base_alias, rel_attr, target_alias in plan.join_paths: if base_alias is not root_alias: continue @@ -385,17 +403,50 @@ class CRUDService(Generic[T]): if not is_collection: query = query.join(target_alias, rel_attr.of_type(target_alias), isouter=True) + joined_rel_keys.add(prop.key if prop is not None else rel_attr.key) else: opt = selectinload(rel_attr) - if is_collection: - child_names = (plan.collection_field_names or {}).get(rel_attr.key, []) - if child_names: - target_cls = prop.mapper.class_ - cols = [getattr(target_cls, n, None) for n in child_names] - cols = [c for c in cols if isinstance(c, InstrumentedAttribute)] - if cols: - opt = opt.load_only(*cols) + child_names = (plan.collection_field_names or {}).get(rel_attr.key, []) + if child_names: + target_cls = prop.mapper.class_ + cols = [getattr(target_cls, n, None) for n in child_names] + cols = [c for c in cols if isinstance(c, InstrumentedAttribute)] + if cols: + opt = opt.load_only(*cols) query = query.options(opt) + + # NEW: if a first-hop to-one relationship’s target table is present in filter expressions, + # make sure we actually JOIN it (outer) so filters don’t create a cartesian product. + if plan.filter_tables: + mapper: Mapper[Any] = cast(Mapper[Any], inspect(self.model)) + for rel in mapper.relationships: + if rel.uselist: + continue # only first-hop to-one here + target_tbl = getattr(rel.mapper.class_, "__table__", None) + if target_tbl is None: + continue + if target_tbl in plan.filter_tables: + if rel.key in joined_rel_keys: + continue # already joined via join_paths + query = query.join(getattr(root_alias, rel.key), isouter=True) + joined_rel_keys.add(rel.key) + if log.isEnabledFor(logging.DEBUG): + info = [] + for base_alias, rel_attr, target_alias in plan.join_paths: + if base_alias is not root_alias: + continue + prop = getattr(rel_attr, "property", None) + sel = getattr(target_alias, "selectable", None) + info.append({ + "rel": (getattr(prop, "key", getattr(rel_attr, "key", "?"))), + "collection": bool(getattr(prop, "uselist", False)), + "target_keys": list(_selectable_keys(sel)) if sel is not None else [], + "joined": (getattr(prop, "key", None) in joined_rel_keys), + }) + log.debug("FIRSTHOP: %s.%s first-hop paths: %s", + self.model.__name__, getattr(root_alias, "__table__", type(root_alias)).key, + info) + return query def _apply_proj_opts(self, query, plan: _Plan): @@ -428,6 +479,145 @@ class CRUDService(Generic[T]): except Exception: pass + def _rebind_filters_to_firsthop_aliases(self, filters, root_alias, plan): + """Make filter expressions use the exact same alias objects as our JOINs.""" + if not filters: + return filters + + # Map first-hop target selectable keysets -> the exact selectable object we JOINed with + alias_map = {} + for base_alias, _rel_attr, target_alias in plan.join_paths: + if base_alias is not root_alias: + continue + sel = getattr(target_alias, "selectable", None) + if sel is not None: + alias_map[frozenset(_selectable_keys(sel))] = sel + + if not alias_map: + return filters + + def replace(elem): + tbl = getattr(elem, 
"table", None) + if tbl is None: + return elem + keyset = frozenset(_selectable_keys(tbl)) + new_sel = alias_map.get(keyset) + if new_sel is None or new_sel is tbl: + return elem + + colkey = getattr(elem, "key", None) or getattr(elem, "name", None) + if not colkey: + return elem + try: + return getattr(new_sel.c, colkey) + except Exception: + return elem + + return [visitors.replacement_traverse(f, {}, replace) for f in filters] + + def _final_filters(self, root_alias, plan): + """Return filters rebounded to our first-hop aliases, with first-hop collection + predicates rewritten to EXISTS via rel.any(...).""" + filters = list(plan.filters or []) + if not filters: + return [] + + # 1) Build alias map for first-hop targets we joined (to-one) + alias_map = {} + coll_map = {} # KEY CHANGE: table -> (rel_attr, target_cls) + for base_alias, rel_attr, target_alias in plan.join_paths: + if base_alias is not root_alias: + continue + prop = getattr(rel_attr, "property", None) + if prop is None: + continue + + # Try to capture a selectable for to-one rebinds (nice-to-have) + sel = getattr(target_alias, "selectable", None) + if sel is not None: + alias_map[frozenset(_selectable_keys(sel))] = sel + + # Always build a collection map keyed by the mapped table (no alias needed) + if bool(getattr(prop, "uselist", False)): + target_cls = prop.mapper.class_ + tbl = getattr(target_cls, "__table__", None) + if tbl is not None: + coll_map[tbl] = (rel_attr, target_cls) + print(f"STAGE 1 - alias_map = {alias_map}, coll_map={coll_map}") + + # 2) Rebind to-one columns to the exact alias objects we JOINed (if we have them) + if alias_map: + def _rebind(elem): + tbl = getattr(elem, "table", None) + if tbl is None: + return elem + keyset = frozenset(_selectable_keys(tbl)) + new_sel = alias_map.get(keyset) + if new_sel is None or new_sel is tbl: + return elem + colkey = getattr(elem, "key", None) or getattr(elem, "name", None) + if not colkey: + return elem + try: + return getattr(new_sel.c, colkey) + except Exception: + return elem + filters = [visitors.replacement_traverse(f, {}, _rebind) for f in filters] + print(f"STAGE 2 - filters = {filters}") + + # 3) If there are no collection filters, we’re done + if not coll_map: + print("STAGE 3 - No, I have determined there are no collections to handle like a bad girl.") + return filters + print("STAGE 3 - Yes, I have determined there are collections to handle like a good boy.") + + # 4) Group any filters that reference a first-hop collection TABLE + keep = [] + per_coll = {} # table -> [expr, ...] 
+ for f in filters: + touched_tbl = None + def _find(elem): + nonlocal touched_tbl + tbl = getattr(elem, "table", None) + if tbl is None: + return + # normalize alias -> base table + base_tbl = tbl + while getattr(base_tbl, "element", None) is not None: + base_tbl = getattr(base_tbl, "element") + if base_tbl in coll_map and touched_tbl is None: + touched_tbl = base_tbl + visitors.traverse(f, {}, {'column': _find}) + + if touched_tbl is None: + keep.append(f) + else: + per_coll.setdefault(touched_tbl, []).append(f) + print(f"STAGE 4 - keep = {keep}, per_coll = {per_coll}") + + # 5) For each collection, remap columns to mapped class attrs and wrap with .any(and_(...)) + for tbl, exprs in per_coll.items(): + rel_attr, target_cls = coll_map[tbl] + + def _to_model(elem): + etbl = getattr(elem, "table", None) + if etbl is not None: + # normalize alias -> base table + etbl_base = etbl + while getattr(etbl_base, "element", None) is not None: + etbl_base = getattr(etbl_base, "element") + if etbl_base is tbl: + key = getattr(elem, "key", None) or getattr(elem, "name", None) + if key and hasattr(target_cls, key): + return getattr(target_cls, key) + return elem + + remapped = [visitors.replacement_traverse(e, {}, _to_model) for e in exprs] + keep.append(rel_attr.any(and_(*remapped))) + print(f"STAGE 5 - keep={keep}") + + return keep + # ---- public read ops def page(self, params=None, *, page: int = 1, per_page: int = 50, include_total: bool = True): @@ -469,7 +659,9 @@ class CRUDService(Generic[T]): query = self._apply_firsthop_strategies(query, root_alias, plan) query = self._apply_soft_delete_criteria_for_children(query, plan, params) if plan.filters: - query = query.filter(*plan.filters) + filters = self._final_filters(root_alias, plan) + if filters: + query = query.filter(*filters) order_spec = self._extract_order_spec(root_alias, plan.order_by) limit = 50 if plan.limit is None else (None if plan.limit == 0 else plan.limit) @@ -529,7 +721,9 @@ class CRUDService(Generic[T]): if not bool(getattr(getattr(rel_attr, "property", None), "uselist", False)): base = base.join(target_alias, rel_attr.of_type(target_alias), isouter=True) if plan.filters: - base = base.filter(*plan.filters) + filters = self._final_filters(root_alias, plan) + if filters: + base = base.filter(*filters) # <-- use base, not query total = session.query(func.count()).select_from( base.order_by(None).distinct().subquery() ).scalar() or 0 @@ -556,7 +750,9 @@ class CRUDService(Generic[T]): query = self._apply_firsthop_strategies(query, root_alias, plan) query = self._apply_soft_delete_criteria_for_children(query, plan, params) if plan.filters: - query = query.filter(*plan.filters) + filters = self._final_filters(root_alias, plan) + if filters: + query = query.filter(*filters) query = query.filter(getattr(root_alias, "id") == id) query = self._apply_proj_opts(query, plan) @@ -577,7 +773,9 @@ class CRUDService(Generic[T]): query = self._apply_firsthop_strategies(query, root_alias, plan) query = self._apply_soft_delete_criteria_for_children(query, plan, params) if plan.filters: - query = query.filter(*plan.filters) + filters = self._final_filters(root_alias, plan) + if filters: + query = query.filter(*filters) order_by = plan.order_by paginating = (plan.limit is not None) or (plan.offset not in (None, 0)) diff --git a/crudkit/engines.py b/crudkit/engines.py index b420a8d..4e18fd5 100644 --- a/crudkit/engines.py +++ b/crudkit/engines.py @@ -1,7 +1,8 @@ +# engines.py from __future__ import annotations from typing import Type, Optional -from 
sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker +from sqlalchemy import create_engine, event +from sqlalchemy.orm import sessionmaker, raiseload, Mapper, RelationshipProperty from .backend import make_backend_info, BackendInfo from .config import Config, get_config from ._sqlite import apply_sqlite_pragmas @@ -12,15 +13,31 @@ def build_engine(config_cls: Type[Config] | None = None): apply_sqlite_pragmas(engine, config_cls.SQLITE_PRAGMAS) return engine +def _install_nplus1_guards(SessionMaker, *, strict: bool): + if not strict: + return + + @event.listens_for(SessionMaker, "do_orm_execute") + def _add_global_raiseload(execute_state): + stmt = execute_state.statement + # Only touch ORM statements (have column_descriptions) + if getattr(stmt, "column_descriptions", None): + execute_state.statement = stmt.options(raiseload("*")) + def build_sessionmaker(config_cls: Type[Config] | None = None, engine=None): config_cls = config_cls or get_config(None) engine = engine or build_engine(config_cls) - return sessionmaker(bind=engine, **config_cls.session_kwargs()) + SessionMaker = sessionmaker(bind=engine, **config_cls.session_kwargs()) + + # Toggle with a config flag; default off so you can turn it on when ready + strict = bool(getattr(config_cls, "STRICT_NPLUS1", False)) + _install_nplus1_guards(SessionMaker, strict=strict) + return SessionMaker class CRUDKitRuntime: """ Lightweight container so CRUDKit can be given either: - - prebuild engine/sessionmaker, or + - prebuilt engine/sessionmaker, or - a Config to build them lazily """ def __init__(self, *, engine=None, session_factory=None, config: Optional[Type[Config]] = None): diff --git a/crudkit/ui/fragments.py b/crudkit/ui/fragments.py index b4db83f..08903de 100644 --- a/crudkit/ui/fragments.py +++ b/crudkit/ui/fragments.py @@ -413,7 +413,7 @@ def _value_label_for_field(field: dict, mapper, values_map: dict, instance, sess if not rel_prop: return None - rid = _coerce_fk_value(values_map, instance, base) + rid = _coerce_fk_value(values_map, instance, base, rel_prop) rel_obj = _resolve_rel_obj(values_map, instance, base) label_spec = ( @@ -493,7 +493,7 @@ class _SafeObj: val = _get_loaded_attr(self._obj, name) return "" if val is None else _SafeObj(val) -def _coerce_fk_value(values: dict | None, instance: Any, base: str): +def _coerce_fk_value(values: dict | None, instance: Any, base: str, rel_prop: Optional[RelationshipProperty] = None): """ Resolve current selection for relationship 'base': 1) values['_id'] @@ -540,6 +540,25 @@ def _coerce_fk_value(values: dict | None, instance: Any, base: str): except Exception: pass + # Fallback: if we know the relationship, try its local FK column names + if rel_prop is not None: + try: + st = inspect(instance) if instance is not None else None + except Exception: + st = None + + # Try values[...] 
first
+        for col in getattr(rel_prop, "local_columns", []) or []:
+            key = getattr(col, "key", None) or getattr(col, "name", None)
+            if not key:
+                continue
+            if isinstance(values, dict) and key in values and values[key] not in (None, ""):
+                return values[key]
+            if st is not None:
+                attr = st.attrs.get(key) if hasattr(st, "attrs") else None
+                if attr is not None and attr.loaded_value is not NO_VALUE:
+                    return attr.loaded_value
 
     return None
 
 
 def _is_many_to_one(mapper, name: str) -> Optional[RelationshipProperty]:
@@ -1136,7 +1155,7 @@ def render_form(
                 base = name[:-3]
                 rel_prop = mapper.relationships.get(base)
                 if isinstance(rel_prop, RelationshipProperty) and rel_prop.direction.name == "MANYTOONE":
-                    values_map[name] = _coerce_fk_value(values, instance, base)
+                    values_map[name] = _coerce_fk_value(values, instance, base, rel_prop)  # add rel_prop
     else:
         # Auto-generate path (your original behavior)
@@ -1169,7 +1188,7 @@ def render_form(
                 fk_fields.add(f"{base}_id")
 
                 # NEW: set the current selection for this dropdown
-                values_map[f"{base}_id"] = _coerce_fk_value(values, instance, base)
+                values_map[f"{base}_id"] = _coerce_fk_value(values, instance, base, prop)
 
         # Then plain columns
         for col in model_cls.__table__.columns:
diff --git a/inventory/__init__.py b/inventory/__init__.py
index 478dfed..4291f8a 100644
--- a/inventory/__init__.py
+++ b/inventory/__init__.py
@@ -1,6 +1,6 @@
 from __future__ import annotations
 
-import os
+import os, logging, sys
 
 from flask import Flask
 from jinja_markdown import MarkdownExtension
@@ -27,7 +27,7 @@ def create_app(config_cls=crudkit.DevConfig) -> Flask:
 
     init_pretty(app)
 
-    runtime = init_app(app, config=crudkit.DevConfig)
+    runtime = init_app(app, config=crudkit.DevConfig)
 
     from sqlalchemy import event
     engine = runtime.engine
diff --git a/inventory/routes/listing.py b/inventory/routes/listing.py
index 241e3b8..95301e9 100644
--- a/inventory/routes/listing.py
+++ b/inventory/routes/listing.py
@@ -1,14 +1,12 @@
-from flask import Blueprint, render_template, abort, request
+from flask import Blueprint, render_template, abort, request, url_for
 
 import crudkit
-from crudkit.api._cursor import decode_cursor, encode_cursor
 from crudkit.ui.fragments import render_table, register_template_globals
 
 bp_listing = Blueprint("listing", __name__)
 
 
 def init_listing_routes(app):
-    # Make helpers available in all templates
    register_template_globals(app)
 
     @bp_listing.get("/listing/")
@@ -21,12 +19,15 @@ def init_listing_routes(app):
         abort(404)
 
         # read query args
-        limit = request.args.get("limit", None)
-        limit = int(limit) if (limit is not None and str(limit).isdigit()) else 15
-        sort = request.args.get("sort")
-        fields_qs = request.args.get("fields")
-        cursor = request.args.get("cursor")
-        key, _desc, backward = decode_cursor(cursor)
+        # accept both per_page and limit; per_page wins if both provided
+        per_page_qs = request.args.get("per_page")
+        limit_qs = request.args.get("limit")
+        page = int(request.args.get("page", 1) or 1)
+        per_page = int(per_page_qs) if (per_page_qs and per_page_qs.isdigit()) else (
+            int(limit_qs) if (limit_qs and limit_qs.isdigit()) else 15
+        )
+        sort = request.args.get("sort")
+        fields_qs = request.args.get("fields")
 
         # base spec per model
         spec = {}
         columns = []
         row_classes = []
         if model.lower() == 'inventory':
             spec = {"fields": [
-                "label",
-                "name",
-                "barcode",
-                "serial",
-                "brand.name",
-                "model",
-                "device_type.description",
-                "condition",
-                "owner.label",
-                "location.label",
+                "label", "name", "barcode", "serial",
"brand.name", "model", + "device_type.description", "condition", "owner.label", "location.label", ]} columns = [ {"field": "label"}, @@ -60,14 +53,9 @@ def init_listing_routes(app): ] elif model.lower() == 'user': spec = {"fields": [ - "label", - "last_name", - "first_name", - "supervisor.label", - "robot.overlord", - "staff", - "active", - ], "sort": "first_name,last_name"} # default for users + "label", "last_name", "first_name", "supervisor.label", + "robot.overlord", "staff", "active", + ], "sort": "first_name,last_name"} columns = [ {"field": "label", "label": "Full Name"}, {"field": "last_name"}, @@ -86,11 +74,7 @@ def init_listing_routes(app): ] elif model.lower() == 'worklog': spec = {"fields": [ - "work_item.label", - "contact.label", - "start_time", - "end_time", - "complete", + "work_item.label", "contact.label", "start_time", "end_time", "complete", ]} columns = [ {"field": "work_item.label", "label": "Work Item", @@ -106,44 +90,106 @@ def init_listing_routes(app): {"when": {"field": "complete", "is": False}, "class": "table-danger"} ] - # Build params to feed CRUDService (flat dict; parse_filters expects flat keys) + # Build params to feed CRUDService params = dict(spec) - # overlay fields from query (?fields=...) if fields_qs: params["fields"] = [p.strip() for p in fields_qs.split(",") if p.strip()] - - # overlay sort from query (?sort=...) if sort: params["sort"] = sort - # limit semantics: 0 means "unlimited" in your service layer - params["limit"] = limit - - # forward *all other* query params as filters (flat), excluding known control keys - CONTROL_KEYS = {"limit", "cursor", "sort", "fields"} + # forward remaining query params as filters (flat), excluding control keys + CONTROL_KEYS = {"page", "per_page", "limit", "sort", "fields"} for k, v in request.args.items(): - if k in CONTROL_KEYS: - continue - if v is None or v == "": + if k in CONTROL_KEYS or v in (None, ""): continue params[k] = v service = crudkit.crud.get_service(cls) - window = service.seek_window(params, key=key, backward=backward, include_total=True) + # Use page-based pagination from the service + result = service.page(params, page=page, per_page=per_page, include_total=True) + items = result["items"] - table = render_table(window.items, columns=columns, + table = render_table(items, columns=columns, opts={"object_class": model, "row_classes": row_classes}) + def _base_params(): + keep = {} + for k, v in request.args.items(): + if k == "page" or v in (None, ""): + continue + keep[k] = v + # keep both for compatibility; per_page wins in the service anyway + keep["per_page"] = per_page + keep["limit"] = per_page + return keep + + total = int(result["total"] or 0) + pages = int(result["pages"] or 1) + page = int(result["page"] or 1) + + has_prev = page > 1 + has_next = page < pages + + base = _base_params() + prev_url = url_for("listing.show_list", model=model, **{**base, "page": max(1, page - 1)}) + next_url = url_for("listing.show_list", model=model, **{**base, "page": min(pages, page + 1)}) + + def page_url(n: int) -> str: + return url_for("listing.show_list", model=model, **{**base, "page": n}) + + def build_nav(page: int, pages: int, window: int = 2): + """ + Returns a list like: + [{'type':'page','n':1,'url':'...','active':False}, {'type':'ellipsis'}, ...] + Shows first, last, current±window, with ellipses where gaps exist. 
+ """ + if pages <= 1: + return [{'type': 'page', 'n': 1, 'url': page_url(1), 'active': True}] + + show = set([1, pages]) + for n in range(max(1, page - window), min(pages, page + window) + 1): + show.add(n) + + out = [] + last = 0 + for n in range(1, pages + 1): + if n in show: + out.append({'type': 'page', 'n': n, 'url': page_url(n), 'active': (n == page)}) + last = n + else: + # insert a single ellipsis per gap + if last != -1: + out.append({'type': 'ellipsis'}) + last = -1 + # skip the interior of the gap + # we let the for loop continue + # collapse any duplicate ellipses at ends (paranoia) + cleaned = [] + for i, item in enumerate(out): + if item['type'] == 'ellipsis' and (i == 0 or out[i-1]['type'] == 'ellipsis'): + continue + cleaned.append(item) + if cleaned and cleaned[-1]['type'] == 'ellipsis': + cleaned.pop() + if cleaned and cleaned[0]['type'] == 'ellipsis': + cleaned.pop(0) + return cleaned + pagination_ctx = { - "limit": window.limit, - "total": window.total, - "next_cursor": encode_cursor(window.last_key, list(window.order.desc), backward=False), - "prev_cursor": encode_cursor(window.first_key, list(window.order.desc), backward=True), - "sort": params.get("sort") # expose current sort to the template + "page": page, + "per_page": per_page, + "total": total, + "pages": pages, + "has_prev": has_prev, + "has_next": has_next, + "prev_url": prev_url, + "next_url": next_url, + "nav": build_nav(page, pages, window=2), # tweak window=2..3 to taste + "sort": params.get("sort") } return render_template("listing.html", model=model, table=table, pagination=pagination_ctx) - app.register_blueprint(bp_listing) \ No newline at end of file + app.register_blueprint(bp_listing) diff --git a/inventory/routes/search.py b/inventory/routes/search.py index 33d23d2..249f262 100644 --- a/inventory/routes/search.py +++ b/inventory/routes/search.py @@ -76,7 +76,7 @@ def init_search_routes(app): {"field": "updates", "format": lambda x: len(x)}, ] worklog_results = worklog_service.list({ - 'contact.label|work_item.label__icontains': q, + 'contact.label|work_item.label|updates.content__icontains': q, 'fields': [ "contact.label", "work_item.label", diff --git a/inventory/templates/listing.html b/inventory/templates/listing.html index 9483c18..23a5723 100644 --- a/inventory/templates/listing.html +++ b/inventory/templates/listing.html @@ -8,15 +8,49 @@ Inventory Manager - {{ model|title }} Listing

{{ model|title }} Listing

 {{ table | safe }}
-    {{ pagination['total'] }} records
+    Page {{ pagination.page }} of {{ pagination.pages }} · {{ pagination.total }} records
 
{% endblock %} \ No newline at end of file diff --git a/inventory/templates/submit_button.html b/inventory/templates/submit_button.html index 08cdef1..1025b01 100644 --- a/inventory/templates/submit_button.html +++ b/inventory/templates/submit_button.html @@ -90,7 +90,9 @@ if (reply.status === 'success') { if (!hasId && reply.id) { window.queueToast('Created successfully.', 'success'); - window.location.href - `/entry/${model}/${reply.id}`; + window.newDrafts = []; + window.deletedIds = []; + window.location.assign(`/entry/${model}/${reply.id}`); return; } else { window.queueToast('Updated successfully.', 'success'); @@ -101,15 +103,12 @@ if (li) li.remove(); } } + + window.newDrafts = []; + window.deletedIds = []; + window.location.replace(window.location.href); + return; } - - window.newDrafts = []; - window.deletedIds = []; - - window.location.replace(window.location.href); - return; - } else { - toastMessage(`Unable to save entry: ${reply.error}`, 'danger'); } } catch (err) { toastMessage(`Network error: ${String(err)}`, 'danger');
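Note on the approach taken in _final_filters: SQLAlchemy's relationship .any() comparator compiles to a correlated EXISTS subquery rather than an extra JOIN, so filtering through a to-many relationship no longer multiplies parent rows. A minimal, self-contained sketch of that behaviour follows; WorkLog and Update here are hypothetical stand-ins, not the inventory app's actual model definitions.

# Illustrative sketch only -- WorkLog/Update are assumed stand-in models,
# not the inventory app's real classes.
from sqlalchemy import Column, ForeignKey, Integer, String, and_, select
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()

class WorkLog(Base):
    __tablename__ = "worklog"
    id = Column(Integer, primary_key=True)
    updates = relationship("Update", back_populates="work_log")

class Update(Base):
    __tablename__ = "update"
    id = Column(Integer, primary_key=True)
    worklog_id = Column(Integer, ForeignKey("worklog.id"))
    content = Column(String)
    work_log = relationship("WorkLog", back_populates="updates")

# Joining the collection and filtering it would repeat each WorkLog once per
# matching Update; wrapping the predicate in .any() emits a correlated EXISTS
# instead, so each WorkLog appears at most once in the result.
stmt = select(WorkLog).where(
    WorkLog.updates.any(and_(Update.content.ilike("%printer%")))
)
print(stmt)  # ... WHERE EXISTS (SELECT 1 FROM "update" WHERE ...)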