mirror of https://github.com/mwisnowski/mtg_python_deckbuilder.git
synced 2025-12-16 23:50:12 +01:00
overhaul: migrated to tailwind css for css management, consolidated custom css, removed inline css, removed unneeded css, and otherwise improved page styling
This commit is contained in:
parent f1e21873e7
commit b994978f60
81 changed files with 15784 additions and 2936 deletions
299 code/web/routes/api.py (Normal file)
@@ -0,0 +1,299 @@
"""API endpoints for web services."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import threading
|
||||
from pathlib import Path
|
||||
from urllib.parse import quote_plus
|
||||
|
||||
from fastapi import APIRouter, Query
|
||||
from fastapi.responses import FileResponse, JSONResponse, RedirectResponse
|
||||
|
||||
from code.file_setup.image_cache import ImageCache
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/api")
|
||||
|
||||
# Global image cache instance
|
||||
_image_cache = ImageCache()
|
||||
|
||||
|
||||
@router.get("/images/status")
|
||||
async def get_download_status():
|
||||
"""
|
||||
Get current image download status.
|
||||
|
||||
Returns:
|
||||
JSON response with download status
|
||||
"""
|
||||
import json
|
||||
|
||||
status_file = Path("card_files/images/.download_status.json")
|
||||
|
||||
if not status_file.exists():
|
||||
# Check cache statistics if no download in progress
|
||||
stats = _image_cache.cache_statistics()
|
||||
return JSONResponse({
|
||||
"running": False,
|
||||
"stats": stats
|
||||
})
|
||||
|
||||
try:
|
||||
with status_file.open('r', encoding='utf-8') as f:
|
||||
status = json.load(f)
|
||||
return JSONResponse(status)
|
||||
except Exception as e:
|
||||
logger.warning(f"Could not read status file: {e}")
|
||||
return JSONResponse({
|
||||
"running": False,
|
||||
"error": str(e)
|
||||
})
|
||||
|
||||
|
||||
@router.get("/images/debug")
|
||||
async def get_image_debug():
|
||||
"""
|
||||
Debug endpoint to check image cache configuration.
|
||||
|
||||
Returns:
|
||||
JSON with debug information
|
||||
"""
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
base_dir = Path(_image_cache.base_dir)
|
||||
|
||||
debug_info = {
|
||||
"cache_enabled": _image_cache.is_enabled(),
|
||||
"env_var": os.getenv("CACHE_CARD_IMAGES", "not set"),
|
||||
"base_dir": str(base_dir),
|
||||
"base_dir_exists": base_dir.exists(),
|
||||
"small_dir": str(base_dir / "small"),
|
||||
"small_dir_exists": (base_dir / "small").exists(),
|
||||
"normal_dir": str(base_dir / "normal"),
|
||||
"normal_dir_exists": (base_dir / "normal").exists(),
|
||||
}
|
||||
|
||||
# Count files if directories exist
|
||||
if (base_dir / "small").exists():
|
||||
debug_info["small_count"] = len(list((base_dir / "small").glob("*.jpg")))
|
||||
if (base_dir / "normal").exists():
|
||||
debug_info["normal_count"] = len(list((base_dir / "normal").glob("*.jpg")))
|
||||
|
||||
# Test with a sample card name
|
||||
test_card = "Lightning Bolt"
|
||||
debug_info["test_card"] = test_card
|
||||
test_path_small = _image_cache.get_image_path(test_card, "small")
|
||||
test_path_normal = _image_cache.get_image_path(test_card, "normal")
|
||||
debug_info["test_path_small"] = str(test_path_small) if test_path_small else None
|
||||
debug_info["test_path_normal"] = str(test_path_normal) if test_path_normal else None
|
||||
debug_info["test_exists_small"] = test_path_small.exists() if test_path_small else False
|
||||
debug_info["test_exists_normal"] = test_path_normal.exists() if test_path_normal else False
|
||||
|
||||
return JSONResponse(debug_info)
|
||||
|
||||
|
||||
@router.get("/images/{size}/{card_name}")
|
||||
async def get_card_image(size: str, card_name: str, face: str = Query(default="front")):
|
||||
"""
|
||||
Serve card image from cache or redirect to Scryfall API.
|
||||
|
||||
Args:
|
||||
size: Image size ('small' or 'normal')
|
||||
card_name: Name of the card
|
||||
face: Which face to show ('front' or 'back') for DFC cards
|
||||
|
||||
Returns:
|
||||
FileResponse if cached locally, RedirectResponse to Scryfall API otherwise
|
||||
"""
|
||||
# Validate size parameter
|
||||
if size not in ["small", "normal"]:
|
||||
size = "normal"
|
||||
|
||||
# Check if caching is enabled
|
||||
cache_enabled = _image_cache.is_enabled()
|
||||
|
||||
# Check if image exists in cache
|
||||
if cache_enabled:
|
||||
image_path = None
|
||||
|
||||
# For DFC cards, handle front/back faces differently
|
||||
if " // " in card_name:
|
||||
if face == "back":
|
||||
# For back face, ONLY try the back face name
|
||||
back_face = card_name.split(" // ")[1].strip()
|
||||
logger.debug(f"DFC back face requested: {back_face}")
|
||||
image_path = _image_cache.get_image_path(back_face, size)
|
||||
else:
|
||||
# For front face (or unspecified), try front face name
|
||||
front_face = card_name.split(" // ")[0].strip()
|
||||
logger.debug(f"DFC front face requested: {front_face}")
|
||||
image_path = _image_cache.get_image_path(front_face, size)
|
||||
else:
|
||||
# Single-faced card, try exact name
|
||||
image_path = _image_cache.get_image_path(card_name, size)
|
||||
|
||||
if image_path and image_path.exists():
|
||||
logger.info(f"Serving cached image: {card_name} ({size}, {face})")
|
||||
return FileResponse(
|
||||
image_path,
|
||||
media_type="image/jpeg",
|
||||
headers={
|
||||
"Cache-Control": "public, max-age=31536000", # 1 year
|
||||
}
|
||||
)
|
||||
else:
|
||||
logger.debug(f"No cached image found for: {card_name} (face: {face})")
|
||||
|
||||
# Fallback to Scryfall API
|
||||
# For back face requests of DFC cards, we need the full card name
|
||||
scryfall_card_name = card_name
|
||||
scryfall_params = f"fuzzy={quote_plus(scryfall_card_name)}&format=image&version={size}"
|
||||
|
||||
# If this is a back face request, try to find the full DFC name
|
||||
if face == "back":
|
||||
try:
|
||||
from code.services.all_cards_loader import AllCardsLoader
|
||||
loader = AllCardsLoader()
|
||||
df = loader.load()
|
||||
|
||||
# Look for cards where this face name appears in the card_faces
|
||||
# The card name format is "Front // Back"
|
||||
matching = df[df['name'].str.contains(card_name, case=False, na=False, regex=False)]
|
||||
if not matching.empty:
|
||||
# Find DFC cards (containing ' // ')
|
||||
dfc_matches = matching[matching['name'].str.contains(' // ', na=False, regex=False)]
|
||||
if not dfc_matches.empty:
|
||||
# Use the first matching DFC card's full name
|
||||
full_name = dfc_matches.iloc[0]['name']
|
||||
scryfall_card_name = full_name
|
||||
# Add face parameter to Scryfall request
|
||||
scryfall_params = f"exact={quote_plus(full_name)}&format=image&version={size}&face=back"
|
||||
except Exception as e:
|
||||
logger.warning(f"Could not lookup full card name for back face '{card_name}': {e}")
|
||||
|
||||
scryfall_url = f"https://api.scryfall.com/cards/named?{scryfall_params}"
|
||||
return RedirectResponse(scryfall_url)
|
||||
|
||||
|
||||
@router.post("/images/download")
|
||||
async def download_images():
|
||||
"""
|
||||
Start downloading card images in background.
|
||||
|
||||
Returns:
|
||||
JSON response with status
|
||||
"""
|
||||
if not _image_cache.is_enabled():
|
||||
return JSONResponse({
|
||||
"ok": False,
|
||||
"message": "Image caching is disabled. Set CACHE_CARD_IMAGES=1 to enable."
|
||||
}, status_code=400)
|
||||
|
||||
# Write initial status
|
||||
try:
|
||||
status_dir = Path("card_files/images")
|
||||
status_dir.mkdir(parents=True, exist_ok=True)
|
||||
status_file = status_dir / ".download_status.json"
|
||||
|
||||
import json
|
||||
with status_file.open('w', encoding='utf-8') as f:
|
||||
json.dump({
|
||||
"running": True,
|
||||
"phase": "bulk_data",
|
||||
"message": "Downloading Scryfall bulk data...",
|
||||
"current": 0,
|
||||
"total": 0,
|
||||
"percentage": 0
|
||||
}, f)
|
||||
except Exception as e:
|
||||
logger.warning(f"Could not write initial status: {e}")
|
||||
|
||||
# Start download in background thread
|
||||
def _download_task():
|
||||
import json
|
||||
status_file = Path("card_files/images/.download_status.json")
|
||||
|
||||
try:
|
||||
# Download bulk data first
|
||||
logger.info("[IMAGE DOWNLOAD] Starting bulk data download...")
|
||||
|
||||
def bulk_progress(downloaded: int, total: int):
|
||||
"""Progress callback for bulk data download."""
|
||||
try:
|
||||
percentage = int(downloaded / total * 100) if total > 0 else 0
|
||||
with status_file.open('w', encoding='utf-8') as f:
|
||||
json.dump({
|
||||
"running": True,
|
||||
"phase": "bulk_data",
|
||||
"message": f"Downloading bulk data: {percentage}%",
|
||||
"current": downloaded,
|
||||
"total": total,
|
||||
"percentage": percentage
|
||||
}, f)
|
||||
except Exception as e:
|
||||
logger.warning(f"Could not update bulk progress: {e}")
|
||||
|
||||
_image_cache.download_bulk_data(progress_callback=bulk_progress)
|
||||
|
||||
# Download images
|
||||
logger.info("[IMAGE DOWNLOAD] Starting image downloads...")
|
||||
|
||||
def image_progress(current: int, total: int, card_name: str):
|
||||
"""Progress callback for image downloads."""
|
||||
try:
|
||||
percentage = int(current / total * 100) if total > 0 else 0
|
||||
with status_file.open('w', encoding='utf-8') as f:
|
||||
json.dump({
|
||||
"running": True,
|
||||
"phase": "images",
|
||||
"message": f"Downloading images: {card_name}",
|
||||
"current": current,
|
||||
"total": total,
|
||||
"percentage": percentage
|
||||
}, f)
|
||||
|
||||
# Log progress every 100 cards
|
||||
if current % 100 == 0:
|
||||
logger.info(f"[IMAGE DOWNLOAD] Progress: {current}/{total} ({percentage}%)")
|
||||
|
||||
except Exception as e:
|
||||
logger.warning(f"Could not update image progress: {e}")
|
||||
|
||||
stats = _image_cache.download_images(progress_callback=image_progress)
|
||||
|
||||
# Write completion status
|
||||
with status_file.open('w', encoding='utf-8') as f:
|
||||
json.dump({
|
||||
"running": False,
|
||||
"phase": "complete",
|
||||
"message": f"Download complete: {stats.get('downloaded', 0)} new images",
|
||||
"stats": stats,
|
||||
"percentage": 100
|
||||
}, f)
|
||||
|
||||
logger.info(f"[IMAGE DOWNLOAD] Complete: {stats}")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"[IMAGE DOWNLOAD] Failed: {e}", exc_info=True)
|
||||
try:
|
||||
with status_file.open('w', encoding='utf-8') as f:
|
||||
json.dump({
|
||||
"running": False,
|
||||
"phase": "error",
|
||||
"message": f"Download failed: {str(e)}",
|
||||
"percentage": 0
|
||||
}, f)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Start background thread
|
||||
thread = threading.Thread(target=_download_task, daemon=True)
|
||||
thread.start()
|
||||
|
||||
return JSONResponse({
|
||||
"ok": True,
|
||||
"message": "Image download started in background"
|
||||
}, status_code=202)
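
The file above defines a simple polling protocol: POST /api/images/download starts the background thread and answers 202, while GET /api/images/status reports the JSON progress written to .download_status.json. A minimal client-side sketch of that loop follows; the localhost base URL and the use of httpx are assumptions for illustration, not part of this commit.

# Illustrative sketch, not part of this commit. Assumes the app is served at
# http://localhost:8080 and that the httpx package is installed.
import time

import httpx

BASE = "http://localhost:8080/api"


def start_and_poll() -> None:
    # Kick off the background download; the endpoint answers immediately.
    resp = httpx.post(f"{BASE}/images/download")
    print(resp.status_code, resp.json())

    # Poll the status endpoint until the background thread reports it is done.
    while True:
        status = httpx.get(f"{BASE}/images/status").json()
        if not status.get("running"):
            print("finished:", status.get("message") or status.get("stats"))
            break
        print(f"{status.get('phase')}: {status.get('percentage')}%")
        time.sleep(2)


if __name__ == "__main__":
    start_and_poll()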
@@ -25,6 +25,7 @@ from ..services.build_utils import (
    owned_set as owned_set_helper,
    builder_present_names,
    builder_display_map,
    commander_hover_context,
)
from ..app import templates
from deck_builder import builder_constants as bc

@@ -1349,6 +1350,14 @@ async def build_new_modal(request: Request) -> HTMLResponse:
    for key in skip_keys:
        sess.pop(key, None)

    # M2: Clear commander and form selections for fresh start
    commander_keys = [
        "commander", "partner", "background", "commander_mode",
        "themes", "bracket"
    ]
    for key in commander_keys:
        sess.pop(key, None)

    theme_context = _custom_theme_context(request, sess)
    ctx = {
        "request": request,

@@ -1483,20 +1492,14 @@ async def build_new_inspect(request: Request, name: str = Query(...)) -> HTMLRes
            merged_tags.append(token)
        ctx["tags"] = merged_tags

        # Deduplicate recommended: remove any that are already in partner_tags
        partner_tags_lower = {str(tag).strip().casefold() for tag in partner_tags}
        existing_recommended = ctx.get("recommended") or []
        merged_recommended: list[str] = []
        rec_seen: set[str] = set()
        for source in (partner_tags, existing_recommended):
            for tag in source:
                token = str(tag).strip()
                if not token:
                    continue
                key = token.casefold()
                if key in rec_seen:
                    continue
                rec_seen.add(key)
                merged_recommended.append(token)
        ctx["recommended"] = merged_recommended
        deduplicated_recommended = [
            tag for tag in existing_recommended
            if str(tag).strip().casefold() not in partner_tags_lower
        ]
        ctx["recommended"] = deduplicated_recommended

        reason_map = dict(ctx.get("recommended_reasons") or {})
        for tag in partner_tags:

@@ -2907,6 +2910,11 @@ async def build_step2_get(request: Request) -> HTMLResponse:
    if is_gc and (sel_br is None or int(sel_br) < 3):
        sel_br = 3
    partner_enabled = bool(sess.get("partner_enabled") and ENABLE_PARTNER_MECHANICS)

    import logging
    logger = logging.getLogger(__name__)
    logger.info(f"Step2 GET: commander={commander}, partner_enabled={partner_enabled}, secondary={sess.get('secondary_commander')}")

    context = {
        "request": request,
        "commander": {"name": commander},

@@ -2940,7 +2948,22 @@ async def build_step2_get(request: Request) -> HTMLResponse:
    )
    partner_tags = context.pop("partner_theme_tags", None)
    if partner_tags:
        import logging
        logger = logging.getLogger(__name__)
        context["tags"] = partner_tags
        # Deduplicate recommended tags: remove any that are already in partner_tags
        partner_tags_lower = {str(tag).strip().casefold() for tag in partner_tags}
        original_recommended = context.get("recommended", [])
        deduplicated_recommended = [
            tag for tag in original_recommended
            if str(tag).strip().casefold() not in partner_tags_lower
        ]
        logger.info(
            f"Step2: partner_tags={len(partner_tags)}, "
            f"original_recommended={len(original_recommended)}, "
            f"deduplicated_recommended={len(deduplicated_recommended)}"
        )
        context["recommended"] = deduplicated_recommended
    resp = templates.TemplateResponse("build/_step2.html", context)
    resp.set_cookie("sid", sid, httponly=True, samesite="lax")
    return resp
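
Both of the hunks above apply the same casefold-based filter: any recommended tag that also appears in partner_tags, ignoring case and surrounding whitespace, is dropped instead of being merged back in. A standalone illustration follows; the tag values are made up.

# Standalone illustration of the dedup rule above; the tag values are made up.
partner_tags = ["Lifegain", "+1/+1 Counters"]
recommended = ["lifegain", "Tokens", "+1/+1 counters", "Blink"]

partner_tags_lower = {t.strip().casefold() for t in partner_tags}
deduplicated = [t for t in recommended if t.strip().casefold() not in partner_tags_lower]

print(deduplicated)  # ['Tokens', 'Blink']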
@@ -3266,6 +3289,57 @@ async def build_step3_get(request: Request) -> HTMLResponse:
    sess["last_step"] = 3
    defaults = orch.ideal_defaults()
    values = sess.get("ideals") or defaults

    # Check if any skip flags are enabled to show skeleton automation page
    skip_flags = {
        "skip_lands": "land selection",
        "skip_to_misc": "land selection",
        "skip_basics": "basic lands",
        "skip_staples": "staple lands",
        "skip_kindred": "kindred lands",
        "skip_fetches": "fetch lands",
        "skip_duals": "dual lands",
        "skip_triomes": "triome lands",
        "skip_all_creatures": "creature selection",
        "skip_creature_primary": "primary creatures",
        "skip_creature_secondary": "secondary creatures",
        "skip_creature_fill": "creature fills",
        "skip_all_spells": "spell selection",
        "skip_ramp": "ramp spells",
        "skip_removal": "removal spells",
        "skip_wipes": "board wipes",
        "skip_card_advantage": "card advantage spells",
        "skip_protection": "protection spells",
        "skip_spell_fill": "spell fills",
    }

    active_skips = [desc for key, desc in skip_flags.items() if sess.get(key, False)]

    if active_skips:
        # Show skeleton automation page with auto-submit
        automation_parts = []
        if any("land" in s for s in active_skips):
            automation_parts.append("lands")
        if any("creature" in s for s in active_skips):
            automation_parts.append("creatures")
        if any("spell" in s for s in active_skips):
            automation_parts.append("spells")

        automation_message = f"Applying default values for {', '.join(automation_parts)}..."

        resp = templates.TemplateResponse(
            "build/_step3_skeleton.html",
            {
                "request": request,
                "defaults": defaults,
                "commander": sess.get("commander"),
                "automation_message": automation_message,
            },
        )
        resp.set_cookie("sid", sid, httponly=True, samesite="lax")
        return resp

    # No skips enabled, show normal form
    resp = templates.TemplateResponse(
        "build/_step3.html",
        {
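
For reference, a worked example of the skip-flag handling in the hunk above; the session values are made up and skip_flags is a trimmed copy of the mapping defined there.

# Worked example of the skip-flag logic above; sess is made up and skip_flags
# is a trimmed copy of the mapping in the hunk.
skip_flags = {
    "skip_lands": "land selection",
    "skip_ramp": "ramp spells",
    "skip_creature_fill": "creature fills",
}
sess = {"skip_lands": True, "skip_ramp": True}

active_skips = [desc for key, desc in skip_flags.items() if sess.get(key, False)]
automation_parts = []
if any("land" in s for s in active_skips):
    automation_parts.append("lands")
if any("creature" in s for s in active_skips):
    automation_parts.append("creatures")
if any("spell" in s for s in active_skips):
    automation_parts.append("spells")

print(f"Applying default values for {', '.join(automation_parts)}...")
# Applying default values for lands, spells...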
@@ -3844,6 +3918,16 @@ async def build_step5_summary(request: Request, token: int = Query(0)) -> HTMLRe
    ctx["synergies"] = synergies
    ctx["summary_ready"] = True
    ctx["summary_token"] = active_token

    # Add commander hover context for color identity and theme tags
    hover_meta = commander_hover_context(
        commander_name=ctx.get("commander"),
        deck_tags=sess.get("tags"),
        summary=summary_data,
        combined=ctx.get("combined_commander"),
    )
    ctx.update(hover_meta)

    response = templates.TemplateResponse("partials/deck_summary.html", ctx)
    response.set_cookie("sid", sid, httponly=True, samesite="lax")
    return response

@@ -195,7 +195,11 @@ async def download_github():
@router.get("/", response_class=HTMLResponse)
async def setup_index(request: Request) -> HTMLResponse:
    import code.settings as settings
    from code.file_setup.image_cache import ImageCache

    image_cache = ImageCache()
    return templates.TemplateResponse("setup/index.html", {
        "request": request,
        "similarity_enabled": settings.ENABLE_CARD_SIMILARITIES
        "similarity_enabled": settings.ENABLE_CARD_SIMILARITIES,
        "image_cache_enabled": image_cache.is_enabled()
    })

@@ -291,28 +291,6 @@ def _diag_enabled() -> bool:
    return (os.getenv("WEB_THEME_PICKER_DIAGNOSTICS") or "").strip().lower() in {"1", "true", "yes", "on"}


@router.get("/picker", response_class=HTMLResponse)
async def theme_picker_page(request: Request):
    """Render the theme picker shell.

    Dynamic data (list, detail) loads via fragment endpoints. We still inject
    known archetype list for the filter select so it is populated on initial load.
    """
    archetypes: list[str] = []
    try:
        idx = load_index()
        archetypes = sorted({t.deck_archetype for t in idx.catalog.themes if t.deck_archetype})  # type: ignore[arg-type]
    except Exception:
        archetypes = []
    return _templates.TemplateResponse(
        "themes/picker.html",
        {
            "request": request,
            "archetypes": archetypes,
            "theme_picker_diagnostics": _diag_enabled(),
        },
    )


@router.get("/metrics")
async def theme_metrics():
    if not _diag_enabled():

@@ -746,89 +724,9 @@ async def api_theme_preview(
    return JSONResponse({"ok": True, "preview": payload})


@router.get("/fragment/preview/{theme_id}", response_class=HTMLResponse)
async def theme_preview_fragment(
    theme_id: str,
    limit: int = Query(12, ge=1, le=30),
    colors: str | None = None,
    commander: str | None = None,
    suppress_curated: bool = Query(False, description="If true, omit curated example cards/commanders from the sample area (used on detail page to avoid duplication)"),
    minimal: bool = Query(False, description="Minimal inline variant (no header/controls/rationale – used in detail page collapsible preview)"),
    request: Request = None,
):
    """Return HTML fragment for theme preview with caching headers.

    Adds ETag and Last-Modified headers (no strong caching – enables conditional GET / 304).
    ETag composed of catalog index etag + stable hash of preview payload (theme id + limit + commander).
    """
    try:
        payload = get_theme_preview(theme_id, limit=limit, colors=colors, commander=commander)
    except KeyError:
        return HTMLResponse("<div class='error'>Theme not found.</div>", status_code=404)
    # Load example commanders (authoritative list) from catalog detail for legality instead of inferring
    example_commanders: list[str] = []
    synergy_commanders: list[str] = []
    try:
        idx = load_index()
        slug = slugify(theme_id)
        entry = idx.slug_to_entry.get(slug)
        if entry:
            detail = project_detail(slug, entry, idx.slug_to_yaml, uncapped=False)
            example_commanders = [c for c in (detail.get("example_commanders") or []) if isinstance(c, str)]
            synergy_commanders_raw = [c for c in (detail.get("synergy_commanders") or []) if isinstance(c, str)]
            # De-duplicate any overlap with example commanders while preserving order
            seen = set(example_commanders)
            for c in synergy_commanders_raw:
                if c not in seen:
                    synergy_commanders.append(c)
                    seen.add(c)
    except Exception:
        example_commanders = []
        synergy_commanders = []
    # Build ETag (use catalog etag + hash of core identifying fields to reflect underlying data drift)
    import hashlib
    import json as _json
    import time as _time
    try:
        idx = load_index()
        catalog_tag = idx.etag
    except Exception:
        catalog_tag = "unknown"
    hash_src = _json.dumps({
        "theme": theme_id,
        "limit": limit,
        "commander": commander,
        "sample": payload.get("sample", [])[:3],  # small slice for stability & speed
        "v": 1,
    }, sort_keys=True).encode("utf-8")
    etag = "pv-" + hashlib.sha256(hash_src).hexdigest()[:20] + f"-{catalog_tag}"
    # Conditional request support
    if request is not None:
        inm = request.headers.get("if-none-match")
        if inm and inm == etag:
            # 304 Not Modified – FastAPI HTMLResponse with empty body & headers
            resp = HTMLResponse(status_code=304, content="")
            resp.headers["ETag"] = etag
            from email.utils import formatdate as _fmtdate
            resp.headers["Last-Modified"] = _fmtdate(timeval=_time.time(), usegmt=True)
            resp.headers["Cache-Control"] = "no-cache"
            return resp
    ctx = {
        "request": request,
        "preview": payload,
        "example_commanders": example_commanders,
        "synergy_commanders": synergy_commanders,
        "theme_id": theme_id,
        "etag": etag,
        "suppress_curated": suppress_curated,
        "minimal": minimal,
    }
    resp = _templates.TemplateResponse("themes/preview_fragment.html", ctx)
    resp.headers["ETag"] = etag
    from email.utils import formatdate as _fmtdate
    resp.headers["Last-Modified"] = _fmtdate(timeval=_time.time(), usegmt=True)
    resp.headers["Cache-Control"] = "no-cache"
    return resp


@router.get("/fragment/list", response_class=HTMLResponse)


# --- Preview Export Endpoints (CSV / JSON) ---